From de756c1f5c11ffdd447db05b76ed17c5577465de Mon Sep 17 00:00:00 2001
From: Przemyslaw Gomulka
Date: Wed, 9 Feb 2022 11:01:29 +0100
Subject: [PATCH 001/167] Add log4j-slf4j-impl to repository-azure (#83661)

The repository-azure plugin uses azure-core, which depends on slf4j for
logging. SLF4J requires a bridge to log into log4j2; otherwise it emits
a warning to STDOUT and sends all further logs to STDOUT as well.
This commit adds the missing log4j-slf4j-impl, which is a bridge between
the slf4j API and the log4j implementation.

closes #83652
---
 docs/changelog/83661.yaml                     |   6 +
 modules/repository-azure/build.gradle         |  10 +-
 .../licenses/log4j-slf4j-impl-2.17.1.jar.sha1 |   1 +
 .../licenses/log4j-slf4j-impl-LICENSE.txt     | 202 ++++++++++++++++++
 .../licenses/log4j-slf4j-impl-NOTICE.txt      |   8 +
 5 files changed, 222 insertions(+), 5 deletions(-)
 create mode 100644 docs/changelog/83661.yaml
 create mode 100644 modules/repository-azure/licenses/log4j-slf4j-impl-2.17.1.jar.sha1
 create mode 100644 modules/repository-azure/licenses/log4j-slf4j-impl-LICENSE.txt
 create mode 100644 modules/repository-azure/licenses/log4j-slf4j-impl-NOTICE.txt

diff --git a/docs/changelog/83661.yaml b/docs/changelog/83661.yaml
new file mode 100644
index 0000000000000..e062bdc805e3f
--- /dev/null
+++ b/docs/changelog/83661.yaml
@@ -0,0 +1,6 @@
+pr: 83661
+summary: Add log4j-slf4j-impl to repository-azure
+area: "Packaging"
+type: bug
+issues:
+ - 83652

diff --git a/modules/repository-azure/build.gradle b/modules/repository-azure/build.gradle
index 8da8646fdcc83..ee7a0b39b0696 100644
--- a/modules/repository-azure/build.gradle
+++ b/modules/repository-azure/build.gradle
@@ -82,7 +82,9 @@ dependencies {
   api "io.projectreactor:reactor-core:${versions.reactorCore}"
   api "org.reactivestreams:reactive-streams:${versions.reactiveStreams}"
 
-  api "org.slf4j:slf4j-api:${versions.slf4j}"
+  runtimeOnly("org.slf4j:slf4j-api:${versions.slf4j}")
+  runtimeOnly("org.apache.logging.log4j:log4j-slf4j-impl:${versions.log4j}")
+
   testImplementation project(':test:fixtures:azure-fixture')
 }
 
@@ -268,10 +270,8 @@ tasks.named("thirdPartyAudit").configure {
     // from com.ctc.wstx.shaded.msv_core.driver.textui.Driver (woodstox-core)
     'com.sun.org.apache.xml.internal.resolver.Catalog',
     'com.sun.org.apache.xml.internal.resolver.tools.CatalogResolver',
-
-    'org.slf4j.impl.StaticLoggerBinder',
-    'org.slf4j.impl.StaticMDCBinder',
-    'org.slf4j.impl.StaticMarkerBinder',
+    // [missing classes] SLF4j includes an optional class that depends on an extension class. see Log4jLogger#createConverter
+    'org.slf4j.ext.EventData'
   )
 
   ignoreViolations(

diff --git a/modules/repository-azure/licenses/log4j-slf4j-impl-2.17.1.jar.sha1 b/modules/repository-azure/licenses/log4j-slf4j-impl-2.17.1.jar.sha1
new file mode 100644
index 0000000000000..894ed8d886c3f
--- /dev/null
+++ b/modules/repository-azure/licenses/log4j-slf4j-impl-2.17.1.jar.sha1
@@ -0,0 +1 @@
+84692d456bcce689355d33d68167875e486954dd
\ No newline at end of file

diff --git a/modules/repository-azure/licenses/log4j-slf4j-impl-LICENSE.txt b/modules/repository-azure/licenses/log4j-slf4j-impl-LICENSE.txt
new file mode 100644
index 0000000000000..6279e5206de13
--- /dev/null
+++ b/modules/repository-azure/licenses/log4j-slf4j-impl-LICENSE.txt
@@ -0,0 +1,202 @@
+
+                                 Apache License
+                           Version 2.0, January 2004
+                        http://www.apache.org/licenses/
+
+   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+   1. Definitions.
+ + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 1999-2005 The Apache Software Foundation + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/modules/repository-azure/licenses/log4j-slf4j-impl-NOTICE.txt b/modules/repository-azure/licenses/log4j-slf4j-impl-NOTICE.txt
new file mode 100644
index 0000000000000..ea99ef1d4726b
--- /dev/null
+++ b/modules/repository-azure/licenses/log4j-slf4j-impl-NOTICE.txt
@@ -0,0 +1,8 @@
+
+Apache Log4j SLF4J Binding
+Copyright 1999-2017 The Apache Software Foundation
+
+This product includes software developed at
+The Apache Software Foundation (http://www.apache.org/).
+
+

From 2982591e6d8ee20fa5d7d78da1d0b456bbe056a4 Mon Sep 17 00:00:00 2001
From: Ievgen Degtiarenko
Date: Wed, 9 Feb 2022 12:42:43 +0100
Subject: [PATCH 002/167] Add Health Indicator Plugin (#83205)

This change introduces an initial model for health indicators, as well
as an extension point that allows health indicators to be created
outside of the server module.

Co-authored-by: Tanguy Leroux
---
 docs/changelog/83205.yaml                     |   5 +
 .../rest-api-spec/test/health/10_basic.yml    |   1 -
 ...anceHasMasterHealthIndicatorServiceIT.java |  77 ++++++++
 .../health/GetHealthActionIT.java             | 174 +++++++++++-------
 ...stanceHasMasterHealthIndicatorService.java |  73 ++++++++
 .../elasticsearch/health/GetHealthAction.java |  53 ++++--
 .../health/HealthComponentResult.java         |  43 +----
 .../health/HealthIndicatorService.java        |   2 +-
 .../elasticsearch/health/HealthService.java   |  54 ++++++
 .../health/ServerHealthComponents.java        |   7 +
 .../controller/ClusterCoordination.java       |  67 -------
 .../java/org/elasticsearch/node/Node.java     |  15 ++
 .../elasticsearch/plugins/HealthPlugin.java   |  21 +++
 .../org/elasticsearch/plugins/Plugin.java     |   1 +
 .../health/HealthComponentResultTests.java    |  37 ----
 .../health/HealthServiceTests.java            |  58 ++++++
 .../elasticsearch/test/ESIntegTestCase.java   |   5 +
 .../xpack/ilm/IndexLifecycle.java             |  15 +-
 18 files changed, 489 insertions(+), 219 deletions(-)
 create mode 100644 docs/changelog/83205.yaml
 create mode 100644 server/src/internalClusterTest/java/org/elasticsearch/cluster/coordination/InstanceHasMasterHealthIndicatorServiceIT.java
 create mode 100644 server/src/main/java/org/elasticsearch/cluster/coordination/InstanceHasMasterHealthIndicatorService.java
 create mode 100644 server/src/main/java/org/elasticsearch/health/HealthService.java
 delete mode 100644 server/src/main/java/org/elasticsearch/health/components/controller/ClusterCoordination.java
 create mode 100644 server/src/main/java/org/elasticsearch/plugins/HealthPlugin.java
 delete mode 100644 server/src/test/java/org/elasticsearch/health/HealthComponentResultTests.java
 create mode 100644 server/src/test/java/org/elasticsearch/health/HealthServiceTests.java

diff --git a/docs/changelog/83205.yaml b/docs/changelog/83205.yaml
new file mode 100644
index 0000000000000..5376a79ad9e19
--- /dev/null
+++ b/docs/changelog/83205.yaml
@@ -0,0 +1,5 @@
+pr: 83205
+summary: Add Health Indicator Plugin
+area: Health
+type: feature
+issues: []

diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/health/10_basic.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/health/10_basic.yml
index bb1b6db2f2d4c..7cad7c2471710 100644
--- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/health/10_basic.yml
+++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/health/10_basic.yml
@@ -17,4 +17,3 @@
   - is_true: components.cluster_coordination.indicators.instance_has_master.details.coordinating_node.name
   - is_true: components.cluster_coordination.indicators.instance_has_master.details.master_node.node_id
   - is_true:
components.cluster_coordination.indicators.instance_has_master.details.master_node.name - - match: { components.snapshots.status: "GREEN" } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/cluster/coordination/InstanceHasMasterHealthIndicatorServiceIT.java b/server/src/internalClusterTest/java/org/elasticsearch/cluster/coordination/InstanceHasMasterHealthIndicatorServiceIT.java new file mode 100644 index 0000000000000..c99b090e958f2 --- /dev/null +++ b/server/src/internalClusterTest/java/org/elasticsearch/cluster/coordination/InstanceHasMasterHealthIndicatorServiceIT.java @@ -0,0 +1,77 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.cluster.coordination; + +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.health.GetHealthAction; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.test.disruption.NetworkDisruption; +import org.elasticsearch.test.transport.MockTransportService; + +import java.util.Collection; +import java.util.List; +import java.util.Set; + +import static org.elasticsearch.cluster.coordination.InstanceHasMasterHealthIndicatorService.NAME; +import static org.elasticsearch.health.HealthStatus.GREEN; +import static org.elasticsearch.health.HealthStatus.RED; +import static org.elasticsearch.health.ServerHealthComponents.CLUSTER_COORDINATION; +import static org.hamcrest.Matchers.equalTo; + +@ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.SUITE) +public class InstanceHasMasterHealthIndicatorServiceIT extends ESIntegTestCase { + + @Override + protected Collection> nodePlugins() { + return List.of(MockTransportService.TestPlugin.class); + } + + @Override + protected Settings nodeSettings(int nodeOrdinal, Settings otherSettings) { + return Settings.builder() + .put(super.nodeSettings(nodeOrdinal, otherSettings)) + .put(NoMasterBlockService.NO_MASTER_BLOCK_SETTING.getKey(), "all") + .build(); + } + + public void testGetHealthWhenMasterIsElected() throws Exception { + var client = client(); + + var response = client.execute(GetHealthAction.INSTANCE, new GetHealthAction.Request()).get(); + + assertThat(response.findComponent(CLUSTER_COORDINATION).findIndicator(NAME).status(), equalTo(GREEN)); + } + + public void testGetHealthWhenNoMaster() throws Exception { + var client = internalCluster().coordOnlyNodeClient(); + + var disruptionScheme = new NetworkDisruption( + new NetworkDisruption.IsolateAllNodes(Set.of(internalCluster().getNodeNames())), + NetworkDisruption.DISCONNECT + ); + + internalCluster().setDisruptionScheme(disruptionScheme); + disruptionScheme.startDisrupting(); + + try { + assertBusy(() -> { + ClusterState state = client.admin().cluster().prepareState().setLocal(true).execute().actionGet().getState(); + assertTrue(state.blocks().hasGlobalBlockWithId(NoMasterBlockService.NO_MASTER_BLOCK_ID)); + + var response = client.execute(GetHealthAction.INSTANCE, new GetHealthAction.Request()).get(); + + assertThat(response.findComponent(CLUSTER_COORDINATION).findIndicator(NAME).status(), equalTo(RED)); + }); + } finally { + internalCluster().clearDisruptionScheme(true); + } + } 
+} diff --git a/server/src/internalClusterTest/java/org/elasticsearch/health/GetHealthActionIT.java b/server/src/internalClusterTest/java/org/elasticsearch/health/GetHealthActionIT.java index a4426ef074350..2eadb96084c5a 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/health/GetHealthActionIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/health/GetHealthActionIT.java @@ -8,93 +8,143 @@ package org.elasticsearch.health; +import org.apache.lucene.util.SetOnce; import org.elasticsearch.client.internal.Client; -import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.coordination.NoMasterBlockService; +import org.elasticsearch.cluster.ClusterName; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.health.components.controller.ClusterCoordination; +import org.elasticsearch.env.Environment; +import org.elasticsearch.env.NodeEnvironment; +import org.elasticsearch.plugins.HealthPlugin; import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.repositories.RepositoriesService; +import org.elasticsearch.script.ScriptService; import org.elasticsearch.test.ESIntegTestCase; -import org.elasticsearch.test.disruption.NetworkDisruption; -import org.elasticsearch.test.transport.MockTransportService; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.watcher.ResourceWatcherService; +import org.elasticsearch.xcontent.NamedXContentRegistry; -import java.util.Arrays; import java.util.Collection; -import java.util.Collections; -import java.util.HashSet; +import java.util.List; +import java.util.function.Supplier; -@ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.SUITE) +import static org.elasticsearch.common.util.CollectionUtils.appendToCopy; +import static org.hamcrest.Matchers.equalTo; + +@ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.TEST) public class GetHealthActionIT extends ESIntegTestCase { @Override protected Collection> nodePlugins() { - return Collections.singletonList(MockTransportService.TestPlugin.class); + return appendToCopy(super.nodePlugins(), TestHealthPlugin.class); } - @Override - protected Settings nodeSettings(int nodeOrdinal, Settings otherSettings) { - return Settings.builder() - .put(super.nodeSettings(nodeOrdinal, otherSettings)) - .put(NoMasterBlockService.NO_MASTER_BLOCK_SETTING.getKey(), "all") - .build(); - } + public static final Setting TEST_HEALTH_STATUS = new Setting<>( + "test.health.status", + "GREEN", + HealthStatus::valueOf, + Setting.Property.NodeScope, + Setting.Property.Dynamic + ); - public void testGetHealth() throws Exception { - GetHealthAction.Response response = client().execute(GetHealthAction.INSTANCE, new GetHealthAction.Request()).get(); - assertEquals(cluster().getClusterName(), response.getClusterName().value()); - assertEquals(HealthStatus.GREEN, response.getStatus()); + public static final class TestHealthPlugin extends Plugin implements HealthPlugin { + + private final SetOnce healthIndicatorService = new SetOnce<>(); - assertEquals(2, response.getComponents().size()); + @Override + public List> getSettings() { + return List.of(TEST_HEALTH_STATUS); + } - for (HealthComponentResult component : response.getComponents()) { - assertEquals(HealthStatus.GREEN, 
component.status()); + @Override + public Collection createComponents( + Client client, + ClusterService clusterService, + ThreadPool threadPool, + ResourceWatcherService resourceWatcherService, + ScriptService scriptService, + NamedXContentRegistry xContentRegistry, + Environment environment, + NodeEnvironment nodeEnvironment, + NamedWriteableRegistry namedWriteableRegistry, + IndexNameExpressionResolver indexNameExpressionResolver, + Supplier repositoriesServiceSupplier + ) { + var service = new FixedStatusHealthIndicatorService(clusterService); + healthIndicatorService.set(service); + return List.of(service); } - HealthComponentResult controller = response.getComponents() - .stream() - .filter(c -> c.name().equals("cluster_coordination")) - .findAny() - .orElseThrow(); - assertEquals(1, controller.indicators().size()); - HealthIndicatorResult nodeDoesNotHaveMaster = controller.indicators().get(ClusterCoordination.INSTANCE_HAS_MASTER_NAME); - assertEquals(ClusterCoordination.INSTANCE_HAS_MASTER_NAME, nodeDoesNotHaveMaster.name()); - assertEquals(HealthStatus.GREEN, nodeDoesNotHaveMaster.status()); - assertEquals(ClusterCoordination.INSTANCE_HAS_MASTER_GREEN_SUMMARY, nodeDoesNotHaveMaster.summary()); + @Override + public Collection getHealthIndicatorServices() { + return List.of(healthIndicatorService.get()); + } } - public void testGetHealthInstanceNoMaster() throws Exception { - // builds the coordinating-only client before disrupting all nodes - final Client client = internalCluster().coordOnlyNodeClient(); + /** + * This indicator could be used to pre-define health of the cluster with {@code TEST_HEALTH_STATUS} property + * and return it via health API. + */ + public static final class FixedStatusHealthIndicatorService implements HealthIndicatorService { + + private final ClusterService clusterService; + + public FixedStatusHealthIndicatorService(ClusterService clusterService) { + this.clusterService = clusterService; + } + + @Override + public String name() { + return "test_indicator"; + } + + @Override + public String component() { + return "test_component"; + } + + @Override + public HealthIndicatorResult calculate() { + var status = clusterService.getClusterSettings().get(TEST_HEALTH_STATUS); + return createIndicator(status, "Health is set to [" + status + "] by test plugin", HealthIndicatorDetails.EMPTY); + } + } - final NetworkDisruption disruptionScheme = new NetworkDisruption( - new NetworkDisruption.IsolateAllNodes(new HashSet<>(Arrays.asList(internalCluster().getNodeNames()))), - NetworkDisruption.DISCONNECT - ); + public void testGetHealth() throws Exception { - internalCluster().setDisruptionScheme(disruptionScheme); - disruptionScheme.startDisrupting(); + var client = client(); + var status = randomFrom(HealthStatus.values()); try { - assertBusy(() -> { - ClusterState state = client.admin().cluster().prepareState().setLocal(true).execute().actionGet().getState(); - assertTrue(state.blocks().hasGlobalBlockWithId(NoMasterBlockService.NO_MASTER_BLOCK_ID)); - - GetHealthAction.Response response = client.execute(GetHealthAction.INSTANCE, new GetHealthAction.Request()).get(); - assertEquals(HealthStatus.RED, response.getStatus()); - assertEquals(2, response.getComponents().size()); - HealthComponentResult controller = response.getComponents() - .stream() - .filter(c -> c.name().equals("cluster_coordination")) - .findAny() - .orElseThrow(); - assertEquals(1, controller.indicators().size()); - HealthIndicatorResult instanceHasMaster = 
controller.indicators().get(ClusterCoordination.INSTANCE_HAS_MASTER_NAME); - assertEquals(ClusterCoordination.INSTANCE_HAS_MASTER_NAME, instanceHasMaster.name()); - assertEquals(HealthStatus.RED, instanceHasMaster.status()); - assertEquals(ClusterCoordination.INSTANCE_HAS_MASTER_RED_SUMMARY, instanceHasMaster.summary()); - }); + updateClusterSettings(Settings.builder().put(TEST_HEALTH_STATUS.getKey(), status)); + + var response = client.execute(GetHealthAction.INSTANCE, new GetHealthAction.Request()).get(); + + assertThat(response.getStatus(), equalTo(status)); + assertThat(response.getClusterName(), equalTo(new ClusterName(cluster().getClusterName()))); + assertThat( + response.findComponent("test_component"), + equalTo( + new HealthComponentResult( + "test_component", + status, + List.of( + new HealthIndicatorResult( + "test_indicator", + "test_component", + status, + "Health is set to [" + status + "] by test plugin", + HealthIndicatorDetails.EMPTY + ) + ) + ) + ) + ); } finally { - internalCluster().clearDisruptionScheme(true); + updateClusterSettings(Settings.builder().putNull(TEST_HEALTH_STATUS.getKey())); } } } diff --git a/server/src/main/java/org/elasticsearch/cluster/coordination/InstanceHasMasterHealthIndicatorService.java b/server/src/main/java/org/elasticsearch/cluster/coordination/InstanceHasMasterHealthIndicatorService.java new file mode 100644 index 0000000000000..5bcf24992cbed --- /dev/null +++ b/server/src/main/java/org/elasticsearch/cluster/coordination/InstanceHasMasterHealthIndicatorService.java @@ -0,0 +1,73 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.cluster.coordination; + +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.health.HealthIndicatorResult; +import org.elasticsearch.health.HealthIndicatorService; +import org.elasticsearch.health.HealthStatus; + +import static org.elasticsearch.health.ServerHealthComponents.CLUSTER_COORDINATION; + +public class InstanceHasMasterHealthIndicatorService implements HealthIndicatorService { + + public static final String NAME = "instance_has_master"; + + private static final String INSTANCE_HAS_MASTER_GREEN_SUMMARY = "Health coordinating instance has a master node."; + private static final String INSTANCE_HAS_MASTER_RED_SUMMARY = "Health coordinating instance does not have a master node."; + + private final ClusterService clusterService; + + public InstanceHasMasterHealthIndicatorService(ClusterService clusterService) { + this.clusterService = clusterService; + } + + @Override + public String name() { + return NAME; + } + + @Override + public String component() { + return CLUSTER_COORDINATION; + } + + @Override + public HealthIndicatorResult calculate() { + + DiscoveryNode coordinatingNode = clusterService.localNode(); + ClusterState clusterState = clusterService.state(); + DiscoveryNodes nodes = clusterState.nodes(); + DiscoveryNode masterNode = nodes.getMasterNode(); + + HealthStatus instanceHasMasterStatus = masterNode == null ? 
HealthStatus.RED : HealthStatus.GREEN; + String instanceHasMasterSummary = masterNode == null ? INSTANCE_HAS_MASTER_RED_SUMMARY : INSTANCE_HAS_MASTER_GREEN_SUMMARY; + + return createIndicator(instanceHasMasterStatus, instanceHasMasterSummary, (builder, params) -> { + builder.startObject(); + builder.object("coordinating_node", xContentBuilder -> { + builder.field("node_id", coordinatingNode.getId()); + builder.field("name", coordinatingNode.getName()); + }); + builder.object("master_node", xContentBuilder -> { + if (masterNode != null) { + builder.field("node_id", masterNode.getId()); + builder.field("name", masterNode.getName()); + } else { + builder.nullField("node_id"); + builder.nullField("name"); + } + }); + return builder.endObject(); + }); + } +} diff --git a/server/src/main/java/org/elasticsearch/health/GetHealthAction.java b/server/src/main/java/org/elasticsearch/health/GetHealthAction.java index 7fc0c97579540..309643656802b 100644 --- a/server/src/main/java/org/elasticsearch/health/GetHealthAction.java +++ b/server/src/main/java/org/elasticsearch/health/GetHealthAction.java @@ -15,12 +15,10 @@ import org.elasticsearch.action.ActionType; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.cluster.ClusterName; -import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.health.components.controller.ClusterCoordination; import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xcontent.ToXContent; @@ -28,9 +26,9 @@ import org.elasticsearch.xcontent.XContentBuilder; import java.io.IOException; -import java.util.Arrays; -import java.util.Collections; import java.util.List; +import java.util.NoSuchElementException; +import java.util.Objects; public class GetHealthAction extends ActionType { @@ -69,6 +67,13 @@ public List getComponents() { return components; } + public HealthComponentResult findComponent(String name) { + return components.stream() + .filter(c -> Objects.equals(c.name(), name)) + .findFirst() + .orElseThrow(() -> new NoSuchElementException("Component [" + name + "] is not found")); + } + @Override public void writeTo(StreamOutput out) throws IOException { throw new AssertionError("GetHealthAction should not be sent over the wire."); @@ -87,6 +92,28 @@ public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params par builder.endObject(); return builder.endObject(); } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + Response response = (Response) o; + return clusterName.equals(response.clusterName) && status == response.status && components.equals(response.components); + } + + @Override + public int hashCode() { + return Objects.hash(clusterName, status, components); + } + + @Override + public String toString() { + return "Response{clusterName=" + clusterName + ", status=" + status + ", components=" + components + '}'; + } } public static class Request extends ActionRequest { @@ -100,27 +127,23 @@ public ActionRequestValidationException validate() { public static class TransportAction extends org.elasticsearch.action.support.TransportAction { private final ClusterService clusterService; + private final HealthService healthService; @Inject 
public TransportAction( - final ActionFilters actionFilters, - final TransportService transportService, - final ClusterService clusterService + ActionFilters actionFilters, + TransportService transportService, + ClusterService clusterService, + HealthService healthService ) { super(NAME, actionFilters, transportService.getTaskManager()); this.clusterService = clusterService; + this.healthService = healthService; } @Override protected void doExecute(Task task, Request request, ActionListener listener) { - final ClusterState clusterState = clusterService.state(); - final HealthComponentResult controller = ClusterCoordination.createClusterCoordinationComponent( - clusterService.localNode(), - clusterState - ); - final HealthComponentResult snapshots = new HealthComponentResult("snapshots", HealthStatus.GREEN, Collections.emptyMap()); - final ClusterName clusterName = clusterService.getClusterName(); - listener.onResponse(new Response(clusterName, Arrays.asList(controller, snapshots))); + listener.onResponse(new Response(clusterService.getClusterName(), healthService.getHealth())); } } } diff --git a/server/src/main/java/org/elasticsearch/health/HealthComponentResult.java b/server/src/main/java/org/elasticsearch/health/HealthComponentResult.java index 645b53960c965..625ae6fb0a926 100644 --- a/server/src/main/java/org/elasticsearch/health/HealthComponentResult.java +++ b/server/src/main/java/org/elasticsearch/health/HealthComponentResult.java @@ -12,41 +12,17 @@ import org.elasticsearch.xcontent.XContentBuilder; import java.io.IOException; -import java.util.Collection; import java.util.List; -import java.util.Map; -import java.util.TreeMap; -import java.util.stream.Collectors; +import java.util.NoSuchElementException; +import java.util.Objects; -import static java.util.stream.Collectors.collectingAndThen; -import static java.util.stream.Collectors.groupingBy; -import static java.util.stream.Collectors.toList; +public record HealthComponentResult(String name, HealthStatus status, List indicators) implements ToXContentObject { -public record HealthComponentResult(String name, HealthStatus status, Map indicators) - implements - ToXContentObject { - - public static Collection createComponentsFromIndicators(Collection indicators) { + public HealthIndicatorResult findIndicator(String name) { return indicators.stream() - .collect( - groupingBy( - HealthIndicatorResult::component, - TreeMap::new, - collectingAndThen(toList(), HealthComponentResult::createComponentFromIndicators) - ) - ) - .values(); - } - - private static HealthComponentResult createComponentFromIndicators(List indicators) { - assert indicators.size() > 0 : "Component should not be non empty"; - assert indicators.stream().map(HealthIndicatorResult::component).distinct().count() == 1L - : "Should not mix indicators from different components"; - return new HealthComponentResult( - indicators.get(0).component(), - HealthStatus.merge(indicators.stream().map(HealthIndicatorResult::status)), - indicators.stream().collect(Collectors.toMap(HealthIndicatorResult::name, i -> i)) - ); + .filter(i -> Objects.equals(i.name(), name)) + .findFirst() + .orElseThrow(() -> new NoSuchElementException("Indicator [" + name + "] is not found")); } @Override @@ -54,9 +30,8 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.startObject(); builder.field("status", status); builder.startObject("indicators"); - for (Map.Entry indicator : indicators.entrySet()) { - builder.field(indicator.getKey()); - 
indicator.getValue().toXContent(builder, params); + for (HealthIndicatorResult indicator : indicators) { + builder.field(indicator.name(), indicator, params); } builder.endObject(); return builder.endObject(); diff --git a/server/src/main/java/org/elasticsearch/health/HealthIndicatorService.java b/server/src/main/java/org/elasticsearch/health/HealthIndicatorService.java index 44a979efaaff1..e15b0e60cdf09 100644 --- a/server/src/main/java/org/elasticsearch/health/HealthIndicatorService.java +++ b/server/src/main/java/org/elasticsearch/health/HealthIndicatorService.java @@ -9,7 +9,7 @@ package org.elasticsearch.health; /** - * This is a service interface used to report health indicators from the different plugins. + * This is a service interface used to calculate health indicator from the different modules or plugins. */ public interface HealthIndicatorService { diff --git a/server/src/main/java/org/elasticsearch/health/HealthService.java b/server/src/main/java/org/elasticsearch/health/HealthService.java new file mode 100644 index 0000000000000..5a82929f69984 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/health/HealthService.java @@ -0,0 +1,54 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.health; + +import java.util.List; +import java.util.TreeMap; + +import static java.util.stream.Collectors.collectingAndThen; +import static java.util.stream.Collectors.groupingBy; +import static java.util.stream.Collectors.toList; + +/** + * This service collects health indicators from all modules and plugins of elasticsearch + */ +public class HealthService { + + private final List healthIndicatorServices; + + public HealthService(List healthIndicatorServices) { + this.healthIndicatorServices = healthIndicatorServices; + } + + public List getHealth() { + return List.copyOf( + healthIndicatorServices.stream() + .map(HealthIndicatorService::calculate) + .collect( + groupingBy( + HealthIndicatorResult::component, + TreeMap::new, + collectingAndThen(toList(), HealthService::createComponentFromIndicators) + ) + ) + .values() + ); + } + + private static HealthComponentResult createComponentFromIndicators(List indicators) { + assert indicators.size() > 0 : "Component should not be non empty"; + assert indicators.stream().map(HealthIndicatorResult::component).distinct().count() == 1L + : "Should not mix indicators from different components"; + return new HealthComponentResult( + indicators.get(0).component(), + HealthStatus.merge(indicators.stream().map(HealthIndicatorResult::status)), + indicators + ); + } +} diff --git a/server/src/main/java/org/elasticsearch/health/ServerHealthComponents.java b/server/src/main/java/org/elasticsearch/health/ServerHealthComponents.java index c07273c731352..1a4d2458263dd 100644 --- a/server/src/main/java/org/elasticsearch/health/ServerHealthComponents.java +++ b/server/src/main/java/org/elasticsearch/health/ServerHealthComponents.java @@ -8,7 +8,14 @@ package org.elasticsearch.health; +/** + * This class defines common component names for the health indicators. 
+ */ public class ServerHealthComponents { + + public static final String CLUSTER_COORDINATION = "cluster_coordination"; public static final String DATA = "data"; public static final String SNAPSHOT = "snapshot"; + + private ServerHealthComponents() {} } diff --git a/server/src/main/java/org/elasticsearch/health/components/controller/ClusterCoordination.java b/server/src/main/java/org/elasticsearch/health/components/controller/ClusterCoordination.java deleted file mode 100644 index fd3988f38b5de..0000000000000 --- a/server/src/main/java/org/elasticsearch/health/components/controller/ClusterCoordination.java +++ /dev/null @@ -1,67 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ - -package org.elasticsearch.health.components.controller; - -import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.node.DiscoveryNode; -import org.elasticsearch.cluster.node.DiscoveryNodes; -import org.elasticsearch.health.HealthComponentResult; -import org.elasticsearch.health.HealthIndicatorResult; -import org.elasticsearch.health.HealthStatus; - -import java.util.Collections; - -public final class ClusterCoordination { - - public static final String NAME = "cluster_coordination"; - - public static final String INSTANCE_HAS_MASTER_NAME = "instance_has_master"; - public static final String INSTANCE_HAS_MASTER_GREEN_SUMMARY = "Health coordinating instance has a master node."; - public static final String INSTANCE_HAS_MASTER_RED_SUMMARY = "Health coordinating instance does not have a master node."; - - private ClusterCoordination() {} - - public static HealthComponentResult createClusterCoordinationComponent( - final DiscoveryNode coordinatingNode, - final ClusterState clusterState - ) { - final DiscoveryNodes nodes = clusterState.nodes(); - final DiscoveryNode masterNode = nodes.getMasterNode(); - - HealthStatus instanceHasMasterStatus = masterNode == null ? HealthStatus.RED : HealthStatus.GREEN; - String instanceHasMasterSummary = masterNode == null ? 
INSTANCE_HAS_MASTER_RED_SUMMARY : INSTANCE_HAS_MASTER_GREEN_SUMMARY; - HealthIndicatorResult instanceHasMaster = new HealthIndicatorResult( - INSTANCE_HAS_MASTER_NAME, - NAME, - instanceHasMasterStatus, - instanceHasMasterSummary, - (builder, params) -> { - builder.startObject(); - builder.object("coordinating_node", xContentBuilder -> { - builder.field("node_id", coordinatingNode.getId()); - builder.field("name", coordinatingNode.getName()); - }); - builder.object("master_node", xContentBuilder -> { - if (masterNode != null) { - builder.field("node_id", masterNode.getId()); - builder.field("name", masterNode.getName()); - } else { - builder.nullField("node_id"); - builder.nullField("name"); - } - }); - return builder.endObject(); - } - ); - - // Only a single indicator currently so it determines the status - final HealthStatus status = instanceHasMaster.status(); - return new HealthComponentResult(NAME, status, Collections.singletonMap(INSTANCE_HAS_MASTER_NAME, instanceHasMaster)); - } -} diff --git a/server/src/main/java/org/elasticsearch/node/Node.java b/server/src/main/java/org/elasticsearch/node/Node.java index f20e0da799bfd..3c4d699160799 100644 --- a/server/src/main/java/org/elasticsearch/node/Node.java +++ b/server/src/main/java/org/elasticsearch/node/Node.java @@ -40,6 +40,7 @@ import org.elasticsearch.cluster.NodeConnectionsService; import org.elasticsearch.cluster.action.index.MappingUpdatedAction; import org.elasticsearch.cluster.coordination.Coordinator; +import org.elasticsearch.cluster.coordination.InstanceHasMasterHealthIndicatorService; import org.elasticsearch.cluster.desirednodes.DesiredNodesSettingsValidator; import org.elasticsearch.cluster.metadata.IndexMetadataVerifier; import org.elasticsearch.cluster.metadata.IndexTemplateMetadata; @@ -95,6 +96,8 @@ import org.elasticsearch.gateway.GatewayService; import org.elasticsearch.gateway.MetaStateService; import org.elasticsearch.gateway.PersistedClusterStateService; +import org.elasticsearch.health.HealthIndicatorService; +import org.elasticsearch.health.HealthService; import org.elasticsearch.http.HttpServerTransport; import org.elasticsearch.index.IndexSettingProviders; import org.elasticsearch.index.IndexSettings; @@ -135,6 +138,7 @@ import org.elasticsearch.plugins.ClusterPlugin; import org.elasticsearch.plugins.DiscoveryPlugin; import org.elasticsearch.plugins.EnginePlugin; +import org.elasticsearch.plugins.HealthPlugin; import org.elasticsearch.plugins.IndexStorePlugin; import org.elasticsearch.plugins.IngestPlugin; import org.elasticsearch.plugins.MapperPlugin; @@ -209,6 +213,7 @@ import javax.net.ssl.SNIHostName; import static java.util.stream.Collectors.toList; +import static org.elasticsearch.common.util.CollectionUtils.concatLists; import static org.elasticsearch.core.Types.forciblyCast; /** @@ -895,6 +900,15 @@ protected Node( clusterService.getClusterSettings() ); + List serverHealthIndicatorServices = List.of( + new InstanceHasMasterHealthIndicatorService(clusterService) + ); + List pluginHealthIndicatorServices = pluginsService.filterPlugins(HealthPlugin.class) + .stream() + .flatMap(plugin -> plugin.getHealthIndicatorServices().stream()) + .toList(); + HealthService healthService = new HealthService(concatLists(serverHealthIndicatorServices, pluginHealthIndicatorServices)); + modules.add(b -> { b.bind(Node.class).toInstance(this); b.bind(NodeService.class).toInstance(nodeService); @@ -975,6 +989,7 @@ protected Node( b.bind(ExecutorSelector.class).toInstance(executorSelector); 
b.bind(IndexSettingProviders.class).toInstance(indexSettingProviders); b.bind(DesiredNodesSettingsValidator.class).toInstance(desiredNodesSettingsValidator); + b.bind(HealthService.class).toInstance(healthService); }); injector = modules.createInjector(); diff --git a/server/src/main/java/org/elasticsearch/plugins/HealthPlugin.java b/server/src/main/java/org/elasticsearch/plugins/HealthPlugin.java new file mode 100644 index 0000000000000..c1b035ad50e0a --- /dev/null +++ b/server/src/main/java/org/elasticsearch/plugins/HealthPlugin.java @@ -0,0 +1,21 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.plugins; + +import org.elasticsearch.health.HealthIndicatorService; + +import java.util.Collection; + +/** + * An additional extension point for {@link Plugin}s that extends Elasticsearch's health indicators functionality. + */ +public interface HealthPlugin { + + Collection getHealthIndicatorServices(); +} diff --git a/server/src/main/java/org/elasticsearch/plugins/Plugin.java b/server/src/main/java/org/elasticsearch/plugins/Plugin.java index e67809e2ece93..77f1c36ac1368 100644 --- a/server/src/main/java/org/elasticsearch/plugins/Plugin.java +++ b/server/src/main/java/org/elasticsearch/plugins/Plugin.java @@ -48,6 +48,7 @@ *
<li>{@link AnalysisPlugin}</li>
 * <li>{@link ClusterPlugin}</li>
 * <li>{@link DiscoveryPlugin}</li>
+ * <li>{@link HealthPlugin}</li>
 * <li>{@link IngestPlugin}</li>
 * <li>{@link MapperPlugin}</li>
 * <li>{@link NetworkPlugin}</li>
diff --git a/server/src/test/java/org/elasticsearch/health/HealthComponentResultTests.java b/server/src/test/java/org/elasticsearch/health/HealthComponentResultTests.java
deleted file mode 100644
index 3f6f271530ada..0000000000000
--- a/server/src/test/java/org/elasticsearch/health/HealthComponentResultTests.java
+++ /dev/null
@@ -1,37 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-
-package org.elasticsearch.health;
-
-import org.elasticsearch.test.ESTestCase;
-
-public class HealthComponentResultTests extends ESTestCase {
-
-    public void testGroupIndicators() {
-
-        // var indicator1 = new HealthIndicatorResult("indicator1", "component1", GREEN, null, null);
-        // var indicator2 = new HealthIndicatorResult("indicator2", "component1", YELLOW, null, null);
-        // var indicator3 = new HealthIndicatorResult("indicator3", "component2", GREEN, null, null);
-        //
-        // var components = HealthComponentResult.createComponentsFromIndicators(List.of(indicator1, indicator2, indicator3));
-        //
-        // assertThat(
-        //     components,
-        //     anyOf(
-        //         hasItems(
-        //             new HealthComponentResult("component1", YELLOW, List.of(indicator2, indicator1)),
-        //             new HealthComponentResult("component2", GREEN, List.of(indicator3))
-        //         ),
-        //         hasItems(
-        //             new HealthComponentResult("component1", YELLOW, List.of(indicator1, indicator2)),
-        //             new HealthComponentResult("component2", GREEN, List.of(indicator3))
-        //         )
-        //     )
-        // );
-    }
-}
diff --git a/server/src/test/java/org/elasticsearch/health/HealthServiceTests.java b/server/src/test/java/org/elasticsearch/health/HealthServiceTests.java
new file mode 100644
index 0000000000000..3edb276d0fe77
--- /dev/null
+++ b/server/src/test/java/org/elasticsearch/health/HealthServiceTests.java
@@ -0,0 +1,58 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */ + +package org.elasticsearch.health; + +import org.elasticsearch.test.ESTestCase; + +import java.util.List; + +import static org.elasticsearch.health.HealthStatus.GREEN; +import static org.elasticsearch.health.HealthStatus.YELLOW; +import static org.hamcrest.Matchers.anyOf; +import static org.hamcrest.Matchers.hasItems; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +public class HealthServiceTests extends ESTestCase { + + public void testShouldReturnGroupedIndicators() { + + var indicator1 = new HealthIndicatorResult("indicator1", "component1", GREEN, null, null); + var indicator2 = new HealthIndicatorResult("indicator2", "component1", YELLOW, null, null); + var indicator3 = new HealthIndicatorResult("indicator3", "component2", GREEN, null, null); + + var service = new HealthService( + List.of( + createMockHealthIndicatorService(indicator1), + createMockHealthIndicatorService(indicator2), + createMockHealthIndicatorService(indicator3) + ) + ); + + assertThat( + service.getHealth(), + anyOf( + hasItems( + new HealthComponentResult("component1", YELLOW, List.of(indicator2, indicator1)), + new HealthComponentResult("component2", GREEN, List.of(indicator3)) + ), + hasItems( + new HealthComponentResult("component1", YELLOW, List.of(indicator1, indicator2)), + new HealthComponentResult("component2", GREEN, List.of(indicator3)) + ) + ) + ); + } + + private static HealthIndicatorService createMockHealthIndicatorService(HealthIndicatorResult result) { + var healthIndicatorService = mock(HealthIndicatorService.class); + when(healthIndicatorService.calculate()).thenReturn(result); + return healthIndicatorService; + } +} diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java index fbee26415c388..a7765ff11c8fa 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java @@ -1627,6 +1627,11 @@ public static void setClusterReadOnly(boolean value) { assertAcked(client().admin().cluster().prepareUpdateSettings().setPersistentSettings(settings).get()); } + /** Sets cluster persistent settings **/ + public void updateClusterSettings(Settings.Builder persistentSettings) { + assertAcked(client().admin().cluster().prepareUpdateSettings().setPersistentSettings(persistentSettings).get()); + } + private static CountDownLatch newLatch(List latches) { CountDownLatch l = new CountDownLatch(1); latches.add(l); diff --git a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/IndexLifecycle.java b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/IndexLifecycle.java index b9b93e37cf67f..b2d943207f16c 100644 --- a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/IndexLifecycle.java +++ b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/IndexLifecycle.java @@ -26,9 +26,11 @@ import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.env.Environment; import org.elasticsearch.env.NodeEnvironment; +import org.elasticsearch.health.HealthIndicatorService; import org.elasticsearch.index.IndexModule; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.plugins.ActionPlugin; +import org.elasticsearch.plugins.HealthPlugin; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.repositories.RepositoriesService; import org.elasticsearch.rest.RestController; @@ -107,6 +109,7 @@ import 
org.elasticsearch.xpack.ilm.history.ILMHistoryTemplateRegistry;
 import org.elasticsearch.xpack.slm.SLMInfoTransportAction;
 import org.elasticsearch.xpack.slm.SLMUsageTransportAction;
+import org.elasticsearch.xpack.slm.SlmHealthIndicatorService;
 import org.elasticsearch.xpack.slm.SnapshotLifecycleService;
 import org.elasticsearch.xpack.slm.SnapshotLifecycleTask;
 import org.elasticsearch.xpack.slm.SnapshotRetentionService;
@@ -144,7 +147,7 @@
 
 import static org.elasticsearch.xpack.core.ClientHelper.INDEX_LIFECYCLE_ORIGIN;
 
-public class IndexLifecycle extends Plugin implements ActionPlugin {
+public class IndexLifecycle extends Plugin implements ActionPlugin, HealthPlugin {
 
     public static final List NAMED_X_CONTENT_ENTRIES = xContentEntries();
 
@@ -153,6 +156,8 @@ public class IndexLifecycle extends Plugin implements ActionPlugin {
     private final SetOnce<SnapshotLifecycleService> snapshotLifecycleService = new SetOnce<>();
     private final SetOnce<SnapshotRetentionService> snapshotRetentionService = new SetOnce<>();
     private final SetOnce<SnapshotHistoryStore> snapshotHistoryStore = new SetOnce<>();
+    private final SetOnce<IlmHealthIndicatorService> ilmHealthIndicatorService = new SetOnce<>();
+    private final SetOnce<SlmHealthIndicatorService> slmHealthIndicatorService = new SetOnce<>();
     private final Settings settings;
 
     public IndexLifecycle(Settings settings) {
@@ -264,7 +269,8 @@ public Collection createComponents(
         );
         snapshotRetentionService.get().init(clusterService);
         components.addAll(Arrays.asList(snapshotLifecycleService.get(), snapshotHistoryStore.get(), snapshotRetentionService.get()));
-
+        ilmHealthIndicatorService.set(new IlmHealthIndicatorService(clusterService));
+        slmHealthIndicatorService.set(new SlmHealthIndicatorService(clusterService));
         return components;
     }
 
@@ -414,6 +420,11 @@ public List getRestHandlers(
         return actions;
     }
 
+    @Override
+    public Collection<HealthIndicatorService> getHealthIndicatorServices() {
+        return List.of(ilmHealthIndicatorService.get(), slmHealthIndicatorService.get());
+    }
+
     @Override
     public void onIndexModule(IndexModule indexModule) {
         assert indexLifecycleInitialisationService.get() != null;

From 0deb0f6cd8cc9e9b025446829d4146341506c424 Mon Sep 17 00:00:00 2001
From: Rory Hunter
Date: Wed, 9 Feb 2022 12:22:54 +0000
Subject: [PATCH 003/167] Add Alma Linux 8 ARM to CI (#83713)

Since CentOS 8 on ARM is defunct, add Alma Linux 8 on ARM to the CI
config.
---
 .../elastic+elasticsearch+multijob+platform-support-arm.yml | 1 +
 1 file changed, 1 insertion(+)

diff --git a/.ci/jobs.t/elastic+elasticsearch+multijob+platform-support-arm.yml b/.ci/jobs.t/elastic+elasticsearch+multijob+platform-support-arm.yml
index 2753cae719faa..a45989ae00fbf 100644
--- a/.ci/jobs.t/elastic+elasticsearch+multijob+platform-support-arm.yml
+++ b/.ci/jobs.t/elastic+elasticsearch+multijob+platform-support-arm.yml
@@ -14,6 +14,7 @@
           type: label-expression
           name: os
           values:
+            - "almalinux-8-aarch64&&immutable"
             - "ubuntu-1804-aarch64&&immutable"
   builders:
     - inject:

From b645b4df65f97c2ef0b07d59d5d08ca8fb11b02c Mon Sep 17 00:00:00 2001
From: Armin Braun
Date: Wed, 9 Feb 2022 13:52:29 +0100
Subject: [PATCH 004/167] Avoid HashMap Resizing in IngestDocument Constructor (#83714)

This map is getting resized quite a bit in some benchmarks; let's make
this a little cheaper by presizing it correctly.
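As a rough sketch of why the presizing helps (the surrounding harness here is
illustrative; only Maps.newMapWithExpectedSize and the source.size() + 5 sizing
come from this change):

    // A map created without a size hint may still have to grow while the
    // source entries and the five metadata fields are inserted; sizing it
    // for the final entry count up front avoids growing at all.
    Map<String, Object> sourceAndMetadata = Maps.newMapWithExpectedSize(source.size() + 5);
    sourceAndMetadata.putAll(source);                              // fits without resizing
    sourceAndMetadata.put(Metadata.INDEX.getFieldName(), index);   // metadata fields fit too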
---
 .../src/main/java/org/elasticsearch/ingest/IngestDocument.java | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/server/src/main/java/org/elasticsearch/ingest/IngestDocument.java b/server/src/main/java/org/elasticsearch/ingest/IngestDocument.java
index bc2505b88323e..61c9921d74b7d 100644
--- a/server/src/main/java/org/elasticsearch/ingest/IngestDocument.java
+++ b/server/src/main/java/org/elasticsearch/ingest/IngestDocument.java
@@ -55,7 +55,8 @@ public final class IngestDocument {
 
     private final Set<String> executedPipelines = new LinkedHashSet<>();
 
     public IngestDocument(String index, String id, String routing, Long version, VersionType versionType, Map<String, Object> source) {
-        this.sourceAndMetadata = new HashMap<>();
+        // source + at max 5 extra fields
+        this.sourceAndMetadata = Maps.newMapWithExpectedSize(source.size() + 5);
         this.sourceAndMetadata.putAll(source);
         this.sourceAndMetadata.put(Metadata.INDEX.getFieldName(), index);
         this.sourceAndMetadata.put(Metadata.ID.getFieldName(), id);

From 07fa7a5b194f57c50f7639f5205b528502a058fb Mon Sep 17 00:00:00 2001
From: Adam Locke
Date: Wed, 9 Feb 2022 09:00:53 -0500
Subject: [PATCH 005/167] [DOCS] Remove beta admon for security-on-by-default (#83600)

Removes the `beta::` admonition for the security-on-by-default feature page.
---
 x-pack/docs/en/security/configuring-stack-security.asciidoc | 2 --
 1 file changed, 2 deletions(-)

diff --git a/x-pack/docs/en/security/configuring-stack-security.asciidoc b/x-pack/docs/en/security/configuring-stack-security.asciidoc
index 4fb41a7f4d8a4..8c1623a079e70 100644
--- a/x-pack/docs/en/security/configuring-stack-security.asciidoc
+++ b/x-pack/docs/en/security/configuring-stack-security.asciidoc
@@ -1,8 +1,6 @@
 [[configuring-stack-security]]
 == Start the Elastic Stack with security enabled
 
-beta::[This functionality is in beta and is subject to change. The design and code is less mature than official GA features and is being provided as-is with no warranties. Beta features are not subject to the support SLA of official GA features.]
-
 When you start {es} for the first time, the following security
 configuration occurs automatically:
 

From bf9879fbbf270df22afff34603e9ef9380d0506a Mon Sep 17 00:00:00 2001
From: Benjamin Trent
Date: Wed, 9 Feb 2022 10:15:49 -0500
Subject: [PATCH 006/167] [ML] scaling metric aggregation counts and sums when aggregations are sampled (#83263)

This is a follow-up to: https://github.com/elastic/elasticsearch/pull/81228

This commit allows aggregations to be aware that they are being reduced
within a sampling context. Currently the only sampling context is provided
by the `random_sampler` aggregation.

This commit also enables the following metric aggregations to be sampled
and scales the values appropriately before serializing back to the user:

 - percentiles
 - avg
 - extended_stats
 - geo_bounds
 - geo_centroid
 - max
 - median_absolute_deviation
 - min
 - scripted_metric
 - stats
 - sum
 - top_hits
 - value_count
 - weighted_avg
 - rate
 - string_stats

No multi-bucket aggregation support is added in this commit; that will be
in a later commit.
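For intuition about the scaling, here is a toy sketch, not the SamplingContext
implementation added below; the divide-by-probability step simply matches the
inverseScale calls visible in the diff. If each document is kept with
probability p, dividing a sampled count or sum by p gives an unbiased estimate
of the value over all documents. Order statistics such as min, max, and
percentiles need no such rescaling, which is why their finalizeSampling
implementations below return the aggregation unchanged.

    import java.util.Random;

    class SamplingScaleSketch {
        public static void main(String[] args) {
            double probability = 0.1;       // keep roughly 10% of documents
            Random random = new Random(42); // fixed seed, as the sampler uses

            long actualCount = 1_000_000;
            double actualSum = 0;
            long sampledCount = 0;
            double sampledSum = 0;

            for (long i = 0; i < actualCount; i++) {
                double value = i % 100;     // some per-document metric value
                actualSum += value;
                if (random.nextDouble() < probability) {
                    sampledCount++;
                    sampledSum += value;
                }
            }

            // Inverse scaling: divide by the sampling probability to estimate
            // the statistic over the full document set from the sample.
            double estimatedCount = sampledCount / probability;
            double estimatedSum = sampledSum / probability;

            System.out.printf("count: estimated %.0f vs actual %d%n", estimatedCount, actualCount);
            System.out.printf("sum:   estimated %.0f vs actual %.0f%n", estimatedSum, actualSum);
        }
    }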
--- .../matrix/stats/InternalMatrixStats.java | 15 ++++ .../stats/MatrixStatsAggregationBuilder.java | 5 ++ .../matrix/stats/MatrixStatsResults.java | 13 ++++ .../aggregations/AggregationBuilder.java | 14 ++++ .../aggregations/InternalAggregation.java | 12 ++- .../sampler/random/InternalRandomSampler.java | 45 ++++++++++- .../RandomSamplerAggregationBuilder.java | 17 +++-- .../random/RandomSamplerAggregator.java | 6 +- .../RandomSamplerAggregatorFactory.java | 2 +- .../AbstractInternalHDRPercentiles.java | 6 ++ .../AbstractInternalTDigestPercentiles.java | 6 ++ ...AbstractPercentilesAggregationBuilder.java | 5 ++ .../metrics/AvgAggregationBuilder.java | 5 ++ .../ExtendedStatsAggregationBuilder.java | 5 ++ .../metrics/GeoBoundsAggregationBuilder.java | 5 ++ .../GeoCentroidAggregationBuilder.java | 5 ++ .../aggregations/metrics/InternalAvg.java | 6 ++ .../metrics/InternalExtendedStats.java | 16 ++++ .../metrics/InternalGeoBounds.java | 6 ++ .../metrics/InternalGeoCentroid.java | 6 ++ .../aggregations/metrics/InternalMax.java | 6 ++ .../InternalMedianAbsoluteDeviation.java | 6 ++ .../aggregations/metrics/InternalMin.java | 6 ++ .../metrics/InternalScriptedMetric.java | 6 ++ .../aggregations/metrics/InternalStats.java | 14 ++++ .../aggregations/metrics/InternalTopHits.java | 6 ++ .../metrics/InternalValueCount.java | 6 ++ .../metrics/InternalWeightedAvg.java | 6 ++ .../metrics/MaxAggregationBuilder.java | 5 ++ ...anAbsoluteDeviationAggregationBuilder.java | 5 ++ .../metrics/MinAggregationBuilder.java | 5 ++ .../ScriptedMetricAggregationBuilder.java | 5 ++ .../metrics/StatsAggregationBuilder.java | 5 ++ .../search/aggregations/metrics/Sum.java | 6 ++ .../metrics/SumAggregationBuilder.java | 5 ++ .../metrics/TopHitsAggregationBuilder.java | 5 ++ .../metrics/ValueCountAggregationBuilder.java | 5 ++ .../WeightedAvgAggregationBuilder.java | 5 ++ .../aggregations/support/SamplingContext.java | 76 +++++++++++++++++++ .../metrics/InternalAvgTests.java | 11 +++ .../metrics/InternalExtendedStatsTests.java | 16 ++++ .../metrics/InternalGeoBoundsTests.java | 16 ++++ .../metrics/InternalGeoCentroidTests.java | 13 ++++ .../InternalHDRPercentilesRanksTests.java | 18 +++++ .../metrics/InternalHDRPercentilesTests.java | 16 ++++ .../metrics/InternalMaxTests.java | 13 ++++ .../InternalMedianAbsoluteDeviationTests.java | 17 +++++ .../metrics/InternalMinTests.java | 13 ++++ .../metrics/InternalScriptedMetricTests.java | 11 +++ .../metrics/InternalStatsTests.java | 14 ++++ .../InternalTDigestPercentilesRanksTests.java | 22 ++++++ .../InternalTDigestPercentilesTests.java | 16 ++++ .../metrics/InternalTopHitsTests.java | 12 +++ .../metrics/InternalValueCountTests.java | 13 ++++ .../metrics/InternalWeightedAvgTests.java | 13 ++++ .../search/aggregations/metrics/SumTests.java | 10 +++ .../support/SamplingContextTests.java | 55 ++++++++++++++ .../aggregations/AggregatorTestCase.java | 15 +++- .../test/InternalAggregationTestCase.java | 15 ++++ .../xpack/analytics/rate/InternalRate.java | 6 ++ .../rate/RateAggregationBuilder.java | 5 ++ .../xpack/analytics/ttest/InternalTTest.java | 6 ++ .../ttest/TTestAggregationBuilder.java | 5 ++ .../analytics/rate/InternalRateTests.java | 12 +++ .../analytics/ttest/InternalTTestTests.java | 11 +++ 65 files changed, 743 insertions(+), 14 deletions(-) create mode 100644 server/src/main/java/org/elasticsearch/search/aggregations/support/SamplingContext.java create mode 100644 server/src/test/java/org/elasticsearch/search/aggregations/support/SamplingContextTests.java diff --git 
a/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/matrix/stats/InternalMatrixStats.java b/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/matrix/stats/InternalMatrixStats.java index 924f925d3b6da..08d34a37d1fd6 100644 --- a/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/matrix/stats/InternalMatrixStats.java +++ b/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/matrix/stats/InternalMatrixStats.java @@ -11,6 +11,7 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.search.aggregations.AggregationReduceContext; import org.elasticsearch.search.aggregations.InternalAggregation; +import org.elasticsearch.search.aggregations.support.SamplingContext; import org.elasticsearch.xcontent.XContentBuilder; import java.io.IOException; @@ -67,6 +68,9 @@ public String getWriteableName() { /** get the number of documents */ @Override public long getDocCount() { + if (results != null) { + return results.getDocCount(); + } if (stats == null) { return 0; } @@ -241,6 +245,17 @@ public InternalAggregation reduce(List aggregations, Aggreg return new InternalMatrixStats(name, runningStats.docCount, runningStats, null, getMetadata()); } + @Override + public InternalAggregation finalizeSampling(SamplingContext samplingContext) { + return new InternalMatrixStats( + name, + samplingContext.inverseScale(getDocCount()), + stats, + new MatrixStatsResults(stats, samplingContext), + getMetadata() + ); + } + @Override protected boolean mustReduceOnSingleInternalAgg() { return true; diff --git a/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/matrix/stats/MatrixStatsAggregationBuilder.java b/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/matrix/stats/MatrixStatsAggregationBuilder.java index dfe395df936b0..c4c476ff65e93 100644 --- a/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/matrix/stats/MatrixStatsAggregationBuilder.java +++ b/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/matrix/stats/MatrixStatsAggregationBuilder.java @@ -46,6 +46,11 @@ protected AggregationBuilder shallowCopy(AggregatorFactories.Builder factoriesBu return new MatrixStatsAggregationBuilder(this, factoriesBuilder, metadata); } + @Override + public boolean supportsSampling() { + return true; + } + /** * Read from a stream. 
 */
diff --git a/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/matrix/stats/MatrixStatsResults.java b/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/matrix/stats/MatrixStatsResults.java
index 563001d4cc9f3..58b14dc390f44 100644
--- a/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/matrix/stats/MatrixStatsResults.java
+++ b/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/matrix/stats/MatrixStatsResults.java
@@ -11,6 +11,7 @@
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.io.stream.Writeable;
+import org.elasticsearch.search.aggregations.support.SamplingContext;
 
 import java.io.IOException;
 import java.util.Collections;
@@ -41,6 +42,18 @@ class MatrixStatsResults implements Writeable {
         this.compute();
     }
 
+    /** creates and computes the result from the provided stats, scaling as necessary given the sampling context */
+    MatrixStatsResults(RunningStats stats, SamplingContext samplingContext) {
+        this.results = stats.clone();
+        this.correlation = new HashMap<>();
+        this.compute();
+        // Note: it is important to scale counts AFTER compute as scaling before could introduce bias
+        this.results.docCount = samplingContext.inverseScale(this.results.docCount);
+        for (String field : this.results.counts.keySet()) {
+            this.results.counts.computeIfPresent(field, (k, v) -> samplingContext.inverseScale(v));
+        }
+    }
+
     /** creates a results object from the given stream */
     @SuppressWarnings("unchecked")
     protected MatrixStatsResults(StreamInput in) {
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/AggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/AggregationBuilder.java
index 8181d387a0dbb..77638ac0766f6 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/AggregationBuilder.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/AggregationBuilder.java
@@ -14,6 +14,7 @@
 import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
 import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator.PipelineTree;
 import org.elasticsearch.search.aggregations.support.AggregationContext;
+import org.elasticsearch.search.aggregations.support.SamplingContext;
 import org.elasticsearch.xcontent.ParseField;
 import org.elasticsearch.xcontent.ToXContentFragment;
 import org.elasticsearch.xcontent.XContentParser;
@@ -185,6 +186,19 @@ public static final class CommonFields extends ParseField.CommonFields {
         public static final ParseField VALUE_TYPE = new ParseField("value_type");
     }
 
+    /**
+     * Does this aggregation support running within a sampling context.
+     *
+     * By default, it's false for all aggregations.
+     *
+     * If the sub-classed builder supports sampling, be sure that the resulting internal aggregation objects
+     * override {@link InternalAggregation#finalizeSampling(SamplingContext)} and scale any values that require scaling.
+ * @return does this aggregation builder support sampling + */ + public boolean supportsSampling() { + return false; + } + @Override public String toString() { return Strings.toString(this); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/InternalAggregation.java b/server/src/main/java/org/elasticsearch/search/aggregations/InternalAggregation.java index 8df70a847c347..8d38ad08035f6 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/InternalAggregation.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/InternalAggregation.java @@ -15,6 +15,7 @@ import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator.PipelineTree; import org.elasticsearch.search.aggregations.support.AggregationPath; +import org.elasticsearch.search.aggregations.support.SamplingContext; import org.elasticsearch.xcontent.XContentBuilder; import java.io.IOException; @@ -121,7 +122,16 @@ public InternalAggregation reducePipelines( public abstract InternalAggregation reduce(List aggregations, AggregationReduceContext reduceContext); /** - * Signal the framework if the {@linkplain InternalAggregation#reduce(List, ReduceContext)} phase needs to be called + * Called by the parent sampling context. Should only ever be called once as some aggregations scale their internal values + * @param samplingContext the current sampling context + * @return new aggregation with the sampling context applied, could be the same aggregation instance if nothing needs to be done + */ + public InternalAggregation finalizeSampling(SamplingContext samplingContext) { + throw new UnsupportedOperationException(getWriteableName() + " aggregation [" + getName() + "] does not support sampling"); + } + + /** + * Signal the framework if the {@linkplain InternalAggregation#reduce(List, AggregationReduceContext)} phase needs to be called * when there is only one {@linkplain InternalAggregation}. 
*/ protected abstract boolean mustReduceOnSingleInternalAgg(); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/random/InternalRandomSampler.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/random/InternalRandomSampler.java index 15547dd2a9c8b..326a5fe29d2b2 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/random/InternalRandomSampler.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/random/InternalRandomSampler.java @@ -10,23 +10,38 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.search.aggregations.AggregationReduceContext; +import org.elasticsearch.search.aggregations.InternalAggregation; import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.bucket.InternalSingleBucketAggregation; import org.elasticsearch.search.aggregations.bucket.sampler.Sampler; +import org.elasticsearch.search.aggregations.support.SamplingContext; import org.elasticsearch.xcontent.XContentBuilder; import java.io.IOException; +import java.util.ArrayList; +import java.util.List; import java.util.Map; +import java.util.stream.Collectors; public class InternalRandomSampler extends InternalSingleBucketAggregation implements Sampler { public static final String NAME = "mapped_random_sampler"; public static final String PARSER_NAME = "random_sampler"; private final int seed; + private final double probability; - InternalRandomSampler(String name, long docCount, int seed, InternalAggregations subAggregations, Map metadata) { + InternalRandomSampler( + String name, + long docCount, + int seed, + double probability, + InternalAggregations subAggregations, + Map metadata + ) { super(name, docCount, subAggregations, metadata); this.seed = seed; + this.probability = probability; } /** @@ -35,12 +50,14 @@ public class InternalRandomSampler extends InternalSingleBucketAggregation imple public InternalRandomSampler(StreamInput in) throws IOException { super(in); this.seed = in.readInt(); + this.probability = in.readDouble(); } @Override protected void doWriteTo(StreamOutput out) throws IOException { super.doWriteTo(out); out.writeInt(seed); + out.writeDouble(probability); } @Override @@ -55,12 +72,36 @@ public String getType() { @Override protected InternalSingleBucketAggregation newAggregation(String name, long docCount, InternalAggregations subAggregations) { - return new InternalRandomSampler(name, docCount, seed, subAggregations, metadata); + return new InternalRandomSampler(name, docCount, seed, probability, subAggregations, metadata); + } + + @Override + public InternalAggregation reduce(List aggregations, AggregationReduceContext reduceContext) { + long docCount = 0L; + List subAggregationsList = new ArrayList<>(aggregations.size()); + for (InternalAggregation aggregation : aggregations) { + docCount += ((InternalSingleBucketAggregation) aggregation).getDocCount(); + subAggregationsList.add(((InternalSingleBucketAggregation) aggregation).getAggregations()); + } + InternalAggregations aggs = InternalAggregations.reduce(subAggregationsList, reduceContext); + if (reduceContext.isFinalReduce() && aggs != null) { + SamplingContext context = buildContext(); + aggs = InternalAggregations.from( + aggs.asList().stream().map(agg -> ((InternalAggregation) agg).finalizeSampling(context)).collect(Collectors.toList()) + ); + } + + return 
newAggregation(getName(), docCount, aggs); + } + + public SamplingContext buildContext() { + return new SamplingContext(probability, seed); } @Override public XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException { builder.field(RandomSamplerAggregationBuilder.SEED.getPreferredName(), seed); + builder.field(RandomSamplerAggregationBuilder.PROBABILITY.getPreferredName(), probability); builder.field(CommonFields.DOC_COUNT.getPreferredName(), getDocCount()); getAggregations().toXContentInternal(builder, params); return builder; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/random/RandomSamplerAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/random/RandomSamplerAggregationBuilder.java index f6e7903c52222..fa890c39b61f5 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/random/RandomSamplerAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/random/RandomSamplerAggregationBuilder.java @@ -16,10 +16,6 @@ import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.AggregatorFactories; import org.elasticsearch.search.aggregations.AggregatorFactory; -import org.elasticsearch.search.aggregations.bucket.nested.NestedAggregationBuilder; -import org.elasticsearch.search.aggregations.bucket.sampler.DiversifiedAggregationBuilder; -import org.elasticsearch.search.aggregations.bucket.sampler.SamplerAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.CardinalityAggregationBuilder; import org.elasticsearch.search.aggregations.support.AggregationContext; import org.elasticsearch.xcontent.ObjectParser; import org.elasticsearch.xcontent.ParseField; @@ -78,6 +74,10 @@ public RandomSamplerAggregationBuilder(StreamInput in) throws IOException { this.seed = in.readInt(); } + public double getProbability() { + return p; + } + protected RandomSamplerAggregationBuilder( RandomSamplerAggregationBuilder clone, AggregatorFactories.Builder factoriesBuilder, @@ -118,10 +118,7 @@ protected AggregatorFactory doBuild( } recursivelyCheckSubAggs(subfactoriesBuilder.getAggregatorFactories(), builder -> { // TODO add a method or interface to aggregation builder that defaults to false - if (builder instanceof CardinalityAggregationBuilder - || builder instanceof NestedAggregationBuilder - || builder instanceof SamplerAggregationBuilder - || builder instanceof DiversifiedAggregationBuilder) { + if (builder.supportsSampling() == false) { throw new IllegalArgumentException( "[random_sampler] aggregation [" + getName() @@ -136,6 +133,10 @@ protected AggregatorFactory doBuild( return new RandomSamplerAggregatorFactory(name, seed, p, context, parent, subfactoriesBuilder, metadata); } + public int getSeed() { + return seed; + } + @Override protected XContentBuilder internalXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/random/RandomSamplerAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/random/RandomSamplerAggregator.java index 2c28d96648678..366a06f55ed61 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/random/RandomSamplerAggregator.java +++ 
b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/random/RandomSamplerAggregator.java @@ -29,11 +29,13 @@ public class RandomSamplerAggregator extends BucketsAggregator implements SingleBucketAggregator { private final int seed; + private final double probability; private final CheckedSupplier weightSupplier; RandomSamplerAggregator( String name, int seed, + double probability, CheckedSupplier weightSupplier, AggregatorFactories factories, AggregationContext context, @@ -43,6 +45,7 @@ public class RandomSamplerAggregator extends BucketsAggregator implements Single ) throws IOException { super(name, factories, context, parent, cardinalityUpperBound, metadata); this.seed = seed; + this.probability = probability; if (this.subAggregators().length == 0) { throw new IllegalArgumentException( RandomSamplerAggregationBuilder.NAME + " aggregation [" + name + "] must have sub aggregations configured" @@ -59,6 +62,7 @@ public InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws I name, bucketDocCount(owningBucketOrd), seed, + probability, subAggregationResults, metadata() ) @@ -67,7 +71,7 @@ public InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws I @Override public InternalAggregation buildEmptyAggregation() { - return new InternalRandomSampler(name, 0, seed, buildEmptySubAggregations(), metadata()); + return new InternalRandomSampler(name, 0, seed, probability, buildEmptySubAggregations(), metadata()); } /** diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/random/RandomSamplerAggregatorFactory.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/random/RandomSamplerAggregatorFactory.java index 0776ad1f91698..1e5990b6bab57 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/random/RandomSamplerAggregatorFactory.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/random/RandomSamplerAggregatorFactory.java @@ -44,7 +44,7 @@ public class RandomSamplerAggregatorFactory extends AggregatorFactory { @Override public Aggregator createInternal(Aggregator parent, CardinalityUpperBound cardinality, Map metadata) throws IOException { - return new RandomSamplerAggregator(name, seed, this::getWeight, factories, context, parent, cardinality, metadata); + return new RandomSamplerAggregator(name, seed, probability, this::getWeight, factories, context, parent, cardinality, metadata); } /** diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/AbstractInternalHDRPercentiles.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/AbstractInternalHDRPercentiles.java index 15f703c9e71c4..48c6a85bd60cf 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/AbstractInternalHDRPercentiles.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/AbstractInternalHDRPercentiles.java @@ -14,6 +14,7 @@ import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.AggregationReduceContext; import org.elasticsearch.search.aggregations.InternalAggregation; +import org.elasticsearch.search.aggregations.support.SamplingContext; import org.elasticsearch.xcontent.XContentBuilder; import java.io.IOException; @@ -133,6 +134,11 @@ public AbstractInternalHDRPercentiles reduce(List aggregati return createReduced(getName(), keys, merged, keyed, getMetadata()); } + @Override + public InternalAggregation 
finalizeSampling(SamplingContext samplingContext) { + return this; + } + protected abstract AbstractInternalHDRPercentiles createReduced( String name, double[] keys, diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/AbstractInternalTDigestPercentiles.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/AbstractInternalTDigestPercentiles.java index 551e36f057c11..342d6cabedb08 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/AbstractInternalTDigestPercentiles.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/AbstractInternalTDigestPercentiles.java @@ -13,6 +13,7 @@ import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.AggregationReduceContext; import org.elasticsearch.search.aggregations.InternalAggregation; +import org.elasticsearch.search.aggregations.support.SamplingContext; import org.elasticsearch.xcontent.XContentBuilder; import java.io.IOException; @@ -116,6 +117,11 @@ public AbstractInternalTDigestPercentiles reduce(List aggre return createReduced(getName(), keys, merged, keyed, getMetadata()); } + @Override + public InternalAggregation finalizeSampling(SamplingContext samplingContext) { + return this; + } + protected abstract AbstractInternalTDigestPercentiles createReduced( String name, double[] keys, diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/AbstractPercentilesAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/AbstractPercentilesAggregationBuilder.java index 1e5a710d3b560..6acab88ca2558 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/AbstractPercentilesAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/AbstractPercentilesAggregationBuilder.java @@ -157,6 +157,11 @@ public static > ConstructingO } } + @Override + public boolean supportsSampling() { + return true; + } + @Override protected void innerWriteTo(StreamOutput out) throws IOException { out.writeDoubleArray(values); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/AvgAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/AvgAggregationBuilder.java index b31c9b443bf8b..bbabe974bad20 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/AvgAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/AvgAggregationBuilder.java @@ -65,6 +65,11 @@ public AvgAggregationBuilder(StreamInput in) throws IOException { super(in); } + @Override + public boolean supportsSampling() { + return true; + } + @Override protected AggregationBuilder shallowCopy(AggregatorFactories.Builder factoriesBuilder, Map metadata) { return new AvgAggregationBuilder(this, factoriesBuilder, metadata); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ExtendedStatsAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ExtendedStatsAggregationBuilder.java index 6653151f03590..33cc4052c34e9 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ExtendedStatsAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ExtendedStatsAggregationBuilder.java @@ -77,6 +77,11 @@ public ExtendedStatsAggregationBuilder(StreamInput in) throws IOException { sigma = in.readDouble(); } + @Override + public boolean 
supportsSampling() { + return true; + } + @Override public Set metricNames() { return InternalExtendedStats.METRIC_NAMES; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/GeoBoundsAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/GeoBoundsAggregationBuilder.java index 621e6d4d59649..3201426cc4f41 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/GeoBoundsAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/GeoBoundsAggregationBuilder.java @@ -67,6 +67,11 @@ protected AggregationBuilder shallowCopy(AggregatorFactories.Builder factoriesBu return new GeoBoundsAggregationBuilder(this, factoriesBuilder, metadata); } + @Override + public boolean supportsSampling() { + return true; + } + /** * Read from a stream. */ diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/GeoCentroidAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/GeoCentroidAggregationBuilder.java index a0e62eb3b8fb6..1d5f7fbb761e6 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/GeoCentroidAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/GeoCentroidAggregationBuilder.java @@ -77,6 +77,11 @@ public GeoCentroidAggregationBuilder(StreamInput in) throws IOException { super(in); } + @Override + public boolean supportsSampling() { + return true; + } + @Override protected void innerWriteTo(StreamOutput out) { // Do nothing, no extra state to write to stream diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalAvg.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalAvg.java index 9d74fc0b99612..fd706e92f19a5 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalAvg.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalAvg.java @@ -12,6 +12,7 @@ import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.AggregationReduceContext; import org.elasticsearch.search.aggregations.InternalAggregation; +import org.elasticsearch.search.aggregations.support.SamplingContext; import org.elasticsearch.xcontent.XContentBuilder; import java.io.IOException; @@ -88,6 +89,11 @@ public InternalAvg reduce(List aggregations, AggregationRed return new InternalAvg(getName(), kahanSummation.value(), count, format, getMetadata()); } + @Override + public InternalAggregation finalizeSampling(SamplingContext samplingContext) { + return this; + } + @Override public XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException { builder.field(CommonFields.VALUE.getPreferredName(), count != 0 ? 
getValue() : null); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalExtendedStats.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalExtendedStats.java index 393149239cbab..ba3c36c7c386e 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalExtendedStats.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalExtendedStats.java @@ -12,6 +12,7 @@ import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.AggregationReduceContext; import org.elasticsearch.search.aggregations.InternalAggregation; +import org.elasticsearch.search.aggregations.support.SamplingContext; import org.elasticsearch.xcontent.XContentBuilder; import java.io.IOException; @@ -273,6 +274,21 @@ public InternalExtendedStats reduce(List aggregations, Aggr ); } + @Override + public InternalAggregation finalizeSampling(SamplingContext samplingContext) { + return new InternalExtendedStats( + name, + samplingContext.inverseScale(count), + samplingContext.inverseScale(sum), + min, + max, + samplingContext.inverseScale(sumOfSqrs), + sigma, + format, + getMetadata() + ); + } + static class Fields { public static final String SUM_OF_SQRS = "sum_of_squares"; public static final String SUM_OF_SQRS_AS_STRING = "sum_of_squares_as_string"; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalGeoBounds.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalGeoBounds.java index b2d1bb4198c5d..113ac7a562c0c 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalGeoBounds.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalGeoBounds.java @@ -14,6 +14,7 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.search.aggregations.AggregationReduceContext; import org.elasticsearch.search.aggregations.InternalAggregation; +import org.elasticsearch.search.aggregations.support.SamplingContext; import org.elasticsearch.xcontent.XContentBuilder; import java.io.IOException; @@ -115,6 +116,11 @@ public InternalAggregation reduce(List aggregations, Aggreg return new InternalGeoBounds(name, top, bottom, posLeft, posRight, negLeft, negRight, wrapLongitude, getMetadata()); } + @Override + public InternalAggregation finalizeSampling(SamplingContext samplingContext) { + return this; + } + @Override protected boolean mustReduceOnSingleInternalAgg() { return false; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalGeoCentroid.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalGeoCentroid.java index 4fc6ddad9a532..d4a3249177d61 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalGeoCentroid.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalGeoCentroid.java @@ -15,6 +15,7 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.search.aggregations.AggregationReduceContext; import org.elasticsearch.search.aggregations.InternalAggregation; +import org.elasticsearch.search.aggregations.support.SamplingContext; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.XContentBuilder; @@ -124,6 +125,11 @@ public InternalGeoCentroid reduce(List aggregations, Aggreg return new InternalGeoCentroid(name, result, totalCount, getMetadata()); } + @Override + public 
InternalAggregation finalizeSampling(SamplingContext samplingContext) { + return new InternalGeoCentroid(name, centroid, samplingContext.inverseScale(count), getMetadata()); + } + @Override protected boolean mustReduceOnSingleInternalAgg() { return false; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalMax.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalMax.java index 50dde6105b3fc..6dec5d6603795 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalMax.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalMax.java @@ -12,6 +12,7 @@ import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.AggregationReduceContext; import org.elasticsearch.search.aggregations.InternalAggregation; +import org.elasticsearch.search.aggregations.support.SamplingContext; import org.elasticsearch.xcontent.XContentBuilder; import java.io.IOException; @@ -67,6 +68,11 @@ public InternalMax reduce(List aggregations, AggregationRed return new InternalMax(name, max, format, getMetadata()); } + @Override + public InternalAggregation finalizeSampling(SamplingContext samplingContext) { + return this; + } + @Override public XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException { boolean hasValue = Double.isInfinite(max) == false; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalMedianAbsoluteDeviation.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalMedianAbsoluteDeviation.java index 04084bcd435ac..0d5f3b737b9f1 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalMedianAbsoluteDeviation.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalMedianAbsoluteDeviation.java @@ -13,6 +13,7 @@ import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.AggregationReduceContext; import org.elasticsearch.search.aggregations.InternalAggregation; +import org.elasticsearch.search.aggregations.support.SamplingContext; import org.elasticsearch.xcontent.XContentBuilder; import java.io.IOException; @@ -74,6 +75,11 @@ public InternalAggregation reduce(List aggregations, Aggreg return new InternalMedianAbsoluteDeviation(name, metadata, format, valueMerged); } + @Override + public InternalAggregation finalizeSampling(SamplingContext samplingContext) { + return this; + } + @Override public XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException { final boolean anyResults = valuesSketch.size() > 0; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalMin.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalMin.java index 768c45902d70d..bb0f0ecd7750a 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalMin.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalMin.java @@ -12,6 +12,7 @@ import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.AggregationReduceContext; import org.elasticsearch.search.aggregations.InternalAggregation; +import org.elasticsearch.search.aggregations.support.SamplingContext; import org.elasticsearch.xcontent.XContentBuilder; import java.io.IOException; @@ -67,6 +68,11 @@ public InternalMin reduce(List aggregations, AggregationRed return new 
InternalMin(getName(), min, this.format, getMetadata()); } + @Override + public InternalAggregation finalizeSampling(SamplingContext samplingContext) { + return this; + } + @Override public XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException { boolean hasValue = Double.isInfinite(min) == false; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalScriptedMetric.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalScriptedMetric.java index 55fc00873de41..7c1c5e5dcafab 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalScriptedMetric.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalScriptedMetric.java @@ -16,6 +16,7 @@ import org.elasticsearch.script.ScriptedMetricAggContexts; import org.elasticsearch.search.aggregations.AggregationReduceContext; import org.elasticsearch.search.aggregations.InternalAggregation; +import org.elasticsearch.search.aggregations.support.SamplingContext; import org.elasticsearch.xcontent.XContentBuilder; import java.io.IOException; @@ -120,6 +121,11 @@ public InternalAggregation reduce(List aggregations, Aggreg return new InternalScriptedMetric(firstAggregation.getName(), aggregation, firstAggregation.reduceScript, getMetadata()); } + @Override + public InternalAggregation finalizeSampling(SamplingContext samplingContext) { + return this; + } + @Override protected boolean mustReduceOnSingleInternalAgg() { return true; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalStats.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalStats.java index 97298df7b3785..441b06de21371 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalStats.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalStats.java @@ -12,6 +12,7 @@ import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.AggregationReduceContext; import org.elasticsearch.search.aggregations.InternalAggregation; +import org.elasticsearch.search.aggregations.support.SamplingContext; import org.elasticsearch.xcontent.XContentBuilder; import java.io.IOException; @@ -173,6 +174,19 @@ public InternalStats reduce(List aggregations, AggregationR return new InternalStats(name, count, kahanSummation.value(), min, max, format, getMetadata()); } + @Override + public InternalAggregation finalizeSampling(SamplingContext samplingContext) { + return new InternalStats( + name, + samplingContext.inverseScale(count), + samplingContext.inverseScale(sum), + min, + max, + format, + getMetadata() + ); + } + static class Fields { public static final String COUNT = "count"; public static final String MIN = "min"; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalTopHits.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalTopHits.java index 87ab160025ebe..b87df7c87ee27 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalTopHits.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalTopHits.java @@ -21,6 +21,7 @@ import org.elasticsearch.search.SearchHits; import org.elasticsearch.search.aggregations.AggregationReduceContext; import org.elasticsearch.search.aggregations.InternalAggregation; +import org.elasticsearch.search.aggregations.support.SamplingContext; import 
org.elasticsearch.xcontent.XContentBuilder; import java.io.IOException; @@ -169,6 +170,11 @@ public InternalAggregation reduce(List aggregations, Aggreg ); } + @Override + public InternalAggregation finalizeSampling(SamplingContext samplingContext) { + return this; + } + @Override protected boolean mustReduceOnSingleInternalAgg() { return true; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalValueCount.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalValueCount.java index f453243ab29ee..c6ab257ac0fba 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalValueCount.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalValueCount.java @@ -11,6 +11,7 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.search.aggregations.AggregationReduceContext; import org.elasticsearch.search.aggregations.InternalAggregation; +import org.elasticsearch.search.aggregations.support.SamplingContext; import org.elasticsearch.xcontent.XContentBuilder; import java.io.IOException; @@ -66,6 +67,11 @@ public InternalAggregation reduce(List aggregations, Aggreg return new InternalValueCount(name, valueCount, getMetadata()); } + @Override + public InternalAggregation finalizeSampling(SamplingContext samplingContext) { + return new InternalValueCount(name, samplingContext.inverseScale(value), getMetadata()); + } + @Override public XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException { builder.field(CommonFields.VALUE.getPreferredName(), value); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalWeightedAvg.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalWeightedAvg.java index bc772cf8efd57..261bfc5a47ad8 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalWeightedAvg.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalWeightedAvg.java @@ -12,6 +12,7 @@ import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.AggregationReduceContext; import org.elasticsearch.search.aggregations.InternalAggregation; +import org.elasticsearch.search.aggregations.support.SamplingContext; import org.elasticsearch.xcontent.XContentBuilder; import java.io.IOException; @@ -90,6 +91,11 @@ public InternalWeightedAvg reduce(List aggregations, Aggreg return new InternalWeightedAvg(getName(), sumCompensation.value(), weightCompensation.value(), format, getMetadata()); } + @Override + public InternalAggregation finalizeSampling(SamplingContext samplingContext) { + return this; + } + @Override public XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException { builder.field(CommonFields.VALUE.getPreferredName(), weight != 0 ? 
getValue() : null); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MaxAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MaxAggregationBuilder.java index adcd5d85738d6..ccbf7e69c1f8d 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MaxAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MaxAggregationBuilder.java @@ -74,6 +74,11 @@ public MaxAggregationBuilder(StreamInput in) throws IOException { super(in); } + @Override + public boolean supportsSampling() { + return true; + } + @Override protected void innerWriteTo(StreamOutput out) { // Do nothing, no extra state to write to stream diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MedianAbsoluteDeviationAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MedianAbsoluteDeviationAggregationBuilder.java index 32af21ba7b881..a63c7bde037a8 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MedianAbsoluteDeviationAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MedianAbsoluteDeviationAggregationBuilder.java @@ -99,6 +99,11 @@ protected AggregationBuilder shallowCopy(AggregatorFactories.Builder factoriesBu return new MedianAbsoluteDeviationAggregationBuilder(this, factoriesBuilder, metadata); } + @Override + public boolean supportsSampling() { + return true; + } + @Override protected ValuesSourceType defaultValueSourceType() { return CoreValuesSourceType.NUMERIC; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MinAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MinAggregationBuilder.java index df76bb0fee2f9..c72685d6302ae 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MinAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MinAggregationBuilder.java @@ -79,6 +79,11 @@ protected void innerWriteTo(StreamOutput out) { // Do nothing, no extra state to write to stream } + @Override + public boolean supportsSampling() { + return true; + } + @Override protected MinAggregatorFactory innerBuild( AggregationContext context, diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricAggregationBuilder.java index 0895672584da7..daa7259a47fac 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricAggregationBuilder.java @@ -110,6 +110,11 @@ protected void doWriteTo(StreamOutput out) throws IOException { } } + @Override + public boolean supportsSampling() { + return true; + } + /** * Set the {@code init} script. 
*/ diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/StatsAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/StatsAggregationBuilder.java index 6910e10aa89c6..d3306bf732def 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/StatsAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/StatsAggregationBuilder.java @@ -71,6 +71,11 @@ public StatsAggregationBuilder(StreamInput in) throws IOException { super(in); } + @Override + public boolean supportsSampling() { + return true; + } + @Override public Set metricNames() { return InternalStats.METRIC_NAMES; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/Sum.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/Sum.java index 3f9f9b5ca46a8..93557168407f2 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/Sum.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/Sum.java @@ -12,6 +12,7 @@ import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.AggregationReduceContext; import org.elasticsearch.search.aggregations.InternalAggregation; +import org.elasticsearch.search.aggregations.support.SamplingContext; import org.elasticsearch.xcontent.XContentBuilder; import java.io.IOException; @@ -65,6 +66,11 @@ public Sum reduce(List aggregations, AggregationReduceConte return new Sum(name, kahanSummation.value(), format, getMetadata()); } + @Override + public InternalAggregation finalizeSampling(SamplingContext samplingContext) { + return new Sum(name, samplingContext.inverseScale(sum), format, getMetadata()); + } + @Override public XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException { builder.field(CommonFields.VALUE.getPreferredName(), sum); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/SumAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/SumAggregationBuilder.java index d9d938a116aaa..d00ce1024a261 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/SumAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/SumAggregationBuilder.java @@ -62,6 +62,11 @@ protected AggregationBuilder shallowCopy(AggregatorFactories.Builder factoriesBu return new SumAggregationBuilder(this, factoriesBuilder, metadata); } + @Override + public boolean supportsSampling() { + return true; + } + /** * Read from a stream. 
*/ diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregationBuilder.java index 69bb5d53220f8..1f341e8fb8f76 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/TopHitsAggregationBuilder.java @@ -596,6 +596,11 @@ public TopHitsAggregationBuilder subAggregations(Builder subFactories) { ); } + @Override + public boolean supportsSampling() { + return true; + } + @Override public BucketCardinality bucketCardinality() { return BucketCardinality.NONE; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ValueCountAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ValueCountAggregationBuilder.java index 9f1e9b628ccbe..f391c808eb2aa 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ValueCountAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ValueCountAggregationBuilder.java @@ -70,6 +70,11 @@ protected AggregationBuilder shallowCopy(AggregatorFactories.Builder factoriesBu return new ValueCountAggregationBuilder(this, factoriesBuilder, metadata); } + @Override + public boolean supportsSampling() { + return true; + } + /** * Read from a stream. */ diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/WeightedAvgAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/WeightedAvgAggregationBuilder.java index 8b559dcc89b97..fb426be5b05b5 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/WeightedAvgAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/WeightedAvgAggregationBuilder.java @@ -86,6 +86,11 @@ protected AggregationBuilder shallowCopy(Builder factoriesBuilder, Map inputs) { assertEquals(sum / counts, reduced.value(), 0.0000001); } + @Override + protected boolean supportsSampling() { + return true; + } + + @Override + protected void assertSampled(InternalAvg sampled, InternalAvg reduced, SamplingContext samplingContext) { + assertEquals(sampled.value(), reduced.value(), 1e-12); + } + public void testSummationAccuracy() { double[] values = new double[] { 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.9, 1.1, 1.2, 1.3, 1.4, 1.5, 1.6, 1.7 }; verifyAvgOfDoubles(values, 0.9, 0d); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalExtendedStatsTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalExtendedStatsTests.java index b1c37581f128e..6df82fa0721d8 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalExtendedStatsTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalExtendedStatsTests.java @@ -13,6 +13,7 @@ import org.elasticsearch.search.aggregations.InternalAggregation; import org.elasticsearch.search.aggregations.ParsedAggregation; import org.elasticsearch.search.aggregations.metrics.ExtendedStats.Bounds; +import org.elasticsearch.search.aggregations.support.SamplingContext; import org.elasticsearch.test.InternalAggregationTestCase; import java.util.ArrayList; @@ -85,6 +86,21 @@ protected void assertReduced(InternalExtendedStats reduced, List { @Override @@ -45,6 +49,20 @@ protected void 
assertReduced(InternalHDRPercentileRanks reduced, List it1 = sampled.iterator(); + Iterator it2 = reduced.iterator(); + while (it1.hasNext() && it2.hasNext()) { + assertThat(it1.next(), equalTo(it2.next())); + } + } + @Override protected Class implementationClass() { return ParsedHDRPercentileRanks.class; diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalHDRPercentilesTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalHDRPercentilesTests.java index 8b47852ff5736..6e986f69c1c50 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalHDRPercentilesTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalHDRPercentilesTests.java @@ -11,6 +11,7 @@ import org.HdrHistogram.DoubleHistogram; import org.elasticsearch.common.util.Maps; import org.elasticsearch.search.DocValueFormat; +import org.elasticsearch.search.aggregations.support.SamplingContext; import java.util.Arrays; import java.util.HashMap; @@ -19,6 +20,7 @@ import java.util.Map; import static java.util.Collections.emptyMap; +import static org.hamcrest.Matchers.equalTo; public class InternalHDRPercentilesTests extends InternalPercentilesTestCase { @@ -48,6 +50,20 @@ protected void assertReduced(InternalHDRPercentiles reduced, List it1 = sampled.iterator(); + Iterator it2 = reduced.iterator(); + while (it1.hasNext() && it2.hasNext()) { + assertThat(it1.next(), equalTo(it2.next())); + } + } + @Override protected Class implementationClass() { return ParsedHDRPercentiles.class; diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalMaxTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalMaxTests.java index 6098f6fc66d96..af922a36db612 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalMaxTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalMaxTests.java @@ -11,12 +11,15 @@ import org.elasticsearch.common.util.Maps; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.ParsedAggregation; +import org.elasticsearch.search.aggregations.support.SamplingContext; import org.elasticsearch.test.InternalAggregationTestCase; import java.util.HashMap; import java.util.List; import java.util.Map; +import static org.hamcrest.Matchers.equalTo; + public class InternalMaxTests extends InternalAggregationTestCase { @Override @@ -31,6 +34,16 @@ protected void assertReduced(InternalMax reduced, List inputs) { assertEquals(inputs.stream().mapToDouble(InternalMax::value).max().getAsDouble(), reduced.value(), 0); } + @Override + protected boolean supportsSampling() { + return true; + } + + @Override + protected void assertSampled(InternalMax sampled, InternalMax reduced, SamplingContext samplingContext) { + assertThat(sampled.getValue(), equalTo(reduced.getValue())); + } + @Override protected void assertFromXContent(InternalMax max, ParsedAggregation parsedAggregation) { ParsedMax parsed = ((ParsedMax) parsedAggregation); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalMedianAbsoluteDeviationTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalMedianAbsoluteDeviationTests.java index 67f0da1c735bc..cd5afc42e863d 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalMedianAbsoluteDeviationTests.java +++ 
b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalMedianAbsoluteDeviationTests.java @@ -10,6 +10,7 @@ import org.elasticsearch.common.util.Maps; import org.elasticsearch.search.aggregations.ParsedAggregation; +import org.elasticsearch.search.aggregations.support.SamplingContext; import org.elasticsearch.test.InternalAggregationTestCase; import java.io.IOException; @@ -17,6 +18,8 @@ import java.util.List; import java.util.Map; +import static org.hamcrest.Matchers.equalTo; + public class InternalMedianAbsoluteDeviationTests extends InternalAggregationTestCase { @Override @@ -47,6 +50,20 @@ protected void assertReduced(InternalMedianAbsoluteDeviation reduced, List { @Override protected InternalMin createTestInstance(String name, Map metadata) { @@ -30,6 +33,16 @@ protected void assertReduced(InternalMin reduced, List inputs) { assertEquals(inputs.stream().mapToDouble(InternalMin::value).min().getAsDouble(), reduced.value(), 0); } + @Override + protected boolean supportsSampling() { + return true; + } + + @Override + protected void assertSampled(InternalMin sampled, InternalMin reduced, SamplingContext samplingContext) { + assertThat(sampled.getValue(), equalTo(reduced.getValue())); + } + @Override protected void assertFromXContent(InternalMin min, ParsedAggregation parsedAggregation) { ParsedMin parsed = ((ParsedMin) parsedAggregation); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalScriptedMetricTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalScriptedMetricTests.java index df98c4500bb59..000c66d44f521 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalScriptedMetricTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalScriptedMetricTests.java @@ -24,6 +24,7 @@ import org.elasticsearch.search.aggregations.InternalAggregation; import org.elasticsearch.search.aggregations.ParsedAggregation; import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator.PipelineTree; +import org.elasticsearch.search.aggregations.support.SamplingContext; import org.elasticsearch.test.InternalAggregationTestCase; import org.elasticsearch.test.VersionUtils; @@ -161,6 +162,16 @@ protected void assertReduced(InternalScriptedMetric reduced, List inputs) assertEquals(expectedMax, reduced.getMax(), 0d); } + @Override + protected boolean supportsSampling() { + return true; + } + + @Override + protected void assertSampled(InternalStats sampled, InternalStats reduced, SamplingContext samplingContext) { + assertEquals(sampled.getCount(), samplingContext.inverseScale(reduced.getCount())); + assertEquals(sampled.getSum(), samplingContext.inverseScale(reduced.getSum()), 1e-7); + assertEquals(sampled.getMin(), reduced.getMin(), 0d); + assertEquals(sampled.getMax(), reduced.getMax(), 0d); + } + public void testSummationAccuracy() { double[] values = new double[] { 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.9, 1.1, 1.2, 1.3, 1.4, 1.5, 1.6, 1.7 }; verifyStatsOfDoubles(values, 13.5, 0.9, 0d); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalTDigestPercentilesRanksTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalTDigestPercentilesRanksTests.java index 7b28517dae453..27d3fafee1319 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalTDigestPercentilesRanksTests.java +++ 
b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalTDigestPercentilesRanksTests.java @@ -10,12 +10,16 @@ import org.elasticsearch.common.util.Maps; import org.elasticsearch.search.DocValueFormat; +import org.elasticsearch.search.aggregations.support.SamplingContext; import java.util.Arrays; import java.util.HashMap; +import java.util.Iterator; import java.util.List; import java.util.Map; +import static org.hamcrest.Matchers.equalTo; + public class InternalTDigestPercentilesRanksTests extends InternalPercentilesRanksTestCase { @Override @@ -57,6 +61,24 @@ protected void assertReduced(InternalTDigestPercentileRanks reduced, List it1 = sampled.iterator(); + Iterator it2 = reduced.iterator(); + while (it1.hasNext() && it2.hasNext()) { + assertThat(it1.next(), equalTo(it2.next())); + } + } + @Override protected Class implementationClass() { return ParsedTDigestPercentileRanks.class; diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalTDigestPercentilesTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalTDigestPercentilesTests.java index b6894d340a20e..5c8e6aa2700c9 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalTDigestPercentilesTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalTDigestPercentilesTests.java @@ -10,6 +10,7 @@ import org.elasticsearch.common.util.Maps; import org.elasticsearch.search.DocValueFormat; +import org.elasticsearch.search.aggregations.support.SamplingContext; import java.util.Arrays; import java.util.HashMap; @@ -18,6 +19,7 @@ import java.util.Map; import static java.util.Collections.emptyMap; +import static org.hamcrest.Matchers.equalTo; public class InternalTDigestPercentilesTests extends InternalPercentilesTestCase { @@ -55,6 +57,20 @@ protected void assertReduced(InternalTDigestPercentiles reduced, List it1 = sampled.iterator(); + Iterator it2 = reduced.iterator(); + while (it1.hasNext() && it2.hasNext()) { + assertThat(it1.next(), equalTo(it2.next())); + } + } + @Override protected Class implementationClass() { return ParsedTDigestPercentiles.class; diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalTopHitsTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalTopHitsTests.java index ba63452e56472..296a9c8d6ab5c 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalTopHitsTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalTopHitsTests.java @@ -27,6 +27,7 @@ import org.elasticsearch.search.SearchHits; import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.ParsedAggregation; +import org.elasticsearch.search.aggregations.support.SamplingContext; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.InternalAggregationTestCase; import org.elasticsearch.test.NotEqualMessageBuilder; @@ -54,6 +55,7 @@ import static java.lang.Math.min; import static java.util.Comparator.comparing; import static java.util.stream.Collectors.toList; +import static org.hamcrest.Matchers.equalTo; import static org.mockito.Mockito.mock; public class InternalTopHitsTests extends InternalAggregationTestCase { @@ -271,6 +273,16 @@ protected void assertReduced(InternalTopHits reduced, List inpu assertEqualsWithErrorMessageFromXContent(expectedHits, actualHits); } + @Override + protected boolean 
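The iterator walks repeated across the HDR and t-digest percentile tests above all encode the same invariance: quantiles estimated from a uniform random sample are compared element for element, with no rescaling. A hypothetical standalone helper capturing that loop (the tests stop at the shorter iterator; this variant also insists the two sequences have the same length):

import java.util.Iterator;
import java.util.List;

final class IteratorsMatch {
    // Returns true only when both iterables agree element-wise and in length.
    static <T> boolean match(Iterable<T> a, Iterable<T> b) {
        Iterator<T> it1 = a.iterator();
        Iterator<T> it2 = b.iterator();
        while (it1.hasNext() && it2.hasNext()) {
            if (it1.next().equals(it2.next()) == false) {
                return false;
            }
        }
        return it1.hasNext() == false && it2.hasNext() == false;
    }

    public static void main(String[] args) {
        System.out.println(match(List.of(1.0, 2.0), List.of(1.0, 2.0))); // true
    }
}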
supportsSampling() { + return true; + } + + @Override + protected void assertSampled(InternalTopHits sampled, InternalTopHits reduced, SamplingContext samplingContext) { + assertThat(sampled.getHits(), equalTo(reduced.getHits())); + } + /** * Assert that two objects are equals, calling {@link ToXContent#toXContent(XContentBuilder, ToXContent.Params)} to print out their * differences if they aren't equal. diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalValueCountTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalValueCountTests.java index 31ebb152adcbc..618d9bec1ef1c 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalValueCountTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalValueCountTests.java @@ -10,12 +10,15 @@ import org.elasticsearch.common.util.Maps; import org.elasticsearch.search.aggregations.ParsedAggregation; +import org.elasticsearch.search.aggregations.support.SamplingContext; import org.elasticsearch.test.InternalAggregationTestCase; import java.util.HashMap; import java.util.List; import java.util.Map; +import static org.hamcrest.Matchers.equalTo; + public class InternalValueCountTests extends InternalAggregationTestCase { @Override @@ -28,6 +31,16 @@ protected void assertReduced(InternalValueCount reduced, List { @Override @@ -44,6 +47,16 @@ protected void assertReduced(InternalWeightedAvg reduced, List metadata) { return new Sum(name, value, formatter, metadata); } + @Override + protected boolean supportsSampling() { + return true; + } + @Override protected void assertReduced(Sum reduced, List inputs) { double expectedSum = inputs.stream().mapToDouble(Sum::value).sum(); assertEquals(expectedSum, reduced.value(), 0.0001d); } + protected void assertSampled(Sum sampled, Sum reduced, SamplingContext samplingContext) { + assertEquals(sampled.value(), samplingContext.inverseScale(reduced.value()), 1e-7); + } + public void testSummationAccuracy() { // Summing up a normal array and expect an accurate value double[] values = new double[] { 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.9, 1.1, 1.2, 1.3, 1.4, 1.5, 1.6, 1.7 }; diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/support/SamplingContextTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/support/SamplingContextTests.java new file mode 100644 index 0000000000000..b5e761e9061ce --- /dev/null +++ b/server/src/test/java/org/elasticsearch/search/aggregations/support/SamplingContextTests.java @@ -0,0 +1,55 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.search.aggregations.support; + +import org.elasticsearch.test.ESTestCase; + +import static org.hamcrest.Matchers.allOf; +import static org.hamcrest.Matchers.closeTo; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThanOrEqualTo; +import static org.hamcrest.Matchers.lessThanOrEqualTo; + +public class SamplingContextTests extends ESTestCase { + protected static final int NUMBER_OF_TEST_RUNS = 20; + + private static SamplingContext randomContext() { + return new SamplingContext(randomDoubleBetween(1e-8, 0.1, false), randomInt()); + } + + public void testScaling() { + for (int i = 0; i < 20; i++) { + SamplingContext samplingContext = randomContext(); + long randomLong = randomLongBetween(100_000_000L, Long.MAX_VALUE); + double randomDouble = randomDouble(); + long rescaled = samplingContext.inverseScale(samplingContext.scale(randomLong)); + // No matter how you scale `long` values, the inverse back may be a little off + long error = (long) (rescaled * 1e-15); + assertThat( + Double.toString(samplingContext.probability()), + rescaled, + allOf(greaterThanOrEqualTo(randomLong - error), lessThanOrEqualTo(randomLong + error)) + ); + assertThat( + Double.toString(samplingContext.probability()), + randomDouble, + closeTo(samplingContext.inverseScale(samplingContext.scale(randomDouble)), 1e-12) + ); + } + } + + public void testNoScaling() { + SamplingContext samplingContext = new SamplingContext(1.0, randomInt()); + long randomLong = randomLong(); + double randomDouble = randomDouble(); + assertThat(randomLong, equalTo(samplingContext.scale(randomLong))); + assertThat(randomDouble, equalTo(samplingContext.scale(randomDouble))); + } + +} diff --git a/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java b/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java index 36062d69ca401..369d07ab26446 100644 --- a/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java @@ -123,6 +123,7 @@ import org.elasticsearch.search.aggregations.support.AggregationContext; import org.elasticsearch.search.aggregations.support.AggregationContext.ProductionAggregationContext; import org.elasticsearch.search.aggregations.support.CoreValuesSourceType; +import org.elasticsearch.search.aggregations.support.SamplingContext; import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder; import org.elasticsearch.search.aggregations.support.ValuesSourceRegistry; import org.elasticsearch.search.aggregations.support.ValuesSourceType; @@ -1013,7 +1014,19 @@ public void testSupportedFieldTypes() throws IOException { // TODO in the future we can make this more explicit with expectThrows(), when the exceptions are standardized AssertionError failure = null; try { - searchAndReduce(indexSearcher, new MatchAllDocsQuery(), aggregationBuilder, fieldType); + InternalAggregation internalAggregation = searchAndReduce( + indexSearcher, + new MatchAllDocsQuery(), + aggregationBuilder, + fieldType + ); + // We should make sure if the builder says it supports sampling, that the internal aggregations returned override + // finalizeSampling + if (aggregationBuilder.supportsSampling()) { + SamplingContext randomSamplingContext = new SamplingContext(randomDoubleBetween(1e-8, 0.1, false), randomInt()); + InternalAggregation sampledResult = 
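A standalone model of the round-trip property that SamplingContextTests exercises above: scaling and then inverse-scaling should return approximately the starting value, and exactly when the probability is 1.0. The rounding for long values here is an assumption for illustration only; long round-trips can be slightly off, which is what the error bound in testScaling accounts for:

record Sampling(double probability) {
    double scale(double value) { return value * probability; }
    double inverseScale(double value) { return value / probability; }
    // Assumed rounding behaviour, not the Elasticsearch implementation.
    long scale(long value) { return probability == 1.0 ? value : Math.round(value * probability); }
    long inverseScale(long value) { return probability == 1.0 ? value : Math.round(value / probability); }

    public static void main(String[] args) {
        Sampling s = new Sampling(0.01);
        long roundTrip = s.inverseScale(s.scale(1_000_000_000L));
        System.out.println(roundTrip); // ~1_000_000_000, up to rounding error
    }
}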
internalAggregation.finalizeSampling(randomSamplingContext); + assertThat(sampledResult.getClass(), equalTo(internalAggregation.getClass())); + } if (supportedVSTypes.contains(vst) == false || unsupportedMappedFieldTypes.contains(fieldType.typeName())) { failure = new AssertionError( "Aggregator [" diff --git a/test/framework/src/main/java/org/elasticsearch/test/InternalAggregationTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/InternalAggregationTestCase.java index f40af339af193..95ecf3f3a24fe 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/InternalAggregationTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/InternalAggregationTestCase.java @@ -142,6 +142,7 @@ import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator.PipelineTree; import org.elasticsearch.search.aggregations.pipeline.StatsBucketPipelineAggregationBuilder; import org.elasticsearch.search.aggregations.support.AggregationContext; +import org.elasticsearch.search.aggregations.support.SamplingContext; import org.elasticsearch.search.aggregations.timeseries.ParsedTimeSeries; import org.elasticsearch.search.aggregations.timeseries.TimeSeriesAggregationBuilder; import org.elasticsearch.xcontent.ContextParser; @@ -472,6 +473,12 @@ public void testReduceRandom() throws IOException { T reduced = (T) inputs.toReduce().get(0).reduce(toReduce, context); doAssertReducedMultiBucketConsumer(reduced, bucketConsumer); assertReduced(reduced, inputs.toReduce()); + if (supportsSampling()) { + SamplingContext randomContext = new SamplingContext(randomDoubleBetween(1e-8, 0.1, false), randomInt()); + @SuppressWarnings("unchecked") + T sampled = (T) reduced.finalizeSampling(randomContext); + assertSampled(sampled, reduced, randomContext); + } } protected void doAssertReducedMultiBucketConsumer(Aggregation agg, MultiBucketConsumerService.MultiBucketConsumer bucketConsumer) { @@ -487,11 +494,19 @@ protected ScriptService mockScriptService() { protected abstract void assertReduced(T reduced, List inputs); + protected void assertSampled(T sampled, T reduced, SamplingContext samplingContext) { + throw new UnsupportedOperationException("aggregation supports sampling but does not implement assertSampled"); + } + @Override public final T createTestInstance() { return createTestInstance(randomAlphaOfLength(5)); } + protected boolean supportsSampling() { + return false; + } + public final Map createTestMetadata() { Map metadata = null; if (randomBoolean()) { diff --git a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/rate/InternalRate.java b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/rate/InternalRate.java index 5617611219aa4..77b034c911036 100644 --- a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/rate/InternalRate.java +++ b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/rate/InternalRate.java @@ -13,6 +13,7 @@ import org.elasticsearch.search.aggregations.InternalAggregation; import org.elasticsearch.search.aggregations.metrics.CompensatedSum; import org.elasticsearch.search.aggregations.metrics.InternalNumericMetricsAggregation; +import org.elasticsearch.search.aggregations.support.SamplingContext; import org.elasticsearch.xcontent.XContentBuilder; import java.io.IOException; @@ -84,6 +85,11 @@ public InternalRate reduce(List aggregations, AggregationRe return new InternalRate(name, kahanSummation.value(), firstDivisor, format, getMetadata()); } + @Override + public 
InternalAggregation finalizeSampling(SamplingContext samplingContext) { + return new InternalRate(name, samplingContext.inverseScale(sum), divisor, format, getMetadata()); + } + @Override public XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException { builder.field(CommonFields.VALUE.getPreferredName(), value()); diff --git a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/rate/RateAggregationBuilder.java b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/rate/RateAggregationBuilder.java index 7872c362fcc0b..5e28e700399e7 100644 --- a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/rate/RateAggregationBuilder.java +++ b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/rate/RateAggregationBuilder.java @@ -74,6 +74,11 @@ protected AggregationBuilder shallowCopy(AggregatorFactories.Builder factoriesBu return new RateAggregationBuilder(this, factoriesBuilder, metadata); } + @Override + public boolean supportsSampling() { + return true; + } + /** * Read from a stream. */ diff --git a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/ttest/InternalTTest.java b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/ttest/InternalTTest.java index d04a42d7452f7..3425eb206892e 100644 --- a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/ttest/InternalTTest.java +++ b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/ttest/InternalTTest.java @@ -13,6 +13,7 @@ import org.elasticsearch.search.aggregations.AggregationReduceContext; import org.elasticsearch.search.aggregations.InternalAggregation; import org.elasticsearch.search.aggregations.metrics.InternalNumericMetricsAggregation; +import org.elasticsearch.search.aggregations.support.SamplingContext; import org.elasticsearch.xcontent.XContentBuilder; import java.io.IOException; @@ -61,6 +62,11 @@ public InternalTTest reduce(List aggregations, AggregationR return new InternalTTest(name, reduced, format, getMetadata()); } + @Override + public InternalAggregation finalizeSampling(SamplingContext samplingContext) { + return this; + } + @Override public XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException { double value = state.getValue(); diff --git a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/ttest/TTestAggregationBuilder.java b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/ttest/TTestAggregationBuilder.java index 606fdbacfbb55..36b1218411f0b 100644 --- a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/ttest/TTestAggregationBuilder.java +++ b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/ttest/TTestAggregationBuilder.java @@ -110,6 +110,11 @@ protected AggregationBuilder shallowCopy(AggregatorFactories.Builder factoriesBu return new TTestAggregationBuilder(this, factoriesBuilder, metadata); } + @Override + public boolean supportsSampling() { + return true; + } + @Override public BucketCardinality bucketCardinality() { return BucketCardinality.NONE; diff --git a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/rate/InternalRateTests.java b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/rate/InternalRateTests.java index 8bc1cd7ffa0db..b2e0099bf32c4 100644 --- a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/rate/InternalRateTests.java 
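The two finalizeSampling implementations above illustrate the general rule in this patch: extensive quantities (sums, counts, a rate's numerator) are scaled up by the inverse sampling probability, while intensive statistics (the t-test value here, and likewise averages and percentiles elsewhere) pass through unchanged. A minimal illustrative pair, not the Elasticsearch classes:

record Ctx(double probability) {
    double inverseScale(double v) { return v / probability; }
}

// Extensive: the rate's sum must be rescaled to estimate the unsampled rate.
record RateLike(double sum, double divisor) {
    RateLike finalizeSampling(Ctx ctx) {
        return new RateLike(ctx.inverseScale(sum), divisor);
    }
}

// Intensive: the statistic is invariant under uniform sampling, so return this.
record TTestLike(double value) {
    TTestLike finalizeSampling(Ctx ctx) {
        return this;
    }
}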
+++ b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/rate/InternalRateTests.java @@ -14,6 +14,7 @@ import org.elasticsearch.search.aggregations.Aggregation; import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.ParsedAggregation; +import org.elasticsearch.search.aggregations.support.SamplingContext; import org.elasticsearch.test.InternalAggregationTestCase; import org.elasticsearch.xcontent.NamedXContentRegistry; import org.elasticsearch.xcontent.ParseField; @@ -24,6 +25,7 @@ import java.util.List; import java.util.Map; +import static org.hamcrest.Matchers.closeTo; import static org.mockito.Mockito.mock; public class InternalRateTests extends InternalAggregationTestCase { @@ -53,6 +55,16 @@ protected BuilderAndToReduce randomResultsToReduce(String name, in return new BuilderAndToReduce<>(mock(AggregationBuilder.class), inputs); } + @Override + protected boolean supportsSampling() { + return true; + } + + @Override + protected void assertSampled(InternalRate sampled, InternalRate reduced, SamplingContext samplingContext) { + assertThat(sampled.getValue(), closeTo(samplingContext.inverseScale(reduced.getValue()), 1e-10)); + } + @Override protected void assertReduced(InternalRate reduced, List inputs) { double expected = inputs.stream().mapToDouble(a -> a.sum).sum() / reduced.divisor; diff --git a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/ttest/InternalTTestTests.java b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/ttest/InternalTTestTests.java index d9c9bfa80ac6a..7a8bb0cc6471f 100644 --- a/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/ttest/InternalTTestTests.java +++ b/x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/ttest/InternalTTestTests.java @@ -17,6 +17,7 @@ import org.elasticsearch.search.aggregations.Aggregation; import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.ParsedAggregation; +import org.elasticsearch.search.aggregations.support.SamplingContext; import org.elasticsearch.test.InternalAggregationTestCase; import org.elasticsearch.xcontent.NamedXContentRegistry; import org.elasticsearch.xcontent.ParseField; @@ -77,6 +78,16 @@ protected void assertReduced(InternalTTest reduced, List inputs) assertEquals(expected.getValue(), reduced.getValue(), 0.00001); } + @Override + protected void assertSampled(InternalTTest sampled, InternalTTest reduced, SamplingContext samplingContext) { + assertEquals(sampled.getValue(), reduced.getValue(), 1e-12); + } + + @Override + protected boolean supportsSampling() { + return true; + } + @Override protected void assertFromXContent(InternalTTest min, ParsedAggregation parsedAggregation) { // There is no ParsedTTest yet so we cannot test it here From 056a3c3b4cfecb1e10bd7b93cc81e674df7e5915 Mon Sep 17 00:00:00 2001 From: Dan Hermann Date: Wed, 9 Feb 2022 09:17:10 -0600 Subject: [PATCH 007/167] Remove tasks client from HRLC (#83649) --- .../client/RestHighLevelClient.java | 10 - .../org/elasticsearch/client/TasksClient.java | 164 ------------- .../client/TasksRequestConverters.java | 64 ----- .../client/tasks/CancelTasksRequest.java | 167 ------------- .../client/tasks/CancelTasksResponse.java | 92 -------- .../client/tasks/ElasticsearchException.java | 221 ------------------ .../client/tasks/GetTaskRequest.java | 97 -------- .../client/tasks/GetTaskResponse.java | 49 ---- .../client/tasks/ListTasksResponse.java 
| 122 ---------- .../elasticsearch/client/tasks/NodeData.java | 159 ------------- .../elasticsearch/client/tasks/TaskGroup.java | 82 ------- .../elasticsearch/client/tasks/TaskInfo.java | 217 ----------------- .../client/tasks/TaskOperationFailure.java | 107 --------- .../integration/TransformIntegTestCase.java | 22 +- 14 files changed, 14 insertions(+), 1559 deletions(-) delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/TasksClient.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/TasksRequestConverters.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/tasks/CancelTasksRequest.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/tasks/CancelTasksResponse.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/tasks/ElasticsearchException.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/tasks/GetTaskRequest.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/tasks/GetTaskResponse.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/tasks/ListTasksResponse.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/tasks/NodeData.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/tasks/TaskGroup.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/tasks/TaskInfo.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/tasks/TaskOperationFailure.java diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java index 3650662d733cf..aa3202bde6084 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java @@ -279,7 +279,6 @@ public class RestHighLevelClient implements Closeable { private final ClusterClient clusterClient = new ClusterClient(this); private final IngestClient ingestClient = new IngestClient(this); private final SnapshotClient snapshotClient = new SnapshotClient(this); - private final TasksClient tasksClient = new TasksClient(this); private final XPackClient xPackClient = new XPackClient(this); private final MachineLearningClient machineLearningClient = new MachineLearningClient(this); private final SecurityClient securityClient = new SecurityClient(this); @@ -394,15 +393,6 @@ public final SnapshotClient snapshot() { return snapshotClient; } - /** - * Provides a {@link TasksClient} which can be used to access the Tasks API. - * - * See Task Management API on elastic.co - */ - public final TasksClient tasks() { - return tasksClient; - } - /** * Provides methods for accessing the Elastic Licensed X-Pack Info * and Usage APIs that are shipped with the default distribution of diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/TasksClient.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/TasksClient.java deleted file mode 100644 index 83f2185f5104b..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/TasksClient.java +++ /dev/null @@ -1,164 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. 
under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ - -package org.elasticsearch.client; - -import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksRequest; -import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse; -import org.elasticsearch.client.tasks.CancelTasksRequest; -import org.elasticsearch.client.tasks.CancelTasksResponse; -import org.elasticsearch.client.tasks.GetTaskRequest; -import org.elasticsearch.client.tasks.GetTaskResponse; - -import java.io.IOException; -import java.util.Optional; - -import static java.util.Collections.emptySet; - -/** - * A wrapper for the {@link RestHighLevelClient} that provides methods for accessing the Tasks API. - *
    - * See Task Management API on elastic.co - * - * @deprecated The High Level Rest Client is deprecated in favor of the - * - * Elasticsearch Java API Client - */ -@Deprecated(since = "7.16.0", forRemoval = true) -@SuppressWarnings("removal") -public final class TasksClient { - private final RestHighLevelClient restHighLevelClient; - - TasksClient(RestHighLevelClient restHighLevelClient) { - this.restHighLevelClient = restHighLevelClient; - } - - /** - * Get current tasks using the Task Management API. - * See - * Task Management API on elastic.co - * @param request the request - * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return the response - * @throws IOException in case there is a problem sending the request or parsing back the response - */ - public ListTasksResponse list(ListTasksRequest request, RequestOptions options) throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - request, - TasksRequestConverters::listTasks, - options, - ListTasksResponse::fromXContent, - emptySet() - ); - } - - /** - * Asynchronously get current tasks using the Task Management API. - * See - * Task Management API on elastic.co - * @param request the request - * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener the listener to be notified upon request completion - * @return cancellable that may be used to cancel the request - */ - public Cancellable listAsync(ListTasksRequest request, RequestOptions options, ActionListener listener) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - request, - TasksRequestConverters::listTasks, - options, - ListTasksResponse::fromXContent, - listener, - emptySet() - ); - } - - /** - * Get a task using the Task Management API. - * See - * Task Management API on elastic.co - * @param request the request - * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return the response - * @throws IOException in case there is a problem sending the request or parsing back the response - */ - public Optional get(GetTaskRequest request, RequestOptions options) throws IOException { - return restHighLevelClient.performRequestAndParseOptionalEntity( - request, - TasksRequestConverters::getTask, - options, - GetTaskResponse::fromXContent - ); - } - - /** - * Get a task using the Task Management API. - * See - * Task Management API on elastic.co - * @param request the request - * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener an actionlistener that takes an optional response (404s are returned as an empty Optional) - * @return cancellable that may be used to cancel the request - */ - public Cancellable getAsync(GetTaskRequest request, RequestOptions options, ActionListener> listener) { - - return restHighLevelClient.performRequestAsyncAndParseOptionalEntity( - request, - TasksRequestConverters::getTask, - options, - GetTaskResponse::fromXContent, - listener - ); - } - - /** - * Cancel one or more cluster tasks using the Task Management API. - * - * See - * Task Management API on elastic.co - * @param cancelTasksRequest the request - * @param options the request options (e.g. 
headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return the response - * @throws IOException in case there is a problem sending the request or parsing back the response - * - */ - public CancelTasksResponse cancel(CancelTasksRequest cancelTasksRequest, RequestOptions options) throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - cancelTasksRequest, - TasksRequestConverters::cancelTasks, - options, - CancelTasksResponse::fromXContent, - emptySet() - ); - } - - /** - * Asynchronously cancel one or more cluster tasks using the Task Management API. - * - * See - * Task Management API on elastic.co - * @param cancelTasksRequest the request - * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener the listener to be notified upon request completion - * @return cancellable that may be used to cancel the request - */ - public Cancellable cancelAsync( - CancelTasksRequest cancelTasksRequest, - RequestOptions options, - ActionListener listener - ) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - cancelTasksRequest, - TasksRequestConverters::cancelTasks, - options, - CancelTasksResponse::fromXContent, - listener, - emptySet() - ); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/TasksRequestConverters.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/TasksRequestConverters.java deleted file mode 100644 index 54525a8cd304d..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/TasksRequestConverters.java +++ /dev/null @@ -1,64 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ - -package org.elasticsearch.client; - -import org.apache.http.client.methods.HttpGet; -import org.apache.http.client.methods.HttpPost; -import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksRequest; -import org.elasticsearch.client.RequestConverters.EndpointBuilder; -import org.elasticsearch.client.tasks.CancelTasksRequest; -import org.elasticsearch.client.tasks.GetTaskRequest; - -final class TasksRequestConverters { - - private TasksRequestConverters() {} - - static Request cancelTasks(CancelTasksRequest req) { - Request request = new Request(HttpPost.METHOD_NAME, "/_tasks/_cancel"); - RequestConverters.Params params = new RequestConverters.Params(); - req.getTimeout().ifPresent(params::withTimeout); - req.getTaskId().ifPresent(params::withTaskId); - req.getParentTaskId().ifPresent(params::withParentTaskId); - params.withNodes(req.getNodes()).withActions(req.getActions()); - if (req.getWaitForCompletion() != null) { - params.withWaitForCompletion(req.getWaitForCompletion()); - } - request.addParameters(params.asMap()); - return request; - } - - static Request listTasks(ListTasksRequest listTaskRequest) { - if (listTaskRequest.getTargetTaskId() != null && listTaskRequest.getTargetTaskId().isSet()) { - throw new IllegalArgumentException("TargetTaskId cannot be used for list tasks request"); - } - Request request = new Request(HttpGet.METHOD_NAME, "/_tasks"); - RequestConverters.Params params = new RequestConverters.Params(); - params.withTimeout(listTaskRequest.getTimeout()) - .withDetailed(listTaskRequest.getDetailed()) - .withWaitForCompletion(listTaskRequest.getWaitForCompletion()) - .withParentTaskId(listTaskRequest.getTargetParentTaskId()) - .withNodes(listTaskRequest.getNodes()) - .withActions(listTaskRequest.getActions()) - .putParam("group_by", "none"); - request.addParameters(params.asMap()); - return request; - } - - static Request getTask(GetTaskRequest getTaskRequest) { - String endpoint = new EndpointBuilder().addPathPartAsIs("_tasks") - .addPathPartAsIs(getTaskRequest.getNodeId() + ":" + Long.toString(getTaskRequest.getTaskId())) - .build(); - Request request = new Request(HttpGet.METHOD_NAME, endpoint); - RequestConverters.Params params = new RequestConverters.Params(); - params.withTimeout(getTaskRequest.getTimeout()).withWaitForCompletion(getTaskRequest.getWaitForCompletion()); - request.addParameters(params.asMap()); - return request; - } - -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/tasks/CancelTasksRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/tasks/CancelTasksRequest.java deleted file mode 100644 index c80bc6d55ccae..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/tasks/CancelTasksRequest.java +++ /dev/null @@ -1,167 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ -package org.elasticsearch.client.tasks; - -import org.elasticsearch.client.Validatable; -import org.elasticsearch.core.TimeValue; - -import java.util.ArrayList; -import java.util.List; -import java.util.Objects; -import java.util.Optional; - -public class CancelTasksRequest implements Validatable { - - private final List nodes = new ArrayList<>(); - private final List actions = new ArrayList<>(); - private Optional timeout = Optional.empty(); - private Optional parentTaskId = Optional.empty(); - private Optional taskId = Optional.empty(); - private Boolean waitForCompletion; - - CancelTasksRequest() {} - - void setNodes(List nodes) { - this.nodes.addAll(nodes); - } - - public List getNodes() { - return nodes; - } - - void setTimeout(TimeValue timeout) { - this.timeout = Optional.of(timeout); - } - - public Optional getTimeout() { - return timeout; - } - - void setActions(List actions) { - this.actions.addAll(actions); - } - - public List getActions() { - return actions; - } - - void setParentTaskId(TaskId parentTaskId) { - this.parentTaskId = Optional.of(parentTaskId); - } - - public Optional getParentTaskId() { - return parentTaskId; - } - - void setTaskId(TaskId taskId) { - this.taskId = Optional.of(taskId); - } - - public Optional getTaskId() { - return taskId; - } - - public Boolean getWaitForCompletion() { - return waitForCompletion; - } - - public void setWaitForCompletion(boolean waitForCompletion) { - this.waitForCompletion = waitForCompletion; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if ((o instanceof CancelTasksRequest) == false) return false; - CancelTasksRequest that = (CancelTasksRequest) o; - return Objects.equals(getNodes(), that.getNodes()) - && Objects.equals(getActions(), that.getActions()) - && Objects.equals(getTimeout(), that.getTimeout()) - && Objects.equals(getParentTaskId(), that.getParentTaskId()) - && Objects.equals(getTaskId(), that.getTaskId()) - && Objects.equals(waitForCompletion, that.waitForCompletion); - } - - @Override - public int hashCode() { - return Objects.hash(getNodes(), getActions(), getTimeout(), getParentTaskId(), getTaskId(), waitForCompletion); - } - - @Override - public String toString() { - return "CancelTasksRequest{" - + "nodes=" - + nodes - + ", actions=" - + actions - + ", timeout=" - + timeout - + ", parentTaskId=" - + parentTaskId - + ", taskId=" - + taskId - + ", waitForCompletion=" - + waitForCompletion - + '}'; - } - - @SuppressWarnings("HiddenField") - public static class Builder { - private Optional timeout = Optional.empty(); - private Optional taskId = Optional.empty(); - private Optional parentTaskId = Optional.empty(); - private List actionsFilter = new ArrayList<>(); - private List nodesFilter = new ArrayList<>(); - private Boolean waitForCompletion; - - public Builder withTimeout(TimeValue timeout) { - this.timeout = Optional.of(timeout); - return this; - } - - public Builder withTaskId(TaskId taskId) { - this.taskId = Optional.of(taskId); - return this; - } - - public Builder withParentTaskId(TaskId taskId) { - this.parentTaskId = Optional.of(taskId); - return this; - } - - public Builder withActionsFiltered(List actions) { - this.actionsFilter.clear(); - this.actionsFilter.addAll(actions); - return this; - } - - public Builder withNodesFiltered(List nodes) { - this.nodesFilter.clear(); - this.nodesFilter.addAll(nodes); - return this; - } - - public Builder withWaitForCompletion(boolean waitForCompletion) { - this.waitForCompletion = waitForCompletion; - return this; - } - - 
public CancelTasksRequest build() { - CancelTasksRequest request = new CancelTasksRequest(); - timeout.ifPresent(request::setTimeout); - taskId.ifPresent(request::setTaskId); - parentTaskId.ifPresent(request::setParentTaskId); - request.setNodes(nodesFilter); - request.setActions(actionsFilter); - if (waitForCompletion != null) { - request.setWaitForCompletion(waitForCompletion); - } - return request; - } - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/tasks/CancelTasksResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/tasks/CancelTasksResponse.java deleted file mode 100644 index 9023a53fcfe25..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/tasks/CancelTasksResponse.java +++ /dev/null @@ -1,92 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.tasks; - -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.List; - -import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; - -/** - * cancel tasks response that contains - * - task failures - * - node failures - * - tasks - */ -public class CancelTasksResponse extends ListTasksResponse { - - CancelTasksResponse(List nodesInfoData, List taskFailures, List nodeFailures) { - super(nodesInfoData, taskFailures, nodeFailures); - } - - public static CancelTasksResponse fromXContent(final XContentParser parser) throws IOException { - return PARSER.parse(parser, null); - } - - private static ConstructingObjectParser PARSER; - - static { - ConstructingObjectParser parser = new ConstructingObjectParser<>( - "cancel_tasks_response", - true, - constructingObjects -> { - int i = 0; - @SuppressWarnings("unchecked") - List tasksFailures = (List) constructingObjects[i++]; - @SuppressWarnings("unchecked") - List nodeFailures = (List) constructingObjects[i++]; - @SuppressWarnings("unchecked") - List nodesInfoData = (List) constructingObjects[i]; - return new CancelTasksResponse(nodesInfoData, tasksFailures, nodeFailures); - } - ); - - parser.declareObjectArray( - optionalConstructorArg(), - (p, c) -> TaskOperationFailure.fromXContent(p), - new ParseField("task_failures") - ); - parser.declareObjectArray( - optionalConstructorArg(), - (p, c) -> ElasticsearchException.fromXContent(p), - new ParseField("node_failures") - ); - parser.declareNamedObjects(optionalConstructorArg(), NodeData.PARSER, new ParseField("nodes")); - PARSER = parser; - } - - @Override - public boolean equals(Object o) { - return super.equals(o); - } - - @Override - public int hashCode() { - return super.hashCode(); - } - - @Override - public String toString() { - return "CancelTasksResponse{" - + "taskFailures=" - + taskFailures - + ", nodeFailures=" - + nodeFailures - + ", nodesInfoData=" - + nodesInfoData - + ", tasks=" - + tasks - + ", taskGroups=" - + taskGroups - + '}'; - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/tasks/ElasticsearchException.java 
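For readers migrating off the deprecated high-level REST client, the Builder deleted above was used roughly as follows; the TaskId constructor shown is assumed from context, and the surrounding client setup is omitted:

// Hedged usage sketch of the removed HRLC API, per the methods in this diff.
CancelTasksRequest request = new CancelTasksRequest.Builder()
    .withTaskId(new TaskId("node-1:123"))          // hypothetical task id
    .withWaitForCompletion(true)
    .withTimeout(TimeValue.timeValueSeconds(30))
    .build();
CancelTasksResponse response = client.tasks().cancel(request, RequestOptions.DEFAULT);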
b/client/rest-high-level/src/main/java/org/elasticsearch/client/tasks/ElasticsearchException.java deleted file mode 100644 index 1fc7811ac0b9b..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/tasks/ElasticsearchException.java +++ /dev/null @@ -1,221 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.tasks; - -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Objects; - -import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken; - -/** - * client side counterpart of server side - * {@link org.elasticsearch.ElasticsearchException} - * It wraps the same content but it is not throwable. - */ -public class ElasticsearchException { - - private static final String TYPE = "type"; - private static final String REASON = "reason"; - private static final String CAUSED_BY = "caused_by"; - private static final ParseField SUPPRESSED = new ParseField("suppressed"); - private static final String STACK_TRACE = "stack_trace"; - private static final String HEADER = "header"; - private static final String ROOT_CAUSE = "root_cause"; - - private String msg; - private ElasticsearchException cause; - private final Map> headers = new HashMap<>(); - private final List suppressed = new ArrayList<>(); - - ElasticsearchException(String msg) { - this.msg = msg; - this.cause = null; - } - - ElasticsearchException(String msg, ElasticsearchException cause) { - this.msg = msg; - this.cause = cause; - } - - public String getMsg() { - return msg; - } - - public ElasticsearchException getCause() { - return cause; - } - - public List getSuppressed() { - return suppressed; - } - - void addSuppressed(List suppressedExceptions) { - this.suppressed.addAll(suppressedExceptions); - } - - /** - * Generate a {@link ElasticsearchException} from a {@link XContentParser}. This does not - * return the original exception type (ie NodeClosedException for example) but just wraps - * the type, the reason and the cause of the exception. It also recursively parses the - * tree structure of the cause, returning it as a tree structure of {@link ElasticsearchException} - * instances. 
- */ - static ElasticsearchException fromXContent(XContentParser parser) throws IOException { - XContentParser.Token token = parser.nextToken(); - ensureExpectedToken(XContentParser.Token.FIELD_NAME, token, parser); - return innerFromXContent(parser, false); - } - - private static ElasticsearchException innerFromXContent(XContentParser parser, boolean parseRootCauses) throws IOException { - XContentParser.Token token = parser.currentToken(); - ensureExpectedToken(XContentParser.Token.FIELD_NAME, token, parser); - - String type = null, reason = null, stack = null; - ElasticsearchException cause = null; - Map> headers = new HashMap<>(); - List rootCauses = new ArrayList<>(); - List suppressed = new ArrayList<>(); - - for (; token == XContentParser.Token.FIELD_NAME; token = parser.nextToken()) { - String currentFieldName = parser.currentName(); - token = parser.nextToken(); - - if (token.isValue()) { - if (TYPE.equals(currentFieldName)) { - type = parser.text(); - } else if (REASON.equals(currentFieldName)) { - reason = parser.text(); - } else if (STACK_TRACE.equals(currentFieldName)) { - stack = parser.text(); - } - } else if (token == XContentParser.Token.START_OBJECT) { - if (CAUSED_BY.equals(currentFieldName)) { - cause = fromXContent(parser); - } else if (HEADER.equals(currentFieldName)) { - while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { - if (token == XContentParser.Token.FIELD_NAME) { - currentFieldName = parser.currentName(); - } else { - List values = headers.getOrDefault(currentFieldName, new ArrayList<>()); - if (token == XContentParser.Token.VALUE_STRING) { - values.add(parser.text()); - } else if (token == XContentParser.Token.START_ARRAY) { - while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { - if (token == XContentParser.Token.VALUE_STRING) { - values.add(parser.text()); - } else { - parser.skipChildren(); - } - } - } else if (token == XContentParser.Token.START_OBJECT) { - parser.skipChildren(); - } - headers.put(currentFieldName, values); - } - } - } else { - // Any additional metadata object added by the metadataToXContent method is ignored - // and skipped, so that the parser does not fail on unknown fields. The parser only - // support metadata key-pairs and metadata arrays of values. - parser.skipChildren(); - } - } else if (token == XContentParser.Token.START_ARRAY) { - if (parseRootCauses && ROOT_CAUSE.equals(currentFieldName)) { - while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { - rootCauses.add(fromXContent(parser)); - } - } else if (SUPPRESSED.match(currentFieldName, parser.getDeprecationHandler())) { - while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { - suppressed.add(fromXContent(parser)); - } - } else { - // Parse the array and add each item to the corresponding list of metadata. - // Arrays of objects are not supported yet and just ignored and skipped. - List values = new ArrayList<>(); - while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { - if (token == XContentParser.Token.VALUE_STRING) { - values.add(parser.text()); - } else { - parser.skipChildren(); - } - } - } - } - } - - ElasticsearchException e = new ElasticsearchException(buildMessage(type, reason, stack), cause); - for (Map.Entry> header : headers.entrySet()) { - e.addHeader(header.getKey(), header.getValue()); - } - - // Adds root causes as suppressed exception. This way they are not lost - // after parsing and can be retrieved using getSuppressed() method. 
- e.suppressed.addAll(rootCauses); - e.suppressed.addAll(suppressed); - - return e; - } - - void addHeader(String key, List value) { - headers.put(key, value); - - } - - public Map> getHeaders() { - return headers; - } - - static String buildMessage(String type, String reason, String stack) { - StringBuilder message = new StringBuilder("Elasticsearch exception ["); - message.append(TYPE).append('=').append(type).append(", "); - message.append(REASON).append('=').append(reason); - if (stack != null) { - message.append(", ").append(STACK_TRACE).append('=').append(stack); - } - message.append(']'); - return message.toString(); - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if ((o instanceof ElasticsearchException) == false) return false; - ElasticsearchException that = (ElasticsearchException) o; - return Objects.equals(getMsg(), that.getMsg()) - && Objects.equals(getCause(), that.getCause()) - && Objects.equals(getHeaders(), that.getHeaders()) - && Objects.equals(getSuppressed(), that.getSuppressed()); - } - - @Override - public int hashCode() { - return Objects.hash(getMsg(), getCause(), getHeaders(), getSuppressed()); - } - - @Override - public String toString() { - return "ElasticsearchException{" - + "msg='" - + msg - + '\'' - + ", cause=" - + cause - + ", headers=" - + headers - + ", suppressed=" - + suppressed - + '}'; - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/tasks/GetTaskRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/tasks/GetTaskRequest.java deleted file mode 100644 index 070bec0718511..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/tasks/GetTaskRequest.java +++ /dev/null @@ -1,97 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.tasks; - -import org.elasticsearch.client.Validatable; -import org.elasticsearch.client.ValidationException; -import org.elasticsearch.core.TimeValue; - -import java.util.Objects; -import java.util.Optional; - -public class GetTaskRequest implements Validatable { - private final String nodeId; - private final long taskId; - private boolean waitForCompletion = false; - private TimeValue timeout = null; - - public GetTaskRequest(String nodeId, long taskId) { - this.nodeId = nodeId; - this.taskId = taskId; - } - - public String getNodeId() { - return nodeId; - } - - public long getTaskId() { - return taskId; - } - - /** - * Should this request wait for all found tasks to complete? - */ - public boolean getWaitForCompletion() { - return waitForCompletion; - } - - /** - * Should this request wait for all found tasks to complete? - */ - public GetTaskRequest setWaitForCompletion(boolean waitForCompletion) { - this.waitForCompletion = waitForCompletion; - return this; - } - - /** - * Timeout to wait for any async actions this request must take. It must take anywhere from 0 to 2. - */ - public TimeValue getTimeout() { - return timeout; - } - - /** - * Timeout to wait for any async actions this request must take. 
- */ - public GetTaskRequest setTimeout(TimeValue timeout) { - this.timeout = timeout; - return this; - } - - @Override - public Optional validate() { - final ValidationException validationException = new ValidationException(); - if (timeout != null && waitForCompletion == false) { - validationException.addValidationError("Timeout settings are only accepted if waitForCompletion is also set"); - } - if (validationException.validationErrors().isEmpty()) { - return Optional.empty(); - } - return Optional.of(validationException); - } - - @Override - public int hashCode() { - return Objects.hash(nodeId, taskId, waitForCompletion, timeout); - } - - @Override - public boolean equals(Object obj) { - if (obj == null) { - return false; - } - if (getClass() != obj.getClass()) { - return false; - } - GetTaskRequest other = (GetTaskRequest) obj; - return Objects.equals(nodeId, other.nodeId) - && taskId == other.taskId - && waitForCompletion == other.waitForCompletion - && Objects.equals(timeout, other.timeout); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/tasks/GetTaskResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/tasks/GetTaskResponse.java deleted file mode 100644 index 64159b199488d..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/tasks/GetTaskResponse.java +++ /dev/null @@ -1,49 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.tasks; - -import org.elasticsearch.tasks.TaskInfo; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.XContentParser; - -import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; - -public class GetTaskResponse { - private final boolean completed; - private final TaskInfo taskInfo; - public static final ParseField COMPLETED = new ParseField("completed"); - public static final ParseField TASK = new ParseField("task"); - - public GetTaskResponse(boolean completed, TaskInfo taskInfo) { - this.completed = completed; - this.taskInfo = taskInfo; - } - - public boolean isCompleted() { - return completed; - } - - public TaskInfo getTaskInfo() { - return taskInfo; - } - - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "get_task", - true, - a -> new GetTaskResponse((boolean) a[0], (TaskInfo) a[1]) - ); - static { - PARSER.declareBoolean(constructorArg(), COMPLETED); - PARSER.declareObject(constructorArg(), (p, c) -> TaskInfo.fromXContent(p), TASK); - } - - public static GetTaskResponse fromXContent(XContentParser parser) { - return PARSER.apply(parser, null); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/tasks/ListTasksResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/tasks/ListTasksResponse.java deleted file mode 100644 index 151549097a989..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/tasks/ListTasksResponse.java +++ /dev/null @@ -1,122 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. 
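The validate() method above encodes a single rule: a timeout is only accepted together with waitForCompletion. A sketch of the legal and illegal combinations, using placeholder node and task ids and the setters shown in this diff:

GetTaskRequest ok = new GetTaskRequest("node-1", 42L)    // placeholders
    .setWaitForCompletion(true)
    .setTimeout(TimeValue.timeValueSeconds(10));         // validate() returns empty

GetTaskRequest rejected = new GetTaskRequest("node-1", 42L)
    .setTimeout(TimeValue.timeValueSeconds(10));         // validate() reports an error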
Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.tasks; - -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Objects; -import java.util.stream.Collectors; - -import static java.util.stream.Collectors.groupingBy; -import static java.util.stream.Collectors.toList; - -public class ListTasksResponse { - - protected final List taskFailures = new ArrayList<>(); - protected final List nodeFailures = new ArrayList<>(); - protected final List nodesInfoData = new ArrayList<>(); - protected final List tasks = new ArrayList<>(); - protected final List taskGroups = new ArrayList<>(); - - ListTasksResponse(List nodesInfoData, List taskFailures, List nodeFailures) { - if (taskFailures != null) { - this.taskFailures.addAll(taskFailures); - } - if (nodeFailures != null) { - this.nodeFailures.addAll(nodeFailures); - } - if (nodesInfoData != null) { - this.nodesInfoData.addAll(nodesInfoData); - } - this.tasks.addAll(this.nodesInfoData.stream().flatMap(nodeData -> nodeData.getTasks().stream()).collect(toList())); - this.taskGroups.addAll(buildTaskGroups()); - } - - private List buildTaskGroups() { - Map taskIdToBuilderMap = new HashMap<>(); - List topLevelTasks = new ArrayList<>(); - // First populate all tasks - for (TaskInfo taskInfo : this.tasks) { - taskIdToBuilderMap.put(taskInfo.getTaskId(), TaskGroup.builder(taskInfo)); - } - - // Now go through all task group builders and add children to their parents - for (TaskGroup.Builder taskGroup : taskIdToBuilderMap.values()) { - TaskId parentTaskId = taskGroup.getTaskInfo().getParentTaskId(); - if (parentTaskId != null) { - TaskGroup.Builder parentTask = taskIdToBuilderMap.get(parentTaskId); - if (parentTask != null) { - // we found parent in the list of tasks - add it to the parent list - parentTask.addGroup(taskGroup); - } else { - // we got zombie or the parent was filtered out - add it to the top task list - topLevelTasks.add(taskGroup); - } - } else { - // top level task - add it to the top task list - topLevelTasks.add(taskGroup); - } - } - return topLevelTasks.stream().map(TaskGroup.Builder::build).collect(Collectors.toUnmodifiableList()); - } - - public List getTasks() { - return tasks; - } - - public Map> getPerNodeTasks() { - return getTasks().stream().collect(groupingBy(TaskInfo::getNodeId)); - } - - public List getTaskFailures() { - return taskFailures; - } - - public List getNodeFailures() { - return nodeFailures; - } - - public List getTaskGroups() { - return taskGroups; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if ((o instanceof ListTasksResponse) == false) return false; - ListTasksResponse response = (ListTasksResponse) o; - return nodesInfoData.equals(response.nodesInfoData) - && Objects.equals(getTaskFailures(), response.getTaskFailures()) - && Objects.equals(getNodeFailures(), response.getNodeFailures()) - && Objects.equals(getTasks(), response.getTasks()) - && Objects.equals(getTaskGroups(), response.getTaskGroups()); - } - - @Override - public int hashCode() { - return Objects.hash(nodesInfoData, getTaskFailures(), getNodeFailures(), getTasks(), getTaskGroups()); - } - - @Override - public String toString() { - return "CancelTasksResponse{" - + "nodesInfoData=" - + nodesInfoData - + ", taskFailures=" - + 
taskFailures - + ", nodeFailures=" - + nodeFailures - + ", tasks=" - + tasks - + ", taskGroups=" - + taskGroups - + '}'; - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/tasks/NodeData.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/tasks/NodeData.java deleted file mode 100644 index 1e33b7ede7e48..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/tasks/NodeData.java +++ /dev/null @@ -1,159 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.tasks; - -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.XContentParser; - -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Objects; - -class NodeData { - - private String nodeId; - private String name; - private String transportAddress; - private String host; - private String ip; - private final List roles = new ArrayList<>(); - private final Map attributes = new HashMap<>(); - private final List tasks = new ArrayList<>(); - - NodeData(String nodeId) { - this.nodeId = nodeId; - } - - void setName(String name) { - this.name = name; - } - - public void setAttributes(Map attributes) { - if (attributes != null) { - this.attributes.putAll(attributes); - } - } - - void setTransportAddress(String transportAddress) { - this.transportAddress = transportAddress; - } - - void setHost(String host) { - this.host = host; - } - - void setIp(String ip) { - this.ip = ip; - } - - void setRoles(List roles) { - if (roles != null) { - this.roles.addAll(roles); - } - } - - public String getNodeId() { - return nodeId; - } - - public String getName() { - return name; - } - - public String getTransportAddress() { - return transportAddress; - } - - public String getHost() { - return host; - } - - public String getIp() { - return ip; - } - - public List getRoles() { - return roles; - } - - public Map getAttributes() { - return attributes; - } - - public List getTasks() { - return tasks; - } - - void setTasks(List tasks) { - if (tasks != null) { - this.tasks.addAll(tasks); - } - } - - @Override - public String toString() { - return "NodeData{" - + "nodeId='" - + nodeId - + '\'' - + ", name='" - + name - + '\'' - + ", transportAddress='" - + transportAddress - + '\'' - + ", host='" - + host - + '\'' - + ", ip='" - + ip - + '\'' - + ", roles=" - + roles - + ", attributes=" - + attributes - + '}'; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if ((o instanceof NodeData) == false) return false; - NodeData nodeData = (NodeData) o; - return Objects.equals(getNodeId(), nodeData.getNodeId()) - && Objects.equals(getName(), nodeData.getName()) - && Objects.equals(getTransportAddress(), nodeData.getTransportAddress()) - && Objects.equals(getHost(), nodeData.getHost()) - && Objects.equals(getIp(), nodeData.getIp()) - && Objects.equals(getRoles(), nodeData.getRoles()) - && Objects.equals(getAttributes(), nodeData.getAttributes()) - && Objects.equals(getTasks(), nodeData.getTasks()); - } - - @Override - public int hashCode() { - return Objects.hash(getNodeId(), getName(), 
getTransportAddress(), getHost(), getIp(), getRoles(), getAttributes(), getTasks()); - } - - public static final ObjectParser.NamedObjectParser PARSER; - - static { - ObjectParser parser = new ObjectParser<>("nodes"); - parser.declareString(NodeData::setName, new ParseField("name")); - parser.declareString(NodeData::setTransportAddress, new ParseField("transport_address")); - parser.declareString(NodeData::setHost, new ParseField("host")); - parser.declareString(NodeData::setIp, new ParseField("ip")); - parser.declareStringArray(NodeData::setRoles, new ParseField("roles")); - parser.declareField(NodeData::setAttributes, (p, c) -> p.mapStrings(), new ParseField("attributes"), ObjectParser.ValueType.OBJECT); - parser.declareNamedObjects(NodeData::setTasks, TaskInfo.PARSER, new ParseField("tasks")); - PARSER = (XContentParser p, Void v, String nodeId) -> parser.parse(p, new NodeData(nodeId), null); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/tasks/TaskGroup.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/tasks/TaskGroup.java deleted file mode 100644 index a8c0e299485fc..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/tasks/TaskGroup.java +++ /dev/null @@ -1,82 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.tasks; - -import java.util.ArrayList; -import java.util.List; -import java.util.Objects; -import java.util.stream.Collectors; - -/** - * Client side counterpart of server side version. 
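- * Wraps a {@code TaskInfo} together with its child groups; the parent/child links are resolved
- * from {@code TaskInfo#getParentTaskId()} when {@code ListTasksResponse} builds its task groups.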
- * - * {@link org.elasticsearch.action.admin.cluster.node.tasks.list.TaskGroup} - */ -public class TaskGroup { - - private final TaskInfo task; - - @Override - public String toString() { - return "TaskGroup{" + "task=" + task + ", childTasks=" + childTasks + '}'; - } - - private final List childTasks = new ArrayList<>(); - - public TaskGroup(TaskInfo task, List childTasks) { - this.task = task; - this.childTasks.addAll(childTasks); - } - - public static TaskGroup.Builder builder(TaskInfo taskInfo) { - return new TaskGroup.Builder(taskInfo); - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if ((o instanceof TaskGroup) == false) return false; - TaskGroup taskGroup = (TaskGroup) o; - return Objects.equals(task, taskGroup.task) && Objects.equals(getChildTasks(), taskGroup.getChildTasks()); - } - - @Override - public int hashCode() { - return Objects.hash(task, getChildTasks()); - } - - public static class Builder { - private TaskInfo taskInfo; - private List childTasks; - - private Builder(TaskInfo taskInfo) { - this.taskInfo = taskInfo; - childTasks = new ArrayList<>(); - } - - public void addGroup(TaskGroup.Builder builder) { - childTasks.add(builder); - } - - public TaskInfo getTaskInfo() { - return taskInfo; - } - - public TaskGroup build() { - return new TaskGroup(taskInfo, childTasks.stream().map(TaskGroup.Builder::build).collect(Collectors.toList())); - } - } - - public TaskInfo getTaskInfo() { - return task; - } - - public List getChildTasks() { - return childTasks; - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/tasks/TaskInfo.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/tasks/TaskInfo.java deleted file mode 100644 index 2c91be19b3db4..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/tasks/TaskInfo.java +++ /dev/null @@ -1,217 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.tasks; - -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.XContentParser; - -import java.util.HashMap; -import java.util.Map; -import java.util.Objects; - -/** - * client side counterpart of server side - *
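- * Instances are populated field by field by the lenient static {@code PARSER} below rather
- * than through a single constructor, which is why most setters are package-private.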

    - * {@link org.elasticsearch.tasks.TaskInfo} - */ -public class TaskInfo { - - private TaskId taskId; - private String type; - private String action; - private String description; - private long startTime; - private long runningTimeNanos; - private boolean cancellable; - private boolean cancelled; - private TaskId parentTaskId; - private final Map status = new HashMap<>(); - private final Map headers = new HashMap<>(); - - public TaskInfo(TaskId taskId) { - this.taskId = taskId; - } - - public TaskId getTaskId() { - return taskId; - } - - public String getNodeId() { - return taskId.nodeId; - } - - public String getType() { - return type; - } - - void setType(String type) { - this.type = type; - } - - public String getAction() { - return action; - } - - void setAction(String action) { - this.action = action; - } - - public String getDescription() { - return description; - } - - void setDescription(String description) { - this.description = description; - } - - public long getStartTime() { - return startTime; - } - - void setStartTime(long startTime) { - this.startTime = startTime; - } - - public long getRunningTimeNanos() { - return runningTimeNanos; - } - - void setRunningTimeNanos(long runningTimeNanos) { - this.runningTimeNanos = runningTimeNanos; - } - - public boolean isCancellable() { - return cancellable; - } - - void setCancellable(boolean cancellable) { - this.cancellable = cancellable; - } - - public boolean isCancelled() { - return cancelled; - } - - void setCancelled(boolean cancelled) { - this.cancelled = cancelled; - } - - public TaskId getParentTaskId() { - return parentTaskId; - } - - void setParentTaskId(String parentTaskId) { - this.parentTaskId = new TaskId(parentTaskId); - } - - public Map getHeaders() { - return headers; - } - - void setHeaders(Map headers) { - this.headers.putAll(headers); - } - - void setStatus(Map status) { - this.status.putAll(status); - } - - public Map getStatus() { - return status; - } - - private void noOpParse(Object s) {} - - public static final ObjectParser.NamedObjectParser PARSER; - - static { - ObjectParser parser = new ObjectParser<>("tasks", true, null); - // already provided in constructor: triggering a no-op - parser.declareString(TaskInfo::noOpParse, new ParseField("node")); - // already provided in constructor: triggering a no-op - parser.declareLong(TaskInfo::noOpParse, new ParseField("id")); - parser.declareString(TaskInfo::setType, new ParseField("type")); - parser.declareString(TaskInfo::setAction, new ParseField("action")); - parser.declareObject(TaskInfo::setStatus, (p, c) -> p.map(), new ParseField("status")); - parser.declareString(TaskInfo::setDescription, new ParseField("description")); - parser.declareLong(TaskInfo::setStartTime, new ParseField("start_time_in_millis")); - parser.declareLong(TaskInfo::setRunningTimeNanos, new ParseField("running_time_in_nanos")); - parser.declareBoolean(TaskInfo::setCancellable, new ParseField("cancellable")); - parser.declareBoolean(TaskInfo::setCancelled, new ParseField("cancelled")); - parser.declareString(TaskInfo::setParentTaskId, new ParseField("parent_task_id")); - parser.declareObject(TaskInfo::setHeaders, (p, c) -> p.mapStrings(), new ParseField("headers")); - PARSER = (XContentParser p, Void v, String name) -> parser.parse(p, new TaskInfo(new TaskId(name)), null); - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if ((o instanceof TaskInfo) == false) return false; - TaskInfo taskInfo = (TaskInfo) o; - return getStartTime() == 
taskInfo.getStartTime() - && getRunningTimeNanos() == taskInfo.getRunningTimeNanos() - && isCancellable() == taskInfo.isCancellable() - && isCancelled() == taskInfo.isCancelled() - && Objects.equals(getTaskId(), taskInfo.getTaskId()) - && Objects.equals(getType(), taskInfo.getType()) - && Objects.equals(getAction(), taskInfo.getAction()) - && Objects.equals(getDescription(), taskInfo.getDescription()) - && Objects.equals(getParentTaskId(), taskInfo.getParentTaskId()) - && Objects.equals(status, taskInfo.status) - && Objects.equals(getHeaders(), taskInfo.getHeaders()); - } - - @Override - public int hashCode() { - return Objects.hash( - getTaskId(), - getType(), - getAction(), - getDescription(), - getStartTime(), - getRunningTimeNanos(), - isCancellable(), - isCancelled(), - getParentTaskId(), - status, - getHeaders() - ); - } - - @Override - public String toString() { - return "TaskInfo{" - + "taskId=" - + taskId - + ", type='" - + type - + '\'' - + ", action='" - + action - + '\'' - + ", description='" - + description - + '\'' - + ", startTime=" - + startTime - + ", runningTimeNanos=" - + runningTimeNanos - + ", cancellable=" - + cancellable - + ", cancelled=" - + cancelled - + ", parentTaskId=" - + parentTaskId - + ", status=" - + status - + ", headers=" - + headers - + '}'; - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/tasks/TaskOperationFailure.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/tasks/TaskOperationFailure.java deleted file mode 100644 index 063fb955a8f2b..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/tasks/TaskOperationFailure.java +++ /dev/null @@ -1,107 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ -package org.elasticsearch.client.tasks; - -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.XContentParser; - -import java.util.Objects; - -import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; - -/** - * client side counterpart of server side - * {@link org.elasticsearch.action.TaskOperationFailure} - */ -public class TaskOperationFailure { - - private final String nodeId; - private final long taskId; - private final ElasticsearchException reason; - private final String status; - - public TaskOperationFailure(String nodeId, long taskId, String status, ElasticsearchException reason) { - this.nodeId = nodeId; - this.taskId = taskId; - this.status = status; - this.reason = reason; - } - - public String getNodeId() { - return nodeId; - } - - public long getTaskId() { - return taskId; - } - - public ElasticsearchException getReason() { - return reason; - } - - public String getStatus() { - return status; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if ((o instanceof TaskOperationFailure) == false) return false; - TaskOperationFailure that = (TaskOperationFailure) o; - return getTaskId() == that.getTaskId() - && Objects.equals(getNodeId(), that.getNodeId()) - && Objects.equals(getReason(), that.getReason()) - && Objects.equals(getStatus(), that.getStatus()); - } - - @Override - public int hashCode() { - return Objects.hash(getNodeId(), getTaskId(), getReason(), getStatus()); - } - - @Override - public String toString() { - return "TaskOperationFailure{" - + "nodeId='" - + nodeId - + '\'' - + ", taskId=" - + taskId - + ", reason=" - + reason - + ", status='" - + status - + '\'' - + '}'; - } - - public static TaskOperationFailure fromXContent(XContentParser parser) { - return PARSER.apply(parser, null); - } - - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "task_info", - true, - constructorObjects -> { - int i = 0; - String nodeId = (String) constructorObjects[i++]; - long taskId = (long) constructorObjects[i++]; - String status = (String) constructorObjects[i++]; - ElasticsearchException reason = (ElasticsearchException) constructorObjects[i]; - return new TaskOperationFailure(nodeId, taskId, status, reason); - } - ); - - static { - PARSER.declareString(constructorArg(), new ParseField("node_id")); - PARSER.declareLong(constructorArg(), new ParseField("task_id")); - PARSER.declareString(constructorArg(), new ParseField("status")); - PARSER.declareObject(constructorArg(), (parser, c) -> ElasticsearchException.fromXContent(parser), new ParseField("reason")); - } -} diff --git a/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformIntegTestCase.java b/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformIntegTestCase.java index 4b62cc6e80376..5a6551774e02e 100644 --- a/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformIntegTestCase.java +++ b/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformIntegTestCase.java @@ -7,15 +7,16 @@ package org.elasticsearch.xpack.transform.integration; +import org.apache.http.client.methods.HttpGet; import org.apache.logging.log4j.Level; import 
org.elasticsearch.ElasticsearchStatusException; -import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksRequest; import org.elasticsearch.action.admin.indices.refresh.RefreshRequest; import org.elasticsearch.action.bulk.BulkRequest; import org.elasticsearch.action.bulk.BulkResponse; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.client.Request; import org.elasticsearch.client.RequestOptions; import org.elasticsearch.client.RestHighLevelClient; import org.elasticsearch.client.core.AcknowledgedResponse; @@ -412,13 +413,18 @@ protected Map toLazy(ToXContent parsedObject) throws Exception { } private void waitForPendingTasks() { - ListTasksRequest listTasksRequest = new ListTasksRequest(); - listTasksRequest.setWaitForCompletion(true); - listTasksRequest.setDetailed(true); - listTasksRequest.setTimeout(TimeValue.timeValueSeconds(10)); - try (RestHighLevelClient restClient = new TestRestHighLevelClient()) { - - restClient.tasks().list(listTasksRequest, RequestOptions.DEFAULT); + Request request = new Request(HttpGet.METHOD_NAME, "/_tasks"); + Map parameters = Map.of( + "wait_for_completion", + Boolean.TRUE.toString(), + "detailed", + Boolean.TRUE.toString(), + "timeout", + TimeValue.timeValueSeconds(10).getStringRep() + ); + request.addParameters(parameters); + try { + client().performRequest(request); } catch (Exception e) { throw new AssertionError("Failed to wait for pending tasks to complete", e); } From c99d099f77e1b91614e5c1cf953842a72e96a9f1 Mon Sep 17 00:00:00 2001 From: Dan Hermann Date: Wed, 9 Feb 2022 09:18:53 -0600 Subject: [PATCH 008/167] Remove features client from HLRC (#83651) --- .../elasticsearch/client/FeaturesClient.java | 130 ----------------- .../client/FeaturesRequestConverters.java | 33 ----- .../client/RestHighLevelClient.java | 11 -- .../client/feature/GetFeaturesRequest.java | 16 -- .../client/feature/GetFeaturesResponse.java | 110 -------------- .../client/feature/ResetFeaturesRequest.java | 13 -- .../client/feature/ResetFeaturesResponse.java | 138 ------------------ .../integration/TestFeatureResetIT.java | 5 +- 8 files changed, 2 insertions(+), 454 deletions(-) delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/FeaturesClient.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/FeaturesRequestConverters.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/feature/GetFeaturesRequest.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/feature/GetFeaturesResponse.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/feature/ResetFeaturesRequest.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/feature/ResetFeaturesResponse.java diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/FeaturesClient.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/FeaturesClient.java deleted file mode 100644 index 3a63162960a7c..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/FeaturesClient.java +++ /dev/null @@ -1,130 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. 
Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ - -package org.elasticsearch.client; - -import org.elasticsearch.action.ActionListener; -import org.elasticsearch.client.feature.GetFeaturesRequest; -import org.elasticsearch.client.feature.GetFeaturesResponse; -import org.elasticsearch.client.feature.ResetFeaturesRequest; -import org.elasticsearch.client.feature.ResetFeaturesResponse; - -import java.io.IOException; - -import static java.util.Collections.emptySet; - -/** - * A wrapper for the {@link RestHighLevelClient} that provides methods for accessing the Snapshot API. - *
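- * Despite the reference to snapshots above, the wrapped endpoints are the Features APIs.
- * A minimal usage sketch, assuming an existing {@code RestHighLevelClient} named {@code client}:
- * <pre>
- * GetFeaturesResponse features = client.features().getFeatures(new GetFeaturesRequest(), RequestOptions.DEFAULT);
- * </pre>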

    - * See Snapshot API on elastic.co - * - * @deprecated The High Level Rest Client is deprecated in favor of the - * - * Elasticsearch Java API Client - */ -@Deprecated(since = "7.16.0", forRemoval = true) -@SuppressWarnings("removal") -public class FeaturesClient { - private final RestHighLevelClient restHighLevelClient; - - FeaturesClient(RestHighLevelClient restHighLevelClient) { - this.restHighLevelClient = restHighLevelClient; - } - - /** - * Get a list of features which can be included in a snapshot as feature states. - * See Get Snapshottable - * Features API on elastic.co - * - * @param getFeaturesRequest the request - * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return the response - * @throws IOException in case there is a problem sending the request or parsing back the response - */ - public GetFeaturesResponse getFeatures(GetFeaturesRequest getFeaturesRequest, RequestOptions options) throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - getFeaturesRequest, - FeaturesRequestConverters::getFeatures, - options, - GetFeaturesResponse::parse, - emptySet() - ); - } - - /** - * Asynchronously get a list of features which can be included in a snapshot as feature states. - * See Get Snapshottable - * Features API on elastic.co - * - * @param getFeaturesRequest the request - * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener the listener to be notified upon request completion - * @return cancellable that may be used to cancel the request - */ - public Cancellable getFeaturesAsync( - GetFeaturesRequest getFeaturesRequest, - RequestOptions options, - ActionListener listener - ) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - getFeaturesRequest, - FeaturesRequestConverters::getFeatures, - options, - GetFeaturesResponse::parse, - listener, - emptySet() - ); - } - - /** - * Reset the state of Elasticsearch features, deleting system indices and performing other - * cleanup operations. - * See Rest - * Features API on elastic.co - * - * @param resetFeaturesRequest the request - * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return the response - * @throws IOException in case there is a problem sending the request or parsing back the response - */ - public ResetFeaturesResponse resetFeatures(ResetFeaturesRequest resetFeaturesRequest, RequestOptions options) throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - resetFeaturesRequest, - FeaturesRequestConverters::resetFeatures, - options, - ResetFeaturesResponse::parse, - emptySet() - ); - } - - /** - * Asynchronously reset the state of Elasticsearch features, deleting system indices and performing other - * cleanup operations. - * See Get Snapshottable - * Features API on elastic.co - * - * @param resetFeaturesRequest the request - * @param options the request options (e.g. 
headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener the listener to be notified upon request completion - * @return cancellable that may be used to cancel the request - */ - public Cancellable resetFeaturesAsync( - ResetFeaturesRequest resetFeaturesRequest, - RequestOptions options, - ActionListener listener - ) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - resetFeaturesRequest, - FeaturesRequestConverters::resetFeatures, - options, - ResetFeaturesResponse::parse, - listener, - emptySet() - ); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/FeaturesRequestConverters.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/FeaturesRequestConverters.java deleted file mode 100644 index bb2b8be43cf3b..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/FeaturesRequestConverters.java +++ /dev/null @@ -1,33 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ - -package org.elasticsearch.client; - -import org.apache.http.client.methods.HttpGet; -import org.apache.http.client.methods.HttpPost; -import org.elasticsearch.client.feature.GetFeaturesRequest; -import org.elasticsearch.client.feature.ResetFeaturesRequest; - -public class FeaturesRequestConverters { - - private FeaturesRequestConverters() {} - - static Request getFeatures(GetFeaturesRequest getFeaturesRequest) { - String endpoint = "/_features"; - Request request = new Request(HttpGet.METHOD_NAME, endpoint); - RequestConverters.Params parameters = new RequestConverters.Params(); - parameters.withMasterTimeout(getFeaturesRequest.masterNodeTimeout()); - request.addParameters(parameters.asMap()); - return request; - } - - static Request resetFeatures(ResetFeaturesRequest resetFeaturesRequest) { - String endpoint = "/_features/_reset"; - return new Request(HttpPost.METHOD_NAME, endpoint); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java index aa3202bde6084..695ae9b69aea1 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java @@ -285,7 +285,6 @@ public class RestHighLevelClient implements Closeable { private final TransformClient transformClient = new TransformClient(this); private final EqlClient eqlClient = new EqlClient(this); private final SearchableSnapshotsClient searchableSnapshotsClient = new SearchableSnapshotsClient(this); - private final FeaturesClient featuresClient = new FeaturesClient(this); /** * Creates a {@link RestHighLevelClient} given the low level {@link RestClientBuilder} that allows to build the @@ -416,16 +415,6 @@ public SearchableSnapshotsClient searchableSnapshots() { return searchableSnapshotsClient; } - /** - * A wrapper for the {@link RestHighLevelClient} that provides methods for accessing the Searchable Snapshots APIs. - *
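- * Returns the {@code FeaturesClient}, which wraps the Get Features ({@code GET /_features}) and
- * Reset Features ({@code POST /_features/_reset}) APIs rather than searchable snapshots.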

    - * See the Searchable Snapshots - * APIs on elastic.co for more information. - */ - public FeaturesClient features() { - return featuresClient; - } - /** * Provides methods for accessing the Elastic Licensed Machine Learning APIs that * are shipped with the Elastic Stack distribution of Elasticsearch. All of diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/feature/GetFeaturesRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/feature/GetFeaturesRequest.java deleted file mode 100644 index 313a118ba4f00..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/feature/GetFeaturesRequest.java +++ /dev/null @@ -1,16 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ - -package org.elasticsearch.client.feature; - -import org.elasticsearch.client.TimedRequest; - -/** - * A {@link TimedRequest} to get the list of features available to be included in snapshots in the cluster. - */ -public class GetFeaturesRequest extends TimedRequest {} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/feature/GetFeaturesResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/feature/GetFeaturesResponse.java deleted file mode 100644 index 440444abbbbee..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/feature/GetFeaturesResponse.java +++ /dev/null @@ -1,110 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ - -package org.elasticsearch.client.feature; - -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.XContentParser; - -import java.util.List; -import java.util.Objects; - -public class GetFeaturesResponse { - - private final List features; - - private static final ParseField FEATURES = new ParseField("features"); - - @SuppressWarnings("unchecked") - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "snapshottable_features_response", - true, - (a, ctx) -> new GetFeaturesResponse((List) a[0]) - ); - - static { - PARSER.declareObjectArray(ConstructingObjectParser.constructorArg(), SnapshottableFeature::parse, FEATURES); - } - - public GetFeaturesResponse(List features) { - this.features = features; - } - - public List getFeatures() { - return features; - } - - public static GetFeaturesResponse parse(XContentParser parser) { - return PARSER.apply(parser, null); - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if ((o instanceof GetFeaturesResponse) == false) return false; - GetFeaturesResponse that = (GetFeaturesResponse) o; - return getFeatures().equals(that.getFeatures()); - } - - @Override - public int hashCode() { - return Objects.hash(getFeatures()); - } - - public static class SnapshottableFeature { - - private final String featureName; - private final String description; - - private static final ParseField FEATURE_NAME = new ParseField("name"); - private static final ParseField DESCRIPTION = new ParseField("description"); - - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "feature", - true, - (a, ctx) -> new SnapshottableFeature((String) a[0], (String) a[1]) - ); - - static { - PARSER.declareField(ConstructingObjectParser.constructorArg(), (p, c) -> p.text(), FEATURE_NAME, ObjectParser.ValueType.STRING); - PARSER.declareField(ConstructingObjectParser.constructorArg(), (p, c) -> p.text(), DESCRIPTION, ObjectParser.ValueType.STRING); - } - - public SnapshottableFeature(String featureName, String description) { - this.featureName = featureName; - this.description = description; - } - - public static SnapshottableFeature parse(XContentParser parser, Void ctx) { - return PARSER.apply(parser, ctx); - } - - public String getFeatureName() { - return featureName; - } - - public String getDescription() { - return description; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if ((o instanceof SnapshottableFeature) == false) return false; - SnapshottableFeature feature = (SnapshottableFeature) o; - return Objects.equals(getFeatureName(), feature.getFeatureName()); - } - - @Override - public int hashCode() { - return Objects.hash(getFeatureName()); - } - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/feature/ResetFeaturesRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/feature/ResetFeaturesRequest.java deleted file mode 100644 index 5bc2565c24b17..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/feature/ResetFeaturesRequest.java +++ /dev/null @@ -1,13 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. 
Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ - -package org.elasticsearch.client.feature; - -import org.elasticsearch.client.TimedRequest; - -public class ResetFeaturesRequest extends TimedRequest {} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/feature/ResetFeaturesResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/feature/ResetFeaturesResponse.java deleted file mode 100644 index c3fca66724138..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/feature/ResetFeaturesResponse.java +++ /dev/null @@ -1,138 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ - -package org.elasticsearch.client.feature; - -import org.elasticsearch.ElasticsearchException; -import org.elasticsearch.core.Nullable; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.XContentParser; - -import java.util.List; -import java.util.Objects; - -/** - * This class represents the response of the Feature State Reset API. It is a - * list containing the response of every feature whose state can be reset. The - * response from each feature will indicate success or failure. In the case of a - * failure, the cause will be returned as well. - */ -public class ResetFeaturesResponse { - private final List features; - - private static final ParseField FEATURES = new ParseField("features"); - - @SuppressWarnings("unchecked") - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "features_reset_status_response", - true, - (a, ctx) -> new ResetFeaturesResponse((List) a[0]) - ); - - static { - PARSER.declareObjectArray( - ConstructingObjectParser.constructorArg(), - ResetFeaturesResponse.ResetFeatureStateStatus::parse, - FEATURES - ); - } - - /** - * Create a new ResetFeaturesResponse - * @param features A full list of status responses from individual feature reset operations. - */ - public ResetFeaturesResponse(List features) { - this.features = features; - } - - /** - * @return List containing a reset status for each feature that we have tried to reset. - */ - public List getFeatureResetStatuses() { - return features; - } - - public static ResetFeaturesResponse parse(XContentParser parser) { - return PARSER.apply(parser, null); - } - - /** - * A class representing the status of an attempt to reset a feature's state. - * The attempt to reset either succeeds and we return the name of the - * feature and a success flag; or it fails and we return the name of the feature, - * a status flag, and the exception thrown during the attempt to reset the feature. 
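- * The constructor asserts this invariant: a {@code "FAILURE"} status must carry an exception
- * and a {@code "SUCCESS"} status must not.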
- */ - public static class ResetFeatureStateStatus { - private final String featureName; - private final String status; - private final Exception exception; - - private static final ParseField FEATURE_NAME = new ParseField("feature_name"); - private static final ParseField STATUS = new ParseField("status"); - private static final ParseField EXCEPTION = new ParseField("exception"); - - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "feature_state_reset_stats", - true, - (a, ctx) -> new ResetFeatureStateStatus((String) a[0], (String) a[1], (ElasticsearchException) a[2]) - ); - - static { - PARSER.declareField(ConstructingObjectParser.constructorArg(), (p, c) -> p.text(), FEATURE_NAME, ObjectParser.ValueType.STRING); - PARSER.declareField(ConstructingObjectParser.constructorArg(), (p, c) -> p.text(), STATUS, ObjectParser.ValueType.STRING); - PARSER.declareObject( - ConstructingObjectParser.optionalConstructorArg(), - (p, c) -> ElasticsearchException.fromXContent(p), - EXCEPTION - ); - } - - /** - * Create a ResetFeatureStateStatus. - * @param featureName Name of the feature whose status has been reset. - * @param status Whether the reset attempt succeeded or failed. - * @param exception If the reset attempt failed, the exception that caused the - * failure. Must be null when status is "SUCCESS". - */ - ResetFeatureStateStatus(String featureName, String status, @Nullable Exception exception) { - this.featureName = featureName; - assert "SUCCESS".equals(status) || "FAILURE".equals(status); - this.status = status; - assert "FAILURE".equals(status) ? Objects.nonNull(exception) : Objects.isNull(exception); - this.exception = exception; - } - - public static ResetFeatureStateStatus parse(XContentParser parser, Void ctx) { - return PARSER.apply(parser, ctx); - } - - /** - * @return Name of the feature that we tried to reset - */ - public String getFeatureName() { - return featureName; - } - - /** - * @return "SUCCESS" if the reset attempt succeeded, "FAILURE" otherwise. - */ - public String getStatus() { - return status; - } - - /** - * @return The exception that caused the reset attempt to fail. 
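- *         Always {@code null} when {@link #getStatus()} is {@code "SUCCESS"}.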
- */ - @Nullable - public Exception getException() { - return exception; - } - } -} diff --git a/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TestFeatureResetIT.java b/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TestFeatureResetIT.java index 3a5ea944761d2..31dc881dfd1f9 100644 --- a/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TestFeatureResetIT.java +++ b/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TestFeatureResetIT.java @@ -6,10 +6,10 @@ */ package org.elasticsearch.xpack.transform.integration; +import org.apache.http.client.methods.HttpPost; import org.elasticsearch.client.Request; import org.elasticsearch.client.RequestOptions; import org.elasticsearch.client.Response; -import org.elasticsearch.client.feature.ResetFeaturesRequest; import org.elasticsearch.client.transform.transforms.TimeSyncConfig; import org.elasticsearch.client.transform.transforms.TransformConfig; import org.elasticsearch.client.transform.transforms.pivot.SingleGroupSource; @@ -86,8 +86,7 @@ public void testTransformFeatureReset() throws Exception { assertTrue(putTransform(config, RequestOptions.DEFAULT).isAcknowledged()); assertTrue(startTransform(config.getId(), RequestOptions.DEFAULT).isAcknowledged()); - TestRestHighLevelClient highLevelClient = new TestRestHighLevelClient(); - highLevelClient.features().resetFeatures(new ResetFeaturesRequest(), RequestOptions.DEFAULT); + client().performRequest(new Request(HttpPost.METHOD_NAME, "/_features/_reset")); Response response = adminClient().performRequest(new Request("GET", "/_cluster/state?metric=metadata")); Map metadata = (Map) ESRestTestCase.entityAsMap(response).get("metadata"); From aa3bc72dd9772d10ac8e4283db5ed12b3e76d751 Mon Sep 17 00:00:00 2001 From: Jack Conradson Date: Wed, 9 Feb 2022 07:39:40 -0800 Subject: [PATCH 009/167] Remove dead classes from Painless (#83697) Test coverage revealed two dead classes in Painless. This change deletes them. --- .../org/elasticsearch/painless/Constant.java | 37 ---------------- .../org/elasticsearch/painless/Globals.java | 43 ------------------- 2 files changed, 80 deletions(-) delete mode 100644 modules/lang-painless/src/main/java/org/elasticsearch/painless/Constant.java delete mode 100644 modules/lang-painless/src/main/java/org/elasticsearch/painless/Globals.java diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Constant.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Constant.java deleted file mode 100644 index eabef10ed5374..0000000000000 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Constant.java +++ /dev/null @@ -1,37 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ - -package org.elasticsearch.painless; - -import java.util.function.Consumer; - -/** - * A constant initializer to be added to the class file. 
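- * A sketch of constructing one, assuming the writer handed to the callback is an ASM
- * {@code MethodVisitor} and {@code location} comes from the parse site; the callback
- * leaves the constant's value on the stack and the caller emits the matching field store:
- * <pre>
- * Constant c = new Constant(location, org.objectweb.asm.Type.INT_TYPE, "MY_CONSTANT",
- *     writer -> writer.visitLdcInsn(42));
- * </pre>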
- */ -public class Constant { - public final Location location; - public final String name; - public final org.objectweb.asm.Type type; - public final Consumer initializer; - - /** - * Create a new constant. - * - * @param location the location in the script that is creating it - * @param type the type of the constant - * @param name the name of the constant - * @param initializer code to initialize the constant. It will be called when generating the clinit method and is expected to leave the - * value of the constant on the stack. Generating the load instruction is managed by the caller. - */ - public Constant(Location location, org.objectweb.asm.Type type, String name, Consumer initializer) { - this.location = location; - this.name = name; - this.type = type; - this.initializer = initializer; - } -} diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Globals.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Globals.java deleted file mode 100644 index 043940011b55e..0000000000000 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Globals.java +++ /dev/null @@ -1,43 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ - -package org.elasticsearch.painless; - -import java.util.BitSet; -import java.util.HashMap; -import java.util.Map; - -/** - * Program-wide globals (initializers, synthetic methods, etc) - */ -public class Globals { - private final Map constantInitializers = new HashMap<>(); - private final BitSet statements; - - /** Create a new Globals from the set of statement boundaries */ - public Globals(BitSet statements) { - this.statements = statements; - } - - /** Adds a new constant initializer to be written */ - public void addConstantInitializer(Constant constant) { - if (constantInitializers.put(constant.name, constant) != null) { - throw new IllegalStateException("constant initializer: " + constant.name + " already exists"); - } - } - - /** Returns the current initializers */ - public Map getConstantInitializers() { - return constantInitializers; - } - - /** Returns the set of statement boundaries */ - public BitSet getStatements() { - return statements; - } -} From 09490516014d2eee046f6afaf29a582db4a1e763 Mon Sep 17 00:00:00 2001 From: James Rodewig Date: Wed, 9 Feb 2022 11:07:47 -0500 Subject: [PATCH 010/167] [DOCS] Add 8.0.0 GA release notes (#83689) (#83731) Adds the 8.0.0 GA release notes. I created these by combining the 8.0.0-alpha1, -alpha2, -beta1, -rc1 and -rc2 releases notes. Co-authored-by: Lisa Cawley (cherry picked from commit f9569e23ddebc3dbf49f3ae91fa6e49b9145c729) --- docs/reference/release-notes.asciidoc | 2 + .../release-notes/8.0.0-rc2.asciidoc | 3 + docs/reference/release-notes/8.0.0.asciidoc | 713 ++++++++++++++++++ 3 files changed, 718 insertions(+) create mode 100644 docs/reference/release-notes/8.0.0.asciidoc diff --git a/docs/reference/release-notes.asciidoc b/docs/reference/release-notes.asciidoc index fab77ab7e58b9..fce2f81b6390c 100644 --- a/docs/reference/release-notes.asciidoc +++ b/docs/reference/release-notes.asciidoc @@ -6,6 +6,7 @@ This section summarizes the changes in each release. 
+* <> * <> * <> * <> @@ -14,6 +15,7 @@ This section summarizes the changes in each release. -- +include::release-notes/8.0.0.asciidoc[] include::release-notes/8.0.0-rc2.asciidoc[] include::release-notes/8.0.0-rc1.asciidoc[] include::release-notes/8.0.0-beta1.asciidoc[] diff --git a/docs/reference/release-notes/8.0.0-rc2.asciidoc b/docs/reference/release-notes/8.0.0-rc2.asciidoc index 2c9e5c299ce47..1e57b09fda5d2 100644 --- a/docs/reference/release-notes/8.0.0-rc2.asciidoc +++ b/docs/reference/release-notes/8.0.0-rc2.asciidoc @@ -58,6 +58,9 @@ Machine Learning:: Network:: * Improve slow inbound handling to include response type {es-pull}80425[#80425] +Packaging:: +* Convert repository plugins to modules {es-pull}81870[#81870] (issue: {es-issue}81652[#81652]) + Search:: * Check nested fields earlier in kNN search {es-pull}80516[#80516] (issue: {es-issue}78473[#78473]) diff --git a/docs/reference/release-notes/8.0.0.asciidoc b/docs/reference/release-notes/8.0.0.asciidoc new file mode 100644 index 0000000000000..3eef72aa0e44e --- /dev/null +++ b/docs/reference/release-notes/8.0.0.asciidoc @@ -0,0 +1,713 @@ +[[release-notes-8.0.0]] +== {es} version 8.0.0 + +coming::[8.0.0] + +The following list are changes in 8.0.0 as compared to 7.17.0, and combines +release notes from the 8.0.0-alpha1, -alpha2, -beta1, -rc1 and -rc2 releases. + +Also see <>. + +[[known-issues-8.0.0]] +[float] +=== Known issues + +* If you installed {es} from an archive on an aarch64 platform like Linux ARM or macOS M1, the +`elastic` user password and {kib} enrollment token are not generated +automatically when starting your node for the first time. ++ +-- +After the node starts, generate the `elastic` password with the +<> tool: + +[source,bash] +---- +bin/elasticsearch-reset-password -u elastic +---- + +Then, create an enrollment token for {kib} with the +<> tool: + +[source,bash] +---- +bin/elasticsearch-create-enrollment-token -s kibana +---- +-- + + +[[breaking-8.0.0]] +[float] +=== Breaking changes + +Aggregations:: +* Percentiles aggregation: disallow specifying same percentile values twice {es-pull}52257[#52257] (issue: {es-issue}51871[#51871]) +* Remove adjacency matrix setting {es-pull}46327[#46327] (issues: {es-issue}46257[#46257], {es-issue}46324[#46324]) +* Remove `MovingAverage` pipeline aggregation {es-pull}39328[#39328] +* Remove deprecated `_time` and `_term` sort orders {es-pull}39450[#39450] +* Remove deprecated date histo interval {es-pull}75000[#75000] + +Allocation:: +* Require single data nodes to respect disk watermarks {es-pull}73737[#73737] (issues: {es-issue}55805[#55805], {es-issue}73733[#73733]) +* Remove `include_relocations` setting {es-pull}47717[#47717] (issues: {es-issue}46079[#46079], {es-issue}47443[#47443]) + +Analysis:: +* Cleanup versioned deprecations in analysis {es-pull}41560[#41560] (issue: {es-issue}41164[#41164]) +* Remove preconfigured `delimited_payload_filter` {es-pull}43686[#43686] (issues: {es-issue}41560[#41560], {es-issue}43684[#43684]) + +Authentication:: +* Always add file and native realms unless explicitly disabled {es-pull}69096[#69096] (issue: {es-issue}50892[#50892]) +* Do not set a NameID format in Policy by default {es-pull}44090[#44090] (issue: {es-issue}40353[#40353]) +* Make order setting mandatory for Realm config {es-pull}51195[#51195] (issue: {es-issue}37614[#37614]) + +CCR:: +* Avoid auto following leader system indices in CCR {es-pull}72815[#72815] (issue: {es-issue}67686[#67686]) + +Cluster Coordination:: +* Remove join timeout {es-pull}60873[#60873] 
(issue: {es-issue}60872[#60872]) +* Remove node filters for voting config exclusions {es-pull}55673[#55673] (issues: {es-issue}47990[#47990], {es-issue}50836[#50836]) +* Remove support for delaying state recovery pending master {es-pull}53845[#53845] (issue: {es-issue}51806[#51806]) + +Distributed:: +* Remove synced flush {es-pull}50882[#50882] (issues: {es-issue}50776[#50776], {es-issue}50835[#50835]) +* Remove the `cluster.remote.connect` setting {es-pull}54175[#54175] (issue: {es-issue}53924[#53924]) + +Engine:: +* Force merge should reject requests with `only_expunge_deletes` and `max_num_segments` set {es-pull}44761[#44761] (issue: {es-issue}43102[#43102]) +* Remove per-type indexing stats {es-pull}47203[#47203] (issue: {es-issue}41059[#41059]) +* Remove translog retention settings {es-pull}51697[#51697] (issue: {es-issue}50775[#50775]) + +Features/CAT APIs:: +* Remove the deprecated `local` parameter for `_cat/indices` {es-pull}64868[#64868] (issue: {es-issue}62198[#62198]) +* Remove the deprecated `local` parameter for `_cat/shards` {es-pull}64867[#64867] (issue: {es-issue}62197[#62197]) + +Features/Features:: +* Remove deprecated `._tier` allocation filtering settings {es-pull}73074[#73074] (issue: {es-issue}72835[#72835]) + +Features/ILM+SLM:: +* Add lower bound on `poll_interval` {es-pull}39593[#39593] (issue: {es-issue}39163[#39163]) +* Make the ILM `freeze` action a no-op {es-pull}77158[#77158] (issue: {es-issue}70192[#70192]) +* Always enforce default tier preference {es-pull}79751[#79751] (issue: {es-issue}76147[#76147]) +* Validate that snapshot repository exists for ILM policies at creation/update time {es-pull}78468[#78468] (issues: {es-issue}72957[#72957], {es-issue}77657[#77657]) +* Default `cluster.routing.allocation.enforce_default_tier_preference` to `true` {es-pull}79275[#79275] (issues: {es-issue}76147[#76147], {es-issue}79210[#79210]) + +Features/Indices APIs:: +* Change `prefer_v2_templates` parameter to default to true {es-pull}55489[#55489] (issues: {es-issue}53101[#53101], {es-issue}55411[#55411]) +* Remove deprecated `_upgrade` API {es-pull}64732[#64732] (issue: {es-issue}21337[#21337]) +* Remove local parameter for get field mapping request {es-pull}55100[#55100] (issue: {es-issue}55099[#55099]) +* Remove `include_type_name` parameter from REST layer {es-pull}48632[#48632] (issue: {es-issue}41059[#41059]) +* Remove the `template` field in index templates {es-pull}49460[#49460] (issue: {es-issue}21009[#21009]) +* Remove endpoint for freezing indices {es-pull}78918[#78918] (issues: {es-issue}70192[#70192], {es-issue}77273[#77273]) + +Features/Watcher:: +* Move watcher history to data stream {es-pull}64252[#64252] + +Geo:: +* Disallow creating `geo_shape` mappings with deprecated parameters {es-pull}70850[#70850] (issue: {es-issue}32039[#32039]) +* Remove bounding box query `type` parameter {es-pull}74536[#74536] + +Infra/Circuit Breakers:: +* Fixed synchronizing inflight breaker with internal variable {es-pull}40878[#40878] + +Infra/Core:: +* Limit processors by available processors {es-pull}44894[#44894] (issue: {es-issue}44889[#44889]) +* Remove `nodes/0` folder prefix from data path {es-pull}42489[#42489] +* Remove `bootstrap.system_call_filter` setting {es-pull}72848[#72848] +* Remove `fixed_auto_queue_size` threadpool type {es-pull}52280[#52280] +* Remove `node.max_local_storage_nodes` {es-pull}42428[#42428] (issue: {es-issue}42426[#42426]) +* Remove camel case named date/time formats {es-pull}60044[#60044] +* Remove legacy role settings 
{es-pull}71163[#71163] (issues: {es-issue}54998[#54998], {es-issue}66409[#66409], {es-issue}71143[#71143]) +* Remove `processors` setting {es-pull}45905[#45905] (issue: {es-issue}45855[#45855]) +* Remove the `local` parameter of `/_cat/nodes` {es-pull}50594[#50594] (issues: {es-issue}50088[#50088], {es-issue}50499[#50499]) +* Remove the listener thread pool {es-pull}53314[#53314] (issue: {es-issue}53049[#53049]) +* Remove the node local storage setting {es-pull}54381[#54381] (issue: {es-issue}54374[#54374]) +* Remove the `pidfile` setting {es-pull}45940[#45940] (issue: {es-issue}45938[#45938]) +* Removes `week_year` date format {es-pull}63384[#63384] (issue: {es-issue}60707[#60707]) +* System indices treated as restricted indices {es-pull}74212[#74212] (issue: {es-issue}69298[#69298]) +* Remove Joda dependency {es-pull}79007[#79007] +* Remove Joda support from date formatters {es-pull}78990[#78990] +* All system indices are hidden indices {es-pull}79512[#79512] + +Infra/Logging:: +* Remove slowlog level {es-pull}57591[#57591] (issue: {es-issue}56171[#56171]) + +Infra/Plugins:: +* Remove deprecated basic license feature enablement settings {es-pull}56211[#56211] (issue: {es-issue}54745[#54745]) + +Infra/REST API:: +* Remove content type required setting {es-pull}61043[#61043] +* Remove deprecated endpoints containing `_xpack` {es-pull}48170[#48170] (issue: {es-issue}35958[#35958]) +* Remove deprecated endpoints of hot threads API {es-pull}55109[#55109] (issue: {es-issue}52640[#52640]) +* Allow parsing Content-Type and Accept headers with version {es-pull}61427[#61427] + +Infra/Resiliency:: +* Fail node containing ancient closed index {es-pull}44264[#44264] (issues: {es-issue}21830[#21830], {es-issue}41731[#41731], {es-issue}44230[#44230]) + +Infra/Scripting:: +* Consolidate script parsing from object {es-pull}59507[#59507] (issue: {es-issue}59391[#59391]) +* Move `script_cache` into _nodes/stats {es-pull}59265[#59265] (issues: {es-issue}50152[#50152], {es-issue}59262[#59262]) +* Remove general cache settings {es-pull}59262[#59262] (issue: {es-issue}50152[#50152]) + +Infra/Settings:: +* Change default value of `action.destructive_requires_name` to `true` {es-pull}66908[#66908] (issue: {es-issue}61074[#61074]) +* Forbid settings without a namespace {es-pull}45947[#45947] (issues: {es-issue}45905[#45905], {es-issue}45940[#45940]) + +Ingest:: +* Remove default maxmind GeoIP databases from distribution {es-pull}78362[#78362] (issue: {es-issue}68920[#68920]) + +License:: +* Set `xpack.security.enabled` to true for all licenses {es-pull}72300[#72300] +* Enforce license expiration {es-pull}79671[#79671] + +Machine Learning:: +* Remove deprecated `_xpack` endpoints {es-pull}59870[#59870] (issues: {es-issue}35958[#35958], {es-issue}48170[#48170]) +* Remove the ability to update datafeed's `job_id` {es-pull}44752[#44752] (issue: {es-issue}44616[#44616]) +* Remove `allow_no_datafeeds` and `allow_no_jobs` parameters from APIs {es-pull}80048[#80048] (issue: {es-issue}60732[#60732]) + +Mapping:: +* Remove `boost` mapping parameter {es-pull}62639[#62639] (issue: {es-issue}62623[#62623]) +* Remove support for chained multi-fields {es-pull}42333[#42333] (issues: {es-issue}41267[#41267], {es-issue}41926[#41926]) +* Remove support for string in `unmapped_type` {es-pull}45675[#45675] +* Removes typed URLs from mapping APIs {es-pull}41676[#41676] + +Network:: +* Remove client feature tracking {es-pull}44929[#44929] (issues: {es-issue}31020[#31020], {es-issue}42538[#42538], {es-issue}44667[#44667]) +* Remove 
escape hatch permitting incompatible builds {es-pull}65753[#65753] (issues: {es-issue}65249[#65249], {es-issue}65601[#65601]) + +Packaging:: +* Remove SysV init support {es-pull}51716[#51716] (issue: {es-issue}51480[#51480]) +* Remove support for `JAVA_HOME` {es-pull}69149[#69149] (issue: {es-issue}55820[#55820]) +* Remove no-jdk distributions {es-pull}76896[#76896] (issue: {es-issue}65109[#65109]) +* Require Java 17 for running Elasticsearch {es-pull}79873[#79873] + +Recovery:: +* Remove dangling index auto import functionality {es-pull}59698[#59698] (issue: {es-issue}48366[#48366]) + +Reindex:: +* Reindex from Remote encoding {es-pull}41007[#41007] (issue: {es-issue}40303[#40303]) +* Reindex remove outer level size {es-pull}43373[#43373] (issues: {es-issue}24344[#24344], {es-issue}41894[#41894]) + +Rollup:: +* `RollupStart` endpoint should return OK if job already started {es-pull}41502[#41502] (issues: {es-issue}35928[#35928], {es-issue}39845[#39845]) + +Search:: +* Decouple shard allocation awareness from search and get requests {es-pull}45735[#45735] (issue: {es-issue}43453[#43453]) +* Fix range query on date fields for number inputs {es-pull}63692[#63692] (issue: {es-issue}63680[#63680]) +* Make fuzziness reject illegal values earlier {es-pull}33511[#33511] +* Make remote cluster resolution stricter {es-pull}40419[#40419] (issue: {es-issue}37863[#37863]) +* Parse empty first line in msearch request body as action metadata {es-pull}41011[#41011] (issue: {es-issue}39841[#39841]) +* Remove `CommonTermsQuery` and `cutoff_frequency` param {es-pull}42654[#42654] (issue: {es-issue}37096[#37096]) +* Remove `type` query {es-pull}47207[#47207] (issue: {es-issue}41059[#41059]) +* Remove `use_field_mapping` format option for docvalue fields {es-pull}55622[#55622] +* Remove deprecated `SimpleQueryStringBuilder` parameters {es-pull}57200[#57200] +* Remove deprecated `search.remote` settings {es-pull}42381[#42381] (issues: {es-issue}33413[#33413], {es-issue}38556[#38556]) +* Remove deprecated sort options: `nested_path` and `nested_filter` {es-pull}42809[#42809] (issue: {es-issue}27098[#27098]) +* Remove deprecated vector functions {es-pull}48725[#48725] (issue: {es-issue}48604[#48604]) +* Remove support for `_type` in searches {es-pull}68564[#68564] (issues: {es-issue}41059[#41059], {es-issue}68311[#68311]) +* Remove support for sparse vectors {es-pull}48781[#48781] (issue: {es-issue}48368[#48368]) +* Remove the object format for `indices_boost` {es-pull}55078[#55078] +* Removes type from `TermVectors` APIs {es-pull}42198[#42198] (issue: {es-issue}41059[#41059]) +* Removes typed endpoint from search and related APIs {es-pull}41640[#41640] +* Set max allowed size for stored async response {es-pull}74455[#74455] (issue: {es-issue}67594[#67594]) +* `indices.query.bool.max_clause_count` now limits all query clauses {es-pull}75297[#75297] + +Security:: +* Remove obsolete security settings {es-pull}40496[#40496] +* Remove support of creating CA on the fly when generating certificates {es-pull}65590[#65590] (issue: {es-issue}61884[#61884]) +* Remove the `id` field from the `InvalidateApiKey` API {es-pull}66671[#66671] (issue: {es-issue}66317[#66317]) +* Remove the migrate tool {es-pull}42174[#42174] +* Compress audit logs {es-pull}64472[#64472] (issue: {es-issue}63843[#63843]) +* Remove insecure settings {es-pull}46147[#46147] (issue: {es-issue}45947[#45947]) +* Remove `kibana_dashboard_only_user` reserved role {es-pull}76507[#76507] + +Snapshot/Restore:: +* Blob store compress default to `true` 
{es-pull}40033[#40033] +* Get snapshots support for multiple repositories {es-pull}42090[#42090] (issue: {es-issue}41210[#41210]) +* Remove repository stats API {es-pull}62309[#62309] (issue: {es-issue}62297[#62297]) +* Remove frozen cache setting leniency {es-pull}71013[#71013] (issue: {es-issue}70341[#70341]) +* Adjust snapshot index resolution behavior to be more intuitive {es-pull}79670[#79670] (issue: {es-issue}78320[#78320]) + +TLS:: +* Reject misconfigured/ambiguous SSL server config {es-pull}45892[#45892] +* Remove support for configurable PKCS#11 keystores {es-pull}75404[#75404] +* Remove the client transport profile filter {es-pull}43236[#43236] + + + +[[breaking-java-8.0.0]] +[float] +=== Breaking Java changes + +Authentication:: +* Mandate x-pack REST handler installed {es-pull}71061[#71061] (issue: {es-issue}70523[#70523]) + +CCR:: +* Remove the `CcrClient` {es-pull}42816[#42816] + +CRUD:: +* Remove types from `BulkRequest` {es-pull}46983[#46983] (issue: {es-issue}41059[#41059]) +* Remove `Client.prepareIndex(index, type, id)` method {es-pull}48443[#48443] + + +Client:: +* Remove `SecurityClient` from x-pack {es-pull}42471[#42471] + +Features/ILM+SLM:: +* Remove the `ILMClient` {es-pull}42817[#42817] + +Features/Monitoring:: +* Remove `MonitoringClient` from x-pack {es-pull}42770[#42770] + +Features/Watcher:: +* Remove `WatcherClient` from x-pack {es-pull}42815[#42815] + +Infra/Core:: +* Remove `XPackClient` from x-pack {es-pull}42729[#42729] +* Remove the transport client {es-pull}42538[#42538] +* Remove transport client from x-pack {es-pull}42202[#42202] + +Infra/REST API:: +* Copy HTTP headers to `ThreadContext` strictly {es-pull}45945[#45945] + +Machine Learning:: +* Remove the `MachineLearningClient` {es-pull}43108[#43108] + +Mapping:: +* Remove type filter from `GetMappings` API {es-pull}47364[#47364] (issue: {es-issue}41059[#41059]) +* Remove `type` parameter from `PutMappingRequest.buildFromSimplifiedDef()` {es-pull}50844[#50844] (issue: {es-issue}41059[#41059]) +* Remove unused parameter from `MetadataFieldMapper.TypeParser#getDefault()` {es-pull}51219[#51219] +* Remove `type` parameter from `CIR.mapping(type, object...)` {es-pull}50739[#50739] (issue: {es-issue}41059[#41059]) + +Search:: +* Removes types from `SearchRequest` and `QueryShardContext` {es-pull}42112[#42112] + +Snapshot/Restore:: +* Remove deprecated repository methods {es-pull}42359[#42359] (issue: {es-issue}42213[#42213]) + + +[[deprecation-8.0.0]] +[float] +=== Deprecations + +Authentication:: +* Deprecate setup-passwords tool {es-pull}76902[#76902] + +CRUD:: +* Remove `indices_segments` 'verbose' parameter {es-pull}78451[#78451] (issue: {es-issue}75955[#75955]) + +Engine:: +* Deprecate setting `max_merge_at_once_explicit` {es-pull}80574[#80574] + +Machine Learning:: +* Deprecate `estimated_heap_memory_usage_bytes` and replace with `model_size_bytes` {es-pull}80554[#80554] + +Monitoring:: +* Add deprecation info API entries for deprecated monitoring settings {es-pull}78799[#78799] +* Automatically install monitoring templates at plugin initialization {es-pull}78350[#78350] +* Remove Monitoring ingest pipelines {es-pull}77459[#77459] (issue: {es-issue}50770[#50770]) + +Search:: +* Configure `IndexSearcher.maxClauseCount()` based on node characteristics {es-pull}81525[#81525] (issue: {es-issue}46433[#46433]) + +Transform:: +* Improve transform deprecation messages {es-pull}81847[#81847] (issues: {es-issue}81521[#81521], {es-issue}81523[#81523]) + +[[feature-8.0.0]] +[float] +=== New features + 
+Security:: +* Auto-configure TLS for new nodes of new clusters {es-pull}77231[#77231] (issues: {es-issue}75144[#75144], {es-issue}75704[#75704]) + +Snapshot/Restore:: +* Support IAM roles for Kubernetes service accounts {es-pull}81255[#81255] (issue: {es-issue}52625[#52625]) + +Watcher:: +* Use `startsWith` rather than exact matches for Watcher history template names {es-pull}82396[#82396] + + +[[enhancement-8.0.0]] +[float] +=== Enhancements + +Analysis:: +* Move `reload_analyzers` endpoint to x-pack {es-pull}43559[#43559] + +Authentication:: +* Reset elastic password CLI tool {es-pull}74892[#74892] (issues: {es-issue}70113[#70113], {es-issue}74890[#74890]) +* Autogenerate and print elastic password on startup {es-pull}77291[#77291] +* Enroll Kibana API uses Service Accounts {es-pull}76370[#76370] +* Add `reset-kibana-system-user` tool {es-pull}77322[#77322] +* New CLI tool to reset password for built-in users {es-pull}79709[#79709] +* Auto-configure the `elastic` user password {es-pull}78306[#78306] + +Authorization:: +* Granting `kibana_system` reserved role access to "all" privileges to `.internal.preview.alerts*` index {es-pull}80889[#80889] (issues: {es-issue}76624[#76624], {es-issue}80746[#80746], {es-issue}116374[#116374]) +* Granting `kibana_system` reserved role access to "all" privileges to .preview.alerts* index {es-pull}80746[#80746] +* Granting editor and viewer roles access to alerts-as-data indices {es-pull}81285[#81285] + +Cluster Coordination:: +* Prevent downgrades from 8.x to 7.x {es-pull}78586[#78586] (issues: {es-issue}42489[#42489], {es-issue}52414[#52414]) +* Prevent downgrades from 8.x to 7.x {es-pull}78638[#78638] (issues: {es-issue}42489[#42489], {es-issue}52414[#52414]) +* Make `TaskBatcher` less lock-heavy {es-pull}82227[#82227] (issue: {es-issue}77466[#77466]) + +Data streams:: +* Data stream support read and write with custom routing and partition size {es-pull}74394[#74394] (issue: {es-issue}74390[#74390]) + +EQL:: +* Add option for returning results from the tail of the stream {es-pull}64869[#64869] (issue: {es-issue}58646[#58646]) +* Introduce case insensitive variant `in~` {es-pull}68176[#68176] (issue: {es-issue}68172[#68172]) +* Optimize redundant `toString` {es-pull}71070[#71070] (issue: {es-issue}70681[#70681]) + +Engine:: +* Always use soft-deletes in `InternalEngine` {es-pull}50415[#50415] +* Remove translog retention policy {es-pull}51417[#51417] (issue: {es-issue}50775[#50775]) + +Features/CAT APIs:: +* Remove `size` and add `time` params to `_cat/threadpool` {es-pull}55736[#55736] (issue: {es-issue}54478[#54478]) + +Features/ILM+SLM:: +* Allow for setting the total shards per node in the Allocate ILM action {es-pull}76794[#76794] (issue: {es-issue}76775[#76775]) +* Inject migrate action regardless of allocate action {es-pull}79090[#79090] (issue: {es-issue}76147[#76147]) +* Make unchanged ILM policy updates into noop {es-pull}82240[#82240] (issue: {es-issue}82065[#82065]) +* Avoid unnecessary `LifecycleExecutionState` recalculation {es-pull}81558[#81558] (issues: {es-issue}77466[#77466], {es-issue}79692[#79692]) + +Features/Indices APIs:: +* Batch rollover cluster state updates {es-pull}79945[#79945] (issues: {es-issue}77466[#77466], {es-issue}79782[#79782]) +* Reuse `MappingMetadata` instances in Metadata class {es-pull}80348[#80348] (issues: {es-issue}69772[#69772], {es-issue}77466[#77466]) + +Features/Stats:: +* Add bulk stats track the bulk per shard {es-pull}52208[#52208] (issues: {es-issue}47345[#47345], {es-issue}50536[#50536]) + 
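+The bulk stats entry just above surfaces per-shard bulk metrics through the
+regular indices stats APIs. Below is a minimal sketch of reading them with the
+low-level Java REST client; the `my-index` name is a placeholder, and the
+assumption that the new counters appear in a `bulk` section of
+`GET /<index>/_stats` output is ours, not something the changelog entry states:
+
+[source,java]
+----
+import org.apache.http.HttpHost;
+import org.apache.http.util.EntityUtils;
+import org.elasticsearch.client.Request;
+import org.elasticsearch.client.Response;
+import org.elasticsearch.client.RestClient;
+
+public class BulkStatsSketch {
+    public static void main(String[] args) throws Exception {
+        try (RestClient client = RestClient.builder(
+                new HttpHost("localhost", 9200, "http")).build()) {
+            // Request index stats at shard-level granularity so the per-shard
+            // bulk figures (operation counts, sizes, timings) are included.
+            Request request = new Request("GET", "/my-index/_stats");
+            request.addParameter("level", "shards");
+            Response response = client.performRequest(request);
+            // Print the raw JSON body; the bulk section is expected inside it.
+            System.out.println(EntityUtils.toString(response.getEntity()));
+        }
+    }
+}
+----
+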
+Features/Watcher:: +* Remove Watcher history clean up from monitoring {es-pull}67154[#67154] + +Infra/Core:: +* Remove aliases exist action {es-pull}43430[#43430] +* Remove indices exists action {es-pull}43164[#43164] +* Remove types exists action {es-pull}43344[#43344] +* Retain reference to stdout for exceptional cases {es-pull}77460[#77460] +* Check whether stdout is a real console {es-pull}79882[#79882] +* Share int, long, float, double, and byte pages {es-pull}75053[#75053] +* Revert "Deprecate resolution loss on date field (#78921)" {es-pull}79914[#79914] (issue: {es-issue}78921[#78921]) +* Add two missing entries to the deprecation information API {es-pull}80290[#80290] (issue: {es-issue}80233[#80233]) +* Prevent upgrades to 8.0 without first upgrading to the last 7.x release {es-pull}82321[#82321] (issue: {es-issue}81865[#81865]) + +Infra/Logging:: +* Make Elasticsearch JSON logs ECS compliant {es-pull}47105[#47105] (issue: {es-issue}46119[#46119]) + +Infra/REST API:: +* Allow for field declaration for future compatible versions {es-pull}69774[#69774] (issue: {es-issue}51816[#51816]) +* Introduce stability description to the REST API specification {es-pull}38413[#38413] +* Parsing: Validate that fields are not registered twice {es-pull}70243[#70243] +* Support response content-type with versioned media type {es-pull}65500[#65500] (issue: {es-issue}51816[#51816]) +* [REST API Compatibility] Typed endpoints for index and get APIs {es-pull}69131[#69131] (issue: {es-issue}54160[#54160]) +* [REST API Compatibility] Typed endpoints for put and get mapping and get field mappings {es-pull}71721[#71721] (issues: {es-issue}51816[#51816], {es-issue}54160[#54160]) +* [REST API Compatibility] Allow `copy_settings` flag for resize operations {es-pull}75184[#75184] (issues: {es-issue}38514[#38514], {es-issue}51816[#51816]) +* [REST API Compatibility] Allow for type in geo shape query {es-pull}74553[#74553] (issues: {es-issue}51816[#51816], {es-issue}54160[#54160]) +* [REST API Compatibility] Always return `adjust_pure_negative` value {es-pull}75182[#75182] (issues: {es-issue}49543[#49543], {es-issue}51816[#51816]) +* [REST API Compatibility] Clean up x-pack/plugin rest compat tests {es-pull}74701[#74701] (issue: {es-issue}51816[#51816]) +* [REST API Compatibility] Do not return `_doc` for empty mappings in template {es-pull}75448[#75448] (issues: {es-issue}51816[#51816], {es-issue}54160[#54160], {es-issue}70966[#70966], {es-issue}74544[#74544]) +* [REST API Compatibility] Dummy REST action for `indices.upgrade` API {es-pull}75136[#75136] (issue: {es-issue}51816[#51816]) +* [REST API Compatibility] REST Terms vector typed response {es-pull}73117[#73117] +* [REST API Compatibility] Rename `BulkItemResponse.Failure` type field {es-pull}74937[#74937] (issue: {es-issue}51816[#51816]) +* [REST API Compatibility] Type metadata for docs used in simulate request {es-pull}74222[#74222] (issues: {es-issue}51816[#51816], {es-issue}54160[#54160]) +* [REST API Compatibility] Typed `TermLookups` {es-pull}74544[#74544] (issues: {es-issue}46943[#46943], {es-issue}51816[#51816], {es-issue}54160[#54160]) +* [REST API Compatibility] Typed and x-pack graph explore API {es-pull}74185[#74185] (issues: {es-issue}46935[#46935], {es-issue}51816[#51816], {es-issue}54160[#54160]) +* [REST API Compatibility] Typed endpoint for bulk API {es-pull}73571[#73571] (issue: {es-issue}51816[#51816]) +* [REST API Compatibility] Typed endpoint for multi-get API {es-pull}73878[#73878] (issue: {es-issue}51816[#51816]) +* [REST API 
Compatibility] Typed endpoints for `RestUpdateAction` and `RestDeleteAction` {es-pull}73115[#73115] (issues: {es-issue}51816[#51816], {es-issue}54160[#54160])
+* [REST API Compatibility] Typed endpoints for `get_source` API {es-pull}73957[#73957] (issues: {es-issue}46587[#46587], {es-issue}46931[#46931], {es-issue}51816[#51816])
+* [REST API Compatibility] Typed endpoints for explain API {es-pull}73901[#73901] (issue: {es-issue}51816[#51816])
+* [REST API Compatibility] Typed endpoints for search `_count` API {es-pull}73958[#73958] (issues: {es-issue}42112[#42112], {es-issue}51816[#51816])
+* [REST API Compatibility] Typed indexing stats {es-pull}74181[#74181] (issues: {es-issue}47203[#47203], {es-issue}51816[#51816], {es-issue}54160[#54160])
+* [REST API Compatibility] Types for percolate query API {es-pull}74698[#74698] (issues: {es-issue}46985[#46985], {es-issue}51816[#51816], {es-issue}54160[#54160], {es-issue}74689[#74689])
+* [REST API Compatibility] Validate query typed API {es-pull}74171[#74171] (issues: {es-issue}46927[#46927], {es-issue}51816[#51816], {es-issue}54160[#54160])
+* [REST API Compatibility] Voting config exclusion exception message {es-pull}75406[#75406] (issues: {es-issue}51816[#51816], {es-issue}55291[#55291])
+* [REST API Compatibility] `MoreLikeThisQuery` with types {es-pull}75123[#75123] (issues: {es-issue}42198[#42198], {es-issue}51816[#51816], {es-issue}54160[#54160])
+* [REST API Compatibility] Update and delete by query using size field {es-pull}69606[#69606]
+* [REST API Compatibility] Indices boost in object format {es-pull}74422[#74422] (issues: {es-issue}51816[#51816], {es-issue}55078[#55078])
+* [REST API Compatibility] Typed endpoints for search and related endpoints {es-pull}72155[#72155] (issues: {es-issue}51816[#51816], {es-issue}54160[#54160])
+* [REST API Compatibility] Allow to use size `-1` {es-pull}75342[#75342] (issues: {es-issue}51816[#51816], {es-issue}69548[#69548], {es-issue}70209[#70209])
+* [REST API Compatibility] Ignore `use_field_mapping` option for docvalue {es-pull}74435[#74435] (issue: {es-issue}55622[#55622])
+* [REST API Compatibility] `_time` and `_term` sort orders {es-pull}74919[#74919] (issues: {es-issue}39450[#39450], {es-issue}51816[#51816])
+* [REST API Compatibility] `template` parameter and field on PUT index template {es-pull}71238[#71238] (issues: {es-issue}49460[#49460], {es-issue}51816[#51816], {es-issue}68905[#68905])
+* [REST API Compatibility] Make query registration easier {es-pull}75722[#75722] (issue: {es-issue}51816[#51816])
+* [REST API Compatibility] Typed query {es-pull}75453[#75453] (issues: {es-issue}47207[#47207], {es-issue}51816[#51816], {es-issue}54160[#54160])
+* [REST API Compatibility] Deprecate the use of synced flush {es-pull}75372[#75372] (issues: {es-issue}50882[#50882], {es-issue}51816[#51816])
+* [REST API Compatibility] Licence `accept_enterprise` and response changes {es-pull}75479[#75479] (issues: {es-issue}50067[#50067], {es-issue}50735[#50735], {es-issue}51816[#51816], {es-issue}58217[#58217])
+
+Infra/Scripting::
+* Update `DeprecationMap` to `DynamicMap` {es-pull}56149[#56149] (issue: {es-issue}52103[#52103])
+* Add nio Buffers to Painless {es-pull}79870[#79870] (issue: {es-issue}79867[#79867])
+* Restore the scripting general cache {es-pull}79453[#79453] (issue: {es-issue}62899[#62899])
+
+Infra/Settings::
+* Fixed inconsistent `Setting.exist()` {es-pull}46603[#46603] (issue: {es-issue}41830[#41830])
+* Remove `index.optimize_auto_generated_id` setting (#27583) {es-pull}27600[#27600] 
(issue: {es-issue}27583[#27583]) +* Implement setting deduplication via string interning {es-pull}80493[#80493] (issues: {es-issue}77466[#77466], {es-issue}78892[#78892]) + +Ingest:: +* Add support for `_meta` field to ingest pipelines {es-pull}76381[#76381] +* Remove binary field after attachment processor execution {es-pull}79172[#79172] +* Improving cache lookup to reduce recomputing / searches {es-pull}77259[#77259] +* Extract more standard metadata from binary files {es-pull}78754[#78754] (issue: {es-issue}22339[#22339]) + +License:: +* Add deprecated `accept_enterprise` param to `/_xpack` {es-pull}58220[#58220] (issue: {es-issue}58217[#58217]) +* Support `accept_enterprise` param in get license API {es-pull}50067[#50067] (issue: {es-issue}49474[#49474]) +* Enforce Transport TLS check on all licenses {es-pull}79602[#79602] (issue: {es-issue}75292[#75292]) + +Machine Learning:: +* The Windows build platform for the {ml} C++ code now uses Visual Studio 2019 {ml-pull}1352[#1352] +* The macOS build platform for the {ml} C++ code is now Mojave running Xcode 11.3.1, + or Ubuntu 20.04 running clang 8 for cross compilation {ml-pull}1429[#1429] +* Add a new application for evaluating PyTorch models. The app depends on LibTorch - the C++ front end to PyTorch - and performs inference on models stored in the TorchScript format {ml-pull}1902[#1902] +* Adding new PUT trained model vocabulary endpoint {es-pull}77387[#77387] +* Creating new PUT model definition part API {es-pull}76987[#76987] +* Add inference time configuration overrides {es-pull}78441[#78441] (issue: {es-issue}77799[#77799]) +* Optimize source extraction for `categorize_text` aggregation {es-pull}79099[#79099] +* The Linux build platform for the {ml} C++ code is now CentOS 7 running gcc 10.3. 
{ml-pull}2028[#2028] +* Make ML indices hidden when the node becomes master {es-pull}77416[#77416] (issue: {es-issue}53674[#53674]) +* Add `deployment_stats` to trained model stats {es-pull}80531[#80531] +* The setting `use_auto_machine_memory_percent` now defaults `max_model_memory_limit` {es-pull}80532[#80532] (issue: {es-issue}80415[#80415]) + +Mapping:: +* Sparse vector to throw exception consistently {es-pull}62646[#62646] +* Add support for configuring HNSW parameters {es-pull}79193[#79193] (issue: {es-issue}78473[#78473]) +* Extend `dense_vector` to support indexing vectors {es-pull}78491[#78491] (issue: {es-issue}78473[#78473]) + +Monitoring:: +* Add previously removed Monitoring settings back for 8.0 {es-pull}78784[#78784] +* Change Monitoring plugin cluster alerts to not install by default {es-pull}79657[#79657] +* Adding default templates for Metricbeat ECS data {es-pull}81744[#81744] + +Network:: +* Enable LZ4 transport compression by default {es-pull}76326[#76326] (issue: {es-issue}73497[#73497]) +* Improve slow inbound handling to include response type {es-pull}80425[#80425] + +Packaging:: +* Make the Docker build more re-usable in Cloud {es-pull}50277[#50277] (issues: {es-issue}46166[#46166], {es-issue}49926[#49926]) +* Update docker-compose.yml to fix bootstrap check error {es-pull}47650[#47650] +* Allow total memory to be overridden {es-pull}78750[#78750] (issue: {es-issue}65905[#65905]) +* Convert repository plugins to modules {es-pull}81870[#81870] (issue: {es-issue}81652[#81652]) + +Recovery:: +* Use Lucene index in peer recovery and resync {es-pull}51189[#51189] (issue: {es-issue}50775[#50775]) +* Fix `PendingReplicationActions` submitting lots of `NOOP` tasks to `GENERIC` {es-pull}82092[#82092] (issues: {es-issue}77466[#77466], {es-issue}79837[#79837]) + +Reindex:: +* Make reindexing managed by a persistent task {es-pull}43382[#43382] (issue: {es-issue}42612[#42612]) +* Reindex restart from checkpoint {es-pull}46055[#46055] (issue: {es-issue}42612[#42612]) +* Reindex search resiliency {es-pull}45497[#45497] (issues: {es-issue}42612[#42612], {es-issue}43187[#43187]) +* Reindex v2 rethrottle sliced fix {es-pull}46967[#46967] (issues: {es-issue}42612[#42612], {es-issue}46763[#46763]) +* Do not scroll if max docs is less than scroll size (update/delete by query) {es-pull}81654[#81654] (issue: {es-issue}54270[#54270]) + +Rollup:: +* Adds support for `date_nanos` in Rollup Metric and `DateHistogram` Configs {es-pull}59349[#59349] (issue: {es-issue}44505[#44505]) + +SQL:: +* Add text formatting support for multivalue {es-pull}68606[#68606] +* Add xDBC and CLI support. 
QA CSV specs {es-pull}68966[#68966]
+* Export array values through result sets {es-pull}69512[#69512]
+* Improve alias resolution in sub-queries {es-pull}67216[#67216] (issue: {es-issue}56713[#56713])
+* Improve the optimization of null conditionals {es-pull}71192[#71192]
+* Push `WHERE` clause inside subqueries {es-pull}71362[#71362]
+* Use Java `String` methods for `LTRIM/RTRIM` {es-pull}57594[#57594]
+* QL: Make canonical form take into account children {es-pull}71266[#71266]
+* QL: Polish optimizer expression rule declaration {es-pull}71396[#71396]
+* QL: Propagate nullability constraints across conjunctions {es-pull}71187[#71187] (issue: {es-issue}70683[#70683])
+
+Search::
+* Completely disallow setting negative size in search {es-pull}70209[#70209] (issue: {es-issue}69548[#69548])
+* Make `0` an invalid value for `min_children` in `has_child` query {es-pull}41347[#41347]
+* Return error when remote indices are locally resolved {es-pull}74556[#74556] (issue: {es-issue}26247[#26247])
+* [REST API Compatibility] Nested path and filter sort options {es-pull}76022[#76022] (issues: {es-issue}42809[#42809], {es-issue}51816[#51816])
+* [REST API Compatibility] `CommonTermsQuery` and `cutoff_frequency` parameter {es-pull}75896[#75896] (issues: {es-issue}42654[#42654], {es-issue}51816[#51816])
+* [REST API Compatibility] Allow first empty line for `_msearch` {es-pull}75886[#75886] (issues: {es-issue}41011[#41011], {es-issue}51816[#51816])
+* Node level can match action {es-pull}78765[#78765]
+* TSDB: Add time series information to field caps {es-pull}78790[#78790] (issue: {es-issue}74660[#74660])
+* Add new kNN search endpoint {es-pull}79013[#79013] (issue: {es-issue}78473[#78473])
+* Disallow kNN searches on nested vector fields {es-pull}79403[#79403] (issue: {es-issue}78473[#78473])
+* Ensure kNN search respects authorization {es-pull}79693[#79693] (issue: {es-issue}78473[#78473])
+* Load kNN vectors format with mmapfs {es-pull}78724[#78724] (issue: {es-issue}78473[#78473])
+* Support cosine similarity in kNN search {es-pull}79500[#79500]
+* Check nested fields earlier in kNN search {es-pull}80516[#80516] (issue: {es-issue}78473[#78473])
+
+Security::
+* Add a tool for creating enrollment tokens {es-pull}74890[#74890]
+* Add the Enroll Kibana API {es-pull}72207[#72207]
+* Change default hashing algorithm for FIPS 140 {es-pull}55544[#55544]
+* Create enrollment token {es-pull}73573[#73573] (issues: {es-issue}71438[#71438], {es-issue}72129[#72129])
+* Enroll node API {es-pull}72129[#72129]
+* Configure security for the initial node CLI {es-pull}74868[#74868]
+* Generate and store password hash for elastic user {es-pull}76276[#76276] (issue: {es-issue}75310[#75310])
+* Set elastic password and generate enrollment token {es-pull}75816[#75816] (issue: {es-issue}75310[#75310])
+* Add `elasticsearch-enroll-node` tool {es-pull}77292[#77292]
+* Default hasher to `PBKDF2_STRETCH` on FIPS mode {es-pull}76274[#76274]
+* Add v7 `restCompat` for invalidating API key with the id field {es-pull}78664[#78664] (issue: {es-issue}66671[#66671])
+* Print enrollment token on startup {es-pull}78293[#78293]
+* Startup check for security implicit behavior change {es-pull}76879[#76879]
+* CLI tool to reconfigure nodes to enroll {es-pull}79690[#79690] (issue: {es-issue}7718[#7718])
+* Security auto-configuration for packaged installations {es-pull}75144[#75144] (issue: {es-issue}78306[#78306])
+
+Snapshot/Restore::
+* Introduce searchable snapshots index setting 
for cascade deletion of snapshots {es-pull}74977[#74977] +* Unify blob store compress setting {es-pull}39346[#39346] (issue: {es-issue}39073[#39073]) +* Add recovery state tracking for searchable snapshots {es-pull}60505[#60505] +* Allow listing older repositories {es-pull}78244[#78244] +* Optimize SLM Policy Queries {es-pull}79341[#79341] (issue: {es-issue}79321[#79321]) + +TLS:: +* Add `ChaCha20` TLS ciphers on Java 12+ {es-pull}42155[#42155] +* Add support for `KeyStore` filters to `ssl-config` {es-pull}75407[#75407] +* Update TLS ciphers and protocols for JDK 11 {es-pull}41808[#41808] (issues: {es-issue}38646[#38646], {es-issue}41385[#41385]) + +Transform:: +* Prevent old beta transforms from starting {es-pull}79712[#79712] + +TSDB:: +* Automatically add timestamp mapper {es-pull}79136[#79136] +* Create a coordinating node level reader for tsdb {es-pull}79197[#79197] +* Fix TSDB shrink test in multi-version cluster {es-pull}79940[#79940] (issue: {es-issue}79936[#79936]) +* Do not allow shadowing metrics or dimensions {es-pull}79757[#79757] + + +[[bug-8.0.0]] +[float] +=== Bug fixes + +Aggregations:: +* Fix BWC issues for `x_pack/usage` {es-pull}55181[#55181] (issue: {es-issue}54847[#54847]) +* Fix `DoubleBounds` null serialization {es-pull}59475[#59475] +* Fix `TopHitsAggregationBuilder` adding duplicate `_score` sort clauses {es-pull}42179[#42179] (issue: {es-issue}42154[#42154]) +* Fix `t_test` usage stats {es-pull}54753[#54753] (issue: {es-issue}54744[#54744]) +* Throw exception if legacy interval cannot be parsed in `DateIntervalWrapper` {es-pull}41972[#41972] (issue: {es-issue}41970[#41970]) + +Autoscaling:: +* Autoscaling use adjusted total memory {es-pull}80528[#80528] (issue: {es-issue}78750[#78750]) + +CCR:: +* Fix `AutoFollow` version checks {es-pull}73776[#73776] (issue: {es-issue}72935[#72935]) + +Cluster Coordination:: +* Apply cluster states in system context {es-pull}53785[#53785] (issue: {es-issue}53751[#53751]) + +Data streams:: +* Prohibit restoring a data stream alias with a conflicting write data stream {es-pull}81217[#81217] (issue: {es-issue}80976[#80976]) + +Distributed:: +* Introduce `?wait_for_active_shards=index-setting` {es-pull}67158[#67158] (issue: {es-issue}66419[#66419]) +* Fixes to task result index mapping {es-pull}50359[#50359] (issue: {es-issue}50248[#50248]) + +Features/CAT APIs:: +* Fix cat recovery display of bytes fields {es-pull}40379[#40379] (issue: {es-issue}40335[#40335]) + +Features/ILM+SLM:: +* Ensuring that the `ShrinkAction` does not hang if total shards per node is too low {es-pull}76732[#76732] (issue: {es-issue}44070[#44070]) +* Less verbose serialization of snapshot failure in SLM metadata {es-pull}80942[#80942] (issue: {es-issue}77466[#77466]) + +Features/Indices APIs:: +* Fix `ComposableIndexTemplate` equals when `composed_of` is null {es-pull}80864[#80864] + +Features/Java High Level REST Client:: +* The Java High Level Rest Client (HLRC) has been removed and replaced by a new +{es} Java client. For migration steps, refer to +{java-api-client}/migrate-hlrc.html[Migrate from the High Level Rest Client]. 
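+To give the migration note above some shape, here is a minimal sketch of
+bootstrapping the new Java API client and issuing one request. The `products`
+index, the document id, and the use of Jackson's `ObjectNode` as the document
+type are illustrative assumptions, not part of the release notes:
+
+[source,java]
+----
+import co.elastic.clients.elasticsearch.ElasticsearchClient;
+import co.elastic.clients.elasticsearch.core.GetResponse;
+import co.elastic.clients.json.jackson.JacksonJsonpMapper;
+import co.elastic.clients.transport.ElasticsearchTransport;
+import co.elastic.clients.transport.rest_client.RestClientTransport;
+import com.fasterxml.jackson.databind.node.ObjectNode;
+import org.apache.http.HttpHost;
+import org.elasticsearch.client.RestClient;
+
+public class HlrcMigrationSketch {
+    public static void main(String[] args) throws Exception {
+        // The low-level REST client carries over unchanged from HLRC setups.
+        RestClient restClient = RestClient.builder(
+                new HttpHost("localhost", 9200, "http")).build();
+
+        // The new client wraps it in a transport plus a pluggable JSON mapper.
+        ElasticsearchTransport transport =
+                new RestClientTransport(restClient, new JacksonJsonpMapper());
+        ElasticsearchClient client = new ElasticsearchClient(transport);
+
+        // Typed, lambda-built requests replace the HLRC request objects.
+        GetResponse<ObjectNode> response = client.get(
+                g -> g.index("products").id("1"), ObjectNode.class);
+        if (response.found()) {
+            System.out.println(response.source());
+        }
+
+        transport.close(); // also closes the underlying low-level client
+    }
+}
+----
+
+All typed requests go through the one transport object, so connection and
+authentication setup done on the low-level client is reused as-is.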
+
+Geo::
+* Preprocess polygon rings before processing them for decomposition {es-pull}59501[#59501] (issues: {es-issue}54441[#54441], {es-issue}59386[#59386])
+
+Infra/Core::
+* Add searchable snapshot cache folder to `NodeEnvironment` {es-pull}66297[#66297] (issue: {es-issue}65725[#65725])
+* CLI tools: Write errors to stderr instead of stdout {es-pull}45586[#45586] (issue: {es-issue}43260[#43260])
+* Precompute `ParsedMediaType` for XContentType {es-pull}67409[#67409]
+* Prevent stack overflow in rounding {es-pull}80450[#80450]
+
+Infra/Logging::
+* Fix NPE when logging null values in JSON {es-pull}53715[#53715] (issue: {es-issue}46702[#46702])
+* Fix stats in slow logs to be escaped JSON {es-pull}44642[#44642]
+* Populate data stream fields when `xOpaqueId` not provided {es-pull}62156[#62156]
+
+Infra/REST API::
+* Do not allow spaces within `MediaType's` parameters {es-pull}64650[#64650] (issue: {es-issue}51816[#51816])
+* Handle incorrect header values {es-pull}64708[#64708] (issues: {es-issue}51816[#51816], {es-issue}64689[#64689])
+* Ignore media ranges when parsing {es-pull}64721[#64721] (issues: {es-issue}51816[#51816], {es-issue}64689[#64689])
+* `RestController` should not consume request content {es-pull}44902[#44902] (issue: {es-issue}37504[#37504])
+* Handle exceptions thrown from `RestCompatibleVersionHelper` {es-pull}80253[#80253] (issues: {es-issue}78214[#78214], {es-issue}79060[#79060])
+
+Infra/Scripting::
+* Change compound assignment structure to support string concatenation {es-pull}61825[#61825]
+* Fixes casting in constant folding {es-pull}61508[#61508]
+* Several minor Painless fixes {es-pull}61594[#61594]
+* Fix duplicated allow lists upon script engine creation {es-pull}82820[#82820] (issue: {es-issue}82778[#82778])
+
+Infra/Settings::
+* Stricter `UpdateSettingsRequest` parsing on the REST layer {es-pull}79227[#79227] (issue: {es-issue}29268[#29268])
+* Set Auto expand replica on deprecation log data stream {es-pull}79226[#79226] (issue: {es-issue}78991[#78991])
+
+Ingest::
+* Adjust default geoip logging to be less verbose {es-pull}81404[#81404] (issue: {es-issue}81356[#81356])
+
+Machine Learning::
+* Add timeout parameter for delete trained models API {es-pull}79739[#79739] (issue: {es-issue}77070[#77070])
+* Tone down ML unassigned job notifications {es-pull}79578[#79578] (issue: {es-issue}79270[#79270])
+* Use a new annotations index for future annotations {es-pull}79006[#79006] (issue: {es-issue}78439[#78439])
+* Set model state compatibility version to 8.0.0 {ml-pull}2139[#2139]
+* Check that `total_definition_length` is consistent before starting a deployment {es-pull}80553[#80553]
+* Fail inference processor more consistently on certain error types {es-pull}81475[#81475]
+* Optimize the job stats call to do fewer searches {es-pull}82362[#82362] (issue: {es-issue}82255[#82255])
+
+Mapping::
+* Remove assertions that mappings have one top-level key {es-pull}58779[#58779] (issue: {es-issue}58521[#58521])
+
+Packaging::
+* Suppress illegal access in plugin install {es-pull}41620[#41620] (issue: {es-issue}41478[#41478])
+
+Recovery::
+* Make shard started response handling only return after the cluster state update completes {es-pull}82790[#82790] (issue: {es-issue}81628[#81628])
+
+SQL::
+* Introduce dedicated node for `HAVING` declaration {es-pull}71279[#71279] (issue: {es-issue}69758[#69758])
+* Make `RestSqlQueryAction` thread-safe {es-pull}69901[#69901]
+
+Search::
+* Check for negative `from` values in search request body {es-pull}54953[#54953] 
(issue: {es-issue}54897[#54897])
+* Fix `VectorsFeatureSetUsage` serialization in BWC mode {es-pull}55399[#55399] (issue: {es-issue}55378[#55378])
+* Handle total hits equal to `track_total_hits` {es-pull}37907[#37907] (issue: {es-issue}37897[#37897])
+* Improve error msg for CCS request on node without remote cluster role {es-pull}60351[#60351] (issue: {es-issue}59683[#59683])
+* Remove unsafe assertion in wildcard field {es-pull}78966[#78966]
+
+Security::
+* Allow access to restricted system indices for reserved system roles {es-pull}76845[#76845]
+
+Snapshot/Restore::
+* Fix `GET /_snapshot/_all/_all` if there are no repos {es-pull}43558[#43558] (issue: {es-issue}43547[#43547])
+* Don't fill stack traces in `SnapshotShardFailure` {es-pull}80009[#80009] (issue: {es-issue}79718[#79718])
+* Remove custom metadata if there is nothing to restore {es-pull}81373[#81373] (issues: {es-issue}81247[#81247], {es-issue}82019[#82019])
+
+[[regression-8.0.0]]
+[float]
+=== Regressions
+
+Search::
+* Disable numeric sort optimization conditionally {es-pull}78103[#78103]
+
+[[upgrade-8.0.0]]
+[float]
+=== Upgrades
+
+Authentication::
+* Upgrade to UnboundID LDAP SDK v6.0.2 {es-pull}79332[#79332]
+
+Infra/Logging::
+* Upgrade ECS logging layout to latest version {es-pull}80500[#80500]
+
+Search::
+* Upgrade to Lucene 9 {es-pull}81426[#81426]
+
+Security::
+* Update to OpenSAML 4 {es-pull}77012[#77012] (issue: {es-issue}71983[#71983])
+
+Snapshot/Restore::
+* Upgrade repository-hdfs plugin to Hadoop 3 {es-pull}76897[#76897]

From ccf8bd92e7c6afa7364a5d1fbcf541d3008cb5c7 Mon Sep 17 00:00:00 2001
From: James Rodewig
Date: Wed, 9 Feb 2022 11:16:54 -0500
Subject: [PATCH 011/167] [DOCS] Update index management screenshot for
 elastic/kibana#125037 (#83702)

Updates an index management page screenshot for
https://github.com/elastic/kibana/pull/125037
---
 .../index-mgmt/management_index_details.png   | Bin 146003 -> 130999 bytes
 1 file changed, 0 insertions(+), 0 deletions(-)

diff --git a/docs/reference/images/index-mgmt/management_index_details.png b/docs/reference/images/index-mgmt/management_index_details.png
index 13d607b061d4ad711f3c3fe359079ea39e9c75c3..79568818aa4ece26efc531825553dd06b6f9df5c 100644
GIT binary patch
literal 130999
[130,999 bytes of base85-encoded PNG data omitted (binary patch payload for the updated screenshot)]
z?bWd@cGV#6*wDPUNexyJ2P0^g`IN4!_^G;UglbNGe+JVrtUZ*b@%1A((E8Kg=L5HM z0E5Y3sjC43rYgkm|Bl3!>-j#jXBe9^Ox5?YhL#LsmP?e>uREAq2mK?giC%R~hVM^b z<~z`Ysnwj4FqQ_C4|M_5RZf~k%DpAtyqXS&Zohg(O`(5r;QgoLsFyEP?hlCR6!|_y zO*l@XZLPeuzE#HWJE!^1zDK8N=Y4DlUhgDLR25Bspd|482Lk@ zvL-@9cQc$yvkv`w2^#FN_^uLF;YW%+61RsdY^sk0?H(vto?B6yPy_1V@h>aN4W-hE z@NlIOb^;j=s{hwwSD){&+RgrO^Yt0ndA?`IYpB?%U=@CynH2|%WM+oudS%rnB;1^G zkiMUDs)czG!|iTgMdB&^2)ve+II>~3h@ zqBi_y;j{K9GeVEkrK2cC){H)fdaB5^b7_R8%<8vuu@4(x^<(`oM=RXFiS%#)pF@?|bbnNCdWChwnW@_c$Q-qBH>QBDh%wS-=+O555D%IrBsKj|lg417$o ziHRQtJoc7~v?}z%CiOk}_`N*`hmq~k$f6J35@!wG@2x8i>2}FhoYqZDZK$wOeOXUJ z30gDJHx6T=NM92pcQY%uCI*g1Lq(d29f7g9LcC8MivO;m#K}^??_G=35Gzq(5pQeY zA+%S2)_n#tjf<5$S6i?_s$NBS`aBJKW%Sw<$(bGe=vG4Lr=hEeq{}-UrYtrpio#6A zAFa@}yCV)AEa_nh2T=j{$TR}DGA$!Qgyi*P5Z>7Scz%hnv@fp-F&t~?t-{wXsr2`G zC({-UXTNASpaf3m<{v7EvNs`ti8#te3=l#J+7GZQH$T6=u8?JxBTY>X={(;3$_dr} zSe^MumAM$TnIUyF>PIwMStK;q$G~?|-F>Q;#^G-iX5ik(QGd*B+kq@)UnJ%Irpkjx zUGkidH>YZk>XWE&QhMqu?%^Lm=xvt!JZnyz`#Oi-onbpWO><{E&0qRuapNz)d2&On z^3$6TwZ0Ffq(_vz7B7OmLENp#{BzsivG$P){+Y9g1i<;8c!3DiO8TfjBUe)04sIs;k3fvT#RZp&dWpOzKOWNHqx#k z17E%4xQ%uz%q005$%=?$csB z+dLGzhPuEID$UgmvAyGb8K}!TT+%W{1zOeowny>r^ym2M&rXlpR(-cOTARZ!dy_vI zG%fQQCq0bo9nuPp^FeExiKise#%Qr^K9v0+o`*Hh6jM!FoyrO1n~)Lc>e!8j0E2rO z@R~DQdj0A4EtA7{6dvo!7?lqKcBc~r)O>tM6=nsPYL<{Qj+Z7#pOVjC!wG1-m`7rw zkpVv4=FuU6A8u!2_KV7Pt^$1lt!K5UtIGG}giP{ykG>q&Y*Rsz)u-kH#_Pia=1j{b zwH*aBIg^dpWs{xy3F)lS*R*hLJ)#!w^y96qu?`&V&-4nr-+*$Ax&hfCS0QfJFSVM@ zwvRAnT;_u2?)D-O^xYUNv4#DrACb6|!a}+=p=7iALFfKBFr$K~d4fD7q{e(M62Z|u`)sh#yS_hMWbQVh zTzUrZ@{7A)S)tmPYDULH`K3`i#f`w+*Kh3|z0CPM>}pqcvoBB3&OPE8u4}gns7yH+ zl1p7h>X7rlWik)&CZl+JUmTh+Fcq4YsMu?-ACSd+?$LN}hjK!5_b6JaN5^`%D;hOT zo8Cqo9{f>%T2Op)c3hOgXE$N=&p+_^H&JF?i38=z{24R+(=}RH&+M)cfvkeTVR69oYGJepm7p%=Fx&ZTb9-#mr=VE6Q`%VYmyM z+PE@mIa=sl`v&g(g`krhMBQA?VZHwF*6zmveXPQ1XM}2JHX$z!>a{Gjh-d#Y9Ny>}rF4#;OI0j+8p z23fp6a07PYAZpHCRL2VD>^+St)J98;H=-~MddlIrj2GUmuh)i~?o=nl9*iv8D&n>i ztc&QxGJ8u*>i>|GIYC!1P9u0$YSEk)LADBPMcP67rJ^TJKTeITKqARDsuxvth0wMn zTwhMFfOP(ZNHiO6&Ee(N95eK$vfij!CYCZACwXFxe+5-@<|Z zF`e2d#Yp=lkgl*p2ga*QE0Urh@7?@u-9k*8EVlna?+cmxkr?lzu;-1apumM|WOKUC zPneJ}q6I#xK^4k#J*Fgxk+*nbh`mZe{LoFN>ws`?b*TF-*JJ(+f6`7_y{ZC?N91cG zcKCzl9T{?n+S(H><~`MqcT5u&#~7Xt`Tp}yQ-ME;5@faQuQ&cWL$>3~4A9)nL<6{p zaB-)!>0e-1)m)^Y@mMrWdlGIRS~kCK+k3D3^aTG6Ptk7X^{i$U+2ruYO3gWHH=HM%L9%Q;j3SOYjl|GLbI8^VV$A7_ zoF6~-ubAi7CS~Br?6+P@_T2FdFI&fQjgv5bi)Ll41iAPFSXpWyoX%tarl%BKx(>Z+ z4m<|?La2bc=LX+Nv)mMnCfz$`Z!n*0*I^{F%APRAyXyc{23s~kG(k~${Lkk`<#fj` zvGs>ZM*^c2-kV>t6vTI&j4B!h@17_iEH*)!2Q@I7Yn(>bZ{}Jf1Tapo?#FI+aMbf6 zHFm@i6FqOgYX)T8sTr-X5#YDw$hR=LLqadtzA@&w{*g^bd?>`(FbO>WX0csEpr-wC zVJd!bwMsybTZX@8$_3uY?{F`*rc zfh1ds_|Ixcc5f|u!-cLS%;ai}7N6OeZ(4RMQQ^KL-)Khy(dIEq@=GhD6}J%_u2(HtFmvqqj{%+6`_Uc$+n8!g(mi~T*{NkrxrPcf%I(b5!C zqVZJ&HEY(r9N*KM0{v`(SvN`}bEediz1Q43((^hYzZi7%=rS4*$+kv(HOjgDa`f7jSX`&T!7owSsS zmpP9!+bRJQ9aUBdHqu4q)n4GvtkV3~PTJDKcDGPTBCgW*$N02PPvKfWHbwVv8^7Zl zbR)`<<`EZ@9%R#cg$7@VO?$8(2U{->d zQhH4~+XXb03y^rB2L3$^Nogz=^p!pMr+ux~`YU|aLpWZwOm^<0jQr%1`yew-MXGMgT$w1W z_IVXt*h~|i(PsT=TiYmoX?JuTYNzWj&z5IFJkge#w*geIt4(u)7NQnkizZq3aiBxc zT68CiEv;#hWrye}ON)2>gEe*Fn2aNnyJWLm>pBst(=)L;WjWpP!ZY)_A9B0!i@I*R z!K;S|HY*B#uV_SbQ}tTikb~CFm77)e14r+UFg=1-*@X)mS}YIRC})|g7TADlAbZj4m10)UV>~~t%+2fAmQtI$tej$HJEIf9cczr9fQrWY=%dr=Sv%L zX9M>DIsBEQKpvfO=%_&bcGo3PZFq7_fr`(H(wVb5q6a|@ zNyKF((a`FJ9c2kZqhoEGo5h|mM(OXG9F-`IM8RU=D7cy|O!YB?USvqZotilLia_{i z<@)3#4oqo4T=v`$Yk9=G&6!=x5bkd}#8;3@ESc`T%guszk3tp8P(V#kN_;R!?)$RP7Cd zVz4#EZL_zd@Y4|AeM?w>^4rbj<4_unj0aysIM|hfnxK1y-y<@M)i*R^(o~clthPD8 
z)(jO90aBhpMYpMv+{-WjK}g`IP0)7osfe^Nqp3*gI`$+vwAzsD&d@+vCFzb_h{@G5 zEu7KXa;+G00s$)cr@Qy{U}bUDjFPM}HV1)!?4~+5zOF}qwX{9J6dS1cXAKBNbcU~K zg&7!s$!4iW&C9ZC@w?lT~5Xf0MbMAnCF-KGoFlL?rGH7YAL;H2GLdK^VTKD?#1z}da+|w zS=e?%2zxPgz`T?K4P@X^Z95SQCO0lRgRY$p5@XqCPUa*^c_~5v;Bz2t*asDnFvdc) zf7a@bXcfonKtQhD>k0Ii@2c|{R})ztkFo{)94nA*K|f{Hby%exEs-Pe@rV? z$sppTTZ^bdE77a186WeQ2GZT$RR_YCm%rq^N;-Ie+3$`O66e{`Gpip%Hu4x9vhboW zl2*AgZVSc&oys*Ny@H-72iWd{DQ{`L>W(z+Crn45;K)>sygJ{i<;4S-uRlVGJKWGexpE7L>m6rAlXZC?A%k#c`2G63o3 z?q<#Hr+A+FgDO|7zP45TiXcF=%B?3Ayr0W_v1l=$8(68vMwiFxdbp=jtt_PYDh0o8t_tuOCnd}iJ_@lOb>VDA&PzD(sit1A)7Ian#6cp!KJ-gNH($C@J{_>#@3$Y!N(;`ouNf0@#va6(`q@k+L zN|w&jrpJ1d*7u=xhgzS@N*>HpvIV8>Fwbyn2{+J+sHuc`W+tFZ;b795ZGB67@fP08!DW1NdQS=YM<9EHKH&EBfX$}Y(4`95FoQt~nz&iV~ z6zmB4nmDvd`c04G-@w`WIu3d7*^Y&+Rk=nKf69HOJ)dE71ElHPsx*Dm;q^dJN~qE} z`u?wN39&-9GsrKmKgh%SOS~sRnoV9w?;Smz?7fiWinz(sOFfwZdjmk_64}ut^x?pe zt_HyTz)lrmh$SB9qrkAG(ep;R7Zu4+*PcM2EK3^(KFw-Dc{;%+_y|T4h_k!HE$!9O z3<@=Q!b7pvXF@ei?>#;S?EJ2cJwEfQ%?X67H8^5Xp9#8INL19+2I|l7_8w{IzRP?Z zq8^qpA8oZM@Uc=nic!u=qwU={dqc&HY2u-i^^_A`t?VBd4@9(}Wp#o+pB8)KpH^Y^ zg-t?9&O-B@9}K!Uvyno(=y~K7UhAqkGL)FRyUe5ySTEn_J-v0)hJgYhHT2u8#lttB zWFQy@*T>TlMXwTWEIyz7+ac^|iO1|N<1y)@uqr=B5n7iI{7g$;Czr5<8hVqjgH>9t zr8kjis3`M$t>Jh*opp*pEnNu!GeiV7O`d(aN=ix=!Kyd)Gc>HNGI6At@=5$Ju(xAX zDjvBlFIQ=-PYNclNt5fnvQ$3L{e=_&8SEN@2BP*Fw_H>&Kjy<|sOYaj>^ckLz_=D= z!(#Xwc#K48x<5d-W5Cu`689(!q9aI@`z3C^ON1wH`P*V6N<-h-W-M%5DcI8mV8K9k znE8=q1lONnqt*UcHpOW{agkY2U=EO@GQgp^SArn=BqLvEx+ z7C!bF=Pd`-bx0)ZY33=AT!Cgy^?&W{JU?tt-&q~vsKMxeEVEgTZWDFh|Izk62_gOP zN@}!ku;{Y5w>%3sG(qrYRYQUYkIB-3zVsX{#=In7UrEW*;N+#j*&tzYj!d@q7o{{Q zbCGP28;s<5&o-GFuHMVFq+E6~RJ`VVq@8PGDZPmEH<Yv_-a+* zCQwy2*$ibuVSg7IOurpWwQjg#^6M`pWS7{xc{k>d*|2#+C?>^6r6`&Cc?VG5g&%?zrfu3vj}gdteb zmWQX@I&kjmZ$M#C@kHqWpUXHGgS#%Ngq&BShs-%M=|SViGosq10C?avk%5wB<1m|K z^py;Lm8PMZzec{sS5VrqMrwfQY*Zm((aXVT!Ft|9$Ge(7tfVMevqgC!8}ZCw9wg~e z2PlWDevG&FRmJK_;ldnMal+8toNl#fRhqmmjqN_>G>dqV(X_}nyC=D5*^vW#MpC%IB)(?0Zh!I-Z%Cq*(}=K0oAZ=oXcth2uj_1@hKlB`u( zVBdZIae^+3(9epwWwl>lZHOb7np}7AikvJPo&7AG!_HXVq~|Zfpdp}5Av1%+cUR4c zA!;hfj=D5?J&&e>YEX#taEmm~je=vd9-O8r{bxz1Zz~Rl- zw)H1nAfVXo6qvMNyJEF00L3Y($~PbIvSM78wHBXM1GxXv=x`EjqlZNSP$q)!6`j{B z#rMBUo&@a{R)TRQEZb4m40Og&r^3tj zAogmr?@uEG)ye4q56r9d`4gk}YkSm^V^dcMhi`59|C?UD@ zzG1=VM6Z$#4_?S5^uv`GirrX(MGoFG@f7FY3}y;|mv^?2E-4IQGTZP`k(s*#YVv{_ z-L;8hze9F8m{Z{g^j!-TLcfuFjY}%@ghp+wP%DMU7W1wh@Isx+34_h^0QSE7lBovs zTapQ%LLhcnhVYMMPx5ny(A4U}T_|Ol^-y8uq>p*e$D@T#iH^v*P^c2ikZF8-w zqN7ond=guUwntlT&0l0s0U1F7a3Acexx)3r@7fqoe)j#iFONp!OI5#@ z?oCJ%Nf<4O(FUO0IU4y5cvgZt5J|ElHVl>W);9Ejwa zAMQkEdP2k*_k8lLS&{tBH@IlhjLpRXc_XF9HqPvM#R`ob$93P%PMcxXNuQIIyyehn zu9zy%zN#nsV<7hKewu(D{;)NkMe${ID9>)vXO74ZN35!*wzFvOF#YL1<2k0qLeqTO z@{VE@U@PqOY}Al}z*EBp?`imI3lYGs-97e|5O1-scCz2m63SKca(L zZ^GS=xk2SZ^dP_6AM5z?tD1?rc^3|Xf`PMpJ#h^v*7OCk;=Xs;yN4)N*I4Oa(&8u8 zD9`^^C>i^`;V>m4gyk-!D?x(Ju!E}UbwAs244*9*L+vzwAto2xuT`#T1~f)SsoiB` zdLcTy8t|9$NK}2)w!XF$n1-_*h9vD|InFdlBDcQ+Y6+D!Om(%Oaw2RrBttg?*J8=~ z!)b42PD6Lj=SRyoG!D01SU)8dQ`Z-XK;GBeX|W1La@6hj>hl)EDBX|~DiI^3wSaWq z;o^CYQBOMPjD^VnyF*+ZN1ynBvOSV)N6sD~Nayu!HRpX$l6D95EN6SQE|n|kej!U* zCRO>A!S)6MGCftg_d6s$!T{N1?DO*Ipo3~l6v$UP8lPOct%uWC^ZouAR5sQC&n9=B zgKSW**eGMoH3Jjs-V0I~t3Nu(e0QrBX4DgftmVhFKW{evpWIj&;>R0npV~uRTOlnv zAKWg}+GjUW^EjAu5derb)5j}eopnjUErebHMdNJ%DpDTPbZ7fm=;|}$})6*YK ze>oe0v7)lZQfH=?ysg0HKsp_>a7%pUhZ&%X1Sx#F;~0-A09=gajjGtFi#-vWc?)t) zN`N%zlfu*qAW}WCm1yL&*o!)1wk8Xk*EG{otDbJRe%;Y37cXq>^1Qt2gKZt3grQ=` z%9mgzDO)Wr4mCG6UzpR;?l+IX;C+}OWy+sANUe&K=4rFja#fLYR%wUx^UEN_t_0u& ztnP3j(essQLEl1h#9?yzJE?ICN<4e>f_=BKU>V&QiwCT+&(~{TjOo_8uDO$s(0PC6 
zv>3Kk0Dd#l?%s}GR6+H65Y63PibAF#*>D}D$C@Y2;wMgibNyw6GsA1ekAgvnM6TPb zuc$8I<2K{Ln%i4pTJWrG9lrr(t03=BIa9Y6dM6Kmt$H)B|M;jrv15aLxZ( z^B2JDf5ZEkGh34wF(KL!tox=i#Sqs2fo~_tb#g1@8%vF;1Zz3qy$PU0(1J;uBoR4p zhxB!9Cw<}@QOMyyCfRqi#sGB>ncGJQ%heEkZ(h|<>wNb*n%3V{50Y-GTU%)+=T>?i zTF~hM=Z>2*1ssa;dJ^YBz)`Cx+5`m2;3{)AH69V?rPOLZuXbUMjt#G^PC&<^s`ZAh ztkw=`T=BHSmBsH)@HC)ItHoBk86KCrtd8t7*5!NsU_Tcr8-jgoxf|eVA+ER)t@#&2 zvC+jf8T;8o2e?1`Kk`Xg_|0%vX(=dH_9LNWF>*%vf8QKVp}M?ir+u)4kfSe>Hh zIk!Ai5Ssfn(7R;+ihxwQLdiA1!=+Hb$3oxF&(&^WKTpGK4Lbvd`SW|KJlMuL2r z!dSKwtI8@LbvzPi3%D_d)hg|s$~Nri^jJz+?{4kfR(CnqG4DMwR9jAAk-R$jc*!Sj z9R}|)EpEfl;yrBhy`CV|14ihn*z0?^B$48UTPhjni!asiR(~q*EbOezQCsFTT;gk` z?}k{&TcWUjKB@d{z>c6diQl~NbtDoOIq~^T9u1R^-{Q;i4FKJH>dw7sn(&zk9cbCT z>L)BN`uTVuI{g#o)Ow-lX;6Sir7-UK=vyqm*OCM zze~(-I4zJpiA_PxeE7vjOH5s&wAcLx`(@d5m~pQ4B5fOwPQQK3I5I1LA&svF2W>4p zX5YvMspquw^{RK*Xw*z#PG7$j2k~eL)NxuAr9X25kWQ0!#U=df4s>?)2dl6ATeuKT z-ZXTn*SMhC_1$7W$wqJBJiJ2`hS(QQjgbt&_c%0yIkPp zc(hX5Epv!g3FxI=LiD`*x#+DVSG&yhaMg_6LkOn1*y!se3v%P5fI$YEKy$C{dgXem ztnk%%|1^W1EA2QWkp?jvH}ud?Qopa5#ejCg?IN?2?0J+G#G%J)zW?JU%e-kAk5N3Dp%4|7{PB+IIigM^Hk@OyguC{Ckgw!t6mejJ?hG?}iZ@=!brC zL41BRI_?BrfOIm$aLo_7x=Sr&mDq7hb++ias~pRHy+a4az}!1c5u-zIhMIlXvo+Hg z{lmvj0kh+l?+=0-%Om~*pcEwr#h7=~dVWT<4xY@4kElj;e|?%OuRU9wQ$3ikM^&|P z;q?K#>RhdYgWE~BxLX;|4tMT>jtlynNYeuVQFp6)i5JlV$^|HdDnmM8=cQ~0x!3dj z&|I{QbhohRw=hwh#S!ax_1+6QJC@8K4UIQg=qnasZj|&So^n>6R%A_pJi}Vv(ieytR@Zg6kkr;huRQSWWIc z_OGwkTy(iFSqZogdW;VQu3L^{b|M0!?|e&l$Ar{0C9sHa$7>T%%KMAUvSap@TYu_O zjG@bz8)vn8FJSEf3d&QNT*-|Pevx=<3m>Y ztS&vktHyym<)IQmXjLMJfKt@I0Ct+5_Sdp;uxl1$?a2UkCgv%Rc^x6_c5Odd%(Gl5 z$HtG)PhC|tec6TKx=z`1KDRb3pjW8@G;3hvs{d1+ly5EepO$K$2Rpe<-{{OeS@1oP zee&%uurnTw{p@%Ij1V9o$B(>ooY|sVUrTqJ{%MaX$|983Y(OP&XvV|#gl&-VvN40K z*$?BU=>Z4}LfPQ1LezF-aJv4BspcIrNI^kCxk-@jb@39`WM+3=Y^J`ajk__J&P%n} zAu;{k&-ipLG|HG~QT&VIwdkI%!@jDyuP{r&&IGYa)L|@AC^MAPLdZ^`J+$#Z4AF_TmOgHFDM;;8=7Fs`y{Bg$#`%|DA}h&y_FnT z(v_O~c@_jKS`fe6=Wc~H#RDIX;406}Hf!w9WyfQp$qeW~x+ zA2Tg#i`)k~STibJ*!HK!4?@X1se!bE#VQM*{Q_mndg+Ya-}YTjqB;PM0%T0^z>jG|+RIAMDteb@C@=&wC9v^isa? 
z@botLyOKJ&UBV|invGJR5tz8J3v*x>;t5f1UuBkrlI!N_j z2-r<{T_Nc+WiJ1y-YpH|ZI<2hxy;s#1{70F^c5RGj}RRB_5yfnMtP?ko8df-^)^Lg z9A`Gag~8*YJDXofTtE>T$N|uY8=|MDY-uq8S5TpM659TGaP zQK`JSyJD!=4Ty6h>?+y!JE-jPT_jwaYXYuQQMkIbJB+(dUedqy1R)G4GYsr{Fx4me zim?8p4>KeR;->L?E}#yQ!BIcW=C#FEIoxNaVc?j=_^LCv+4KpZ4heDH%%GMX*q0^< zZy~#UEh`^z>H`>B9R~)6+y{1C)+cDlOF;kI_JC;-1tGwbXglAI1CMMkwDg0Yhlt;1 zc(bZRJ!oe^Dm+KEs;P}FJ361$)J%|asL)%&;R)D8)#Jam{hdmku0B?G(wc1mNd8~# z6bLgmRLi=#gQ^IqVz%Pds_$PqR}g?jc(c?kE319C>GIVUDo(S&J2dqCg8+}i!p zG$2HNcEH_iqVn7qcu$$>k#GY8u>`FN2D5~G;rf_KY8emZ(O;*ZaqG0w80CxA>~ zghX(JwmTjWOFCbTZ_}@J1a5U{plz_ypdNsSJZ4{I$21GD*T~ez|m!_~xq51di1;DVhL$*7R*{u?K{6^9xl^&Kp)hSCzVIU^1u0 z%qPbM`4}%1O_k3{Oer4P@i4iw1X~r>ugQn>UHyz=S=#|!Ib^2SSSbDW+XhXU z=gib;Ga8v&Yo51qqH}ySu`VOdi;*6TdoRuzj%-KDH7lV1ribgEd0Jdj*fF5vV*npY zt;eQ0IOdS|u|zied_uXknzSkf`A%b~ZhLO)nPJn}RnRk<>+M%D-%E5H$GxG1=Kno>R>C5I&@Qs$Xp?>XNfJv zxL|wWc>xr9#F{tSXaA%lwn#DfFxhi2`K?QClR3p&LRPX5ntm}080oGZ^9}!##&y&YCcbR}-KYte&2W_Ne+hs+))!i&XOv!@WTel!`&`P=d zLU}Ka4{o8L$-I_8P8Sis-qcJCoSN^q zsZOVV4%b&lTotEtyIH(rC{-60LnBPbkp*E2&@$(h{^2^=5M+S^F=W8HswtsJN+fkB zXsJa_5Gw5UMI&nE@lVnnR%E@zo?_)-HZ zQd60CBiNHy;pCiW-JV9f6KL!tP;N7-A6W1Q|BLUp{ps^^TLcn*Rvsxq4AuHY?28pZ zEY9Z{3;`9%)0^O3g5_@79$n&R$Pph8iW%-|CtaB8b@AD~t3nlp zg<-@R%-O1Yo@`5Jh5!SU6CpYz}?0e$r_;KmR4%)m1W+@)Z=XXmcF z^Y7oiji{@j^~al+i7RTBZy>R+Bl*R<`&@MKFo@jO>$xl=7V!(1l3dmePQb4w-{Y43 zyY!7^r*FFmVBq{ymoBsI@0p|DfbuY$Pre-bZWzG*jOvh3s;d&Fj`wuKstna2a`|Dy z^ej{WM#p?5T`!mP^+mUQ+Kj+_*bZ>sUhMG?2%smb!+0o92LrlQ9G|uN*wV6I(ZeW~ zhKOqdP;CQHq<;h7QIrz}r0EhM7z-0a#S@9^Z{R&x(A)_+lCUo=V+ylYsWt=evB}UP+h2)d4t=pUpmI&XO+j*8YQN7Eu|IBGVtB?GVXm`Mz&C^dFx@LHOz^Nn#@ zc#G`;#g=>b?kEeZ#qxbk43O)|+&j*>>vcaLqY(ng*8<~S_w*7#6V%cO*hkHNM9Wuz zwlgB}=V0^MK7UNiuNJA{Cyv{N&9xYFu1;m563`-WC8k)FQLWU04pf)I@}N{MR%2C6 zQbY=%SM>#%+6W?1WkUIn;zUF2O?oNeU4_{;SsF-^uycZOftGXTo~_;2xMG4H+8@i4 z7$QHQhWOX1qustzQpr?=(n-LJ__O-^7yo3Ln}_+72f~@uHHHARsKz=j@bS$YS1bRJ zUcO8QD8~0!=wX^1K|lS@&R7QsJvQK!&d*wjzk{1TPgF>+H30s(=fe0NwRxJpbf~)1E*7IDZz+xqAI!@-gVQ`QyDz zq=#zfT7-$+oa;TW#(~v~0TlpJ)jK`4Ip%lvvi}SXT9yZ4ZHC74pLY7cx3w;uYxnfWODSys<`Q{tQ6t*%=k%wm2;gQanK>nTa;I z!&HY4)(Rx(%wVa{B?aV61e$0#6IAr{l7!pTl$6{70T*zb_I6s{G!djV54A*{UV_|G z@weSwnf+vZc6tdX24!X4v(T!wtM>s7I5N|2p9E@0tzQ@_T29tUa!F$W@=mTrp;TlH zME(C!b{1e!rfuKXRX|Atqy$tH1*97Z5do!Z=#cIbkOpN0q!%TmyN2!@5Rj1W?(Xhk zX1;6ObszUVp6~d+caB}%g?nc1ysq;)&;S2-eo4tK(8)GGdeD>vB2#i%kLfBYRm3)h z2w7=d4_wP*p$#a+7*+>C>OtBU^I+NeiI{8wy2tbEUtonx`yjM21F6@Jx8Dl9H0Ajv zj$Zl^QZ3&?*&NWG>g|NPr zHok=&@m;^jPGV_@%kmT0SEV;~T|`vt>BeGWVsrzBL}a|Lnq8@!@-{Pcid`A+&|n`; zvsvh$%!>vKx*f?*sxE-jv{Dt-WE1Cduna=>Mafrm}tg8%u2b+s15A2CgOw>^1g z6~0r42y&37`R7MqMcDTuNWsi&5DV-mZ|emF4v>zLB@FZs5EY>M*i z0VdUj%ym?rZn_U7UlLW%-BxHuE108^W9Jh_&qxG$ix;l4`>l% zsox3syLW}aQuKRjK<(=GX7eA8BwE}lKm+N=gI96=_TxLk`Cv^Fddtth3K*x)kl#K) z4?}wQe8D1UdEJ_V-|}n9$K$Ohx!H?Z^^)a!@?PII;A@}=G6)xRJ6;!lGrP)WaQFw; z=#Y@M59aK`%eMN4AC?O9f4cwJ@J z*^Fo&uir583Up5afPp`OsPHdP6F3`h=Y^eplw@7YI9&D(yT+zKL-vAMksum=>3;v@#hT_(XvrJMq$ttsL=k)Kg z<`el=HOE%qZ+Pnun5mk!V0KYL*|ZkJr|b`~v!Rl>0S^ zg8N_^2wC1=b#>E#KrpEoFc{<02sPRqA3BAbNQ<>CF&CM<^<6xO-90VjodwI_sRvq5<=rH`j4^q@%((Y9NAMxDaCzs*pK3-bH z{Y4$a{eos)%>~o{U%vncjrHrGaWOZ}->=`^@CzLUezQpM_gR1c$1l1N_-R!!$Q1wj zK&)T9GVqJHK4$ZO4ePkSe+cfc4{7}ELtK7sk#ybd zfb7p2BXCKtk=9_ zbhb}29yX%f*mdgA8bXE2G#6Lii#AChpza3=fLR!c;rO9?i;;HWEM3R`PyaI6BAOFI z%Ie(7sFJ(bAGPwK(0EG&z3c(XT+K91CW#(*z;0(~?MTpg-5seAAAs0SX7pF1G$do_ zgt!mJ;Qpy?`;Srzf@XgZQgMH#{E0!5zwz6Bs8KI;b}`Xy!d&!F1paOtz5|B2y^4Ce zrE(vqhsK}7IbChKVjegVPtMk!nUE)95}FNI)NU6)W;16z5nCT{XqLS&h?`1Vtm}Rv 
[base85-encoded GIT binary patch data omitted: binary payload, not human-readable]
zAv<5VaL_)GwMolzvc6H<5=l=4;krkGyE7OlHnY(zYK(Jx=}<&{N+T=Rb;s6A9n;gV zdYZ;=TfgYdH?MZ-J7nb*I&Ee4!O0OKDq{A*7>|bhECrk+X(}3! zXbqJ7rUo8M4)&zx&H<1vs8=`Z(nTl)cqZ4y`E(IN<^_=uU;bpM%SvE(sd;?@Iime`XWAii%1KG?F?=og410G|bV` z0lA^0C1U!|W@x5f(qa6b7F)zYb1p|=!l?G#dUT?NPZj-6sc4-xl_PT8iO}4mV;y4N zyh#V)iA`3QzD*KsdD_1}N!02?lIx(ZQjz1Nt}UL1N`xL(9q8`H?zQbt&(j`6J@|Z4 zhp#}h?@YZoLL{4DNK0pQXAe+_=8KCfhMsO>Z+VV1t)KRyT!uK-d}>f6MRZm;U5?~PxXYhLKx`zbghOE}ZJ~}WsdZK@Ly=gne>3lu zR8)nQp*!y+Y^0~kTj+Oc{6)CXXe`~F^-*;Zfev?ctG#RnoLbyTQ-&20TSMARTpLH^ zXU|$Aa0w|Xy~DWjYYuJs!psZ7KX!JJqJ|nF#?;P-)OvabV}yBXQw8$Qd25`e>#+TuYI?|%ENYc%&Euz-kv}&Av` zztV0xm>%ZkiW0InYEoBYIjwpdni$!M;PxbH5~zEcF;%gJ9L6aZ3vc|`IYD|)Wix(p ziG&1b6Sup%QQya^1{F6eoZu9X}=UJay^S-=?KVd*wha;50BB z2`Jwadpg%$PE5WGQ=g?6J-+nTrVWa3P$wKvxBIF5LBAe03mQu}I8e$)uG>Y7rg0}! z)onmuEUb*egVp~ISFG~S&8xA_@%zkX#!=|1Y*}>GyxRsOKhBLJNIeEA|6BZ?1VyuV zR<5WzHsYb$RP93)8l@oD$Hq%u4D{6Db?7?_aciTJ62d57Ok-ls-^1b}eZu%txiwF) z>%9ny8SM&V@5ROv&b0{P?F2xxZtFerbC7#@r9k9pJV*Czu(v0W7l)><3`$Ahc7T+N zev8=26)y4%PxtEwSgxCVk!NR|Af)0%@N*}mdKv|Jxp>ZCdsWUPp0CL$-c~&{^%A#1 zn0mVCrIL!Nnnf!QbD7h-u*Sq>h-Ag2cR*a6VSk*KCQs(^J4&3IxveKKCf}NP8fjKPJ?-ys9x&T~_apN6 z39qu87cdAydZOi+~v{BmXxz`+Tdd z-SNs8*;wCmQ6ijLa_X@P+*x(HmU3q(l{ago8ZzBwX2{2R#akYS)>R4#W#F$2-Z&Q( z9MBC^-AnhGMbL6^-g5Iy4-)pX(CL4lELLMf2DojW$Z00DLt5=mDtuzJtTCq{a7sIH zFaI`6wRPxC<3rAfZUmG!V6Be-u#ToR^+5YIllBF_{)==)C{QtgVtOJn4%3fR|9oRV z(6(>OpSc!!WH0|n#oE(KGF?Ftb>%`AyAszMzCMr156$zN$$DqjHTj}EGHHS((7{|g z&9k6mr|DjKD&}JwyoW!&wFqK!g0lT99gT_@)uElt6mg&q>GMLLTtb_Oh`Ku-bhpg#}Iql(uoipm&k6l>KWK8wiu1ow58>?CwrK z9t@|TPlE7vO`4h%p>eOK$=j-T!N!zkoFd+HKcxd+)y$25D7uYQZXB@+#E-vDB^xhY0G&S~v7r!gPHVHnxcpAY{vYomPk6md5cB2^Z zi*9VT#W!*F_RpRH`~H=X;_^K9=0OmfVNsvZW+o6t*N`aM9^Z~fwLwa197w1LVwg7_97h6OJ39| zT@ArQ{BK65i{f;6;ervhl--0Lp{_jjqE^7eFLOnnk!x>jQ^3%*)+LtGWKaVd&L8c3 zVs)ILSa_}X85HmiT`ilQF0LK%aCg7hj$a2?9GfS<(?Mg36VI2L1&tW<`c()hyhK0TurN zH*585+Ui2ID@Y{EwQTR*lY;m}&k}!whQ+;@k^6@H>vue!<$08q&+osEvZn68j%bg)#5*ebbnAx{UvJ-|@*D%VwwHOkriLJN0YktCU{m z4UUYO!|36E{d9Mn_|lro&7DiV{)I2CHvZec_JFWRcKzDJS(wqA1OWgBx<-&^6^HmwZN z_pfcHe=hE!ulmwR!I!?Y+W2q(+T-+ZKb%#3BZ@Nno zu(;g%%bwQq#a0yPd~>TTAHO`VE#H{S$DRZ1C8Z)I0s~0bAl)S}NOuh- z()A7cywCf6|9{SP!OY(K?7jB7SDo2w#XiwdCn01Y1ONaek2F;E002TI002)7AB1W7 zX=l=(P)q8Fl->3?YgPvqGhQ zTl47llp?S}`nL5W`>GyWDjw#!n{Dq($-3BHEHI`S=8l|WPu0C0pHg0S6yW3Jv3fgw zP`R%lk)764FgSw@V? 
zqXzz+^6R9)`a3-ZAOK*%YAeYYHXmid53YvY60O7wakw3Xe^fuDcw~yi>=l5-l>h(` zpDoLXQ1j1-Q93c0`2fS;HS4~-*TK*g(By`unJh~MPP6>op1~~ef_Ie#!_)ANwAnq~ zo;k&ixSa?o%tHVWW~Sl7Nia6rfTYb0vsnPdb4<*ltj4`Db7&dYm02tyBf9wr?=LZ1 zNA<9#MmA=$*p`Vh94HJg&=(OVM6CMsXK0G*llIV*rC&lscW|{nN#V;!qo=P{?_=gW zQq$CVQ@0VPON1dt=}{BuCOulLg*4al_B*CKWi_$hW0PKSfzTbZ_|gX`0cz}`4F`(p zgE2wNYIYH1CEi4sujB~*Cd+NcW^6RgXG@lU1A2YSIGyVD<8n+IkS-qQ|o|G|$O5r~7nhglxEv3=Bqb-c=* zcl!+kGOCRR_2w4$ts`W>6&s}Ha9a*y^vwGf7$qbP0tirQ5}-j0u@a%4Vi5Q*B>-T9 zD$-?r@)k}240dIS)Vci_S7#ah>FE3+vRQ5w*ih>HSK1aNv5tevKL}1vZkOCV3W`tK zxjx;51yf=shhQJoYarv#s{V@JegZI1BW7njD+vvMjuY;w7Uqk6b{mus*aHGI`^(1a z{sVmmF1p$}Ve&6kbYCYuK|UpuM~at}h|2#Ac#U^E;ODjj=$JzAmI2q?5Qq9(E&u=$ zWT>mC*@?do8*eDeJ+HZDMzE{=)emD?$O+iTtSpBR6@9ytwaJ?_tAFAK#zlv;xorR4 zAe#1n9zgLzYDg~h{&IA|vMHPVza(7XVa6-`qp{Y(v`MGRk_;EE*L$nB3vF{uE<)68 z0J`ASiMe?Tx~CG`TJ8Mn8wp#yLGL_Nkb?t@?n`X9$ohg?1_PSNPzS(&TJ7eDw7h8= zv{*8{)m>i#l*vCdVeIdJZr661{8Rp)liSxY`E}~Q8~%mq2{QlJ@f`;p+@xbt7D|k= zjU&2+oaWilxoz7mYhYmkvs8#&%N9JeaMWKMhx5N!x`KUzks{dVS)>x*}BWxAT|89yLaoa|Y7*qSX zeFrI2W7RJ)z5s9+aw{(%EnDtyA8o*xE*3#PbgvwPo6Xl>uiDAHHY<^= zxyZ$kpN*bX)TOd!L>?G@6dpOHBvy-MxQ9!NxBQV58*AhTgt-5MP&!*SrLa3Kj@RU= zh?1}Od!SKkQ4zID(qU@tcte0v5r2?^f3;Qj_rYMd+=juLbb+$fn2nQE zS692tFvhve3fbqhXxxhbv38!`PG$hpYYGgnl#p}Jo94VR44EZIa6WLrLk|oLpeVDX znNwKu0wr4ZIXt}APA^xT+gjOcx38FVtITJJ#N_j}FCIB43<&FV&JEfFiV z?2D2aF)7J`8rTiLq{ad5_XEnKum9Up|2qCw(t zErH`6BpkEfJp<^4rPWQkbal-(H^wI+2pTG@t$ag~@v@Hk+V}Bh4fMyx5~zH*1b!I+ z4}LkfRi<+T0UJMHO!F)P7^`pt5plit-BZLz-9p!J`r)MUD9Fs@5k`_aZ-BF>Ek`mm zZ=7`Rr0 zQ37DW#gm^PO#;jqr6NN4r=%#@>x6t=M<-N?1HstPf1Y4>KH4H^&-(-q9pfJ@4Iv|$ zr9_yO)|{BPz6~lccx<|EbCVag{$^bs2yiql!$AuWqO5nvFXxW}{Ll2|{;}Xz;)b*5 z+u~Eqiwvg6N|sgK>lIr9`7!(X&IdWkR7m=+F;oAB03*X8mA-2&Ce-eX*X&{(454G> zp+beRE#z;)%ooL;!nr^|f;*0W$zZ5ID_6|jC#5Q@e0S9!x^TVMba_SR6zLJGBuy9z zQUQ}%2fvfl{;;##TzTO#DS32gHzqbcn?9Ak*DrB_?k61K!jafA1<7*f<%I@`T949( zM<1~WaXECN!O6+!_edHQklF2x&l*9lg3k))hhK$unL{c``upRel^c$k?ja({NxB?@ zFRGzay)T_-^Z5g~1tQ)eOT#moQjk8hs6}=-e*|{*?R`4~W}4giPN`6P60Q`w>1ed@ zP|N;jOy}pt4OCI(339Z`xuzB~s=*ptV$SsM;sns!`yUDa=JKHiU8FnYEcZa=!~>4e z5A`TfD)_fUuzmPwoV7p=olK+OpD+5Mk~noiL}(&ff!*1L(1D?C!xDi7TE@{_^@MNl z{Vz{*ijB&c`YJaw_k*&_ z72jjT{A2pB=W7FI_wVOLPFLU5+T1xpgx<6!arQalShYS`j@ZGnM@t8<^~WmeIrrzv)S z`nwZRa%GdFI)Qf3mCT(qmruq|pDLGGHMn5$tLW?L5V{H=*f+C(t}lNut>}vLm`Yp77 zdaqH^XXdnJZ?-ZhENhi&S=%afEca*XgpP{cS!=d{KQ?_0E zW^UF!1=^c_ltdEN`^P7|W^f~C6ZP%8>~3}Qa7X@FD3k8yNHJM@{$aNhsUb?@^RH@n zy0D&v`$~KGNt*xJ?>jPnSLbnjq~Ry*w_2wG(Zt*ZCCHG?RgMGd1!ewN4G0nIEhumF zAPLyEaHjB1HiYl)ubmPtgt#HUesd0U3Q~>Z`SS#A9;l5KsW>gaUsz&pW&cZZAM25& z%k~H(_2V7cg_qRi`;R<7F{c$gPj5}sz&X@hNEc6<`_?TTdM#-6Y(T<#>7GIKr&v{@ zMXY`*mYbvY-rkJPs~@G-g>7Fs^XPuf?(*C?}eGuizZ~tOS0O8KuNS7!^bS^%a z%Ze?r^?Gix&}p_`-(-q=ThV57boG%Ys=4hVjS~ybqZ*Z#D!3}-Gqn>o}{JH zIGa6tw(86wz}kv(&c-1ez6}2OI#u9+8Rox~c@59Au}b!Bb5B>F*vlGVuQTO-pWqXTId`aMMnQ}`fG!ZbH?+@ea((h(tIno&}iTB&I{^*GH z6+9=Sv-2aI471(mMI1a@`}}@cFgZ1v7hy6=GfwgG4hFODkaLRBHHsK@F>=(R?+`hT z&uOmJcSNuIA8E$T4mxh9`CBzgE+#KC?rb3NTxG{--in*o`R0B;XZM#NJ28v?5z=jp*p z^opnTU2h%d6#yZtnP>b3)JGM}_^mLdb5?Rb#!ylv`nUK`tMW-cYvNJ|8S)xObfu?_tMk9lPz!%j1#u;h&n zmY~gKc8%@VE3Jylx=)}u&caL>3!fts(_#7>k2TNhFdVTvD|{)kT9G4!DVy`^)R1BW z9?bv?a@YV%`7t0vQM?QQ(_R{=D|sox!5UHg1dvhTFf@mFk1yW$!JL5Q_3794JK%sXpG*rzO1&zb7QU?y#JFiM!w|IP> zZ8)(Ty>^`s;CukvX(g>ZVZY^BkpUs14~Rl19&OEXmj2oGq(yZ3+4a6Jx*YRXY;4sy zz0BIP^fsCgq2kaI6RONWjet!h(Z{m&Hwz!O<7nYt@eJ~~9?L)ombiKxV@Ke#XWmzH z&y0Ls-cGh$M`hHXY)A*4;`@NHoZ)I|N}u8wrFjrfY;BWfNU@6Xkz=Tb9IUnQWxTWt|M)TCARoXh{9DRxNM|M&Y}tPd`ZScZ?PT~hg` zHcI;jo{5WBjpb|i?YnjUEE?0`4%JUJ`|K*Yu>0bA96=SY&`q_G@*&#^t@yFttVZ^8 
z&c!e1vM%CXuoQl=n`5JE%k>XffutmOm|@<5PD*24Sm>*4!wK|7fY6AuBYAP{L8rjT z*$NIje{FG9Rh7Nl%OT6)Qy(#A@jJ~Oer-~(ci0WKXA%zxKI5>&l9I(y7aH*3pl!Qe-C-s=@CaqacU}*_*#fpXrD*sjTy&**Zjf zI>U-o1_+L{tp@|_H$`fulB7#nPV6jx5DDDre*n`ms5MUuA}hAiF|FBso_<3isfCcm zdy;Z(Q^$y`Kc5n}R=qghsp%O;F7k(vZ0cAJ7$=Tq-+}z-$deXe|B=2OU^SK$y5GC=O(0+{2}NOU{U@y)x-VUs z)KI+l2#u2lwCN{<45bNE$=K-XwL*@LNYJJnSm*;PN;7sDQDKgE{wd(1Vz40vOdGC^ z>x3SgJ^?#DGf!zX&{qBxu6`XqN9_p?~jr%%EHtszS*PtrVfc-~Xd z4g#c>Ag(-jgW8l|ZL(wGjX$aE2jP%m3=s&*2_tE$#zv)zT$uv_;0i8Lp{4%9H6Wxi zx=;UZji&OVRq&Coxz2jE<8x|~59{pq4NQa*9m zm9a$~Z&(jTHC|J6-AxxGj9~du_ za>Pyd%#^tD=t#2m4zsD-{sJxP6G^Gx;lTVp{()P+md&8$yViUhlob0U*b3Kz&Um}= zID!u+P*DI6(pG@;4x3PXYwuSTSd!MIiNuqN7uRBXi20Fkx;z-RGcT{4ox+%7G&@LlWbK&`;7uis6I_1I3*|^2(C3ho2d&`JXo>02*PE|O5|TpQoPCY z3=SEz>ppLdmL0pEfxPl#e9iirGxwqh@Pwt(z zDBPy~B^8%YJlH_j)!9-3yq&Rl2|`$U)&03k4rMJ6@&#kYFdhKtGY80+l}R&{`E4{l z&MJG5CTy#J^gH(gSEL#Wl;D05>rVaQ9mUT~X-}#XJtsv9BAm}#Q6##=;S!GZy0FYBu61w z^`D=$aEHC50(&hWmB0D}P;OOY1-k02>eM1V*j%Kvt+tPM`&FJrIu;nH!h`MB#{96q z-*sK8Y=M5sdsAHJG45&24PRYi-b}l3OUr=ex?h1t3*Yi6T=zYQhSD)7X*F6HKVI&X zY!@fpuIdu_v-*m(3vFuwu~aK`OG{;$ zjj7N(PqP$56`X5Tg4*Yzn4F`W%hAplWNN0;kK?JnJsYmg?@z=OFWi5OrLSB{YSamjW{E zx_|Pu(2bNoJm$!pRfso?e<{B6BpWX62^b}1(#|x_xr1B-yrQVye$p*8#F#>cSY*7n zM6=aR40dMKn6LT_rc7I!e4k>P+{II8d&V3w!HjV$Fua@^I6Ct#QMjA;a* zsW^2a@>J1$WQ6HPZ87*2at8yFRzd!bdtEuu&``V+tt6N8!y&Q=VM>Xs&Dm@&ay*(8 z%7bT4uD@|bm3ZkzOg7_v8k6s^7bWz>>`jYV8^k>JyEK(l47T zSvy&;A5}KopL|a~86PhC#sv`AHzB$7;`iAPzk~E8E|0W~6*GGR+&lKVxG3hCF?vX# z@s$T9XVTK$!ncPHG-k9bb_d5Tr+?S)o1Af3b5qQMPdwXxkNP|Ps2pW}uB3WGm@5*$ z2C;C`{s4n6CT^VZMBFDzc>DNEH*Z0Zjs3wp2h2j1lqYw6Em2s|4R6QpaHh2_^}hie zChlA(L|99EFr^DnDLYm+p>iEu*DZ0$N(ZnZp4$@QqTIpff0Fldwd2U2tAx?re;2ZF zf1HO1QRgm=pz=>R#6l5=zc9Ju!GIUl7?^|?@KW_&&G$q}rX{LMkOh*YG!5$= z#Hn0SSQ7zCoJ=(w#q*Ce2c?IZ*N)spkSjej4xGI#U;2maDjtHR{K zY+M3lELebl$#|~y2f@;V*jk&k(nZ9DRwgqVhnUSs-|O_d^8?da3I&l>@Jp2 zio-wXA&2FePBOInR2Pt^9hYLa8buzDVC$1Uw^xc z0#VpZz52>>h2%a?Q?K*dRHRNnc*JgpbuS1J^^ogyJ{@t>wbupQDg+kN$Nb9UZRY#1 zxUmYS@)SbI(As(w{oA2cx3!OUtgN0kxc2tq)DL|e`-WeJxG;VWQU2;g)+%HVgwO`9 znj`i(-Nmy~xkL0%jc(ZiB%3xow6w9NWxSt%t z;|VX)GNTs=*NBYzfaVA;R4J}9E*djj)+_ z#YMf)b(QZst8e#6nLB%a_9w&T!!85$w*uJ~Tlj-V0Ey&k-d}0@n}+!dFJJ?+W(V5v zZ=|32av!a?S zQY0NaAd4T#aTP#_1vRWM*U$zbR{KlbUK;(1O>*sidvtoxf?&Sd!DIA?P_F&NLc3Kt zH3p3Xz#$2?qJ0A^MMPI`nIY^}!TPL3Zigu^S3;8eJ;H5zn7QyJK{pG*Jmg)k02yJc zk?PGa#^kgXN8nh|L6R~-D~@u=lmSTM6Nf8Q`8*duB7ZkAxcHp>==HGS3G)#(HZ1F& z+z~()^KRaNGN3pkuE$GJ7gXf`{!aV16PlRoEzdH`$y&FSWL%BCug8M8g39)HQ~#nFAtAcj<;cvuiGr<(@L z?pVzJsGBqwCDC`Bz34;Vy zg^QAwgv4fO-A(D%MG~uVNXj~)@)0k_UtH6B8oIh#84CY6T(-a*80NRqtt0f>3funf z85hB89Mf{X#qtgarTO^!E!W0I0|oEw8~^Aqhs`@cL=XKPC0hjmxarSm-?QwuHl;!; z53d%u2@vDirSpMq343Y@SAH!ZNXyl+A_f;Vb%LaGaS4gej`0iIWBF<;0P5Ag76F2) zKcs5giM#Y@X9XO37-#t@;QPJEb%}>0giomz_Omo`TYzX(x+t{iqif^qt-Z{O756l6 z#(z>Q0?PtGl8kDqYf|+!GH0daw955_lpfC87Ao2C9G)MA&-mIrq*RG5arcIiSd(OH zy3ar2WjWN7f*2L4_PsWj-3d4&+{VL`rc5N@VIfHTjP`mAW9kpWIV9Q>7_*ToW`Owe z8FS@md~y`ovj{iNu+Qvz`*Y@VtS-wpl_tlLrAPX<-1U!QaGVWZQ&77wF%e0)PW|xI zi6EO+{ve{bPx2-0_w?*6uXIKESf>T{idx<=#t2w|?Bqo2H(s#Sn}j52cMK0t-eok0 z7bO%wf5a}t%E)A7-fYzxG;75c+{Urq00(8}=e+Xqg-<0z5` zG9fGxV|+(j014lf=8CEcMDaq#$^6jfr=N#ebq}e6pLGz!oN;T^@x9-V53ch~b#c0F z6Zdbrr4=6%N%2@&?>3&jfRTKf|L){z3;OeI%9CxrzBn+?nH<87JB?-Ije9{YE_iTF zd72}9Xn02WHto1G1_({+p>L9aWzi$^%L}s-BBkcOCf*Op7*JR#ssN!-Ohf54!=4D* z!0)t>+{*Pq`S0s^FXGNW@O%gs;7+ZS>YCr`*Q_f)xp|7sl`(zq2n4p4Agz>+Qv<+~&?7dS%{X|_l$MIe2Ub&V1 z8Mtvr1#~Ex;j0tc`Q`2*oes4bp%(yb08mTToNT~%XOeC|$HS!tWKa?e3Ubse@L~yl zRpo-riQ|7~jBmzYd?28||4-Zr=#dhzSsECLNn&Wg`Emjk#({>|S(z`gDxU7Q#j>*m 
zIhMJtv?a*>I+dvdLV;JK@Y6i2TDMPJyjtC(+e;mWL8Tju-=9EYiKQR?x{Z0jsU@ctAK4pL1a7fJ}AJ&SF3{8YRO@68|t$UgGh0{Vna zmHCt}t7elGYo6xMSX7@X_-#Z!n$FWql>!`m3Z^YkBzlV8qi9Y9qU{}Y<}?c%Kc+B4 z3K*gn{~OK7>cX@)4LKikMet3k2;2!vreb+c`UMNs%JU1U$jK#>lwsPU^A2st?4p_S zUg((~j3hgOUgcmio{?=`R%HHLPI{Hv=e=F`0H`0oWY`L{{eF0^59%@!m>x2zmFg{y{4RYa4S=S5 zj|AEl=+Y)_UY^}NX72EJghIcGnPO``J8$-}tZCR6Z8 z#Is@h*yF2!uW^qGjgk=;!-&SP^Ftia3AxYs^3bE`WxO{+?zM zYjX-9q$}aUr2F4^DyawHV+(>#px?>CA9-<73*0$X|3W5Xuls7{DG(uiGO@cFTe<&| z#v<`+nue32^=$2l&l74E2$8__v6s7C9@b30sV-wYT z41fmrz6D2ds4Nk5|2a6dZ3k5MyuXKwAc1t@qVDlka}rf_7at-_9+MIxj!Z>kEHX7- zl=(1BO6UIKkybz|1Rg_UBfheP&d{SmqbLB@3u4)G)2V@=6VJQ5l)!!DDVSoB29mlwSgHaq|CgEW4l{d@xmOuUK;E_ z>s(e=a48YhBX=2&N7g#xo_`-YD#W%gmz@nlB}NzEqG}nwh!e_O;7~OXRy3cS9#Ac* zhH%I9OU?&?WqmUV+yd8D-<1aJ&PboU)O8rV%(zpJ9_kJ@k)uDzeYBFgPgYRg*9Gty zN%uU-W}#+HVaqsMHJWI;m{SX?EWi55*i&vBT0s>VjH>{U^VT?;n79LF;o8+12m+Gbk!kANuegV9=6%XHWA|mbBS*^vg{9 zz(+tvxl#&ea>oHZZmE^~bHzj@L4Z$q9EY!=w!HnZ9&w8|_5_BEKX(E-u#q%*x!yoO zeyu@G02=ICFFi;0uESqNPEa{guN#xA^;o{;@5Kro3EKtInaOKI(F>Pb5MrLGa;H$?#sB!$l z66T0OL-hM*xK{Wo=3MKGb-N|uIqik_Rt~e{O-JPGGXpI2X&V)6r`G%KDVkOb>axH~ zNZ~6lJuGuklOuINdqZK>dhhLo)7h(jtM3KbI-@#lZO^n|7A|QYX&Sh8hOWW!!Y~W3 zH9KA6tM{mV_dgru-zpLQC)gv>gQB#*zW{zaAT5K?ia! zGeY&)S3i3z*8lJdg6+5fwL@NN*T`5jc);X~tCCG;?OqWX?hxGS62IYePif}`gNmTu zoIJoMgg!8+j!Lq;A}0`Nv;KHP4Mm-$bo$eCj+p@f8OoN=xao3|>W>Dq<07;<0LIvzvU>=;5*-trVUFp!U^DH!cvx$LPQ$QCzhR2)uej z62!m)VYgNw6Wtbp7?$}z#Qvgb?rm~){%12ZrgRM#CAOQ$sLTxQvQRun$psTqm@z=M z(RTf0FVt96R_2M&11ayt;?g9&i1#-I@kqlOWU?*sYQ_Mv*ZcQ)#kQ$Ozrk~7pbadT z43;4)8^PCp*(=+V4GIgzd^$zCzbD9Y=r-gyznL}}GTt1l@jG!Y%CVA0Sue}H^`YEE zgjtR$Tsl?)(H^3lPgyP7*R6umCpPz4)Z4vU`;LI9Ps>85{|f;l68H4J3P_AF^KP^0 zis0j?4mDgz`$S=e?z;LLSi7=aChd-YXIzRJ02KqZPyJEvxvuc-0&@mn7zqG3c<4<( ztU3Y!WJeuGdnVY`;wb&9cwn=8TflSb&p*ZzOtH{?fk&|tuQIS8`YE!%sKD~}&{|;| zz_gu+xD+zV7>w+c&jN0;bQ)N)2`n`U?JnDroQR%1t7$+IC4nFtMRC;d|EQ&UB5{m!OeuX;1h^?9P+H@fp7lZdS<^yipqX#cWmhuFKB3L=H zvjM1)sP%Xzt4%6`x?}oxuEeqzk<>E=^R|KVAdl^n-7BZ7sG9b+fzsXU5u;}HglvMZ_z>N6k%ud0lJ}7l-bGhHpQl5xgNK(pf^~l?KI5Czel(g( zQ^^28=7Wb$9d)57#Ns~%fRZ3SO==S(Myg-tY>7{>t0!PAPrVaQ?A&}Uw`5PoxY-GB zEE%FFwAW%2w}rqCOaj3w;0?+M%~7y$-Q^LI*ek~!7ZXNKXs>UbVdAo2oJjs(I$1Kw zQPk#!3oIjI$aqD$BHCR){|296TAyPg?ia6&j+$!)ZJvEJ8B*g2HsuT?kWzTwSxwN&>!9DZ2$ZAJj(<;UgX1W@JUN`@)v8q>lVJxa1h+?{5sIbumQ}^?SDvth$*+XSO`vR01Ct!4SgHM z0YIglsR!Di4gdbC_5lm>ARj}7rHf0tbaX<5lMq1`2?$|Ap|c%PSGv4OLWDRQTMo+w zK~-NE-Jir9B7OMqYds!+7*H)xv`v7Z02A@}(?&dIur#Z2Ap;<6$#msHm;#L}kH-;} zRuG^IZ{1JV=Go+`p3WQJXqM&=ulB|}u_95My?|;mqN+~405+mX^Ce&!6EE0d{`zRa-mvmJ_kK`$Mv5@;9Yp*wAgH}{ zxjuP$eTH_KdFGK|%vq+3aLjRSWFY{2e~5qITr5YXByWx;Ir-1$EAo0wK)`KcD#qNs zRDum*mz$QiI#{~jH){N0Zq(FE#H7Nj4uJX;T@cSW%k1Z7ikcE^oM<|Za`4)jIjh=W z@eFiN5UA|!Nh(lXJ>p!G-}F3?V%*Opfdqn}?apj)RFdV5?{%YY(feU{x}g7+f?(ir z+sl`cF^M#F-4B5heTi?d5aO_(P6L?#{$CVQ1|LKbYrmC5W-H!2Ql_bLmIsDjYzRj< zpMX#S>M<_V_$*$tkQj?Sg@p6^t!b0ohw!RCXwLX^MYkNG6%1?j~E9^~D3lNHh z!ng;_-a0uM)k=kxP4zNJ^UDP)RWW_^)?C_@00FL1C>Mv zrm;roE9)epkXKSTKF4ii4#w`BM#np;1dYs84f4lCWEPpyhj@9jROXqVR4oHHY-a08 zdR(%NX7Ylp2++GkP$k77SC>Z`PVSm{VaVoaya6c-)x4CerHE!kVs9)ms`qPbtoP)PNd6*x^s84qL#}A| zz7i=FyYb62Z6+qBkvsWX{vS86B^I~NeqLS7E4a*HGsAJ$t00E38l38eC3)^&aF+fN zPI43!%QdIMu4nYSnmeFF5FyMq_=aHgg)sCDCi!`3iyGC3d;k%IzoSBRdR6bp37FTu zEk!ne59QY%QlO818c*GRN1lUdO!VT--u)>_BM{h8ekk3=L*VIMyIO`QKCI`p!CmUY zQR9U73^h@y_r)hXLvGe>yGF?Jco1hsxE5W@eOcNEN&{X?iV+4l5>J{YYT>o}7cCX% zg(W*waK+1gc9vrb)D)R+fZVUKRwY6ISA6sWP)B<}!Uq5b( z&TWPblNk;{;)!}M66QiEQ5g1cabCZXoLq1x+In5(JbCEIdhZU3zRUW2S>rbEn-bL_ zfPl9YJf{#!B*6CBQ@D!LFKt=OzsoBloAaHewHlF`jR${eq+COG&IS|1uGT$2x;jY^ z{#r(xg|shu*7i}*QcJMlffGKozD*(W 
z<A7ZI3Rjm(c+k<3tTy?;^o?YrJLw<#G zb;V+?KQ~gj;7H7yGzZIn@)ZT>;X8biv&BX^$o3X40gx=b+?GwG8#J9(pTO-m!-x-U zc(xO6(XUT|vYU}r!u2swCwNcqqKhSPF-*lcBOm2>^@~zk7tXu zGP(LSz4z_@$g9ShZogF)IeHL*V8v1R%kQ`q$5+~B`z)8jeArv1qSjdxt?dDHWMNx> zHs|XcW{|~YO$V*f(^lr?9<%2s3R6|<3z)27enjQd1SZ9JsjlqNgv(R~d9Mw`BHTmw z_l)B&@^X0lkGRpVs#5OE_C!S?7stVxv>0z?rmJ|!M?d5xhReimL-!bx8=TG6v3xkx zg1=nncff`-PA{T*x9w)z^|*Y7)uC;SL|!MnQVIK@(dm$eF?(clv-#>c;nHzxlMPQ+GOh+S>gjY*Ni!3{C{AzI_|GgpF#9(4x<1-f>DtLAs~)e@N5 z;lcD+xWpAhDtQ_ZJWOQ<`ep-tTTa481m~}yMs=u7fXu!qMg31<38uUQ?d7eq^V4Z? zn%TqG1Y*N0W(E;e-0*VEeuTmV>s$WLuE(tT9cpoWEb{hzK8^W4Jo!nT{$3+&%QK7Y zYBKj3GiauB7I8h;5b0ZQJhtT%e*eAzX*__8EgW<2?CcDhxc;U+8mUv4`kRJy8=bE85z6WT2n+%xH3WSZOpgkr4eT zsQ~-7G6hqc@-O8%@_*`JC{aeAkoj!o?ys^Q(9=6IdfLEAf-h#*d|U+>!0S`Atqyp? zV&dYml}YJKc5_Mcaaqabhj$ThHC_!!&G+wL?jxMd&A1hRcjv+%jkmv0Ff<2U><0Ft zXUnpvuCQ?DHiKT^LMzsml**H$XP@V$d@FDMn6Gf%H%oVP>&yEo08qBx1 zib8foyn1}W7IVXP2NmIb_iqKp|J456ruzS@{t3QA2vs6Fiz!ab?M+u|yj(LgBPnnu zf`-obJT(y?DTL27Lg0rITwaWlt&gui7v$!BE%EGg6G+2@58V6K(zH8UUv! z4SV{ozFQPAYtlWQKU67-Fd<)<8u`h2H|o)my_m4GZj?)2`EaZ3+^=81vN%(cNtVHH zsNL3I$3W5VF!d0x@B&931Ye==W;>1LU!aNJ>cR&C!-s04<1mFovWZAbTy6sB+`~_| zH6#BDnEoqi=s0hcqc1(#yRXEw38$Abwzsx>>QtlrvmJ?`uV&6Oz& z42pP2%D*n0_tPiK*Up_XcBr$z-3v+h9>daXak0XPTbAzIWLF)aQct*v=e8o%lS4>YZT(09 zE*FgB6~%$|UmISI7X=E+z=I}(%n09MdtvHHW)&TfXDcqQH=(1LoKQIRc5-l?SKwSC zBtvt!d#H5+&x6>lrD^Wy9vdNA4M*Z8j0f=$F8@OD>ay<%HEM?(MTV(#c^{9DmYkE3 zrz4JHyS)gZI6liV@S_H)AZeE)9EqTVRim?Y_9N%-0_U4-4aued9*dR9AbJYG!%hCUw{t&!bMyTh6HhYP`~MGkzb@cmV=)9Ss1}1-We_+w+Lb zUWf!zOwOsnnL;&|Svc5c%O?q;iR=p_xm_(+*XsH^3kwh~LNpt0cql^LC^@;jTrGZUKLjB+#6C4g!~2Tm-!Wfx8bC>$WK?LNBo;mcCrC zeLCg+X$pU1%`hg+cTez^h1D@!h`+0U)8cxN0boY(Vai9U~KC|k)lAanZ zuie_~$O{{|d@nN#_El=tl;^sCa(odozPKQD+PP6P31bQ4ULk3r=QifuF)9(?^H?^Dd* z>$WLf2A zC?Mtyf1^RV7T+;*4r-YCwROoO@c2XkV>$g(rkrVdB_;Rl2+?WD<8xejq0ePKk7uxS zi0DDGV`gHM&$OQHVY|)XNc_1_wAV4oJhgr=-ADz>Xgonb?QT(Sj(&vU8wtTb4ss04 z&CSh6m$$SiQd9iBaH>JF>YQ;a0#Dt~nj@Y!W8;8iBf=lCS)>F6R86+M{&HYmSMg*s zJN;X4hY99zWKEwIMch$~4B(JyZO4{)BKP}8N8RP7?M?gPlZ9PE=-s#HKZ_&}_dZx1 z1#v24Dq)Px{tszy9Tr!!>D8>R?&_}UURCvLy*T8j)!)AAAxZ#gpkSwfO*?Zs z|1NPu{GT7NT_4dOkV6;q_-$XG1)9~K5vhnWP5I9ckKaF@t|(AV_z*Y2Cg(q-Vq@Q= zb@=XCQ=zP}KT2~Yf}&llJ{07Iln85Y2dhR#))aD!G1kyHiFagGAG*#({HiF#K^tZB zm1M|#o9&Qvb95IAclj}q=_pV^Lx(x)X+;nxnIsxFXndRC&Cdj%kIdevbXjs&NBQ|{>*Xc`-1?HWgu-Uvg(W@R5d)F1p2>I#NIC|% zE{i7JT*SYzx$DWj-N;R7!1UPhrnq$SfcC1M79=7hL^^agx;h%C{!4~d$o8Zl6T`8t zMe@uq0~bs`s;mU&@Vl0wfzzjddx#Ek8i#`RKNYGpkct|=Us3g_gY&q3*uA)A(>x{0 zw{6_Dyb8zSKwsw5r&~DaxE5Mz?MO7*NEc6pYlLg0kxhjCVStZVQqfDl9$aq)`UCj_{-Y0zk?YeBuDL(L?-Sib zfp!k7T#(d%8VhT*0$pU00?@s+!6!6e>NN>tuUZ@RRw#`u67=tX%fh;bM!J!(c-W;y z-#GlwO~2Bk75qpX^O)tFkGHWiKRJjXvXJs`vA_Li7Q`Z}Wo*0db=(dH|B&;KFo|-= zArhVU%?+WEt)l?p8vH9^k5Qq9Z0W+I!^4yES%j4OZ-s>YTW%S>kr-QaPF^O`5dBvp zp2=~0rMcTbAJ!j>{F0*MN!g04Q8aMQvB3lVlPc>~R(~C7%AiI3-{#WDPB?lTTD%FS z$ABE0sgd~;v!1n)5w|tPyokq z@E#dOf8k1C#l#qDnUyT-255*d3^RsjdN-zb=DZ(T#}9vt2wg5OiT16H_IB@~{l!JN z2U{Y;a0LJuq4$-dacS>hNt9<$2Pp6vlLPP!#iNht;mGp0y;#qn17O5wR1{eJ@4uG< z{sRC*`CJedO!wS9|8H?ZUk^%HHiZ4>^6i~38C?C&xyex5&-}9r013gJ>Oe` zZ)?Se)&CEDe+$BB`S;5H76icVv0v_gDXe^EezmHMy^NMKb^!{DoASK0D*PLUR}v-l z9@}{#d}Z+%s6||G@MLIIVa(rc@>Rcw^nHt>2_?7+dB}+{kC0xPK8VcN~X}3%IF5iIZ zR!pbjx;oJozm=ZuElV}F@Z4yTK#m9NlOfF&y?kdPAA6ol50Dj22Jea4S6jxEU45as zoQ;%*vEUCpp1uXg4TR)P54p(WMpHRGMi*jN(>8Z|BAF4RQ16f(|Ji(>3e zt7|+b=VxqmD;pYg?Z?Z!WDv3O)ay$cO02+nAt69?v>}&bN-c4amRZX|ZE@E+jibfZ z{zxoNy+NRoB8u;3R!cT#-GInhrzAc2@0&yv?}={^MdEJGgk;yn?pm6esPgs0)* zLkF>=@ruIhmpkax9-0P*g&BP_lQN5c>W9d?k1qZ4;>8!D4@FHq7Gd{wE}y;ph&;4< 
zJp7t{2%T+M?XcOcMZJ?!U*BjGED|6f;J9Z%Oiu3n*u851yl?vkq5Zpk`=|_31Bqfh z){M33*}g6#Lj%^9&jTalt*8=yR@Fy;G=UFpwDF)6n*E+|ezBZj@9ut8Vg(CLDYdJGOJe=IS;9o?xP#C;b?`Uk;3n+%< zs0G^_8pZb9UVCp$-}t1(`~0{;k7~kx4ub|}{udTRutM+yd&RILuFAu0QD*PbO2CuZ zUG|@EiA$rGQ$u-{fmJA5{x1gfwvn^GN35!4kMMF??%#_i3Zj?Z#nP-9*1K{G@8r*O!SfV zL;=jk+j*BHzw#hn=Ho%>o`eocJBjLkmtBlVfJkPwYEkT#-aWs$M6-LQ8AdT8tRy@q zu1a1BcDOy`ZilqN#Mg>}YLuGe15eSx(R20>76L}tFLmZxTp7Vclb1%gP2f#F=#|=OaVw&PjMaI0Lz?%ceLiyEAL@bQSLx1IbCd&Ty7! zeF8b7c-cAZrN=fk{k|1xA_{kM6e`G!i4A9+c~Lp!=%BiJQ$Leq z4bEIDE6TCGon4{%yR%KJB}D)!;wp{Ad0pTGgp*?`s0wf-#1K-Q3K6D4IyBQ8ff_ zVvj#Djg3X&_)@#A5|PekvUo5z2*niEQ|8R99r_X-npNn^ae$@7npqvB%l#TsB7 zDYGX_T^0d06zW#x2Dr`Hg~<~4F+XiZDzEubctyy5Lb%>WzoXxIb3PwSF1WHeQ3Pom z(bmWOQvPNAOOub7!t-Q244+;xG%#>Qmke|MuDvGFlky3Dd8J_wD!fnA8OQz=8I%0o z)yWB}o{Mh>e62hmt3jwA4QdzIi{w8?B{SGF>p=Z5C?M5vQPN`^-LaCO<33Y{F)sOX zXYi)9Nsi=QL0fWaCuM+(^n}aV#lo9TY;)t2N2CsvCE~D!na5ru;qRr#W7}LVIScs0 zU5#k{<$1xr0X`cqn7Ps2+}rc3j#?e(qvE=~=DUY{X6)oi>yyIBWDmYKZhooY_prB=r%Q3tCj!aqnjehpZs`lln~1Q?(2 zJPRge5bi(ni5|=VUZjGXIc|Pc8Qg*og+|(q>*naGwJ;YCXfhatBpIuLB;WVj#g!b+ z@~HRgBZyZqtJfna?BW?WTrEmo#QORjMtVQMXO*^AF z9e~A26NFfKIlIULD!yg4&bRucM11_)UpgS_U-J)z4Qw``gXE~$YH|S4}Sn2q&8N$7)Z>X|bRj-;6 zzUDhq5(;Q=1a;Pma$?`9L}`ZCylZqSpy*o5m4K&_km=xFYz>+o%!@`}WYiH`Qql9G z;<_9g1G^x6cb(YS>m8D-PJLZV6l=EE$Tjtzz7k<#=X_))%|3ryo5RcDK8|K-0=pHR z+jHa7?^vLooXkN^9ZL9AXPlngNpwG?`zJ!R8fs}*t|>w_o#|E$oy zTD=%2TS#%OaunQNeVhnLMC4Jdm~N*MM|1#hN3olidg4P-!=@7g=L8oh%FCEApAsI% z3XGx^h~;(Hq~2s`+fD+pYhIOUFT5-4L6IOc%*w+u0wtGm;Qerx^G+ucvmgXxVzPzy z1hpx-WEfp`?EM-^5S09L?OQ9#ewX8W#D90#!PYXWW9v+aY@1|U=w#b%DI@dqMO8qm zu{*o`XJ3LTrA~(#vSl`A<5#UkCmoulCpNTBDy^fXyRYl~Z_tMpA~^a8O@r;DpFg5& zAdL`K>~j6289^ipwHH2XOPp&2o2mMsY9dm#Cp!xdYvj1$^$D$-d0WreS+xBJod)Z- zFa#r62{m; z3&Hgc7&3ig)a3DjHLRr-fO$|rrzzJlDGtex^3tb+j-#K*1(2y5>yCypD_#O9bSAmR z`;WV@+v$`xbK4L<=(oGj>~))ltu10B<2O}CL32tMLWEuWe02_{16*iCV3>Rl$ztNY zLM5c{H3x5*)FA_WB{ZdwKOiGnT?@-Ny3M>(IylB{9qbpmnJ15!h`uRXoiw_|q*e76 zWkUL9nkALW*YDn~(mQY8=Ro0AkeRe6hce&Y6u!2@4}rbJvp?$Zj)q*5WVx1*Gn5!> ze1_}X!iwwttkV#A)G}@~7fv}wkAORa{tCf2VZgTgnpDT2u^05HwQmMIad~yW`4$(A z^z&KZ>kd+8@>*(V#!D`n?9<5Nb%EPsV0_@fhd#3d#uIcq^!=irkdEf73OnatKm9jX zFW%rmzpN#n{26*meWjTF`>Ff=1y=t$>`j))SGiXo7q13u6s!r|^6WfYD?RnZw- zB@bi-9?Mv;euczY@Jgr#9bLX+-m1y;QZ1x!XHZsZo(_C-7B||Zz89Z!JDgGWG@hb# zY>5X2KGD5mNhbDOtRt~O^l;$KZ#1q#3TEjwWaCaSeld(o6sJb_yYHKxz|?0^CYt?? z);#aqEt%Ypc6}`23F6}#ZQ^zxNIc>;@5IhzHSnR8M)T&u1`cwUitMcdxq4k3kAd(q zZ52Shz1Y4-ait1SXhTC+S?uQS|3*?B6P1w)f23+{3_h17%*k4e;%h`5&tT*>z zCrUR-!RMf`EI3=>rf2Mbu&SYnE9 z0`f-WscB?@w7F%&50qTsZ>Qs3-$ZuOgA;hYD_+N@f~15y7J-6khIEQqs#2zhi9^%Xjx($^IkiNI5IiD|&Fpe~!JJX79Z=$zqGZwg+`Xfs8<-tn4EL z5YsOKw0(-oy8z%_g2I!|2awuT>o>^b>V67cW7b$oSqfrfmB49K*W=T9L~E5##L5hA zwZAWC0~s3X%l5`fG@EE0v(oovBQCz~lMpg2=q~dRs-hnDLMi5y79R9kSFxD~_w8mh zO%)41^egjNGR2{s?+^+uw>B2E8UJ*(?6F)|2b6@uJN+#}sUr%Z5l_XXTeNa>2^Ndw# z$1t^H1imQv##%5JBOP4ImL85n4R{)4~JV%(=cb>$-33i{OR163^m^6DjzPZ z7M~(4tRBJ$NaleO%ZE6@m3d{gb=Y}Vd62N`uAI_Di~g{+tzUY~;AP)HZ36i>TG0)v z!&i<=86fOng&!XFKUlKsLm-f#_I2Sg<_|wlm_oXvFpo>X6FpUZL6JE9aR4$&&>?uJs*v9DdwRUXSH<9eH2h{ zJjnLH_Qzgx~$J6gJX=6gcv2<+y zXfTwrcW23xOZx|b%Qt()nx4{tqlGI68$stVp^nRzQ;{(zRc?G}2iw6Vx}Ax)yO8r4 zLlC`zW-tprKEBg*g^!%{w)E+k6K$9e*3zdVYJ+QgbUR1t=K+A&_*ZK_%Nt-DgMT$? 
zPveAopup+cav*K5-bGf6mx1c#WYThhqF1S}1n7$5ONCGK{bzJn2Gf8Q%`=q!pGkU3 zn&HC76Wa^nU81&3*lWE&5}y4eFEW(eHrr zl03fqu5>~QzmvHiShy7HG1mp1l;XH2NccbUfVzyJ_2=D%>JfO@SoDR$ zcvki;_FW&Zhb-_NhE5PB29Ab=>Rl~H($oFZ%{p~#A3QA7l{&f>8if%pEG?bWQ-pra zRw+nj(iasMi(Z3cOs@i&lm~fW1IH#q8%r2%|GG-d=SzBe*C038v^Yd)lv7It&Ca7bXXvG%MB;90 zI-Sd!Ey1mK;d@SDArFhCbWntUy-N({yFZ#5%e>RTk^225R>P@d6~*grp)s@KLBz{F z$IcX~Gs(6`prB^9C964YTv`c#aHR!Hn*7o3;m-Xn$)*QcWg>@_10A-oZ4hh0SMNie z9plTxy}VNacyG7!E14x~dhZ;mlduiz{}5ZFX-NbQtD3JO+Fh;m#cQsPiiPQDPi@BT z!s+r}T6|075?pNY?XopXWYeJs`non@mNq%>DIV_YOlCpwN0|@Gr=~YmXW0@QACvut zW{&88l`47vu5Rv=6Su{yCO+iPy^Xp8~F!yI=tmH3_%+jzW8&L9s z=|>nQ26->;{q3H$Mpr5%Ry;TH&3IgFoKblV1nJFD3Z)9X27-qhzy4DDHHY`SJWvnO z0U74!{C?NPzkYi|3l|}HIOMR*YW`P za|8_7R`2;2oxT{F{Eszt07mMATCHw+)i{LtKA}3W^uX)!_jOWs;C4Qk86#NX;_$v= zplBq4+0auJ^YIB~Uqdu!sB67tTSEzo~o$HTyb^-6!1!+ZnpNFRZxrVi^y(=`{{T zXuyXGBfs1ti0m$xMF_6k%k|BFK_aNt-lRHT%M5lmmQ{-;AJQ@@%7=^yz6vQmWg^RL ztp6Q*>&_NWTFpT@IT94+6yflZ0m|;eZyo6e3dAw7C zHa2FI;1@K~mJ0gxYn2C?xX`Vtk#!?z=bm2~LuQkoo+hysT`xHD^Jm%8{AMf$L2h?y z^Yzb$rdG^Y9-24jVp>YJh&*5M3zp6{%43HKD_~RJg^ABG3)!ON5||nE^LNoPxtI<} z(lDuk>aDkM9wnG#4DHQS9!jqqqGvk!Eri-rJQ%lgPB)l^FXuXn^j;Rxe^3aKjvy_@ z$YZAThlqbz#p1^2LVI_?%r`1ukTZ7GgJR8D%cAG-F%F2)_gh+olEkaZnNVMDGQhft z(c}V6LpBawlz8uJnxmQM0rXY%=MmOX3!_eBZ{la|!wio3M+xCW@u6`tqoqiX+T9w? z15uh3Po193;vkZbkMNYU%78&9M>2yN5lJtB{J89MMTW@Pe=06NMl zh=a&evFxu>v&r~bnA<-*H1PPyLs1lEzh=)BHL@LSqP$?_@T+J(AqrVyCXYtU;7m+E zv9vpd$@Hj7o5&mo5nL*lNhy|ks!#laOgQH)3~=BK3vo} zT)fF8RR&gnA8KrLmWjVKD5_kmKYu`MsVk-T2hPp9YW{-k_L=+&2Wxki(h2*dJw^}j-QVdt!gH}0KCwP>R_4%gg zI#IHXN;{sIpIN_`le{{k@i>}e%gwN?(^5Fe`0kC}L|lwA!TIrYqVRdl7AorsXjC!Q z%wLetZ{)3QNVECT4|6*PtF!V=zp=_sgv=;2^4el&1nGS~B-j=DFh7pPgelmW`{R-Z zM+uA4i1(`bUZe7<{eNT2u$cbEc1k)t9bKCYZGZ8mDt5{sE$PiZ6yvT4aVQtJ4)0jR zt<~AO)_hf*=b=}lU$rt}u6LVMP{i+8`3M&9dN$Q*J}gF0;)`9?f2bOU=+zl`xzCSy zidI-99Z&y#9~WLJpYji$n9u9ru&`L?3{vB_XQ7%!j`7otq@x+=b{9;n+~!mLeN{zh z;vlA7g2|pel%F(@ib|+H)fG6>_G(|pn@7B1Xmw-xyJW1THIh+d$n^GA5>vr2W%Qvq z^5U^&ep~xCCENoy|I7VCyD((vO{vt`ux=+)HX%6?p%5d7e%Ro~2jEYO!?$PF%4i@? z#g+{Y>tsjmoOl0hWB*w}!j?>kjoyQ#f}WBqvkRC;vKA|$C6pd9+e&V_8`KhgudJIc zCr6XKL!^Sq7|{2N6YGO)26O#8IS@;*Sv)_9V(}pa9WzP3knSMD{c>ChyOKCgeBMZcE! z7e|?Ti~;>8T_0TB6GQKt>G@wvFfd>?Lq9RB+eGptl57aQCTKb6`l6fJ8CR^SWTQ|H z!;G0N-vST8g0+X*j;`Q=9zmoNR-OXR_k!%&B%{`f+zzot9OGP}#8}{`DJw34v@dIQ zQ-+30bOCDufaZ7q7`PGk0sI*D!cSGRZ5z6vAazDh1`M~H)f1h%IW!dLVqw#zsr~LH zWlW4x|L`BcA7VI|LHyZ3+}(BVP@99w(jTa|4m;Tk76P` zo125~jYy0oHKrltu+tGVG+ykNl@UYX+sa@+!3NLYpRcgnRj}{E;Br_tF|6SK_u`EE zbHUH`0OFr)sqISse?Ak*euq3?G5&fyn1D!N#&*!(D*!+UO9wss0ssKdN5Bw5{~iSMHw^*>Dhp41`j zYUyqpg7vKk>GPiVwH@#8MW>-7^sDKGyYRohD*Qvw=5|8x3+UT_d_v41JkQp4xP`y& z5kW@JXMZ65C7%6X^Pk!JoB?aWa}opkPZENQK|Qnw&Sq|mmxQJTAuZhfxI${htI z#nEn~iM@-u;#+Bg#i4itSr^een>o@~*Jei_zD&IDX20j;ncbyz##K0uc)fJenMVm! z5f_(kSs%<}Qo*~cKGKukT`(?6|4Ac>(XQm5GT{u{Ana7j4s$M^{gJh-9b&RIg@H9z z1Mc@O6z%M8U0d3z@SvWaJ8l-n#&hC7UdH{8_4BXE)x-Au7p*j5oWrKN9=)Nuv%qKV zMyfy`k+ipu3|gE1PnXWlCr3%ef&RRPmlr+h28NpI#mbES!6o39^vc+e z-peGD(t`Q!-@o&++^oFRg=zBckV{fRp&_4-@DvplW70HPbD2(q_(c>!@l88fUSCj^ z%IPOy@0oBG9sSMrq_6$-@Sy+L(f``6fp=H

    LSuxktC#dLQ57pOUcXg)d)I<;?Xp zjCgsQ4$|3uM&)DsTM9cIQ#n);0o-0(h{Q zT7!>du);rv=6}p>4B!LU5eU8q7aQOjM-H-~<4>zE)j{(6(s_rAY7RZEZ9nHytD3Z# zRY16Cc4+%Xtl$>r8zp0F>xM!@A15rWf;9vlI}VuH`G3fMCSeBMj@mQJ^8tWx`THJc zla$(|rsv=QE8qg7b5dY&SqoHM74Wt)mi+TRb%JdJ@yz?U-j@n{gCl-f#SmINC=U!r z&hGLT-g_ofNn@d!!_bp(U?#9_m?>PZYdke=mTElIjW+7UsJ!W9`s;fwG`nk4<~ftd zj*gO=5;4k%m>oBPlv~kbRlonSsy~w#e9#IB)9@S>XKV)X>tT)wt!7qE9?3@-`ccj*=Vxc=(?6%D8wQN?_wJh^ z?}%n+r!(x^zV(uJ&8zVv(mj0E=tj(Yiw6~b95Csy3J2J%wR-jTZbd^Lo<+VV=qcjCiy90#^6BL{#6B5bA#J^4B zyvTRr3wWodUS+#T3*P8_(i@F%9$owKSu+c=`eF2!I3VgZIr)hLV#o}U9c~imu~T4| zkE!Y!Gy+^TAGy%{SZSY3iv)~?+mfSIyXw-2UHH*ADUQZSR&=BB-qr5MoC-$^SE*!2 zx9uLuX~@AX{>NFtU>*j%$Ti$amk1jKBsX(+vSB8cF!-iWK+f5>?)rNZBhB?~#$@wr0_D7o$y%0eCP{%W8dtiln3`$aQU$eBIhxuyE z%o`jV#V$BS*wL`f_4yuL6Q$EDO!`Dzagkk;aTLm5W);e(by_X-(* z>2g;Lix(cq+FOAfTTYVQ@O*yaQENRk)zR6R{8g-CguZ`zMY_Z+4`(>4iZw@$x zT3BAg|3>aw#%M2-1#xAr_*;MNbRNGPE{%tISK?q(8D|xREj;j;OQ`VPF&BKW{i8$_ z<$^^Q86?U5nY86Du5C+sEpI^qu4CNBT$z?ssTkePpITDcdZD1oFd6Yzg>5R{asTow zXsuZj57EB< zJg#p|T?@baHl|ECJFy}OG@sx&cjSM8YygjhNUpO#*W5Q$R+=6fLvxT+VL1D7r z{xXeEsNC%v1nH^9vuaocPlfQ2D3$jTd$Y`FMpvt3Tr@HG}4Oj-23yBIP+hKK^;yT8L{{RT1q|+B~g1pm>i0*T_9N*97wCg zFShpU-KX@An~V^H5B|vF+ovem*#`9R`>n+?!z+>AEHoaZN+CM#Q4&;k^}ec7A8Q`0 z+eZdBbd)6sn8v@Q8gkaWyc+8)^(CLtrWdW(70zdrJ+DQ>dl%^kbO}=U(%e0^s0A2Z zNQm?CY;u8NTs#55Iy%05*9z|IFFOs)L?JS&8$PM-d|L0d ziCN;|QpXi9(qx*++I&n8OHPUGBUMEc{UUGpGOq(GiO9;{Cfktu2LuVBpbr!G_pj1S z6ZC`pxuY1isH8?Vx`y5<0T-qUX52NKry9*L6;D%{ow%>^MZbYkgLm8-kPd>VTNz8W zTZ`cA7Q`LomyY5d5vJGaLO%V*Tz&U>u?Z7(WE|ZKh3@8yo!+GN?a)#z=D|US!quEF zoSh8-I?cqEydg5N(SYYody;_q(L@Hd*xS)qD4m9%A&z&lpbO`)^~T*)p7Bgn;ytGXv)NnE~KQy7xT_G=t+&L``FTI6wG=iVl|b`eg>t z+i|q=-%P6W2V|a)AP0v1JLe>Z5ujp5jo%Ob-*mNJa`6wo&h~ZKR0(PFeB#&+xLt+0 zB*TGROjR8{f(Jqbp8wkiU=-3 zBI_M9v{;K=b3?9v7s-PS*q}AQcwhwV3lz&N$HAf4$Izo>H_7M}oX>cHG5tVahJ2(^=-_Hu1+U^I{qjiPY}tLYg6JN zfi)$m-?xkN%TR;vVYrnS2r&QkTbpVyDDR_|))7t`&&^sr)uny_yc*HxF-Y(=hQc}L z_<;8X3bfX!HC)E_iMVuN)>}N=2B25)+Eb}_aVj;~ffg}{$$^H5`Qzw$lf&{iAB#8* z3P;@dqtF1@tjr5Ya#3gM6bQ9mM2C5p6h!+QB_Ali8 zbWe=fq9(g|T)MZl-9uFteHRpCEqbmC$mHl%rwocAyaPgyI(;KoHi34J=;D}BO8{uL zqNS}Z6TstNBrS}CB|88T)yyofK+945=DHJo62&ynf_F*EyV!7-b35TYqLzIN+5YUV zY|aVrC_Kx0O}C7b0Rdyf2;m+bW4LQ4=_!q?KV7-8Ky*&%G?Qc!KRytMxTc{h=-??z zu(14KM9(!lE;Cl?d{vg$#t3gGqUXu^9aTN(J@72R=5$5PZD)?t{72cN>$`thgywXBeO;dn4Dix0OK~Jl$_w zPQR*%iHH@F2N`snvJNH&cadnx>DT%yxPRpJau@6xkt5`Rv7vHu?eV*~$m=wic-<7x z!wRdiPf;pSr3)SGN-HrEe0Q3L8UMCst;!ju*N)lt?mnIsjUCQb!3Dj#@H9AtgPNCn zU#2QD`qk%?UxnPiu|n7H6!MG)WXj3{?$$VS$?o(0^G{BW5WsU*J@C-=QL$=7@WR36 z&SWkU1oOXD+dtxxn;7KSh@1Sy$7lK0f7bnM$!niI&ev%J8;*x>uF_IRevizTI@2fOc){Nr>J-$``KU+U`-VY^kfz75?E2gWc^U0vA7VXa_=2Y< zYA@}fGI_x?pt0(+H-mA}{1K<6my29A64+CSMhsBWfk-H#bV*g8NP@12C4?7T!uRR$ z1w8CLqAb-;WhwF^hRTXYeN1uNHj90bE&w&G!>_`T88SoLpsAJ?(Ad80PYyO@1J#j& z=)$0fb!Oo!>+6NFGD?B_C4IVGvv;c2eKB=>hw?#WJb0 zG)(VLXVzE^EAJJz(0)X?ZVx1RT>bbw_cptg>9b*-YqlOzq@7;Fh_|-s_rQ;MFYSkp z@2^fvC6S8XEK`I$owowo*fk=ZD0WJ&h7S(Pvk>9>QNV#;?G+F%p;~L+0x%Rdz~zr0 z1?bPJA1BEFvThU}6#nmOiueVX;^?p#?0PhY2$f=7@QdXC-j@B40N{DI+$dz;+JloMQQ*HHr?ksKjils&f73G*oxfY7QHRp^vqf?XFTdiQu;Y@J=M zK?X!1wWSyw5{4R<8fk1dwb;%uWax1aH5}b-jRQ`>H$Ql|M&h|RFzif^`i_?de(=WV z;5jv&n~egmF;a&ObV^F6^;slvxz>`j&If9DfE~Y(o_ONhF3-R}v% zP)sQ551RJLi>^-5frH{qUrS7{AKGQEA_mffLPx)TyT)wtF7vad4Z&TK!S8P^(5;+e zw)Eq6?B~49lo0wIkT-v@tcX5rh>+=MS2}vUelO|1w;nzie5f%MyJX;x23UBnH6*zG zp`dtnE;)DIpTjLlqVo8U$w#D*@kMBKUE32AXhImH=eivncn}MMRxYb|FQ&57s;QI~ z$bfcu4#!q5NKu)KKP7WEzv6HGN#)60h*#$!5r$|A@9WxQo95=YbCHvRiUES03>>{i zcRD^nxokkFEQe{xMqiB-nr$X$DK=su-2Q4d#T$s`O_1_p+}tba7ts(lcORPVyzV~U 
zB{6ZS&l2sw$j5%SvzZd>|Iv=1xGhmf5Do0RyeGB@3r!^)M|&M>(Uky`xW@SNq5_sW&@Q1Y97vJY{i}p&;)~nd%9e%$z`_PBY7y3b|NQ}Y zj%0mz$EA;Xdq77ud<_QN8Fe+*-{*3&s95ampT-f~72AAi)M;Mwp~%^my7?6qIWe0u zY$#P*uGevb1m&^Gt3=XMO%SNqjt(VK$1(}+#+R(2>Nx3tN#Fo(Zx=v-C<2^vTG*++ zM@0>JqJt$FpJ?6!Ga|>hjb0Hpl}iQLqb-+VmLS==24y3IjZ0pm$%PlPTf$JYP$|eJ zH3!-FO>^9H?BL!2VO<70S%c{qx3XeJw6n^9`gkalT#b#7wADzR*aB)#9m*VCC(2r; zZnILDu&q}f-F~~8ZC#H;o!-V&v~EZK0-||{a7XZB>jA`1p$e4}txOuGWWWHU%RDS& zgU*IngC2Sxs;i$SG-8`!d#Hcuq~|U}hI(RveJy8KPJJhReZ;FnX5x2@;V#clGo1*q z+@sZVDQs98%K3*NLS%=Cd`EQi!b1JrOZ!2p)Rw zq#eW(&rpWC=4Ax!l~7Wa?8QYNJsb~NzOUYbpxdj%SDr;n5n-wD z`$9o6PuxPZ4dM2j^mKz?rR0t?Bj6qE>>73LJ6Q;i=}?Row7}pq2a_R}wWmh2`kktM=)Q3y%Tcz2>uP-Wt7cTEr#`~p2-l5|SBic22u zwHgkDb?N35rp=ZQ0dCxx4D7vUFv;NSBefod$*Zkn5Ve1y;_s6RzP}LX%?P9lf{ium zqqwMb{#fRr`(!6eq}ipVqY@fFTC?dzwfa*}&~>jEUZvDr1RT#Dj?7Mzl+AtewmgGR zW-Lbx!aK)A??OHZEA_5@kiGO|d50<-50?M})F|MBEZ6T7z_+rKdP1 z&t>U|41_=0CB>=B@ei3#_yFkk!kl9L+GxY!QOsJP=we(dZz%jBX_kyF1uJ>gL8sAz z?~~ODR<%cS*|SP2Wq^nNji4=YJM=5d_!}^LI%^MR^k#S2mLOdtuxU>W^$AK2boNK6VpYsTTo z^vbV@rbgUF7a58TX|(OYMNIjWp?Xye(4#8KJ84(;$5i{CP>qLZbA8W+LLu}eg86eO z+iQs;>4O6tfXB&591e(yN7m=cGA zH5D@?kK!{QYj2GXH2Oc+5u+&++PI&F^&hWK>b5C`K3S2h)@$mmE^|fB1tbIXX1ggZ z!8@}wqZY$)@9W_vfgC?Ah;;|kL?HK{;5@ul9O3N*Gn~@X;K1Yeej0$x%Q)ZM;bAte zOuhRVCB!Eq^P;@bT^*!$Nq5b9U zs&i(Ch(gc{;7vTvqgiAyz#K_1A3_BY*f~1b5S|Y=NCvo&5e~Weajl^`ehJpIL7}9f zCWIgZ@L4_r)I)91f%;fJn|zXa?D9H6MO$gvu9l$dC@#&5tMA9Fq@8dz>sV*)ikv9_ z(R3CWc<_ORdc~jJ>-QTxcW-5^t4#oH`z1|qm(v;^T$Q{$azNe*}Rd@x948uRk#94aoP>je1^;MXr-f%OZ9k)YNFN_Ph>(N-=3 z^ZPnN6R->s*GsukTaYO080GCXSOy7<=&~=bJfnjQMS^I~&9?hSJP>f2(?am)6PUQF zjR#(cu7NPZv&X|3tZ}Rx+nbx2VMqLI-@k|Bv3@o2K8+(=1PW{KjtbS*_ZS-P?1kCN_-c3uhF7E9|{VhErzh+?S6L%$O~Y6sYd(#A}`2gYQ7UD zhXr9m;Dy&nR8b;;hpH0Ms1*wE_3((5`SYJkhBiJ3lFEX0{p&XML?nsuOn=S=e*QZr zpA+nR+My;mFtvgH6GzvaTGRBQ!VwFXZ(qtGc~1GghuOC!t6YoL5}o(+PFZsdp-;D0 zBR=wJLXUt<&zbhGTZ>HV(Pg>*YV8xBat7xrs6n}_sq-I^5NvZ$ZaF?Qz(4lnXYyc;Wa?H?T?K zW$K8kxZY!^r?-VO!J3fmuKY9&cPSRH4iBQ1Gg`Zj{xYfC@tv`ooisO{0|hwBrKYIx z^zQbz)7p*PG;0;9H2y*w0F3U|i2|uox9J7&e3!>~i~LUM`n2Z{`ex(s9bIljcb0!pNg>Y74okS$QNxI!AH#i%%DnyR+Yg+1$w$ zO@qJRoX^?amLj0c(N~SzIoN*rl<(%?Al@Y<31=rE0)(2&2m|j)03_z`)L>eK1BW<< zKxhmPB$|;`B#J@vKQsHu@+d%SG7QL$x%{W^5LB06_0BZO-JrZCLw?ziQ8#!)lu77lfM!H2B zMH)l|X_QXskQ_;YA*7p8LZm~wL>h^q8;0(#p&JB-maa4SectDNpZE92xz4%Hxi7B) z#{1s)jz3ZpzEgZqJZ0f#@!7H(5ZkL9#u-0Wh!1yI7@? 
z0-o+8T;wj1e+z^8sXTyIka(V;mVW_hAoPAa6s+M74?jRw9_TFoPcDiFZi)vo3m}p% z0_X1?XvoUYLmzM=e;@k~7K;Dov#9uoE(7r9t~4B)iE;J-`zf>r=^!yTUJsvId`Jk8 zHHZL-5-Kcw0ORmMG5-z8`G0i#zlb^h^``%C%0yP_|EMvU>wa2Qfk4FC;jXHILC0Gu z)vsm+Tp|7%*y_q-^Y%{=o!Y7M-ygBrmPvN%s1|m<^#OMFt6Ppg%C#w`WP87};!Yl_ ztHi|l3cj@>oPO`zMR4;M?}da?d9|Ruh1Oo2jjr`Zj~?B1z_u&OOuJQ5h5ZWIR;2`D7XBqYE>Z)cYZ`kChxm`BG z)rhoWsp+0@Owp*xx8hdCu+drY0jPz1pFUC)-oW-BL%qcP_8T)-knzB!41a};9+ZL< zmj{{iGI2WTZn`E>Lm*5>U5>hr9fSW05*GY95+sBaQY0$f#rg$B+)#Ac{%%4N5-j#GC6N~} z1=6I61Y&Bl3hgFqI982ao?f5F7YkQb7+gwQ^W5H|YVGfY4ImQX8gE-a#PqI_5Lcv4 zr=;_%C@ZsO)q9<95S_?cStTX&irkz}?K#eH_+H2b6s0l>Tptw`1G4+^5G ztl%>^OYXmO#O?}u{e5AJxU+by`PmMPmH2ui3&~Z4?pq0}pB6SUQmgtjZgKK$UwwSz zocS*#?O%*RZ(v5kKz)97o9lc7XHMDBU!8I?lkXDhX8BXM*rHVSa9L~1r)PX&@yvbi zHl!1L=1202!oxixrMq{Y?+D*6FaNlO>{)p|_e(@|J?L(Ruy-j-h6w!c=3&F*wA?Dq zC-zFT`Q=(|y~ip+J(Sb+yk$O)dbLH3*`qsF6_n5o1G^L(g^HRhuGzT3luo}pL=6el ze7t({+wRG36-Vtvh8(`;|E@g_@Vc#2(>8(6K&nY3sA+mrf}_UV2KB7`tATLw0u7g& z1-B_*UANwBqaY~dYcijppcUN9_U@aEjLbjM^e;vwLa2@p%vDOprZiNWeP^%XJm`)M zPk=>Wz(M`I4+Bi;lOe{bt5K3xePy}bhlw9ZMVlPEdvy;n)X=wd|9lZ3)ehW#ZMRAM zmw)Il8xK;cgG#R;O-|G4FnChp_ag1@QXg$S;=qWL%ZZ|I-@XlSfdV1PofCY0K zevU1St=8#lFcVaHxKvIH+qn?9d3k+J=6&%3NtH3if02|#c@3N+qD2xu&qWmf0a2mj!ywk~N@ zbacumfsz{%{^JCaYWp5}`l1vwTkVFTLk)}P2EI!Nspki>3TMv zKa-M@$|C8E8@RJdEO-2PKJy@Dl)ahdE}YHKja;Ry^&OJg3XX z_-U&%t)is#9o2PuC5x5Wh@0-1#w*cMylwu4o6)l~FfA7*0zM~l8`#_1+i-rhMu7dG zxE?5)kdwV{UP8nx_xQ8_*W}00tk1=6S$?8^77ag52Zt-=x?HWXyF~sgXU7hG=KgIsjha&&gLF~LB*}megp($HiWnB0S z`o=GBkSiLZjdfyDGOHgb?7CyuN7A*wv;h$n6HE6s-|0_~G+daG;rgFth#-mEC)L8! z)PgnMnQenbIjQMIMMVvb-rompKo;DthceI+cXyw2MLwh&8sfv9WStY{>nCeoSpCOP z8NwnwmcWEQcQ?|7mV5Q_J9A&Z(&Y;BB+P#o@xDBsG=Bu=nOpeIOie>`n*yPktG+jQ zuz~;8l6ylK*Dzhp9$1BMU}(r==7sjbR>HRfx$XRgFx_H|tMbw4l%#UQu$Cn``rTOxE zq`;U_Pbz_tm87>XN2}t8Wb}8k$!dAN5_COP8#Q^1n4F}yPwI2ae$qeMWa}04;bNZ^ z(GZwZP&%M+`sCr7+PXG(aDtp?vG?_}EN!z3W1NHe+yF!@2D>PPnYpef+6b)~JX{j2Zui+9>()zFVE)2muG$D5m*zO2ntaaTDN6%`g` zI21O;p~wtyB`DSBZCZy2X8RAt|Kk^M=vV<@?5Fxjv!-5cRk zy>s~}$NXg}M9K{@P{S1}^>YRdylcz1ep^@LcE_^sev@=3_$K488u-_|MYao55Tq#E z-GJCV-FB!dLnfK*4a2{#TXb{XU<5UMh29!^R&8%@gHc>-kHe-1wWeO%F1}ov{yX&_ zf)E}Y7(%E&9W*Cr!-GHRJ*JN3%fimsyia~Lle*Q>)R^D`tiW5*50Z$_(K%>fy35r6 zRO8p;kKkiV;y>0>eb6TQ3X=yuPo?iqEE(?DDn`U@OYLo7!##b6D_~9B-QOsWh@d_j zPJ4e!#0me)7@r|sGkD&&7f65mJ`aN=9Qzr+%spi|)$0yq!#UBA@sm3~>Qrkkr6*{Sn+ z%NG4svQu3`hDh4GJZS?d7}uzGh4y{x>p6YtR9P5pWOuk1LE%B7bDH1ddEHv3|1sZn z*g!h{nL|loX(ReXi513|Ckdq$;dOfaLiXMtmu%;`^#ud3XHMdr|NNBFLm;%~=6H#k z8aKJP_(B|;j1D^c`cp*=&u~f&TawKCH**Lu{OfeWWwIFA%;Vk3Gp?)Y#5`}@1P0vf zmXHUYaI4Fc6w!aHV^?Wd^Ud?vUgXjD!h(JAl#B(Wrbf)Q{+gNDukYB_=hpXrd07^J z(YI<=iDUR{dwWJfOJ`1W_x#*$wpNRwzOm1OlA)N;Ra~X90d#!4V*a&P_fnI+d8}LB zxbi9YgOolD03mdk_o2oAsJQ>J9({29|7$V&AHP2=82|tnGKlHTLm_~k`zj)S&Hwk& z9{;z&A!PsgrvD!TPX1Meu5=o#hZbQS*U+wy$^Gq?hxIT><=?*o{s)oczqu8WqW3?k z9+4|-Bthc?Gvj|ULp~IZEH2o`YtD)-xD9ag#ORVdMCORF=-HGBPVi4H!2sGK(Wsu$ zP!AWKm-3S3%9+?;*WGjTHfa^FL_X)_Qw;zT#q*DXL6!~y{B@(y9$o|^|EG$7T?6D7 z4_^hDzj~sA(GrC1k|aoCqkohnCEz@Ijm|F|X~bQLs-~*Z!Yx3yZ_vUQ(n5F?uP&NK9FagrR5_KX88f>~{{)|iIuD{;UXVi4(UT}~qb-n6fv1#UcWH=acHkp)>p2oDPR&=~Nl{3D# z)!*cy-%^|eM=nNf7Xv)NuWBfc9w!fpj}`s87ts{xPLM9Mc4Ot&Z5~<6Nd+IQM^~zFWvIWC@zs!6jJ*P zXwXfFk({#LC$tf6|M)8DF6V%b8XNknRF@A}4bX>h{x$#0++LQ|^Ie|;SMunY;yBo2 z%saKszG|Xz8gxjna_ytNCGL=bETcf^frK5;^&+D*OOOdh)$5em2v+*o1!{&|4612i z1tz`)IW2kuuz>I{*vUeERpuXa8@80rw`sNfoIFWE>@aP?3t~Z%D1SGJy9^>2S6shM zs{_b|CS|5svkDbd(`5Y2Of-Y`r|oX>f|>OvE2d~TP+uM5bI0VR46H^0qr)FTwb_)w4Q{e5R|en5(tpIE2^q;d-HW26&zg_~Z% zl(32o1N*K>EUjfh=u-8*^&QVdkcCJoAmtWb_&$e|7y24zG0g%yn>y0rPWkwZ)Z=r^ 
zKAUz#WFO0wU!S(I_oN=gu#-=8xE=*zeJYK9Uxx~=wb`W45bW^)Ak3@}r2a?K9_o7) z1w4ywC7L*Nb{;Fb<}O7$qEdwQT;^LkINw~_z1%YtB;Qmm^joG(i0o#z)YxtskMiG3 zd&c!S?bmy9vnO=@&p!VMN{?tCN^kzKh;?VJ?rf?t6;{=9^o?@!s|S*&s1EgvHIQ+W z_*bh#mPpbqYOE#bBdLW~%2BP`M>B-re!ZR640uSBkGEz$?SqN+l$e-sO@ROJJ_+FN z&z$WU>2ZI#!`KQW#+NAbu6d5ESbbB3j1`xeMwQ+cHV8mZZ%n!Ag5BBp=o?YXd;dT^i;~e1T z821>FeH^(+^3njGj{rZ?38VdYI}@7ODO766J}z$3<*~(1faqf%2e|<+SAP4JHry9N zVuMB>+p4%^=Gs&Q-toNVuh$kJjwMMeqr(6+@ZUam-0`${<0d?8zYl1rd!C$}?zjDu zTEuBFlGai(Q(i4aG(i8@A-d@~omryL0555p`6lBt()YTLG}$I|fz_1N@OtWd@G?MZ zdRc<@JlHvE(8B^0`HBGg!QYDHnNl4xwSC!y+Ia^#d)=j3`?br?`XoI-K z-P7y{>;?kPB1M6&X3R@`gb%*g+*Y5K3(O7{|09f{hy`c=u4WISBE zrcBp9zQR02%9Iu$oYLRlKl7s80?CA|WwwF!6?g~!P6>u@VlJZ8GP>J+xAf(KnV+8z zYc)z2(+(48p}rl>2@AVYsr3}=$cz`YV*#&m?^_qYsHY&gN)KZY0rtg0 z*cMdckogJg^Z2V;B1pJ&b_-LrYqpo1Vzy-8tN?S=SHVj%EJi1xx~a~O`h?C-BmRwT zP0F+{`ytX+W=+r1HoiJcCgrh&$hS!4$9B+C@ILZhiA`L~Fq&fZvFY zhdaFYRSk6DQhV{KQ70i;KkgtZ;?|#Iz?3zt!xadISwKE$Xg??1P~Xr+2GsRu;CDWW zLkWn_*^jVGu7HdH)(4azwp;QDvIAwi$ztmDIc!i1`{O(w!K)sd(Z~O7R){p$gMOcQ%#mIlIi^dBI8(f#j5iOlShbv~L zMeOH9>~}_#1V(V7!~Zpk|MdeMPX2VoKERS;(jlS#Pcr3W4jX?oZ3)LWwTjej5X~yS+1YMM(%9yy!vw!Sc)a5aHn4Lay)M7YI^C{jakO3m-m4X$A9l23WjWvq#2HII2{O{SF&6#d?KuUP4# zlzVt_4+NpYrC~V;#zXeTEKn>lT3DaE>{PG%AIg{DO*a~Pd^>u?3Vksp{SSlkgm3~c zC8Cc_@c?l1N~_Uwutm)HOB2Z%fj^i*U1RRd5SpcVlTV9T1+nt`xi4f}h!dV)cD<|U z+1}Bk$g*7t`7=Q_l&YQX9fjtv))OOb`B+ zZ?W@4W#Z+*PFZg|YmhyqY928g16GDW2=fv$+eaOpSB5`usPWPFTX zdKysvwNnOgoz-pgV;Tcwd{m4)XXUS57~Ml2Sa&z)koX}>;kWkqleX$;4YFVEZ%^PkOc7E~2CqQRBMGjG)c(ThdM=Zvb~ zxP07-dQ2t$rrQw6KUUlt?p$IP(LF9pKf~X3rff^r&pj}+{ucY$8~_YvGeU)8lDobs z#e>l%F0qobqsh7*cap zLT39ulUq-A%vsz=fANCrTrtwbfVD)w<^W4Bzv?vjo1e5>B^ro|YJGD6_aL4q3YSn8 z{7z0$}2U?Q^(X6$&Rv)K6chm-xIp zj0Uu3#nciLQ8q9oMgfNFaV6wh$y%Vs|(;ii_B zjL$=Ku)ATmr452mSI^TOt*n71-|K_q0r9(wCD-?NUy^w1} zKz?$m}d}$u&>ZFJSw)#t4vb$o11D)OL zI?$k(Xz{FFl$A?lpp#aXz}L0z{RR$@@Ylf!M&pIN=+F~IVWn7hyCOV=NT2Nv1r1F3 z(xwJj1u!w;QjFkNgAz&h)LGy-$s;KapxRbp(%su_?%@J{vc6Br5dbf3E3erSB>4;x zXt;RC;JNd*3mFDzy6TJ=G|XYe1GtXCHV^gGAO<}ZJu@d^$At|gjW6Rsar*VI!bAWK z!bvw{7mTP-#X@}{VPS3wh;qbrRjKhlC!P0U>5o3{mYeCyKFguhUd>^Ocx_Lw7?~gU z=N{qiT!|C3|Ev?^q>%vw6UkM(NAWe^capE#zb=s|cYc)$i^0_SM_oPX2T&0*9XGJ`e9I!$Vf25RR8#AF|MTYl5?+S zh+^NDh;P4hrL=iaz&|>0k)T1d7{?-uQ$|T{-$QiXPeAZZ$)Q)ZJR_5;sI!3g(qriK zD&;qg2ycI+PCLh3o+k??R#9M3S4DkFGDisD;;d zJln>m{vwSXpQq3W(C6m?)?C_!1E54Zkhz=_A#wL+03=-P^!cUCgpXzq2kh<)4}09q zP79=f0z=|nJ{Z27*SES{uN%z8j^JLYyxr1ew*SyzA9&<}m)nLo#@B%?j4akD1*>(~ z=zDD}9w`0d0Z=v1qQT`ZYyqzOUp9f6Ozp`Ui+3ANNl+i4tE7!O3L=oNc&&u%iE9fN zN|QDc<&28h&0AbwxNcZF(1sL04A~@n^cxAYU6L{5w&iO_Cytm0Gn7LM9YgmlBWs z$(PlaOAhtWOSAKfa8k!5vUU3N&N43_8S20Jg07?XV+s!B=ccB~+G8|%?R!GaUaPk{VdiZ`DNp-t*dI40 zv9PL2^catE5Qd-cpsPc#u*nukoBT*1;W7O$vaJ(hXJ^-5;fPPOIS$3`l2e7&m>G`S z?T-Q|o$J)Ts=Uw{?~5p7LRjWHytS=^^#n;|wFDs+Lhmm(@uBi*dy<+5P>fJ!lNG-< zMST&iRSfuxI-(X`MgZ8+L=g)O_aG1z?j3k!vLa@ZelUOn&i#&&GsJ0k!Wl4sF-M_O z!+fo{8_TOOOQ=JgKL5P379BL_6)faxl&%2>vkkC0VZeLrZGrG`U-xUTaS~#X@ypCM z03xu;@GU@iv>Z9o8oq!A_L--^v%yY04A4`&v-O$DSe!yt$76O?q%uVV+0RuQ0;~V- z&X~#uQ*!CLfZl#QTI-I~f9zD~XNBuiPHphdHitE`e&zC^)613Hyp*W?xaATNMlq=0 z&X?t7gcub_A>n%~IPQ|}c@EsM@xMtk{hQ+UGNIeTEbi(-!0Lr~10aC|$9qFnhUE`Y zhp$jN3jBU(bUcp40kykJjm@i~z+=g640a*;qkh{7)XjiondY|$3mALbV%Mj*HB30M z+YH06#_Z@FXe)E$E~OS=NWPXY{?dff#U(Ft6xOknljoeClR{l%?Oci)HzK zJB{Rw4r)J9!BcetK>n%jL{0!)E{FyVN~D+i(3up(AQko^K7d#40F~P$=UPds@P3~J z=|9_Dlgp!i_a6_^J9s0P8qHB3-RYimi~7q(;vy%C7WHh+y_S4|eeI{s)rmJ=vabzF zHWo9)8s+~r0Bq=R{E=ueCGyr&ODTx*IlbZ&wL@eI5M0!Dh@Tw(*qfqH&&bflqlL|P zluM9th579XlpbPMkoJooy!tP?jh)*!F`xGmS}ax|U9P;+0N3AaA4!0b)a34lSyat~ zJey?zaAH{&P8Ko>H|j>WT!!S$9F;#c$d72aE!5Ttg7>JXVqG 
zQP|SK(19w>#x)VXsZZ8+4J)QD32~yNgT9k4qJsVr-?8agY`Vy>A64Tza2AxhXzzWw z@l^8>&V&+QBT+d4$jSDR4%FuZ5~s=2=!i5ppRRE|B<;rC)I>4| z2E5>o2@P&2T0jp;Q{ZHP*&XcTVMFsspOJ!iZ_NVMBR)D_{n^GvLGW4*{Y3U>ArAeI zUCI=?V@?=c61m!OS*-WjNV78Lh!nkhCN3v-jo>A;b2s#2UtbWJ`^?ke12dxqW$6g) zB993lB>E9_*f>!RkB%@g-I^uQ=43y{uH*XvW!Pbg1&;SO5ln+8sUpG|>u)5GiPE^( zOWO=y^a_!T)O&(rFF!a$NsSx*KBtz+&)|nVev})8w}TOsM#Z`4;+!2zvU>c7+ZUot z_g1K0YjUCu^rA`8DFz}d^gD}eHU|1B|SSD`y*72crcD=WkqcYzQQ1*t_+Zsi z9B+I*H877jIks4Ih-*>6T%E9&tal{h0ilUAgsYGd@YrlxdBS2RYVKsr=i%ssgRLpD z?`{CA^VDCe3-c+R0IDmt9RgHDVV4;p%x)jS0NdA3eS{5l+M1?>+z!e!X`R|?xp2l+ zDU*H6J(gP21;Aw^sNYw4;ywSez4B_Mo^_#jF~MloCP@TIyo_*anMumGeda~$HyD7l z`3(nk_6~<{G*;XGX&!shd5)NkMT6wsIH*W8;tS0nzFQ{T6GQsfQ4 zoo9AQW>X`Ql&Jl{gTVdI@2>>*-e#Z~TX=(aCyjnJ*yntqJszK0{L|{TW%WY!@qTW6 zNqTb*fi3)^kNw(=&pzQ0cfRJnN;!pY?>B-vfPWt`jm^)FlDcj+QbHmg;3i^p)Ut8o>-Tg1@y zzVRGT2< zWQ@(lAr4rxNZxD}_V8`IT=Qh&` zL=pd*65=xxusZa?#o4)%fL$LM4i)Kpvy8DHm(y5|al+wSxed$i9p}KCF7_jgefbmx z(Q@mPA==9%;7JG*b45IU?s`-cur%i;BAl&WFr@DQeV$+aozH8ypWu3^VZUV)b2=~1 zykF;Td)SZ1ujOLZNV}`?MKwhCV76*be+j29_f4a18%8c;<|OqSdWRu$%C9L@KFX)i4qSOq?Z zqk|kOa|lCHo#vaP7o(jt z^sU~(GYst9A?A2M$&Qq=pEfTwIDf=e%3RKqlkbD{vOK{#Fb#PUp7)@;pxdu9nWM&% zi@WIedM)SA%gk|yYkF8HvZe5eRHXW7*JyMUJehg9gitfvJ!Sb|LCEwpntzTHjPVca zY7(PxV7W6<(J3Fyc0$5{h;L5GN+dT#0*M8mD>30=G5q5@AyhWEgUwJuqy%OQA{{Y5 zH%}KrShJ^0F>*s@^LZO80tvbdA}nne@4`hUDO3KGz~QXe)@YslmC(nZ13h;1q7LcB zrWFIHIq?YEX*6=wZ!?;l?#uqJzdx9+67cg)y|N_Q<+t^g8PRQheIXaFHOHoo`y9A_OXpksTJjVho8lY8!cbt^l@5VFk0|aG#Ms!_Djc$151cu% zJ+Fhf(gZpu4^7YaMae-8If2)$#BH1+ns6LJQi$OTM&9maC&NNRdrQk+F>HuciNYO}nLsK~-Ia8y(@_eypL`?5%a*;!$w4i_MJjqkUh#JJ7u zpZ5ig-4-l5a+X}ig-(sPl_r}-GbR#_ZPnAH`~Av&gJiD$0jS{TT7@Ll07!YHlL#3a zlqyAUBKr4IHAPXh^4qAm-Tdsec$xFF(;4X5RlZyjdJ#Lbms7@bEE9C);+@C(4Nf;E z(ybKS`zp2ui_?T{Dmp3dZrNP~+KIAPv@(z8ATAM4SQ=adQNV+Y z*WPQL#hAxxBra!-Cdjzr-t}NV5kSiLmMP9{-2SU2AEOVco!>Cvr`X2UCXhJ{Lm$Zi z%Dhys_oj-|kYA}nE#oeN2gfpVIW>OUl;X+83xI~^x^5u2Z$qd0Jw&vLEAL{3cUQO^ z6@kp{ThTq}3b;=FR27E~UGh0^l9c|_$4(^zxE}3JS8Zw`4On{i!lEleKrW5~WXT;q zXnUETpYPaUoba0ROwrHhy~Rd2l`$3NW!t3vY~}lS101MUi^ZO)d=?akt)y7M!G^th zSw7ZMx8d+JYR1$+;@ZyrpOL2-e!e}EYuq{F&iAMU?;CgnF_4~x*0SnNzGg;bNXR2n z%2JJ`O`1A8k;qd9!)<`DH;~sF;59h=RpT7Y{>>vDc~c*?VCu-yae$b^GUS(K+2^NK z7q8fXI8as&sztB+UnzeohVjMf_9<;Wua^!w0;M!HH8+6Z4Yl~WGH{1AVAUj+V>!N~ zR=NBaim}E*1m}A*hf3@}!JUG=R^P40Vo-%)A%_j;7Uqo?Cl=Jm# zsCvJ{Husmh+4-` z!G`h$RdH6sJOK;zD}a>tP3}khPJrIxT63G>R5f%UIOyH!*&5k`@Y^UdOdFTapy;7j z-%tZ?uplVNTr$4H&)4vo- zIP_-chHY}Z0xE%m07o4a#yY=?*~%3^3~HbQQY@qWJCdyoq--}_OvS^-G1>K~qJ9~b zrjG%tm9Kx+Jvvqz))2Ef!$yaT#5VhUYT8A2;rcD`q83pUx+7KV)Z|~1PFDyPdE#~c zhagRGh(?AFd7PsnWO<{74@ zO}IeZ`{FQmcZ%ZA?7RAa{Ct+u%BZN)lU9FBoEVmsKZxoQy;~3)OxuhCd&#C|8##pu zDTvk7)W|TP#oVPuJGv4Kj0#j7uVO;Lv3LQ5Z{CwyPQ0s)!uRw(V2SlJ@{gk)ZxIQ< zh*rSQS1E@V@KpfBO(bX`;qz+PmL|qs;lpoyD6^O)d;}M*dNS^bWpPwR?Vh_lGFOt8UqG$ZiFhGR^q@287|bNGZNDtOwMY8mN#E;@~RA8X5pQs7BREc-s+bC>bMK7 zXvCXNg?H5V08~{jr#K*u=JWBZBfi1!JPBL^w~aHTm{5vpSAw5}knpZ|iMxO^D^nzh z$#wReyZN{~c2kQT=DV|PiL)9&ckxGKhl$X!y!F^dyXVep&*inV>uGCPoqzm&%vGcF z_+Sl_*XNvDfPPy6OP$N@RbC_|e%e>&YC}HVm$e*BwVp^VSCCwJRe}JdbTNqxrs5^I zrx)O1EpG$iBlKE*bG^2Do~I$+$yZkiby1n1uc}ozI`LIX&TqMHB?t^uou6<@#{_YSe+7 z2%K-oS3E+6EBt16D&~9%T~%DK;r9Q$=W8~_Jw;QgjfiaXGV3K1jH=U~`kLsQJkm(sF;XbC#uw#MdN@*-=1~Rt=a;}H%`46&``yhP}Fk&8B=+zRx(qK zy;mz~cSrfGt+xRIm=g+Qzc)}Kqa@RM3(!Amm-kPbT~7DX$wLKWW`-NhK-Df;YwZ1| z$&o?wZ>ZI}W9P-^X1z4#O5b1SB!DSipRyE6|ri^lR;FQ@PY_l@gZk5BF(pwBt1Q8SWTE=-cZ^Re2;IQ~jqu zTZeNRwe~4hLfgNPbY0t;->uj!PR(O;;jQb?jX>ioJYq4t+Te@PS zYk$PG*&(llHkay-z)9Ly>BMY;}7z@tGOM9<}FwavZqqc{&q~!6d$P)6MOBpt1xri1E8TCVUT4xj8g* 
zCN~s_MQ_CAP8g}E2Bhp~0#X_#4e9ZcT{~UO>I*|}v%g&$KtanDh0i+e?W|5S0@Kdo zE7;tO`Q6!Eh&06QCHeE^bJTO^2 z5Q$wS{bSHuf4ow=hlNMlPlgWp_LiaJ?d)`qvpRZ6lie*ZxKDl2`^0wk0A9vx>J`YI zu;6j4F`P?${?RQWM97zjIrU)2+G64=;T6?=87nF1n&JgB4yfgDRk2gxmaNK2XVD$U z@~V^KzW+gXdji+GBm9TM9ibJH~fJ#Q7NO_e5>lMG7wbF2m!yF!Y}1d5NJvXrnI1VFp~bpD~U4(+}O z-HkbHTo6QUV`$}n!!dh*goRoofoUoJZkhdENEjPts;Cf4uB!n1*)_hz+edrPW9@Z(z^cZq5!?F9rdPn*9^`MK*Y(K^w49(gU?%(q;~C#eN=hK z(=86H6vtRaIAwhk8il1B8O!6&KRYImF%b zWvj@zFTT9n@NN$5&tOg6UbmvW>Tt4`=|s&JJ33o082VAB-m^b7P#AgUDHfR=G?Fj= zHG!+4zR9ciX0v%#TC~>OKfl8Hv#v_%Zhc{nios3x?vA6)%(Px|?j6#{r3o!HfNM4U z_?6jI>IZW89gGngtaO5kihA3F0~L8^xDI^wGS2gON|^POA{JF z9T&-uT=TNA?MQ_{s2%Z)Y01c3%7N9Bglu%Mp$$hGS?3T`aLV>fvIZwIdIpaz!}}PU z=6r8%tFh)OEFz@PuBpmR#TBBw=KUkx?@rn>r*mAWs~)Am~;;jh-{2x zwj=Yldqvu)^vU~qsjJB}IB8Dwl0BQV=%J-uOi$yn0Xh`|!5zn8(3SMH(WUT`2Mf$+ zu2?(l_ZY zovFw#kURtP$Q1WK1@2_|Zd=7-pO|t~ouYqv)T2m`exxQ{RIeL~QcX`#@a6O~xd{<^ z(M5<%CjF0(tla3}vM&fOvHaOL`L0*W&jr)wlMF4VO4KfV9sM9)!}AMFANFTX(wy)! zA-5Jd@Ip}7R3tK-)mLVw`F1#6v^~(+(mds9VGniNP1z2K9KlbBau0HgB`9e#wPfE& zRH30W&fF?0xq2uX60T&o(A?OlUI9!t**SC1qvm z;>jS(9VtEMWwnyp%9~912yWjD3D1q}xOm6hK^Fuebd&J9wYOu9=No#ZOF|t-pwvn&{>2W z(>nBK$(n`3X&;hgSZgVs8g^0IGFa0??=vU5= zT5@gU4OU+^yUts?X)Ff*H*;3Es*8r|xdlJYz5Sc(t@`sfxI_p_&5i;>Y+h{o{biwX* zBgZo%OWR@jVR)tWsnA*z*0;=%AewsjZ!%bApS#z{WKsC^!W4}T@~My9%Lh3?iqqCW zmC4_Hy*%{cra=*C3;`(oYM0K}~e8=Q_8j4Rjdvfj&%1k zhCS@}Vd3F}fztO)kJ`qQfzK?y1snM^mQ?)0{IGwEiLaRYq_n4lRpJZYkk7_Tj-~H#81e8=KtC6hDc(hy-SPW+$o5$FOOXBEC5PMCNUKIq)weC+a32p(r z6%bC&cgqsaH?!zG`sa_b`*3KmC3;?wK`CT8^hwD-e}0@Ankc8NjAKkp`K0=pwFhE{ z&&CibVRA3NsmS5t7>5v1B|tw&2L0{TLS(=+kE&BGd6FoxRCUod=PyU>@<0W}mV^Zq z5-3qgXSPRbp}6G3w8GKM@T)f8P=M-UJZBIWCWE%Fb*r?sWqmLee93R|)0Al-3_C)< zrSHrg_v*6it8yn-YYc1aU5-J1`j zD)~lvj@+rp?;bOB@FSOc%@G%fRBs*)G;q&wNSWNk?QP8mdD$<7tNQ=NnXU@Uh1@!# zoW(M$ST>$yraiWCm&$%7nEA8Tz8{8;*jeL?7X~jyKh&F5MP+z!bs>j}-@?LfL9O~g zRk2@7$*6wfH!|+BG3(sP@r22hgH?ms`W>U<7&ww@?X$mZHbae>%^v4MX#%SGd{UIo zo3|gzd@xNvd_*h!VuOK!dC*g+F7f;YX4mNEkMm7Vb1l}}+o?I;A~f*4t`Ne{dcNb8 zL@>!vdINy${b(jZoQ_-uN(Cf*XGwXG^Z#P)t;3?~y1#KcrI8K^2}$WjTBN0W0O_HW zjsX;;1Ox=66{Ms_VrUqoTgibTq?PWD-+}vnp7(y<@AJOb`^WEGbIr_&*?X_O*IsAN zUTb|m(KaZQD|3;wcAl&_Oyp49JB|;){-H`nqKWMKN*z#!oyDgpD|g4m$J3$eaANKA$J;iYSDC+9@>6D$uig_H_DvdVf6Y-YB z_&Pt2EMcR!w^dlahYMo_Yyk0t*t0QR1LT-m5OFE#R+vS0Io?aaQ zH;04xRDafoFMAZXX?8d=fx|S^Mj4wxqGfL{iWTR3Ccmr#pQxaryF~Zu(Frh{{QT!B zu{_7WI0sNO(12(PS|%NKG+4S*NLrq(f$CF3)h1VmQ|fQDE2PzZyxfm;#;kvj7TrX7 za#-%o1Gc*>Ke{ik59Z(#+fxDN;^AMZDw522(yNiD5pdxZOT6ztIx)ek`V_BxV9TG* z5A;hkmRLYKL?ElRL_0A#Igc^C=HdeT(n0Sx*u_!D`+CBwDt&bB${yG4|^8y|u6 z7fG=ilgu|5EBX8!SWHoJbC&%ZAzY~e5o+qZ{q#HVJ+7mBABahFwCsKT-wNgoorn*k z-U5eQh(;1aybU7hN2pfzZI_o13&a|UgU4oM#iV3Q|%M8@uvSey88yOk1%>d6|nRPMwN%j|?5`&WP?`J=MdJGUV zt_QiJ9-Q?Gdgz_!8by!mWe#8ee#G(2yR_S2aw0EWWk?H1A-_dK2no3o0HNeS#cB)=H+Fd)5p2~DU576C=AfWJI!5+Q%JaCCSGHwPZ!5h^bd-{k>I$mH z{a4ogebobqhPh^`E?A{{{Ky|Jq;w@7)`oCE2e*hm*(OFv;%=KiK>ZJ*xH~pVMAL z^`h~JYtbLmzXILn0HVYG%7MP+*aF>gcin#e^ThGZ(#z%{t3MGxymd!4Yp(rb0p>5r z=WnjrNbT|3zOOgN=EgOCdqt}s#!bgf{J$nvtB){i?sK$wy{oKge>}^WI4d)^m8*5D zlfDb60U71pduFXy5N_4PFFs&eXo)?nZ1PB3?`n8G`s-k{im;=Bo4#{zarCHwW(-Gf z{9c*DhcX~D;lhS7nCdQ5-1iTvWZ25}ygyYtU5ahJMR_8t>WFEz8L!r66Srmf!P!nY zfAB1B@DDMKcCq7j_lk`R*v3RU%)BJSZp&M$iTfb8=Rm6e;IAhAL=7P94{Cs!swJ}a zhPd1%M#4j`B)^~dOAq$1Y%%gaR89Pg)@(PT`NGeICT@xUIyjP&Z&9w}4Rf^Ek(iQk zTXE8#UY#kqS_8Go6nb-N&r|eS33myznpqF>Sue3E6RVykq=44;hyc1v`r(E>g?y3v zGYqnfwa#`9RiY_jw-KOs_(6@wa~nDn&dk8JA9U|FJ9f9~c`;o~JfoDSFNau@1;-;4 z6nMy1%2U>f^OfDO9XDfA5@16Yx#LKasDAH9i>Gqz#4MgR$U}jb^C&rb|J|Q7;|rzo z(m<%@5$5^Hmh6vbc3_QB^4oFtUfFXtu6Q^XprHtCxLDvj8aB_(?WabtInBf=qe-t7 
zRCH0-DL(M_O4HG)7gCsyXXQT12^buxIXr~9GqWK+$rZd(MW=)ED*{zgf?{p&uJz1! z1&Pr@B{vV#4opqwr6fVLZ$0+);;+WG$I;w-<=K2LJErE7mhey-An=wOkUG}pZz{J| zwp-a-yYJHbvC+NP%?+L;Jxju{e--3lWtwzcyg4~fiIRuF-N}!kwi(HXU_Bq zRX{lb0M7H-mHTZ8f7j0lEjv-}m*O^Wl;IdQHSOMNP7mIiLNyhl=~`V)&B|iL3sGxH z>6U|c@`OR2V~aZO*3h%l-YVX@E0agb zHhUiROfuwj7JTIQfq_~Np^xfH)~GSbda4N{Kv42wjxh>*Q}=O>3VE6P@Pj0^D<%or zwdt!wUSi*Xs-{ZEo$z);$Hqn{o@EX5)n+BKlwkz{3b75=1^nw z!$dV~e1c8G>7GFpxe_YoLkp+@d7EB+iZhdP$MX*ht_nXDm8vCX9tEMSXi|s1?MERB z+b-YDXJ=o#*Un!Ae{ogGu$*}{%l1}$6Ns?8r9mQa191^5byG3GW|ipXcWg!be}c@s zcDrPc!**Wj>kkhP$8<&@VH-5`^x+RLbzNm1wSF^!FYGj3Sb)tg57x{>X%WYc0sa<| z;oVEUJp*n1uj}47jPEv+)N+-8nqt2D!xs~s>!fQ2k5kI5bg+q*^lWUFD)0FoZ{*v~ zZxR}=Y}Rq`(&xNKj31>ElScbCSH&sOAOBoC-obkZn!(-mKl4Fl3d0gn(jmWn{e%~U zHjDTNT%gS`kfyk(s1#y!CVaFxFMiKDySXyi6`^Zx&J{*%ne3Nbo=z5}s;v~RhQsut zoeBZmmOqY+PtNnvob1Tz*+F@HYhgq#9S=}eYlqvbUcE#Yg~r-!ZLF@`|7vWyTd3p7 zO95TDl6p`;Lz~f&3oDnC?W0H^g|cOYF%V^#(gjNha%lyIda$D;08isWP|oOFj@uWO z2*GA82*fqZaiL@|TyeklC`pkqE_v|Y^h=5GXV2ceQIN+$rD0(S2Ijxs7M|b!q4@1E z*nmW5n z2`;B#Mb>A-;qQsRuYC2I5FDOyS;T)QiONf934R#%172{~qFfFdgb;MTJd~hD3PRdp z@$Pr+AG&fAOIN3L+U+9O+~1YFVZ^bWoxxnL`r?jeG>EYGv5ufZvxaw^=450yV!B2} zzq+4?f}8@zzKR0%C)RwJOF{7w!B|0wJ#_r=%h?a z($z#^4kwm(0O{VTr4Bg(m9D%2XiAnferSN9N z0p;ha`J9Nd7|1kiH1P35FYyeDosy`x$XTb%8Ta%p*z)?o+XwBYY@&hdg>1u$F&%`aZTCWv!Y0>=AE0TUZ$JH;08$m zuGIMy_VuOv7t1|zNTBBz?SveqFVBRQ-oG25^U|chlF@l5BP$zUUDL<8RKLmJ$l1`R zYgwQEnX+<;!Fp*iBZHBLm$y&gSM=!*?Th@xAu&4kHp0}Wuko2OI^dcbYB^aM?eRju zGvtOV2wA!DWv`4!?U$)hPV^Qv1x6Sc0xB6h%Cy|9zrW zhnB&ls>v$TjK$AS!(&oM1S?|?OX!ccTG0E22f!}QvI)L@Ysb;MFlmPPXJ2XLc@m63 z+G*V|{&5821)X)rgIMRxw%|v``kKf=@z{I&r>9{?pK1#s;-)0nxkgDEB4VelTi&(( zq_^T_e1}7h2g>4&UncI<969GBsf)R`T9~zE=~1r%8(c!@OPF>$J=GY;?iOs zi!TB*R0H|E+R%`vc9-A!)t#N4hj`xnL6hV)VThP|kl&d_+^OU>c>Y4>)#WK}D*kj5 z>N`)s=!cg5ZC?SBRD?MXR9p*e@RVJhTM!Ld2nwA&Q!g(%iM~mK^Z|;mcj0^6+8PX+mDCW+b!qTDf#@V1f4* z*8jPg&XeV$HbrEv7%*eD)k9MDjAEe{+YMMCrbxV@KF{ZS1(qhl%?Fg+=`uDHl+s-h zMVI~0+ayz3CO?;z!MwBnSi3jCS5eMC^f{V!4sz}u<}cHVx_%sg;o4Kc|7QzXTVdw7 zu;_B|>t~nE^>^@Sr~aZDw&rBTsu8>Pt%=)CyP0uQ6fuhPHF5mh}MCcS0}j($ZJKD!XCZ)h_n;G8+!g z7r;Ziax8;0Y$|Pd5LZX>(rXngkV9=IFthUd_yr0}7O=Jl;`;l}i~+;&p2P)*@$pXi zbh1ZVhh9Bvo!r&ueP9_Hv^p?lgw+jrdQ&V(<`%Xl^;j4mzZ6Y6Lo{mZ&yUm)K5XhpN6bGjL(*B=XrgA z+VPnbuhBBlJN{tmQ1E`m3z$k;6R@#3t8yG2L}6p~NZY%W(Io$7!%ZAV%y-w2Cdh6$ zE%nuQM6F%lT8=fD)v?Q~&EdS)=cl-gf+Nf8<0Zf_FJApzO61hg9nC3-N%<1qM^Ci|xl$rFPhmz9Qo6hrgeYNTnJFyE=_6f1TbrbwR@*s{es z*>87sFk=B_C?TNN-r^wNBFJOy)Qvq114b-=sfH$bZL2{;1KDdH(|Fvhr~3HB$WZet zb9!m)&F9Gebzxy19v)RgGm+K(6|QSJypj%;9Jkp9eJI?b(lhPP;$S1n7cYdGBOT2Y zzQf#dmZXo_lK?MEvCL-B!XmYxeU%DvMXUF$4=?BHDi18q7;;6=i~1!cd}ejO{+iE2xSPKFqZca>@`>a7u&Dui*!H)JK{W?ms|SL50GmT>bsk6YW6<<|9% zdSlr1QSwNVy{OCd$igpFbTtm1?Z2d!#}v`F z3P0wj<2!E7s1sdA^W9sC=OCK)*x#*G|z(BsJW!z!1 zP51o$W1%jUOH&M?p_GQ0>0gdoTsl93E#x!umqaN@-gIW(&g8WWpNM+_zuFk08n$;m zh0#f42U)&yl>cRgpMumtOSZ)+e|7|#PB4!bSX7$TuK6%wt<{nNr+V$JBr!eo*QV}= zGq`saC>#$Qo!&tzTKdKOe6Hvn*_VU0j4J%ix=%jrm?FqufgY)cn=>^?+0L|=Ci_`z z9wO39-ST2M+hH8a92c_o+n0tHM7!(hJC2`cfpuyOZ`5NNT8-?$b!vmDN~!PsSnOK8 zRyXZ)XU!sBZ#^!FrUtjY+0lasT%9t_Cru5@Vh zl?)#-zj2)EG{7=d&&wG2map!@tp4tUe7MoQ@F(Jsg=BYMEgG_Rs~2a1r_aBOuLUI? 
z)3@x(0d6v#a9?*#qEX~n zf-Q9A6L8KXB-AMIRH;#fJUc6&z*G3z#VFQ~Pr~yV4FjG1XvTT>#K}#kS^E(o`B9IH zdHSS~cU3v{VxqzR224g7?3gH?9thYW-q>_q)0CEdfTZ<)Ls?$5(m^y$bPna0JmzetRP5p-yPD_oO$vtecm&poDTDzECk-4Dv zD?7Q;YKkP_P5jcNhjyq{l~-QPpvw4XraRsygzY~J>@&DnUzrBq&I1ACQ}c@hb}Pez z&*X41I;Srf@AUP#sKf$g*|J5!U=O9ThK7=Y28&t3-tB^xv|BDIQUJjcVaf@L=gvu_ zi;g7aCkBZM_27cDqPvc~ZHuy~Opm7Z;^O-JwqM#QxJ%Y^oCnA^1iZE)BaT7fe+IdN7(GeT~g_=x|3=!wVE+ZIMNc`1<#U^ipX^28FM?Y z6UwD#VUssuVh^cv)aZ|OlvROZ6TJ5T$wFW4Gb7E#%^5qEJVzzkFjH<0sG1(av?Lb zKPkCYX0N@Zbi$--?9lLs^G{~0;(RzuurA4VV)xGUV+Xw!Cy^!ai;MM~+Iqe7Zztu}Y|GIR)AvN!*ZK&F(pP>VvrjB;(ADJGUjeT+JQQHXzJ)O_w zy)SArQ2LhPHxcA0tFHT%wccZsdxgAMjy}S6m>Jv-+z7dcX)v=xE-%?o447|CrUug* zuiw6&_Kr%MFEP>8iYC=#%#dw)tUXaR1&^=umH84V_^@=*?o|yh`(rcZ!l}1qZU^TR zYKv@lpjN69Wa{6TqVxe8^RRWjySOx4e0 zI@gisFi;lrIL{GC?&BGF;HhFozv8ot%!mL%Er-2{m!Q_V%*`j-C!X)i)@`=hcDni9 z81->kYC8@xD~m049}nKo5FOZqEiYyPgPp!_Z5}ynVNwcpbC}8&aTmtII&{*uv@AW4 zypa^EYkz*|mc&r&QkRq{)~-#c*A6|RT1&m{JZZ!us38eYEYfcKnk@kKz<8KtpWk5JSi9li)VPQ#DG*5d7C zu){sGIb%FA`$3`%ya0L=M4(1HjnKiRhE*NNPG7+jSrrh?OF~1#C3<|NW$#gEl+dzs z+=%?Zo0&_#2oHoPa?$#Nf!`wBOmLw^e^`hXbKHp58%lS^Z6k8B^FRcYg@qz^54s4_ z>t#stLTrZYOUs^w8$%m7i&_h;3*XRVQr;-sMHv(p`g>aij(*_DaW1U$g<_-hnA=X| zz`W!?)G}tliMq7J1#^Kzi#gpY zUQoAnE=r|s4=ybDlq6*6l2&-0f<ta2!-bXB>%XXGbt7mW*dp9Ve+Wcc)$;?Caf$5SiWG*UHB2|^dPsV=rZ=vTp@l_y}!i()B{^*G$OM`uvktcok!1FEGmo@ITpy8qD$sJ7%W>)nb(Vn_E~dKPA3d6L7dBlqk~`cfs=iKGSvIzp^{Q6CEtKFN z)pHW}Ni{Qu9&=-W9Wi=nF+3poQHZ|2by3&J4BmQs$AKj*5BXHd`wC~Tr@HXTC+sY5 z1k-GBktxZ-&3eW=U;^I`JxpWzH%t&XR^ZqReBPGoaeVNyOlMtw zQ#~62uNV6?y`znOt<)tNS^a3PxFV;et}UiT*J1Bb5BGezFSr<3fh<6e@scq?^rx~_ zK`nPQFFmf;EB==HI>bmvsC4jHUADv;*_DJ2bLU*NTc_~$co&N-Z4R%P6d9usbvB?d z=9<1LEuZSzL`dL@X8lvg4gqp6I8>(bat~>t=5z4Qr0*_V#|wIS4IuwzzO5guL=$=Hvcil96Z)c&75#jEVBgcPqkG0eoj zmgh1sElm(hHBuw!jClR=Hr>$0rV!0`F}n!-PY1NgrFmt^=c_?s&9^PP?Y{Serli?t z_;C}WiVJfUcHS&IAoXCoN{_|1leF|_s;&~FdiSF)wn$suWiTO54Ygh9quutMb)eZtg9& zB6@Ho0+~9%cf;rmUzI6LiZLGFo#q2lX$$;ZBZfalmW3oeFmWyFH6i}pW397sfI9i$ zvE7^}Hv!nZMIil#21!?m9;I+YlGowB*Z$bl`(z> zbQpCiTpb=>eervm38>7VG%JEvOy_XdiTkyx_3gxO+sb1qoB}z_l^*lR3NPNT4o*#v z*;#E(Kg_;^8*+sLJl;+OX` tJC&R?vN?ggFlXu*G8At`@l~65FcfKVigP`a)Ciu^DP=XKbVbP3{{wjq2=D*^ From 914c6dfe45f36f61da7dc3dd59f1043543c354a9 Mon Sep 17 00:00:00 2001 From: Dimitris Athanasiou Date: Wed, 9 Feb 2022 18:47:02 +0200 Subject: [PATCH 012/167] [ML] Retry anomaly detection job recovery during relocation (#83456) When an anomaly detection job is relocating to a new node, we revert the job to its current snapshot if it exists, or we reset the job. Both these actions may fail if ML indices are not available. A job relocation is often triggered because of a rolling upgrade. During a rolling upgrade it is probably that data nodes containing shards of the ML indices may be temporarily unavailable which causes this operation to fail. In order not to leave the job in a reverting/resetting state in such a scenario, this commit adds retrying logic. We retry reverting/resetting the job for up to 15 minutes which should be enough for the cluster to manage to delete any intervening ML data for the job. 
---
 docs/changelog/83456.yaml                     |   5 +
 .../task/OpenJobPersistentTasksExecutor.java  | 147 ++++++++++++------
 2 files changed, 104 insertions(+), 48 deletions(-)
 create mode 100644 docs/changelog/83456.yaml

diff --git a/docs/changelog/83456.yaml b/docs/changelog/83456.yaml
new file mode 100644
index 0000000000000..dc138f1a98d10
--- /dev/null
+++ b/docs/changelog/83456.yaml
@@ -0,0 +1,5 @@
+pr: 83456
+summary: Retry anomaly detection job recovery during relocation
+area: Machine Learning
+type: bug
+issues: []
diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/task/OpenJobPersistentTasksExecutor.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/task/OpenJobPersistentTasksExecutor.java
index 4ea52aec6e59e..47f250673f2bc 100644
--- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/task/OpenJobPersistentTasksExecutor.java
+++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/task/OpenJobPersistentTasksExecutor.java
@@ -15,12 +15,14 @@
 import org.elasticsearch.ResourceNotFoundException;
 import org.elasticsearch.Version;
 import org.elasticsearch.action.ActionListener;
+import org.elasticsearch.action.support.RetryableAction;
 import org.elasticsearch.client.internal.Client;
 import org.elasticsearch.cluster.ClusterState;
 import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
 import org.elasticsearch.cluster.node.DiscoveryNode;
 import org.elasticsearch.cluster.service.ClusterService;
 import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.core.TimeValue;
 import org.elasticsearch.index.engine.DocumentMissingException;
 import org.elasticsearch.license.XPackLicenseState;
 import org.elasticsearch.persistent.AllocatedPersistentTask;
@@ -305,12 +307,16 @@ private void runJob(JobTask jobTask, JobState jobState, OpenJobAction.JobParams
 
             // This job has a running datafeed attached to it.
             // In order to prevent gaps in the model we revert to the current snapshot deleting intervening results.
-            revertToCurrentSnapshot(jobTask.getJobId(), ActionListener.wrap(response -> openJob(jobTask), e -> {
-                if (autodetectProcessManager.isNodeDying() == false) {
-                    logger.error(new ParameterizedMessage("[{}] failed to revert to current snapshot", jobTask.getJobId()), e);
-                    failTask(jobTask, "failed to revert to current snapshot");
-                }
-            }));
+            RevertToCurrentSnapshotAction revertToCurrentSnapshotAction = new RevertToCurrentSnapshotAction(
+                jobTask,
+                ActionListener.wrap(response -> openJob(jobTask), e -> {
+                    if (autodetectProcessManager.isNodeDying() == false) {
+                        logger.error(new ParameterizedMessage("[{}] failed to revert to current snapshot", jobTask.getJobId()), e);
+                        failTask(jobTask, "failed to revert to current snapshot");
+                    }
+                })
+            );
+            revertToCurrentSnapshotAction.run();
         } else {
             openJob(jobTask);
         }
@@ -454,51 +460,96 @@ private void verifyCurrentSnapshotVersion(String jobId, ActionListener<Boolean>
         executeAsyncWithOrigin(client, ML_ORIGIN, GetJobsAction.INSTANCE, request, jobListener);
     }
 
-    private void revertToCurrentSnapshot(String jobId, ActionListener<Boolean> listener) {
-        ActionListener<GetJobsAction.Response> jobListener = ActionListener.wrap(jobResponse -> {
-            List<Job> jobPage = jobResponse.getResponse().results();
-            // We requested a single concrete job so if it didn't exist we would get an error
-            assert jobPage.size() == 1;
+    /**
+     * This action reverts a job to its current snapshot if one exists, or resets the job.
+     * This action is retryable. As this action happens when a job is relocating to another node,
+     * it is common that this happens during rolling upgrades. During a rolling upgrade, it is
+     * probable that data nodes containing shards of the ML indices might temporarily be unavailable,
+     * which results in failures of the revert/reset action. Thus, it is important to retry a few times
+     * so that the job manages to recover successfully without user intervention.
+     */
+    private class RevertToCurrentSnapshotAction extends RetryableAction<Boolean> {
+
+        private final JobTask jobTask;
+        private volatile boolean hasFailedAtLeastOnce;
+
+        private RevertToCurrentSnapshotAction(JobTask jobTask, ActionListener<Boolean> listener) {
+            super(
+                logger,
+                client.threadPool(),
+                // No need to wait before first execution
+                TimeValue.timeValueMillis(1),
+                // Retry for 15 minutes. This should be enough time for at least some replicas
+                // to be available so that data deletion can succeed.
+                TimeValue.timeValueMinutes(15),
+                listener,
+                MachineLearning.UTILITY_THREAD_POOL_NAME
+            );
+            this.jobTask = Objects.requireNonNull(jobTask);
+        }
 
-            String jobSnapshotId = jobPage.get(0).getModelSnapshotId();
-            if (jobSnapshotId == null && isMasterNodeVersionOnOrAfter(ResetJobAction.VERSION_INTRODUCED)) {
-                logger.info("[{}] job has running datafeed task; resetting as no snapshot exists", jobId);
-                ResetJobAction.Request request = new ResetJobAction.Request(jobId);
-                request.setSkipJobStateValidation(true);
-                request.masterNodeTimeout(PERSISTENT_TASK_MASTER_NODE_TIMEOUT);
-                request.timeout(PERSISTENT_TASK_MASTER_NODE_TIMEOUT);
-                executeAsyncWithOrigin(
-                    client,
-                    ML_ORIGIN,
-                    ResetJobAction.INSTANCE,
-                    request,
-                    ActionListener.wrap(response -> listener.onResponse(true), listener::onFailure)
-                );
-            } else {
-                logger.info("[{}] job has running datafeed task; reverting to current snapshot", jobId);
-                RevertModelSnapshotAction.Request request = new RevertModelSnapshotAction.Request(
-                    jobId,
-                    jobSnapshotId == null ? ModelSnapshot.EMPTY_SNAPSHOT_ID : jobSnapshotId
-                );
-                request.setForce(true);
-                request.setDeleteInterveningResults(true);
-                request.masterNodeTimeout(PERSISTENT_TASK_MASTER_NODE_TIMEOUT);
-                request.timeout(PERSISTENT_TASK_MASTER_NODE_TIMEOUT);
-                executeAsyncWithOrigin(
-                    client,
-                    ML_ORIGIN,
-                    RevertModelSnapshotAction.INSTANCE,
-                    request,
-                    ActionListener.wrap(response -> listener.onResponse(true), listener::onFailure)
+        @Override
+        public void tryAction(ActionListener<Boolean> listener) {
+            ActionListener<GetJobsAction.Response> jobListener = ActionListener.wrap(jobResponse -> {
+                List<Job> jobPage = jobResponse.getResponse().results();
+                // We requested a single concrete job so if it didn't exist we would get an error
+                assert jobPage.size() == 1;
+
+                String jobSnapshotId = jobPage.get(0).getModelSnapshotId();
+                if (jobSnapshotId == null && isMasterNodeVersionOnOrAfter(ResetJobAction.VERSION_INTRODUCED)) {
+                    logger.info("[{}] job has running datafeed task; resetting as no snapshot exists", jobTask.getJobId());
+                    ResetJobAction.Request request = new ResetJobAction.Request(jobTask.getJobId());
+                    request.setSkipJobStateValidation(true);
+                    request.masterNodeTimeout(PERSISTENT_TASK_MASTER_NODE_TIMEOUT);
+                    request.timeout(PERSISTENT_TASK_MASTER_NODE_TIMEOUT);
+                    executeAsyncWithOrigin(
+                        client,
+                        ML_ORIGIN,
+                        ResetJobAction.INSTANCE,
+                        request,
+                        ActionListener.wrap(response -> listener.onResponse(true), listener::onFailure)
+                    );
+                } else {
+                    logger.info("[{}] job has running datafeed task; reverting to current snapshot", jobTask.getJobId());
+                    RevertModelSnapshotAction.Request request = new RevertModelSnapshotAction.Request(
+                        jobTask.getJobId(),
+                        jobSnapshotId == null ? ModelSnapshot.EMPTY_SNAPSHOT_ID : jobSnapshotId
+                    );
+                    request.setForce(true);
+                    request.setDeleteInterveningResults(true);
+                    request.masterNodeTimeout(PERSISTENT_TASK_MASTER_NODE_TIMEOUT);
+                    request.timeout(PERSISTENT_TASK_MASTER_NODE_TIMEOUT);
+                    executeAsyncWithOrigin(
+                        client,
+                        ML_ORIGIN,
+                        RevertModelSnapshotAction.INSTANCE,
+                        request,
+                        ActionListener.wrap(response -> listener.onResponse(true), listener::onFailure)
+                    );
+                }
+            }, error -> listener.onFailure(ExceptionsHelper.serverError("[{}] error getting job", error, jobTask.getJobId())));
+
+            // We need to refetch the job in order to learn what is its current model snapshot
+            // as the one that exists in the task params is outdated.
+            GetJobsAction.Request request = new GetJobsAction.Request(jobTask.getJobId());
+            request.masterNodeTimeout(PERSISTENT_TASK_MASTER_NODE_TIMEOUT);
+            executeAsyncWithOrigin(client, ML_ORIGIN, GetJobsAction.INSTANCE, request, jobListener);
+        }
+
+        @Override
+        public boolean shouldRetry(Exception e) {
+            if (jobTask.isClosing() || jobTask.isVacating()) {
+                return false;
+            }
+            if (hasFailedAtLeastOnce == false) {
+                hasFailedAtLeastOnce = true;
+                logger.error(
+                    new ParameterizedMessage("[{}] error reverting job to its current snapshot; attempting retry", jobTask.getJobId()),
+                    e
                 );
             }
-        }, error -> listener.onFailure(ExceptionsHelper.serverError("[{}] error getting job", error, jobId)));
-
-        // We need to refetch the job in order to learn what is its current model snapshot
-        // as the one that exists in the task params is outdated.
-        GetJobsAction.Request request = new GetJobsAction.Request(jobId);
-        request.masterNodeTimeout(PERSISTENT_TASK_MASTER_NODE_TIMEOUT);
-        executeAsyncWithOrigin(client, ML_ORIGIN, GetJobsAction.INSTANCE, request, jobListener);
+            return true;
+        }
     }
 
     // Exceptions that occur while the node is dying, i.e. after the JVM has received a SIGTERM,
after the JVM has received a SIGTERM, From 13018e73324d86e916b72f7534d9d6275a99e6d0 Mon Sep 17 00:00:00 2001 From: Salvatore Campagna <93581129+salvatore-campagna@users.noreply.github.com> Date: Wed, 9 Feb 2022 17:55:04 +0100 Subject: [PATCH 013/167] fix: ip prefix bucket reduction (#83637) * fix: ip prefix bucket reduction When reducing buckets coming from different shards, we need to aggregate the doc count from all the buckets with the same key. Here, when calling the 'reduceBucket' method, all the 'currentBuckets' share the same key. As a result, we just need to create a new bucket with the correct doc count value. This bug does not affect aggregations in a cluster with just one shard, because in that case 'currentBuckets' is just a list containing a single bucket with a specific key, coming from that single shard. For this reason, we also increase the number of shards in the YAML integration test. This is a fix for bug #83321. --- docs/changelog/83637.yaml | 5 ++ .../test/search.aggregation/450_ip_prefix.yml | 76 +++++++++---------- .../bucket/prefix/InternalIpPrefix.java | 17 ++++- .../prefix/IpPrefixAggregatorTests.java | 47 +++++++++++- 4 files changed, 105 insertions(+), 40 deletions(-) create mode 100644 docs/changelog/83637.yaml diff --git a/docs/changelog/83637.yaml b/docs/changelog/83637.yaml new file mode 100644 index 0000000000000..3ebd982bf7178 --- /dev/null +++ b/docs/changelog/83637.yaml @@ -0,0 +1,5 @@ +pr: 83637 +summary: "Fix: ip prefix bucket reduction" +area: Aggregations +type: bug +issues: [] diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/450_ip_prefix.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/450_ip_prefix.yml index 0c1d09b2e770f..33c6c5d78f897 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/450_ip_prefix.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/450_ip_prefix.yml @@ -22,31 +22,31 @@ setup: refresh: true body: - { "index": { } } - - { "ipv4": "192.168.1.10", "ipv6": "2001:db8:a4f8:112a:6001:0:12:7f10", "value": 10, ip: "192.168.1.10" } + - { "ipv4": "192.168.1.10", "ipv6": "2001:db8:a4f8:112a:6001:0:12:7f10", "value": 10, "ip": "192.168.1.10" } - { "index": { } } - - { "ipv4": "192.168.1.12", "ipv6": "2001:db8:a4f8:112a:6001:0:12:7f12", "value": 20, ip: "2001:db8:a4f8:112a:6001:0:12:7f12" } + - { "ipv4": "192.168.1.12", "ipv6": "2001:db8:a4f8:112a:6001:0:12:7f12", "value": 20, "ip": "2001:db8:a4f8:112a:6001:0:12:7f12" } - { "index": { } } - - { "ipv4": "192.168.1.33", "ipv6": "2001:db8:a4f8:112a:6001:0:12:7f33", "value": 40, ip: "192.168.1.33" } + - { "ipv4": "192.168.1.33", "ipv6": "2001:db8:a4f8:112a:6001:0:12:7f33", "value": 40, "ip": "192.168.1.33" } - { "index": { } } - - { "ipv4": "192.168.1.10", "ipv6": "2001:db8:a4f8:112a:6001:0:12:7f10", "value": 20, ip: "2001:db8:a4f8:112a:6001:0:12:7f10" } + - { "ipv4": "192.168.1.10", "ipv6": "2001:db8:a4f8:112a:6001:0:12:7f10", "value": 20, "ip": "2001:db8:a4f8:112a:6001:0:12:7f10" } - { "index": { } } - - { "ipv4": "192.168.1.33", "ipv6": "2001:db8:a4f8:112a:6001:0:12:7f33", "value": 70, ip: "192.168.1.33" } + - { "ipv4": "192.168.1.33", "ipv6": "2001:db8:a4f8:112a:6001:0:12:7f33", "value": 70, "ip": "192.168.1.33" } - { "index": { } } - - { "ipv4": "192.168.2.41", "ipv6": "2001:db8:a4f8:112c:6001:0:12:7f41", "value": 20, ip: "2001:db8:a4f8:112c:6001:0:12:7f41" } + - { "ipv4": "192.168.2.41", "ipv6": "2001:db8:a4f8:112c:6001:0:12:7f41", "value": 20,
"ip": "2001:db8:a4f8:112c:6001:0:12:7f41" } - { "index": { } } - - { "ipv4": "192.168.2.10", "ipv6": "2001:db8:a4f8:112c:6001:0:12:7f10", "value": 30, ip: "192.168.2.10" } + - { "ipv4": "192.168.2.10", "ipv6": "2001:db8:a4f8:112c:6001:0:12:7f10", "value": 30, "ip": "192.168.2.10" } - { "index": { } } - - { "ipv4": "192.168.2.23", "ipv6": "2001:db8:a4f8:112c:6001:0:12:7f23", "value": 50, ip: "2001:db8:a4f8:112c:6001:0:12:7f23" } + - { "ipv4": "192.168.2.23", "ipv6": "2001:db8:a4f8:112c:6001:0:12:7f23", "value": 50, "ip": "2001:db8:a4f8:112c:6001:0:12:7f23" } - { "index": { } } - - { "ipv4": "192.168.2.41", "ipv6": "2001:db8:a4f8:112c:6001:0:12:7f41", "value": 60, ip: "192.168.2.41" } + - { "ipv4": "192.168.2.41", "ipv6": "2001:db8:a4f8:112c:6001:0:12:7f41", "value": 60, "ip": "192.168.2.41" } - { "index": { } } - - { "ipv4": "192.168.2.10", "ipv6": "2001:db8:a4f8:112c:6001:0:12:7f10", "value": 10, ip: "2001:db8:a4f8:112c:6001:0:12:7f10" } + - { "ipv4": "192.168.2.10", "ipv6": "2001:db8:a4f8:112c:6001:0:12:7f10", "value": 10, "ip": "2001:db8:a4f8:112c:6001:0:12:7f10" } --- "IPv4 prefix": - skip: - version: " - 8.0.99" - reason: "added in 8.1.0" + version: " - 8.2.0" + reason: "Temporarily skipping preparing to backport to 8.1" - do: search: body: @@ -79,8 +79,8 @@ setup: # network part will just 0s. "IPv4 prefix with incorrect is_ipv6": - skip: - version: " - 8.0.99" - reason: "added in 8.1.0" + version: " - 8.2.0" + reason: "Temporarily skipping preparing to backport to 8.1" - do: search: body: @@ -104,8 +104,8 @@ setup: --- "IPv4 short prefix": - skip: - version: " - 8.0.99" - reason: "added in 8.1.0" + version: " - 8.2.0" + reason: "Temporarily skipping preparing to backport to 8.1" - do: search: body: @@ -141,8 +141,8 @@ setup: --- "IPv6 prefix": - skip: - version: " - 8.0.99" - reason: "added in 8.1.0" + version: " - 8.2.0" + reason: "Temporarily skipping preparing to backport to 8.1" - do: search: body: @@ -175,8 +175,8 @@ setup: # with everything else being 0s. 
"IPv6 prefix with incorrect is_ipv6": - skip: - version: " - 8.0.99" - reason: "added in 8.1.0" + version: " - 8.2.0" + reason: "Temporarily skipping preparing to backport to 8.1" - do: search: body: @@ -201,8 +201,8 @@ setup: --- "Invalid IPv4 prefix": - skip: - version: " - 8.0.99" - reason: "added in 8.1.0" + version: " - 8.2.0" + reason: "Temporarily skipping preparing to backport to 8.1" - do: catch: /\[prefix_length\] must be in range \[0, 32\] while value is \[44\]/ search: @@ -219,8 +219,8 @@ setup: --- "Invalid IPv6 prefix": - skip: - version: " - 8.0.99" - reason: "added in 8.1.0" + version: " - 8.2.0" + reason: "Temporarily skipping preparing to backport to 8.1" - do: catch: /\[prefix_length] must be in range \[0, 128\] while value is \[170]/ search: @@ -236,8 +236,8 @@ setup: --- "IPv4 prefix sub aggregation": - skip: - version: " - 8.0.99" - reason: "added in 8.1.0" + version: " - 8.2.0" + reason: "Temporarily skipping preparing to backport to 8.1" - do: search: body: @@ -278,8 +278,8 @@ setup: --- "IPv6 prefix sub aggregation": - skip: - version: " - 8.0.99" - reason: "added in 8.1.0" + version: " - 8.2.0" + reason: "Temporarily skipping preparing to backport to 8.1" - do: search: body: @@ -319,8 +319,8 @@ setup: --- "IPv6 prefix metric sub aggregation": - skip: - version: " - 8.0.99" - reason: "added in 8.1.0" + version: " - 8.2.0" + reason: "Temporarily skipping preparing to backport to 8.1" - do: search: body: @@ -356,8 +356,8 @@ setup: --- "IPv4 prefix appended": - skip: - version: " - 8.0.99" - reason: "added in 8.1.0" + version: " - 8.2.0" + reason: "Temporarily skipping preparing to backport to 8.1" - do: search: body: @@ -388,8 +388,8 @@ setup: --- "IPv6 prefix appended": - skip: - version: " - 8.0.99" - reason: "added in 8.1.0" + version: " - 8.2.0" + reason: "Temporarily skipping preparing to backport to 8.1" - do: search: body: @@ -420,8 +420,8 @@ setup: --- "Mixed IPv4 and IPv6 with is_ipv6 false": - skip: - version: " - 8.0.99" - reason: "added in 8.1.0" + version: " - 8.2.0" + reason: "Temporarily skipping preparing to backport to 8.1" - do: search: body: @@ -451,8 +451,8 @@ setup: --- "Mixed IPv4 and IPv6 with is_ipv6 true": - skip: - version: " - 8.0.99" - reason: "added in 8.1.0" + version: " - 8.2.0" + reason: "Temporarily skipping preparing to backport to 8.1" - do: search: body: diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/prefix/InternalIpPrefix.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/prefix/InternalIpPrefix.java index a99c78d7150a9..ad026ea9a194c 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/prefix/InternalIpPrefix.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/prefix/InternalIpPrefix.java @@ -318,15 +318,30 @@ public Bucket createBucket(InternalAggregations aggregations, Bucket prototype) ); } + private Bucket createBucket(Bucket prototype, InternalAggregations aggregations, long docCount) { + return new Bucket( + format, + prototype.key, + prototype.keyed, + prototype.isIpv6, + prototype.prefixLength, + prototype.appendPrefixLength, + docCount, + aggregations + ); + } + @Override protected Bucket reduceBucket(List buckets, AggregationReduceContext context) { assert buckets.size() > 0; List aggregations = new ArrayList<>(buckets.size()); + long docCount = 0; for (InternalIpPrefix.Bucket bucket : buckets) { + docCount += bucket.docCount; aggregations.add(bucket.getAggregations()); } InternalAggregations aggs = 
InternalAggregations.reduce(aggregations, context); - return createBucket(aggs, buckets.get(0)); + return createBucket(buckets.get(0), aggs, docCount); } @Override diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/prefix/IpPrefixAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/prefix/IpPrefixAggregatorTests.java index d6876d135bff0..d7acbf3d51c98 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/prefix/IpPrefixAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/prefix/IpPrefixAggregatorTests.java @@ -33,6 +33,7 @@ import java.time.Instant; import java.time.ZoneOffset; import java.util.Collections; +import java.util.Comparator; import java.util.List; import java.util.Objects; import java.util.Set; @@ -43,6 +44,10 @@ public class IpPrefixAggregatorTests extends AggregatorTestCase { + private static final Comparator IP_ADDRESS_KEY_COMPARATOR = Comparator.comparing( + InternalIpPrefix.Bucket::getKeyAsString + ); + private static final class TestIpDataHolder { private final String ipAddressAsString; private final InetAddress ipAddress; @@ -212,6 +217,10 @@ public void testIpv4Addresses() throws IOException { assertEquals(expectedSubnets.size(), ipPrefix.getBuckets().size()); assertTrue(ipAddressesAsString.containsAll(expectedSubnets)); assertTrue(expectedSubnets.containsAll(ipAddressesAsString)); + assertEquals( + ipPrefix.getBuckets().stream().sorted(IP_ADDRESS_KEY_COMPARATOR).map(InternalIpPrefix.Bucket::getDocCount).toList(), + List.of(1L, 1L, 4L, 1L) + ); }, fieldType); } @@ -261,6 +270,10 @@ public void testIpv6Addresses() throws IOException { assertEquals(expectedSubnets.size(), ipPrefix.getBuckets().size()); assertTrue(ipAddressesAsString.containsAll(expectedSubnets)); assertTrue(expectedSubnets.containsAll(ipAddressesAsString)); + assertEquals( + ipPrefix.getBuckets().stream().sorted(IP_ADDRESS_KEY_COMPARATOR).map(InternalIpPrefix.Bucket::getDocCount).toList(), + List.of(2L, 1L, 2L) + ); }, fieldType); } @@ -313,6 +326,10 @@ public void testZeroPrefixLength() throws IOException { assertEquals(expectedSubnets.size(), ipPrefix.getBuckets().size()); assertTrue(ipAddressesAsString.containsAll(expectedSubnets)); assertTrue(expectedSubnets.containsAll(ipAddressesAsString)); + assertEquals( + ipPrefix.getBuckets().stream().sorted(IP_ADDRESS_KEY_COMPARATOR).map(InternalIpPrefix.Bucket::getDocCount).toList(), + List.of((long) ipAddresses.size()) + ); }, fieldType); } @@ -365,6 +382,10 @@ public void testIpv4MaxPrefixLength() throws IOException { assertEquals(expectedSubnets.size(), ipPrefix.getBuckets().size()); assertTrue(ipAddressesAsString.containsAll(expectedSubnets)); assertTrue(expectedSubnets.containsAll(ipAddressesAsString)); + assertEquals( + ipPrefix.getBuckets().stream().sorted(IP_ADDRESS_KEY_COMPARATOR).map(InternalIpPrefix.Bucket::getDocCount).toList(), + List.of(1L, 1L, 1L, 2L, 1L, 1L) + ); }, fieldType); } @@ -414,6 +435,10 @@ public void testIpv6MaxPrefixLength() throws IOException { assertEquals(expectedSubnets.size(), ipPrefix.getBuckets().size()); assertTrue(ipAddressesAsString.containsAll(expectedSubnets)); assertTrue(expectedSubnets.containsAll(ipAddressesAsString)); + assertEquals( + ipPrefix.getBuckets().stream().sorted(IP_ADDRESS_KEY_COMPARATOR).map(InternalIpPrefix.Bucket::getDocCount).toList(), + List.of(1L, 1L, 1L, 1L, 1L) + ); }, fieldType); } @@ -471,6 +496,10 @@ public void testAggregateOnIpv4Field() throws IOException { 
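// A minimal sketch of the reduction logic fixed above (illustrative only, not part of this
// patch; 'sameKeyBuckets' is a hypothetical local): reduceBucket now sums the doc counts of
// all same-key shard buckets instead of keeping only the first bucket's count. Conceptually:
//
//     long docCount = 0;
//     for (InternalIpPrefix.Bucket bucket : sameKeyBuckets) {
//         docCount += bucket.getDocCount();
//     }
//     // the reduced bucket is then rebuilt from the first bucket's prototype,
//     // but carrying the summed docCount
//
// The assertions added to the tests below verify these summed per-key doc counts.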
assertEquals(expectedSubnets.size(), ipPrefix.getBuckets().size()); assertTrue(ipAddressesAsString.containsAll(expectedSubnets)); assertTrue(expectedSubnets.containsAll(ipAddressesAsString)); + assertEquals( + ipPrefix.getBuckets().stream().sorted(IP_ADDRESS_KEY_COMPARATOR).map(InternalIpPrefix.Bucket::getDocCount).toList(), + List.of(1L, 1L, 4L, 1L) + ); }, fieldTypes); } @@ -525,6 +554,10 @@ public void testAggregateOnIpv6Field() throws IOException { assertEquals(expectedSubnets.size(), ipPrefix.getBuckets().size()); assertTrue(ipAddressesAsString.containsAll(expectedSubnets)); assertTrue(expectedSubnets.containsAll(ipAddressesAsString)); + assertEquals( + ipPrefix.getBuckets().stream().sorted(IP_ADDRESS_KEY_COMPARATOR).map(InternalIpPrefix.Bucket::getDocCount).toList(), + List.of(2L, 1L, 2L) + ); }, fieldTypes); } @@ -898,11 +931,12 @@ public void testMinDocCount() throws IOException { // GIVEN final int prefixLength = 16; final String field = "ipv4"; + int minDocCount = 2; final IpPrefixAggregationBuilder aggregationBuilder = new IpPrefixAggregationBuilder("ip_prefix").field(field) .isIpv6(false) .keyed(randomBoolean()) .appendPrefixLength(false) - .minDocCount(2) + .minDocCount(minDocCount) .prefixLength(prefixLength); final MappedFieldType fieldType = new IpFieldMapper.IpFieldType(field); final List ipAddresses = List.of( @@ -941,6 +975,13 @@ public void testMinDocCount() throws IOException { assertEquals(expectedSubnets.size(), ipPrefix.getBuckets().size()); assertTrue(ipAddressesAsString.containsAll(expectedSubnets)); assertTrue(expectedSubnets.containsAll(ipAddressesAsString)); + assertTrue( + ipPrefix.getBuckets().stream().map(InternalIpPrefix.Bucket::getDocCount).allMatch(docCount -> docCount >= minDocCount) + ); + assertEquals( + ipPrefix.getBuckets().stream().sorted(IP_ADDRESS_KEY_COMPARATOR).map(InternalIpPrefix.Bucket::getDocCount).toList(), + List.of(4L) + ); }, fieldType); } @@ -1002,6 +1043,10 @@ public void testAggregationWithQueryFilter() throws IOException { assertEquals(expectedSubnets.size(), ipPrefix.getBuckets().size()); assertTrue(ipAddressesAsString.containsAll(expectedSubnets)); assertTrue(expectedSubnets.containsAll(ipAddressesAsString)); + assertEquals( + ipPrefix.getBuckets().stream().sorted(IP_ADDRESS_KEY_COMPARATOR).map(InternalIpPrefix.Bucket::getDocCount).toList(), + List.of(4L) + ); }, fieldType); } From c7c847cdb3fa35e7ca0591b353eb1726e1acf7d4 Mon Sep 17 00:00:00 2001 From: James Rodewig Date: Wed, 9 Feb 2022 12:20:52 -0500 Subject: [PATCH 014/167] [DOCS] Add 8.1.0 release notes (#83736) Co-authored-by: Elastic Machine --- docs/reference/release-notes.asciidoc | 2 + docs/reference/release-notes/8.1.asciidoc | 284 ++++++++++++++++++++++ 2 files changed, 286 insertions(+) create mode 100644 docs/reference/release-notes/8.1.asciidoc diff --git a/docs/reference/release-notes.asciidoc b/docs/reference/release-notes.asciidoc index fce2f81b6390c..91afec0038c36 100644 --- a/docs/reference/release-notes.asciidoc +++ b/docs/reference/release-notes.asciidoc @@ -6,6 +6,7 @@ This section summarizes the changes in each release. +* <> * <> * <> * <> @@ -15,6 +16,7 @@ This section summarizes the changes in each release. 
-- +include::release-notes/8.1.asciidoc[] include::release-notes/8.0.0.asciidoc[] include::release-notes/8.0.0-rc2.asciidoc[] include::release-notes/8.0.0-rc1.asciidoc[] diff --git a/docs/reference/release-notes/8.1.asciidoc b/docs/reference/release-notes/8.1.asciidoc new file mode 100644 index 0000000000000..adb61d997228d --- /dev/null +++ b/docs/reference/release-notes/8.1.asciidoc @@ -0,0 +1,284 @@ +[[release-notes-8.1.0]] +== {es} version 8.1.0 + +coming[8.1.0] + +Also see <>. + +[[breaking-8.1.0]] +[float] +=== Breaking changes + +Geo:: +* Fields API should return normalize geometries {es-pull}80649[#80649] (issues: {es-issue}79232[#79232], {es-issue}63739[#63739]) + +[[bug-8.1.0]] +[float] +=== Bug fixes + +Aggregations:: +* Fix: reduce float and half-float values to their stored precision {es-pull}83213[#83213] +* Reenable `BooleanTermsIT` {es-pull}83421[#83421] (issue: {es-issue}83351[#83351]) + +Allocation:: +* Fix `updateMinNode` condition {es-pull}80403[#80403] (issue: {es-issue}41194[#41194]) +* Make `*.routing.allocation.*` list-based setting {es-pull}80420[#80420] (issue: {es-issue}77773[#77773]) +* Permit metadata updates on flood-stage-blocked indices {es-pull}81781[#81781] +* Reroute after cluster recovery {es-pull}82856[#82856] (issue: {es-issue}82456[#82456]) + +Authorization:: +* Capture anonymous roles when creating API keys {es-pull}81427[#81427] (issue: {es-issue}81024[#81024]) +* Extend fleet-server service account privileges {es-pull}82600[#82600] + +Autoscaling:: +* Fix autoscaling of follower data streams {es-pull}83302[#83302] (issue: {es-issue}82857[#82857]) + +Geo:: +* Handle bounds properly when grid tiles crosses the dateline {es-pull}83348[#83348] (issue: {es-issue}83299[#83299]) + +Infra/Core:: +* Copy `trace.id` in threadcontext stash {es-pull}83218[#83218] + +Infra/Scripting:: +* Fix duplicated allow lists upon script engine creation {es-pull}82820[#82820] (issue: {es-issue}82778[#82778]) +* Fix plumbing in double and keyword runtime fields for the scripting fields API {es-pull}83392[#83392] + +Machine Learning:: +* Correctly capture min stats for `inference.ingest_processors` in ML usage {es-pull}82352[#82352] +* Fail queued inference requests with cause if the process crashes {es-pull}81584[#81584] +* Fix NLP tokenization `never_split` handling around punctuation {es-pull}82982[#82982] +* Fix `ZeroShotClassificationConfig` update mixing fields {es-pull}82848[#82848] +* Fixes `categorize_text` parameter validation to be parse order independent {es-pull}82628[#82628] (issue: {es-issue}82629[#82629]) +* Return `zxx` for `lang_ident_model_1` if no valid text is found for language identification {es-pull}82746[#82746] (issue: {es-issue}81933[#81933]) +* Validate vocabulary on model deployment {es-pull}81548[#81548] (issue: {es-issue}81470[#81470]) + +Mapping:: +* Add support for sub-fields to `search_as_you_type` fields {es-pull}82430[#82430] (issue: {es-issue}56326[#56326]) +* Better exception message for `MappingParser.parse` {es-pull}80696[#80696] + +Network:: +* Throw `NoSeedNodeLeftException` on proxy failure {es-pull}80961[#80961] (issue: {es-issue}80898[#80898]) + +Recovery:: +* Add missing `indices.recovery.internal_action_retry_timeout` to list of settings {es-pull}83354[#83354] +* Add missing max overcommit factor to list of (dynamic) settings {es-pull}83350[#83350] + +SQL:: +* Fix txt format for empty result sets {es-pull}83376[#83376] + +Search:: +* Returns valid PIT when no index matched {es-pull}83424[#83424] + +Security:: +* Add validation 
for API key role descriptors {es-pull}82049[#82049] (issue: {es-issue}67311[#67311]) + +Snapshot/Restore:: +* Adjust `LinuxFileSystemNatives.allocatedSizeInBytes` for aarch64 architectures {es-pull}81376[#81376] (issues: {es-issue}80437[#80437], {es-issue}81362[#81362]) +* Distinguish "missing repository" from "missing repository plugin" {es-pull}82457[#82457] (issue: {es-issue}81758[#81758]) +* Move get snapshots serialization to management pool {es-pull}83215[#83215] + +TSDB:: +* Fix time series timestamp meta missing {es-pull}80695[#80695] + +Transform:: +* Fix NPE in transform version check {es-pull}81756[#81756] +* Fix condition on which the transform stops processing buckets {es-pull}82852[#82852] +* Prevent stopping of transforms due to threadpool limitation {es-pull}81912[#81912] (issue: {es-issue}81796[#81796]) + +[[deprecation-8.1.0]] +[float] +=== Deprecations + +CRUD:: +* Bulk actions JSON must be well-formed {es-pull}78876[#78876] (issue: {es-issue}43774[#43774]) + +Cluster Coordination:: +* Remove last few mentions of Zen discovery {es-pull}80410[#80410] + +[[enhancement-8.1.0]] +[float] +=== Enhancements + +Aggregations:: +* Add an aggregator for IPv4 and IPv6 subnets {es-pull}82410[#82410] +* Fail shards early when we can detect a type missmatch {es-pull}79869[#79869] (issue: {es-issue}72276[#72276]) +* Optimize `significant_text` aggregation to only parse the field it requires from `_source` {es-pull}79651[#79651] + +Allocation:: +* Identify other node in `SameShardAllocDec` message {es-pull}82890[#82890] (issue: {es-issue}80767[#80767]) +* Make `AllocationService#adaptAutoExpandReplicas` Faster {es-pull}83092[#83092] +* Speed up same host check {es-pull}80767[#80767] + +Analysis:: +* Expose Japanese completion filter to kuromoji analysis plugin {es-pull}81858[#81858] + +Authentication:: +* Enable `run_as` for all authentication schemes {es-pull}79809[#79809] +* Return API key name in `_authentication` response {es-pull}78946[#78946] (issue: {es-issue}70306[#70306]) + +Authorization:: +* Avoid loading authorized indices when requested indices are all concrete names {es-pull}81237[#81237] +* Optimize DLS bitset building for `matchAll` query {es-pull}81030[#81030] (issue: {es-issue}80904[#80904]) + +Cluster Coordination:: +* Add detail to slow cluster state warning message {es-pull}83221[#83221] +* Batch Index Settings Update Requests {es-pull}82896[#82896] (issue: {es-issue}79866[#79866]) +* Improve node-join task descriptions {es-pull}80090[#80090] +* Make `PeerFinder` log messages happier {es-pull}83222[#83222] +* More compact serialization of metadata {es-pull}82608[#82608] (issue: {es-issue}77466[#77466]) +* Paginate persisted cluster state {es-pull}78875[#78875] +* Reduce verbosity-increase timeout to 3 minutes {es-pull}81118[#81118] +* Use network recycler for publications {es-pull}80650[#80650] (issue: {es-issue}80111[#80111]) + +Data streams:: +* Defer reroute when autocreating datastream {es-pull}82412[#82412] (issue: {es-issue}82159[#82159]) + +ILM+SLM:: +* Expose the index age in ILM explain output {es-pull}81273[#81273] (issue: {es-issue}64429[#64429]) + +Indices APIs:: +* Batch auto create index cluster state updates {es-pull}82159[#82159] +* Expose 'features' option in Get Index API {es-pull}83083[#83083] (issue: {es-issue}82948[#82948]) +* Expose index health and status to the `_stats` API {es-pull}81954[#81954] (issue: {es-issue}80413[#80413]) +* Force merge REST API support `wait_for_completion` {es-pull}80463[#80463] (issues: {es-issue}80129[#80129], 
{es-issue}80129[#80129]) + +Infra/Circuit Breakers:: +* Allow dynamically changing the `use_real_memory` setting {es-pull}78288[#78288] (issue: {es-issue}77324[#77324]) + +Infra/Core:: +* Use `VarHandles` for number conversions {es-pull}80367[#80367] (issue: {es-issue}78823[#78823]) +* Use `VarHandles` in `ByteUtils` {es-pull}80442[#80442] (issue: {es-issue}78823[#78823]) +* `FilterPathBasedFilter` support match fieldname with dot {es-pull}83178[#83178] (issues: {es-issue}83148[#83148], {es-issue}83152[#83152]) + +Infra/REST API:: +* Allow for customised content-type validation {es-pull}80906[#80906] (issue: {es-issue}80482[#80482]) + +Infra/Scripting:: +* Add '$' syntax as a shortcut for 'field' in Painless {es-pull}80518[#80518] +* Add `BinaryDocValuesField` to replace `BytesRef` `(ScriptDocValues)` {es-pull}79760[#79760] +* Add a geo point field for the scripting fields api {es-pull}81395[#81395] +* Add date fields to the scripting fields api {es-pull}81272[#81272] +* Add half float mapping to the scripting fields API {es-pull}82294[#82294] +* Add scaled float to the scripting fields API {es-pull}82275[#82275] +* Add support for `GeoShape` to the scripting fields API {es-pull}81617[#81617] +* Fields API for IP mapped type {es-pull}81396[#81396] +* Fields API for byte, double, float, integer, long, short {es-pull}81126[#81126] (issue: {es-issue}79105[#79105]) +* Fields API for flattened mapped type {es-pull}82590[#82590] +* Fields API for x-pack `constant_keyword` {es-pull}82292[#82292] +* Fields API for x-pack version, doc version, seq no, mumur3 {es-pull}81476[#81476] +* Improve support for joda datetime to java datetime in Painless {es-pull}83099[#83099] +* Keyword fields API support {es-pull}81266[#81266] +* Make wildcard accessible from the scripting field API {es-pull}82763[#82763] +* Ordinal field data plumbing {es-pull}80970[#80970] (issue: {es-issue}79105[#79105]) +* Support boolean fields in Fields API {es-pull}80043[#80043] (issue: {es-issue}79105[#79105]) +* Time series compile and cache evict metrics {es-pull}79078[#79078] (issue: {es-issue}62899[#62899]) + +Infra/Settings:: +* Optimize duplicated code block in `MetadataUpdateSettingsService` {es-pull}82048[#82048] + +Machine Learning:: +* Add ability to update the truncation option at inference {es-pull}80267[#80267] +* Add error counts to trained model stats {es-pull}82705[#82705] +* Add latest search interval to datafeed stats {es-pull}82620[#82620] (issue: {es-issue}82405[#82405]) +* Adds new MPNet tokenization for NLP models {es-pull}82234[#82234] +* Force delete trained models {es-pull}80595[#80595] +* Improve error message on starting scrolling datafeed with no matching indices {es-pull}81069[#81069] (issue: {es-issue}81013[#81013]) +* Report thread settings per node for trained model deployments {es-pull}81723[#81723] (issue: {es-issue}81149[#81149]) +* Set default value of 30 days for model prune window {es-pull}81377[#81377] +* Track token positions and use source string to tag NER entities {es-pull}81275[#81275] +* Warn when creating job with an unusual bucket span {es-pull}82145[#82145] (issue: {es-issue}81645[#81645]) + +Mapping:: +* Allow doc-values only search on geo_point fields {es-pull}83395[#83395] +* Implement all queries on doc-values only keyword fields {es-pull}83404[#83404] +* Optimize source filtering in `SourceFieldMapper` {es-pull}81970[#81970] (issues: {es-issue}77154[#77154], {es-issue}81575[#81575]) + +Monitoring:: +* Add Enterprise Search monitoring index templates {es-pull}82743[#82743] + 
+Network:: +* Report close connection exceptions at INFO {es-pull}81768[#81768] (issues: {es-issue}51612[#51612], {es-issue}66473[#66473]) +* Serialize outbound messages on netty buffers {es-pull}80111[#80111] +* Track histogram of transport handling times {es-pull}80581[#80581] (issue: {es-issue}80428[#80428]) + +Recovery:: +* Adjust `indices.recovery.max_bytes_per_sec` according to external settings {es-pull}82819[#82819] + +SQL:: +* Extend Tableau connector to reconnect with catalog {es-pull}81321[#81321] + +Search:: +* Add `scripted_metric` agg context to `unsigned_long` {es-pull}64422[#64422] (issue: {es-issue}64347[#64347]) +* Add field usage support for vectors {es-pull}80608[#80608] +* Allow doc-values only search on boolean fields {es-pull}82925[#82925] (issues: {es-issue}82409[#82409], {es-issue}81210[#81210], {es-issue}52728[#52728]) +* Allow doc-values only search on date types {es-pull}82602[#82602] (issues: {es-issue}82409[#82409], {es-issue}81210[#81210], {es-issue}52728[#52728]) +* Allow doc-values only search on ip fields {es-pull}82929[#82929] (issues: {es-issue}82409[#82409], {es-issue}81210[#81210], {es-issue}52728[#52728]) +* Allow doc-values only search on keyword fields {es-pull}82846[#82846] (issues: {es-issue}82409[#82409], {es-issue}81210[#81210], {es-issue}52728[#52728]) +* Allow doc-values only search on number types {es-pull}82409[#82409] (issues: {es-issue}81210[#81210], {es-issue}52728[#52728]) +* Rewrite `match` and `match_phrase` queries to `term` queries on `keyword` fields {es-pull}82612[#82612] (issue: {es-issue}82515[#82515]) +* Short cut if reader has point values {es-pull}80268[#80268] +* Support combining `_shards` preference param with `` {es-pull}80024[#80024] (issue: {es-issue}80021[#80021]) + +Security:: +* Activate user profile API {es-pull}82400[#82400] +* Add an initial `ProfileService` for user profiles {es-pull}81899[#81899] +* Add new system index for user profile documents {es-pull}81355[#81355] +* Add update user profile data API {es-pull}82772[#82772] +* Add user profile API for get profile by UID {es-pull}81910[#81910] +* Update Kibana system user privileges {es-pull}82781[#82781] + +Snapshot/Restore:: +* Add Linux x86-64bits native method to retrieve the number of allocated bytes on disk for a file {es-pull}80437[#80437] (issue: {es-issue}79698[#79698]) + +Stats:: +* Add index pressure stats in cluster stats {es-pull}80303[#80303] (issue: {es-issue}79788[#79788]) +* Optimize `getIndices` in `IndicesSegmentResponse` {es-pull}80064[#80064] +* Speed up `MappingStats` Computation on Coordinating Node {es-pull}82830[#82830] + +TSDB:: +* Add `_tsid` field to `time_series` indices {es-pull}80276[#80276] +* Make time boundaries settings required in TSDB indices {es-pull}81146[#81146] + +Transform:: +* Introduce `deduce_mappings` transform setting {es-pull}82256[#82256] (issue: {es-issue}82559[#82559]) +* Make it possible to clear retention policy on an existing transform {es-pull}82703[#82703] (issue: {es-issue}82560[#82560]) +* Report transforms without config as erroneous {es-pull}81141[#81141] (issue: {es-issue}80955[#80955]) + +[[feature-8.1.0]] +[float] +=== New features + +Authentication:: +* Initial version of JWT Realm {es-pull}82175[#82175] +* Introduce domain setting to associate realms {es-pull}81968[#81968] + +Distributed:: +* Add desired nodes API {es-pull}82975[#82975] + +Geo:: +* New `GeoHexGrid` aggregation {es-pull}82924[#82924] + +Health:: +* Model for the new health reporting api {es-pull}83398[#83398] + +TSDB:: +* Handle 
`fields.with.dots` in `routing_path` {es-pull}83148[#83148] + +Transform:: +* Add transform reset API {es-pull}79828[#79828] (issue: {es-issue}75768[#75768]) + +[[upgrade-8.1.0]] +[float] +=== Upgrades + +Geo:: +* Update vector tiles google protobuf to 3.16.1 {es-pull}83402[#83402] + +Network:: +* Upgrade to Netty 4.1.73 {es-pull}82844[#82844] + +Packaging:: +* Bump bundled JDK to 17.0.2+8 {es-pull}83243[#83243] (issue: {es-issue}83242[#83242]) + + + From ca5c612660bfa58df828f8df0f3cbd8a7e6161bb Mon Sep 17 00:00:00 2001 From: David Roberts Date: Wed, 9 Feb 2022 17:38:53 +0000 Subject: [PATCH 015/167] Tolerate empty types array in Watch definitions (#83524) In 6.x internal system components created Watches with an empty types array in their definition. Types do not exist in 8.x, so these system-created Watches would need to be modified in 7.x to remove the types field. Because doing such modifications as a secret background task could be risky, instead the 8.x Watch parser will tolerate and ignore an empty types array. It is clear that an empty types array can be considered identical to typeless. Non-empty types arrays will still be considered fatal errors in 8.x, as silently ignoring them could change the meaning of the search. Fixes #83235 --- docs/changelog/83524.yaml | 6 +++ .../search/WatcherSearchTemplateRequest.java | 18 +++++++ .../WatcherSearchTemplateRequestTests.java | 50 +++++++++++++++++++ 3 files changed, 74 insertions(+) create mode 100644 docs/changelog/83524.yaml diff --git a/docs/changelog/83524.yaml b/docs/changelog/83524.yaml new file mode 100644 index 0000000000000..9dc7e8b0b487b --- /dev/null +++ b/docs/changelog/83524.yaml @@ -0,0 +1,6 @@ +pr: 83524 +summary: Tolerate empty types array in Watch definitions +area: Watcher +type: bug +issues: + - 83235 diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/support/search/WatcherSearchTemplateRequest.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/support/search/WatcherSearchTemplateRequest.java index 77dc361f4c5ab..0425206f224da 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/support/search/WatcherSearchTemplateRequest.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/support/search/WatcherSearchTemplateRequest.java @@ -12,6 +12,8 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.logging.DeprecationCategory; +import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.core.Nullable; import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptType; @@ -43,6 +45,10 @@ public class WatcherSearchTemplateRequest implements ToXContentObject { private final BytesReference searchSource; private boolean restTotalHitsAsInt = true; + private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(WatcherSearchTemplateRequest.class); + static final String TYPES_DEPRECATION_MESSAGE = + "[types removal] Specifying empty types array in a watcher search request is deprecated."; + public WatcherSearchTemplateRequest( String[] indices, SearchType searchType, @@ -190,6 +196,17 @@ public static WatcherSearchTemplateRequest fromXContent(XContentParser parser, S ); } } + } else if (TYPES_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { + // Tolerate an empty types array, because some watches created internally in 6.x 
have + // an empty types array in their search, and it's clearly equivalent to typeless. + if (parser.nextToken() != XContentParser.Token.END_ARRAY) { + throw new ElasticsearchParseException( + "could not read search request. unsupported non-empty array field [" + currentFieldName + "]" + ); + } + // Empty types arrays still generate the same deprecation warning they did in 7.x. + // Ideally they should be removed from the definition. + deprecationLogger.critical(DeprecationCategory.PARSING, "watcher_search_input", TYPES_DEPRECATION_MESSAGE); } else { throw new ElasticsearchParseException( "could not read search request. unexpected array field [" + currentFieldName + "]" @@ -272,6 +289,7 @@ public int hashCode() { } private static final ParseField INDICES_FIELD = new ParseField("indices"); + private static final ParseField TYPES_FIELD = new ParseField("types"); private static final ParseField BODY_FIELD = new ParseField("body"); private static final ParseField SEARCH_TYPE_FIELD = new ParseField("search_type"); private static final ParseField INDICES_OPTIONS_FIELD = new ParseField("indices_options"); diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/support/search/WatcherSearchTemplateRequestTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/support/search/WatcherSearchTemplateRequestTests.java index 005a089298777..620580ee09824 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/support/search/WatcherSearchTemplateRequestTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/support/search/WatcherSearchTemplateRequestTests.java @@ -6,15 +6,18 @@ */ package org.elasticsearch.xpack.watcher.support.search; +import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.action.search.SearchType; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.json.JsonXContent; import java.io.IOException; +import java.util.List; import java.util.Map; import static java.util.Collections.singletonMap; +import static org.hamcrest.Matchers.arrayContaining; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; @@ -32,6 +35,49 @@ public void testFromXContentWithTemplateCustomLang() throws IOException { assertTemplate(source, "custom-script", "painful", singletonMap("bar", "baz")); } + public void testFromXContentWithEmptyTypes() throws IOException { + String source = """ + { + "search_type" : "query_then_fetch", + "indices" : [ ".ml-anomalies-*" ], + "types" : [ ], + "body" : { + "query" : { + "bool" : { + "filter" : [ { "term" : { "job_id" : "my-job" } }, { "range" : { "timestamp" : { "gte" : "now-30m" } } } ] + } + } + } + } + """; + try (XContentParser parser = createParser(JsonXContent.jsonXContent, source)) { + parser.nextToken(); + WatcherSearchTemplateRequest result = WatcherSearchTemplateRequest.fromXContent(parser, randomFrom(SearchType.values())); + assertThat(result.getIndices(), arrayContaining(".ml-anomalies-*")); + } + } + + public void testFromXContentWithNonEmptyTypes() throws IOException { + String source = """ + { + "search_type" : "query_then_fetch", + "indices" : [ "my-index" ], + "types" : [ "my-type" ], + "body" : { + "query" : { "match_all" : {} } + } + } + """; + try (XContentParser parser = createParser(JsonXContent.jsonXContent, source)) { + parser.nextToken(); + ElasticsearchParseException e = expectThrows( + ElasticsearchParseException.class, + () 
-> WatcherSearchTemplateRequest.fromXContent(parser, randomFrom(SearchType.values())) + ); + assertThat(e.getMessage(), is("could not read search request. unsupported non-empty array field [types]")); + } + } + public void testDefaultHitCountsDefaults() throws IOException { assertHitCount("{}", true); } @@ -61,4 +107,8 @@ private void assertTemplate(String source, String expectedScript, String expecte assertThat(result.getTemplate().getParams(), equalTo(expectedParams)); } } + + protected List filteredWarnings() { + return List.of(WatcherSearchTemplateRequest.TYPES_DEPRECATION_MESSAGE); + } } From 6522e6eea25385c1afdac5e6b5555f7b4c65685a Mon Sep 17 00:00:00 2001 From: James Rodewig Date: Wed, 9 Feb 2022 13:00:07 -0500 Subject: [PATCH 016/167] [DOCS] Update multi-target syntax refs (#83703) --- .../snapshot-restore/apis/clone-snapshot-api.asciidoc | 2 +- .../snapshot-restore/apis/create-snapshot-api.asciidoc | 2 +- docs/reference/sql/language/indices.asciidoc | 2 +- x-pack/docs/en/watcher/transform/search.asciidoc | 6 +++--- 4 files changed, 6 insertions(+), 6 deletions(-) diff --git a/docs/reference/snapshot-restore/apis/clone-snapshot-api.asciidoc b/docs/reference/snapshot-restore/apis/clone-snapshot-api.asciidoc index 778bb55a2f267..1ff77c8af886e 100644 --- a/docs/reference/snapshot-restore/apis/clone-snapshot-api.asciidoc +++ b/docs/reference/snapshot-restore/apis/clone-snapshot-api.asciidoc @@ -55,4 +55,4 @@ fails and returns an error. Defaults to `30s`. `indices`:: (Required, string) A comma-separated list of indices to include in the snapshot. -<> is supported. \ No newline at end of file +<> is supported. \ No newline at end of file diff --git a/docs/reference/snapshot-restore/apis/create-snapshot-api.asciidoc b/docs/reference/snapshot-restore/apis/create-snapshot-api.asciidoc index 2821db95dbb07..7a134363b3c1b 100644 --- a/docs/reference/snapshot-restore/apis/create-snapshot-api.asciidoc +++ b/docs/reference/snapshot-restore/apis/create-snapshot-api.asciidoc @@ -113,7 +113,7 @@ include::restore-snapshot-api.asciidoc[tag=cluster-state-contents] `indices`:: (Optional, string or array of strings) Comma-separated list of data streams and indices to include in the snapshot. -Supports <>. Defaults to an empty array +Supports <>. Defaults to an empty array (`[]`), which includes all regular data streams and regular indices. To exclude all data streams and indices, use `-*`. + diff --git a/docs/reference/sql/language/indices.asciidoc b/docs/reference/sql/language/indices.asciidoc index 4a8fb2325352e..1f986bfea8c70 100644 --- a/docs/reference/sql/language/indices.asciidoc +++ b/docs/reference/sql/language/indices.asciidoc @@ -6,7 +6,7 @@ [[sql-index-patterns-multi]] [discrete] -==== {es} multi-index +==== {es} multi-target syntax The {es} notation for enumerating, including or excluding <> is supported _as long_ as it is quoted or escaped as a table identifier. diff --git a/x-pack/docs/en/watcher/transform/search.asciidoc b/x-pack/docs/en/watcher/transform/search.asciidoc index 7e62e30566baa..9ed60ceda2df3 100644 --- a/x-pack/docs/en/watcher/transform/search.asciidoc +++ b/x-pack/docs/en/watcher/transform/search.asciidoc @@ -71,15 +71,15 @@ The following table lists all available settings for the search | `request.indices_options.expand_wildcards` | no | `open` | Determines how to expand indices wildcards. An array consisting of a combination of `open`, `closed`, and `hidden`. Alternatively a value of `none` or `all`. 
- (see <>) + (see <>) | `request.indices_options.ignore_unavailable` | no | `true` | A boolean value that determines whether the search should leniently ignore unavailable indices - (see <>) + (see <>) | `request.indices_options.allow_no_indices` | no | `true` | A boolean value that determines whether the search should leniently return no results when no indices - are resolved (see <>) + are resolved (see <>) | `request.template` | no | - | The body of the search template. See <> for more information. From d85e70c2211dd2561493e684912f00194059fdd1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Wed, 9 Feb 2022 19:01:53 +0100 Subject: [PATCH 017/167] Move Pipeline aggs under VersionedNamedWriteable (#83399) In #81809 we introduced a mechanism to check serializability of search requests to earlier-version nodes already on the coordinating node. This requires knowledge of the version in which NamedWriteable classes were introduced, which is why we started moving classes that are used inside the search request under the VersionedNamedWriteable interface to make sure future additions implement the method that provides the version information. This change moves pipeline aggregation builders under that interface. Most builders have been around since long before version 7.0, so we return an empty version tag for them. The newer ones return the version they were first released in. --- .../InferencePipelineAggregationBuilder.java | 6 ++++++ .../PipelineAggregationBuilder.java | 6 ------ .../AvgBucketPipelineAggregationBuilder.java | 6 ++++++ ...BucketScriptPipelineAggregationBuilder.java | 6 ++++++ ...cketSelectorPipelineAggregationBuilder.java | 6 ++++++ .../BucketSortPipelineAggregationBuilder.java | 6 ++++++ ...umulativeSumPipelineAggregationBuilder.java | 6 ++++++ .../DerivativePipelineAggregationBuilder.java | 6 ++++++ ...dStatsBucketPipelineAggregationBuilder.java | 6 ++++++ .../MaxBucketPipelineAggregationBuilder.java | 6 ++++++ .../MinBucketPipelineAggregationBuilder.java | 6 ++++++ .../MovAvgPipelineAggregationBuilder.java | 6 ++++++ .../MovFnPipelineAggregationBuilder.java | 6 ++++++ ...ntilesBucketPipelineAggregationBuilder.java | 7 +++++++ .../SerialDiffPipelineAggregationBuilder.java | 6 ++++++ .../StatsBucketPipelineAggregationBuilder.java | 6 ++++++ .../SumBucketPipelineAggregationBuilder.java | 6 ++++++ .../search/SearchModuleTests.java | 5 +++++ .../aggregations/AggregatorFactoriesTests.java | 6 ++++++ ...eCardinalityPipelineAggregationBuilder.java | 6 ++++++ ...gPercentilesPipelineAggregationBuilder.java | 6 ++++++ .../NormalizePipelineAggregationBuilder.java | 18 ++++++++++++------ .../BucketCorrelationAggregationBuilder.java | 6 ++++++ .../InferencePipelineAggregationBuilder.java | 6 ++++++ .../BucketCountKSTestAggregationBuilder.java | 6 ++++++ 25 files changed, 150 insertions(+), 12 deletions(-) diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/analytics/InferencePipelineAggregationBuilder.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/analytics/InferencePipelineAggregationBuilder.java index 47e1eb00bbd7f..20ba0fa993534 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/analytics/InferencePipelineAggregationBuilder.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/analytics/InferencePipelineAggregationBuilder.java @@ -8,6 +8,7 @@ package org.elasticsearch.client.analytics; +import org.elasticsearch.Version; import
org.elasticsearch.client.ml.inference.trainedmodel.InferenceConfig; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.search.aggregations.PipelineAggregationBuilder; @@ -129,4 +130,9 @@ public boolean equals(Object obj) { && Objects.equals(modelId, other.modelId) && Objects.equals(inferenceConfig, other.inferenceConfig); } + + @Override + public Version getMinimalSupportedVersion() { + return Version.V_7_9_0; + } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/PipelineAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/PipelineAggregationBuilder.java index 85b6ce2c3893d..6644a5cf7fe15 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/PipelineAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/PipelineAggregationBuilder.java @@ -7,7 +7,6 @@ */ package org.elasticsearch.search.aggregations; -import org.elasticsearch.Version; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.ValidateActions; import org.elasticsearch.common.Strings; @@ -259,9 +258,4 @@ public String toString() { public PipelineAggregationBuilder rewrite(QueryRewriteContext context) throws IOException { return this; } - - @Override - public Version getMinimalSupportedVersion() { - return Version.V_EMPTY; - } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/AvgBucketPipelineAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/AvgBucketPipelineAggregationBuilder.java index 5b9e26276b99e..3306e6bb6dbb9 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/AvgBucketPipelineAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/AvgBucketPipelineAggregationBuilder.java @@ -8,6 +8,7 @@ package org.elasticsearch.search.aggregations.pipeline; +import org.elasticsearch.Version; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xcontent.XContentBuilder; @@ -59,4 +60,9 @@ protected AvgBucketPipelineAggregationBuilder buildFactory( public String getWriteableName() { return NAME; } + + @Override + public Version getMinimalSupportedVersion() { + return Version.V_EMPTY; + } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/BucketScriptPipelineAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/BucketScriptPipelineAggregationBuilder.java index d69e325c4f0fe..be89a8bdbb313 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/BucketScriptPipelineAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/BucketScriptPipelineAggregationBuilder.java @@ -8,6 +8,7 @@ package org.elasticsearch.search.aggregations.pipeline; +import org.elasticsearch.Version; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.util.Maps; @@ -225,4 +226,9 @@ public boolean equals(Object obj) { public String getWriteableName() { return NAME; } + + @Override + public Version getMinimalSupportedVersion() { + return Version.V_EMPTY; + } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/BucketSelectorPipelineAggregationBuilder.java 
b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/BucketSelectorPipelineAggregationBuilder.java index 7e66dfd00af6e..e11754c504488 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/BucketSelectorPipelineAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/BucketSelectorPipelineAggregationBuilder.java @@ -8,6 +8,7 @@ package org.elasticsearch.search.aggregations.pipeline; +import org.elasticsearch.Version; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -221,4 +222,9 @@ public boolean equals(Object obj) { public String getWriteableName() { return NAME; } + + @Override + public Version getMinimalSupportedVersion() { + return Version.V_EMPTY; + } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/BucketSortPipelineAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/BucketSortPipelineAggregationBuilder.java index 62780cfe56091..15fa731e154a3 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/BucketSortPipelineAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/BucketSortPipelineAggregationBuilder.java @@ -7,6 +7,7 @@ */ package org.elasticsearch.search.aggregations.pipeline; +import org.elasticsearch.Version; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy; @@ -188,4 +189,9 @@ public boolean equals(Object obj) { public String getWriteableName() { return NAME; } + + @Override + public Version getMinimalSupportedVersion() { + return Version.V_EMPTY; + } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/CumulativeSumPipelineAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/CumulativeSumPipelineAggregationBuilder.java index 90fc593c61c96..4b4e38ba7312c 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/CumulativeSumPipelineAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/CumulativeSumPipelineAggregationBuilder.java @@ -8,6 +8,7 @@ package org.elasticsearch.search.aggregations.pipeline; +import org.elasticsearch.Version; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.search.DocValueFormat; @@ -125,4 +126,9 @@ public boolean equals(Object obj) { public String getWriteableName() { return NAME; } + + @Override + public Version getMinimalSupportedVersion() { + return Version.V_7_4_0; + } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/DerivativePipelineAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/DerivativePipelineAggregationBuilder.java index f59142ad3bc1d..713e0f26f8ad4 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/DerivativePipelineAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/DerivativePipelineAggregationBuilder.java @@ -8,6 +8,7 @@ package org.elasticsearch.search.aggregations.pipeline; +import org.elasticsearch.Version; import org.elasticsearch.common.ParsingException; import 
org.elasticsearch.common.Rounding; import org.elasticsearch.common.io.stream.StreamInput; @@ -251,4 +252,9 @@ public int hashCode() { public String getWriteableName() { return NAME; } + + @Override + public Version getMinimalSupportedVersion() { + return Version.V_7_4_0; + } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/ExtendedStatsBucketPipelineAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/ExtendedStatsBucketPipelineAggregationBuilder.java index 914fea8ed8bbb..251643def323b 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/ExtendedStatsBucketPipelineAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/ExtendedStatsBucketPipelineAggregationBuilder.java @@ -8,6 +8,7 @@ package org.elasticsearch.search.aggregations.pipeline; +import org.elasticsearch.Version; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xcontent.XContentBuilder; @@ -96,4 +97,9 @@ public boolean equals(Object obj) { public String getWriteableName() { return NAME; } + + @Override + public Version getMinimalSupportedVersion() { + return Version.V_EMPTY; + } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/MaxBucketPipelineAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/MaxBucketPipelineAggregationBuilder.java index 6fadfd6b16774..b3466a97b1300 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/MaxBucketPipelineAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/MaxBucketPipelineAggregationBuilder.java @@ -8,6 +8,7 @@ package org.elasticsearch.search.aggregations.pipeline; +import org.elasticsearch.Version; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xcontent.XContentBuilder; @@ -59,4 +60,9 @@ protected MaxBucketPipelineAggregationBuilder buildFactory( public String getWriteableName() { return NAME; } + + @Override + public Version getMinimalSupportedVersion() { + return Version.V_EMPTY; + } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/MinBucketPipelineAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/MinBucketPipelineAggregationBuilder.java index e730dc2091c38..fe30fecb5a842 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/MinBucketPipelineAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/MinBucketPipelineAggregationBuilder.java @@ -8,6 +8,7 @@ package org.elasticsearch.search.aggregations.pipeline; +import org.elasticsearch.Version; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xcontent.XContentBuilder; @@ -59,4 +60,9 @@ protected MinBucketPipelineAggregationBuilder buildFactory( public String getWriteableName() { return NAME; } + + @Override + public Version getMinimalSupportedVersion() { + return Version.V_EMPTY; + } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/MovAvgPipelineAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/MovAvgPipelineAggregationBuilder.java index 55c62768885c0..ca715858af8c3 100644 --- 
a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/MovAvgPipelineAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/MovAvgPipelineAggregationBuilder.java @@ -8,6 +8,7 @@ package org.elasticsearch.search.aggregations.pipeline; +import org.elasticsearch.Version; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -72,4 +73,9 @@ protected void validate(ValidationContext context) { public final String getWriteableName() { return null; } + + @Override + public Version getMinimalSupportedVersion() { + return Version.V_EMPTY; + } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/MovFnPipelineAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/MovFnPipelineAggregationBuilder.java index f4074302dd96a..d1606c82b1899 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/MovFnPipelineAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/MovFnPipelineAggregationBuilder.java @@ -8,6 +8,7 @@ package org.elasticsearch.search.aggregations.pipeline; +import org.elasticsearch.Version; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -234,4 +235,9 @@ public boolean equals(Object obj) { public String getWriteableName() { return NAME; } + + @Override + public Version getMinimalSupportedVersion() { + return Version.V_EMPTY; + } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/PercentilesBucketPipelineAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/PercentilesBucketPipelineAggregationBuilder.java index 719aa3a4010f5..af3bf418ed150 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/PercentilesBucketPipelineAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/PercentilesBucketPipelineAggregationBuilder.java @@ -10,6 +10,7 @@ import com.carrotsearch.hppc.DoubleArrayList; +import org.elasticsearch.Version; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xcontent.ParseField; @@ -116,6 +117,11 @@ protected XContentBuilder doXContentBody(XContentBuilder builder, Params params) return builder; } + @Override + public Version getMinimalSupportedVersion() { + return Version.V_EMPTY; + } + public static final PipelineAggregator.Parser PARSER = new BucketMetricsParser() { @Override @@ -179,4 +185,5 @@ public boolean equals(Object obj) { public String getWriteableName() { return NAME; } + } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/SerialDiffPipelineAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/SerialDiffPipelineAggregationBuilder.java index 1c081c9af534a..bca172e1dd5d7 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/SerialDiffPipelineAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/SerialDiffPipelineAggregationBuilder.java @@ -8,6 +8,7 @@ package org.elasticsearch.search.aggregations.pipeline; +import org.elasticsearch.Version; import org.elasticsearch.common.ParsingException; import 
org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -245,4 +246,9 @@ public boolean equals(Object obj) { public String getWriteableName() { return NAME; } + + @Override + public Version getMinimalSupportedVersion() { + return Version.V_EMPTY; + } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/StatsBucketPipelineAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/StatsBucketPipelineAggregationBuilder.java index 5ae9b7c0af671..5cd070b8fa6b5 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/StatsBucketPipelineAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/StatsBucketPipelineAggregationBuilder.java @@ -8,6 +8,7 @@ package org.elasticsearch.search.aggregations.pipeline; +import org.elasticsearch.Version; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xcontent.XContentBuilder; @@ -59,4 +60,9 @@ protected StatsBucketPipelineAggregationBuilder buildFactory( public String getWriteableName() { return NAME; } + + @Override + public Version getMinimalSupportedVersion() { + return Version.V_EMPTY; + } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/SumBucketPipelineAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/SumBucketPipelineAggregationBuilder.java index de36d99e3a0dc..5f45e1355b8a5 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/SumBucketPipelineAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/SumBucketPipelineAggregationBuilder.java @@ -8,6 +8,7 @@ package org.elasticsearch.search.aggregations.pipeline; +import org.elasticsearch.Version; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xcontent.XContentBuilder; @@ -59,4 +60,9 @@ protected SumBucketPipelineAggregationBuilder buildFactory( public String getWriteableName() { return NAME; } + + @Override + public Version getMinimalSupportedVersion() { + return Version.V_EMPTY; + } } diff --git a/server/src/test/java/org/elasticsearch/search/SearchModuleTests.java b/server/src/test/java/org/elasticsearch/search/SearchModuleTests.java index 11ae7975d8106..f8f65f6ca0f4d 100644 --- a/server/src/test/java/org/elasticsearch/search/SearchModuleTests.java +++ b/server/src/test/java/org/elasticsearch/search/SearchModuleTests.java @@ -584,6 +584,11 @@ private static TestPipelineAggregationBuilder fromXContent(String name, XContent @Override protected void validate(ValidationContext context) {} + + @Override + public Version getMinimalSupportedVersion() { + return Version.V_EMPTY; + } } /** diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/AggregatorFactoriesTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/AggregatorFactoriesTests.java index 6af3a3d06f047..646604b0cc167 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/AggregatorFactoriesTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/AggregatorFactoriesTests.java @@ -8,6 +8,7 @@ package org.elasticsearch.search.aggregations; import org.apache.lucene.util.SetOnce; +import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import 
org.elasticsearch.common.ParsingException; import org.elasticsearch.common.Strings; @@ -345,6 +346,11 @@ public String getWriteableName() { return "rewritten"; } + @Override + public Version getMinimalSupportedVersion() { + return Version.V_EMPTY; + } + @Override protected void doWriteTo(StreamOutput out) throws IOException { throw new UnsupportedOperationException(); diff --git a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/cumulativecardinality/CumulativeCardinalityPipelineAggregationBuilder.java b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/cumulativecardinality/CumulativeCardinalityPipelineAggregationBuilder.java index 8804818f91695..0a4a98f0c365f 100644 --- a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/cumulativecardinality/CumulativeCardinalityPipelineAggregationBuilder.java +++ b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/cumulativecardinality/CumulativeCardinalityPipelineAggregationBuilder.java @@ -6,6 +6,7 @@ */ package org.elasticsearch.xpack.analytics.cumulativecardinality; +import org.elasticsearch.Version; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.search.DocValueFormat; @@ -128,4 +129,9 @@ public String getWriteableName() { protected boolean overrideBucketsPath() { return true; } + + @Override + public Version getMinimalSupportedVersion() { + return Version.V_7_4_0; + } } diff --git a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/movingPercentiles/MovingPercentilesPipelineAggregationBuilder.java b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/movingPercentiles/MovingPercentilesPipelineAggregationBuilder.java index bd86230ed4182..a0135c045bef4 100644 --- a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/movingPercentiles/MovingPercentilesPipelineAggregationBuilder.java +++ b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/movingPercentiles/MovingPercentilesPipelineAggregationBuilder.java @@ -6,6 +6,7 @@ */ package org.elasticsearch.xpack.analytics.movingPercentiles; +import org.elasticsearch.Version; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.search.aggregations.pipeline.AbstractPipelineAggregationBuilder; @@ -129,4 +130,9 @@ public String getWriteableName() { protected boolean overrideBucketsPath() { return true; } + + @Override + public Version getMinimalSupportedVersion() { + return Version.V_7_9_0; + } } diff --git a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/normalize/NormalizePipelineAggregationBuilder.java b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/normalize/NormalizePipelineAggregationBuilder.java index f16b19f46f045..f2cf85646f7be 100644 --- a/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/normalize/NormalizePipelineAggregationBuilder.java +++ b/x-pack/plugin/analytics/src/main/java/org/elasticsearch/xpack/analytics/normalize/NormalizePipelineAggregationBuilder.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.analytics.normalize; +import org.elasticsearch.Version; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.search.DocValueFormat; @@ -16,6 +17,12 @@ import 
org.elasticsearch.xcontent.ConstructingObjectParser; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xpack.analytics.normalize.NormalizePipelineMethods.Mean; +import org.elasticsearch.xpack.analytics.normalize.NormalizePipelineMethods.Percent; +import org.elasticsearch.xpack.analytics.normalize.NormalizePipelineMethods.RescaleZeroToOne; +import org.elasticsearch.xpack.analytics.normalize.NormalizePipelineMethods.RescaleZeroToOneHundred; +import org.elasticsearch.xpack.analytics.normalize.NormalizePipelineMethods.Softmax; +import org.elasticsearch.xpack.analytics.normalize.NormalizePipelineMethods.ZScore; import java.io.IOException; import java.util.List; @@ -27,12 +34,6 @@ import static org.elasticsearch.search.aggregations.pipeline.PipelineAggregator.Parser.FORMAT; import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; -import static org.elasticsearch.xpack.analytics.normalize.NormalizePipelineMethods.Mean; -import static org.elasticsearch.xpack.analytics.normalize.NormalizePipelineMethods.Percent; -import static org.elasticsearch.xpack.analytics.normalize.NormalizePipelineMethods.RescaleZeroToOne; -import static org.elasticsearch.xpack.analytics.normalize.NormalizePipelineMethods.RescaleZeroToOneHundred; -import static org.elasticsearch.xpack.analytics.normalize.NormalizePipelineMethods.Softmax; -import static org.elasticsearch.xpack.analytics.normalize.NormalizePipelineMethods.ZScore; public class NormalizePipelineAggregationBuilder extends AbstractPipelineAggregationBuilder { public static final String NAME = "normalize"; @@ -152,4 +153,9 @@ public boolean equals(Object obj) { public String getWriteableName() { return NAME; } + + @Override + public Version getMinimalSupportedVersion() { + return Version.V_7_9_0; + } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/correlation/BucketCorrelationAggregationBuilder.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/correlation/BucketCorrelationAggregationBuilder.java index 6608bc32a4536..10eb7311ba321 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/correlation/BucketCorrelationAggregationBuilder.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/correlation/BucketCorrelationAggregationBuilder.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.ml.aggs.correlation; +import org.elasticsearch.Version; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.plugins.SearchPlugin; @@ -142,4 +143,9 @@ public boolean equals(Object o) { public int hashCode() { return Objects.hash(super.hashCode(), correlationFunction); } + + @Override + public Version getMinimalSupportedVersion() { + return Version.V_7_14_0; + } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/inference/InferencePipelineAggregationBuilder.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/inference/InferencePipelineAggregationBuilder.java index 5ff742f366aa9..44a308519dd3b 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/inference/InferencePipelineAggregationBuilder.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/inference/InferencePipelineAggregationBuilder.java @@ -8,6 +8,7 @@ package 
org.elasticsearch.xpack.ml.aggs.inference; import org.apache.lucene.util.SetOnce; +import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.client.internal.Client; import org.elasticsearch.common.Strings; @@ -376,4 +377,9 @@ public boolean equals(Object obj) { && Objects.equals(modelId, other.modelId) && Objects.equals(inferenceConfig, other.inferenceConfig); } + + @Override + public Version getMinimalSupportedVersion() { + return Version.V_7_9_0; + } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/kstest/BucketCountKSTestAggregationBuilder.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/kstest/BucketCountKSTestAggregationBuilder.java index 4c6623eb55c6b..eb6cc48a31635 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/kstest/BucketCountKSTestAggregationBuilder.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/aggs/kstest/BucketCountKSTestAggregationBuilder.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.ml.aggs.kstest; +import org.elasticsearch.Version; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.core.Nullable; @@ -190,4 +191,9 @@ public boolean equals(Object o) { public int hashCode() { return Objects.hash(super.hashCode(), Arrays.hashCode(fractions), alternative, samplingMethod); } + + @Override + public Version getMinimalSupportedVersion() { + return Version.V_7_14_0; + } } From 2361b8ecc6f5194bd1536439cd594aecc7d24f1e Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Wed, 9 Feb 2022 19:35:42 +0100 Subject: [PATCH 018/167] Clean up data streams et al as part of ESSingleNodeTestCase teardown. (#83668) --- .../datastreams/TSDBIndexingIT.java | 9 --------- .../test/ESSingleNodeTestCase.java | 18 ++++++++++++++++++ 2 files changed, 18 insertions(+), 9 deletions(-) diff --git a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/TSDBIndexingIT.java b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/TSDBIndexingIT.java index 7b2f8039d1942..be6165026fe46 100644 --- a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/TSDBIndexingIT.java +++ b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/TSDBIndexingIT.java @@ -11,7 +11,6 @@ import org.elasticsearch.action.admin.indices.get.GetIndexRequest; import org.elasticsearch.action.admin.indices.rollover.RolloverRequest; import org.elasticsearch.action.admin.indices.template.put.PutComposableIndexTemplateAction; -import org.elasticsearch.action.datastreams.DeleteDataStreamAction; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.cluster.metadata.ComposableIndexTemplate; import org.elasticsearch.cluster.metadata.Template; @@ -24,14 +23,12 @@ import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESSingleNodeTestCase; import org.elasticsearch.xcontent.XContentType; -import org.junit.After; import java.time.Instant; import java.util.Collection; import java.util.List; import java.util.concurrent.CountDownLatch; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.hamcrest.Matchers.equalTo; public class TSDBIndexingIT extends ESSingleNodeTestCase { @@ -68,12 +65,6 @@ protected Settings nodeSettings() { return newSettings.build(); } - @After - public void cleanup() { - DeleteDataStreamAction.Request 
deleteDataStreamsRequest = new DeleteDataStreamAction.Request("*"); - assertAcked(client().execute(DeleteDataStreamAction.INSTANCE, deleteDataStreamsRequest).actionGet()); - } - public void testTimeRanges() throws Exception { var mappingTemplate = """ { diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESSingleNodeTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESSingleNodeTestCase.java index 3645298543e0c..69b4000caaeea 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESSingleNodeTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESSingleNodeTestCase.java @@ -12,6 +12,9 @@ import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder; import org.elasticsearch.action.admin.indices.get.GetIndexResponse; +import org.elasticsearch.action.admin.indices.template.delete.DeleteComponentTemplateAction; +import org.elasticsearch.action.admin.indices.template.delete.DeleteComposableIndexTemplateAction; +import org.elasticsearch.action.datastreams.DeleteDataStreamAction; import org.elasticsearch.action.support.DestructiveOperations; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.client.internal.Client; @@ -127,6 +130,21 @@ public void tearDown() throws Exception { assertThat(searchService.getActiveContexts(), equalTo(0)); assertThat(searchService.getOpenScrollContexts(), equalTo(0)); super.tearDown(); + var deleteDataStreamsRequest = new DeleteDataStreamAction.Request("*"); + deleteDataStreamsRequest.indicesOptions(IndicesOptions.LENIENT_EXPAND_OPEN_CLOSED_HIDDEN); + try { + assertAcked(client().execute(DeleteDataStreamAction.INSTANCE, deleteDataStreamsRequest).actionGet()); + } catch (IllegalStateException e) { + // Ignore if the action isn't registered, because data streams live in a module and + // if the delete action isn't registered then there are no data streams to delete. + if (e.getMessage().startsWith("failed to find action") == false) { + throw e; + } + } + var deleteComposableIndexTemplateRequest = new DeleteComposableIndexTemplateAction.Request("*"); + assertAcked(client().execute(DeleteComposableIndexTemplateAction.INSTANCE, deleteComposableIndexTemplateRequest).actionGet()); + var deleteComponentTemplateRequest = new DeleteComponentTemplateAction.Request("*"); + assertAcked(client().execute(DeleteComponentTemplateAction.INSTANCE, deleteComponentTemplateRequest).actionGet()); assertAcked( client().admin().indices().prepareDelete("*").setIndicesOptions(IndicesOptions.LENIENT_EXPAND_OPEN_CLOSED_HIDDEN).get() ); From de1dd935dfb969edf04bbeb7e755028437069a2a Mon Sep 17 00:00:00 2001 From: Benjamin Trent Date: Wed, 9 Feb 2022 13:40:23 -0500 Subject: [PATCH 019/167] [ML] move to using new BERT-style tokenizer and analyzer instead of custom tokenization class (#82870) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This changes the internal tokenization logic to use Lucene-style token filters and analyzers. These filters and analyzers are not generally available yet via the ML plugin and are only used internally. Micro benchmarks show a marginal improvement in average throughput: ``` BertEncodingBenchmark.luceneStyle avgt 15 565.577 ± 3.527 ms/op BertEncodingBenchmark.original avgt 15 714.827 ± 15.393 ms/op ``` I think there is a ton of room for optimizing these tokenizers further.
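For a concrete picture of the composition style, here is a minimal, self-contained sketch built from stock Lucene classes (WhitespaceTokenizer from lucene-analysis-common, LowerCaseFilter from lucene-core). It only illustrates the tokenizer-plus-filter chain: the new ML-internal BasicTokenFilter and WordPieceTokenFilter slot into the same position that LowerCaseFilter occupies below, and their real constructor arguments are deliberately not shown.
```
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.LowerCaseFilter;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.Tokenizer;
import org.apache.lucene.analysis.core.WhitespaceTokenizer;
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;

import java.io.IOException;

public final class AnalyzerChainSketch {
    public static void main(String[] args) throws IOException {
        // A Tokenizer produces the initial token stream; filters are chained on top of it.
        try (Analyzer analyzer = new Analyzer() {
            @Override
            protected TokenStreamComponents createComponents(String fieldName) {
                Tokenizer source = new WhitespaceTokenizer();
                TokenStream result = new LowerCaseFilter(source); // the BERT filters chain in here
                return new TokenStreamComponents(source, result);
            }
        }; TokenStream ts = analyzer.tokenStream("field", "Elasticsearch ML tokenizers")) {
            CharTermAttribute term = ts.addAttribute(CharTermAttribute.class);
            ts.reset();
            while (ts.incrementToken()) {
                System.out.println(term); // elasticsearch, ml, tokenizers
            }
            ts.end();
        }
    }
}
```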
But, to keep readability for the initial transition, filters are created separately, and their implementations are rather naive. --- x-pack/plugin/ml/build.gradle | 6 + x-pack/plugin/ml/licenses/icu4j-68.2.jar.sha1 | 1 + x-pack/plugin/ml/licenses/icu4j-LICENSE.txt | 33 ++ x-pack/plugin/ml/licenses/icu4j-NOTICE.txt | 3 + x-pack/plugin/ml/licenses/lucene-LICENSE.txt | 475 ++++++++++++++++ x-pack/plugin/ml/licenses/lucene-NOTICE.txt | 192 +++++++ .../lucene-analysis-icu-9.0.0.jar.sha1 | 1 + .../deployment/DeploymentManager.java | 13 +- .../ml/inference/nlp/FillMaskProcessor.java | 5 + .../xpack/ml/inference/nlp/NerProcessor.java | 17 +- .../xpack/ml/inference/nlp/NlpTask.java | 3 +- .../inference/nlp/PassThroughProcessor.java | 7 + .../nlp/TextClassificationProcessor.java | 7 + .../inference/nlp/TextEmbeddingProcessor.java | 7 + .../nlp/ZeroShotClassificationProcessor.java | 5 + .../nlp/tokenizers/BasicTokenFilter.java | 272 +++++++++ .../nlp/tokenizers/BasicTokenizer.java | 381 ------------- .../nlp/tokenizers/BertTokenizer.java | 86 +-- ...rieNode.java => CharSeqTokenTrieNode.java} | 35 +- .../nlp/tokenizers/ControlCharFilter.java | 94 ++++ .../nlp/tokenizers/DelimitedToken.java | 55 +- .../nlp/tokenizers/MultiCharSequence.java | 98 ++++ .../nlp/tokenizers/NlpTokenizer.java | 3 +- .../nlp/tokenizers/TokenizationResult.java | 20 +- .../nlp/tokenizers/WordPieceAnalyzer.java | 80 +++ .../nlp/tokenizers/WordPieceTokenFilter.java | 207 +++++++ .../nlp/tokenizers/WordPieceTokenizer.java | 94 ---- .../nlp/BertRequestBuilderTests.java | 16 +- .../inference/nlp/FillMaskProcessorTests.java | 5 +- .../nlp/MPNetRequestBuilderTests.java | 15 +- .../ml/inference/nlp/NerProcessorTests.java | 23 +- .../nlp/tokenizers/BasicTokenFilterTests.java | 134 +++++ .../nlp/tokenizers/BasicTokenizerTests.java | 266 --------- .../nlp/tokenizers/BertTokenizerTests.java | 525 ++++++++++-------- ...ts.java => CharSeqTokenTrieNodeTests.java} | 36 +- .../tokenizers/ControlCharFilterTests.java | 78 +++ .../nlp/tokenizers/MPNetTokenizerTests.java | 87 +-- .../tokenizers/MultiCharSequenceTests.java | 55 ++ .../tokenizers/WordPieceTokenFilterTests.java | 70 +++ .../tokenizers/WordPieceTokenizerTests.java | 67 --- 40 files changed, 2364 insertions(+), 1213 deletions(-) create mode 100644 x-pack/plugin/ml/licenses/icu4j-68.2.jar.sha1 create mode 100644 x-pack/plugin/ml/licenses/icu4j-LICENSE.txt create mode 100644 x-pack/plugin/ml/licenses/icu4j-NOTICE.txt create mode 100644 x-pack/plugin/ml/licenses/lucene-LICENSE.txt create mode 100644 x-pack/plugin/ml/licenses/lucene-NOTICE.txt create mode 100644 x-pack/plugin/ml/licenses/lucene-analysis-icu-9.0.0.jar.sha1 create mode 100644 x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/BasicTokenFilter.java delete mode 100644 x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/BasicTokenizer.java rename x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/{TokenTrieNode.java => CharSeqTokenTrieNode.java} (57%) create mode 100644 x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/ControlCharFilter.java create mode 100644 x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/MultiCharSequence.java create mode 100644 x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/WordPieceAnalyzer.java create mode 100644 
x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/WordPieceTokenFilter.java delete mode 100644 x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/WordPieceTokenizer.java create mode 100644 x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/BasicTokenFilterTests.java delete mode 100644 x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/BasicTokenizerTests.java rename x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/{TokenTrieNodeTests.java => CharSeqTokenTrieNodeTests.java} (66%) create mode 100644 x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/ControlCharFilterTests.java create mode 100644 x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/MultiCharSequenceTests.java create mode 100644 x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/WordPieceTokenFilterTests.java delete mode 100644 x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/WordPieceTokenizerTests.java diff --git a/x-pack/plugin/ml/build.gradle b/x-pack/plugin/ml/build.gradle index 68254206ec675..0efcce2d23fc6 100644 --- a/x-pack/plugin/ml/build.gradle +++ b/x-pack/plugin/ml/build.gradle @@ -72,6 +72,8 @@ dependencies { // ml deps api project(':libs:elasticsearch-grok') api "org.apache.commons:commons-math3:3.6.1" + api "com.ibm.icu:icu4j:${versions.icu4j}" + api "org.apache.lucene:lucene-analysis-icu:${versions.lucene}" nativeBundle("org.elasticsearch.ml:ml-cpp:${project.version}@zip") { changing = true } @@ -102,4 +104,8 @@ project.afterEvaluate { } } +tasks.named("dependencyLicenses").configure { + mapping from: /lucene-.*/, to: 'lucene' +} + addQaCheckDependencies() diff --git a/x-pack/plugin/ml/licenses/icu4j-68.2.jar.sha1 b/x-pack/plugin/ml/licenses/icu4j-68.2.jar.sha1 new file mode 100644 index 0000000000000..fcb3d79075099 --- /dev/null +++ b/x-pack/plugin/ml/licenses/icu4j-68.2.jar.sha1 @@ -0,0 +1 @@ +76893e6000401ace133a65262254be0ebe556d46 \ No newline at end of file diff --git a/x-pack/plugin/ml/licenses/icu4j-LICENSE.txt b/x-pack/plugin/ml/licenses/icu4j-LICENSE.txt new file mode 100644 index 0000000000000..e76faec4ad20f --- /dev/null +++ b/x-pack/plugin/ml/licenses/icu4j-LICENSE.txt @@ -0,0 +1,33 @@ +ICU License - ICU 1.8.1 and later + +COPYRIGHT AND PERMISSION NOTICE + +Copyright (c) 1995-2012 International Business Machines Corporation and others + +All rights reserved. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, and/or sell copies of the +Software, and to permit persons to whom the Software is furnished to do so, +provided that the above copyright notice(s) and this permission notice appear +in all copies of the Software and that both the above copyright notice(s) and +this permission notice appear in supporting documentation. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT OF THIRD PARTY RIGHTS. 
+IN NO EVENT SHALL THE COPYRIGHT HOLDER OR HOLDERS INCLUDED IN THIS NOTICE BE +LIABLE FOR ANY CLAIM, OR ANY SPECIAL INDIRECT OR CONSEQUENTIAL DAMAGES, OR +ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER +IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +Except as contained in this notice, the name of a copyright holder shall not +be used in advertising or otherwise to promote the sale, use or other +dealings in this Software without prior written authorization of the +copyright holder. + +All trademarks and registered trademarks mentioned herein are the property of +their respective owners. diff --git a/x-pack/plugin/ml/licenses/icu4j-NOTICE.txt b/x-pack/plugin/ml/licenses/icu4j-NOTICE.txt new file mode 100644 index 0000000000000..47eeab14f2ef6 --- /dev/null +++ b/x-pack/plugin/ml/licenses/icu4j-NOTICE.txt @@ -0,0 +1,3 @@ +ICU4J, (under lucene/analysis/icu) is licensed under an MIT style license +(modules/analysis/icu/lib/icu4j-LICENSE-BSD_LIKE.txt) and Copyright (c) 1995-2012 +International Business Machines Corporation and others \ No newline at end of file diff --git a/x-pack/plugin/ml/licenses/lucene-LICENSE.txt b/x-pack/plugin/ml/licenses/lucene-LICENSE.txt new file mode 100644 index 0000000000000..28b134f5f8e4d --- /dev/null +++ b/x-pack/plugin/ml/licenses/lucene-LICENSE.txt @@ -0,0 +1,475 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. 
For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + + + +Some code in core/src/java/org/apache/lucene/util/UnicodeUtil.java was +derived from unicode conversion examples available at +http://www.unicode.org/Public/PROGRAMS/CVTUTF. Here is the copyright +from those sources: + +/* + * Copyright 2001-2004 Unicode, Inc. + * + * Disclaimer + * + * This source code is provided as is by Unicode, Inc. No claims are + * made as to fitness for any particular purpose. No warranties of any + * kind are expressed or implied. The recipient agrees to determine + * applicability of information provided. If this file has been + * purchased on magnetic or optical media from Unicode, Inc., the + * sole remedy for any claim will be exchange of defective media + * within 90 days of receipt. + * + * Limitations on Rights to Redistribute This Code + * + * Unicode, Inc. 
hereby grants the right to freely use the information + * supplied in this file in the creation of products supporting the + * Unicode Standard, and to make copies of this file in any form + * for internal or external distribution as long as this notice + * remains attached. + */ + + +Some code in core/src/java/org/apache/lucene/util/ArrayUtil.java was +derived from Python 2.4.2 sources available at +http://www.python.org. Full license is here: + + http://www.python.org/download/releases/2.4.2/license/ + +Some code in core/src/java/org/apache/lucene/util/UnicodeUtil.java was +derived from Python 3.1.2 sources available at +http://www.python.org. Full license is here: + + http://www.python.org/download/releases/3.1.2/license/ + +Some code in core/src/java/org/apache/lucene/util/automaton was +derived from Brics automaton sources available at +www.brics.dk/automaton/. Here is the copyright from those sources: + +/* + * Copyright (c) 2001-2009 Anders Moeller + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions + * are met: + * 1. Redistributions of source code must retain the above copyright + * notice, this list of conditions and the following disclaimer. + * 2. Redistributions in binary form must reproduce the above copyright + * notice, this list of conditions and the following disclaimer in the + * documentation and/or other materials provided with the distribution. + * 3. The name of the author may not be used to endorse or promote products + * derived from this software without specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR + * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES + * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. + * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, + * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT + * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF + * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + */ + +The levenshtein automata tables in core/src/java/org/apache/lucene/util/automaton +were automatically generated with the moman/finenight FSA package. +Here is the copyright for those sources: + +# Copyright (c) 2010, Jean-Philippe Barrette-LaPierre, +# +# Permission is hereby granted, free of charge, to any person +# obtaining a copy of this software and associated documentation +# files (the "Software"), to deal in the Software without +# restriction, including without limitation the rights to use, +# copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the +# Software is furnished to do so, subject to the following +# conditions: +# +# The above copyright notice and this permission notice shall be +# included in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR +# OTHER DEALINGS IN THE SOFTWARE. + +Some code in core/src/java/org/apache/lucene/util/UnicodeUtil.java was +derived from ICU (http://www.icu-project.org) +The full license is available here: + http://source.icu-project.org/repos/icu/icu/trunk/license.html + +/* + * Copyright (C) 1999-2010, International Business Machines + * Corporation and others. All Rights Reserved. + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, and/or sell copies of the + * Software, and to permit persons to whom the Software is furnished to do so, + * provided that the above copyright notice(s) and this permission notice appear + * in all copies of the Software and that both the above copyright notice(s) and + * this permission notice appear in supporting documentation. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT OF THIRD PARTY RIGHTS. + * IN NO EVENT SHALL THE COPYRIGHT HOLDER OR HOLDERS INCLUDED IN THIS NOTICE BE + * LIABLE FOR ANY CLAIM, OR ANY SPECIAL INDIRECT OR CONSEQUENTIAL DAMAGES, OR + * ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER + * IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT + * OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + * + * Except as contained in this notice, the name of a copyright holder shall not + * be used in advertising or otherwise to promote the sale, use or other + * dealings in this Software without prior written authorization of the + * copyright holder. + */ + +The following license applies to the Snowball stemmers: + +Copyright (c) 2001, Dr Martin Porter +Copyright (c) 2002, Richard Boulton +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, + * this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright + * notice, this list of conditions and the following disclaimer in the + * documentation and/or other materials provided with the distribution. + * Neither the name of the copyright holders nor the names of its contributors + * may be used to endorse or promote products derived from this software + * without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +The following license applies to the KStemmer: + +Copyright © 2003, +Center for Intelligent Information Retrieval, +University of Massachusetts, Amherst. +All rights reserved. + +Redistribution and use in source and binary forms, with or without modification, +are permitted provided that the following conditions are met: + +1. Redistributions of source code must retain the above copyright notice, this +list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright notice, +this list of conditions and the following disclaimer in the documentation +and/or other materials provided with the distribution. + +3. The names "Center for Intelligent Information Retrieval" and +"University of Massachusetts" must not be used to endorse or promote products +derived from this software without prior written permission. To obtain +permission, contact info@ciir.cs.umass.edu. + +THIS SOFTWARE IS PROVIDED BY UNIVERSITY OF MASSACHUSETTS AND OTHER CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, +THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR CONTRIBUTORS BE +LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE +GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) +HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY +OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF +SUCH DAMAGE. + +The following license applies to the Morfologik project: + +Copyright (c) 2006 Dawid Weiss +Copyright (c) 2007-2011 Dawid Weiss, Marcin Miłkowski +All rights reserved. + +Redistribution and use in source and binary forms, with or without modification, +are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + + * Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + + * Neither the name of Morfologik nor the names of its contributors + may be used to endorse or promote products derived from this software + without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR +ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON +ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +--- + +The dictionary comes from Morfologik project. Morfologik uses data from +Polish ispell/myspell dictionary hosted at http://www.sjp.pl/slownik/en/ and +is licenced on the terms of (inter alia) LGPL and Creative Commons +ShareAlike. The part-of-speech tags were added in Morfologik project and +are not found in the data from sjp.pl. The tagset is similar to IPI PAN +tagset. + +--- + +The following license applies to the Morfeusz project, +used by org.apache.lucene.analysis.morfologik. + +BSD-licensed dictionary of Polish (SGJP) +http://sgjp.pl/morfeusz/ + +Copyright © 2011 Zygmunt Saloni, Włodzimierz Gruszczyński, + Marcin Woliński, Robert Wołosz + +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + +1. Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the + distribution. + +THIS SOFTWARE IS PROVIDED BY COPYRIGHT HOLDERS “AS IS” AND ANY EXPRESS +OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL COPYRIGHT HOLDERS OR CONTRIBUTORS BE +LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR +BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, +WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE +OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN +IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/x-pack/plugin/ml/licenses/lucene-NOTICE.txt b/x-pack/plugin/ml/licenses/lucene-NOTICE.txt new file mode 100644 index 0000000000000..1a1d51572432a --- /dev/null +++ b/x-pack/plugin/ml/licenses/lucene-NOTICE.txt @@ -0,0 +1,192 @@ +Apache Lucene +Copyright 2014 The Apache Software Foundation + +This product includes software developed at +The Apache Software Foundation (http://www.apache.org/). + +Includes software from other Apache Software Foundation projects, +including, but not limited to: + - Apache Ant + - Apache Jakarta Regexp + - Apache Commons + - Apache Xerces + +ICU4J, (under analysis/icu) is licensed under an MIT styles license +and Copyright (c) 1995-2008 International Business Machines Corporation and others + +Some data files (under analysis/icu/src/data) are derived from Unicode data such +as the Unicode Character Database. See http://unicode.org/copyright.html for more +details. + +Brics Automaton (under core/src/java/org/apache/lucene/util/automaton) is +BSD-licensed, created by Anders Møller. 
See http://www.brics.dk/automaton/ + +The levenshtein automata tables (under core/src/java/org/apache/lucene/util/automaton) were +automatically generated with the moman/finenight FSA library, created by +Jean-Philippe Barrette-LaPierre. This library is available under an MIT license, +see http://sites.google.com/site/rrettesite/moman and +http://bitbucket.org/jpbarrette/moman/overview/ + +The class org.apache.lucene.util.WeakIdentityMap was derived from +the Apache CXF project and is Apache License 2.0. + +The Google Code Prettify is Apache License 2.0. +See http://code.google.com/p/google-code-prettify/ + +JUnit (junit-4.10) is licensed under the Common Public License v. 1.0 +See http://junit.sourceforge.net/cpl-v10.html + +This product includes code (JaspellTernarySearchTrie) from Java Spelling Checkin +g Package (jaspell): http://jaspell.sourceforge.net/ +License: The BSD License (http://www.opensource.org/licenses/bsd-license.php) + +The snowball stemmers in + analysis/common/src/java/net/sf/snowball +were developed by Martin Porter and Richard Boulton. +The snowball stopword lists in + analysis/common/src/resources/org/apache/lucene/analysis/snowball +were developed by Martin Porter and Richard Boulton. +The full snowball package is available from + http://snowball.tartarus.org/ + +The KStem stemmer in + analysis/common/src/org/apache/lucene/analysis/en +was developed by Bob Krovetz and Sergio Guzman-Lara (CIIR-UMass Amherst) +under the BSD-license. + +The Arabic,Persian,Romanian,Bulgarian, Hindi and Bengali analyzers (common) come with a default +stopword list that is BSD-licensed created by Jacques Savoy. These files reside in: +analysis/common/src/resources/org/apache/lucene/analysis/ar/stopwords.txt, +analysis/common/src/resources/org/apache/lucene/analysis/fa/stopwords.txt, +analysis/common/src/resources/org/apache/lucene/analysis/ro/stopwords.txt, +analysis/common/src/resources/org/apache/lucene/analysis/bg/stopwords.txt, +analysis/common/src/resources/org/apache/lucene/analysis/hi/stopwords.txt, +analysis/common/src/resources/org/apache/lucene/analysis/bn/stopwords.txt +See http://members.unine.ch/jacques.savoy/clef/index.html. + +The German,Spanish,Finnish,French,Hungarian,Italian,Portuguese,Russian and Swedish light stemmers +(common) are based on BSD-licensed reference implementations created by Jacques Savoy and +Ljiljana Dolamic. These files reside in: +analysis/common/src/java/org/apache/lucene/analysis/de/GermanLightStemmer.java +analysis/common/src/java/org/apache/lucene/analysis/de/GermanMinimalStemmer.java +analysis/common/src/java/org/apache/lucene/analysis/es/SpanishLightStemmer.java +analysis/common/src/java/org/apache/lucene/analysis/fi/FinnishLightStemmer.java +analysis/common/src/java/org/apache/lucene/analysis/fr/FrenchLightStemmer.java +analysis/common/src/java/org/apache/lucene/analysis/fr/FrenchMinimalStemmer.java +analysis/common/src/java/org/apache/lucene/analysis/hu/HungarianLightStemmer.java +analysis/common/src/java/org/apache/lucene/analysis/it/ItalianLightStemmer.java +analysis/common/src/java/org/apache/lucene/analysis/pt/PortugueseLightStemmer.java +analysis/common/src/java/org/apache/lucene/analysis/ru/RussianLightStemmer.java +analysis/common/src/java/org/apache/lucene/analysis/sv/SwedishLightStemmer.java + +The Stempel analyzer (stempel) includes BSD-licensed software developed +by the Egothor project http://egothor.sf.net/, created by Leo Galambos, Martin Kvapil, +and Edmond Nolan. 
+ +The Polish analyzer (stempel) comes with a default +stopword list that is BSD-licensed created by the Carrot2 project. The file resides +in stempel/src/resources/org/apache/lucene/analysis/pl/stopwords.txt. +See http://project.carrot2.org/license.html. + +The SmartChineseAnalyzer source code (smartcn) was +provided by Xiaoping Gao and copyright 2009 by www.imdict.net. + +WordBreakTestUnicode_*.java (under modules/analysis/common/src/test/) +is derived from Unicode data such as the Unicode Character Database. +See http://unicode.org/copyright.html for more details. + +The Morfologik analyzer (morfologik) includes BSD-licensed software +developed by Dawid Weiss and Marcin Miłkowski (http://morfologik.blogspot.com/). + +Morfologik uses data from Polish ispell/myspell dictionary +(http://www.sjp.pl/slownik/en/) licenced on the terms of (inter alia) +LGPL and Creative Commons ShareAlike. + +Morfologic includes data from BSD-licensed dictionary of Polish (SGJP) +(http://sgjp.pl/morfeusz/) + +Servlet-api.jar and javax.servlet-*.jar are under the CDDL license, the original +source code for this can be found at http://www.eclipse.org/jetty/downloads.php + +=========================================================================== +Kuromoji Japanese Morphological Analyzer - Apache Lucene Integration +=========================================================================== + +This software includes a binary and/or source version of data from + + mecab-ipadic-2.7.0-20070801 + +which can be obtained from + + http://atilika.com/releases/mecab-ipadic/mecab-ipadic-2.7.0-20070801.tar.gz + +or + + http://jaist.dl.sourceforge.net/project/mecab/mecab-ipadic/2.7.0-20070801/mecab-ipadic-2.7.0-20070801.tar.gz + +=========================================================================== +mecab-ipadic-2.7.0-20070801 Notice +=========================================================================== + +Nara Institute of Science and Technology (NAIST), +the copyright holders, disclaims all warranties with regard to this +software, including all implied warranties of merchantability and +fitness, in no event shall NAIST be liable for +any special, indirect or consequential damages or any damages +whatsoever resulting from loss of use, data or profits, whether in an +action of contract, negligence or other tortuous action, arising out +of or in connection with the use or performance of this software. + +A large portion of the dictionary entries +originate from ICOT Free Software. The following conditions for ICOT +Free Software applies to the current dictionary as well. + +Each User may also freely distribute the Program, whether in its +original form or modified, to any third party or parties, PROVIDED +that the provisions of Section 3 ("NO WARRANTY") will ALWAYS appear +on, or be attached to, the Program, which is distributed substantially +in the same form as set out herein and that such intended +distribution, if actually made, will neither violate or otherwise +contravene any of the laws and regulations of the countries having +jurisdiction over the User or the intended distribution itself. + +NO WARRANTY + +The program was produced on an experimental basis in the course of the +research and development conducted during the project and is provided +to users as so produced on an experimental basis. Accordingly, the +program is provided without any warranty whatsoever, whether express, +implied, statutory or otherwise. 
The term "warranty" used herein +includes, but is not limited to, any warranty of the quality, +performance, merchantability and fitness for a particular purpose of +the program and the nonexistence of any infringement or violation of +any right of any third party. + +Each user of the program will agree and understand, and be deemed to +have agreed and understood, that there is no warranty whatsoever for +the program and, accordingly, the entire risk arising from or +otherwise connected with the program is assumed by the user. + +Therefore, neither ICOT, the copyright holder, or any other +organization that participated in or was otherwise related to the +development of the program and their respective officials, directors, +officers and other employees shall be held liable for any and all +damages, including, without limitation, general, special, incidental +and consequential damages, arising out of or otherwise in connection +with the use or inability to use the program or any product, material +or result produced or otherwise obtained by using the program, +regardless of whether they have been advised of, or otherwise had +knowledge of, the possibility of such damages at any time during the +project or thereafter. Each user will be deemed to have agreed to the +foregoing by his or her commencement of use of the program. The term +"use" as used herein includes, but is not limited to, the use, +modification, copying and distribution of the program and the +production of secondary products from the program. + +In the case where the program, whether in its original form or +modified, was distributed or delivered to or received by a user from +any person, organization or entity other than ICOT, unless it makes or +grants independently of ICOT any specific warranty to the user in +writing, such person, organization or entity, will also be exempted +from and not be held liable to the user for any such damages as noted +above as far as the program is concerned. 
diff --git a/x-pack/plugin/ml/licenses/lucene-analysis-icu-9.0.0.jar.sha1 b/x-pack/plugin/ml/licenses/lucene-analysis-icu-9.0.0.jar.sha1 new file mode 100644 index 0000000000000..a0df1a4b7cb2e --- /dev/null +++ b/x-pack/plugin/ml/licenses/lucene-analysis-icu-9.0.0.jar.sha1 @@ -0,0 +1 @@ +a23a2c1c9baad61b6fb5380f072e41534c275875 \ No newline at end of file diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/deployment/DeploymentManager.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/deployment/DeploymentManager.java index b49b2a950dcce..34e7f6d0740e0 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/deployment/DeploymentManager.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/deployment/DeploymentManager.java @@ -478,15 +478,19 @@ synchronized void startProcess() { synchronized void stopProcess() { resultProcessor.stop(); executorService.shutdown(); - if (process.get() == null) { - return; - } try { + if (process.get() == null) { + return; + } stateStreamer.cancel(); process.get().kill(true); processContextByAllocation.remove(task.getId()); } catch (IOException e) { logger.error(new ParameterizedMessage("[{}] Failed to kill process", task.getModelId()), e); + } finally { + if (nlpTaskProcessor.get() != null) { + nlpTaskProcessor.get().close(); + } } } @@ -496,6 +500,9 @@ private Consumer onProcessCrash() { resultProcessor.stop(); executorService.shutdownWithError(new IllegalStateException(reason)); processContextByAllocation.remove(task.getId()); + if (nlpTaskProcessor.get() != null) { + nlpTaskProcessor.get().close(); + } task.setFailed("inference process crashed due to reason [" + reason + "]"); }; } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/FillMaskProcessor.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/FillMaskProcessor.java index 5343cd94e141d..db79d24e35821 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/FillMaskProcessor.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/FillMaskProcessor.java @@ -34,6 +34,11 @@ public class FillMaskProcessor implements NlpTask.Processor { this.tokenizer = tokenizer; } + @Override + public void close() { + tokenizer.close(); + } + @Override public void validateInputs(List inputs) { ValidationException ve = new ValidationException(); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/NerProcessor.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/NerProcessor.java index ac0395ce31b48..177bc387ea87a 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/NerProcessor.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/NerProcessor.java @@ -83,6 +83,7 @@ boolean isBeginning() { private final IobTag[] iobMap; private final String resultsField; private final boolean ignoreCase; + private final NlpTokenizer tokenizer; NerProcessor(NlpTokenizer tokenizer, NerConfig config) { validate(config.getClassificationLabels()); @@ -90,6 +91,12 @@ boolean isBeginning() { this.requestBuilder = tokenizer.requestBuilder(); this.resultsField = config.getResultsField(); this.ignoreCase = config.getTokenization().doLowerCase(); + this.tokenizer = tokenizer; + } + + @Override + public void close() { + tokenizer.close(); } /** @@ -232,11 +239,13 @@ public InferenceResults 
processResult(TokenizationResult tokenization, PyTorchIn static List tagTokens(TokenizationResult.Tokenization tokenization, double[][] scores, IobTag[] iobMap) { List taggedTokens = new ArrayList<>(); int startTokenIndex = 0; + int numSpecialTokens = 0; while (startTokenIndex < tokenization.getTokenIds().length) { int inputMapping = tokenization.getTokenMap()[startTokenIndex]; if (inputMapping < 0) { // This token does not map to a token in the input (special tokens) startTokenIndex++; + numSpecialTokens++; continue; } int endTokenIndex = startTokenIndex; @@ -258,7 +267,9 @@ static List tagTokens(TokenizationResult.Tokenization tokenization, } int maxScoreIndex = NlpHelpers.argmax(avgScores); double score = avgScores[maxScoreIndex]; - taggedTokens.add(new TaggedToken(tokenization.getTokens().get(inputMapping), iobMap[maxScoreIndex], score)); + taggedTokens.add( + new TaggedToken(tokenization.getTokens().get(startTokenIndex - numSpecialTokens), iobMap[maxScoreIndex], score) + ); startTokenIndex = endTokenIndex + 1; } return taggedTokens; @@ -296,8 +307,8 @@ static List groupTaggedTokens(List tokens, endTokenIndex++; } - int startPos = token.token.getStartPos(); - int endPos = tokens.get(endTokenIndex - 1).token.getEndPos(); + int startPos = token.token.startOffset(); + int endPos = tokens.get(endTokenIndex - 1).token.endOffset(); String entity = inputSeq.substring(startPos, endPos); entities.add( new NerResults.EntityGroup( diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/NlpTask.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/NlpTask.java index a7fe77dc67e84..4ee96e78db0a6 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/NlpTask.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/NlpTask.java @@ -10,6 +10,7 @@ import org.elasticsearch.common.ValidationException; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.xcontent.support.XContentMapValues; +import org.elasticsearch.core.Releasable; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xpack.core.ml.inference.TrainedModelInput; import org.elasticsearch.xpack.core.ml.inference.results.InferenceResults; @@ -105,7 +106,7 @@ public interface ResultProcessor { InferenceResults processResult(TokenizationResult tokenization, PyTorchInferenceResult pyTorchResult); } - public interface Processor { + public interface Processor extends Releasable { /** * Validate the task input string. 
* Throws an exception if the inputs fail validation diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/PassThroughProcessor.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/PassThroughProcessor.java index d613f1fc3da19..146967ffb04d4 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/PassThroughProcessor.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/PassThroughProcessor.java @@ -27,11 +27,18 @@ public class PassThroughProcessor implements NlpTask.Processor { private final NlpTask.RequestBuilder requestBuilder; + private final NlpTokenizer tokenizer; private final String resultsField; PassThroughProcessor(NlpTokenizer tokenizer, PassThroughConfig config) { this.requestBuilder = tokenizer.requestBuilder(); this.resultsField = config.getResultsField(); + this.tokenizer = tokenizer; + } + + @Override + public void close() { + tokenizer.close(); } @Override diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/TextClassificationProcessor.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/TextClassificationProcessor.java index 7f8f66123cb3c..77e994cbd06f0 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/TextClassificationProcessor.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/TextClassificationProcessor.java @@ -30,6 +30,7 @@ public class TextClassificationProcessor implements NlpTask.Processor { private final NlpTask.RequestBuilder requestBuilder; + private final NlpTokenizer tokenizer; private final String[] classLabels; private final int numTopClasses; @@ -40,6 +41,12 @@ public class TextClassificationProcessor implements NlpTask.Processor { // negative values are a special case of asking for ALL classes. Since we require the output size to equal the classLabel size // This is a nice way of setting the value this.numTopClasses = config.getNumTopClasses() < 0 ? 
this.classLabels.length : config.getNumTopClasses(); + this.tokenizer = tokenizer; + } + + @Override + public void close() { + tokenizer.close(); } @Override diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/TextEmbeddingProcessor.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/TextEmbeddingProcessor.java index f035519b39c41..b26355fa9f473 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/TextEmbeddingProcessor.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/TextEmbeddingProcessor.java @@ -26,9 +26,16 @@ public class TextEmbeddingProcessor implements NlpTask.Processor { private final NlpTask.RequestBuilder requestBuilder; + private final NlpTokenizer tokenizer; TextEmbeddingProcessor(NlpTokenizer tokenizer, TextEmbeddingConfig config) { this.requestBuilder = tokenizer.requestBuilder(); + this.tokenizer = tokenizer; + } + + @Override + public void close() { + tokenizer.close(); } @Override diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/ZeroShotClassificationProcessor.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/ZeroShotClassificationProcessor.java index 26db36fcd16d4..699dd0084f1b7 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/ZeroShotClassificationProcessor.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/ZeroShotClassificationProcessor.java @@ -62,6 +62,11 @@ public class ZeroShotClassificationProcessor implements NlpTask.Processor { this.resultsField = config.getResultsField(); } + @Override + public void close() { + tokenizer.close(); + } + @Override public void validateInputs(List inputs) { // nothing to validate diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/BasicTokenFilter.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/BasicTokenFilter.java new file mode 100644 index 0000000000000..8eba67c374aad --- /dev/null +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/BasicTokenFilter.java @@ -0,0 +1,272 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.ml.inference.nlp.tokenizers; + +import com.carrotsearch.hppc.IntArrayList; +import com.ibm.icu.text.Normalizer; +import com.ibm.icu.text.Normalizer2; + +import org.apache.lucene.analysis.Analyzer; +import org.apache.lucene.analysis.CharArraySet; +import org.apache.lucene.analysis.TokenFilter; +import org.apache.lucene.analysis.TokenStream; +import org.apache.lucene.analysis.core.WhitespaceTokenizer; +import org.apache.lucene.analysis.tokenattributes.CharTermAttribute; +import org.apache.lucene.analysis.tokenattributes.OffsetAttribute; + +import java.io.IOException; +import java.io.Reader; +import java.util.ArrayList; +import java.util.LinkedList; +import java.util.List; +import java.util.PrimitiveIterator; +import java.util.function.IntPredicate; + +/** + * Assumes that the text is already whitespace tokenized + */ +public final class BasicTokenFilter extends TokenFilter { + private final CharTermAttribute termAtt = addAttribute(CharTermAttribute.class); + private final OffsetAttribute offsetAtt = addAttribute(OffsetAttribute.class); + + private final CharSeqTokenTrieNode neverSplit; + private final LinkedList tokens; + private final boolean isStripAccents; + private final CharArraySet neverSplitSet; + private final Normalizer2 normalizer; + private final StringBuilder accentBuffer = new StringBuilder(); + private final IntPredicate splitOn; + + private State current; + + public static BasicTokenFilter build(boolean isTokenizeCjkChars, boolean isStripAccents, List neverSplit, TokenStream input) + throws IOException { + Analyzer analyzer = new Analyzer() { + @Override + protected TokenStreamComponents createComponents(String fieldName) { + WhitespaceTokenizer tokenizer = new WhitespaceTokenizer(); + TokenStream stream = new BasicTokenFilter( + tokenizer, + CharSeqTokenTrieNode.EMPTY, + CharArraySet.EMPTY_SET, + isStripAccents, + isTokenizeCjkChars + ); + return new TokenStreamComponents(tokenizer, stream); + } + + @Override + protected Reader initReader(String fieldName, Reader reader) { + return new ControlCharFilter(reader); + } + }; + CharArraySet neverSplitSet = new CharArraySet(neverSplit, false); + CharSeqTokenTrieNode neverSplitTree; + try (analyzer) { + neverSplitTree = CharSeqTokenTrieNode.build(neverSplit, c -> { + try (TokenStream ts = analyzer.tokenStream("never_split", c)) { + CharTermAttribute term = ts.addAttribute(CharTermAttribute.class); + ts.reset(); + List tokens = new ArrayList<>(); + while (ts.incrementToken()) { + tokens.add(term.toString()); + } + return tokens; + } + }); + } + return new BasicTokenFilter(input, neverSplitTree, neverSplitSet, isStripAccents, isTokenizeCjkChars); + } + + public BasicTokenFilter( + TokenStream input, + CharSeqTokenTrieNode neverSplit, + CharArraySet neverSplitSet, + boolean isStripAccents, + boolean isTokenizeCjkChars + ) { + super(input); + this.neverSplit = neverSplit; + this.neverSplitSet = neverSplitSet; + this.tokens = new LinkedList<>(); + this.isStripAccents = isStripAccents; + this.normalizer = Normalizer2.getNFDInstance(); + this.splitOn = cp -> (isTokenizeCjkChars && isCjkChar(cp)) || isPunctuationMark(cp); + } + + @Override + public void reset() throws IOException { + super.reset(); + tokens.clear(); + accentBuffer.setLength(0); + current = null; + } + + @Override + public boolean incrementToken() throws IOException { + if (tokens.isEmpty() == false) { + assert current != null; + DelimitedToken token = tokens.removeFirst(); + restoreState(current); // keep all other attributes 
untouched + termAtt.setEmpty().append(token.charSequence()); + offsetAtt.setOffset(token.startOffset(), token.endOffset()); + return true; + } + current = null; // not really needed, but for safety + if (input.incrementToken()) { + if (isStripAccents) { + stripAccent(); + } + if (neverSplitSet.contains(termAtt)) { + return true; + } + // split punctuation and maybe cjk chars!!! + LinkedList splits = split(); + // There is nothing to merge, nothing to store, simply return + if (splits.size() == 1) { + return true; + } + tokens.addAll(mergeSplits(splits)); + this.current = captureState(); + DelimitedToken token = tokens.removeFirst(); + termAtt.setEmpty().append(token.charSequence()); + offsetAtt.setOffset(token.startOffset(), token.endOffset()); + return true; + } + return false; + } + + void stripAccent() { + accentBuffer.setLength(0); + if (normalizer.quickCheck(termAtt) != Normalizer.YES) { + normalizer.normalize(termAtt, accentBuffer); + } + IntArrayList badIndices = new IntArrayList(); + IntArrayList charCount = new IntArrayList(); + int index = 0; + for (PrimitiveIterator.OfInt it = accentBuffer.codePoints().iterator(); it.hasNext();) { + int cp = it.next(); + if (Character.getType(cp) == Character.NON_SPACING_MARK) { + badIndices.add(index); + charCount.add(Character.charCount(cp)); + } + index++; + } + if (badIndices.isEmpty()) { + return; + } + for (int i = 0; i < badIndices.size(); i++) { + int badIndex = badIndices.get(i); + int count = charCount.get(i); + for (int j = 0; j < count && badIndex < accentBuffer.length(); j++) { + accentBuffer.deleteCharAt(badIndex); + } + } + termAtt.setEmpty().append(accentBuffer); + } + + private LinkedList split() { + LinkedList splits = new LinkedList<>(); + int startOffset = offsetAtt.startOffset(); + int charIndex = 0; + int lastCharSplit = 0; + for (PrimitiveIterator.OfInt it = termAtt.codePoints().iterator(); it.hasNext();) { + int cp = it.next(); + if (splitOn.test(cp)) { + int charCount = charIndex - lastCharSplit; + if (charCount > 0) { + splits.add( + new DelimitedToken( + termAtt.subSequence(lastCharSplit, charIndex), + lastCharSplit + startOffset, + charIndex + startOffset + ) + ); + } + splits.add( + new DelimitedToken(termAtt.subSequence(charIndex, charIndex + 1), charIndex + startOffset, charIndex + 1 + startOffset) + ); + lastCharSplit = charIndex + 1; + } + charIndex += Character.charCount(cp); + } + if (lastCharSplit < termAtt.length()) { + splits.add( + new DelimitedToken(termAtt.subSequence(lastCharSplit, termAtt.length()), lastCharSplit + startOffset, offsetAtt.endOffset()) + ); + } + return splits; + } + + private LinkedList mergeSplits(LinkedList splits) { + LinkedList mergedTokens = new LinkedList<>(); + List matchingTokens = new ArrayList<>(); + CharSeqTokenTrieNode current = neverSplit; + for (DelimitedToken token : splits) { + CharSeqTokenTrieNode childNode = current.getChild(token.charSequence()); + if (childNode == null) { + if (current != neverSplit) { + mergedTokens.addAll(matchingTokens); + matchingTokens = new ArrayList<>(); + current = neverSplit; + } + childNode = current.getChild(token.charSequence()); + if (childNode == null) { + mergedTokens.add(token); + } else { + matchingTokens.add(token); + current = childNode; + } + } else if (childNode.isLeaf()) { + matchingTokens.add(token); + DelimitedToken mergedToken = DelimitedToken.mergeTokens(matchingTokens); + if (neverSplitSet.contains(mergedToken.charSequence())) { + mergedTokens.add(mergedToken); + } else { + mergedTokens.addAll(matchingTokens); + } + 
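+                    // a full never-split phrase was matched in the trie; whether it was merged or
+                    // flushed piece-by-piece, restart matching from the trie root for the next token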
matchingTokens = new ArrayList<>(); + current = neverSplit; + } else { + matchingTokens.add(token); + current = childNode; + } + } + if (matchingTokens.isEmpty() == false) { + mergedTokens.addAll(matchingTokens); + } + return mergedTokens; + } + + static boolean isPunctuationMark(int codePoint) { + if ((codePoint >= 33 && codePoint <= 47) + || (codePoint >= 58 && codePoint <= 64) + || (codePoint >= 91 && codePoint <= 96) + || (codePoint >= 123 && codePoint <= 126)) { + return true; + } + + int category = Character.getType(codePoint); + return (category >= Character.DASH_PUNCTUATION && category <= Character.OTHER_PUNCTUATION) + || (category >= Character.INITIAL_QUOTE_PUNCTUATION && category <= Character.FINAL_QUOTE_PUNCTUATION); + } + + private static boolean isCjkChar(int codePoint) { + // https://en.wikipedia.org/wiki/CJK_Unified_Ideographs_(Unicode_block) + Character.UnicodeBlock block = Character.UnicodeBlock.of(codePoint); + return Character.UnicodeBlock.CJK_COMPATIBILITY_IDEOGRAPHS.equals(block) + || Character.UnicodeBlock.CJK_UNIFIED_IDEOGRAPHS.equals(block) + || Character.UnicodeBlock.CJK_UNIFIED_IDEOGRAPHS_EXTENSION_A.equals(block) + || Character.UnicodeBlock.CJK_UNIFIED_IDEOGRAPHS_EXTENSION_B.equals(block) + || Character.UnicodeBlock.CJK_UNIFIED_IDEOGRAPHS_EXTENSION_C.equals(block) + || Character.UnicodeBlock.CJK_UNIFIED_IDEOGRAPHS_EXTENSION_D.equals(block) + || Character.UnicodeBlock.CJK_UNIFIED_IDEOGRAPHS_EXTENSION_E.equals(block) + || Character.UnicodeBlock.CJK_COMPATIBILITY_IDEOGRAPHS_SUPPLEMENT.equals(block); + } + +} diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/BasicTokenizer.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/BasicTokenizer.java deleted file mode 100644 index 561fd429422bf..0000000000000 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/BasicTokenizer.java +++ /dev/null @@ -1,381 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ -package org.elasticsearch.xpack.ml.inference.nlp.tokenizers; - -import joptsimple.internal.Strings; - -import java.text.Normalizer; -import java.util.ArrayList; -import java.util.Collections; -import java.util.List; -import java.util.Locale; -import java.util.Set; -import java.util.concurrent.atomic.AtomicBoolean; -import java.util.stream.Collectors; - -/** - * Basic tokenization of text by whitespace with optional extras: - * 1. Lower case the input - * 2. Convert to Unicode NFD - * 3. Stip accents - * 4. Surround CJK characters with ' ' - * - * Derived from - * https://github.com/huggingface/transformers/blob/ba8c4d0ac04acfcdbdeaed954f698d6d5ec3e532/src/transformers/tokenization_bert.py - */ -public class BasicTokenizer { - - private final boolean isLowerCase; - private final boolean isTokenizeCjkChars; - private final boolean isStripAccents; - private final Set neverSplitTokens; - private final TokenTrieNode neverSplitTokenTrieRoot; - - /** - * Tokenizer behaviour is controlled by the options passed here. 
- * - * @param isLowerCase If true convert the input to lowercase - * @param isTokenizeCjkChars Should CJK ideographs be tokenized - * @param isStripAccents Strip all accents - * @param neverSplit The set of tokens that should not be split - */ - public BasicTokenizer(boolean isLowerCase, boolean isTokenizeCjkChars, boolean isStripAccents, Set neverSplit) { - this.isLowerCase = isLowerCase; - this.isTokenizeCjkChars = isTokenizeCjkChars; - this.isStripAccents = isStripAccents; - this.neverSplitTokens = neverSplit; - this.neverSplitTokenTrieRoot = TokenTrieNode.build(neverSplit, this::doTokenizeString); - } - - public BasicTokenizer(boolean isLowerCase, boolean isTokenizeCjkChars, boolean isStripAccents) { - this(isLowerCase, isTokenizeCjkChars, isStripAccents, Collections.emptySet()); - } - - /** - * Tokenize CJK chars defaults to the value of {@code isLowerCase} - * when not explicitly set - * @param isLowerCase If true convert the input to lowercase - * @param isTokenizeCjkChars Should CJK ideographs be tokenized - */ - public BasicTokenizer(boolean isLowerCase, boolean isTokenizeCjkChars) { - this(isLowerCase, isTokenizeCjkChars, isLowerCase); - } - - BasicTokenizer() { - this(true, true, true); - } - - /** - * Clean the text and whitespace tokenize then process depending - * on the values of {@code lowerCase}, {@code tokenizeCjkChars}, - * {@code stripAccents} and the contents of {@code neverSplit} - * - * @param text The input text to tokenize - * @return List of tokens - */ - public List tokenize(String text) { - return mergeNeverSplitTokens(text, doTokenize(text)); - } - - private List doTokenizeString(String text) { - return doTokenize(text).stream().map(DelimitedToken::getToken).collect(Collectors.toList()); - } - - private List doTokenize(String text) { - text = cleanText(text); - if (isTokenizeCjkChars) { - text = tokenizeCjkChars(text); - } - - List tokens = whiteSpaceTokenize(text); - - List processedTokens = new ArrayList<>(tokens.size()); - for (DelimitedToken tokenRecord : tokens) { - - String tokenStr = tokenRecord.getToken(); - if (Strings.EMPTY.equals(tokenStr)) { - continue; - } - - if (isLowerCase) { - tokenStr = tokenStr.toLowerCase(Locale.ROOT); - } - if (isStripAccents) { - tokenStr = stripAccents(tokenStr); - } - processedTokens.addAll(splitOnPunctuation(new DelimitedToken(tokenRecord.getStartPos(), tokenRecord.getEndPos(), tokenStr))); - } - - return processedTokens; - } - - private List mergeNeverSplitTokens(String originalText, List tokens) { - if (neverSplitTokenTrieRoot.isLeaf()) { - return tokens; - } - List mergedTokens = new ArrayList<>(tokens.size()); - List matchingTokens = new ArrayList<>(); - TokenTrieNode current = neverSplitTokenTrieRoot; - for (DelimitedToken token : tokens) { - TokenTrieNode childNode = current.getChild(token.getToken()); - if (childNode == null) { - if (current != neverSplitTokenTrieRoot) { - mergedTokens.addAll(matchingTokens); - matchingTokens = new ArrayList<>(); - current = neverSplitTokenTrieRoot; - } - childNode = current.getChild(token.getToken()); - if (childNode == null) { - mergedTokens.add(token); - } else { - matchingTokens.add(token); - current = childNode; - } - } else if (childNode.isLeaf()) { - matchingTokens.add(token); - DelimitedToken mergedToken = DelimitedToken.mergeTokens(matchingTokens); - String originalTokenText = originalText.substring(mergedToken.getStartPos(), mergedToken.getEndPos()); - if (neverSplitTokens.contains(originalTokenText)) { - mergedTokens.add(new DelimitedToken(mergedToken.getStartPos(), 
mergedToken.getEndPos(), originalTokenText)); - } else { - mergedTokens.addAll(matchingTokens); - } - matchingTokens = new ArrayList<>(); - current = neverSplitTokenTrieRoot; - } else { - matchingTokens.add(token); - current = childNode; - } - } - return mergedTokens; - } - - public boolean isLowerCase() { - return isLowerCase; - } - - public boolean isStripAccents() { - return isStripAccents; - } - - public boolean isTokenizeCjkChars() { - return isTokenizeCjkChars; - } - - /** - * Split the input text by whitespace. - * For the returned objects {@link DelimitedToken#getStartPos()} is the - * start character index inclusive and {@link DelimitedToken#getEndPos()} - * the index exclusive. The number of whitespace characters between 2 consecutive - * {@link DelimitedToken}s is the difference between the first's {@code endPos} - * and the second's {@code startPos}. - * - * The input should be normalized via a call to {@link #cleanText(String)} - * before it is passed to this function. - * - * @param text to tokenize - * @return White space separated strings - */ - static List whiteSpaceTokenize(String text) { - var tokens = new ArrayList(); - - // whitespace at beginning - int index = 0; - while (index < text.length() && text.charAt(index) == ' ') { - index++; - } - - int tokenStart = index; - - while (index < text.length()) { - if (text.charAt(index) == ' ') { - int tokenEnd = index; - index++; - // consume trail whitespace before the next word - // or end of text - while (index < text.length() && text.charAt(index) == ' ') { - index++; - } - - tokens.add(new DelimitedToken(tokenStart, tokenEnd, text.substring(tokenStart, tokenEnd))); - tokenStart = index; - } - index++; - } - - // trailing whitespace - if (tokenStart != text.length()) { - tokens.add(new DelimitedToken(tokenStart, text.length(), text.substring(tokenStart))); - } - - return tokens; - } - - /** - * Normalize unicode text to NFD form - * "Characters are decomposed by canonical equivalence, and multiple - * combining characters are arranged in a specific order" - * from https://en.wikipedia.org/wiki/Unicode_equivalence#Normal_forms - * - * And remove non-spacing marks https://www.compart.com/en/unicode/category/Mn - * - * @param word Word to strip - * @return {@code word} normalized and stripped. 
- */ - static String stripAccents(String word) { - String normalizedString = Normalizer.normalize(word, Normalizer.Form.NFD); - - int[] codePoints = normalizedString.codePoints() - .filter(codePoint -> Character.getType(codePoint) != Character.NON_SPACING_MARK) - .toArray(); - - return new String(codePoints, 0, codePoints.length); - } - - static List splitOnPunctuation(DelimitedToken word) { - List splits = new ArrayList<>(); - int[] codePoints = word.getToken().codePoints().toArray(); - - int lastSplit = 0; - for (int i = 0; i < codePoints.length; i++) { - if (isPunctuationMark(codePoints[i])) { - int charCount = i - lastSplit; - if (charCount > 0) { - // add a new string for what has gone before - splits.add( - new DelimitedToken( - word.getStartPos() + lastSplit, - word.getStartPos() + i, - new String(codePoints, lastSplit, i - lastSplit) - ) - ); - } - splits.add(new DelimitedToken(word.getStartPos() + i, word.getStartPos() + i + 1, new String(codePoints, i, 1))); - lastSplit = i + 1; - } - } - - if (lastSplit < codePoints.length) { - splits.add( - new DelimitedToken( - word.getStartPos() + lastSplit, - word.getStartPos() + codePoints.length, - new String(codePoints, lastSplit, codePoints.length - lastSplit) - ) - ); - } - - return splits; - } - - /** - * Surrounds any CJK character with whitespace - * @param text To tokenize - * @return tokenized text - */ - static String tokenizeCjkChars(String text) { - StringBuilder sb = new StringBuilder(text.length()); - AtomicBoolean cjkCharFound = new AtomicBoolean(false); - - text.codePoints().forEach(cp -> { - if (isCjkChar(cp)) { - sb.append(' '); - sb.appendCodePoint(cp); - sb.append(' '); - cjkCharFound.set(true); - } else { - sb.appendCodePoint(cp); - } - }); - - // no change - if (cjkCharFound.get() == false) { - return text; - } - - return sb.toString(); - } - - /** - * Remove control chars and normalize white space to ' ' - * @param text Text to clean - * @return Cleaned text - */ - static String cleanText(String text) { - int[] codePoints = text.codePoints() - .filter(codePoint -> (codePoint == 0x00 || codePoint == 0xFFFD || isControlChar(codePoint)) == false) - .map(codePoint -> isWhiteSpace(codePoint) ? ' ' : codePoint) - .toArray(); - - return new String(codePoints, 0, codePoints.length); - } - - static boolean isCjkChar(int codePoint) { - // https://en.wikipedia.org/wiki/CJK_Unified_Ideographs_(Unicode_block) - Character.UnicodeBlock block = Character.UnicodeBlock.of(codePoint); - return Character.UnicodeBlock.CJK_COMPATIBILITY_IDEOGRAPHS.equals(block) - || Character.UnicodeBlock.CJK_UNIFIED_IDEOGRAPHS.equals(block) - || Character.UnicodeBlock.CJK_UNIFIED_IDEOGRAPHS_EXTENSION_A.equals(block) - || Character.UnicodeBlock.CJK_UNIFIED_IDEOGRAPHS_EXTENSION_B.equals(block) - || Character.UnicodeBlock.CJK_UNIFIED_IDEOGRAPHS_EXTENSION_C.equals(block) - || Character.UnicodeBlock.CJK_UNIFIED_IDEOGRAPHS_EXTENSION_D.equals(block) - || Character.UnicodeBlock.CJK_UNIFIED_IDEOGRAPHS_EXTENSION_E.equals(block) - || Character.UnicodeBlock.CJK_COMPATIBILITY_IDEOGRAPHS_SUPPLEMENT.equals(block); - } - - /** - * newline, carriage return and tab are control chars but for - * tokenization purposes they are treated as whitespace. 
- * - * @param codePoint code point - * @return is control char - */ - static boolean isControlChar(int codePoint) { - if (codePoint == '\n' || codePoint == '\r' || codePoint == '\t') { - return false; - } - int category = Character.getType(codePoint); - - return category >= Character.CONTROL && category <= Character.SURROGATE; - } - - /** - * newline, carriage return and tab are technically control chars - * but are not part of the Unicode Space Separator (Zs) group. - * For tokenization purposes they are treated as whitespace - * - * @param codePoint code point - * @return is white space - */ - static boolean isWhiteSpace(int codePoint) { - if (codePoint == '\n' || codePoint == '\r' || codePoint == '\t') { - return true; - } - return Character.getType(codePoint) == Character.SPACE_SEPARATOR; - } - - /** - * We treat all non-letter/number ASCII as punctuation. - * Characters such as "^", "$", and "`" are not in the Unicode - * Punctuation class but are treated as punctuation for consistency. - * - * @param codePoint code point - * @return true if is punctuation - */ - static boolean isPunctuationMark(int codePoint) { - if ((codePoint >= 33 && codePoint <= 47) - || (codePoint >= 58 && codePoint <= 64) - || (codePoint >= 91 && codePoint <= 96) - || (codePoint >= 123 && codePoint <= 126)) { - return true; - } - - int category = Character.getType(codePoint); - return (category >= Character.DASH_PUNCTUATION && category <= Character.OTHER_PUNCTUATION) - || (category >= Character.INITIAL_QUOTE_PUNCTUATION && category <= Character.FINAL_QUOTE_PUNCTUATION); - } -} diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/BertTokenizer.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/BertTokenizer.java index ab0d34860c0c8..7ada856f5dd0a 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/BertTokenizer.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/BertTokenizer.java @@ -6,12 +6,16 @@ */ package org.elasticsearch.xpack.ml.inference.nlp.tokenizers; +import org.apache.lucene.analysis.TokenStream; +import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute; import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.Tokenization; import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; import org.elasticsearch.xpack.ml.inference.nlp.BertRequestBuilder; import org.elasticsearch.xpack.ml.inference.nlp.NlpTask; +import java.io.IOException; +import java.io.UncheckedIOException; import java.util.ArrayList; import java.util.Collections; import java.util.List; @@ -20,6 +24,7 @@ import java.util.SortedMap; import java.util.TreeMap; import java.util.function.Function; +import java.util.stream.Collectors; import java.util.stream.IntStream; import java.util.stream.Stream; @@ -41,19 +46,13 @@ public class BertTokenizer implements NlpTokenizer { public static final int SPECIAL_TOKEN_POSITION = -1; - public static final int DEFAULT_MAX_INPUT_CHARS_PER_WORD = 100; - private static final Set NEVER_SPLIT = Set.of(MASK_TOKEN); - private final WordPieceTokenizer wordPieceTokenizer; + private final WordPieceAnalyzer wordPieceAnalyzer; private final List originalVocab; // TODO Not sure this needs to be a sorted map private final SortedMap vocab; - private final boolean doLowerCase; - private final boolean doTokenizeCjKChars; - private final boolean doStripAccents; protected final 
boolean withSpecialTokens; - private final Set neverSplit; private final int maxSequenceLength; private final NlpTask.RequestBuilder requestBuilder; private final String sepToken; @@ -109,14 +108,17 @@ protected BertTokenizer( String maskToken, String unknownToken ) { - wordPieceTokenizer = new WordPieceTokenizer(vocab, unknownToken, DEFAULT_MAX_INPUT_CHARS_PER_WORD); + wordPieceAnalyzer = new WordPieceAnalyzer( + originalVocab, + new ArrayList<>(neverSplit), + doLowerCase, + doTokenizeCjKChars, + doStripAccents, + unknownToken + ); this.originalVocab = originalVocab; this.vocab = vocab; - this.doLowerCase = doLowerCase; - this.doTokenizeCjKChars = doTokenizeCjKChars; - this.doStripAccents = doStripAccents; this.withSpecialTokens = withSpecialTokens; - this.neverSplit = neverSplit; this.maxSequenceLength = maxSequenceLength; this.requestBuilder = requestBuilderFactory.apply(this); if (vocab.containsKey(unknownToken) == false) { @@ -208,7 +210,7 @@ public TokenizationResult buildTokenizationResult(List wordPieceTokenIds = innerResult.wordPieceTokenIds; + List wordPieceTokenIds = innerResult.tokens; List tokenPositionMap = innerResult.tokenPositionMap; int numTokens = withSpecialTokens ? wordPieceTokenIds.size() + 2 : wordPieceTokenIds.size(); boolean isTruncated = false; @@ -227,8 +229,10 @@ public TokenizationResult.Tokenization tokenize(String seq, Tokenization.Truncat ); } } - BertTokenizationBuilder bertTokenizationBuilder = bertTokenizationBuilder().addTokens(wordPieceTokenIds, tokenPositionMap) - .addEndTokensIfNecessary(); + BertTokenizationBuilder bertTokenizationBuilder = bertTokenizationBuilder().addTokens( + wordPieceTokenIds.stream().map(WordPieceTokenFilter.WordPieceToken::getEncoding).collect(Collectors.toList()), + tokenPositionMap + ).addEndTokensIfNecessary(); return new TokenizationResult.Tokenization( seq, innerResult.tokens, @@ -241,10 +245,10 @@ public TokenizationResult.Tokenization tokenize(String seq, Tokenization.Truncat @Override public TokenizationResult.Tokenization tokenize(String seq1, String seq2, Tokenization.Truncate truncate) { var innerResultSeq1 = innerTokenize(seq1); - List wordPieceTokenIdsSeq1 = innerResultSeq1.wordPieceTokenIds; + List wordPieceTokenIdsSeq1 = innerResultSeq1.tokens; List tokenPositionMapSeq1 = innerResultSeq1.tokenPositionMap; var innerResultSeq2 = innerTokenize(seq2); - List wordPieceTokenIdsSeq2 = innerResultSeq2.wordPieceTokenIds; + List wordPieceTokenIdsSeq2 = innerResultSeq2.tokens; List tokenPositionMapSeq2 = innerResultSeq2.tokenPositionMap; if (withSpecialTokens == false) { throw new IllegalArgumentException("Unable to do sequence pair tokenization without special tokens"); @@ -298,10 +302,16 @@ public TokenizationResult.Tokenization tokenize(String seq1, String seq2, Tokeni ); } } - BertTokenizationBuilder bertTokenizationBuilder = bertTokenizationBuilder().addTokens(wordPieceTokenIdsSeq1, tokenPositionMapSeq1) - .addTokens(wordPieceTokenIdsSeq2, tokenPositionMapSeq2) + BertTokenizationBuilder bertTokenizationBuilder = bertTokenizationBuilder().addTokens( + wordPieceTokenIdsSeq1.stream().map(WordPieceTokenFilter.WordPieceToken::getEncoding).collect(Collectors.toList()), + tokenPositionMapSeq1 + ) + .addTokens( + wordPieceTokenIdsSeq2.stream().map(WordPieceTokenFilter.WordPieceToken::getEncoding).collect(Collectors.toList()), + tokenPositionMapSeq2 + ) .addEndTokensIfNecessary(); - List tokens = new ArrayList<>(innerResultSeq1.tokens); + List tokens = new ArrayList<>(innerResultSeq1.tokens); 
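+        // result tokens are seq1's word pieces followed by seq2's, matching the order of the ids built above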
tokens.addAll(innerResultSeq2.tokens); return new TokenizationResult.Tokenization( seq1 + seq2, @@ -321,36 +331,32 @@ protected int getNumExtraTokensForSeqPair() { } private InnerTokenization innerTokenize(String seq) { - BasicTokenizer basicTokenizer = new BasicTokenizer(doLowerCase, doTokenizeCjKChars, doStripAccents, neverSplit); - var tokenSequences = basicTokenizer.tokenize(seq); - List wordPieceTokens = new ArrayList<>(); List tokenPositionMap = new ArrayList<>(); - - for (int sourceIndex = 0; sourceIndex < tokenSequences.size(); sourceIndex++) { - String token = tokenSequences.get(sourceIndex).getToken(); - if (neverSplit.contains(token)) { - wordPieceTokens.add(vocab.getOrDefault(token, vocab.get(unknownToken))); - tokenPositionMap.add(sourceIndex); - } else { - List tokens = wordPieceTokenizer.tokenize(tokenSequences.get(sourceIndex)); - for (int tokenCount = 0; tokenCount < tokens.size(); tokenCount++) { - tokenPositionMap.add(sourceIndex); - } - wordPieceTokens.addAll(tokens); + try (TokenStream ts = wordPieceAnalyzer.tokenStream("input", seq)) { + ts.reset(); + PositionIncrementAttribute tokenPos = ts.addAttribute(PositionIncrementAttribute.class); + int currPos = -1; + while (ts.incrementToken()) { + currPos += tokenPos.getPositionIncrement(); + tokenPositionMap.add(currPos); } + } catch (IOException ex) { + throw new UncheckedIOException(ex); } + return new InnerTokenization(new ArrayList<>(wordPieceAnalyzer.getTokens()), tokenPositionMap); + } - return new InnerTokenization(tokenSequences, wordPieceTokens, tokenPositionMap); + @Override + public void close() { + wordPieceAnalyzer.close(); } private static class InnerTokenization { - List tokens; - List wordPieceTokenIds; + List tokens; List tokenPositionMap; - InnerTokenization(List tokens, List wordPieceTokenIds, List tokenPositionMap) { + InnerTokenization(List tokens, List tokenPositionMap) { this.tokens = tokens; - this.wordPieceTokenIds = wordPieceTokenIds; this.tokenPositionMap = tokenPositionMap; } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/TokenTrieNode.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/CharSeqTokenTrieNode.java similarity index 57% rename from x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/TokenTrieNode.java rename to x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/CharSeqTokenTrieNode.java index a6716a9580372..9253759e41232 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/TokenTrieNode.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/CharSeqTokenTrieNode.java @@ -7,22 +7,22 @@ package org.elasticsearch.xpack.ml.inference.nlp.tokenizers; +import org.apache.lucene.analysis.CharArrayMap; +import org.elasticsearch.core.CheckedFunction; import org.elasticsearch.core.Nullable; +import java.io.IOException; import java.util.Collection; -import java.util.HashMap; import java.util.List; -import java.util.Map; import java.util.Objects; -import java.util.function.Function; -class TokenTrieNode { +public class CharSeqTokenTrieNode { - private static final String EMPTY_STRING = ""; + public static final CharSeqTokenTrieNode EMPTY = new CharSeqTokenTrieNode(new CharArrayMap<>(0, false)); - private final Map children; + private final CharArrayMap children; - private TokenTrieNode(Map children) { + private CharSeqTokenTrieNode(CharArrayMap children) { 
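+        // backed by CharArrayMap so getChild() can look up CharSequence slices without allocating Strings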
this.children = Objects.requireNonNull(children); } @@ -30,8 +30,18 @@ boolean isLeaf() { return children.isEmpty(); } + public void clear() { + if (isLeaf()) { + return; + } + for (CharSeqTokenTrieNode c : children.values()) { + c.clear(); + } + children.clear(); + } + @Nullable - TokenTrieNode getChild(String token) { + CharSeqTokenTrieNode getChild(CharSequence token) { return children.get(token); } @@ -39,7 +49,7 @@ private void insert(List tokens) { if (tokens.isEmpty()) { return; } - TokenTrieNode currentNode = this; + CharSeqTokenTrieNode currentNode = this; int currentTokenIndex = 0; // find leaf @@ -49,15 +59,16 @@ private void insert(List tokens) { } // add rest of tokens as new nodes while (currentTokenIndex < tokens.size()) { - TokenTrieNode childNode = new TokenTrieNode(new HashMap<>()); + CharSeqTokenTrieNode childNode = new CharSeqTokenTrieNode(new CharArrayMap<>(1, false)); currentNode.children.put(tokens.get(currentTokenIndex), childNode); currentNode = childNode; currentTokenIndex++; } } - static TokenTrieNode build(Collection tokens, Function> tokenizeFunction) { - TokenTrieNode root = new TokenTrieNode(new HashMap<>()); + public static CharSeqTokenTrieNode build(Collection tokens, CheckedFunction, IOException> tokenizeFunction) + throws IOException { + CharSeqTokenTrieNode root = new CharSeqTokenTrieNode(new CharArrayMap<>(1, false)); for (String token : tokens) { List subTokens = tokenizeFunction.apply(token); root.insert(subTokens); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/ControlCharFilter.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/ControlCharFilter.java new file mode 100644 index 0000000000000..dc27f51c520e9 --- /dev/null +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/ControlCharFilter.java @@ -0,0 +1,94 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.ml.inference.nlp.tokenizers; + +import com.carrotsearch.hppc.CharArrayList; + +import org.apache.lucene.analysis.charfilter.BaseCharFilter; + +import java.io.CharArrayReader; +import java.io.IOException; +import java.io.Reader; + +/** + * Char filter for removing control chars from a stream + */ +public class ControlCharFilter extends BaseCharFilter { + public static final String NAME = "control_char_filter"; + // TODO this is probably not ultimately necessary, keeping track of where we are in the stream + // and optimizing our replacements (like MappingCharFilter), would be faster and use less memory + private Reader transformedInput; + + public ControlCharFilter(Reader in) { + super(in); + } + + @Override + public int read(char[] cbuf, int off, int len) throws IOException { + if (transformedInput == null) { + fill(); + } + + return transformedInput.read(cbuf, off, len); + } + + @Override + public int read() throws IOException { + if (transformedInput == null) { + fill(); + } + + return transformedInput.read(); + } + + private void fill() throws IOException { + CharArrayList charArrayList = new CharArrayList(1024); + char[] temp = new char[1024]; + int totalRead = 0; + int diff = 0; + for (int cnt = input.read(temp); cnt > 0; cnt = input.read(temp)) { + int pos = 0; + while (pos < cnt) { + int start = pos; + while (start < cnt) { + if (isControlChar(temp[start]) == false) { + break; + } + start++; + } + if (start > pos) { + diff += (start - pos); + addOffCorrectMap(pos + totalRead, diff); + } + int size = 0; + while (size < (cnt - start)) { + // While the category is not a control char; read. + if (isControlChar(temp[start + size]) == false) { + size++; + } else { + break; + } + } + charArrayList.add(temp, start, size); + pos = start + size; + } + totalRead += cnt; + } + transformedInput = new CharArrayReader(charArrayList.toArray()); + } + + private static boolean isControlChar(char c) { + if (c == '\n' || c == '\r' || c == '\t') { + return false; + } + int category = Character.getType(c); + + return category >= Character.CONTROL && category <= Character.SURROGATE; + } + +} diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/DelimitedToken.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/DelimitedToken.java index 74f1121cc467f..7d385055aae5d 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/DelimitedToken.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/DelimitedToken.java @@ -13,43 +13,39 @@ public class DelimitedToken { - /** - * Merges the list of tokens. - * - * Assumes that the tokens are in order. 
- * - * @param tokens - * @return The merged token - */ - public static DelimitedToken mergeTokens(List tokens) { + static DelimitedToken mergeTokens(List tokens) { if (tokens.size() == 1) { return tokens.get(0); } - - String merged = tokens.stream().map(DelimitedToken::getToken).collect(Collectors.joining()); - return new DelimitedToken(tokens.get(0).getStartPos(), tokens.get(tokens.size() - 1).getEndPos(), merged); + int startOffSet = tokens.get(0).startOffset; + int endOffset = tokens.get(tokens.size() - 1).endOffset; + return new DelimitedToken( + tokens.stream().map(DelimitedToken::charSequence).map(CharSequence::toString).collect(Collectors.joining()), + startOffSet, + endOffset + ); } - private final int startPos; - private final int endPos; - private final String token; + private final CharSequence charSequence; + private final int startOffset; + private final int endOffset; - DelimitedToken(int startPos, int endPos, String token) { - this.startPos = startPos; - this.endPos = endPos; - this.token = token; + public DelimitedToken(CharSequence charSequence, int startOffset, int endOffset) { + this.charSequence = charSequence; + this.startOffset = startOffset; + this.endOffset = endOffset; } - public int getStartPos() { - return startPos; + public CharSequence charSequence() { + return charSequence; } - public int getEndPos() { - return endPos; + public int startOffset() { + return startOffset; } - public String getToken() { - return token; + public int endOffset() { + return endOffset; } @Override @@ -57,16 +53,11 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; DelimitedToken that = (DelimitedToken) o; - return startPos == that.startPos && endPos == that.endPos && Objects.equals(token, that.token); + return startOffset == that.startOffset && endOffset == that.endOffset && Objects.equals(charSequence, that.charSequence); } @Override public int hashCode() { - return Objects.hash(startPos, endPos, token); - } - - @Override - public String toString() { - return "{" + "startPos=" + startPos + ", endPos=" + endPos + ", token=" + token + '}'; + return Objects.hash(charSequence, startOffset, endOffset); } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/MultiCharSequence.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/MultiCharSequence.java new file mode 100644 index 0000000000000..f78031834986e --- /dev/null +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/MultiCharSequence.java @@ -0,0 +1,98 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.ml.inference.nlp.tokenizers; + +import org.apache.lucene.util.CharsRef; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; + +public class MultiCharSequence implements CharSequence { + + private final int[] lengths; + private final List sequenceList; + + public MultiCharSequence(List sequenceList) { + this.sequenceList = sequenceList; + this.lengths = new int[sequenceList.size()]; + int i = 0; + int length = 0; + for (CharSequence sequence : sequenceList) { + length += sequence.length(); + lengths[i++] = length; + } + } + + @Override + public int length() { + return lengths[lengths.length - 1]; + } + + @Override + public char charAt(int index) { + int sequenceIndex = Arrays.binarySearch(lengths, index + 1); + if (sequenceIndex < 0) { + sequenceIndex = -1 - sequenceIndex; + } + CharSequence sequence = sequenceList.get(sequenceIndex); + if (sequenceIndex == 0) { + return sequence.charAt(index); + } + return sequence.charAt(index - lengths[sequenceIndex - 1]); + } + + @Override + public CharSequence subSequence(int start, int end) { + if (start == 0 && end >= length()) { + return this; + } + if (start == end) { + return new CharsRef(CharsRef.EMPTY_CHARS, 0, 0); + } + + int startIndex = Arrays.binarySearch(lengths, start); + if (startIndex < 0) { + startIndex = -1 - startIndex; + } + int endIndex = Arrays.binarySearch(lengths, end); + if (endIndex < 0) { + endIndex = -1 - endIndex; + } + if (endIndex > lengths.length - 1) { + endIndex = lengths.length - 1; + } + if (startIndex == endIndex) { + if (startIndex == 0) { + return sequenceList.get(startIndex).subSequence(start, end); + } else { + return sequenceList.get(startIndex).subSequence(start - lengths[startIndex - 1], end - lengths[startIndex - 1]); + } + } + List sequences = new ArrayList<>((endIndex - startIndex) + 1); + if (startIndex == 0) { + sequences.add(sequenceList.get(startIndex).subSequence(start, sequenceList.get(startIndex).length())); + } else { + sequences.add(sequenceList.get(startIndex).subSequence(start - lengths[startIndex - 1], sequenceList.get(startIndex).length())); + } + if (endIndex - startIndex > 1) { + sequences.addAll(sequenceList.subList(startIndex + 1, endIndex)); + } + sequences.add(sequenceList.get(endIndex).subSequence(0, end - lengths[endIndex - 1])); + return new MultiCharSequence(sequences); + } + + @Override + public String toString() { + StringBuilder builder = new StringBuilder(); + for (CharSequence sequence : sequenceList) { + builder.append(sequence); + } + return builder.toString(); + } +} diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/NlpTokenizer.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/NlpTokenizer.java index 59dbb616f7fea..adf303667065b 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/NlpTokenizer.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/NlpTokenizer.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.ml.inference.nlp.tokenizers; +import org.elasticsearch.core.Releasable; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.BertTokenization; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.MPNetTokenization; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.Tokenization; @@ -22,7 +23,7 @@ import static org.elasticsearch.xpack.core.ml.inference.trainedmodel.NlpConfig.TOKENIZATION; import static 
org.elasticsearch.xpack.core.ml.inference.trainedmodel.NlpConfig.VOCABULARY; -public interface NlpTokenizer { +public interface NlpTokenizer extends Releasable { TokenizationResult buildTokenizationResult(List tokenizations); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/TokenizationResult.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/TokenizationResult.java index 862be3c43bf67..c13df493d05ce 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/TokenizationResult.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/TokenizationResult.java @@ -33,7 +33,13 @@ public List getTokenizations() { return tokenizations; } - public void addTokenization(String input, boolean isTruncated, List tokens, int[] tokenIds, int[] tokenMap) { + public void addTokenization( + String input, + boolean isTruncated, + List tokens, + int[] tokenIds, + int[] tokenMap + ) { maxLength = Math.max(maxLength, tokenIds.length); tokenizations.add(new Tokenization(input, tokens, isTruncated, tokenIds, tokenMap)); } @@ -50,12 +56,18 @@ public int getLongestSequenceLength() { public static class Tokenization { private final String input; - private final List tokens; + private final List tokens; private final int[] tokenIds; private final int[] tokenMap; private final boolean truncated; - public Tokenization(String input, List tokens, boolean truncated, int[] tokenIds, int[] tokenMap) { + public Tokenization( + String input, + List tokens, + boolean truncated, + int[] tokenIds, + int[] tokenMap + ) { assert tokenIds.length == tokenMap.length; this.input = input; this.tokens = tokens; @@ -88,7 +100,7 @@ public String getInput() { return input; } - public List getTokens() { + public List getTokens() { return tokens; } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/WordPieceAnalyzer.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/WordPieceAnalyzer.java new file mode 100644 index 0000000000000..6f1b89f20056f --- /dev/null +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/WordPieceAnalyzer.java @@ -0,0 +1,80 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.ml.inference.nlp.tokenizers; + +import org.apache.lucene.analysis.Analyzer; +import org.apache.lucene.analysis.core.WhitespaceTokenizer; + +import java.io.IOException; +import java.io.Reader; +import java.io.UncheckedIOException; +import java.util.List; + +public class WordPieceAnalyzer extends Analyzer { + private final List vocabulary; + private final List neverSplit; + private final boolean doLowerCase; + private final boolean doTokenizeCjKChars; + private final boolean doStripAccents; + private WordPieceTokenFilter innerTokenFilter; + private final String unknownToken; + + public WordPieceAnalyzer( + List vocabulary, + List neverSplit, + boolean doLowerCase, + boolean doTokenizeCjKChars, + boolean doStripAccents, + String unknownToken + ) { + this.vocabulary = vocabulary; + this.neverSplit = neverSplit; + this.doLowerCase = doLowerCase; + this.doTokenizeCjKChars = doTokenizeCjKChars; + this.doStripAccents = doStripAccents; + this.unknownToken = unknownToken; + } + + @Override + protected TokenStreamComponents createComponents(String fieldName) { + try { + WhitespaceTokenizer tokenizer = new WhitespaceTokenizer(512); + innerTokenFilter = WordPieceTokenFilter.build( + doLowerCase, + doTokenizeCjKChars, + doStripAccents, + neverSplit, + vocabulary, + unknownToken, + 100, + tokenizer + ); + return new TokenStreamComponents(tokenizer, innerTokenFilter); + } catch (IOException ex) { + throw new UncheckedIOException(ex); + } + } + + public List getTokens() { + if (innerTokenFilter != null) { + return innerTokenFilter.getTokenizedValues(); + } else { + return List.of(); + } + } + + @Override + protected Reader initReader(String fieldName, Reader reader) { + return new ControlCharFilter(reader); + } + + @Override + protected Reader initReaderForNormalization(String fieldName, Reader reader) { + return new ControlCharFilter(reader); + } +} diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/WordPieceTokenFilter.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/WordPieceTokenFilter.java new file mode 100644 index 0000000000000..2f6934d238736 --- /dev/null +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/WordPieceTokenFilter.java @@ -0,0 +1,207 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */
+
+package org.elasticsearch.xpack.ml.inference.nlp.tokenizers;
+
+import org.apache.lucene.analysis.CharArrayMap;
+import org.apache.lucene.analysis.CharArraySet;
+import org.apache.lucene.analysis.TokenFilter;
+import org.apache.lucene.analysis.TokenStream;
+import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
+import org.apache.lucene.analysis.tokenattributes.OffsetAttribute;
+import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.LinkedList;
+import java.util.List;
+
+public final class WordPieceTokenFilter extends TokenFilter {
+    protected final LinkedList<WordPieceToken> tokens;
+    private final CharTermAttribute termAtt = addAttribute(CharTermAttribute.class);
+    protected final OffsetAttribute offsetAtt = addAttribute(OffsetAttribute.class);
+    private final PositionIncrementAttribute posIncAtt = addAttribute(PositionIncrementAttribute.class);
+    private static final CharSequence CONTINUATION = "##";
+
+    private State current;
+    private final CharArraySet neverSplit;
+    private final CharArrayMap<Integer> vocabulary;
+    private final List<WordPieceToken> tokenizedValues;
+    private final int maxInputCharsPerWord;
+    private final int tokenizedUnknown;
+    private final CharSequence unknownToken;
+
+    public static WordPieceTokenFilter build(
+        boolean isLowerCase,
+        boolean isTokenizeCjkChars,
+        boolean isStripAccents,
+        List<String> neverSplit,
+        List<String> dictionary,
+        String unknownToken,
+        int maxInputCharsPerWord,
+        TokenStream input
+    ) throws IOException {
+        CharArrayMap<Integer> vocabMap = new CharArrayMap<>(dictionary.size(), isLowerCase);
+        int i = 0;
+        for (var word : dictionary) {
+            vocabMap.put(word, i++);
+        }
+        input = BasicTokenFilter.build(isTokenizeCjkChars, isStripAccents, neverSplit, input);
+        return new WordPieceTokenFilter(input, new CharArraySet(neverSplit, isLowerCase), vocabMap, unknownToken, maxInputCharsPerWord);
+    }
+
+    public WordPieceTokenFilter(
+        TokenStream input,
+        CharArraySet neverSplit,
+        CharArrayMap<Integer> vocabulary,
+        CharSequence unknownToken,
+        int maxInputCharsPerWord
+    ) {
+        super(input);
+        this.tokens = new LinkedList<>();
+        this.neverSplit = neverSplit;
+        this.vocabulary = vocabulary;
+        this.tokenizedValues = new ArrayList<>();
+        if (vocabulary.containsKey(unknownToken) == false) {
+            throw new IllegalArgumentException(
+                "provided vocabulary does not contain the unknown token of [" + unknownToken.toString() + "]"
+            );
+        }
+        this.unknownToken = unknownToken;
+        this.tokenizedUnknown = vocabulary.get(unknownToken);
+        this.maxInputCharsPerWord = maxInputCharsPerWord;
+    }
+
+    public List<WordPieceToken> getTokenizedValues() {
+        return tokenizedValues;
+    }
+
+    @Override
+    public void reset() throws IOException {
+        super.reset();
+        tokens.clear();
+        tokenizedValues.clear();
+        current = null;
+    }
+
+    @Override
+    public boolean incrementToken() throws IOException {
+        // TODO seems like this + lowercase + tokenize cjk + wordpiece could all be the same thing....
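+        // A short walkthrough of the algorithm implemented below:
+        //   1. If sub-tokens queued from the previous word remain, emit the next
+        //      one at the same position (position increment 0).
+        //   2. Otherwise pull the next token from the upstream filter chain;
+        //      never-split entries pass through whole, and tokens longer than
+        //      maxInputCharsPerWord collapse to the unknown token.
+        //   3. Otherwise greedily match the longest vocabulary prefix, marking
+        //      each continuation piece with "##"; if some remainder matches no
+        //      vocabulary entry at all, the whole word becomes the unknown token.
+        // With the test vocabulary used in BertTokenizerTests this turns
+        // "Elasticsearch" into the pieces "Elastic" and "##search".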
+ if (tokens.isEmpty() == false) { + assert current != null; + WordPieceToken token = tokens.removeFirst(); + restoreState(current); // keep all other attributes untouched + termAtt.setEmpty().append(token.charSequence()); + offsetAtt.setOffset(token.startOffset(), token.endOffset()); + posIncAtt.setPositionIncrement(0); + return true; + } + + current = null; // not really needed, but for safety + if (input.incrementToken()) { + if (neverSplit.contains(termAtt)) { + Integer maybeTokenized = vocabulary.get(termAtt); + if (maybeTokenized == null) { + tokenizedValues.add( + new WordPieceToken(termAtt.toString(), tokenizedUnknown, offsetAtt.startOffset(), offsetAtt.endOffset()) + ); + } else { + tokenizedValues.add( + new WordPieceToken(termAtt.toString(), maybeTokenized, offsetAtt.startOffset(), offsetAtt.endOffset()) + ); + } + return true; + } + if (termAtt.length() > maxInputCharsPerWord) { + tokenizedValues.add(new WordPieceToken(unknownToken, tokenizedUnknown, offsetAtt.startOffset(), offsetAtt.endOffset())); + termAtt.setEmpty().append(unknownToken); + return true; + } + + boolean isBad = false; + int start = 0; + int length = termAtt.length(); + while (start < length) { + int end = length; + CharSequence currentValidSubStr = null; + + while (start < end) { + CharSequence subStr; + if (start > 0) { + subStr = new MultiCharSequence(List.of(CONTINUATION, termAtt.subSequence(start, end))); + } else { + subStr = termAtt.subSequence(start, end); + } + + if (vocabulary.containsKey(subStr)) { + currentValidSubStr = subStr; + break; + } + end--; + } + + if (currentValidSubStr == null) { + isBad = true; + break; + } + int encoding = vocabulary.get(currentValidSubStr); + WordPieceToken t = new WordPieceToken(currentValidSubStr, encoding, offsetAtt.startOffset(), offsetAtt.endOffset()); + tokenizedValues.add(t); + tokens.add(t); + start = end; + } + + if (isBad) { + tokens.clear(); + WordPieceToken t = new WordPieceToken(unknownToken, tokenizedUnknown, offsetAtt.startOffset(), offsetAtt.endOffset()); + tokenizedValues.add(t); + termAtt.setEmpty().append(unknownToken); + } else { + current = captureState(); + WordPieceToken token = tokens.removeFirst(); + termAtt.setEmpty().append(token.charSequence()); + offsetAtt.setOffset(token.startOffset(), token.endOffset()); + } + return true; + } + return false; + } + + public static class WordPieceToken extends DelimitedToken implements CharSequence { + public final int encoding; + + WordPieceToken(CharSequence sequence, int encoding, int startOffset, int endOffset) { + super(sequence, startOffset, endOffset); + this.encoding = encoding; + } + + public int getEncoding() { + return this.encoding; + } + + @Override + public int length() { + return charSequence().length(); + } + + @Override + public char charAt(int index) { + return charSequence().charAt(index); + } + + @Override + public CharSequence subSequence(int start, int end) { + return charSequence().subSequence(start, end); + } + + @Override + public String toString() { + return charSequence().toString(); + } + } + +} diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/WordPieceTokenizer.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/WordPieceTokenizer.java deleted file mode 100644 index b50e70f85f12a..0000000000000 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/WordPieceTokenizer.java +++ /dev/null @@ -1,94 +0,0 @@ -/* - * Copyright Elasticsearch B.V. 
and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ -package org.elasticsearch.xpack.ml.inference.nlp.tokenizers; - -import java.util.ArrayList; -import java.util.Collections; -import java.util.List; -import java.util.Map; - -/** - * SubWord tokenization via the Word Piece algorithm using the - * provided vocabulary. - * - * The input is split by white space and should be pre-processed - * by {@link BasicTokenizer} - */ -public class WordPieceTokenizer { - - private static final String CONTINUATION = "##"; - - private final Map vocab; - private final String unknownToken; - private final int maxInputCharsPerWord; - - /** - * @param vocab The token vocabulary - * @param unknownToken If not found in the vocabulary - * @param maxInputCharsPerWord Inputs tokens longer than this are 'unknown' - */ - public WordPieceTokenizer(Map vocab, String unknownToken, int maxInputCharsPerWord) { - this.vocab = vocab; - this.unknownToken = unknownToken; - this.maxInputCharsPerWord = maxInputCharsPerWord; - } - - /** - * Wordpiece tokenize the input text. - * - * @param token Word to tokenize - * @return List of token IDs - */ - public List tokenize(DelimitedToken token) { - - if (token.getToken().length() > maxInputCharsPerWord) { - assert vocab.containsKey(unknownToken); - return Collections.singletonList(vocab.get(unknownToken)); - } - - List output = new ArrayList<>(); - boolean isBad = false; - int start = 0; - int length = token.getToken().length(); - while (start < length) { - int end = length; - - String currentValidSubStr = null; - - while (start < end) { - String subStr; - if (start > 0) { - subStr = CONTINUATION + token.getToken().substring(start, end); - } else { - subStr = token.getToken().substring(start, end); - } - - if (vocab.containsKey(subStr)) { - currentValidSubStr = subStr; - break; - } - - end--; - } - - if (currentValidSubStr == null) { - isBad = true; - break; - } - - output.add(vocab.get(currentValidSubStr)); - - start = end; - } - - if (isBad) { - return Collections.singletonList(vocab.get(unknownToken)); - } else { - return output; - } - } -} diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/BertRequestBuilderTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/BertRequestBuilderTests.java index 4087199b462d8..a9a3227c43971 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/BertRequestBuilderTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/BertRequestBuilderTests.java @@ -14,6 +14,7 @@ import org.elasticsearch.xpack.core.ml.inference.trainedmodel.BertTokenization; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.Tokenization; import org.elasticsearch.xpack.ml.inference.nlp.tokenizers.BertTokenizer; +import org.junit.After; import java.io.IOException; import java.util.Arrays; @@ -27,8 +28,17 @@ public class BertRequestBuilderTests extends ESTestCase { + private BertTokenizer tokenizer; + + @After + public void closeIt() { + if (tokenizer != null) { + tokenizer.close(); + } + } + public void testBuildRequest() throws IOException { - BertTokenizer tokenizer = BertTokenizer.builder(TEST_CASED_VOCAB, new BertTokenization(null, null, 512, null)).build(); + tokenizer = BertTokenizer.builder(TEST_CASED_VOCAB, new BertTokenization(null, null, 512, null)).build(); 
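        // The tokenizer is now held in a field rather than a local variable so
        // that the @After hook above can release it: NlpTokenizer extends
        // Releasable in this change because the Lucene analysis chain it wraps
        // must be closed when the test is done with it.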
BertRequestBuilder requestBuilder = new BertRequestBuilder(tokenizer); NlpTask.Request request = requestBuilder.buildRequest(List.of("Elasticsearch fun"), "request1", Tokenization.Truncate.NONE); @@ -53,7 +63,7 @@ public static List nthListItemFromMap(String name, int n, Map vocab = Arrays.asList("The", "capital", "of", BertTokenizer.MASK_TOKEN, "is", "Paris", "France"); - List tokens = new BasicTokenizer(randomBoolean(), randomBoolean(), randomBoolean()).tokenize(input); + List tokens = List.of(); int[] tokenMap = new int[] { 0, 1, 2, 3, 4, 5 }; int[] tokenIds = new int[] { 0, 1, 2, 3, 4, 5 }; diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/MPNetRequestBuilderTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/MPNetRequestBuilderTests.java index d848e6c9dd1b8..3779ab43262d9 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/MPNetRequestBuilderTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/MPNetRequestBuilderTests.java @@ -14,6 +14,7 @@ import org.elasticsearch.xpack.core.ml.inference.trainedmodel.MPNetTokenization; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.Tokenization; import org.elasticsearch.xpack.ml.inference.nlp.tokenizers.MPNetTokenizer; +import org.junit.After; import java.io.IOException; import java.util.Arrays; @@ -26,9 +27,17 @@ import static org.hamcrest.Matchers.hasSize; public class MPNetRequestBuilderTests extends ESTestCase { + private MPNetTokenizer tokenizer; + + @After + public void closeIt() { + if (tokenizer != null) { + tokenizer.close(); + } + } public void testBuildRequest() throws IOException { - MPNetTokenizer tokenizer = MPNetTokenizer.mpBuilder(TEST_CASED_VOCAB, new MPNetTokenization(null, null, 512, null)).build(); + tokenizer = MPNetTokenizer.mpBuilder(TEST_CASED_VOCAB, new MPNetTokenization(null, null, 512, null)).build(); MPNetRequestBuilder requestBuilder = new MPNetRequestBuilder(tokenizer); NlpTask.Request request = requestBuilder.buildRequest(List.of("Elasticsearch fun"), "request1", Tokenization.Truncate.NONE); @@ -51,7 +60,7 @@ public static List nthListItemFromMap(String name, int n, Map tokens = new BasicTokenizer(randomBoolean(), randomBoolean(), randomBoolean()).tokenize(input); + List tokens = basicTokenize(randomBoolean(), randomBoolean(), List.of(), input); assertThat(tokens, hasSize(12)); List taggedTokens = new ArrayList<>(); @@ -193,9 +194,9 @@ public void testGroupTaggedTokens() { assertThat(entityGroups.get(2).getEntity(), equalTo("Elastic")); } - public void testGroupTaggedTokens_GivenNoEntities() { + public void testGroupTaggedTokens_GivenNoEntities() throws IOException { String input = "Hi there"; - List tokens = new BasicTokenizer(randomBoolean(), randomBoolean(), randomBoolean()).tokenize(input); + List tokens = basicTokenize(randomBoolean(), randomBoolean(), List.of(), input); List taggedTokens = new ArrayList<>(); taggedTokens.add(new NerProcessor.NerResultProcessor.TaggedToken(tokens.get(0), NerProcessor.IobTag.O, 1.0)); @@ -205,9 +206,9 @@ public void testGroupTaggedTokens_GivenNoEntities() { assertThat(entityGroups, is(empty())); } - public void testGroupTaggedTokens_GivenConsecutiveEntities() { + public void testGroupTaggedTokens_GivenConsecutiveEntities() throws IOException { String input = "Rita, Sue, and Bob too"; - List tokens = new BasicTokenizer(randomBoolean(), randomBoolean(), randomBoolean()).tokenize(input); + List tokens = basicTokenize(randomBoolean(), 
randomBoolean(), List.of(), input); List taggedTokens = new ArrayList<>(); int i = 0; @@ -229,9 +230,9 @@ public void testGroupTaggedTokens_GivenConsecutiveEntities() { assertThat(entityGroups.get(2).getEntity(), equalTo("Bob")); } - public void testGroupTaggedTokens_GivenConsecutiveContinuingEntities() { + public void testGroupTaggedTokens_GivenConsecutiveContinuingEntities() throws IOException { String input = "FirstName SecondName, NextPerson NextPersonSecondName. something_else"; - List tokens = new BasicTokenizer(randomBoolean(), randomBoolean(), randomBoolean()).tokenize(input); + List tokens = basicTokenize(randomBoolean(), randomBoolean(), List.of(), input); List taggedTokens = new ArrayList<>(); int i = 0; @@ -251,9 +252,9 @@ public void testGroupTaggedTokens_GivenConsecutiveContinuingEntities() { assertThat(entityGroups.get(2).getClassName(), equalTo("ORG")); } - public void testEntityContainsPunctuation() { + public void testEntityContainsPunctuation() throws IOException { String input = "Alexander, my name is Benjamin Trent, I work at Acme Inc.."; - List tokens = new BasicTokenizer(randomBoolean(), randomBoolean(), randomBoolean()).tokenize(input); + List tokens = basicTokenize(randomBoolean(), randomBoolean(), List.of(), input); List taggedTokens = new ArrayList<>(); int i = 0; diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/BasicTokenFilterTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/BasicTokenFilterTests.java new file mode 100644 index 0000000000000..d2b7fdcd1e559 --- /dev/null +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/BasicTokenFilterTests.java @@ -0,0 +1,134 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.ml.inference.nlp.tokenizers; + +import org.apache.lucene.analysis.Analyzer; +import org.apache.lucene.analysis.BaseTokenStreamTestCase; +import org.apache.lucene.analysis.TokenStream; +import org.apache.lucene.analysis.Tokenizer; +import org.apache.lucene.analysis.core.WhitespaceTokenizer; +import org.apache.lucene.analysis.tokenattributes.CharTermAttribute; +import org.apache.lucene.analysis.tokenattributes.OffsetAttribute; + +import java.io.IOException; +import java.io.Reader; +import java.io.UncheckedIOException; +import java.util.ArrayList; +import java.util.List; + +/** + * Some test cases taken from + * https://github.com/huggingface/transformers/blob/ba8c4d0ac04acfcdbdeaed954f698d6d5ec3e532/tests/test_tokenization_bert.py + */ +public class BasicTokenFilterTests extends BaseTokenStreamTestCase { + + public void testNeverSplit_GivenNoLowerCase() throws IOException { + Analyzer analyzer = basicAnalyzerFromSettings(false, false, List.of("[UNK]")); + assertAnalyzesToNoCharFilter(analyzer, "1 (return) [ Patois ", new String[] { "1", "(", "return", ")", "[", "Patois" }); + assertAnalyzesToNoCharFilter(analyzer, "Hello [UNK].", new String[] { "Hello", "[UNK]", "." }); + assertAnalyzesToNoCharFilter(analyzer, "Hello-[UNK]", new String[] { "Hello", "-", "[UNK]" }); + assertAnalyzesToNoCharFilter( + analyzer, + " \tHeLLo!how \n Are yoU? 
[UNK]", + new String[] { "HeLLo", "!", "how", "Are", "yoU", "?", "[UNK]" } + ); + assertAnalyzesToNoCharFilter(analyzer, "Hello [UNK]?", new String[] { "Hello", "[UNK]", "?" }); + assertAnalyzesToNoCharFilter(analyzer, "Hello [UNK]!!", new String[] { "Hello", "[UNK]", "!", "!" }); + assertAnalyzesToNoCharFilter(analyzer, "Hello~[UNK][UNK]", new String[] { "Hello", "~", "[UNK]", "[UNK]" }); + assertAnalyzesToNoCharFilter(analyzer, "Hello-[unk]", new String[] { "Hello", "-", "[", "unk", "]" }); + } + + public void testNeverSplit_GivenLowerCase() throws IOException { + Analyzer analyzer = basicAnalyzerFromSettings(false, false, List.of("[UNK]")); + assertAnalyzesToNoCharFilter( + analyzer, + " \tHeLLo!how \n Are yoU? [UNK]", + new String[] { "HeLLo", "!", "how", "Are", "yoU", "?", "[UNK]" } + ); + assertAnalyzesToNoCharFilter(analyzer, "Hello [UNK].", new String[] { "Hello", "[UNK]", "." }); + assertAnalyzesToNoCharFilter(analyzer, "Hello [UNK]?", new String[] { "Hello", "[UNK]", "?" }); + assertAnalyzesToNoCharFilter(analyzer, "Hello [UNK]!!", new String[] { "Hello", "[UNK]", "!", "!" }); + assertAnalyzesToNoCharFilter(analyzer, "Hello-[UNK]", new String[] { "Hello", "-", "[UNK]" }); + assertAnalyzesToNoCharFilter(analyzer, "Hello~[UNK][UNK]", new String[] { "Hello", "~", "[UNK]", "[UNK]" }); + assertAnalyzesToNoCharFilter(analyzer, "Hello-[unk]", new String[] { "Hello", "-", "[", "unk", "]" }); + } + + public void testSplitCJK() throws Exception { + Analyzer analyzer = basicAnalyzerFromSettings(true, false, List.of("[UNK]")); + assertAnalyzesToNoCharFilter(analyzer, "hello ah\u535A\u63A8zz", new String[] { "hello", "ah", "\u535A", "\u63A8", "zz" }); + assertAnalyzesToNoCharFilter(analyzer, "hello world", new String[] { "hello", "world" }); + } + + public void testStripAccents() throws Exception { + Analyzer analyzer = basicAnalyzerFromSettings(true, true, List.of("[UNK]")); + assertAnalyzesToNoCharFilter(analyzer, "HäLLo how are you", new String[] { "HaLLo", "how", "are", "you" }); + } + + private static void assertAnalyzesToNoCharFilter(Analyzer a, String input, String[] output) throws IOException { + assertTokenStreamContents(a.tokenStream("dummy", input), output, null, null, null, null, null, input.length()); + checkResetException(a, input); + // We don't allow the random char filter because our offsets aren't corrected appropriately due to "never_split" + // If we could figure out a way to pass "never_split" through whichever passed char_filter there was, then it would work + checkAnalysisConsistency(random(), a, false, input); + } + + public void testIsPunctuation() { + assertTrue(BasicTokenFilter.isPunctuationMark('-')); + assertTrue(BasicTokenFilter.isPunctuationMark('$')); + assertTrue(BasicTokenFilter.isPunctuationMark('`')); + assertTrue(BasicTokenFilter.isPunctuationMark('.')); + assertFalse(BasicTokenFilter.isPunctuationMark(' ')); + assertFalse(BasicTokenFilter.isPunctuationMark('A')); + assertTrue(BasicTokenFilter.isPunctuationMark('[')); + } + + public static Analyzer basicAnalyzerFromSettings(boolean isTokenizeCjkChars, boolean isStripAccents, List neverSplit) { + return new Analyzer() { + @Override + protected TokenStreamComponents createComponents(String fieldName) { + Tokenizer t = new WhitespaceTokenizer(); + try { + return new TokenStreamComponents(t, BasicTokenFilter.build(isTokenizeCjkChars, isStripAccents, neverSplit, t)); + } catch (IOException ex) { + throw new UncheckedIOException(ex); + } + } + + @Override + protected Reader initReader(String fieldName, Reader 
reader) { + return new ControlCharFilter(reader); + } + }; + } + + public static List basicTokenize(Analyzer analyzer, String input) throws IOException { + try (TokenStream test = analyzer.tokenStream("test", input)) { + test.reset(); + CharTermAttribute term = test.addAttribute(CharTermAttribute.class); + OffsetAttribute offsetAttribute = test.addAttribute(OffsetAttribute.class); + List tokens = new ArrayList<>(); + while (test.incrementToken()) { + tokens.add(new DelimitedToken(term.toString(), offsetAttribute.startOffset(), offsetAttribute.endOffset())); + } + test.end(); + return tokens; + } + } + + public static List basicTokenize( + boolean isTokenizeCjkChars, + boolean isStripAccents, + List neverSplit, + String input + ) throws IOException { + try (Analyzer analyzer = basicAnalyzerFromSettings(isTokenizeCjkChars, isStripAccents, neverSplit)) { + return basicTokenize(analyzer, input); + } + } + +} diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/BasicTokenizerTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/BasicTokenizerTests.java deleted file mode 100644 index effe3be0da5a6..0000000000000 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/BasicTokenizerTests.java +++ /dev/null @@ -1,266 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -package org.elasticsearch.xpack.ml.inference.nlp.tokenizers; - -import org.elasticsearch.test.ESTestCase; - -import java.util.Collections; -import java.util.List; -import java.util.stream.Collectors; - -import static org.hamcrest.Matchers.contains; -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.hasSize; -import static org.hamcrest.Matchers.sameInstance; - -/** - * Some test cases taken from - * https://github.com/huggingface/transformers/blob/ba8c4d0ac04acfcdbdeaed954f698d6d5ec3e532/tests/test_tokenization_bert.py - */ -public class BasicTokenizerTests extends ESTestCase { - - public void testLowerCase() { - BasicTokenizer tokenizer = new BasicTokenizer(); - var tokens = tokenizer.tokenize(" \tHeLLo!how \n Are yoU? "); - assertThat(tokenStrings(tokens), contains("hello", "!", "how", "are", "you", "?")); - - tokens = tokenizer.tokenize("H\u00E9llo"); - assertThat(tokenStrings(tokens), contains("hello")); - } - - public void testLowerCaseWithoutStripAccents() { - BasicTokenizer tokenizer = new BasicTokenizer(true, true, false); - var tokens = tokenizer.tokenize(" \tHäLLo!how \n Are yoU? "); - assertThat(tokenStrings(tokens), contains("hällo", "!", "how", "are", "you", "?")); - - tokens = tokenizer.tokenize("H\u00E9llo"); - assertThat(tokenStrings(tokens), contains("h\u00E9llo")); - } - - public void testLowerCaseStripAccentsDefault() { - BasicTokenizer tokenizer = new BasicTokenizer(true, true); - var tokens = tokenizer.tokenize(" \tHäLLo!how \n Are yoU? "); - assertThat(tokenStrings(tokens), contains("hallo", "!", "how", "are", "you", "?")); - - tokens = tokenizer.tokenize("H\u00E9llo"); - assertThat(tokenStrings(tokens), contains("hello")); - } - - public void testNoLower() { - var tokens = new BasicTokenizer(false, true, false).tokenize(" \tHäLLo!how \n Are yoU? 
"); - assertThat(tokenStrings(tokens), contains("HäLLo", "!", "how", "Are", "yoU", "?")); - } - - public void testNoLowerStripAccents() { - var tokens = new BasicTokenizer(false, true, true).tokenize(" \tHäLLo!how \n Are yoU? "); - assertThat(tokenStrings(tokens), contains("HaLLo", "!", "how", "Are", "yoU", "?")); - } - - public void testNeverSplit_GivenNoLowerCase() { - BasicTokenizer tokenizer = new BasicTokenizer(false, false, false, Collections.singleton("[UNK]")); - var tokens = tokenizer.tokenize(" \tHeLLo!how \n Are yoU? [UNK]"); - assertThat(tokenStrings(tokens), contains("HeLLo", "!", "how", "Are", "yoU", "?", "[UNK]")); - - tokens = tokenizer.tokenize("Hello [UNK]."); - assertThat(tokenStrings(tokens), contains("Hello", "[UNK]", ".")); - - tokens = tokenizer.tokenize("Hello [UNK]?"); - assertThat(tokenStrings(tokens), contains("Hello", "[UNK]", "?")); - - tokens = tokenizer.tokenize("Hello [UNK]!!"); - assertThat(tokenStrings(tokens), contains("Hello", "[UNK]", "!", "!")); - - tokens = tokenizer.tokenize("Hello-[UNK]"); - assertThat(tokenStrings(tokens), contains("Hello", "-", "[UNK]")); - tokens = tokenizer.tokenize("Hello~[UNK][UNK]"); - assertThat(tokenStrings(tokens), contains("Hello", "~", "[UNK]", "[UNK]")); - assertThat(tokenStrings(tokenizer.tokenize("Hello~[[UNK]")), contains("Hello", "~", "[", "[UNK]")); - assertThat(tokenStrings(tokenizer.tokenize("Hello~[[[UNK]")), contains("Hello", "~", "[", "[", "[UNK]")); - assertThat(tokenStrings(tokenizer.tokenize("Hello~[UNK]]")), contains("Hello", "~", "[UNK]", "]")); - assertThat(tokenStrings(tokenizer.tokenize("Hello~[UNK]]]")), contains("Hello", "~", "[UNK]", "]", "]")); - assertThat(tokenStrings(tokenizer.tokenize("Hello~[[UNK]]")), contains("Hello", "~", "[", "[UNK]", "]")); - tokens = tokenizer.tokenize("Hello-[unk]"); - assertThat(tokenStrings(tokens), contains("Hello", "-", "[", "unk", "]")); - } - - public void testNeverSplit_GivenLowerCase() { - BasicTokenizer tokenizer = new BasicTokenizer(true, false, false, Collections.singleton("[UNK]")); - var tokens = tokenizer.tokenize(" \tHeLLo!how \n Are yoU? 
[UNK]"); - assertThat(tokenStrings(tokens), contains("hello", "!", "how", "are", "you", "?", "[UNK]")); - - tokens = tokenizer.tokenize("Hello [UNK]."); - assertThat(tokenStrings(tokens), contains("hello", "[UNK]", ".")); - - tokens = tokenizer.tokenize("Hello [UNK]?"); - assertThat(tokenStrings(tokens), contains("hello", "[UNK]", "?")); - - tokens = tokenizer.tokenize("Hello [UNK]!!"); - assertThat(tokenStrings(tokens), contains("hello", "[UNK]", "!", "!")); - - tokens = tokenizer.tokenize("Hello-[UNK]"); - assertThat(tokenStrings(tokens), contains("hello", "-", "[UNK]")); - tokens = tokenizer.tokenize("Hello~[UNK][UNK]"); - assertThat(tokenStrings(tokens), contains("hello", "~", "[UNK]", "[UNK]")); - tokens = tokenizer.tokenize("Hello-[unk]"); - assertThat(tokenStrings(tokens), contains("hello", "-", "[", "unk", "]")); - } - - public void testSplitOnPunctuation() { - var tokens = BasicTokenizer.splitOnPunctuation(new DelimitedToken(0, 3, "hi!")); - assertEquals(new DelimitedToken(0, 2, "hi"), tokens.get(0)); - assertEquals(new DelimitedToken(2, 3, "!"), tokens.get(1)); - - tokens = BasicTokenizer.splitOnPunctuation(makeToken("hi.")); - assertEquals(new DelimitedToken(0, 2, "hi"), tokens.get(0)); - assertEquals(new DelimitedToken(2, 3, "."), tokens.get(1)); - - tokens = BasicTokenizer.splitOnPunctuation(makeToken("!hi")); - assertEquals(new DelimitedToken(0, 1, "!"), tokens.get(0)); - assertEquals(new DelimitedToken(1, 3, "hi"), tokens.get(1)); - - tokens = BasicTokenizer.splitOnPunctuation(makeToken("don't")); - assertEquals(new DelimitedToken(0, 3, "don"), tokens.get(0)); - assertEquals(new DelimitedToken(3, 4, "'"), tokens.get(1)); - assertEquals(new DelimitedToken(4, 5, "t"), tokens.get(2)); - - tokens = BasicTokenizer.splitOnPunctuation(makeToken("!!hi")); - assertEquals(new DelimitedToken(0, 1, "!"), tokens.get(0)); - assertEquals(new DelimitedToken(1, 2, "!"), tokens.get(1)); - assertEquals(new DelimitedToken(2, 4, "hi"), tokens.get(2)); - - tokens = BasicTokenizer.splitOnPunctuation(makeToken("[hi]")); - assertEquals(new DelimitedToken(0, 1, "["), tokens.get(0)); - assertEquals(new DelimitedToken(1, 3, "hi"), tokens.get(1)); - assertEquals(new DelimitedToken(3, 4, "]"), tokens.get(2)); - - tokens = BasicTokenizer.splitOnPunctuation(makeToken("!!")); - assertEquals(new DelimitedToken(0, 1, "!"), tokens.get(0)); - assertEquals(new DelimitedToken(1, 2, "!"), tokens.get(1)); - - tokens = BasicTokenizer.splitOnPunctuation(makeToken("elastic’s")); - assertEquals(new DelimitedToken(0, 7, "elastic"), tokens.get(0)); - assertEquals(new DelimitedToken(7, 8, "’"), tokens.get(1)); - assertEquals(new DelimitedToken(8, 9, "s"), tokens.get(2)); - - tokens = BasicTokenizer.splitOnPunctuation(new DelimitedToken(4, 13, "elastic’s")); - assertEquals(new DelimitedToken(4, 11, "elastic"), tokens.get(0)); - assertEquals(new DelimitedToken(11, 12, "’"), tokens.get(1)); - assertEquals(new DelimitedToken(12, 13, "s"), tokens.get(2)); - } - - public void testStripAccents() { - assertEquals("Hallo", BasicTokenizer.stripAccents("Hällo")); - } - - public void testTokenizeCjkChars() { - assertEquals(" \u535A \u63A8 ", BasicTokenizer.tokenizeCjkChars("\u535A\u63A8")); - - String noCjkChars = "hello"; - assertThat(BasicTokenizer.tokenizeCjkChars(noCjkChars), sameInstance(noCjkChars)); - } - - public void testTokenizeChinese() { - var tokens = new BasicTokenizer().tokenize("ah\u535A\u63A8zz"); - assertThat(tokenStrings(tokens), contains("ah", "\u535A", "\u63A8", "zz")); - } - - public void testCleanText() { - 
assertEquals("change these chars to spaces", BasicTokenizer.cleanText("change\tthese chars\rto\nspaces")); - assertEquals("filter control chars", BasicTokenizer.cleanText("\u0000filter \uFFFDcontrol chars\u0005")); - } - - public void testIsWhitespace() { - assertTrue(BasicTokenizer.isWhiteSpace(' ')); - assertTrue(BasicTokenizer.isWhiteSpace('\t')); - assertTrue(BasicTokenizer.isWhiteSpace('\r')); - assertTrue(BasicTokenizer.isWhiteSpace('\n')); - assertTrue(BasicTokenizer.isWhiteSpace('\u00A0')); - - assertFalse(BasicTokenizer.isWhiteSpace('_')); - assertFalse(BasicTokenizer.isWhiteSpace('A')); - } - - public void testIsControl() { - assertTrue(BasicTokenizer.isControlChar('\u0005')); - assertTrue(BasicTokenizer.isControlChar('\u001C')); - - assertFalse(BasicTokenizer.isControlChar('A')); - assertFalse(BasicTokenizer.isControlChar(' ')); - assertFalse(BasicTokenizer.isControlChar('\t')); - assertFalse(BasicTokenizer.isControlChar('\r')); - } - - public void testIsPunctuation() { - assertTrue(BasicTokenizer.isPunctuationMark('-')); - assertTrue(BasicTokenizer.isPunctuationMark('$')); - assertTrue(BasicTokenizer.isPunctuationMark('`')); - assertTrue(BasicTokenizer.isPunctuationMark('.')); - assertFalse(BasicTokenizer.isPunctuationMark(' ')); - assertFalse(BasicTokenizer.isPunctuationMark('A')); - assertTrue(BasicTokenizer.isPunctuationMark('[')); - } - - public void testIsCjkChar() { - assertTrue(BasicTokenizer.isCjkChar(0x3400)); - assertFalse(BasicTokenizer.isCjkChar(0x4DC0)); - - assertTrue(BasicTokenizer.isCjkChar(0xF900)); - assertFalse(BasicTokenizer.isCjkChar(0xFB00)); - - assertTrue(BasicTokenizer.isCjkChar(0x20000)); - assertFalse(BasicTokenizer.isCjkChar(0x2A6E0)); - - assertTrue(BasicTokenizer.isCjkChar(0x20000)); - assertFalse(BasicTokenizer.isCjkChar(0x2A6E0)); - - assertTrue(BasicTokenizer.isCjkChar(0x2A700)); - assertFalse(BasicTokenizer.isCjkChar(0x2CEB0)); - - assertTrue(BasicTokenizer.isCjkChar(0x2F800)); - assertFalse(BasicTokenizer.isCjkChar(0x2FA20)); - } - - public void testWhitespaceTokenize() { - { - List delimitedTokens = BasicTokenizer.whiteSpaceTokenize("hello! 
how are you?"); - assertThat(delimitedTokens, hasSize(4)); - assertThat(tokenStrings(delimitedTokens), contains("hello!", "how", "are", "you?")); - - assertThat(delimitedTokens.get(0), equalTo(new DelimitedToken(0, 6, "hello!"))); - assertThat(delimitedTokens.get(1), equalTo(new DelimitedToken(7, 10, "how"))); - assertThat(delimitedTokens.get(2), equalTo(new DelimitedToken(11, 14, "are"))); - assertThat(delimitedTokens.get(3), equalTo(new DelimitedToken(15, 19, "you?"))); - } - { - List delimitedTokens = BasicTokenizer.whiteSpaceTokenize(" leading whitespace"); - assertThat(delimitedTokens, hasSize(2)); - assertThat(tokenStrings(delimitedTokens), contains("leading", "whitespace")); - - assertThat(delimitedTokens.get(0), equalTo(new DelimitedToken(3, 10, "leading"))); - assertThat(delimitedTokens.get(1), equalTo(new DelimitedToken(11, 21, "whitespace"))); - } - { - List delimitedTokens = BasicTokenizer.whiteSpaceTokenize("double spaced text "); - assertThat(delimitedTokens, hasSize(3)); - assertThat(tokenStrings(delimitedTokens), contains("double", "spaced", "text")); - - assertThat(delimitedTokens.get(0), equalTo(new DelimitedToken(0, 6, "double"))); - assertThat(delimitedTokens.get(1), equalTo(new DelimitedToken(8, 14, "spaced"))); - assertThat(delimitedTokens.get(2), equalTo(new DelimitedToken(16, 20, "text"))); - } - } - - private List tokenStrings(List tokens) { - return tokens.stream().map(DelimitedToken::getToken).collect(Collectors.toList()); - } - - private DelimitedToken makeToken(String str) { - return new DelimitedToken(0, str.length(), str); - } - -} diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/BertTokenizerTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/BertTokenizerTests.java index fe44e606887bc..381df2230532e 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/BertTokenizerTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/BertTokenizerTests.java @@ -47,97 +47,119 @@ public class BertTokenizerTests extends ESTestCase { BertTokenizer.PAD_TOKEN ); - private List tokenStrings(List tokens) { - return tokens.stream().map(DelimitedToken::getToken).collect(Collectors.toList()); + private List tokenStrings(List tokens) { + return tokens.stream().map(WordPieceTokenFilter.WordPieceToken::toString).collect(Collectors.toList()); } public void testTokenize() { - BertTokenizer tokenizer = BertTokenizer.builder( - TEST_CASED_VOCAB, - new BertTokenization(null, false, null, Tokenization.Truncate.NONE) - ).build(); - - TokenizationResult.Tokenization tokenization = tokenizer.tokenize("Elasticsearch fun", Tokenization.Truncate.NONE); - assertThat(tokenStrings(tokenization.getTokens()), contains("Elasticsearch", "fun")); - assertArrayEquals(new int[] { 0, 1, 3 }, tokenization.getTokenIds()); - assertArrayEquals(new int[] { 0, 0, 1 }, tokenization.getTokenMap()); + try ( + BertTokenizer tokenizer = BertTokenizer.builder( + TEST_CASED_VOCAB, + new BertTokenization(null, false, null, Tokenization.Truncate.NONE) + ).build() + ) { + TokenizationResult.Tokenization tokenization = tokenizer.tokenize("Elasticsearch fun", Tokenization.Truncate.NONE); + assertThat(tokenStrings(tokenization.getTokens()), contains("Elastic", "##search", "fun")); + assertArrayEquals(new int[] { 0, 1, 3 }, tokenization.getTokenIds()); + assertArrayEquals(new int[] { 0, 0, 1 }, tokenization.getTokenMap()); + } } public void 
testTokenizeLargeInputNoTruncation() { - BertTokenizer tokenizer = BertTokenizer.builder(TEST_CASED_VOCAB, new BertTokenization(null, false, 5, Tokenization.Truncate.NONE)) - .build(); - - ElasticsearchStatusException ex = expectThrows( - ElasticsearchStatusException.class, - () -> tokenizer.tokenize("Elasticsearch fun with Pancake and Godzilla", Tokenization.Truncate.NONE) - ); - assertThat(ex.getMessage(), equalTo("Input too large. The tokenized input length [8] exceeds the maximum sequence length [5]")); - - BertTokenizer specialCharTokenizer = BertTokenizer.builder( - TEST_CASED_VOCAB, - new BertTokenization(null, true, 5, Tokenization.Truncate.NONE) - ).build(); - - // Shouldn't throw - tokenizer.tokenize("Elasticsearch fun with Pancake", Tokenization.Truncate.NONE); - - // Should throw as special chars add two tokens - expectThrows( - ElasticsearchStatusException.class, - () -> specialCharTokenizer.tokenize("Elasticsearch fun with Pancake", Tokenization.Truncate.NONE) - ); + try ( + BertTokenizer tokenizer = BertTokenizer.builder( + TEST_CASED_VOCAB, + new BertTokenization(null, false, 5, Tokenization.Truncate.NONE) + ).build(); + BertTokenizer specialCharTokenizer = BertTokenizer.builder( + TEST_CASED_VOCAB, + new BertTokenization(null, true, 5, Tokenization.Truncate.NONE) + ).build() + ) { + + ElasticsearchStatusException ex = expectThrows( + ElasticsearchStatusException.class, + () -> tokenizer.tokenize("Elasticsearch fun with Pancake and Godzilla", Tokenization.Truncate.NONE) + ); + assertThat(ex.getMessage(), equalTo("Input too large. The tokenized input length [8] exceeds the maximum sequence length [5]")); + + // Shouldn't throw + tokenizer.tokenize("Elasticsearch fun with Pancake", Tokenization.Truncate.NONE); + + // Should throw as special chars add two tokens + expectThrows( + ElasticsearchStatusException.class, + () -> specialCharTokenizer.tokenize("Elasticsearch fun with Pancake", Tokenization.Truncate.NONE) + ); + } + } public void testTokenizeLargeInputTruncation() { - BertTokenizer tokenizer = BertTokenizer.builder(TEST_CASED_VOCAB, new BertTokenization(null, false, 5, Tokenization.Truncate.FIRST)) - .build(); - - TokenizationResult.Tokenization tokenization = tokenizer.tokenize( - "Elasticsearch fun with Pancake and Godzilla", - Tokenization.Truncate.FIRST - ); - assertArrayEquals(new int[] { 0, 1, 3, 18, 17 }, tokenization.getTokenIds()); - - BertTokenizer tokenizerWithSpecialTokens = BertTokenizer.builder( - TEST_CASED_VOCAB, - new BertTokenization(null, true, 5, Tokenization.Truncate.FIRST) - ).build(); - tokenization = tokenizerWithSpecialTokens.tokenize("Elasticsearch fun with Pancake and Godzilla", Tokenization.Truncate.FIRST); - assertArrayEquals(new int[] { 12, 0, 1, 3, 13 }, tokenization.getTokenIds()); - assertArrayEquals(new int[] { -1, 0, 0, 1, -1 }, tokenization.getTokenMap()); + try ( + BertTokenizer tokenizer = BertTokenizer.builder( + TEST_CASED_VOCAB, + new BertTokenization(null, false, 5, Tokenization.Truncate.FIRST) + ).build() + ) { + + TokenizationResult.Tokenization tokenization = tokenizer.tokenize( + "Elasticsearch fun with Pancake and Godzilla", + Tokenization.Truncate.FIRST + ); + assertArrayEquals(new int[] { 0, 1, 3, 18, 17 }, tokenization.getTokenIds()); + } + + try ( + BertTokenizer tokenizerWithSpecialTokens = BertTokenizer.builder( + TEST_CASED_VOCAB, + new BertTokenization(null, true, 5, Tokenization.Truncate.FIRST) + ).build() + ) { + var tokenization = tokenizerWithSpecialTokens.tokenize( + "Elasticsearch fun with Pancake and 
Godzilla", + Tokenization.Truncate.FIRST + ); + assertArrayEquals(new int[] { 12, 0, 1, 3, 13 }, tokenization.getTokenIds()); + assertArrayEquals(new int[] { -1, 0, 0, 1, -1 }, tokenization.getTokenMap()); + } } public void testTokenizeAppendSpecialTokens() { - BertTokenizer tokenizer = BertTokenizer.builder(TEST_CASED_VOCAB, Tokenization.createDefault()).build(); - - TokenizationResult.Tokenization tokenization = tokenizer.tokenize("Elasticsearch fun", Tokenization.Truncate.NONE); - assertArrayEquals(new int[] { 12, 0, 1, 3, 13 }, tokenization.getTokenIds()); - assertArrayEquals(new int[] { -1, 0, 0, 1, -1 }, tokenization.getTokenMap()); + try (BertTokenizer tokenizer = BertTokenizer.builder(TEST_CASED_VOCAB, Tokenization.createDefault()).build()) { + TokenizationResult.Tokenization tokenization = tokenizer.tokenize("Elasticsearch fun", Tokenization.Truncate.NONE); + assertArrayEquals(new int[] { 12, 0, 1, 3, 13 }, tokenization.getTokenIds()); + assertArrayEquals(new int[] { -1, 0, 0, 1, -1 }, tokenization.getTokenMap()); + } } public void testNeverSplitTokens() { final String specialToken = "SP001"; - BertTokenizer tokenizer = BertTokenizer.builder(TEST_CASED_VOCAB, Tokenization.createDefault()) - .setNeverSplit(Collections.singleton(specialToken)) - .setWithSpecialTokens(false) - .build(); - - TokenizationResult.Tokenization tokenization = tokenizer.tokenize( - "Elasticsearch " + specialToken + " fun", - Tokenization.Truncate.NONE - ); - assertThat(tokenStrings(tokenization.getTokens()), contains("Elasticsearch", specialToken, "fun")); - assertArrayEquals(new int[] { 0, 1, 15, 3 }, tokenization.getTokenIds()); - assertArrayEquals(new int[] { 0, 0, 1, 2 }, tokenization.getTokenMap()); + try ( + BertTokenizer tokenizer = BertTokenizer.builder(TEST_CASED_VOCAB, Tokenization.createDefault()) + .setNeverSplit(Collections.singleton(specialToken)) + .setWithSpecialTokens(false) + .build() + ) { + + TokenizationResult.Tokenization tokenization = tokenizer.tokenize( + "Elasticsearch " + specialToken + " fun", + Tokenization.Truncate.NONE + ); + assertThat(tokenStrings(tokenization.getTokens()), contains("Elastic", "##search", specialToken, "fun")); + assertArrayEquals(new int[] { 0, 1, 15, 3 }, tokenization.getTokenIds()); + assertArrayEquals(new int[] { 0, 0, 1, 2 }, tokenization.getTokenMap()); + } } public void testDoLowerCase() { - { + try ( BertTokenizer tokenizer = BertTokenizer.builder( Arrays.asList("elastic", "##search", "fun", BertTokenizer.UNKNOWN_TOKEN, BertTokenizer.PAD_TOKEN), Tokenization.createDefault() - ).setDoLowerCase(false).setWithSpecialTokens(false).build(); + ).setDoLowerCase(false).setWithSpecialTokens(false).build() + ) { TokenizationResult.Tokenization tokenization = tokenizer.tokenize("Elasticsearch fun", Tokenization.Truncate.NONE); assertArrayEquals(new int[] { 3, 2 }, tokenization.getTokenIds()); @@ -148,195 +170,234 @@ public void testDoLowerCase() { assertArrayEquals(new int[] { 0, 0, 1 }, tokenization.getTokenMap()); } - { + try ( BertTokenizer tokenizer = BertTokenizer.builder( Arrays.asList("elastic", "##search", "fun", BertTokenizer.UNKNOWN_TOKEN, BertTokenizer.PAD_TOKEN), Tokenization.createDefault() - ).setDoLowerCase(true).setWithSpecialTokens(false).build(); + ).setDoLowerCase(true).setWithSpecialTokens(false).build() + ) { TokenizationResult.Tokenization tokenization = tokenizer.tokenize("Elasticsearch fun", Tokenization.Truncate.NONE); assertArrayEquals(new int[] { 0, 1, 2 }, tokenization.getTokenIds()); + assertArrayEquals(new int[] { 0, 0, 1 }, 
tokenization.getTokenMap()); } } public void testPunctuation() { - BertTokenizer tokenizer = BertTokenizer.builder(TEST_CASED_VOCAB, Tokenization.createDefault()).setWithSpecialTokens(false).build(); - - TokenizationResult.Tokenization tokenization = tokenizer.tokenize("Elasticsearch, fun.", Tokenization.Truncate.NONE); - assertThat(tokenStrings(tokenization.getTokens()), contains("Elasticsearch", ",", "fun", ".")); - assertArrayEquals(new int[] { 0, 1, 11, 3, 10 }, tokenization.getTokenIds()); - assertArrayEquals(new int[] { 0, 0, 1, 2, 3 }, tokenization.getTokenMap()); - - tokenization = tokenizer.tokenize("Elasticsearch, fun [MASK].", Tokenization.Truncate.NONE); - assertArrayEquals(new int[] { 0, 1, 11, 3, 14, 10 }, tokenization.getTokenIds()); - assertArrayEquals(new int[] { 0, 0, 1, 2, 3, 4 }, tokenization.getTokenMap()); + try ( + BertTokenizer tokenizer = BertTokenizer.builder(TEST_CASED_VOCAB, Tokenization.createDefault()) + .setWithSpecialTokens(false) + .build() + ) { + TokenizationResult.Tokenization tokenization = tokenizer.tokenize("Elasticsearch, fun.", Tokenization.Truncate.NONE); + assertThat(tokenStrings(tokenization.getTokens()), contains("Elastic", "##search", ",", "fun", ".")); + assertArrayEquals(new int[] { 0, 1, 11, 3, 10 }, tokenization.getTokenIds()); + assertArrayEquals(new int[] { 0, 0, 1, 2, 3 }, tokenization.getTokenMap()); + + tokenization = tokenizer.tokenize("Elasticsearch, fun [MASK].", Tokenization.Truncate.NONE); + assertArrayEquals(new int[] { 0, 1, 11, 3, 14, 10 }, tokenization.getTokenIds()); + assertArrayEquals(new int[] { 0, 0, 1, 2, 3, 4 }, tokenization.getTokenMap()); + } } public void testPunctuationWithMask() { - BertTokenizer tokenizer = BertTokenizer.builder( - List.of( - "[CLS]", - "This", - "is", - "[MASK]", - "-", - "~", - "ta", - "##stic", - "!", - "[SEP]", - "sub", - ",", - ".", - BertTokenizer.UNKNOWN_TOKEN, - BertTokenizer.PAD_TOKEN - ), - Tokenization.createDefault() - ).setWithSpecialTokens(true).setNeverSplit(Set.of("[MASK]")).build(); - - TokenizationResult.Tokenization tokenization = tokenizer.tokenize("This is [MASK]-tastic!", Tokenization.Truncate.NONE); - assertThat(tokenStrings(tokenization.getTokens()), contains("This", "is", "[MASK]", "-", "tastic", "!")); - assertArrayEquals(new int[] { 0, 1, 2, 3, 4, 6, 7, 8, 9 }, tokenization.getTokenIds()); - assertArrayEquals(new int[] { -1, 0, 1, 2, 3, 4, 4, 5, -1 }, tokenization.getTokenMap()); - - tokenization = tokenizer.tokenize("This is sub~[MASK]!", Tokenization.Truncate.NONE); - assertThat(tokenStrings(tokenization.getTokens()), contains("This", "is", "sub", "~", "[MASK]", "!")); - assertArrayEquals(new int[] { 0, 1, 2, 10, 5, 3, 8, 9 }, tokenization.getTokenIds()); - assertArrayEquals(new int[] { -1, 0, 1, 2, 3, 4, 5, -1 }, tokenization.getTokenMap()); - - tokenization = tokenizer.tokenize("This is sub,[MASK].tastic!", Tokenization.Truncate.NONE); - assertThat(tokenStrings(tokenization.getTokens()), contains("This", "is", "sub", ",", "[MASK]", ".", "tastic", "!")); - assertArrayEquals(new int[] { 0, 1, 2, 10, 11, 3, 12, 6, 7, 8, 9 }, tokenization.getTokenIds()); - assertArrayEquals(new int[] { -1, 0, 1, 2, 3, 4, 5, 6, 6, 7, -1 }, tokenization.getTokenMap()); + try ( + BertTokenizer tokenizer = BertTokenizer.builder( + List.of( + "[CLS]", + "This", + "is", + "[MASK]", + "-", + "~", + "ta", + "##stic", + "!", + "[SEP]", + "sub", + ",", + ".", + BertTokenizer.UNKNOWN_TOKEN, + BertTokenizer.PAD_TOKEN + ), + Tokenization.createDefault() + 
).setWithSpecialTokens(true).setNeverSplit(Set.of("[MASK]")).build() + ) { + + TokenizationResult.Tokenization tokenization = tokenizer.tokenize("This is [MASK]-tastic!", Tokenization.Truncate.NONE); + assertThat(tokenStrings(tokenization.getTokens()), contains("This", "is", "[MASK]", "-", "ta", "##stic", "!")); + assertArrayEquals(new int[] { 0, 1, 2, 3, 4, 6, 7, 8, 9 }, tokenization.getTokenIds()); + assertArrayEquals(new int[] { -1, 0, 1, 2, 3, 4, 4, 5, -1 }, tokenization.getTokenMap()); + + tokenization = tokenizer.tokenize("This is sub~[MASK]!", Tokenization.Truncate.NONE); + assertThat(tokenStrings(tokenization.getTokens()), contains("This", "is", "sub", "~", "[MASK]", "!")); + assertArrayEquals(new int[] { 0, 1, 2, 10, 5, 3, 8, 9 }, tokenization.getTokenIds()); + assertArrayEquals(new int[] { -1, 0, 1, 2, 3, 4, 5, -1 }, tokenization.getTokenMap()); + + tokenization = tokenizer.tokenize("This is sub,[MASK].tastic!", Tokenization.Truncate.NONE); + assertThat(tokenStrings(tokenization.getTokens()), contains("This", "is", "sub", ",", "[MASK]", ".", "ta", "##stic", "!")); + assertArrayEquals(new int[] { 0, 1, 2, 10, 11, 3, 12, 6, 7, 8, 9 }, tokenization.getTokenIds()); + assertArrayEquals(new int[] { -1, 0, 1, 2, 3, 4, 5, 6, 6, 7, -1 }, tokenization.getTokenMap()); + } } public void testBatchInput() { - BertTokenizer tokenizer = BertTokenizer.builder( - TEST_CASED_VOCAB, - new BertTokenization(null, false, null, Tokenization.Truncate.NONE) - ).build(); - - TokenizationResult tr = tokenizer.buildTokenizationResult( - List.of( - tokenizer.tokenize("Elasticsearch", Tokenization.Truncate.NONE), - tokenizer.tokenize("my little red car", Tokenization.Truncate.NONE), - tokenizer.tokenize("Godzilla day", Tokenization.Truncate.NONE), - tokenizer.tokenize("Godzilla Pancake red car day", Tokenization.Truncate.NONE) - ) - ); - assertThat(tr.getTokenizations(), hasSize(4)); - - TokenizationResult.Tokenization tokenization = tr.getTokenizations().get(0); - assertArrayEquals(new int[] { 0, 1 }, tokenization.getTokenIds()); - assertArrayEquals(new int[] { 0, 0 }, tokenization.getTokenMap()); - - tokenization = tr.getTokenizations().get(1); - assertArrayEquals(new int[] { 4, 5, 6, 7 }, tokenization.getTokenIds()); - assertArrayEquals(new int[] { 0, 1, 2, 3 }, tokenization.getTokenMap()); - - tokenization = tr.getTokenizations().get(2); - assertArrayEquals(new int[] { 8, 9, 16 }, tokenization.getTokenIds()); - assertArrayEquals(new int[] { 0, 0, 1 }, tokenization.getTokenMap()); - - tokenization = tr.getTokenizations().get(3); - assertArrayEquals(new int[] { 8, 9, 17, 6, 7, 16 }, tokenization.getTokenIds()); - assertArrayEquals(new int[] { 0, 0, 1, 2, 3, 4 }, tokenization.getTokenMap()); + try ( + BertTokenizer tokenizer = BertTokenizer.builder( + TEST_CASED_VOCAB, + new BertTokenization(null, false, null, Tokenization.Truncate.NONE) + ).build() + ) { + + TokenizationResult tr = tokenizer.buildTokenizationResult( + List.of( + tokenizer.tokenize("Elasticsearch", Tokenization.Truncate.NONE), + tokenizer.tokenize("my little red car", Tokenization.Truncate.NONE), + tokenizer.tokenize("Godzilla day", Tokenization.Truncate.NONE), + tokenizer.tokenize("Godzilla Pancake red car day", Tokenization.Truncate.NONE) + ) + ); + assertThat(tr.getTokenizations(), hasSize(4)); + + TokenizationResult.Tokenization tokenization = tr.getTokenizations().get(0); + assertArrayEquals(new int[] { 0, 1 }, tokenization.getTokenIds()); + assertArrayEquals(new int[] { 0, 0 }, tokenization.getTokenMap()); + + tokenization = 
tr.getTokenizations().get(1); + assertArrayEquals(new int[] { 4, 5, 6, 7 }, tokenization.getTokenIds()); + assertArrayEquals(new int[] { 0, 1, 2, 3 }, tokenization.getTokenMap()); + + tokenization = tr.getTokenizations().get(2); + assertArrayEquals(new int[] { 8, 9, 16 }, tokenization.getTokenIds()); + assertArrayEquals(new int[] { 0, 0, 1 }, tokenization.getTokenMap()); + + tokenization = tr.getTokenizations().get(3); + assertArrayEquals(new int[] { 8, 9, 17, 6, 7, 16 }, tokenization.getTokenIds()); + assertArrayEquals(new int[] { 0, 0, 1, 2, 3, 4 }, tokenization.getTokenMap()); + } } public void testMultiSeqTokenization() { - BertTokenizer tokenizer = BertTokenizer.builder(TEST_CASED_VOCAB, Tokenization.createDefault()) - .setDoLowerCase(false) - .setWithSpecialTokens(true) - .build(); - TokenizationResult.Tokenization tokenization = tokenizer.tokenize( - "Elasticsearch is fun", - "Godzilla my little red car", - Tokenization.Truncate.NONE - ); - - var tokenStream = Arrays.stream(tokenization.getTokenIds()).mapToObj(TEST_CASED_VOCAB::get).collect(Collectors.toList()); - assertThat( - tokenStream, - contains( - BertTokenizer.CLASS_TOKEN, - "Elastic", - "##search", - "is", - "fun", - BertTokenizer.SEPARATOR_TOKEN, - "God", - "##zilla", - "my", - "little", - "red", - "car", - BertTokenizer.SEPARATOR_TOKEN - ) - ); - assertArrayEquals(new int[] { 12, 0, 1, 2, 3, 13, 8, 9, 4, 5, 6, 7, 13 }, tokenization.getTokenIds()); + try ( + BertTokenizer tokenizer = BertTokenizer.builder(TEST_CASED_VOCAB, Tokenization.createDefault()) + .setDoLowerCase(false) + .setWithSpecialTokens(true) + .build() + ) { + TokenizationResult.Tokenization tokenization = tokenizer.tokenize( + "Elasticsearch is fun", + "Godzilla my little red car", + Tokenization.Truncate.NONE + ); + + var tokenStream = Arrays.stream(tokenization.getTokenIds()).mapToObj(TEST_CASED_VOCAB::get).collect(Collectors.toList()); + assertThat( + tokenStream, + contains( + BertTokenizer.CLASS_TOKEN, + "Elastic", + "##search", + "is", + "fun", + BertTokenizer.SEPARATOR_TOKEN, + "God", + "##zilla", + "my", + "little", + "red", + "car", + BertTokenizer.SEPARATOR_TOKEN + ) + ); + assertArrayEquals(new int[] { 12, 0, 1, 2, 3, 13, 8, 9, 4, 5, 6, 7, 13 }, tokenization.getTokenIds()); + } } public void testTokenizeLargeInputMultiSequenceTruncation() { - BertTokenizer tokenizer = BertTokenizer.builder(TEST_CASED_VOCAB, new BertTokenization(null, true, 10, Tokenization.Truncate.FIRST)) - .build(); - - TokenizationResult.Tokenization tokenization = tokenizer.tokenize( - "Elasticsearch is fun", - "Godzilla my little red car", - Tokenization.Truncate.FIRST - ); - - var tokenStream = Arrays.stream(tokenization.getTokenIds()).mapToObj(TEST_CASED_VOCAB::get).collect(Collectors.toList()); - assertThat( - tokenStream, - contains( - BertTokenizer.CLASS_TOKEN, - "Elastic", - BertTokenizer.SEPARATOR_TOKEN, - "God", - "##zilla", - "my", - "little", - "red", - "car", - BertTokenizer.SEPARATOR_TOKEN - ) - ); - - expectThrows( - ElasticsearchStatusException.class, - () -> BertTokenizer.builder(TEST_CASED_VOCAB, new BertTokenization(null, true, 8, Tokenization.Truncate.NONE)) - .build() - .tokenize("Elasticsearch is fun", "Godzilla my little red car", Tokenization.Truncate.NONE) - ); - - tokenizer = BertTokenizer.builder(TEST_CASED_VOCAB, new BertTokenization(null, true, 10, Tokenization.Truncate.SECOND)).build(); - - tokenization = tokenizer.tokenize("Elasticsearch is fun", "Godzilla my little red car", Tokenization.Truncate.SECOND); - tokenStream = 
Arrays.stream(tokenization.getTokenIds()).mapToObj(TEST_CASED_VOCAB::get).collect(Collectors.toList()); - assertThat( - tokenStream, - contains( - BertTokenizer.CLASS_TOKEN, - "Elastic", - "##search", - "is", - "fun", - BertTokenizer.SEPARATOR_TOKEN, - "God", - "##zilla", - "my", - BertTokenizer.SEPARATOR_TOKEN - ) - ); + try ( + BertTokenizer tokenizer = BertTokenizer.builder( + TEST_CASED_VOCAB, + new BertTokenization(null, true, 10, Tokenization.Truncate.FIRST) + ).build() + ) { + + TokenizationResult.Tokenization tokenization = tokenizer.tokenize( + "Elasticsearch is fun", + "Godzilla my little red car", + Tokenization.Truncate.FIRST + ); + + var tokenStream = Arrays.stream(tokenization.getTokenIds()).mapToObj(TEST_CASED_VOCAB::get).collect(Collectors.toList()); + assertThat( + tokenStream, + contains( + BertTokenizer.CLASS_TOKEN, + "Elastic", + BertTokenizer.SEPARATOR_TOKEN, + "God", + "##zilla", + "my", + "little", + "red", + "car", + BertTokenizer.SEPARATOR_TOKEN + ) + ); + + expectThrows( + ElasticsearchStatusException.class, + () -> BertTokenizer.builder(TEST_CASED_VOCAB, new BertTokenization(null, true, 8, Tokenization.Truncate.NONE)) + .build() + .tokenize("Elasticsearch is fun", "Godzilla my little red car", Tokenization.Truncate.NONE) + ); + } + + try ( + BertTokenizer tokenizer = BertTokenizer.builder( + TEST_CASED_VOCAB, + new BertTokenization(null, true, 10, Tokenization.Truncate.SECOND) + ).build() + ) { + + TokenizationResult.Tokenization tokenization = tokenizer.tokenize( + "Elasticsearch is fun", + "Godzilla my little red car", + Tokenization.Truncate.SECOND + ); + var tokenStream = Arrays.stream(tokenization.getTokenIds()).mapToObj(TEST_CASED_VOCAB::get).collect(Collectors.toList()); + assertThat( + tokenStream, + contains( + BertTokenizer.CLASS_TOKEN, + "Elastic", + "##search", + "is", + "fun", + BertTokenizer.SEPARATOR_TOKEN, + "God", + "##zilla", + "my", + BertTokenizer.SEPARATOR_TOKEN + ) + ); + } } public void testMultiSeqRequiresSpecialTokens() { - BertTokenizer tokenizer = BertTokenizer.builder( - List.of("foo", BertTokenizer.UNKNOWN_TOKEN, BertTokenizer.PAD_TOKEN, BertTokenizer.CLASS_TOKEN, BertTokenizer.SEPARATOR_TOKEN), - Tokenization.createDefault() - ).setDoLowerCase(false).setWithSpecialTokens(false).build(); - expectThrows(Exception.class, () -> tokenizer.tokenize("foo", "foo", Tokenization.Truncate.NONE)); + try ( + BertTokenizer tokenizer = BertTokenizer.builder( + List.of( + "foo", + BertTokenizer.UNKNOWN_TOKEN, + BertTokenizer.PAD_TOKEN, + BertTokenizer.CLASS_TOKEN, + BertTokenizer.SEPARATOR_TOKEN + ), + Tokenization.createDefault() + ).setDoLowerCase(false).setWithSpecialTokens(false).build() + ) { + expectThrows(Exception.class, () -> tokenizer.tokenize("foo", "foo", Tokenization.Truncate.NONE)); + } } } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/TokenTrieNodeTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/CharSeqTokenTrieNodeTests.java similarity index 66% rename from x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/TokenTrieNodeTests.java rename to x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/CharSeqTokenTrieNodeTests.java index a96d557d36b50..b04cad380af15 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/TokenTrieNodeTests.java +++ 
b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/CharSeqTokenTrieNodeTests.java @@ -9,6 +9,7 @@ import org.elasticsearch.test.ESTestCase; +import java.io.IOException; import java.util.Arrays; import java.util.Collections; import java.util.List; @@ -17,15 +18,15 @@ import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.nullValue; -public class TokenTrieNodeTests extends ESTestCase { +public class CharSeqTokenTrieNodeTests extends ESTestCase { - public void testEmpty() { - TokenTrieNode root = TokenTrieNode.build(Collections.emptyList(), s -> Arrays.asList(s.split(":"))); + public void testEmpty() throws IOException { + CharSeqTokenTrieNode root = CharSeqTokenTrieNode.build(Collections.emptyList(), s -> Arrays.asList(s.split(":"))); assertThat(root.isLeaf(), is(true)); } - public void testTokensWithoutDelimiter() { - TokenTrieNode root = TokenTrieNode.build(List.of("a", "b", "c"), s -> Arrays.asList(s.split(":"))); + public void testTokensWithoutDelimiter() throws IOException { + CharSeqTokenTrieNode root = CharSeqTokenTrieNode.build(List.of("a", "b", "c"), s -> Arrays.asList(s.split(":"))); assertThat(root.isLeaf(), is(false)); assertThat(root.getChild("a").isLeaf(), is(true)); @@ -34,48 +35,51 @@ public void testTokensWithoutDelimiter() { assertThat(root.getChild("d"), is(nullValue())); } - public void testTokensWithDelimiter() { - TokenTrieNode root = TokenTrieNode.build(List.of("aa:bb:cc", "aa:bb:dd", "bb:aa:cc", "bb:bb:cc"), s -> Arrays.asList(s.split(":"))); + public void testTokensWithDelimiter() throws IOException { + CharSeqTokenTrieNode root = CharSeqTokenTrieNode.build( + List.of("aa:bb:cc", "aa:bb:dd", "bb:aa:cc", "bb:bb:cc"), + s -> Arrays.asList(s.split(":")) + ); assertThat(root.isLeaf(), is(false)); // Let's look at the aa branch first { - TokenTrieNode aaNode = root.getChild("aa"); + CharSeqTokenTrieNode aaNode = root.getChild("aa"); assertThat(aaNode, is(notNullValue())); assertThat(aaNode.isLeaf(), is(false)); assertThat(aaNode.getChild("zz"), is(nullValue())); - TokenTrieNode bbNode = aaNode.getChild("bb"); + CharSeqTokenTrieNode bbNode = aaNode.getChild("bb"); assertThat(bbNode, is(notNullValue())); assertThat(bbNode.isLeaf(), is(false)); assertThat(bbNode.getChild("zz"), is(nullValue())); - TokenTrieNode ccNode = bbNode.getChild("cc"); + CharSeqTokenTrieNode ccNode = bbNode.getChild("cc"); assertThat(ccNode, is(notNullValue())); assertThat(ccNode.isLeaf(), is(true)); assertThat(ccNode.getChild("zz"), is(nullValue())); - TokenTrieNode ddNode = bbNode.getChild("dd"); + CharSeqTokenTrieNode ddNode = bbNode.getChild("dd"); assertThat(ddNode, is(notNullValue())); assertThat(ddNode.isLeaf(), is(true)); assertThat(ddNode.getChild("zz"), is(nullValue())); } // Now the bb branch { - TokenTrieNode bbNode = root.getChild("bb"); + CharSeqTokenTrieNode bbNode = root.getChild("bb"); assertThat(bbNode, is(notNullValue())); assertThat(bbNode.isLeaf(), is(false)); assertThat(bbNode.getChild("zz"), is(nullValue())); - TokenTrieNode aaNode = bbNode.getChild("aa"); + CharSeqTokenTrieNode aaNode = bbNode.getChild("aa"); assertThat(aaNode, is(notNullValue())); assertThat(aaNode.isLeaf(), is(false)); assertThat(aaNode.getChild("zz"), is(nullValue())); - TokenTrieNode aaCcNode = aaNode.getChild("cc"); + CharSeqTokenTrieNode aaCcNode = aaNode.getChild("cc"); assertThat(aaCcNode, is(notNullValue())); assertThat(aaCcNode.isLeaf(), is(true)); assertThat(aaCcNode.getChild("zz"), is(nullValue())); - TokenTrieNode bbBbNode = 
bbNode.getChild("bb"); + CharSeqTokenTrieNode bbBbNode = bbNode.getChild("bb"); assertThat(bbBbNode, is(notNullValue())); assertThat(bbBbNode.isLeaf(), is(false)); assertThat(bbBbNode.getChild("zz"), is(nullValue())); - TokenTrieNode bbCcNode = bbBbNode.getChild("cc"); + CharSeqTokenTrieNode bbCcNode = bbBbNode.getChild("cc"); assertThat(bbCcNode, is(notNullValue())); assertThat(bbCcNode.isLeaf(), is(true)); assertThat(bbCcNode.getChild("zz"), is(nullValue())); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/ControlCharFilterTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/ControlCharFilterTests.java new file mode 100644 index 0000000000000..0735ccea569ba --- /dev/null +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/ControlCharFilterTests.java @@ -0,0 +1,78 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.ml.inference.nlp.tokenizers; + +import org.elasticsearch.test.ESTestCase; + +import java.io.CharArrayReader; +import java.io.IOException; + +import static org.hamcrest.Matchers.equalTo; + +public class ControlCharFilterTests extends ESTestCase { + + public void testOnlyControlChars() throws IOException { + ControlCharFilter controlCharFilter = new ControlCharFilter( + new CharArrayReader(new char[] { Character.FORMAT, Character.CONTROL, Character.CONTROL, Character.CONTROL }) + ); + char[] output = new char[10]; + assertThat(controlCharFilter.read(output, 0, 5), equalTo(-1)); + } + + public void testEmpty() throws IOException { + ControlCharFilter controlCharFilter = new ControlCharFilter(new CharArrayReader(new char[] {})); + char[] output = new char[10]; + assertThat(controlCharFilter.read(output, 0, 5), equalTo(-1)); + } + + public void testCorrect() throws IOException { + ControlCharFilter controlCharFilter = new ControlCharFilter( + new CharArrayReader( + new char[] { + Character.FORMAT, + Character.FORMAT, + 'a', + Character.FORMAT, + Character.FORMAT, + 'b', + 'b', + Character.CONTROL, + 'c', + 'c', + Character.CONTROL, + Character.CONTROL } + ) + ); + char[] output = new char[10]; + int read = controlCharFilter.read(output, 0, 10); + assertThat(read, equalTo(5)); + assertThat(new String(output, 0, read), equalTo("abbcc")); + } + + public void testCorrectForLongString() throws IOException { + char[] charArray = new char[2000]; + int i = 0; + for (; i < 1000; i++) { + charArray[i] = 'a'; + } + charArray[i++] = Character.CONTROL; + charArray[i++] = Character.CONTROL; + for (int j = 0; j < 997; j++) { + charArray[i++] = 'a'; + } + charArray[i] = Character.CONTROL; + ControlCharFilter controlCharFilter = new ControlCharFilter(new CharArrayReader(charArray)); + char[] output = new char[2000]; + int read = controlCharFilter.read(output); + assertThat(read, equalTo(1997)); + for (int j = 0; j < read; j++) { + assertEquals('a', output[j]); + } + } + +} diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/MPNetTokenizerTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/MPNetTokenizerTests.java index 43d292fecd4c5..8bec4be872ff0 100644 --- 
a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/MPNetTokenizerTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/MPNetTokenizerTests.java @@ -42,54 +42,59 @@ public class MPNetTokenizerTests extends ESTestCase { MPNetTokenizer.PAD_TOKEN ); - private List tokenStrings(List tokens) { - return tokens.stream().map(DelimitedToken::getToken).collect(Collectors.toList()); + private List tokenStrings(List tokens) { + return tokens.stream().map(WordPieceTokenFilter.WordPieceToken::toString).collect(Collectors.toList()); } public void testTokenize() { - BertTokenizer tokenizer = MPNetTokenizer.mpBuilder( - TEST_CASED_VOCAB, - new MPNetTokenization(null, false, null, Tokenization.Truncate.NONE) - ).build(); - - TokenizationResult.Tokenization tokenization = tokenizer.tokenize("Elasticsearch fun", Tokenization.Truncate.NONE); - assertThat(tokenStrings(tokenization.getTokens()), contains("Elasticsearch", "fun")); - assertArrayEquals(new int[] { 0, 1, 3 }, tokenization.getTokenIds()); - assertArrayEquals(new int[] { 0, 0, 1 }, tokenization.getTokenMap()); + try ( + BertTokenizer tokenizer = MPNetTokenizer.mpBuilder( + TEST_CASED_VOCAB, + new MPNetTokenization(null, false, null, Tokenization.Truncate.NONE) + ).build() + ) { + TokenizationResult.Tokenization tokenization = tokenizer.tokenize("Elasticsearch fun", Tokenization.Truncate.NONE); + assertThat(tokenStrings(tokenization.getTokens()), contains("Elastic", "##search", "fun")); + assertArrayEquals(new int[] { 0, 1, 3 }, tokenization.getTokenIds()); + assertArrayEquals(new int[] { 0, 0, 1 }, tokenization.getTokenMap()); + } } public void testMultiSeqTokenization() { - MPNetTokenizer tokenizer = MPNetTokenizer.mpBuilder( - TEST_CASED_VOCAB, - new MPNetTokenization(null, false, null, Tokenization.Truncate.NONE) - ).setDoLowerCase(false).setWithSpecialTokens(true).build(); - TokenizationResult.Tokenization tokenization = tokenizer.tokenize( - "Elasticsearch is fun", - "Godzilla my little red car", - Tokenization.Truncate.NONE - ); + try ( + MPNetTokenizer tokenizer = MPNetTokenizer.mpBuilder( + TEST_CASED_VOCAB, + new MPNetTokenization(null, false, null, Tokenization.Truncate.NONE) + ).setDoLowerCase(false).setWithSpecialTokens(true).build() + ) { + TokenizationResult.Tokenization tokenization = tokenizer.tokenize( + "Elasticsearch is fun", + "Godzilla my little red car", + Tokenization.Truncate.NONE + ); - var tokenStream = Arrays.stream(tokenization.getTokenIds()).mapToObj(TEST_CASED_VOCAB::get).collect(Collectors.toList()); - assertThat( - tokenStream, - contains( - MPNetTokenizer.CLASS_TOKEN, - "Elastic", - "##search", - "is", - "fun", - MPNetTokenizer.SEPARATOR_TOKEN, - MPNetTokenizer.SEPARATOR_TOKEN, - "God", - "##zilla", - "my", - "little", - "red", - "car", - MPNetTokenizer.SEPARATOR_TOKEN - ) - ); - assertArrayEquals(new int[] { 12, 0, 1, 2, 3, 13, 13, 8, 9, 4, 5, 6, 7, 13 }, tokenization.getTokenIds()); + var tokenStream = Arrays.stream(tokenization.getTokenIds()).mapToObj(TEST_CASED_VOCAB::get).collect(Collectors.toList()); + assertThat( + tokenStream, + contains( + MPNetTokenizer.CLASS_TOKEN, + "Elastic", + "##search", + "is", + "fun", + MPNetTokenizer.SEPARATOR_TOKEN, + MPNetTokenizer.SEPARATOR_TOKEN, + "God", + "##zilla", + "my", + "little", + "red", + "car", + MPNetTokenizer.SEPARATOR_TOKEN + ) + ); + assertArrayEquals(new int[] { 12, 0, 1, 2, 3, 13, 13, 8, 9, 4, 5, 6, 7, 13 }, tokenization.getTokenIds()); + } } } diff --git 
a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/MultiCharSequenceTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/MultiCharSequenceTests.java new file mode 100644 index 0000000000000..471cb55518b66 --- /dev/null +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/MultiCharSequenceTests.java @@ -0,0 +1,55 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.ml.inference.nlp.tokenizers; + +import org.elasticsearch.test.ESTestCase; + +import java.util.List; + +import static org.hamcrest.Matchers.equalTo; + +public class MultiCharSequenceTests extends ESTestCase { + + public void testMultiCharSequence() { + + { + CharSequence sequence = new MultiCharSequence(List.of("This is a test")); + assertThat(sequence.length(), equalTo("This is a test".length())); + assertThat(sequence.charAt(5), equalTo("This is a test".charAt(5))); + assertThat(sequence.subSequence(2, 7).toString(), equalTo("This is a test".subSequence(2, 7).toString())); + } + + { + CharSequence sequence = new MultiCharSequence(List.of("This is a test", "another string")); + assertThat(sequence.length(), equalTo("This is a test".length() + "another string".length())); + assertThat(sequence.charAt(5), equalTo("This is a test".charAt(5))); + assertThat(sequence.charAt(16), equalTo('o')); + assertThat(sequence.subSequence(2, 7).toString(), equalTo("This is a test".subSequence(2, 7).toString())); + assertThat(sequence.subSequence(14, 18).toString(), equalTo("anot")); + assertThat(sequence.subSequence(14, 28).toString(), equalTo("another string")); + assertThat(sequence.subSequence(13, 18).toString(), equalTo("tanot")); + assertThat(sequence.subSequence(13, 15).toString(), equalTo("ta")); + } + + { + CharSequence sequence = new MultiCharSequence(List.of("This is a test", "another string", "almost last")); + assertThat(sequence.length(), equalTo("This is a test".length() + "another string".length() + "almost last".length())); + assertThat(sequence.charAt(5), equalTo("This is a test".charAt(5))); + assertThat(sequence.charAt(16), equalTo('o')); + assertThat(sequence.subSequence(2, 7).toString(), equalTo("This is a test".subSequence(2, 7).toString())); + assertThat(sequence.subSequence(14, 18).toString(), equalTo("anot")); + assertThat(sequence.subSequence(14, 28).toString(), equalTo("another string")); + assertThat(sequence.subSequence(13, 18).toString(), equalTo("tanot")); + assertThat(sequence.subSequence(13, 15).toString(), equalTo("ta")); + assertThat(sequence.subSequence(2, 30).toString(), equalTo("is is a testanother stringal")); + assertThat(sequence.subSequence(30, 35).toString(), equalTo("most ")); + } + + } + +} diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/WordPieceTokenFilterTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/WordPieceTokenFilterTests.java new file mode 100644 index 0000000000000..c08602771de93 --- /dev/null +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/WordPieceTokenFilterTests.java @@ -0,0 +1,70 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. 
under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.ml.inference.nlp.tokenizers; + +import org.apache.lucene.analysis.Analyzer; +import org.apache.lucene.analysis.BaseTokenStreamTestCase; +import org.apache.lucene.analysis.core.WhitespaceTokenizer; + +import java.io.IOException; +import java.io.UncheckedIOException; +import java.util.List; + +public class WordPieceTokenFilterTests extends BaseTokenStreamTestCase { + + public static final String UNKNOWN_TOKEN = "[UNK]"; + + public void testTokenize() throws IOException { + List vocab = List.of(UNKNOWN_TOKEN, "[CLS]", "[SEP]", "want", "##want", "##ed", "wa", "un", "runn", "##ing"); + TestNLPAnalyzer analyzer = new TestNLPAnalyzer(vocab, UNKNOWN_TOKEN, 512); + + assertAnalyzesTo(analyzer, "", new String[0]); + assertAnalyzesTo(analyzer, "unwanted", new String[] { "un", "##want", "##ed" }, new int[] { 1, 0, 0 }); + assertAnalyzesTo(analyzer, "running", new String[] { "runn", "##ing" }, new int[] { 1, 0 }); + assertAnalyzesTo(analyzer, "unwantedX", new String[] { "[UNK]" }, new int[] { 1 }); + } + + public void testMaxCharLength() throws IOException { + List vocab = List.of(UNKNOWN_TOKEN, "[CLS]", "[SEP]", "want", "##want", "##ed", "wa", "un", "runn", "##ing", "become"); + TestNLPAnalyzer analyzer = new TestNLPAnalyzer(vocab, UNKNOWN_TOKEN, 4); + + assertAnalyzesTo(analyzer, "become", new String[] { UNKNOWN_TOKEN }, new int[] { 1 }); + } + + static class TestNLPAnalyzer extends Analyzer { + private final List dictionary; + private final String unknownToken; + private final int maxTokenSize; + + TestNLPAnalyzer(List dictionary, String unknownToken, int maxTokenSize) { + this.dictionary = dictionary; + this.unknownToken = unknownToken; + this.maxTokenSize = maxTokenSize; + } + + @Override + protected TokenStreamComponents createComponents(String fieldName) { + try { + WhitespaceTokenizer tokenizer = new WhitespaceTokenizer(512); + WordPieceTokenFilter filter = WordPieceTokenFilter.build( + false, + false, + false, + List.of(), + dictionary, + unknownToken, + maxTokenSize, + tokenizer + ); + return new TokenStreamComponents(tokenizer, filter); + } catch (IOException ex) { + throw new UncheckedIOException(ex); + } + } + } +} diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/WordPieceTokenizerTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/WordPieceTokenizerTests.java deleted file mode 100644 index c62df28007eef..0000000000000 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/WordPieceTokenizerTests.java +++ /dev/null @@ -1,67 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -package org.elasticsearch.xpack.ml.inference.nlp.tokenizers; - -import org.elasticsearch.test.ESTestCase; - -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.stream.Collectors; - -import static org.hamcrest.Matchers.contains; -import static org.hamcrest.Matchers.empty; - -public class WordPieceTokenizerTests extends ESTestCase { - - public static final String UNKNOWN_TOKEN = "[UNK]"; - - public void testTokenize() { - String[] vocab = { UNKNOWN_TOKEN, "[CLS]", "[SEP]", "want", "##want", "##ed", "wa", "un", "runn", "##ing" }; - Map vocabMap = createVocabMap(vocab); - - WordPieceTokenizer tokenizer = new WordPieceTokenizer(vocabMap, UNKNOWN_TOKEN, 100); - - var tokenIds = tokenizer.tokenize(new DelimitedToken(0, 0, "")); - assertThat(tokenIds, empty()); - - tokenIds = tokenizer.tokenize(makeToken("unwanted")); - List tokenStrings = tokenIds.stream().map(index -> vocab[index]).collect(Collectors.toList()); - assertThat(tokenStrings, contains("un", "##want", "##ed")); - - tokenIds = tokenizer.tokenize(makeToken("running")); - tokenStrings = tokenIds.stream().map(index -> vocab[index]).collect(Collectors.toList()); - assertThat(tokenStrings, contains("runn", "##ing")); - - tokenIds = tokenizer.tokenize(makeToken("unwantedX")); - tokenStrings = tokenIds.stream().map(index -> vocab[index]).collect(Collectors.toList()); - assertThat(tokenStrings, contains(UNKNOWN_TOKEN)); - } - - private DelimitedToken makeToken(String str) { - return new DelimitedToken(0, str.length(), str); - } - - public void testMaxCharLength() { - String[] vocab = { "Some", "words", "will", "become", "UNK" }; - Map vocabMap = createVocabMap(vocab); - - WordPieceTokenizer tokenizer = new WordPieceTokenizer(vocabMap, "UNK", 4); - var tokenIds = tokenizer.tokenize(new DelimitedToken(0, 0, "become")); - List tokenStrings = tokenIds.stream().map(index -> vocab[index]).collect(Collectors.toList()); - assertThat(tokenStrings, contains("UNK")); - } - - static Map createVocabMap(String... words) { - Map vocabMap = new HashMap<>(); - for (int i = 0; i < words.length; i++) { - vocabMap.put(words[i], i); - } - return vocabMap; - } -} From a2d82d2c2cba9bc87e2d86b2a54ed61aac262afe Mon Sep 17 00:00:00 2001 From: Benjamin Trent Date: Wed, 9 Feb 2022 13:41:42 -0500 Subject: [PATCH 020/167] Fixing bwc testing version for random_sampler aggregation (#83742) random_sampler aggregation is behind a feature flag and isn't bwc with 8.1. 
So, bumping 8.2 serialization version to disallow testing to 8.1 --- .../test/search.aggregation/450_random_sampler.yml | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/450_random_sampler.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/450_random_sampler.yml index 2699ca14dd4c6..ff2cf8fe8112f 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/450_random_sampler.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/450_random_sampler.yml @@ -32,8 +32,8 @@ setup: "Test random_sampler aggregation with no filter": - skip: features: close_to - version: " - 8.0.99" - reason: added in 8.1.0 + version: " - 8.1.99" + reason: added in 8.2.0 - do: search: index: data @@ -60,8 +60,8 @@ setup: --- "Test random_sampler aggregation with filter": - skip: - version: " - 8.0.99" - reason: added in 8.1.0 + version: " - 8.1.99" + reason: added in 8.2.0 - do: search: index: data @@ -124,8 +124,8 @@ setup: --- "Test random_sampler aggregation with poor settings": - skip: - version: " - 8.0.99" - reason: added in 8.1.0 + version: " - 8.1.99" + reason: added in 8.2.0 - do: catch: /\[probability\] must be between 0 and 1/ search: From 5366db90cc89994311343f2f79cf2301c2fba370 Mon Sep 17 00:00:00 2001 From: Benjamin Trent Date: Wed, 9 Feb 2022 13:46:28 -0500 Subject: [PATCH 021/167] Muting test SamplingContextTests#testScaling (#83749) muting test, relating to https://github.com/elastic/elasticsearch/issues/83748 --- .../search/aggregations/support/SamplingContextTests.java | 1 + 1 file changed, 1 insertion(+) diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/support/SamplingContextTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/support/SamplingContextTests.java index b5e761e9061ce..a6662873c425c 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/support/SamplingContextTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/support/SamplingContextTests.java @@ -23,6 +23,7 @@ private static SamplingContext randomContext() { return new SamplingContext(randomDoubleBetween(1e-8, 0.1, false), randomInt()); } + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/83748") public void testScaling() { for (int i = 0; i < 20; i++) { SamplingContext samplingContext = randomContext(); From 21533cc4308b48dd7fc4514e8ac10fbdefe50073 Mon Sep 17 00:00:00 2001 From: Nhat Nguyen Date: Wed, 9 Feb 2022 14:07:36 -0500 Subject: [PATCH 022/167] Speed up merging field-caps response (#83704) - Sort the index responses before merging to avoid sorting them for each field name - Track the number of searchable indices to avoid looping to find the number of non-searchable indices for each field name --- docs/changelog/83704.yaml | 5 + .../action/fieldcaps/FieldCapabilities.java | 116 ++++++++------ .../TransportFieldCapabilitiesAction.java | 28 ++-- .../fieldcaps/FieldCapabilitiesTests.java | 142 ++++++++++++++++++ 4 files changed, 231 insertions(+), 60 deletions(-) create mode 100644 docs/changelog/83704.yaml diff --git a/docs/changelog/83704.yaml b/docs/changelog/83704.yaml new file mode 100644 index 0000000000000..9138548d63fdf --- /dev/null +++ b/docs/changelog/83704.yaml @@ -0,0 +1,5 @@ +pr: 83704 +summary: Speed up merging field-caps response +area: Search +type: enhancement +issues: [] diff --git 
a/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilities.java b/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilities.java index 3ee3f88aad75e..98ecb98791194 100644 --- a/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilities.java +++ b/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilities.java @@ -25,6 +25,7 @@ import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; +import java.util.Collection; import java.util.Collections; import java.util.Comparator; import java.util.HashMap; @@ -476,22 +477,19 @@ static class Builder { private final String name; private final String type; private boolean isMetadataField; - private boolean isSearchable; - private boolean isAggregatable; - private boolean isDimension; + private int searchableIndices = 0; + private int aggregatableIndices = 0; + private int dimensionIndices = 0; private TimeSeriesParams.MetricType metricType; - private boolean metricTypeIsSet; - private List indiceList; - private Map> meta; + private boolean hasConflictMetricType; + private final List indiceList; + private final Map> meta; Builder(String name, String type) { this.name = name; this.type = type; - this.isSearchable = true; - this.isAggregatable = true; - this.isDimension = true; this.metricType = null; - this.metricTypeIsSet = false; + this.hasConflictMetricType = false; this.indiceList = new ArrayList<>(); this.meta = new HashMap<>(); } @@ -508,81 +506,101 @@ void add( TimeSeriesParams.MetricType metricType, Map meta ) { - IndexCaps indexCaps = new IndexCaps(index, search, agg, isDimension, metricType); - indiceList.add(indexCaps); - this.isSearchable &= search; - this.isAggregatable &= agg; + assert indiceList.isEmpty() || indiceList.get(indiceList.size() - 1).name.compareTo(index) < 0 + : "indices aren't sorted; previous [" + indiceList.get(indiceList.size() - 1).name + "], current [" + index + "]"; + if (search) { + searchableIndices++; + } + if (agg) { + aggregatableIndices++; + } + if (isDimension) { + dimensionIndices++; + } this.isMetadataField |= isMetadataField; - this.isDimension &= isDimension; // If we have discrepancy in metric types or in some indices this field is not marked as a metric field - we will // treat is a non-metric field and report this discrepancy in metricConflictsIndices - if (this.metricTypeIsSet) { - if (this.metricType != metricType) { - this.metricType = null; - } - } else { - this.metricTypeIsSet = true; + if (indiceList.isEmpty()) { this.metricType = metricType; + } else if (this.metricType != metricType) { + hasConflictMetricType = true; + this.metricType = null; } + IndexCaps indexCaps = new IndexCaps(index, search, agg, isDimension, metricType); + indiceList.add(indexCaps); for (Map.Entry entry : meta.entrySet()) { this.meta.computeIfAbsent(entry.getKey(), key -> new HashSet<>()).add(entry.getValue()); } } - List getIndices() { - return indiceList.stream().map(c -> c.name).collect(Collectors.toList()); + void getIndices(Collection indices) { + indiceList.forEach(cap -> indices.add(cap.name)); } FieldCapabilities build(boolean withIndices) { final String[] indices; - Collections.sort(indiceList, Comparator.comparing(o -> o.name)); if (withIndices) { indices = indiceList.stream().map(caps -> caps.name).toArray(String[]::new); } else { indices = null; } + // Iff this field is searchable in some indices AND non-searchable in others + // we record the list of non-searchable indices + final boolean isSearchable = searchableIndices == 
indiceList.size(); final String[] nonSearchableIndices; - if (isSearchable == false && indiceList.stream().anyMatch((caps) -> caps.isSearchable)) { - // Iff this field is searchable in some indices AND non-searchable in others - // we record the list of non-searchable indices - nonSearchableIndices = indiceList.stream() - .filter((caps) -> caps.isSearchable == false) - .map(caps -> caps.name) - .toArray(String[]::new); - } else { + if (isSearchable || searchableIndices == 0) { nonSearchableIndices = null; + } else { + nonSearchableIndices = new String[indiceList.size() - searchableIndices]; + int index = 0; + for (IndexCaps indexCaps : indiceList) { + if (indexCaps.isSearchable == false) { + nonSearchableIndices[index++] = indexCaps.name; + } + } } + // Iff this field is aggregatable in some indices AND non-aggregatable in others + // we keep the list of non-aggregatable indices + final boolean isAggregatable = aggregatableIndices == indiceList.size(); final String[] nonAggregatableIndices; - if (isAggregatable == false && indiceList.stream().anyMatch((caps) -> caps.isAggregatable)) { - // Iff this field is aggregatable in some indices AND non-searchable in others - // we keep the list of non-aggregatable indices - nonAggregatableIndices = indiceList.stream() - .filter((caps) -> caps.isAggregatable == false) - .map(caps -> caps.name) - .toArray(String[]::new); - } else { + if (isAggregatable || aggregatableIndices == 0) { nonAggregatableIndices = null; + } else { + nonAggregatableIndices = new String[indiceList.size() - aggregatableIndices]; + int index = 0; + for (IndexCaps indexCaps : indiceList) { + if (indexCaps.isAggregatable == false) { + nonAggregatableIndices[index++] = indexCaps.name; + } + } } + // Collect all indices that have dimension == false if this field is marked as a dimension in at least one index + final boolean isDimension = dimensionIndices == indiceList.size(); final String[] nonDimensionIndices; - if (isDimension == false && indiceList.stream().anyMatch((caps) -> caps.isDimension)) { - // Collect all indices that have dimension == false if this field is marked as a dimension in at least one index - nonDimensionIndices = indiceList.stream() - .filter((caps) -> caps.isDimension == false) - .map(caps -> caps.name) - .toArray(String[]::new); - } else { + if (isDimension || dimensionIndices == 0) { nonDimensionIndices = null; + } else { + nonDimensionIndices = new String[indiceList.size() - dimensionIndices]; + int index = 0; + for (IndexCaps indexCaps : indiceList) { + if (indexCaps.isDimension == false) { + nonDimensionIndices[index++] = indexCaps.name; + } + } } final String[] metricConflictsIndices; - if (indiceList.stream().anyMatch((caps) -> caps.metricType != metricType)) { + if (hasConflictMetricType) { // Collect all indices that have this field. 
If it is marked differently in different indices, we cannot really // make a decisions which index is "right" and which index is "wrong" so collecting all indices where this field // is present is probably the only sensible thing to do here - metricConflictsIndices = indiceList.stream().map(caps -> caps.name).toArray(String[]::new); + metricConflictsIndices = Objects.requireNonNullElseGet( + indices, + () -> indiceList.stream().map(caps -> caps.name).toArray(String[]::new) + ); } else { metricConflictsIndices = null; } diff --git a/server/src/main/java/org/elasticsearch/action/fieldcaps/TransportFieldCapabilitiesAction.java b/server/src/main/java/org/elasticsearch/action/fieldcaps/TransportFieldCapabilitiesAction.java index 71e4e19c4de1f..16413339d719d 100644 --- a/server/src/main/java/org/elasticsearch/action/fieldcaps/TransportFieldCapabilitiesAction.java +++ b/server/src/main/java/org/elasticsearch/action/fieldcaps/TransportFieldCapabilitiesAction.java @@ -36,8 +36,8 @@ import org.elasticsearch.transport.TransportService; import java.util.ArrayList; -import java.util.Arrays; import java.util.Collections; +import java.util.Comparator; import java.util.HashMap; import java.util.HashSet; import java.util.List; @@ -219,13 +219,17 @@ private static FieldCapabilitiesRequest prepareRemoteRequest( } private FieldCapabilitiesResponse merge( - Map indexResponses, + Map indexResponsesMap, boolean includeUnmapped, List failures ) { - String[] indices = indexResponses.keySet().stream().sorted().toArray(String[]::new); + final List indexResponses = indexResponsesMap.values() + .stream() + .sorted(Comparator.comparing(FieldCapabilitiesIndexResponse::getIndexName)) + .toList(); + final String[] indices = indexResponses.stream().map(FieldCapabilitiesIndexResponse::getIndexName).toArray(String[]::new); final Map> responseMapBuilder = new HashMap<>(); - for (FieldCapabilitiesIndexResponse response : indexResponses.values()) { + for (FieldCapabilitiesIndexResponse response : indexResponses) { innerMerge(responseMapBuilder, response); } final Map> responseMap = new HashMap<>(); @@ -245,14 +249,16 @@ private FieldCapabilitiesResponse merge( } private void addUnmappedFields(String[] indices, String field, Map typeMap) { - Set unmappedIndices = new HashSet<>(Arrays.asList(indices)); - typeMap.values().forEach((b) -> b.getIndices().forEach(unmappedIndices::remove)); - if (unmappedIndices.isEmpty() == false) { - FieldCapabilities.Builder unmapped = new FieldCapabilities.Builder(field, "unmapped"); - typeMap.put("unmapped", unmapped); - for (String index : unmappedIndices) { - unmapped.add(index, false, false, false, false, null, Collections.emptyMap()); + final Set mappedIndices = new HashSet<>(); + typeMap.values().forEach(t -> t.getIndices(mappedIndices)); + if (mappedIndices.size() != indices.length) { + final FieldCapabilities.Builder unmapped = new FieldCapabilities.Builder(field, "unmapped"); + for (String index : indices) { + if (mappedIndices.contains(index) == false) { + unmapped.add(index, false, false, false, false, null, Collections.emptyMap()); + } } + typeMap.put("unmapped", unmapped); } } diff --git a/server/src/test/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesTests.java b/server/src/test/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesTests.java index 92d97085a251b..549c643f5c2bc 100644 --- a/server/src/test/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesTests.java +++ b/server/src/test/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesTests.java @@ -9,6 
+9,8 @@ package org.elasticsearch.action.fieldcaps; import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.util.iterable.Iterables; +import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.index.mapper.TimeSeriesParams; import org.elasticsearch.test.AbstractSerializingTestCase; import org.elasticsearch.xcontent.XContentParser; @@ -16,9 +18,15 @@ import java.io.IOException; import java.util.Arrays; import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Locale; import java.util.Map; import java.util.Set; +import java.util.stream.IntStream; +import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; public class FieldCapabilitiesTests extends AbstractSerializingTestCase { @@ -158,6 +166,140 @@ public void testBuilder() { } } + public void testRandomBuilder() { + List indices = IntStream.range(0, randomIntBetween(1, 50)) + .mapToObj(n -> String.format(Locale.ROOT, "index_%2d", n)) + .toList(); + Set searchableIndices = new HashSet<>(randomSubsetOf(indices)); + Set aggregatableIndices = new HashSet<>(randomSubsetOf(indices)); + Set dimensionIndices = new HashSet<>(randomSubsetOf(indices)); + FieldCapabilities.Builder builder = new FieldCapabilities.Builder("field", "type"); + for (String index : indices) { + builder.add( + index, + randomBoolean(), + searchableIndices.contains(index), + aggregatableIndices.contains(index), + dimensionIndices.contains(index), + null, + Map.of() + ); + } + FieldCapabilities fieldCaps = builder.build(randomBoolean()); + // search + if (searchableIndices.isEmpty()) { + assertFalse(fieldCaps.isSearchable()); + assertNull(fieldCaps.nonSearchableIndices()); + } else if (searchableIndices.size() == indices.size()) { + assertTrue(fieldCaps.isSearchable()); + assertNull(fieldCaps.nonSearchableIndices()); + } else { + assertFalse(fieldCaps.isSearchable()); + assertThat( + Sets.newHashSet(fieldCaps.nonSearchableIndices()), + equalTo(Sets.difference(Sets.newHashSet(indices), searchableIndices)) + ); + } + // aggregate + if (aggregatableIndices.isEmpty()) { + assertFalse(fieldCaps.isAggregatable()); + assertNull(fieldCaps.nonAggregatableIndices()); + } else if (aggregatableIndices.size() == indices.size()) { + assertTrue(fieldCaps.isAggregatable()); + assertNull(fieldCaps.nonAggregatableIndices()); + } else { + assertFalse(fieldCaps.isAggregatable()); + assertThat( + Sets.newHashSet(fieldCaps.nonAggregatableIndices()), + equalTo(Sets.difference(Sets.newHashSet(indices), aggregatableIndices)) + ); + } + // dimension + if (dimensionIndices.isEmpty()) { + assertFalse(fieldCaps.isDimension()); + assertNull(fieldCaps.nonDimensionIndices()); + } else if (dimensionIndices.size() == indices.size()) { + assertTrue(fieldCaps.isDimension()); + assertNull(fieldCaps.nonDimensionIndices()); + } else { + assertFalse(fieldCaps.isDimension()); + assertThat( + Sets.newHashSet(fieldCaps.nonDimensionIndices()), + equalTo(Sets.difference(Sets.newHashSet(indices), dimensionIndices)) + ); + } + } + + public void testBuilderSingleMetricType() { + List indices = IntStream.range(0, randomIntBetween(1, 50)) + .mapToObj(n -> String.format(Locale.ROOT, "index_%2d", n)) + .toList(); + TimeSeriesParams.MetricType metric = randomBoolean() ? 
null : randomFrom(TimeSeriesParams.MetricType.values()); + FieldCapabilities.Builder builder = new FieldCapabilities.Builder("field", "type"); + for (String index : indices) { + builder.add(index, randomBoolean(), randomBoolean(), randomBoolean(), randomBoolean(), metric, Map.of()); + } + FieldCapabilities fieldCaps = builder.build(randomBoolean()); + assertThat(fieldCaps.getMetricType(), equalTo(metric)); + assertNull(fieldCaps.metricConflictsIndices()); + } + + public void testBuilderMixedMetricType() { + List indices = IntStream.range(0, randomIntBetween(1, 50)) + .mapToObj(n -> String.format(Locale.ROOT, "index_%2d", n)) + .toList(); + Map metricTypes = new HashMap<>(); + for (String index : indices) { + if (randomBoolean()) { + metricTypes.put(index, randomFrom(TimeSeriesParams.MetricType.values())); + } + } + FieldCapabilities.Builder builder = new FieldCapabilities.Builder("field", "type"); + for (String index : indices) { + builder.add(index, randomBoolean(), randomBoolean(), randomBoolean(), randomBoolean(), metricTypes.get(index), Map.of()); + } + FieldCapabilities fieldCaps = builder.build(randomBoolean()); + if (metricTypes.isEmpty()) { + assertNull(fieldCaps.getMetricType()); + assertNull(fieldCaps.metricConflictsIndices()); + } else if (metricTypes.size() == indices.size() && metricTypes.values().size() == 1) { + assertThat(fieldCaps.getMetricType(), equalTo(Iterables.get(metricTypes.values(), 0))); + assertNull(fieldCaps.metricConflictsIndices()); + } else { + assertNull(fieldCaps.getMetricType()); + assertThat(fieldCaps.metricConflictsIndices(), equalTo(indices.toArray(String[]::new))); + } + } + + public void testOutOfOrderIndices() { + FieldCapabilities.Builder builder = new FieldCapabilities.Builder("field", "type"); + int numIndex = randomIntBetween(1, 5); + for (int i = 1; i <= numIndex; i++) { + builder.add( + "index-" + i, + randomBoolean(), + randomBoolean(), + randomBoolean(), + randomBoolean(), + randomFrom(TimeSeriesParams.MetricType.values()), + Map.of() + ); + } + final String outOfOrderIndex = randomBoolean() ? "abc" : "index-" + randomIntBetween(1, numIndex); + AssertionError error = expectThrows(AssertionError.class, () -> { + builder.add( + outOfOrderIndex, + randomBoolean(), + randomBoolean(), + randomBoolean(), + randomBoolean(), + randomFrom(TimeSeriesParams.MetricType.values()), + Map.of() + ); + }); + assertThat(error.getMessage(), containsString("indices aren't sorted")); + } + static FieldCapabilities randomFieldCaps(String fieldName) { String[] indices = null; if (randomBoolean()) { From 4e28da43b9c26a1dfea5ddc28294b0ed69728dbf Mon Sep 17 00:00:00 2001 From: Julie Tibshirani Date: Wed, 9 Feb 2022 11:20:14 -0800 Subject: [PATCH 023/167] Avoid eagerly loading StoredFieldsReader in fetch phase (#83693) Every time we create a hit document, we create a new SourceLookup and call setSegmentAndDocument. This in turn creates a new StoredFieldsReader, which is pretty expensive. In scenarios where you are retrieving a lot of hits, this can add significant overhead. Prior to version 7.11, we did not create a new SourceLookup per hit, so this is a performance regression. This PR updates setSegmentAndDocument to avoid eagerly creating a new StoredFieldsReader (through StoredFieldsReader#getMergeInstance). 
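Looking back at the merge optimization in #83704 above, the counting idea is easy to see in miniature. The sketch below is illustrative only (the SearchabilityTracker name and shape are assumptions, not code from that patch): searchable entries are counted as they are added, so the uniform all-searchable and none-searchable cases return without a second pass, and the entries are re-walked only in the mixed case.

    import java.util.ArrayList;
    import java.util.List;

    // Hypothetical sketch of the counting trick from #83704; not patch code.
    class SearchabilityTracker {
        private final List<String> indices = new ArrayList<>();
        private final List<Boolean> searchable = new ArrayList<>();
        private int searchableCount = 0;

        void add(String index, boolean isSearchable) {
            indices.add(index);
            searchable.add(isSearchable);
            if (isSearchable) {
                searchableCount++;
            }
        }

        // Null mirrors the field-caps convention of "no exceptions to report".
        String[] nonSearchableIndices() {
            if (searchableCount == 0 || searchableCount == indices.size()) {
                return null; // uniformly non-searchable or uniformly searchable
            }
            String[] result = new String[indices.size() - searchableCount];
            int out = 0;
            for (int i = 0; i < indices.size(); i++) {
                if (searchable.get(i) == false) {
                    result[out++] = indices.get(i);
                }
            }
            return result;
        }
    }

The real change keeps one such counter per flag (searchable, aggregatable, dimension), replacing one stream filter per field with a single conditional.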
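For #83693 itself, here is a minimal standalone sketch of the lazy-initialization pattern the fix relies on (the class and method names are illustrative, not the patch's code): the wrapper runs the expensive factory once, on the first get(), and caches the result, so a fetch that never visits stored fields never constructs a reader. Like the MemoizedSupplier the patch adds below, it assumes single-threaded use and is not thread-safe.

    import java.util.function.Supplier;

    // Illustrative demo, not the patch's MemoizedSupplier.
    public class LazyReaderDemo {
        // Memoizing supplier: defer the expensive call until first use, then cache it.
        static final class Memoized<T> implements Supplier<T> {
            private Supplier<T> factory;
            private T value;

            Memoized(Supplier<T> factory) {
                this.factory = factory;
            }

            @Override
            public T get() {
                if (factory != null) {
                    value = factory.get(); // runs exactly once
                    factory = null;        // drop the factory so it can be collected
                }
                return value;
            }
        }

        public static void main(String[] args) {
            Supplier<String> reader = new Memoized<>(() -> {
                System.out.println("expensive stored-fields reader created");
                return "reader";
            });
            // Nothing created yet: the cost is deferred until first use.
            System.out.println(reader.get()); // creation message, then "reader"
            System.out.println(reader.get()); // cached: no second creation message
        }
    }

In the patched setSegmentAndDocument, the supplier wraps getSequentialStoredFieldsReader and the field reader only calls get() inside its lambda, which moves the cost from segment setup to the first actual stored-fields visit.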
--- docs/changelog/83693.yaml | 6 ++ .../elasticsearch/core/MemoizedSupplier.java | 29 +++++++ .../search/lookup/SourceLookup.java | 23 ++++-- .../search/lookup/SourceLookupTests.java | 78 +++++++++++++++++++ 4 files changed, 128 insertions(+), 8 deletions(-) create mode 100644 docs/changelog/83693.yaml create mode 100644 libs/core/src/main/java/org/elasticsearch/core/MemoizedSupplier.java create mode 100644 server/src/test/java/org/elasticsearch/search/lookup/SourceLookupTests.java diff --git a/docs/changelog/83693.yaml b/docs/changelog/83693.yaml new file mode 100644 index 0000000000000..f6b64ea3b2609 --- /dev/null +++ b/docs/changelog/83693.yaml @@ -0,0 +1,6 @@ +pr: 83693 +summary: Avoid eagerly loading `StoredFieldsReader` in fetch phase +area: Search +type: bug +issues: + - 82777 diff --git a/libs/core/src/main/java/org/elasticsearch/core/MemoizedSupplier.java b/libs/core/src/main/java/org/elasticsearch/core/MemoizedSupplier.java new file mode 100644 index 0000000000000..5167c5759567c --- /dev/null +++ b/libs/core/src/main/java/org/elasticsearch/core/MemoizedSupplier.java @@ -0,0 +1,29 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.core; + +import java.util.function.Supplier; + +public class MemoizedSupplier implements Supplier { + private Supplier supplier; + private T value; + + public MemoizedSupplier(Supplier supplier) { + this.supplier = supplier; + } + + @Override + public T get() { + if (supplier != null) { + value = supplier.get(); + supplier = null; + } + return value; + } +} diff --git a/server/src/main/java/org/elasticsearch/search/lookup/SourceLookup.java b/server/src/main/java/org/elasticsearch/search/lookup/SourceLookup.java index c80276696c63f..7b3eb0120c271 100644 --- a/server/src/main/java/org/elasticsearch/search/lookup/SourceLookup.java +++ b/server/src/main/java/org/elasticsearch/search/lookup/SourceLookup.java @@ -7,6 +7,7 @@ */ package org.elasticsearch.search.lookup; +import org.apache.lucene.codecs.StoredFieldsReader; import org.apache.lucene.index.LeafReader; import org.apache.lucene.index.LeafReaderContext; import org.elasticsearch.ElasticsearchParseException; @@ -15,6 +16,7 @@ import org.elasticsearch.common.lucene.index.SequentialStoredFieldsLeafReader; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.support.XContentMapValues; +import org.elasticsearch.core.MemoizedSupplier; import org.elasticsearch.core.Nullable; import org.elasticsearch.core.Tuple; import org.elasticsearch.index.fieldvisitor.FieldsVisitor; @@ -26,13 +28,14 @@ import java.util.List; import java.util.Map; import java.util.Set; +import java.util.function.Supplier; import static java.util.Collections.emptyMap; public class SourceLookup implements Map { private LeafReader reader; - CheckedBiConsumer fieldReader; + private CheckedBiConsumer fieldReader; private int docId = -1; @@ -104,19 +107,23 @@ public static Map sourceAsMap(BytesReference source) throws Elas } public void setSegmentAndDocument(LeafReaderContext context, int docId) { + // if we are called with the same document, don't invalidate source if (this.reader == context.reader() && this.docId == docId) { - // if we are called with the same 
document, don't invalidate source return; } + + // only reset reader and fieldReader when reader changes if (this.reader != context.reader()) { this.reader = context.reader(); - // only reset reader and fieldReader when reader changes + + // All the docs to fetch are adjacent but Lucene stored fields are optimized + // for random access and don't optimize for sequential access - except for merging. + // So we do a little hack here and pretend we're going to do merges in order to + // get better sequential access. if (context.reader()instanceof SequentialStoredFieldsLeafReader lf) { - // All the docs to fetch are adjacent but Lucene stored fields are optimized - // for random access and don't optimize for sequential access - except for merging. - // So we do a little hack here and pretend we're going to do merges in order to - // get better sequential access. - fieldReader = lf.getSequentialStoredFieldsReader()::visitDocument; + // Avoid eagerly loading the stored fields reader, since this can be expensive + Supplier supplier = new MemoizedSupplier<>(lf::getSequentialStoredFieldsReader); + fieldReader = (d, v) -> supplier.get().visitDocument(d, v); } else { fieldReader = context.reader()::document; } diff --git a/server/src/test/java/org/elasticsearch/search/lookup/SourceLookupTests.java b/server/src/test/java/org/elasticsearch/search/lookup/SourceLookupTests.java new file mode 100644 index 0000000000000..21ec0d4584f91 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/search/lookup/SourceLookupTests.java @@ -0,0 +1,78 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.search.lookup; + +import org.apache.lucene.codecs.StoredFieldsReader; +import org.apache.lucene.document.Document; +import org.apache.lucene.document.Field; +import org.apache.lucene.document.StringField; +import org.apache.lucene.index.IndexReader; +import org.apache.lucene.index.LeafReader; +import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.index.RandomIndexWriter; +import org.apache.lucene.store.Directory; +import org.elasticsearch.ElasticsearchParseException; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.lucene.index.SequentialStoredFieldsLeafReader; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xcontent.XContentFactory; + +import java.io.IOException; + +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.instanceOf; + +public class SourceLookupTests extends ESTestCase { + + public void testSetSegmentAndDocument() throws IOException { + try (Directory dir = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), dir)) { + Document doc = new Document(); + doc.add(new StringField("field", "value", Field.Store.YES)); + iw.addDocument(doc); + + try (IndexReader reader = iw.getReader()) { + LeafReaderContext readerContext = reader.leaves().get(0); + + SourceLookup sourceLookup = new SourceLookup(); + sourceLookup.setSegmentAndDocument(readerContext, 42); + sourceLookup.setSource( + BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("field", "value").endObject()) + ); + assertNotNull(sourceLookup.internalSourceRef()); + + // Source should be preserved if we pass in the same reader and document + sourceLookup.setSegmentAndDocument(readerContext, 42); + assertNotNull(sourceLookup.internalSourceRef()); + + // Check that the stored fields reader is not loaded eagerly + LeafReader throwingReader = new SequentialStoredFieldsLeafReader(readerContext.reader()) { + @Override + protected StoredFieldsReader doGetSequentialStoredFieldsReader(StoredFieldsReader reader) { + throw new UnsupportedOperationException("attempted to load stored fields reader"); + } + + @Override + public CacheHelper getReaderCacheHelper() { + return in.getReaderCacheHelper(); + } + + @Override + public CacheHelper getCoreCacheHelper() { + return in.getCoreCacheHelper(); + } + }; + + sourceLookup.setSegmentAndDocument(throwingReader.getContext(), 0); + ElasticsearchParseException e = expectThrows(ElasticsearchParseException.class, sourceLookup::source); + assertThat(e.getCause(), instanceOf(UnsupportedOperationException.class)); + assertThat(e.getCause().getMessage(), containsString("attempted to load stored fields reader")); + } + } + } +} From 104efd434326a0ed2adcf76d2aa987451b862b09 Mon Sep 17 00:00:00 2001 From: Lisa Cawley Date: Wed, 9 Feb 2022 13:44:13 -0800 Subject: [PATCH 024/167] [DOCS] Minor edits to trained model APIs (#81549) --- .../ml/trained-models/apis/put-trained-models.asciidoc | 3 +-- .../apis/stop-trained-model-deployment.asciidoc | 4 ++-- 2 files changed, 3 insertions(+), 4 deletions(-) diff --git a/docs/reference/ml/trained-models/apis/put-trained-models.asciidoc b/docs/reference/ml/trained-models/apis/put-trained-models.asciidoc index 9ea0d2220d349..1363e179f6e1f 100644 --- a/docs/reference/ml/trained-models/apis/put-trained-models.asciidoc +++ b/docs/reference/ml/trained-models/apis/put-trained-models.asciidoc @@ -33,7 +33,6 @@ Requires the `manage_ml` cluster privilege. 
This privilege is included in the `machine_learning_admin` built-in role. The create trained model API enables you to supply a trained model that is not created by {dfanalytics}. - [[ml-put-trained-models-path-params]] == {api-path-parms-title} @@ -893,7 +892,7 @@ Appropriate types are: * `tree_ensemble`: The model definition is an ensemble model of decision trees. * `lang_ident`: A special type reserved for language identification models. * `pytorch`: The stored definition is a PyTorch (specifically a TorchScript) model. Currently only -NLP models are supported. +NLP models are supported. For more information, refer to {ml-docs}/ml-nlp.html[{nlp-cap}]. -- `tags`:: diff --git a/docs/reference/ml/trained-models/apis/stop-trained-model-deployment.asciidoc b/docs/reference/ml/trained-models/apis/stop-trained-model-deployment.asciidoc index c3a17da0c5322..b2310a35b1f06 100644 --- a/docs/reference/ml/trained-models/apis/stop-trained-model-deployment.asciidoc +++ b/docs/reference/ml/trained-models/apis/stop-trained-model-deployment.asciidoc @@ -21,10 +21,10 @@ Stops a trained model deployment. Requires the `manage_ml` cluster privilege. This privilege is included in the `machine_learning_admin` built-in role. -//// [[stop-trained-model-deployment-desc]] == {api-description-title} -//// + +Deployment is required only for trained models that have a PyTorch `model_type`. [[stop-trained-model-deployment-path-params]] == {api-path-parms-title} From 3d2c33307c608ee4ea016c1824561e75da4a6b81 Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Wed, 9 Feb 2022 17:34:54 -0500 Subject: [PATCH 025/167] TSDB: unicode dimensions (#83681) This fixes dimensions with unicode characters in their name. It also makes the sort order of these dimensions something we can explain: convert all dimensions to UTF-8 and sort by that. --- .../index/mapper/LuceneDocument.java | 9 +++--- .../index/mapper/TimeSeriesIdFieldMapper.java | 28 +++++++++++-------- .../elasticsearch/search/DocValueFormat.java | 4 +-- .../mapper/TimeSeriesIdFieldMapperTests.java | 19 +++++++++++++ .../timeseries/TimeSeriesAggregatorTests.java | 5 ++-- 5 files changed, 45 insertions(+), 20 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/index/mapper/LuceneDocument.java b/server/src/main/java/org/elasticsearch/index/mapper/LuceneDocument.java index 3cb2b030ebeff..de3c08653ff0b 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/LuceneDocument.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/LuceneDocument.java @@ -36,7 +36,7 @@ public class LuceneDocument implements Iterable { * for generating the _tsid field. The map will be used by {@link TimeSeriesIdFieldMapper} * to build the _tsid field for the document. */ - private SortedMap dimensionBytes; + private SortedMap dimensionBytes; LuceneDocument(String path, LuceneDocument parent) { fields = new ArrayList<>(); @@ -114,16 +114,17 @@ public IndexableField getByKey(Object key) { * to build the _tsid field for the document. */ public void addDimensionBytes(String fieldName, BytesReference tsidBytes) { + BytesRef fieldNameBytes = new BytesRef(fieldName); if (dimensionBytes == null) { // It is a {@link TreeMap} so that it is order by field name.
dimensionBytes = new TreeMap<>(); - } else if (dimensionBytes.containsKey(fieldName)) { + } else if (dimensionBytes.containsKey(fieldNameBytes)) { throw new IllegalArgumentException("Dimension field [" + fieldName + "] cannot be a multi-valued field."); } - dimensionBytes.put(fieldName, tsidBytes); + dimensionBytes.put(fieldNameBytes, tsidBytes); } - public SortedMap getDimensionBytes() { + public SortedMap getDimensionBytes() { if (dimensionBytes == null) { return Collections.emptySortedMap(); } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/TimeSeriesIdFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/TimeSeriesIdFieldMapper.java index 160b3732d03d3..4ccf0e28f734d 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/TimeSeriesIdFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/TimeSeriesIdFieldMapper.java @@ -33,6 +33,7 @@ import java.util.Collections; import java.util.LinkedHashMap; import java.util.List; +import java.util.Locale; import java.util.Map; import java.util.SortedMap; import java.util.function.Supplier; @@ -141,12 +142,12 @@ public void postParse(DocumentParserContext context) throws IOException { assert fieldType().isIndexed() == false; // SortedMap is expected to be sorted by key (field name) - SortedMap dimensionFields = context.doc().getDimensionBytes(); + SortedMap dimensionFields = context.doc().getDimensionBytes(); BytesReference timeSeriesId = buildTsidField(dimensionFields); context.doc().add(new SortedDocValuesField(fieldType().name(), timeSeriesId.toBytesRef())); } - public static BytesReference buildTsidField(SortedMap dimensionFields) throws IOException { + public static BytesReference buildTsidField(SortedMap dimensionFields) throws IOException { if (dimensionFields == null || dimensionFields.isEmpty()) { throw new IllegalArgumentException("Dimension fields are missing."); } @@ -166,19 +167,22 @@ protected String contentType() { return CONTENT_TYPE; } - public static void encodeTsid(StreamOutput out, SortedMap dimensionFields) throws IOException { + public static void encodeTsid(StreamOutput out, SortedMap dimensionFields) throws IOException { out.writeVInt(dimensionFields.size()); - for (Map.Entry entry : dimensionFields.entrySet()) { - String fieldName = entry.getKey(); - BytesRef fieldNameBytes = new BytesRef(fieldName); - int len = fieldNameBytes.length; - if (len > DIMENSION_NAME_LIMIT) { + for (Map.Entry entry : dimensionFields.entrySet()) { + BytesRef fieldName = entry.getKey(); + if (fieldName.length > DIMENSION_NAME_LIMIT) { throw new IllegalArgumentException( - "Dimension name must be less than [" + DIMENSION_NAME_LIMIT + "] bytes but [" + fieldName + "] was [" + len + "]." 
+ String.format( + Locale.ROOT, + "Dimension name must be less than [%d] bytes but [%s] was [%s].", + DIMENSION_NAME_LIMIT, + fieldName.utf8ToString(), + fieldName.length + ) ); } - // Write field name in utf-8 instead of writeString's utf-16-ish thing - out.writeBytesRef(fieldNameBytes); + out.writeBytesRef(fieldName); entry.getValue().writeTo(out); } @@ -193,7 +197,7 @@ public static Map decodeTsid(StreamInput in) { Map result = new LinkedHashMap(size); for (int i = 0; i < size; i++) { - String name = in.readString(); + String name = in.readBytesRef().utf8ToString(); int type = in.read(); switch (type) { diff --git a/server/src/main/java/org/elasticsearch/search/DocValueFormat.java b/server/src/main/java/org/elasticsearch/search/DocValueFormat.java index 1a41a4398536d..8cd8cee8aaf19 100644 --- a/server/src/main/java/org/elasticsearch/search/DocValueFormat.java +++ b/server/src/main/java/org/elasticsearch/search/DocValueFormat.java @@ -706,9 +706,9 @@ public BytesRef parseBytesRef(Object value) { } Map m = (Map) value; - SortedMap dimensionFields = new TreeMap<>(); + SortedMap dimensionFields = new TreeMap<>(); for (Map.Entry entry : m.entrySet()) { - String k = (String) entry.getKey(); + BytesRef k = new BytesRef(entry.getKey().toString()); Object v = entry.getValue(); BytesReference bytes; diff --git a/server/src/test/java/org/elasticsearch/index/mapper/TimeSeriesIdFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/TimeSeriesIdFieldMapperTests.java index c8849fb9c7415..633ba8ef30efe 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/TimeSeriesIdFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/TimeSeriesIdFieldMapperTests.java @@ -17,6 +17,8 @@ import org.elasticsearch.xcontent.XContentBuilder; import java.io.IOException; +import java.util.List; +import java.util.Map; import static org.elasticsearch.test.MapMatcher.assertMap; import static org.elasticsearch.test.MapMatcher.matchesMap; @@ -123,6 +125,23 @@ public void testStrings() throws IOException { ); } + public void testUnicodeKeys() throws IOException { + String fire = new String(new int[] { 0x1F525 }, 0, 1); + String coffee = "\u2615"; + DocumentMapper docMapper = createDocumentMapper("a", mapping(b -> { + b.startObject(fire).field("type", "keyword").field("time_series_dimension", true).endObject(); + b.startObject(coffee).field("type", "keyword").field("time_series_dimension", true).endObject(); + })); + + ParsedDocument doc = parseDocument(docMapper, b -> b.field(fire, "hot").field(coffee, "good")); + Map tsid = TimeSeriesIdFieldMapper.decodeTsid( + new ByteArrayStreamInput(doc.rootDoc().getBinaryValue("_tsid").bytes) + ); + assertMap(tsid, matchesMap().entry(coffee, "good").entry(fire, "hot")); + // Also make sure the keys are in order + assertThat(List.copyOf(tsid.keySet()), equalTo(List.of(coffee, fire))); + } + public void testKeywordTooLong() throws IOException { DocumentMapper docMapper = createDocumentMapper( "a", diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/timeseries/TimeSeriesAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/timeseries/TimeSeriesAggregatorTests.java index 93bba946c17bd..41bd15a4e075a 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/timeseries/TimeSeriesAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/timeseries/TimeSeriesAggregatorTests.java @@ -17,6 +17,7 @@ import org.apache.lucene.index.RandomIndexWriter; import 
org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.Query; +import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.core.CheckedConsumer; @@ -80,7 +81,7 @@ public void testStandAloneTimeSeriesWithSum() throws IOException { public static void writeTS(RandomIndexWriter iw, long timestamp, Object[] dimensions, Object[] metrics) throws IOException { final List fields = new ArrayList<>(); fields.add(new SortedNumericDocValuesField(DataStreamTimestampFieldMapper.DEFAULT_PATH, timestamp)); - final SortedMap dimensionFields = new TreeMap<>(); + final SortedMap dimensionFields = new TreeMap<>(); for (int i = 0; i < dimensions.length; i += 2) { final BytesReference reference; if (dimensions[i + 1] instanceof Number) { @@ -88,7 +89,7 @@ public static void writeTS(RandomIndexWriter iw, long timestamp, Object[] dimens } else { reference = TimeSeriesIdFieldMapper.encodeTsidValue(dimensions[i + 1].toString()); } - dimensionFields.put(dimensions[i].toString(), reference); + dimensionFields.put(new BytesRef(dimensions[i].toString()), reference); } for (int i = 0; i < metrics.length; i += 2) { if (metrics[i + 1] instanceof Integer || metrics[i + 1] instanceof Long) { From 3b6e223ff194c4d6e7cc8ddb9986551a6f99f831 Mon Sep 17 00:00:00 2001 From: Stuart Tettemer Date: Wed, 9 Feb 2022 16:41:18 -0600 Subject: [PATCH 026/167] Test: use regex in index template warnings yaml test (#83737) Fixes: #82502 --- .../rest-api-spec/test/lang_mustache/60_typed_keys.yml | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/modules/lang-mustache/src/yamlRestTest/resources/rest-api-spec/test/lang_mustache/60_typed_keys.yml b/modules/lang-mustache/src/yamlRestTest/resources/rest-api-spec/test/lang_mustache/60_typed_keys.yml index 0ffb7640c1976..9bd58d554d0c3 100644 --- a/modules/lang-mustache/src/yamlRestTest/resources/rest-api-spec/test/lang_mustache/60_typed_keys.yml +++ b/modules/lang-mustache/src/yamlRestTest/resources/rest-api-spec/test/lang_mustache/60_typed_keys.yml @@ -1,8 +1,6 @@ setup: - skip: - version: all - reason: 'AwaitsFix: https://github.com/elastic/elasticsearch/issues/82502' - features: allowed_warnings + features: allowed_warnings_regex - do: indices.put_template: name: index_template @@ -22,8 +20,8 @@ setup: type: keyword - do: - allowed_warnings: - - "index [test-0] matches multiple legacy templates [global, index_template], composable templates will only match a single template" + allowed_warnings_regex: + - "index \\[test-[0-2]\\] matches multiple legacy templates \\[global, index_template\\], composable templates will only match a single template" bulk: refresh: true body: From 23be6d3e016ed2f0044db4517b191dcd76421d05 Mon Sep 17 00:00:00 2001 From: Yang Wang Date: Thu, 10 Feb 2022 10:00:16 +1100 Subject: [PATCH 027/167] User Profile - Beta docs for APIs (#83495) This PR adds beta docs for the 3 existing APIs: * Activate Profile * Get Profile by ID * Update Profile Data Co-authored-by: Adam Locke --- x-pack/docs/build.gradle | 7 + x-pack/docs/en/rest-api/security.asciidoc | 14 +- .../security/activate-user-profile.asciidoc | 125 ++++++++++++- .../security/get-user-profile.asciidoc | 131 +++++++++++++- .../update-user-profile-data.asciidoc | 168 +++++++++++++++++- 5 files changed, 436 insertions(+), 9 deletions(-) diff --git a/x-pack/docs/build.gradle b/x-pack/docs/build.gradle index 1486ec7c8afa6..8da90eb9e7bfd 100644 --- 
a/x-pack/docs/build.gradle +++ b/x-pack/docs/build.gradle @@ -1,3 +1,5 @@ +import org.elasticsearch.gradle.internal.info.BuildParams + apply plugin: 'elasticsearch.docs-test' apply plugin: 'elasticsearch.rest-resources' @@ -60,6 +62,11 @@ testClusters.matching { it.name == "integTest" }.configureEach { setting 'xpack.security.authc.realms.saml.saml1.sp.acs', 'https://kibana.org/api/security/saml/callback' setting 'xpack.security.authc.realms.saml.saml1.attributes.principal', 'uid' setting 'xpack.security.authc.realms.saml.saml1.attributes.name', 'urn:oid:2.5.4.3' + + if (BuildParams.isSnapshotBuild() == false) { + systemProperty 'es.user_profile_feature_flag_enabled', 'true' + } + user username: 'test_admin' } diff --git a/x-pack/docs/en/rest-api/security.asciidoc b/x-pack/docs/en/rest-api/security.asciidoc index e3e4a47f17b32..78f4f3db00cd9 100644 --- a/x-pack/docs/en/rest-api/security.asciidoc +++ b/x-pack/docs/en/rest-api/security.asciidoc @@ -132,7 +132,18 @@ communicate with a secured {es} cluster. * <> * <> +[discrete] +[[security-user-profile-apis]] +=== User Profile + +Use the following APIs to retrieve and manage user profiles. + +* <> +* <> +* <> + +include::security/activate-user-profile.asciidoc[] include::security/authenticate.asciidoc[] include::security/change-password.asciidoc[] include::security/clear-cache.asciidoc[] @@ -165,6 +176,7 @@ include::security/get-service-accounts.asciidoc[] include::security/get-service-credentials.asciidoc[] include::security/get-tokens.asciidoc[] include::security/get-user-privileges.asciidoc[] +include::security/get-user-profile.asciidoc[] include::security/get-users.asciidoc[] include::security/grant-api-keys.asciidoc[] include::security/has-privileges.asciidoc[] @@ -181,6 +193,4 @@ include::security/saml-invalidate-api.asciidoc[] include::security/saml-complete-logout-api.asciidoc[] include::security/saml-sp-metadata.asciidoc[] include::security/ssl.asciidoc[] -include::security/get-user-profile.asciidoc[] -include::security/activate-user-profile.asciidoc[] include::security/update-user-profile-data.asciidoc[] diff --git a/x-pack/docs/en/rest-api/security/activate-user-profile.asciidoc b/x-pack/docs/en/rest-api/security/activate-user-profile.asciidoc index 15a117b76a036..9e825e2e2ad9b 100644 --- a/x-pack/docs/en/rest-api/security/activate-user-profile.asciidoc +++ b/x-pack/docs/en/rest-api/security/activate-user-profile.asciidoc @@ -1,10 +1,131 @@ [role="xpack"] [[security-api-activiate-user-profile]] === Activate user profile API + +beta::[] + ++++ Activate user profile ++++ -Creates or updates the user profile on behalf of another user +Creates or updates a user profile on behalf of another user. + +[[security-api-activate-user-profile-request]] +==== {api-request-title} + +`POST /_security/profile/_activate` + +[[security-api-activate-user-profile-prereqs]] +==== {api-prereq-title} + +* To use this API, you must have the `manage_user_profile` cluster privilege. + +[[security-api-activate-user-profile-desc]] +==== {api-description-title} + +The activate user profile API creates or updates a profile document for end +users with information that is extracted from the user's authentication object, +including `username`, `full_name`, `roles`, and the authentication realm. + +When updating a profile document, the API enables the document if it was +disabled. Any updates do not change existing content for either the `access` or +`data` fields. 
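+
+For example, a profile document carries both fields side by side (a sketch
+with hypothetical `app1` content); activating the profile again leaves any
+existing content under `access` and `data` untouched:
+
+[source,js]
+----
+{
+  "uid": "u_kd2JMqwUQwSCCOxMv7M1vw",
+  "access": { "app1": { "tag": "prod" } },
+  "data": { "app1": { "theme": "default" } }
+}
+----
+// NOTCONSOLE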
+ +This API is intended only for use by applications (such as {kib}) that need to +create or update profiles for end users. + +IMPORTANT: The calling application must have either an `access_token`, or a +combination of `username` and `password` for the user that the profile document +is intended for. + +[role="child_attributes"] +[[security-api-activate-user-profile-request-body]] +==== {api-request-body-title} + +`access_token`:: +(Required*, string) +The user's access token. If you specify the `access_token` grant type, this +parameter is required. It is not valid with other grant types. + +`grant_type`:: +(Required, string) +The type of grant. ++ +.Valid values for `grant_type` +[%collapsible%open] +==== +`access_token`:: +(Required*, string) +In this type of grant, you must supply an access token that was created by the +{es} token service. For more information, see +<> and <>. + +`password`:: +(Required*, string) +In this type of grant, you must supply the `username` and `password` for the +user that you want to create the API key for. +==== + +`password`:: +(Optional*, string) +The user's password. If you specify the `password` grant type, this parameter is +required. It is not valid with other grant types. + +`username`:: +(Optional*, string) +The username that identifies the user. If you specify the `password` grant type, +this parameter is required. It is not valid with other grant types. + +*Indicates that the setting is required in some, but not all situations. + +[[security-api-activate-user-profile-response-body]] +==== {api-response-body-title} + +A successful activate user profile API call returns a JSON structure that contains +the profile unique ID, user information, timestamp for the operation and version +control numbers. + +[[security-api-activate-user-profile-example]] +==== {api-examples-title} + +[source,console] +---- +POST /_security/profile/_activate +{ + "grant_type": "password", + "username" : "jacknich", + "password" : "l0ng-r4nd0m-p@ssw0rd" +} +---- +// TEST[setup:jacknich_user] + +The API returns the following response: -coming::[8.2.0] +[source,console-result] +---- +{ + "uid": "u_kd2JMqwUQwSCCOxMv7M1vw", + "enabled": true, + "last_synchronized": 1642650651037, + "user": { + "username": "jacknich", + "roles": [ + "admin", "other_role1" + ], + "realm_name": "native", + "full_name": "Jack Nicholson", + "email": "jacknich@example.com", + "active": true + }, + "access": {}, + "data": {}, + "_doc": { + "_primary_term": 88, + "_seq_no": 66 + } +} +---- +// TESTRESPONSE[s/u_kd2JMqwUQwSCCOxMv7M1vw/$body.uid/] +// TESTRESPONSE[s/1642650651037/$body.last_synchronized/] +// TESTRESPONSE[s/88/$body._doc._primary_term/] +// TESTRESPONSE[s/66/$body._doc._seq_no/] diff --git a/x-pack/docs/en/rest-api/security/get-user-profile.asciidoc b/x-pack/docs/en/rest-api/security/get-user-profile.asciidoc index dc081c19b4f2c..2fbefe439eadd 100644 --- a/x-pack/docs/en/rest-api/security/get-user-profile.asciidoc +++ b/x-pack/docs/en/rest-api/security/get-user-profile.asciidoc @@ -1,10 +1,137 @@ [role="xpack"] [[security-api-get-user-profile]] === Get user profile API + +beta::[] + ++++ Get user profile ++++ -Retrieves a user's profile given the unique profile ID +Retrieves a user's profile using the unique profile ID. + +[[security-api-get-user-profile-request]] +==== {api-request-title} + +`GET /_security/profile/` + +[[security-api-get-user-profile-prereqs]] +==== {api-prereq-title} + +* To use this API, you must have _at least_ the `manage_user_profile` cluster privilege. 
+ + +[[security-api-get-user-profile-desc]] +==== {api-description-title} + +The get user profile API returns the user profile document matching a specified +`uid`, which is generated when +<>. + +[[security-api-get-user-profile-path-params]] +==== {api-path-parms-title} + +`uid`:: +(Required, string) A unique identifier for the user profile. + +[[security-api-get-user-profile-query-params]] +==== {api-query-parms-title} + +`data`:: +(Optional, string) Comma-separated list of filters for the `data` field of +the profile document. To return all content, use `data=*`. To return a +subset of content, use `data=` to retrieve the content nested under the +specified ``. Defaults to returning no content. + +[[security-api-get-user-profile-response-body]] +==== {api-response-body-title} + +A successful call returns the JSON representation of the user profile +and its internal versioning numbers. The API returns an empty object +if no profile document is found for the provided `uid`. +The content of the `data` field is not returned by default to avoid deserializing +a potential large payload. + +[[security-api-get-user-profile-example]] +==== {api-examples-title} + +[source,console] +---- +GET /_security/profile/u_kd2JMqwUQwSCCOxMv7M1vw +---- +// TEST[skip:uid is random and no way to ensure this uid exists] + +The API returns the following response for a `uid` matching `u_kd2JMqwUQwSCCOxMv7M1vw`: + +[source,js] +---- +{ + "u_kd2JMqwUQwSCCOxMv7M1vw": { + "uid": "u_kd2JMqwUQwSCCOxMv7M1vw", + "enabled": true, + "last_synchronized": 1642650651037, + "user": { + "username": "jacknich", + "roles": [ + "admin", "other_role1" + ], + "realm_name": "native1", + "full_name": "Jack Nicholson", + "email": "jacknich@example.com", + "active": true + }, + "access": {}, + "data": {}, <1> + "_doc": { + "_primary_term": 1, + "_seq_no": 0 + } + } +} +---- +// NOTCONSOLE +// Besides the uid being random, the response cannot be compared against due to +// the last_synchronized and _doc fields being unpredictable. + +<1> No content is returned in the `data` field by default. + +The following request retrieves a subset of `data` that's nested under the +key `app1`, along with the user's profile: + +[source,console] +---- +GET /_security/profile/u_kd2JMqwUQwSCCOxMv7M1vw?data=app1.key1 +---- +// TEST[skip:uid is random and no way to ensure this uid exists] -coming::[8.2.0] +[source,js] +---- +{ + "u_kd2JMqwUQwSCCOxMv7M1vw": { + "uid": "u_kd2JMqwUQwSCCOxMv7M1vw", + "enabled": true, + "last_synchronized": 1642650651037, + "user": { + "username": "jacknich", + "roles": [ + "admin", "other_role1" + ], + "realm_name": "native1", + "full_name": "Jack Nicholson", + "email": "jacknich@example.com", + "active": true + }, + "access": {}, + "data": { + "app1": { + "key1": "value1" + } + }, + "_doc": { + "_primary_term": 1, + "_seq_no": 0 + } + } +} +---- +// NOTCONSOLE diff --git a/x-pack/docs/en/rest-api/security/update-user-profile-data.asciidoc b/x-pack/docs/en/rest-api/security/update-user-profile-data.asciidoc index 40946cd6196ab..f0127339aa7ef 100644 --- a/x-pack/docs/en/rest-api/security/update-user-profile-data.asciidoc +++ b/x-pack/docs/en/rest-api/security/update-user-profile-data.asciidoc @@ -1,10 +1,172 @@ [role="xpack"] [[security-api-update-user-profile-data]] -=== Activate user profile data API +=== Update user profile data API + +beta::[] + ++++ Update user profile data ++++ -Update application specific data for the user profile of the given unique ID. 
+Updates specific data for the user profile that's associated with the specified +unique ID. + +[[security-api-update-user-profile-data-request]] +==== {api-request-title} + +`POST /_security/profile/_data/` + +[[security-api-update-user-profile-data-prereqs]] +==== {api-prereq-title} + +To use this API, you must have one of the following privileges: + +* The `manage_user_profile` cluster privilege. +* The `update_profile_data` global privilege for the namespaces that are +referenced in the request. + +[[security-api-update-user-profile-data-desc]] +==== {api-description-title} + +The update user profile API updates the `access` and `data` fields of an +existing user profile document with JSON objects. New keys and their values are +added to the profile document, and conflicting keys are replaced by data that's +included in the request. + +For both `access` and `data`, content is namespaced by the top-level fields. +The `update_profile_data` global privilege grants privileges for updating only +the allowed namespaces. + +[[security-api-update-user-profile-data-path-params]] +==== {api-path-parms-title} + +`uid`:: +(Required, string) A unique identifier for the user profile. + +[[security-api-update-user-profile-data-query-params]] +==== {api-query-parms-title} + +include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=if_seq_no] + +include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=if_primary_term] + +include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=refresh] + +`uid`:: +(Required, string) A unique identifier for the user profile. + +[[security-api-update-user-profile-data-request-body]] +==== {api-request-body-title} + +`access`:: +(Required*, object) +Searchable data that you want to associate with the user profile. +This field supports a nested data structure. Within the `access` object, +top-level keys cannot begin with an underscore (`_`) or contain a period (`.`). + +`data`:: +(Required*, object) +Non-searchable data that you want to associate with the user profile. +This field supports a nested data structure. Within the `data` object, top-level +keys cannot begin with an underscore (`_`) or contain a period (`.`) +The `data` object is not searchable, but can be retrieved with the +<>. + +*Indicates that the setting is required in some, but not all situations. 
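+
+For example, within either object a top-level key such as `app1` is accepted,
+while keys such as `_app1` or `app.1` are rejected under the naming
+restrictions above (a sketch with hypothetical namespaces):
+
+[source,js]
+----
+{
+  "access": {
+    "app1":  { "tag": "prod" },
+    "_app1": { "reason": "rejected, top-level key starts with an underscore" },
+    "app.1": { "reason": "rejected, top-level key contains a period" }
+  }
+}
+----
+// NOTCONSOLE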
+ +[[security-api-update-user-profile-data-response-body]] +==== {api-response-body-title} + +A successful update user profile data API call returns a JSON structure +indicating that the request is acknowledged: + +[source,js] +---- +{ + "acknowledged": true +} +---- +// NOTCONSOLE + +[[security-api-update-user-profile-data-example]] +==== {api-examples-title} + +The following request updates a profile document for a `uid` matching +`u_kd2JMqwUQwSCCOxMv7M1vw`: + +[source,console] +---- +POST /_security/profile/_data/u_kd2JMqwUQwSCCOxMv7M1vw +{ + "access": { + "app1": { + "tag": "prod" + } + }, + "data": { + "app1": { + "theme": "default" + } + } +} +---- +// TEST[skip:uid is random and no way to ensure this uid exists] + +You can update the profile data to replace some keys and add new keys: + +[source,console] +---- +POST /_security/profile/_data/u_kd2JMqwUQwSCCOxMv7M1vw +{ + "access": { + "app1": { + "tag": "dev" + } + }, + "data": { + "app1": { + "font": "large" + } + } +} +---- +// TEST[skip:uid is random and no way to ensure this uid exists] + +If you run the request again, the consolidated profile data is returned: -coming::[8.2.0] +[source,js] +---- +{ + "u_kd2JMqwUQwSCCOxMv7M1vw": { + "uid": "u_kd2JMqwUQwSCCOxMv7M1vw", + "enabled": true, + "last_synchronized": 1642650651037, + "user": { + "username": "jacknich", + "roles": [ + "admin", "other_role1" + ], + "realm_name": "native1", + "full_name": "Jack Nicholson", + "email": "jacknich@example.com", + "active": true + }, + "access": { + "app1": { + "tag": "dev" + } + }, + "data": { + "app1": { + "theme": "default", + "font": "large" + } + }, + "_doc": { + "_primary_term": 1, + "_seq_no": 0 + } + } +} +---- +// NOTCONSOLE From 0ddfad4cd7df03acb49d1bace4d3bb87d5582c62 Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Thu, 10 Feb 2022 00:30:15 +0100 Subject: [PATCH 028/167] Fix release build (#83720) - Add `es.index_mode_feature_flag_registered` feature flag to data-streams module's internalClusterTest task. - Add `es.random_sampler_feature_flag_registered` feature flag to xpack rest tests with security qa module. 
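Feature flags are registered automatically only in snapshot builds, so release
builds must enable them explicitly via a system property. The pattern applied
in each build script is roughly the following sketch (the exact tasks and
flags vary per module, see the diffs below):

    if (BuildParams.isSnapshotBuild() == false) {
      systemProperty 'es.index_mode_feature_flag_registered', 'true'
    }
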
Closes #83722
---
 modules/data-streams/build.gradle                    | 6 ++++++
 qa/mixed-cluster/build.gradle                        | 2 ++
 x-pack/qa/core-rest-tests-with-security/build.gradle | 1 +
 x-pack/qa/runtime-fields/build.gradle                | 1 +
 4 files changed, 10 insertions(+)

diff --git a/modules/data-streams/build.gradle b/modules/data-streams/build.gradle
index 9951e2a3e1f54..9faa8036f1cc3 100644
--- a/modules/data-streams/build.gradle
+++ b/modules/data-streams/build.gradle
@@ -51,3 +51,9 @@ if (BuildParams.inFipsJvm){
   tasks.named("javaRestTest").configure{enabled = false }
   tasks.named("yamlRestTest").configure{enabled = false }
 }
+
+if (BuildParams.isSnapshotBuild() == false) {
+  tasks.named("internalClusterTest").configure {
+    systemProperty 'es.index_mode_feature_flag_registered', 'true'
+  }
+}
diff --git a/qa/mixed-cluster/build.gradle b/qa/mixed-cluster/build.gradle
index 7f9d1e57adeb7..53b179d600763 100644
--- a/qa/mixed-cluster/build.gradle
+++ b/qa/mixed-cluster/build.gradle
@@ -43,6 +43,7 @@ BuildParams.bwcVersions.withWireCompatible { bwcVersion, baseName ->
       println "Upgrading one node to create a mixed cluster"
       if (BuildParams.isSnapshotBuild() == false) {
         baseCluster.get().nodes."${baseName}-0".systemProperty 'es.index_mode_feature_flag_registered', 'true'
+        baseCluster.get().nodes."${baseName}-0".systemProperty 'es.random_sampler_feature_flag_registered', 'true'
       }
       baseCluster.get().nextNodeToNextVersion()
       // Getting the endpoints causes a wait for the cluster
@@ -50,6 +51,7 @@
       println "Upgrading another node to create a mixed cluster"
       if (BuildParams.isSnapshotBuild() == false) {
         baseCluster.get().nodes."${baseName}-1".systemProperty 'es.index_mode_feature_flag_registered', 'true'
+        baseCluster.get().nodes."${baseName}-1".systemProperty 'es.random_sampler_feature_flag_registered', 'true'
       }
       baseCluster.get().nextNodeToNextVersion()
       nonInputProperties.systemProperty('tests.rest.cluster', baseCluster.map(c -> c.allHttpSocketURI.join(",")))
diff --git a/x-pack/qa/core-rest-tests-with-security/build.gradle b/x-pack/qa/core-rest-tests-with-security/build.gradle
index e8fdffe47c01f..77303f88e64eb 100644
--- a/x-pack/qa/core-rest-tests-with-security/build.gradle
+++ b/x-pack/qa/core-rest-tests-with-security/build.gradle
@@ -33,5 +33,6 @@ testClusters.matching { it.name == "integTest" }.configureEach {
   setting 'xpack.security.autoconfiguration.enabled', 'false'
   if (BuildParams.isSnapshotBuild() == false) {
     systemProperty 'es.index_mode_feature_flag_registered', 'true'
+    systemProperty 'es.random_sampler_feature_flag_registered', 'true'
   }
 }
diff --git a/x-pack/qa/runtime-fields/build.gradle b/x-pack/qa/runtime-fields/build.gradle
index c78903de66081..a115b74a0bc90 100644
--- a/x-pack/qa/runtime-fields/build.gradle
+++ b/x-pack/qa/runtime-fields/build.gradle
@@ -42,6 +42,7 @@ subprojects {
     setting 'xpack.security.enabled', 'false'
     if (BuildParams.isSnapshotBuild() == false) {
       systemProperty 'es.index_mode_feature_flag_registered', 'true'
+      systemProperty 'es.random_sampler_feature_flag_registered', 'true'
     }
   }

From 98fdf4fa59eadac53fffcf9850cce9c733706ece Mon Sep 17 00:00:00 2001
From: Lukas Wegmann
Date: Thu, 10 Feb 2022 08:27:04 +0100
Subject: [PATCH 029/167] SQL: Compress Cursors (#83591)

Partially addresses #83284

Compressing cursors is low-hanging fruit for addressing issues around
cursor size, and it pays off nicely.

This is especially relevant for queries using the ListCursor (e.g.
SHOW COLUMNS or queries sorting on aggregated columns) but might also benefit queries that store SearchSourceBuilder in the cursor. --- docs/changelog/83591.yaml | 5 ++ .../xpack/sql/qa/rest/RestSqlTestCase.java | 16 ++++ .../xpack/sql/common/io/SqlStreamInput.java | 27 +++--- .../xpack/sql/common/io/SqlStreamOutput.java | 30 +++++-- .../xpack/sql/session/Cursors.java | 4 +- .../AbstractSqlWireSerializingTestCase.java | 15 ++-- .../xpack/sql/common/io/SqlStreamTests.java | 83 +++++++++++++++++++ .../CompositeAggregationCursorTests.java | 2 +- .../xpack/sql/session/ListCursorTests.java | 2 +- 9 files changed, 151 insertions(+), 33 deletions(-) create mode 100644 docs/changelog/83591.yaml create mode 100644 x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/common/io/SqlStreamTests.java diff --git a/docs/changelog/83591.yaml b/docs/changelog/83591.yaml new file mode 100644 index 0000000000000..36200918bf366 --- /dev/null +++ b/docs/changelog/83591.yaml @@ -0,0 +1,5 @@ +pr: 83591 +summary: Compress Cursors +area: SQL +type: enhancement +issues: [] diff --git a/x-pack/plugin/sql/qa/server/src/main/java/org/elasticsearch/xpack/sql/qa/rest/RestSqlTestCase.java b/x-pack/plugin/sql/qa/server/src/main/java/org/elasticsearch/xpack/sql/qa/rest/RestSqlTestCase.java index c5121168116e2..6879422786822 100644 --- a/x-pack/plugin/sql/qa/server/src/main/java/org/elasticsearch/xpack/sql/qa/rest/RestSqlTestCase.java +++ b/x-pack/plugin/sql/qa/server/src/main/java/org/elasticsearch/xpack/sql/qa/rest/RestSqlTestCase.java @@ -49,6 +49,8 @@ import java.util.List; import java.util.Locale; import java.util.Map; +import java.util.stream.Collectors; +import java.util.stream.IntStream; import static java.util.Collections.emptyList; import static java.util.Collections.emptyMap; @@ -74,6 +76,7 @@ import static org.elasticsearch.xpack.sql.proto.CoreProtocol.URL_PARAM_FORMAT; import static org.elasticsearch.xpack.sql.proto.CoreProtocol.WAIT_FOR_COMPLETION_TIMEOUT_NAME; import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.lessThan; /** * Integration test for the rest sql action. 
The one that speaks json directly to a @@ -1433,6 +1436,19 @@ public void testAsyncTextPaginated() throws IOException, InterruptedException { } } + public void testCompressCursor() throws IOException { + String doc = IntStream.range(0, 1000) + .mapToObj(i -> String.format(Locale.ROOT, "\"field%d\": %d", i, i)) + .collect(Collectors.joining(",")); + index("{" + doc + "}"); + + String mode = randomMode(); + Map resp = toMap(runSql(query("SHOW COLUMNS FROM " + indexPattern("test")).fetchSize(1).mode(mode)), mode); + + // without compression, the cursor is at least * 1000 bytes (in fact it is ~35kb) + assertThat(resp.get("cursor").toString().length(), lessThan(5000)); + } + static Map runSql(RequestObjectBuilder builder, String mode) throws IOException { return toMap(runSql(builder.mode(mode)), mode); } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/common/io/SqlStreamInput.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/common/io/SqlStreamInput.java index a013845dc2fd5..627072da48252 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/common/io/SqlStreamInput.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/common/io/SqlStreamInput.java @@ -8,6 +8,8 @@ package org.elasticsearch.xpack.sql.common.io; import org.elasticsearch.Version; +import org.elasticsearch.common.compress.CompressorFactory; +import org.elasticsearch.common.io.stream.InputStreamStreamInput; import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; @@ -23,22 +25,25 @@ */ public class SqlStreamInput extends NamedWriteableAwareStreamInput { - private final ZoneId zoneId; + public static SqlStreamInput fromString(String base64encoded, NamedWriteableRegistry namedWriteableRegistry, Version version) + throws IOException { + byte[] bytes = Base64.getDecoder().decode(base64encoded); + StreamInput in = StreamInput.wrap(bytes); + Version inVersion = Version.readVersion(in); + if (version.compareTo(inVersion) != 0) { + throw new SqlIllegalArgumentException("Unsupported cursor version [{}], expected [{}]", inVersion, version); + } - public SqlStreamInput(String base64encoded, NamedWriteableRegistry namedWriteableRegistry, Version version) throws IOException { - this(Base64.getDecoder().decode(base64encoded), namedWriteableRegistry, version); + InputStreamStreamInput uncompressingIn = new InputStreamStreamInput(CompressorFactory.COMPRESSOR.threadLocalInputStream(in)); + return new SqlStreamInput(uncompressingIn, namedWriteableRegistry, inVersion); } - public SqlStreamInput(byte[] input, NamedWriteableRegistry namedWriteableRegistry, Version version) throws IOException { - super(StreamInput.wrap(input), namedWriteableRegistry); + private final ZoneId zoneId; + + private SqlStreamInput(StreamInput input, NamedWriteableRegistry namedWriteableRegistry, Version version) throws IOException { + super(input, namedWriteableRegistry); - // version check first - Version ver = Version.readVersion(delegate); - if (version.compareTo(ver) != 0) { - throw new SqlIllegalArgumentException("Unsupported cursor version [{}], expected [{}]", ver, version); - } delegate.setVersion(version); - // configuration settings zoneId = delegate.readZoneId(); } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/common/io/SqlStreamOutput.java 
b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/common/io/SqlStreamOutput.java index 4ef5f67a7c4c2..5bfd829ee2db0 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/common/io/SqlStreamOutput.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/common/io/SqlStreamOutput.java @@ -8,35 +8,47 @@ package org.elasticsearch.xpack.sql.common.io; import org.elasticsearch.Version; +import org.elasticsearch.common.compress.CompressorFactory; import org.elasticsearch.common.io.stream.OutputStreamStreamOutput; +import org.elasticsearch.common.io.stream.StreamOutput; import java.io.ByteArrayOutputStream; import java.io.IOException; +import java.io.OutputStream; import java.nio.charset.StandardCharsets; import java.time.ZoneId; import java.util.Base64; +/** + * Output stream for writing SQL cursors. The output is compressed if it would become larger than {@code compressionThreshold} + * bytes otherwise (see {@code DEFAULT_COMPRESSION_THRESHOLD}). + * + * The wire format is {@code version compressedPayload}. + */ public class SqlStreamOutput extends OutputStreamStreamOutput { private final ByteArrayOutputStream bytes; - public SqlStreamOutput(Version version, ZoneId zoneId) throws IOException { - this(new ByteArrayOutputStream(), version, zoneId); + public static SqlStreamOutput create(Version version, ZoneId zoneId) throws IOException { + ByteArrayOutputStream bytes = new ByteArrayOutputStream(); + StreamOutput uncompressedOut = new OutputStreamStreamOutput(Base64.getEncoder().wrap(bytes)); + Version.writeVersion(version, uncompressedOut); + OutputStream out = CompressorFactory.COMPRESSOR.threadLocalOutputStream(uncompressedOut); + return new SqlStreamOutput(bytes, out, version, zoneId); } - private SqlStreamOutput(ByteArrayOutputStream bytes, Version version, ZoneId zoneId) throws IOException { - super(Base64.getEncoder().wrap(new OutputStreamStreamOutput(bytes))); + private SqlStreamOutput(ByteArrayOutputStream bytes, OutputStream out, Version version, ZoneId zoneId) throws IOException { + super(out); this.bytes = bytes; - - Version.writeVersion(version, this); - writeZoneId(zoneId); + super.setVersion(version); + this.writeZoneId(zoneId); } /** * Should be called _after_ closing the stream - there are no guarantees otherwise. 
*/ - public String streamAsString() { - // Base64 uses this encoding instead of UTF-8 + public String streamAsString() throws IOException { return bytes.toString(StandardCharsets.ISO_8859_1); } + } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/session/Cursors.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/session/Cursors.java index ccd687d1550ab..9384e1b5f989e 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/session/Cursors.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/session/Cursors.java @@ -74,7 +74,7 @@ static String encodeToString(Cursor info, Version version, ZoneId zoneId) { if (info == Cursor.EMPTY) { return StringUtils.EMPTY; } - try (SqlStreamOutput output = new SqlStreamOutput(version, zoneId)) { + try (SqlStreamOutput output = SqlStreamOutput.create(version, zoneId)) { output.writeNamedWriteable(info); output.close(); // return the string only after closing the resource @@ -91,7 +91,7 @@ public static Tuple decodeFromStringWithZone(String base64) { if (base64.isEmpty()) { return new Tuple<>(Cursor.EMPTY, null); } - try (SqlStreamInput in = new SqlStreamInput(base64, WRITEABLE_REGISTRY, VERSION)) { + try (SqlStreamInput in = SqlStreamInput.fromString(base64, WRITEABLE_REGISTRY, VERSION)) { Cursor cursor = in.readNamedWriteable(Cursor.class); return new Tuple<>(cursor, in.zoneId()); } catch (IOException ex) { diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/AbstractSqlWireSerializingTestCase.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/AbstractSqlWireSerializingTestCase.java index 888304f47a216..abcbdccf0e3b4 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/AbstractSqlWireSerializingTestCase.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/AbstractSqlWireSerializingTestCase.java @@ -8,7 +8,6 @@ package org.elasticsearch.xpack.sql; import org.elasticsearch.Version; -import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.test.AbstractWireTestCase; @@ -23,14 +22,12 @@ public abstract class AbstractSqlWireSerializingTestCase ex @Override protected T copyInstance(T instance, Version version) throws IOException { - try (BytesStreamOutput output = new BytesStreamOutput()) { - ZoneId zoneId = instanceZoneId(instance); - SqlStreamOutput out = new SqlStreamOutput(version, zoneId); - instance.writeTo(out); - out.close(); - try (SqlStreamInput in = new SqlStreamInput(out.streamAsString(), getNamedWriteableRegistry(), version)) { - return instanceReader().read(in); - } + ZoneId zoneId = instanceZoneId(instance); + SqlStreamOutput out = SqlStreamOutput.create(version, zoneId); + instance.writeTo(out); + out.close(); + try (SqlStreamInput in = SqlStreamInput.fromString(out.streamAsString(), getNamedWriteableRegistry(), version)) { + return instanceReader().read(in); } } diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/common/io/SqlStreamTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/common/io/SqlStreamTests.java new file mode 100644 index 0000000000000..d239e07e8b06a --- /dev/null +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/common/io/SqlStreamTests.java @@ -0,0 +1,83 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. 
under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.sql.common.io; + +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.Version; +import org.elasticsearch.common.io.stream.InputStreamStreamInput; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.sql.SqlIllegalArgumentException; + +import java.io.ByteArrayInputStream; +import java.io.IOException; +import java.util.Arrays; +import java.util.Base64; +import java.util.List; + +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.lessThan; + +public class SqlStreamTests extends ESTestCase { + + public void testWriteAndRead() throws IOException { + BytesRef payload = new BytesRef(randomByteArrayOfLength(randomIntBetween(10, 1000))); + + SqlStreamOutput out = SqlStreamOutput.create(Version.CURRENT, randomZone()); + out.writeBytesRef(payload); + out.close(); + String encoded = out.streamAsString(); + + SqlStreamInput in = SqlStreamInput.fromString(encoded, new NamedWriteableRegistry(List.of()), Version.CURRENT); + BytesRef read = in.readBytesRef(); + + assertArrayEquals(payload.bytes, read.bytes); + } + + public void testPayloadIsCompressed() throws IOException { + SqlStreamOutput out = SqlStreamOutput.create(Version.CURRENT, randomZone()); + byte[] payload = new byte[1000]; + Arrays.fill(payload, (byte) 0); + out.write(payload); + out.close(); + + String result = out.streamAsString(); + assertThat(result.length(), lessThan(1000)); + } + + public void testOldCursorProducesVersionMismatchError() { + SqlIllegalArgumentException ex = expectThrows( + SqlIllegalArgumentException.class, + () -> SqlStreamInput.fromString( + // some cursor produced by ES 7.15.1 + "97S0AwFaAWMBCHRlc3RfZW1whgEBAQljb21wb3NpdGUHZ3JvdXBieQEDbWF4CDJkMTBjNGJhAAD/AQls" + + "YW5ndWFnZXMAAAD/AAD/AQAIYmRlZjg4ZTUBBmdlbmRlcgAAAQAAAQEKAQhiZGVmODhlNf8AAgEAAAAA" + + "AP////8PAAAAAAAAAAAAAAAAAVoDAAICAAAAAAAAAAAKAP////8PAgFtCDJkMTBjNGJhBXZhbHVlAAEE" + + "QllURQFrCGJkZWY4OGU1AAABAwA=", + new NamedWriteableRegistry(List.of()), + Version.V_8_2_0 + ) + ); + + assertThat(ex.getMessage(), containsString("Unsupported cursor version [7.15.1], expected [8.2.0]")); + } + + public void testVersionCanBeReadByOldNodes() throws IOException { + Version version = randomFrom(Version.V_7_0_0, Version.V_7_2_1, Version.V_8_1_0); + SqlStreamOutput out = SqlStreamOutput.create(version, randomZone()); + out.writeString("payload"); + out.close(); + String encoded = out.streamAsString(); + + byte[] bytes = Base64.getDecoder().decode(encoded); + InputStreamStreamInput in = new InputStreamStreamInput(new ByteArrayInputStream(bytes)); + + assertEquals(version, Version.readVersion(in)); + } + +} diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/CompositeAggregationCursorTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/CompositeAggregationCursorTests.java index 4815a7feec358..8cff328daa88a 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/CompositeAggregationCursorTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/CompositeAggregationCursorTests.java @@ -30,7 +30,7 @@ public static CompositeAggCursor randomCompositeCursor() { } return new 
CompositeAggCursor(
-            new byte[randomInt(256)],
+            new byte[randomInt(1024)],
             extractors,
             randomBitSet(extractorsSize),
             randomIntBetween(10, 1024),
diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/session/ListCursorTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/session/ListCursorTests.java
index b4d4c0201c182..ba08e25c381d8 100644
--- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/session/ListCursorTests.java
+++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/session/ListCursorTests.java
@@ -19,7 +19,7 @@ public class ListCursorTests extends AbstractSqlWireSerializingTestCase {

     public static ListCursor randomPagingListCursor() {
-        int size = between(1, 20);
+        int size = between(1, 100);
         int depth = between(1, 20);

         List> values = new ArrayList<>(size);

From 037261356e4ecbd1b7b91611ad9c9171dafc9b89 Mon Sep 17 00:00:00 2001
From: Przemyslaw Gomulka
Date: Thu, 10 Feb 2022 09:14:17 +0100
Subject: [PATCH 030/167] Convert 'id' and '_id' values in REST API tests to
 strings (#82681)

Follow-up from #77144 (comment), converting id/_id values to always be
strings instead of integers. This makes the type of these values in the
Elasticsearch specification string only, instead of string | number.

This change was generated using the following command on Ubuntu:

find . -type f -name "*.yml" -print0 | xargs -0 sed -i -r 's/([^a-zA-Z0-9_\.]id|[^a-zA-Z0-9_]_id):(\s*)([0-9]+)/\1:\2"\3"/g'

---
 .../test/stats/20_empty_bucket.yml | 4 +-
 .../test/stats/30_single_value_field.yml | 30 ++--
 .../test/stats/40_multi_value_field.yml | 30 ++--
 .../test/search.query/10_match.yml | 4 +-
 .../test/search.query/20_ngram_search.yml | 4 +-
 .../search.query/30_ngram_highligthing.yml | 2 +-
 .../test/search.query/40_query_string.yml | 4 +-
 .../41_query_string_with_default_analyzer.yml | 2 +-
 .../search.query/50_queries_with_synonyms.yml | 10 +-
 .../test/search.query/60_synonym_graph.yml | 18 +--
 .../test/search.suggest/30_synonyms.yml | 2 +-
 .../test/termvectors/10_payloads.yml | 4 +-
 .../test/data_stream/100_delete_by_query.yml | 2 +-
 .../test/data_stream/110_update_by_query.yml | 6 +-
 .../test/data_stream/20_unsupported_apis.yml | 2 +-
 .../ingest/100_date_index_name_processor.yml | 2 +-
 .../rest-api-spec/test/ingest/110_sort.yml | 4 +-
 .../rest-api-spec/test/ingest/120_grok.yml | 12 +-
 .../test/ingest/130_escape_dot.yml | 8 +-
 .../rest-api-spec/test/ingest/140_json.yml | 12 +-
 .../rest-api-spec/test/ingest/150_kv.yml | 8 +-
 .../test/ingest/160_urldecode.yml | 4 +-
 .../rest-api-spec/test/ingest/170_version.yml | 4 +-
 .../test/ingest/180_bytes_processor.yml | 4 +-
 .../test/ingest/190_script_processor.yml | 32 ++--
 .../test/ingest/200_default_pipeline.yml | 26 ++--
 .../test/ingest/200_dissect_processor.yml | 6 +-
 .../test/ingest/210_conditional_processor.yml | 8 +-
 .../test/ingest/210_pipeline_processor.yml | 20 +--
 .../test/ingest/220_drop_processor.yml | 12 +-
 .../test/ingest/230_change_target_index.yml | 8 +-
 .../test/ingest/240_required_pipeline.yml | 26 ++--
 .../rest-api-spec/test/ingest/250_csv.yml | 16 +-
 .../rest-api-spec/test/ingest/260_seq_no.yml | 6 +-
 .../test/ingest/270_set_processor.yml | 12 +-
 .../rest-api-spec/test/ingest/280_rename.yml | 4 +-
 .../test/ingest/30_date_processor.yml | 24 +--
 .../rest-api-spec/test/ingest/40_mutate.yml | 8 +-
 .../test/ingest/50_on_failure.yml | 12 +-
 .../rest-api-spec/test/ingest/60_fail.yml | 6 +-
 .../rest-api-spec/test/ingest/80_foreach.yml | 8 +-
 .../test/ingest_geoip/20_geoip_processor.yml | 32 ++--
.../20_useragent_processor.yml | 8 +- .../test/ingest-useragent/30_custom_regex.yml | 4 +- .../test/lang_expression/20_search.yml | 2 +- .../20_render_search_template.yml | 4 +- .../test/lang_mustache/30_search_template.yml | 16 +- .../50_multi_search_template.yml | 8 +- .../test/painless/100_terms_agg.yml | 6 +- .../test/painless/130_metric_agg.yml | 6 +- .../rest-api-spec/test/painless/15_update.yml | 22 +-- .../test/painless/20_scriptfield.yml | 2 +- .../test/painless/25_script_upsert.yml | 20 +-- .../rest-api-spec/test/painless/30_search.yml | 22 +-- .../test/painless/50_script_doc_values.yml | 144 +++++++++--------- .../painless/60_script_doc_values_binary.yml | 6 +- .../test/painless/70_mov_fn_agg.yml | 12 +- .../test/painless/80_script_score.yml | 14 +- .../test/match_only_text/10_basic.yml | 8 +- .../test/rank_feature/10_basic.yml | 4 +- .../test/rank_features/10_basic.yml | 4 +- .../test/scaled_float/10_basic.yml | 8 +- .../test/search-as-you-type/10_basic.yml | 6 +- .../search-as-you-type/20_highlighting.yml | 2 +- .../rest-api-spec/test/11_parent_child.yml | 4 +- .../rest-api-spec/test/20_parent_join.yml | 4 +- .../rest-api-spec/test/30_inner_hits.yml | 8 +- .../test/rank_eval/40_rank_eval_templated.yml | 12 +- .../test/delete_by_query/10_basic.yml | 32 ++-- .../test/delete_by_query/20_validation.yml | 8 +- .../test/delete_by_query/40_versioning.yml | 4 +- .../50_wait_for_active_shards.yml | 2 +- .../test/delete_by_query/80_slices.yml | 36 ++--- .../rest-api-spec/test/reindex/10_basic.yml | 24 +-- .../test/reindex/20_validation.yml | 16 +- .../test/reindex/25_no_auto_create.yml | 6 +- .../rest-api-spec/test/reindex/30_search.yml | 18 +-- .../test/reindex/35_search_failures.yml | 2 +- .../test/reindex/40_versioning.yml | 24 +-- .../rest-api-spec/test/reindex/50_routing.yml | 8 +- .../reindex/60_wait_for_active_shards.yml | 4 +- .../test/reindex/70_throttle.yml | 24 +-- .../rest-api-spec/test/reindex/80_slices.yml | 36 ++--- .../test/reindex/85_scripting.yml | 42 ++--- .../rest-api-spec/test/reindex/90_remote.yml | 28 ++-- .../test/reindex/95_parent_join.yml | 14 +- .../test/update_by_query/10_basic.yml | 36 ++--- .../test/update_by_query/20_validation.yml | 14 +- .../test/update_by_query/30_new_fields.yml | 2 +- .../update_by_query/35_search_failure.yml | 2 +- .../test/update_by_query/40_versioning.yml | 8 +- .../test/update_by_query/50_consistency.yml | 4 +- .../test/update_by_query/70_slices.yml | 36 ++--- .../test/update_by_query/80_scripting.yml | 46 +++--- .../test/repository_azure/20_repository.yml | 14 +- .../test/repository_gcs/20_repository.yml | 14 +- .../20_repository_permanent_credentials.yml | 20 +-- .../30_repository_temporary_credentials.yml | 14 +- .../40_repository_ec2_credentials.yml | 14 +- .../50_repository_ecs_credentials.yml | 14 +- .../test/repository_url/10_basic.yml | 14 +- .../test/analysis_icu/20_search.yml | 2 +- .../test/analysis_kuromoji/20_search.yml | 2 +- .../test/analysis_nori/20_search.yml | 2 +- .../test/analysis_phonetic/40_search.yml | 2 +- .../test/analysis_smartcn/20_search.yml | 2 +- .../test/analysis_stempel/20_search.yml | 2 +- .../test/analysis_ukrainian/20_search.yml | 2 +- .../test/painless_whitelist/20_whitelist.yml | 2 +- .../test/painless_whitelist/30_static.yml | 2 +- .../test/painless_whitelist/40_instance.yml | 2 +- .../test/example-rescore/20_score.yml | 4 +- .../test/script_expert_scoring/20_score.yml | 6 +- .../20_attachment_processor.yml | 24 +-- .../ingest_attachment/30_files_supported.yml | 8 +- 
.../test/mapper_annotatedtext/10_basic.yml | 14 +- .../test/mapper_murmur3/10_basic.yml | 12 +- .../test/mapper_size/10_basic.yml | 4 +- .../test/store_smb/15_index_creation.yml | 4 +- .../ingest_mustache/10_ingest_disabled.yml | 2 +- .../10_pipeline_with_mustache_templates.yml | 24 +-- .../test/ingest/20_combine_processors.yml | 12 +- .../ingest/30_update_by_query_with_ingest.yml | 2 +- .../test/ingest/40_reindex_with_ingest.yml | 2 +- .../50_script_processor_using_painless.yml | 8 +- .../60_pipeline_timestamp_date_mapping.yml | 2 +- .../rest-api-spec/test/bulk/80_cas.yml | 6 +- .../rest-api-spec/test/cat.count/10_basic.yml | 4 +- .../test/cat.recovery/10_basic.yml | 2 +- .../rest-api-spec/test/count/10_basic.yml | 2 +- .../test/count/20_query_string.yml | 2 +- .../rest-api-spec/test/create/10_with_id.yml | 6 +- .../test/create/35_external_version.yml | 4 +- .../rest-api-spec/test/create/40_routing.yml | 6 +- .../rest-api-spec/test/create/60_refresh.yml | 12 +- .../rest-api-spec/test/create/70_nested.yml | 4 +- .../rest-api-spec/test/delete/10_basic.yml | 4 +- .../test/delete/11_shard_header.yml | 4 +- .../rest-api-spec/test/delete/12_result.yml | 6 +- .../rest-api-spec/test/delete/20_cas.yml | 6 +- .../test/delete/25_external_version.yml | 6 +- .../test/delete/26_external_gte_version.yml | 10 +- .../rest-api-spec/test/delete/30_routing.yml | 6 +- .../rest-api-spec/test/delete/50_refresh.yml | 16 +- .../rest-api-spec/test/delete/60_missing.yml | 4 +- .../rest-api-spec/test/exists/10_basic.yml | 8 +- .../rest-api-spec/test/exists/40_routing.yml | 6 +- .../test/exists/60_realtime_refresh.yml | 8 +- .../rest-api-spec/test/exists/70_defaults.yml | 4 +- .../test/explain/20_source_filtering.yml | 14 +- .../test/explain/30_query_string.yml | 12 +- .../test/get/15_default_values.yml | 4 +- .../test/get/20_stored_fields.yml | 8 +- .../rest-api-spec/test/get/40_routing.yml | 6 +- .../test/get/50_with_headers.yml | 4 +- .../test/get/60_realtime_refresh.yml | 8 +- .../test/get/70_source_filtering.yml | 16 +- .../rest-api-spec/test/get/80_missing.yml | 4 +- .../rest-api-spec/test/get/90_versions.yml | 20 +-- .../test/get_source/10_basic.yml | 6 +- .../test/get_source/15_default_values.yml | 4 +- .../test/get_source/40_routing.yml | 6 +- .../test/get_source/60_realtime_refresh.yml | 8 +- .../test/get_source/70_source_filtering.yml | 8 +- .../test/get_source/80_missing.yml | 4 +- .../test/get_source/85_source_missing.yml | 6 +- .../rest-api-spec/test/index/10_with_id.yml | 4 +- .../rest-api-spec/test/index/12_result.yml | 4 +- .../rest-api-spec/test/index/20_optype.yml | 6 +- .../rest-api-spec/test/index/30_cas.yml | 10 +- .../test/index/35_external_version.yml | 10 +- .../test/index/36_external_gte_version.yml | 10 +- .../rest-api-spec/test/index/40_routing.yml | 6 +- .../rest-api-spec/test/index/60_refresh.yml | 12 +- .../test/indices.flush/10_basic.yml | 2 +- .../test/indices.stats/10_index.yml | 4 +- .../test/indices.stats/11_metric.yml | 4 +- .../test/indices.stats/12_level.yml | 4 +- .../test/indices.stats/13_fields.yml | 4 +- .../test/indices.stats/14_groups.yml | 2 +- .../test/indices.stats/20_translog.yml | 6 +- .../test/indices.stats/30_segments.yml | 4 +- .../indices.stats/40_updates_on_refresh.yml | 6 +- .../rest-api-spec/test/mget/10_basic.yml | 8 +- .../test/mget/12_non_existent_index.yml | 6 +- .../test/mget/13_missing_metadata.yml | 6 +- .../mget/14_alias_to_multiple_indices.yml | 4 +- .../rest-api-spec/test/mget/15_ids.yml | 4 +- .../test/mget/17_default_index.yml | 8 +- 
.../test/mget/20_stored_fields.yml | 34 ++--- .../rest-api-spec/test/mget/40_routing.yml | 16 +- .../test/mget/60_realtime_refresh.yml | 2 +- .../test/mget/70_source_filtering.yml | 4 +- .../rest-api-spec/test/mget/80_deprecated.yml | 12 +- .../rest-api-spec/test/mlt/10_basic.yml | 4 +- .../rest-api-spec/test/mlt/20_docs.yml | 10 +- .../rest-api-spec/test/mlt/30_unlike.yml | 10 +- .../rest-api-spec/test/msearch/10_basic.yml | 8 +- .../test/mtermvectors/30_routing.yml | 16 +- .../rest-api-spec/test/range/10_basic.yml | 44 +++--- .../rest-api-spec/test/scroll/10_basic.yml | 26 ++-- .../rest-api-spec/test/scroll/11_clear.yml | 6 +- .../rest-api-spec/test/scroll/12_slices.yml | 18 +-- .../test/scroll/20_keep_alive.yml | 4 +- .../search.aggregation/100_avg_metric.yml | 8 +- .../search.aggregation/110_max_metric.yml | 8 +- .../search.aggregation/120_min_metric.yml | 8 +- .../search.aggregation/130_sum_metric.yml | 8 +- .../140_value_count_metric.yml | 8 +- .../search.aggregation/150_stats_metric.yml | 8 +- .../160_extended_stats_metric.yml | 8 +- .../170_cardinality_metric.yml | 8 +- .../180_percentiles_tdigest_metric.yml | 8 +- .../190_percentiles_hdr_metric.yml | 10 +- .../200_top_hits_metric.yml | 6 +- .../test/search.aggregation/20_terms.yml | 80 +++++----- .../search.aggregation/220_filters_bucket.yml | 10 +- .../test/search.aggregation/230_composite.yml | 40 ++--- .../search.aggregation/240_max_buckets.yml | 16 +- .../search.aggregation/260_weighted_avg.yml | 8 +- .../search.aggregation/280_geohash_grid.yml | 12 +- .../search.aggregation/280_rare_terms.yml | 44 +++--- .../search.aggregation/290_geotile_grid.yml | 12 +- .../test/search.aggregation/300_pipeline.yml | 8 +- .../test/search.aggregation/30_sig_terms.yml | 30 ++-- .../310_date_agg_per_day_of_week.yml | 2 +- .../test/search.aggregation/380_nested.yml | 4 +- .../390_geo_bounds_centroid.yml | 12 +- .../49_range_timezone_bug.yml | 2 +- .../test/search.highlight/10_unified.yml | 2 +- .../test/search.highlight/20_fvh.yml | 2 +- .../30_max_analyzed_offset.yml | 2 +- .../test/search.inner_hits/10_basic.yml | 8 +- .../search.inner_hits/20_highlighting.yml | 4 +- .../test/search/100_stored_fields.yml | 2 +- .../test/search/10_source_filtering.yml | 2 +- .../test/search/110_field_collapsing.yml | 16 +- .../test/search/120_batch_reduce_size.yml | 6 +- .../search/140_pre_filter_search_shards.yml | 12 +- .../search/150_rewrite_on_coordinator.yml | 8 +- .../test/search/160_exists_query.yml | 18 +-- .../161_exists_query_within_nested_query.yml | 16 +- .../test/search/190_index_prefix_search.yml | 2 +- .../test/search/200_ignore_malformed.yml | 6 +- .../test/search/200_index_phrase_search.yml | 2 +- .../test/search/20_default_values.yml | 4 +- .../test/search/220_total_hits_object.yml | 14 +- .../test/search/300_sequence_numbers.yml | 4 +- .../rest-api-spec/test/search/30_limits.yml | 2 +- .../test/search/310_match_bool_prefix.yml | 8 +- .../test/search/330_fetch_fields.yml | 46 +++--- .../test/search/340_flattened.yml | 12 +- .../test/search/350_binary_field.yml | 2 +- .../test/search/350_point_in_time.yml | 12 +- .../test/search/360_combined_fields.yml | 2 +- .../rest-api-spec/test/search/370_profile.yml | 4 +- .../test/search/390_doc_values_search.yml | 4 +- .../test/search/40_indices_boost.yml | 4 +- .../test/search/60_query_string.yml | 2 +- .../test/search/70_response_filtering.yml | 4 +- .../test/search/90_search_after.yml | 8 +- .../rest-api-spec/test/search/issue4895.yml | 2 +- .../test/suggest/20_completion.yml | 24 +-- 
.../rest-api-spec/test/suggest/30_context.yml | 26 ++-- .../50_completion_with_multi_fields.yml | 20 +-- .../test/termvectors/20_issue7121.yml | 4 +- .../test/termvectors/30_realtime.yml | 6 +- .../test/tsdb/90_unsupported_operations.yml | 6 +- .../rest-api-spec/test/update/10_doc.yml | 6 +- .../test/update/11_shard_header.yml | 4 +- .../rest-api-spec/test/update/12_result.yml | 8 +- .../test/update/13_legacy_doc.yml | 6 +- .../rest-api-spec/test/update/16_noop.yml | 6 +- .../test/update/20_doc_upsert.yml | 8 +- .../test/update/22_doc_as_upsert.yml | 8 +- .../test/update/35_if_seq_no.yml | 12 +- .../rest-api-spec/test/update/40_routing.yml | 8 +- .../rest-api-spec/test/update/60_refresh.yml | 12 +- .../test/update/80_source_filtering.yml | 2 +- .../test/update/85_fields_meta.yml | 4 +- .../rest-api-spec/test/update/90_error.yml | 2 +- .../test/update/95_require_alias.yml | 4 +- .../search.sort/10_nested_path_filter.yml | 4 +- .../test/search/10_cutoff_frequency.yml | 2 +- .../test/search/10_type_query.yml | 2 +- .../test/suite1/20_another_test.yml | 4 +- .../rest-api-spec/test/suite2/10_basic.yml | 4 +- .../rest-api-spec/test/eql/10_basic.yml | 14 +- .../test/eql/20_runtime_mappings.yml | 12 +- .../rest-api-spec/test/rollup/10_basic.yml | 8 +- .../rest-api-spec/test/clear_cache.yml | 6 +- .../rest-api-spec/test/indices_stats.yml | 6 +- .../resources/rest-api-spec/test/pit.yml | 6 +- .../rest-api-spec/test/repository.yml | 6 +- .../rest-api-spec/test/shared_cache_stats.yml | 6 +- .../resources/rest-api-spec/test/stats.yml | 6 +- .../rest-api-spec/test/50_geoline.yml | 6 +- .../test/aggregate-metrics/10_basic.yml | 16 +- .../test/constant_keyword/10_basic.yml | 6 +- .../constant_keyword/20_dynamic_mapping.yml | 6 +- .../10_data_stream_resolvability.yml | 6 +- .../rest-api-spec/test/graph/10_basic.yml | 4 +- .../test/ml/preview_datafeed.yml | 16 +- .../test/roles/11_idx_arrays.yml | 6 +- .../test/searchable_snapshots/10_usage.yml | 6 +- .../test/security/authz/10_index_doc.yml | 8 +- .../test/security/authz/11_delete_doc.yml | 12 +- .../test/security/authz/12_index_alias.yml | 8 +- .../test/security/authz/13_index_datemath.yml | 4 +- .../test/security/authz/14_cat_indices.yml | 6 +- .../test/security/authz/20_get_doc.yml | 10 +- .../security/authz/30_dynamic_put_mapping.yml | 4 +- .../authz/31_rollover_using_alias.yml | 6 +- .../test/security/authz/50_data_streams.yml | 14 +- .../test/security/authz/55_auto_configure.yml | 4 +- .../10_small_users_one_index.yml | 4 +- .../test/set_security_user/20_api_key.yml | 12 +- .../rest-api-spec/test/snapshot/10_basic.yml | 2 +- .../test/spatial/20_geo_centroid.yml | 12 +- .../test/spatial/30_geotile_grid.yml | 12 +- .../test/spatial/40_geohash_grid.yml | 12 +- .../test/spatial/60_geo_line.yml | 18 +-- .../test/spatial/70_script_doc_values.yml | 2 +- .../resources/rest-api-spec/test/sql/sql.yml | 6 +- .../rest-api-spec/test/sql/translate.yml | 2 +- .../test/terms_enum/10_basic.yml | 8 +- .../test/transform/preview_transforms.yml | 8 +- .../test/transform/transforms_cat_apis.yml | 8 +- .../test/vectors/10_dense_vector_basic.yml | 6 +- .../test/vectors/15_dense_vector_l1l2.yml | 6 +- .../vectors/20_dense_vector_special_cases.yml | 14 +- .../test/vectors/40_knn_search.yml | 4 +- .../vectors/50_dense_vector_field_usage.yml | 4 +- .../test/wildcard/10_wildcard_basic.yml | 6 +- .../test/freeze.gone/10_basic_compat.yml | 2 +- .../test/mustache/25_array_compare.yml | 8 +- .../test/mustache/30_search_input.yml | 8 +- .../test/mustache/40_search_transform.yml | 8 
+- .../test/mustache/50_webhook_url_escaping.yml | 2 +- .../security/20_test_run_as_execute_watch.yml | 6 +- .../rest-api-spec/test/10_reindex.yml | 26 ++-- .../test/15_reindex_from_remote.yml | 22 +-- .../rest-api-spec/test/20_update_by_query.yml | 18 +-- .../rest-api-spec/test/30_delete_by_query.yml | 16 +- .../test/mixed_cluster/10_basic.yml | 6 +- .../test/upgraded_cluster/10_basic.yml | 2 +- .../test/10_templated_role_query.yml | 4 +- .../test/11_templated_role_query_runas.yml | 4 +- .../rest-api-spec/test/30_search_template.yml | 4 +- 349 files changed, 1728 insertions(+), 1728 deletions(-) diff --git a/modules/aggs-matrix-stats/src/yamlRestTest/resources/rest-api-spec/test/stats/20_empty_bucket.yml b/modules/aggs-matrix-stats/src/yamlRestTest/resources/rest-api-spec/test/stats/20_empty_bucket.yml index 3fa6c87869234..0f53b03a84b70 100644 --- a/modules/aggs-matrix-stats/src/yamlRestTest/resources/rest-api-spec/test/stats/20_empty_bucket.yml +++ b/modules/aggs-matrix-stats/src/yamlRestTest/resources/rest-api-spec/test/stats/20_empty_bucket.yml @@ -16,13 +16,13 @@ - do: index: index: empty_bucket_idx - id: 1 + id: "1" body: { "value": 0, "val1": 3.1 } - do: index: index: empty_bucket_idx - id: 2 + id: "2" body: { "value": 2, "val1": -3.1 } - do: diff --git a/modules/aggs-matrix-stats/src/yamlRestTest/resources/rest-api-spec/test/stats/30_single_value_field.yml b/modules/aggs-matrix-stats/src/yamlRestTest/resources/rest-api-spec/test/stats/30_single_value_field.yml index 77e8bf6359f22..ac0c6eef0cf83 100644 --- a/modules/aggs-matrix-stats/src/yamlRestTest/resources/rest-api-spec/test/stats/30_single_value_field.yml +++ b/modules/aggs-matrix-stats/src/yamlRestTest/resources/rest-api-spec/test/stats/30_single_value_field.yml @@ -27,77 +27,77 @@ setup: - do: index: index: test - id: 1 + id: "1" body: { "val1": 1.9, "val2": 3.1, "val3": 2.3 } - do: index: index: test - id: 2 + id: "2" body: { "val1": -5.2, "val2": -3.4, "val3": 2.3} - do: index: index: test - id: 3 + id: "3" body: { "val1": -5.2, "val3": 2.3} - do: index: index: test - id: 4 + id: "4" body: { "val1": 18.3, "val2": 104.4, "val3": 2.3} - do: index: index: test - id: 5 + id: "5" body: { "val1": -53.2, "val2": -322.4, "val3": 2.3} - do: index: index: test - id: 6 + id: "6" body: { "val1": -578.9, "val2": 69.9, "val3": 2.3} - do: index: index: test - id: 7 + id: "7" body: { "val1": 16.2, "val2": 17.2, "val3": 2.3} - do: index: index: test - id: 8 + id: "8" body: { "val1": -4222.63, "val2": 316.44, "val3": 2.3} - do: index: index: test - id: 9 + id: "9" body: { "val1": -59999.55, "val2": -3163.4, "val3": 2.3} - do: index: index: test - id: 10 + id: "10" body: { "val1": 782.7, "val2": 789.7, "val3": 2.3} - do: index: index: test - id: 11 + id: "11" body: { "val1": -1.2, "val2": 6.3, "val3": 2.3} - do: index: index: test - id: 12 + id: "12" body: { "val1": 0, "val2": 1.11, "val3": 2.3} - do: index: index: test - id: 13 + id: "13" body: { "val1": 0.1, "val2": 0.92, "val3": 2.3} - do: index: index: test - id: 14 + id: "14" body: { "val1": 0.12, "val2": -82.4, "val3": 2.3} - do: index: index: test - id: 15 + id: "15" body: { "val1": 98.2, "val2": 32.4, "val3": 2.3} - do: diff --git a/modules/aggs-matrix-stats/src/yamlRestTest/resources/rest-api-spec/test/stats/40_multi_value_field.yml b/modules/aggs-matrix-stats/src/yamlRestTest/resources/rest-api-spec/test/stats/40_multi_value_field.yml index 467efce78a467..295ac2160f23c 100644 --- a/modules/aggs-matrix-stats/src/yamlRestTest/resources/rest-api-spec/test/stats/40_multi_value_field.yml 
+++ b/modules/aggs-matrix-stats/src/yamlRestTest/resources/rest-api-spec/test/stats/40_multi_value_field.yml
@@ -27,77 +27,77 @@ setup:
   - do:
       index:
         index: test
-        id: 1
+        id: "1"
         body: { "val1": 1.9, "val2": 3.1, "val3": 2.3, "vals" : [1.9, 16.143] }
   - do:
       index:
         index: test
-        id: 2
+        id: "2"
         body: { "val1": -5.2, "val2": -3.4, "val3": 2.3, "vals" : [155, 16.23]}
   - do:
       index:
         index: test
-        id: 3
+        id: "3"
         body: { "val1": -5.2, "val3": 2.3, "vals" : [-455, -32.32]}
   - do:
       index:
         index: test
-        id: 4
+        id: "4"
         body: { "val1": 18.3, "val2": 104.4, "val3": 2.3, "vals" : [0.14, 92.1]}
   - do:
       index:
         index: test
-        id: 5
+        id: "5"
         body: { "val1": -53.2, "val2": -322.4, "val3": 2.3, "vals" : [16, 16]}
   - do:
       index:
         index: test
-        id: 6
+        id: "6"
         body: { "val1": -578.9, "val2": 69.9, "val3": 2.3}
   - do:
       index:
         index: test
-        id: 7
+        id: "7"
         body: { "val1": 16.2, "val2": 17.2, "val3": 2.3, "vals" : [1234.3, -3433]}
   - do:
       index:
         index: test
-        id: 8
+        id: "8"
         body: { "val1": -4222.63, "val2": 316.44, "val3": 2.3, "vals" : [177.2, -93.333]}
   - do:
       index:
         index: test
-        id: 9
+        id: "9"
         body: { "val1": -59999.55, "val2": -3163.4, "val3": 2.3, "vals" : [-29.9, 163.0]}
   - do:
       index:
         index: test
-        id: 10
+        id: "10"
         body: { "val1": 782.7, "val2": 789.7, "val3": 2.3, "vals" : [-0.2, 1343.3]}
   - do:
       index:
         index: test
-        id: 11
+        id: "11"
         body: { "val1": -1.2, "val2": 6.3, "val3": 2.3, "vals" : [15.3, 16.9]}
   - do:
       index:
         index: test
-        id: 12
+        id: "12"
         body: { "val1": 0, "val2": 1.11, "val3": 2.3, "vals" : [-644.4, -644.4]}
   - do:
       index:
         index: test
-        id: 13
+        id: "13"
         body: { "val1": 0.1, "val2": 0.92, "val3": 2.3, "vals" : [73.2, 0.12]}
   - do:
       index:
         index: test
-        id: 14
+        id: "14"
         body: { "val1": 0.12, "val2": -82.4, "val3": 2.3, "vals" : [-0.001, 1295.3]}
   - do:
       index:
         index: test
-        id: 15
+        id: "15"
         body: { "val1": 98.2, "val2": 32.4, "val3": 2.3, "vals" : [15.5, 16.5]}
   - do:
diff --git a/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/search.query/10_match.yml b/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/search.query/10_match.yml
index 6609eb831b226..543a806b92153 100644
--- a/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/search.query/10_match.yml
+++ b/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/search.query/10_match.yml
@@ -33,7 +33,7 @@
   - do:
       index:
         index: test
-        id: 1
+        id: "1"
         body: { "text": "the fox runs across the street" }
         refresh: true
@@ -51,7 +51,7 @@
   - do:
       index:
         index: test
-        id: 2
+        id: "2"
         body: { "text": "run fox run" }
         refresh: true
diff --git a/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/search.query/20_ngram_search.yml b/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/search.query/20_ngram_search.yml
index 495932016966d..3993c94937f37 100644
--- a/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/search.query/20_ngram_search.yml
+++ b/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/search.query/20_ngram_search.yml
@@ -25,7 +25,7 @@
   - do:
       index:
         index: test
-        id: 1
+        id: "1"
         body: { "text": "foo bar baz" }
         refresh: true
@@ -71,7 +71,7 @@
   - do:
       index:
         index: test
-        id: 1
+        id: "1"
         body: { "origin": "C.A1234.5678" }
         refresh: true
diff --git a/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/search.query/30_ngram_highligthing.yml b/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/search.query/30_ngram_highligthing.yml
index 674a6ab438069..5f4545e12e0e1 100644
--- a/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/search.query/30_ngram_highligthing.yml
+++ b/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/search.query/30_ngram_highligthing.yml
@@ -43,7 +43,7 @@
   - do:
       index:
         index: test
-        id: 1
+        id: "1"
         refresh: true
         body:
           name: logicacmg ehemals avinci - the know how company
diff --git a/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/search.query/40_query_string.yml b/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/search.query/40_query_string.yml
index 4ba16007664f1..1d308d766f514 100644
--- a/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/search.query/40_query_string.yml
+++ b/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/search.query/40_query_string.yml
@@ -14,7 +14,7 @@
   - do:
       index:
         index: test
-        id: 1
+        id: "1"
         body: { field: foo bar}
   - do:
@@ -41,7 +41,7 @@
   - do:
       explain:
         index: test
-        id: 1
+        id: "1"
         q: field:bars
         analyzer: snowball
diff --git a/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/search.query/41_query_string_with_default_analyzer.yml b/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/search.query/41_query_string_with_default_analyzer.yml
index 6b373d41e8e12..5839d35b44c5a 100644
--- a/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/search.query/41_query_string_with_default_analyzer.yml
+++ b/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/search.query/41_query_string_with_default_analyzer.yml
@@ -15,7 +15,7 @@
   - do:
       index:
         index: test
-        id: 1
+        id: "1"
         body:
           body: Ich lese die Bücher
diff --git a/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/search.query/50_queries_with_synonyms.yml b/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/search.query/50_queries_with_synonyms.yml
index dca56565e6954..083251fe782fc 100644
--- a/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/search.query/50_queries_with_synonyms.yml
+++ b/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/search.query/50_queries_with_synonyms.yml
@@ -26,7 +26,7 @@
   - do:
       index:
         index: test
-        id: 3
+        id: "3"
         body:
           field1: quick lazy huge brown pidgin
           field2: the quick lazy huge brown fox jumps over the tree
@@ -34,14 +34,14 @@
   - do:
       index:
         index: test
-        id: 1
+        id: "1"
         body:
           field1: the quick brown fox
   - do:
       index:
         index: test
-        id: 2
+        id: "2"
         body:
           field1: the quick lazy huge brown fox jumps over the tree
         refresh: true
@@ -133,7 +133,7 @@
   - do:
       index:
         index: test
-        id: 1
+        id: "1"
         body:
           text: quick brown fox
         refresh: true
@@ -174,7 +174,7 @@
   - do:
       index:
         index: test
-        id: 2
+        id: "2"
         body:
           text: fast brown fox
         refresh: true
diff --git a/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/search.query/60_synonym_graph.yml b/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/search.query/60_synonym_graph.yml
index ae039e453be6c..58c7df39b65b5 100644
--- a/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/search.query/60_synonym_graph.yml
+++ b/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/search.query/60_synonym_graph.yml
@@ -34,41 +34,41 @@ setup:
   - do:
       index:
         index: test
-        id: 1
+        id: "1"
         body:
           text: say wtf happened foo
   - do:
       index:
         index: test
-        id: 2
+        id: "2"
         body:
           text: bar baz what the fudge man
   - do:
       index:
         index: test
-        id: 3
+        id: "3"
         body:
           text: wtf
   - do:
       index:
         index: test
-        id: 4
+        id: "4"
         body:
           text: what is the name for fudge
   - do:
       index:
         index: test
-        id: 5
+        id: "5"
         body:
           text: bar two three
   - do:
       index:
         index: test
-        id: 6
+        id: "6"
         body:
           text: bar baz two three
         refresh: true
@@ -180,14 +180,14 @@ setup:
   - do:
       index:
         index: test
-        id: 7
+        id: "7"
         body:
           text: "WTFD!"
   - do:
       index:
         index: test
-        id: 8
+        id: "8"
         body:
           text: "Weird Al's WHAT THE FUDGESICLE"
         refresh: true
@@ -214,7 +214,7 @@ setup:
   - do:
       index:
         index: test
-        id: 9
+        id: "9"
         body:
           phrase_field: "bar baz"
         refresh: true
diff --git a/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/search.suggest/30_synonyms.yml b/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/search.suggest/30_synonyms.yml
index 8b67abb193aa0..72539dfd0b618 100644
--- a/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/search.suggest/30_synonyms.yml
+++ b/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/search.suggest/30_synonyms.yml
@@ -24,7 +24,7 @@
   - do:
       index:
         index: test
-        id: 1
+        id: "1"
         body:
           field:
             input: [ "Foo Fighters" ]
diff --git a/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/termvectors/10_payloads.yml b/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/termvectors/10_payloads.yml
index cba4370943206..85e57379bdffe 100644
--- a/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/termvectors/10_payloads.yml
+++ b/modules/analysis-common/src/yamlRestTest/resources/rest-api-spec/test/termvectors/10_payloads.yml
@@ -23,7 +23,7 @@
   - do:
       index:
         index: test
-        id: 1
+        id: "1"
         refresh: true
         body:
           text: The quick brown fox is brown.
@@ -31,7 +31,7 @@
   - do:
       termvectors:
         index: test
-        id: 1
+        id: "1"
         payloads: true
   - match: {term_vectors.text.field_statistics.sum_doc_freq: 5}
   - match: {term_vectors.text.terms.brown.tokens.0.payload: PEFMUEhBTlVNPg==}
diff --git a/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/100_delete_by_query.yml b/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/100_delete_by_query.yml
index 29e2ec83cab19..8b76faf6c44c2 100644
--- a/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/100_delete_by_query.yml
+++ b/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/100_delete_by_query.yml
@@ -22,7 +22,7 @@
   - do:
       index:
        index: simple-data-stream1
-       id: 1
+       id: "1"
        op_type: create
        body:
          foo: bar
diff --git a/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/110_update_by_query.yml b/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/110_update_by_query.yml
index b8323d2276395..027b0b1f94050 100644
--- a/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/110_update_by_query.yml
+++ b/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/110_update_by_query.yml
@@ -22,7 +22,7 @@
   - do:
       index:
        index: simple-data-stream1
-       id: 1
+       id: "1"
        op_type: create
        body: { "number": 4, '@timestamp': '2020-12-12' }
@@ -39,7 +39,7 @@
   - do:
       index:
        index: simple-data-stream1
-       id: 2
+       id: "2"
        op_type: create
        body: { "number": 1, '@timestamp': '2020-12-12' }
@@ -56,7 +56,7 @@
   - do:
       index:
        index: simple-data-stream1
-       id: 3
+       id: "3"
        op_type: create
        body: { "number": 5, '@timestamp': '2020-12-12' }
diff --git a/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/20_unsupported_apis.yml b/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/20_unsupported_apis.yml
index a3461b5c2c1db..32eb908331026 100644
--- a/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/20_unsupported_apis.yml
+++ b/modules/data-streams/src/yamlRestTest/resources/rest-api-spec/test/data_stream/20_unsupported_apis.yml
@@ -227,7 +227,7 @@
           - '@timestamp': '2020-12-12'
       - delete:
           _index: logs-foobar
-          _id: 10
+          _id: "10"
   - match: { errors: true }
   - match: { items.0.index.status: 400 }
   - match: { items.0.index.error.type: illegal_argument_exception }
diff --git a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/100_date_index_name_processor.yml b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/100_date_index_name_processor.yml
index 80598adf5f567..fe880181b5896 100644
--- a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/100_date_index_name_processor.yml
+++ b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/100_date_index_name_processor.yml
@@ -27,7 +27,7 @@ teardown:
   - do:
       index:
         index: events
-        id: 1
+        id: "1"
         pipeline: "1"
         body: {
           date: "2016-04-22T16:32:14.968Z"
diff --git a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/110_sort.yml b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/110_sort.yml
index 3c24d93ad8e58..45ea9a618d305 100644
--- a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/110_sort.yml
+++ b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/110_sort.yml
@@ -26,7 +26,7 @@ teardown:
   - do:
       index:
         index: test
-        id: 1
+        id: "1"
         pipeline: "my_pipeline"
         body: >
           {
@@ -36,5 +36,5 @@ teardown:
   - do:
       get:
         index: test
-        id: 1
+        id: "1"
   - match: { _source.values: ["bar", "baz", "foo"] }
diff --git a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/120_grok.yml b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/120_grok.yml
index e09ba43506f78..75a15fd415e40 100644
--- a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/120_grok.yml
+++ b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/120_grok.yml
@@ -27,14 +27,14 @@ teardown:
   - do:
       index:
         index: test
-        id: 1
+        id: "1"
         pipeline: "my_pipeline"
         body: {field1: "123.42 400 "}
   - do:
       get:
         index: test
-        id: 1
+        id: "1"
   - match: { _source.val: 123.42 }
   - match: { _source.status: 400 }
   - match: { _source.msg: "foo" }
@@ -64,14 +64,14 @@ teardown:
   - do:
       index:
         index: test
-        id: 1
+        id: "1"
         pipeline: "my_pipeline"
         body: {field1: ""}
   - do:
       get:
         index: test
-        id: 1
+        id: "1"
   - match: { _source.msg: "foo" }
 ---
@@ -99,14 +99,14 @@ teardown:
   - do:
       index:
         index: test
-        id: 1
+        id: "1"
         pipeline: "my_pipeline"
         body: {field1: ""}
   - do:
       get:
         index: test
-        id: 1
+        id: "1"
   - match: { _source.msg: "foo" }
 ---
diff --git a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/130_escape_dot.yml b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/130_escape_dot.yml
index f60a6946c2928..5ac3967b27b01 100644
--- a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/130_escape_dot.yml
+++ b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/130_escape_dot.yml
@@ -29,7 +29,7 @@ teardown:
   - do:
       index:
         index: test
-        id: 1
+        id: "1"
         pipeline: "1"
         body: {
           foo.bar: "baz"
@@ -38,7 +38,7 @@ teardown:
   - do:
       get:
         index: test
-        id: 1
+        id: "1"
   - match: { _source.foo.bar: "baz" }
 ---
 "Test escape_dot processor with override and wildcard":
@@ -61,7 +61,7 @@ teardown:
   - do:
       index:
         index: test
-        id: 2
+        id: "2"
         pipeline: "2"
         body: {
           foo.bar: "baz",
@@ -74,6 +74,6 @@ teardown:
   - do:
       get:
         index: test
-        id: 2
+        id: "2"
   - match: { _source.foo.bar: "baz" }
   - match: { _source.foo.qux: "quux" }
diff --git a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/140_json.yml b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/140_json.yml
index 746858a673531..60208a52aba3e 100644
--- a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/140_json.yml
+++ b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/140_json.yml
@@ -58,7 +58,7 @@ teardown:
   - do:
       index:
         index: test
-        id: 1
+        id: "1"
         pipeline: "1"
         body: {
           foo_object: "{\"hello\": \"world\"}",
@@ -72,7 +72,7 @@ teardown:
   - do:
       get:
         index: test
-        id: 1
+        id: "1"
   - match: { _source.foo_object.hello: "world" }
   - match: { _source.foo_array.0: 1 }
   - match: { _source.foo_string: "bla bla" }
@@ -101,7 +101,7 @@ teardown:
   - do:
       index:
         index: test
-        id: 2
+        id: "2"
         pipeline: "2"
         body: {
           json: "{\"dupe\": 1, \"dupe\": 2}",
@@ -110,7 +110,7 @@ teardown:
   - do:
       get:
         index: test
-        id: 2
+        id: "2"
   - match: { _source.dupe: 2 }
 ---
@@ -134,7 +134,7 @@ teardown:
   - do:
       index:
         index: test
-        id: 3
+        id: "3"
         pipeline: "3"
         body: {
           json: "{\"foo\": {\"bar\": \"baz\"} }",
@@ -147,6 +147,6 @@ teardown:
   - do:
       get:
         index: test
-        id: 3
+        id: "3"
   - match: { _source.foo.bar: "baz" }
   - match: { _source.foo.qux: "quux" }
diff --git a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/150_kv.yml b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/150_kv.yml
index 486739e49283c..5dfa8052cd7a4 100644
--- a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/150_kv.yml
+++ b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/150_kv.yml
@@ -27,7 +27,7 @@ teardown:
   - do:
       index:
         index: test
-        id: 1
+        id: "1"
         pipeline: "1"
         body: {
           foo: "goodbye=everybody hello=world"
@@ -36,7 +36,7 @@ teardown:
   - do:
       get:
         index: test
-        id: 1
+        id: "1"
   - match: { _source.goodbye: "everybody" }
   - match: { _source.hello: "world" }
@@ -78,7 +78,7 @@ teardown:
   - do:
       index:
         index: test
-        id: 1
+        id: "1"
         pipeline: "1"
         body: {
           origin: "field1",
@@ -89,7 +89,7 @@ teardown:
   - do:
       get:
         index: test
-        id: 1
+        id: "1"
   - match: { _source.bar.goodbye: "everybody" }
   - match: { _source.bar.hello: "world" }
   - match: { _source.goodbye: "everybody" }
diff --git a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/160_urldecode.yml b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/160_urldecode.yml
index dc428d989a76f..3ad13d2cee1f2 100644
--- a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/160_urldecode.yml
+++ b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/160_urldecode.yml
@@ -25,7 +25,7 @@ teardown:
   - do:
       index:
         index: test
-        id: 1
+        id: "1"
         pipeline: "1"
         body: {
           my_url: "https%3a%2f%2felastic.co%2f"
@@ -34,5 +34,5 @@ teardown:
   - do:
       get:
         index: test
-        id: 1
+        id: "1"
   - match: { _source.my_url: "https://elastic.co/" }
diff --git a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/170_version.yml b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/170_version.yml
index b57cbbe3b7fb3..822272526eb56 100644
--- a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/170_version.yml
+++ b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/170_version.yml
@@ -61,14 +61,14 @@ teardown:
       catch: conflict
       index:
         index: test
-        id: 1
+        id: "1"
         pipeline: "my_pipeline1"
         body: {}
   - do:
       index:
         index: test
-        id: 1
+        id: "1"
         pipeline: "my_pipeline2"
         body: {}
   - match: { _version: 1 }
diff --git a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/180_bytes_processor.yml b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/180_bytes_processor.yml
index 1deeaa1edf7e3..4db194130c2af 100644
--- a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/180_bytes_processor.yml
+++ b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/180_bytes_processor.yml
@@ -27,13 +27,13 @@ teardown:
   - do:
       index:
         index: test
-        id: 1
+        id: "1"
         pipeline: "my_pipeline"
         body: {bytes_source_field: "1kb"}
   - do:
       get:
         index: test
-        id: 1
+        id: "1"
   - match: { _source.bytes_source_field: "1kb" }
   - match: { _source.bytes_target_field: 1024 }
diff --git a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/190_script_processor.yml b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/190_script_processor.yml
index 85ef086ace51e..75f6867083498 100644
--- a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/190_script_processor.yml
+++ b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/190_script_processor.yml
@@ -27,14 +27,14 @@ teardown:
   - do:
       index:
         index: test
-        id: 1
+        id: "1"
         pipeline: "my_pipeline"
         body: {source_field: "1kb"}
   - do:
       get:
         index: test
-        id: 1
+        id: "1"
   - match: { _source.source_field: "1kb" }
   - match: { _source.target_field: 1024 }
@@ -60,14 +60,14 @@ teardown:
   - do:
       index:
         index: test
-        id: 1
+        id: "1"
         pipeline: "my_pipeline"
         body: {source_field: "FooBar"}
   - do:
       get:
         index: test
-        id: 1
+        id: "1"
   - match: { _source.source_field: "FooBar" }
   - match: { _source.target_field: "foobar" }
@@ -93,14 +93,14 @@ teardown:
   - do:
       index:
         index: test
-        id: 1
+        id: "1"
         pipeline: "my_pipeline"
         body: {source_field: "FooBar"}
   - do:
       get:
         index: test
-        id: 1
+        id: "1"
   - match: { _source.source_field: "FooBar" }
   - match: { _source.target_field: "FOOBAR" }
@@ -126,14 +126,14 @@ teardown:
   - do:
       index:
         index: test
-        id: 1
+        id: "1"
         pipeline: "my_pipeline"
         body: {source_field: "{\"foo\":\"bar\"}"}
   - do:
       get:
         index: test
-        id: 1
+        id: "1"
   - match: { _source.source_field: "{\"foo\":\"bar\"}" }
   - match: { _source.target_field.foo: "bar" }
@@ -159,14 +159,14 @@ teardown:
   - do:
       index:
         index: test
-        id: 1
+        id: "1"
         pipeline: "my_pipeline"
         body: {source_field: "{\"foo\":\"bar\"}"}
   - do:
       get:
         index: test
-        id: 1
+        id: "1"
   - match: { _source.source_field: "{\"foo\":\"bar\"}" }
   - match: { _source.foo: "bar" }
@@ -192,14 +192,14 @@ teardown:
   - do:
       index:
         index: test
-        id: 1
+        id: "1"
         pipeline: "my_pipeline"
         body: {source_field: "foo%20bar"}
   - do:
       get:
         index: test
-        id: 1
+        id: "1"
   - match: { _source.source_field: "foo%20bar" }
   - match: { _source.target_field: "foo bar" }
@@ -231,14 +231,14 @@ teardown:
   - do:
       index:
         index: test
-        id: 1
+        id: "1"
         pipeline: "my_pipeline"
         body: {source_field: "foo"}
   - do:
       get:
         index: test
-        id: 1
+        id: "1"
   - match: { _source.source_field: "foo" }
   - match: { _source.target_field1: "1:hTSGlFQnR58UCk+NfKRZzA32dPg=" }
   - match: { _source.target_field2: "1:LQU9qZlK+B5F3KDmev6m5PMibrg=" }
@@ -265,14 +265,14 @@ teardown:
   - do:
       index:
         index: test
-        id: 1
+        id: "1"
         pipeline: "my_pipeline"
         body: {source_field: "http://www.example.com/index.html"}
   - do:
       get:
         index: test
-        id: 1
+        id: "1"
   - match: { _source.source_field: "http://www.example.com/index.html" }
   - match: { _source.target_field.scheme: "http" }
   - match: { _source.target_field.domain: "www.example.com" }
diff --git a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/200_default_pipeline.yml b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/200_default_pipeline.yml
index cee76d0eaca64..b35fd38f08ae3 100644
--- a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/200_default_pipeline.yml
+++ b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/200_default_pipeline.yml
@@ -37,33 +37,33 @@ teardown:
   - do:
       index:
         index: test
-        id: 1
+        id: "1"
         body: {bytes_source_field: "1kb"}
   - do:
       get:
         index: test
-        id: 1
+        id: "1"
   - match: { _source.bytes_source_field: "1kb" }
   - match: { _source.bytes_target_field: 1024 }
 # default pipeline via alias
   - do:
       index:
         index: test_alias
-        id: 2
+        id: "2"
         body: {bytes_source_field: "1kb"}
   - do:
       get:
         index: test
-        id: 2
+        id: "2"
   - match: { _source.bytes_source_field: "1kb" }
   - match: { _source.bytes_target_field: 1024 }
 # default pipeline via upsert
   - do:
       update:
         index: test
-        id: 3
+        id: "3"
         body:
           script:
             source: "ctx._source.ran_script = true"
@@ -72,14 +72,14 @@ teardown:
   - do:
       get:
         index: test
-        id: 3
+        id: "3"
   - match: { _source.bytes_source_field: "1kb" }
   - match: { _source.bytes_target_field: 1024 }
 # default pipeline via scripted upsert
   - do:
       update:
         index: test
-        id: 4
+        id: "4"
         body:
           script:
             source: "ctx._source.bytes_source_field = '1kb'"
@@ -89,21 +89,21 @@ teardown:
   - do:
       get:
         index: test
-        id: 4
+        id: "4"
   - match: { _source.bytes_source_field: "1kb" }
   - match: { _source.bytes_target_field: 1024 }
 # default pipeline via doc_as_upsert
   - do:
       update:
         index: test
-        id: 5
+        id: "5"
         body:
           doc: { "bytes_source_field":"1kb" }
           doc_as_upsert: true
   - do:
       get:
         index: test
-        id: 5
+        id: "5"
   - match: { _source.bytes_source_field: "1kb" }
   - match: { _source.bytes_target_field: 1024 }
 # default pipeline via bulk upsert
@@ -169,14 +169,14 @@ teardown:
   - do:
       index:
         index: test
-        id: 9
+        id: "9"
         pipeline: "_none"
         body: {bytes_source_field: "1kb"}
   - do:
       get:
         index: test
-        id: 9
+        id: "9"
   - match: { _source.bytes_source_field: "1kb" }
   - is_false: _source.bytes_target_field
 # bad request
@@ -184,7 +184,7 @@ teardown:
       catch: bad_request
       index:
         index: test
-        id: 10
+        id: "10"
         pipeline: ""
         body: {bytes_source_field: "1kb"}
diff --git a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/200_dissect_processor.yml b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/200_dissect_processor.yml
index 916a7fe656cc2..b170f282ec7f6 100644
--- a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/200_dissect_processor.yml
+++ b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/200_dissect_processor.yml
@@ -27,14 +27,14 @@ teardown:
   - do:
       index:
         index: test
-        id: 1
+        id: "1"
         pipeline: "my_pipeline"
         body: {message: "foo bar baz"}
   - do:
       get:
         index: test
-        id: 1
+        id: "1"
   - match: { _source.message: "foo bar baz" }
   - match: { _source.a: "foo" }
   - match: { _source.b: "bar" }
@@ -62,7 +62,7 @@ teardown:
       catch: '/Unable to find match for dissect pattern: \%\{a\},\%\{b\},\%\{c\} against source: foo bar baz/'
       index:
         index: test
-        id: 2
+        id: "2"
         pipeline: "my_pipeline"
         body: {message: "foo bar baz"}
diff --git a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/210_conditional_processor.yml b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/210_conditional_processor.yml
index 7b0999e4e2980..8ad2be2b41fe4 100644
--- a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/210_conditional_processor.yml
+++ b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/210_conditional_processor.yml
@@ -28,14 +28,14 @@ teardown:
   - do:
       index:
         index: test
-        id: 1
+        id: "1"
         pipeline: "my_pipeline"
         body: {bytes_source_field: "1kb", conditional_field: "bar"}
   - do:
       get:
         index: test
-        id: 1
+        id: "1"
   - match: { _source.bytes_source_field: "1kb" }
   - match: { _source.conditional_field: "bar" }
   - match: { _source.bytes_target_field: 1024 }
@@ -63,14 +63,14 @@ teardown:
   - do:
       index:
         index: test
-        id: 1
+        id: "1"
         pipeline: "my_pipeline"
         body: {bytes_source_field: "1kb", conditional_field: "bar"}
   - do:
       get:
         index: test
-        id: 1
+        id: "1"
   - match: { _source.bytes_source_field: "1kb" }
   - match: { _source.conditional_field: "bar" }
   - is_false: _source.bytes_target_field
diff --git a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/210_pipeline_processor.yml b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/210_pipeline_processor.yml
index bc82b7f1ca7e1..71e2ea3b9ea79 100644
--- a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/210_pipeline_processor.yml
+++ b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/210_pipeline_processor.yml
@@ -54,14 +54,14 @@ teardown:
   - do:
       index:
         index: test
-        id: 1
+        id: "1"
         pipeline: "outer"
         body: {}
   - do:
       get:
         index: test
-        id: 1
+        id: "1"
   - match: { _source.foo: "bar" }
   - match: { _source.baz: "blub" }
@@ -103,7 +103,7 @@ teardown:
       catch: /illegal_state_exception/
       index:
         index: test
-        id: 1
+        id: "1"
         pipeline: "outer"
         body: {}
   - match: { error.root_cause.0.type: "illegal_state_exception" }
@@ -161,7 +161,7 @@ teardown:
   - do:
       index:
         index: test
-        id: 1
+        id: "1"
         pipeline: "outer"
         body: >
           {
@@ -171,13 +171,13 @@ teardown:
   - do:
       get:
         index: test
-        id: 1
+        id: "1"
   - match: { _source.manager: "john" }
   - do:
       index:
         index: test
-        id: 2
+        id: "2"
         pipeline: "outer"
         body: >
           {
@@ -187,14 +187,14 @@ teardown:
   - do:
       get:
         index: test
-        id: 2
+        id: "2"
   - match: { _source.manager: "jan" }
   - do:
       catch: /illegal_state_exception/
       index:
         index: test
-        id: 3
+        id: "3"
         pipeline: "outer"
         body: >
           {
@@ -266,7 +266,7 @@ teardown:
   - do:
       index:
         index: test
-        id: 1
+        id: "1"
         pipeline: "pipeline1"
         body: >
           {
@@ -275,7 +275,7 @@ teardown:
   - do:
       get:
         index: test
-        id: 1
+        id: "1"
   - length: { _source.pipelines: 3 }
   - match: { _source.pipelines.0: "pipeline1" }
   - match: { _source.pipelines.1: "another_pipeline" }
diff --git a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/220_drop_processor.yml b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/220_drop_processor.yml
index 77a1df81a296a..dcf201666dfeb 100644
--- a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/220_drop_processor.yml
+++ b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/220_drop_processor.yml
@@ -26,7 +26,7 @@ teardown:
   - do:
       index:
         index: test
-        id: 1
+        id: "1"
         pipeline: "my_pipeline"
         body: {
           foo: "bar"
@@ -35,7 +35,7 @@ teardown:
   - do:
       index:
         index: test
-        id: 2
+        id: "2"
         pipeline: "my_pipeline"
         body: {
           foo: "blub"
@@ -45,13 +45,13 @@ teardown:
       catch: missing
       get:
         index: test
-        id: 1
+        id: "1"
   - match: { found: false }
   - do:
       get:
         index: test
-        id: 2
+        id: "2"
   - match: { _source.foo: "blub" }
 ---
@@ -80,7 +80,7 @@ teardown:
   - do:
       index:
         index: test
-        id: 3
+        id: "3"
         pipeline: "my_pipeline_with_failure"
         body: {
           foo: "bar"
@@ -90,5 +90,5 @@ teardown:
       catch: missing
       get:
         index: test
-        id: 3
+        id: "3"
diff --git a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/230_change_target_index.yml b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/230_change_target_index.yml
index bb2677f9b193f..fde49c280fae2 100644
--- a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/230_change_target_index.yml
+++ b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/230_change_target_index.yml
@@ -39,7 +39,7 @@ teardown:
   - do:
       index:
         index: test
-        id: 1
+        id: "1"
         pipeline: "retarget"
         body: {
           a: true
@@ -48,7 +48,7 @@ teardown:
   - do:
       get:
         index: foo
-        id: 1
+        id: "1"
   - match: { _source.a: true }
 # only the foo index
@@ -98,7 +98,7 @@ teardown:
   - do:
       index:
         index: test
-        id: 1
+        id: "1"
         body: {
           a: true
         }
@@ -106,7 +106,7 @@ teardown:
   - do:
       get:
         index: foo
-        id: 1
+        id: "1"
   - match: { _source.a: true }
 # only the foo index
diff --git a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/240_required_pipeline.yml b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/240_required_pipeline.yml
index dafbe0510c321..db4e0e42de78a 100644
--- a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/240_required_pipeline.yml
+++ b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/240_required_pipeline.yml
@@ -52,33 +52,33 @@ teardown:
   - do:
       index:
         index: test
-        id: 1
+        id: "1"
         body: {bytes_source_field: "1kb"}
   - do:
       get:
         index: test
-        id: 1
+        id: "1"
   - match: { _source.bytes_source_field: "1kb" }
   - match: { _source.bytes_target_field: 1024 }
 # final pipeline via alias
   - do:
       index:
         index: test_alias
-        id: 2
+        id: "2"
         body: {bytes_source_field: "1kb"}
   - do:
       get:
         index: test
-        id: 2
+        id: "2"
   - match: { _source.bytes_source_field: "1kb" }
   - match: { _source.bytes_target_field: 1024 }
 # final pipeline via upsert
   - do:
       update:
         index: test
-        id: 3
+        id: "3"
         body:
           script:
             source: "ctx._source.ran_script = true"
@@ -87,14 +87,14 @@ teardown:
   - do:
       get:
         index: test
-        id: 3
+        id: "3"
   - match: { _source.bytes_source_field: "1kb" }
   - match: { _source.bytes_target_field: 1024 }
 # final pipeline via scripted upsert
   - do:
       update:
         index: test
-        id: 4
+        id: "4"
         body:
           script:
             source: "ctx._source.bytes_source_field = '1kb'"
@@ -104,21 +104,21 @@ teardown:
   - do:
       get:
         index: test
-        id: 4
+        id: "4"
   - match: { _source.bytes_source_field: "1kb" }
   - match: { _source.bytes_target_field: 1024 }
 # final pipeline via doc_as_upsert
   - do:
       update:
         index: test
-        id: 5
+        id: "5"
         body:
           doc: { "bytes_source_field":"1kb" }
           doc_as_upsert: true
   - do:
       get:
         index: test
-        id: 5
+        id: "5"
   - match: { _source.bytes_source_field: "1kb" }
   - match: { _source.bytes_target_field: 1024 }
 # final pipeline via bulk upsert
@@ -258,7 +258,7 @@ teardown:
   - do:
       index:
         index: index_with_final_pipeline_1
-        id: 1
+        id: "1"
         pipeline: "change_target_index"
         body: {foo: "bar"}
@@ -267,14 +267,14 @@ teardown:
 # document does not exist in the original target index
   - do:
       catch: missing
       get:
         index: index_with_final_pipeline_1
-        id: 1
+        id: "1"
   - match: { found: false }
 # document present in re-targeted index and re-targeted index's final pipeline was executed
   - do:
       get:
         index: index_with_final_pipeline_2
-        id: 1
+        id: "1"
   - match: { _source.foo: "bar" }
   - match: { _source.final_pipeline_2: true }
   - is_false: _source.final_pipeline_1
diff --git a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/250_csv.yml b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/250_csv.yml
index a38805fb1fec3..ce7ca3866147d 100644
--- a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/250_csv.yml
+++ b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/250_csv.yml
@@ -27,7 +27,7 @@ teardown:
   - do:
       index:
         index: test
-        id: 1
+        id: "1"
         pipeline: "my_pipeline"
         body: >
           {
@@ -37,7 +37,7 @@ teardown:
   - do:
       get:
         index: test
-        id: 1
+        id: "1"
   - match: { _source.a: "aa" }
   - match: { _source.b: "bb" }
   - match: { _source.c: "cc" }
@@ -66,7 +66,7 @@ teardown:
   - do:
       index:
         index: test
-        id: 1
+        id: "1"
         pipeline: "my_pipeline"
         body: >
           {
@@ -76,7 +76,7 @@ teardown:
   - do:
       get:
         index: test
-        id: 1
+        id: "1"
   - match: { _source.a: "aa" }
   - match: { _source.b: "b;b" }
   - match: { _source.c: "cc" }
@@ -107,7 +107,7 @@ teardown:
   - do:
       index:
         index: test
-        id: 1
+        id: "1"
         pipeline: "my_pipeline"
         body: >
           {
@@ -117,7 +117,7 @@ teardown:
   - do:
       get:
         index: test
-        id: 1
+        id: "1"
   - match: { _source.a: "aa" }
   - match: { _source.b: "bb" }
   - match: { _source.c: "cc" }
@@ -146,7 +146,7 @@ teardown:
   - do:
       index:
         index: test
-        id: 1
+        id: "1"
         pipeline: "my_pipeline"
         body: >
           {
@@ -156,7 +156,7 @@ teardown:
   - do:
       get:
         index: test
-        id: 1
+        id: "1"
   - match: { _source.date: "2018-01-06 16:56:14.295748" }
   - match: { _source.level: "INFO" }
   - match: { _source.server: "VirtualServer" }
diff --git a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/260_seq_no.yml b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/260_seq_no.yml
index 7c8c7d1c67750..16e527d409c14 100644
--- a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/260_seq_no.yml
+++ b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/260_seq_no.yml
@@ -38,7 +38,7 @@ teardown:
       catch: conflict
       index:
         index: test
-        id: 1
+        id: "1"
         pipeline: "my_pipeline"
         body: {}
   - match: { error.root_cause.0.type: "version_conflict_engine_exception" }
@@ -47,7 +47,7 @@ teardown:
   - do:
       index:
         index: test
-        id: 1
+        id: "1"
         body: {}
   - match: { _seq_no: 0 }
   - match: { _primary_term: 1 }
@@ -55,7 +55,7 @@ teardown:
   - do:
       index:
         index: test
-        id: 1
+        id: "1"
         pipeline: "my_pipeline"
         body: {}
   - match: { _seq_no: 1 }
diff --git a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/270_set_processor.yml b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/270_set_processor.yml
index 41fc5b99fb275..61fc876d81809 100644
--- a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/270_set_processor.yml
+++ b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/270_set_processor.yml
@@ -27,7 +27,7 @@ teardown:
   - do:
       index:
         index: test
-        id: 1
+        id: "1"
         pipeline: "1"
         body: {
           foo: "hello"
@@ -35,7 +35,7 @@ teardown:
   - do:
       index:
         index: test
-        id: 2
+        id: "2"
         pipeline: "1"
         body: {
           foo: "hello",
@@ -45,13 +45,13 @@ teardown:
   - do:
       get:
         index: test
-        id: 1
+        id: "1"
   - match: { _source.foo: "hello" }
   - do:
       get:
         index: test
-        id: 2
+        id: "2"
   - match: { _source.foo: "hello" }
 ---
 "Test set processor with index change and require_alias":
@@ -154,7 +154,7 @@ teardown:
   - do:
       index:
         index: test
-        id: 1
+        id: "1"
         pipeline: "1"
         body: {
           foo_object: {
@@ -170,7 +170,7 @@ teardown:
   - do:
       get:
         index: test
-        id: 1
+        id: "1"
   - match: { _source.copied_foo_object.hello: "world" }
   - match: { _source.copied_foo_array.0: 1 }
   - match: { _source.copied_foo_string: "bla bla" }
diff --git a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/280_rename.yml b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/280_rename.yml
index a6c663a260aae..5e38f09dbd024 100644
--- a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/280_rename.yml
+++ b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/280_rename.yml
@@ -27,7 +27,7 @@ teardown:
   - do:
       index:
         index: test
-        id: 1
+        id: "1"
         pipeline: "1"
         body: {
           message: "test"
@@ -36,5 +36,5 @@ teardown:
   - do:
       get:
         index: test
-        id: 1
+        id: "1"
   - match: { _source.message: "test" }
diff --git a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/30_date_processor.yml b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/30_date_processor.yml
index cee302f0e20ed..707a2e2ddb0f7 100644
--- a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/30_date_processor.yml
+++ b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/30_date_processor.yml
@@ -29,14 +29,14 @@ teardown:
   - do:
       index:
         index: test
-        id: 1
+        id: "1"
         pipeline: "my_pipeline"
         body: {date_source_field: "12/06/2010"}
   - do:
       get:
         index: test
-        id: 1
+        id: "1"
   - match: { _source.date_source_field: "12/06/2010" }
   - match: { _source.date_target_field: "2010-06-12T00:00:00.000+02:00" }
@@ -66,14 +66,14 @@ teardown:
   - do:
       index:
         index: test2
-        id: 1
+        id: "1"
         pipeline: "my_pipeline_2"
         body: {date_source_field: "2010-06-01T00:00:00.000"}
   - do:
       get:
         index: test2
-        id: 1
+        id: "1"
   - match: { _source.date_source_field: "2010-06-01T00:00:00.000" }
 # date field without a timezone gets timezone from a pipeline
   - match: { _source.date_target_field: "2010-06-01T00:00:00.000+01:00" }
@@ -81,14 +81,14 @@ teardown:
   - do:
       index:
         index: test2
-        id: 2
+        id: "2"
         pipeline: "my_pipeline_2"
         body: {date_source_field: "2010-06-01T00:00:00.000Z"}
   - do:
       get:
         index: test2
-        id: 2
+        id: "2"
   - match: { _source.date_source_field: "2010-06-01T00:00:00.000Z" }
 # date field with a timezone has its time recalculated to a target timezone from a pipeline
   - match: { _source.date_target_field: "2010-06-01T01:00:00.000+01:00" }
@@ -160,14 +160,14 @@ teardown:
   - do:
       index:
         index: test
-        id: 1
+        id: "1"
         pipeline: "my_pipeline"
         body: { date_source_1: "2018-02-05T13:44:56.657+0100", date_source_2: "2017-04-04 13:43:09 +0200", date_source_3: "10/Aug/2018:09:45:56 +0200", date_source_4: "1", date_source_5: "1", date_source_6: "4000000050d506482dbdf024", date_source_7: "2018-02-05T13:44:56.657+0100" }
   - do:
       get:
         index: test
-        id: 1
+        id: "1"
   - match: { _source.date_source_1: "2018-02-05T13:44:56.657+0100" }
   - match: { _source.date_target_1: "2018-02-05T12:44:56.657Z" }
   - match: { _source.date_source_2: "2017-04-04 13:43:09 +0200" }
@@ -236,14 +236,14 @@ teardown:
   - do:
       index:
         index: test
-        id: 1
+        id: "1"
         pipeline: "my_pipeline"
         body: {date_source_field: "2020-33"}
   - do:
       get:
         index: test
-        id: 1
+        id: "1"
   - match: { _source.date_source_field: "2020-33" }
   - match: { _source.date_target_field: "2020-08-10T00:00:00.000Z" }
@@ -302,13 +302,13 @@ teardown:
   - do:
       index:
         index: test
-        id: 1
+        id: "1"
         pipeline: "my_pipeline"
         body: {date_source_field: "2020-33"}
   - do:
      get:
        index: test
-        id: 1
+        id: "1"
   - match: { _source.date_source_field: "2020-33" }
   - match: { _source.date_target_field: "2020-08-09T00:00:00.000Z" }
diff --git a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/40_mutate.yml b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/40_mutate.yml
index 9de9d19c0b879..fac7a9cdb28e8 100644
--- a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/40_mutate.yml
+++ b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/40_mutate.yml
@@ -89,7 +89,7 @@ teardown:
   - do:
       index:
         index: test
-        id: 1
+        id: "1"
         pipeline: "my_pipeline"
         body: >
           {
@@ -108,7 +108,7 @@ teardown:
   - do:
       get:
         index: test
-        id: 1
+        id: "1"
   - is_false: _source.field_to_rename
   - is_false: _source.field_to_remove
   - match: { _source.new_field: ["new_value", "item2", "item3", "item4"] }
@@ -148,13 +148,13 @@ teardown:
   - do:
       index:
         index: test
-        id: 1
+        id: "1"
         pipeline: "my_pipeline"
         body: {field: "value"}
   - do:
       get:
         index: surprise
-        id: 1
+        id: "1"
   - length: { _source: 1 }
   - match: { _source.field: "value" }
diff --git a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/50_on_failure.yml b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/50_on_failure.yml
index 4d74acdcab39c..eed7b8c76d295 100644
--- a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/50_on_failure.yml
+++ b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/50_on_failure.yml
@@ -47,14 +47,14 @@ teardown:
   - do:
       index:
         index: test
-        id: 1
+        id: "1"
         pipeline: "my_pipeline"
         body: {field1: "value1"}
   - do:
       get:
         index: test
-        id: 1
+        id: "1"
   - match: { _source.field1: "value1" }
   - match: { _source._executed: true }
   - match: { _source._failed: true }
@@ -103,14 +103,14 @@ teardown:
   - do:
       index:
         index: test
-        id: 1
+        id: "1"
         pipeline: "my_pipeline"
         body: {field1: "value1"}
   - do:
       get:
         index: test
-        id: 1
+        id: "1"
   - match: { _source.field1: "value1" }
   - match: { _source.foofield: "exists" }
   - match: { _source.foofield2: "ran" }
@@ -198,12 +198,12 @@ teardown:
   - do:
       index:
         index: test
-        id: 1
+        id: "1"
         pipeline: "my_pipeline"
         body: {}
   - do:
       get:
         index: test
-        id: 1
+        id: "1"
   - match: { _source.field: "value" }
diff --git a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/60_fail.yml b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/60_fail.yml
index 6b580a09239ec..e2c331deae340 100644
--- a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/60_fail.yml
+++ b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/60_fail.yml
@@ -27,7 +27,7 @@ teardown:
       catch: request
       index:
         index: test
-        id: 1
+        id: "1"
         pipeline: "my_pipeline"
         body: {}
@@ -60,12 +60,12 @@ teardown:
   - do:
       index:
         index: test
-        id: 1
+        id: "1"
         pipeline: "my_pipeline"
         body: {}
   - do:
       get:
         index: test
-        id: 1
+        id: "1"
   - match: { _source.error_message: "fail_processor_ran" }
diff --git a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/80_foreach.yml b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/80_foreach.yml
index 9142317ce1507..f76d5ad2ade3b 100644
--- a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/80_foreach.yml
+++ b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/80_foreach.yml
@@ -31,7 +31,7 @@ teardown:
   - do:
       index:
         index: test
-        id: 1
+        id: "1"
         pipeline: "my_pipeline"
         body: >
           {
@@ -41,14 +41,14 @@ teardown:
   - do:
       get:
         index: test
-        id: 1
+        id: "1"
   - match: { _source.values: ["FOO", "BAR", "BAZ"] }
 #exceeds the recurse max per thread and will runs some of these on a different thread
   - do:
       index:
         index: test
-        id: 1
+        id: "1"
         pipeline: "my_pipeline"
         body: >
           {
@@ -59,6 +59,6 @@ teardown:
   - do:
       get:
         index: test
-        id: 1
+        id: "1"
   - match: { _source.values: ["A", "B", "C", "D", "E", "F", "G", "H", "I", "J", "K", "L", "M", "N", "O", "P", "Q", "R", "S", "T", "U", "V", "W", "X", "Y", "Z"] }
diff --git a/modules/ingest-geoip/src/yamlRestTest/resources/rest-api-spec/test/ingest_geoip/20_geoip_processor.yml b/modules/ingest-geoip/src/yamlRestTest/resources/rest-api-spec/test/ingest_geoip/20_geoip_processor.yml
index d0da405cdeea9..b09dac97eba23 100644
--- a/modules/ingest-geoip/src/yamlRestTest/resources/rest-api-spec/test/ingest_geoip/20_geoip_processor.yml
+++ b/modules/ingest-geoip/src/yamlRestTest/resources/rest-api-spec/test/ingest_geoip/20_geoip_processor.yml
@@ -19,14 +19,14 @@
   - do:
       index:
         index: test
-        id: 1
+        id: "1"
         pipeline: "my_pipeline"
         body: {field1: "89.160.20.128"}
   - do:
       get:
         index: test
-        id: 1
+        id: "1"
   - match: { _source.field1: "89.160.20.128" }
   - length: { _source.geoip: 7 }
   - match: { _source.geoip.city_name: "Linköping" }
@@ -60,14 +60,14 @@
   - do:
       index:
         index: test
-        id: 1
+        id: "1"
         pipeline: "my_pipeline"
         body: {field1: ["89.160.20.128", "127.0.0.1"]}
   - do:
       get:
         index: test
-        id: 1
+        id: "1"
   - match: { _source.field1: ["89.160.20.128", "127.0.0.1"] }
   - length: { _source.geoip: 2 }
   - length: { _source.geoip.0: 7 }
@@ -102,14 +102,14 @@
   - do:
       index:
         index: test
-        id: 1
+        id: "1"
         pipeline: "my_pipeline"
         body: {field1: ["127.0.0.1", "89.160.20.128", "89.160.20.128"]}
   - do:
       get:
         index: test
-        id: 1
+        id: "1"
   - match: { _source.field1: ["127.0.0.1", "89.160.20.128", "89.160.20.128"] }
   - length: { _source.geoip: 7 }
   - match: { _source.geoip.city_name: "Linköping" }
@@ -147,14 +147,14 @@
   - do:
       index:
         index: test
-        id: 1
+        id: "1"
         pipeline: "my_pipeline"
         body: {field1: "89.160.20.128"}
   - do:
       get:
         index: test
-        id: 1
+        id: "1"
   - match: { _source.field1: "89.160.20.128" }
   - length: { _source.geoip: 9 }
   - match: { _source.geoip.city_name: "Linköping" }
@@ -190,14 +190,14 @@
   - do:
       index:
         index: test
-        id: 1
+        id: "1"
         pipeline: "my_pipeline"
         body: {field1: "89.160.20.128"}
   - do:
       get:
         index: test
-        id: 1
+        id: "1"
   - match: { _source.field1: "89.160.20.128" }
   - length: { _source.geoip: 3 }
   - match: { _source.geoip.country_iso_code: "SE" }
@@ -240,28 +240,28 @@
   - do:
       index:
         index: test
-        id: 1
+        id: "1"
         pipeline: "my_pipeline"
         body: { field1: "80.231.5.0" }
   - do:
       get:
         index: test
-        id: 1
+        id: "1"
   - match: { _source.field1: "80.231.5.0" }
   - is_false: _source.geoip
   - do:
       index:
         index: test
-        id: 2
+        id: "2"
         pipeline: "my_pipeline"
         body: { field1: "89.160.20.128" }
   - do:
       get:
         index: test
-        id: 2
+        id: "2"
   - match: { _source.field1: "89.160.20.128" }
   - length: { _source.geoip: 7 }
   - match: { _source.geoip.city_name: "Linköping" }
@@ -295,14 +295,14 @@
   - do:
       index:
         index: test
-        id: 1
+        id: "1"
         pipeline: "my_pipeline"
         body: {field1: "89.160.20.128"}
   - do:
       get:
         index: test
-        id: 1
+        id: "1"
   - match: { _source.field1: "89.160.20.128" }
   - length: { _source.geoip: 4 }
   - match: { _source.geoip.ip: "89.160.20.128" }
diff --git a/modules/ingest-user-agent/src/yamlRestTest/resources/rest-api-spec/test/ingest-useragent/20_useragent_processor.yml b/modules/ingest-user-agent/src/yamlRestTest/resources/rest-api-spec/test/ingest-useragent/20_useragent_processor.yml
index 8d938eb957222..3293f571a35d0 100644
--- a/modules/ingest-user-agent/src/yamlRestTest/resources/rest-api-spec/test/ingest-useragent/20_useragent_processor.yml
+++ b/modules/ingest-user-agent/src/yamlRestTest/resources/rest-api-spec/test/ingest-useragent/20_useragent_processor.yml
@@ -19,14 +19,14 @@
   - do:
       index:
         index: test
-        id: 1
+        id: "1"
         pipeline: "my_pipeline"
         body: {field1: "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/33.0.1750.149 Safari/537.36"}
   - do:
       get:
         index: test
-        id: 1
+        id: "1"
   - match: { _source.field1: "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/33.0.1750.149 Safari/537.36" }
   - match: { _source.user_agent.name: "Chrome" }
   - match: { _source.user_agent.original: "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/33.0.1750.149 Safari/537.36" }
@@ -56,14 +56,14 @@
   - do:
       index:
         index: test
-        id: 1
+        id: "1"
         pipeline: "my_pipeline"
         body: {field1: "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/33.0.1750.149 Safari/537.36"}
   - do:
       get:
         index: test
-        id: 1
+        id: "1"
   - match: { _source.field1: "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/33.0.1750.149 Safari/537.36" }
   - match: { _source.field2.os.full: "Mac OS X 10.9.2" }
   - is_false: _source.user_agent
diff --git a/modules/ingest-user-agent/src/yamlRestTest/resources/rest-api-spec/test/ingest-useragent/30_custom_regex.yml b/modules/ingest-user-agent/src/yamlRestTest/resources/rest-api-spec/test/ingest-useragent/30_custom_regex.yml
index 3d0179d6ad51a..85b9e24db4bd7 100644
--- a/modules/ingest-user-agent/src/yamlRestTest/resources/rest-api-spec/test/ingest-useragent/30_custom_regex.yml
+++ b/modules/ingest-user-agent/src/yamlRestTest/resources/rest-api-spec/test/ingest-useragent/30_custom_regex.yml
@@ -20,14 +20,14 @@
   - do:
       index:
         index: test
-        id: 1
+        id: "1"
         pipeline: "my_pipeline"
         body: {field1: "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/33.0.1750.149 Safari/537.36"}
   - do:
       get:
         index: test
-        id: 1
+        id: "1"
   - match: { _source.field1: "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/33.0.1750.149 Safari/537.36" }
   - match: { _source.user_agent.name: "Test" }
   - is_false: _source.user_agent.os
diff --git a/modules/lang-expression/src/yamlRestTest/resources/rest-api-spec/test/lang_expression/20_search.yml b/modules/lang-expression/src/yamlRestTest/resources/rest-api-spec/test/lang_expression/20_search.yml
index 33bad01a1232a..e850d7273eea6 100644
--- a/modules/lang-expression/src/yamlRestTest/resources/rest-api-spec/test/lang_expression/20_search.yml
+++ b/modules/lang-expression/src/yamlRestTest/resources/rest-api-spec/test/lang_expression/20_search.yml
@@ -11,7 +11,7 @@ setup:
   - do:
       index:
         index: test123
-        id: 1
+        id: "1"
         body: { age: 23 }
   - do:
diff --git a/modules/lang-mustache/src/yamlRestTest/resources/rest-api-spec/test/lang_mustache/20_render_search_template.yml b/modules/lang-mustache/src/yamlRestTest/resources/rest-api-spec/test/lang_mustache/20_render_search_template.yml
index 946b63a65d923..6fc117ed2189a 100644
--- a/modules/lang-mustache/src/yamlRestTest/resources/rest-api-spec/test/lang_mustache/20_render_search_template.yml
+++ b/modules/lang-mustache/src/yamlRestTest/resources/rest-api-spec/test/lang_mustache/20_render_search_template.yml
@@ -109,12 +109,12 @@
   - do:
       index:
         index: test
-        id: 1
+        id: "1"
         body: { "text": "value1_foo" }
   - do:
       index:
         index: test
-        id: 2
+        id: "2"
         body: { "text": "value2_foo value3_foo" }
   - do:
       indices.refresh: {}
diff --git a/modules/lang-mustache/src/yamlRestTest/resources/rest-api-spec/test/lang_mustache/30_search_template.yml b/modules/lang-mustache/src/yamlRestTest/resources/rest-api-spec/test/lang_mustache/30_search_template.yml
index a9d3c2da68617..55cd3710219ef 100644
--- a/modules/lang-mustache/src/yamlRestTest/resources/rest-api-spec/test/lang_mustache/30_search_template.yml
+++ b/modules/lang-mustache/src/yamlRestTest/resources/rest-api-spec/test/lang_mustache/30_search_template.yml
@@ -4,12 +4,12 @@
   - do:
       index:
         index: test
-        id: 1
+        id: "1"
         body: { "text": "value1" }
   - do:
       index:
         index: test
-        id: 2
+        id: "2"
         body: { "text": "value2" }
   - do:
       indices.refresh: {}
@@ -68,25 +68,25 @@
   - do:
       index:
         index: test
-        id: 1
+        id: "1"
         body: { "theField": "foo" }
   - do:
       index:
         index: test
-        id: 2
+        id: "2"
         body: { "theField": "foo 2" }
   - do:
       index:
         index: test
-        id: 3
+        id: "3"
         body: { "theField": "foo 3" }
   - do:
       index:
         index: test
-        id: 4
+        id: "4"
         body: { "theField": "foo 4" }
 # we use a different index here since we compare the explain description which contains a doc ID and we can only be sure that it's 0
   - do:
       index:
         index: otherindex
-        id: 5
+        id: "5"
         body: { "otherField": "foo" }
   - do:
       indices.refresh: {}
@@ -144,7 +144,7 @@
   - do:
       index:
         index: test
-        id: 1
+        id: "1"
         body: {}
         refresh: true
diff --git a/modules/lang-mustache/src/yamlRestTest/resources/rest-api-spec/test/lang_mustache/50_multi_search_template.yml b/modules/lang-mustache/src/yamlRestTest/resources/rest-api-spec/test/lang_mustache/50_multi_search_template.yml
index e92e10b9ad276..b07131c809d55 100644
--- a/modules/lang-mustache/src/yamlRestTest/resources/rest-api-spec/test/lang_mustache/50_multi_search_template.yml
+++ b/modules/lang-mustache/src/yamlRestTest/resources/rest-api-spec/test/lang_mustache/50_multi_search_template.yml
@@ -4,25 +4,25 @@ setup:
   - do:
       index:
         index: index_1
-        id: 1
+        id: "1"
         body: { foo: bar }
   - do:
       index:
         index: index_1
-        id: 2
+        id: "2"
         body: { foo: baz }
   - do:
       index:
         index: index_1
-        id: 3
+        id: "3"
         body: { foo: foo }
   - do:
       index:
         index: index_2
-        id: 1
+        id: "1"
         body: { foo: foo }
   - do:
diff --git a/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/100_terms_agg.yml b/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/100_terms_agg.yml
index 000e1af694d7d..612f56dd2617e 100644
--- a/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/100_terms_agg.yml
+++ b/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/100_terms_agg.yml
@@ -21,7 +21,7 @@ setup:
   - do:
       index:
         index: test_1
-        id: 1
+        id: "1"
         body:
           str: "abc"
           double: 1.0
@@ -30,7 +30,7 @@ setup:
   - do:
       index:
         index: test_1
-        id: 2
+        id: "2"
         body:
           str: "abc"
           double: 1.0
@@ -39,7 +39,7 @@ setup:
   - do:
       index:
         index: test_1
-        id: 3
+        id: "3"
         body:
           str: "bcd"
           double: 2.0
diff --git a/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/130_metric_agg.yml b/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/130_metric_agg.yml
index 355da5cd27e9e..1c934b8421716 100644
--- a/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/130_metric_agg.yml
+++ b/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/130_metric_agg.yml
@@ -17,21 +17,21 @@ setup:
   - do:
       index:
         index: test
-        id: 1
+        id: "1"
         body:
           double: 1.0
   - do:
       index:
         index: test
-        id: 2
+        id: "2"
         body:
           double: 1.0
   - do:
       index:
         index: test
-        id: 3
+        id: "3"
         body:
           double: 2.0
diff --git a/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/15_update.yml b/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/15_update.yml
index cb118ed9d562f..a23a27a2e6578 100644
--- a/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/15_update.yml
+++ b/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/15_update.yml
@@ -4,7 +4,7 @@
   - do:
       index:
         index: test_1
-        id: 1
+        id: "1"
         body:
           foo: bar
           count: 1
@@ -12,7 +12,7 @@
   - do:
       update:
         index: test_1
-        id: 1
+        id: "1"
         body:
           script:
             lang: painless
@@ -26,7 +26,7 @@
   - do:
       get:
         index: test_1
-        id: 1
+        id: "1"
   - match: { _source.foo: xxx }
   - match: { _source.count: 1 }
@@ -34,7 +34,7 @@
   - do:
       update:
         index: test_1
-        id: 1
+        id: "1"
         body:
           script:
             lang: painless
@@ -47,7 +47,7 @@
   - do:
       get:
         index: test_1
-        id: 1
+        id: "1"
   - match: { _source.foo: yyy }
   - match: { _source.count: 1 }
@@ -55,7 +55,7 @@
   - do:
       update:
         index: test_1
-        id: 1
+        id: "1"
         body:
           script:
             lang: painless
@@ -68,7 +68,7 @@
   - do:
       get:
         index: test_1
-        id: 1
+        id: "1"
   - match: { _source.foo: yyy }
   - match: { _source.count: 1 }
@@ -78,7 +78,7 @@
   - do:
       update:
         index: test_1
-        id: 1
+        id: "1"
         body:
           script:
             lang: painless
@@ -91,7 +91,7 @@
   - do:
       get:
         index: test_1
-        id: 1
+        id: "1"
   - match: { _source.foo: yyy }
   - match: { _source.foo_length: 3 }
@@ -104,7 +104,7 @@
   - do:
       index:
         index: test_1
-        id: 2
+        id: "2"
         body:
           foo: bar
           count: 1
@@ -113,7 +113,7 @@
       catch: bad_request
       update:
         index: test_1
-        id: 2
+        id: "2"
         body:
           script:
             lang: painless
diff --git a/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/20_scriptfield.yml b/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/20_scriptfield.yml
index c4ae4fbf38cba..d0ddb9f32d565 100644
--- a/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/20_scriptfield.yml
+++ b/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/20_scriptfield.yml
@@ -22,7 +22,7 @@ setup:
   - do:
       index:
         index: test
-        id: 1
+        id: "1"
         body: {
           "foo": "aaa",
           "date": "2017/01/01",
diff --git a/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/25_script_upsert.yml b/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/25_script_upsert.yml
index bb6362389a3da..559a54d28a19e 100644
--- a/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/25_script_upsert.yml
+++ b/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/25_script_upsert.yml
@@ -4,7 +4,7 @@
   - do:
       update:
         index: test_1
-        id: 1
+        id: "1"
         body:
           script:
             source: "ctx._source.foo = params.bar"
@@ -15,7 +15,7 @@
   - do:
       get:
         index: test_1
-        id: 1
+        id: "1"
   - match: { _source.foo: baz }
@@ -23,7 +23,7 @@
   - do:
       update:
         index: test_1
-        id: 1
+        id: "1"
         body:
           script:
             source: "ctx._source.foo = params.bar"
@@ -34,14 +34,14 @@
   - do:
       get:
         index: test_1
-        id: 1
+        id: "1"
   - match: { _source.foo: xxx }
   - do:
       update:
         index: test_1
-        id: 2
+        id: "2"
         body:
           script:
             source: "ctx._source.foo = params.bar"
@@ -53,14 +53,14 @@
   - do:
       get:
         index: test_1
-        id: 2
+        id: "2"
   - match: { _source.foo: xxx }
   - do:
       update:
         index: test_1
-        id: 3
+        id: "3"
         body:
           script:
             source: "ctx._source.has_now = ctx._now > 0"
@@ -71,14 +71,14 @@
   - do:
       get:
         index: test_1
-        id: 3
+        id: "3"
   - match: { _source.has_now: true }
   - do:
       update:
         index: test_1
-        id: 4
+        id: "4"
         body:
           script:
             # assume _now is an absolute clock if it's in the range [now - 1m, now]; this tolerance might need adjustment after CI cycles
@@ -90,6 +90,6 @@
   - do:
       get:
         index: test_1
-        id: 4
+        id: "4"
   - match: { _source.within_one_minute: true }
diff --git a/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/30_search.yml b/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/30_search.yml
index a006fde630716..5674d79b52a94 100644
--- a/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/30_search.yml
+++ b/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/30_search.yml
@@ -4,17 +4,17 @@
   - do:
       index:
         index: test
-        id: 1
+        id: "1"
         body: { "test": "value beck", "num1": 1.0, "bool": true }
   - do:
       index:
         index: test
-        id: 2
+        id: "2"
         body: { "test": "value beck", "num1": 2.0, "bool": false }
   - do:
       index:
         index: test
-        id: 3
+        id: "3"
         body: { "test": "value beck", "num1": 3.0, "bool": true }
   - do:
       indices.refresh: {}
@@ -115,12 +115,12 @@
   - do:
       index:
         index: test
-        id: 1
+        id: "1"
         body: { "test": "value beck", "num1": 1.0 }
   - do:
       index:
         index: test
-        id: 2
+        id: "2"
         body: { "test": "value beck", "num1": 2.0 }
   - do:
       indices.refresh: {}
@@ -272,7 +272,7 @@
   - do:
       index:
         index: test
-        id: 1
+        id: "1"
         body: { "dummy_field": 1 }
   - do:
       indices.refresh: {}
@@ -322,7 +322,7 @@
   - do:
       index:
         index: test
-        id: 1
+        id: "1"
         body: { "dummy_field": 1 }
   - do:
       indices.refresh: {}
@@ -361,7 +361,7 @@
   - do:
       index:
         index: test
-        id: 1
+        id: "1"
         body: { "f": 42 }
   - do:
       indices.refresh: {}
@@ -388,7 +388,7 @@
   - do:
       index:
         index: test
-        id: 1
+        id: "1"
         body: { "dummy_field": 1 }
   - do:
       indices.refresh: {}
@@ -424,7 +424,7 @@
   - do:
       index:
         index: test
-        id: 1
+        id: "1"
         body: { "genre": 1 }
   - do:
@@ -455,7 +455,7 @@
   - do:
       index:
         index: test
-        id: 1
+        id: "1"
         body: { "test": "value beck", "num1": 1.0 }
   - do:
       indices.refresh: {}
diff --git a/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/50_script_doc_values.yml b/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/50_script_doc_values.yml
index c2880d17361c3..d3b43575b58a4 100644
--- a/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/50_script_doc_values.yml
+++ b/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/50_script_doc_values.yml
@@ -48,7 +48,7 @@ setup:
   - do:
       index:
         index: test
-        id: 1
+        id: "1"
         body:
           rank: 1
           boolean: true
@@ -70,14 +70,14 @@ setup:
   - do:
       index:
         index: test
-        id: 2
+        id: "2"
         body:
           rank: 2
   - do:
       index:
         index: test
-        id: 3
+        id: "3"
         body:
           rank: 3
           boolean: [true, false, true]
@@ -105,7 +105,7 @@ setup:
       search:
         rest_total_hits_as_int: true
        body:
-          query: { term: { _id: 1 } }
+          query: { term: { _id: "1" } }
           script_fields:
             field:
               script:
@@ -116,7 +116,7 @@ setup:
       search:
        rest_total_hits_as_int: true
        body:
-          query: { term: { _id: 1 } }
+          query: { term: { _id: "1" } }
           script_fields:
             field:
               script:
@@ -127,7 +127,7 @@ setup:
      search:
        rest_total_hits_as_int: true
        body:
-          query: { term: { _id: 1 } }
+          query: { term: { _id: "1" } }
           script_fields:
             field:
               script:
@@ -138,7 +138,7 @@ setup:
      search:
        rest_total_hits_as_int: true
        body:
-          query: { term: { _id: 1 } }
+          query: { term: { _id: "1" } }
           script_fields:
             field:
               script:
@@ -149,7 +149,7 @@ setup:
      search:
        rest_total_hits_as_int: true
        body:
-          query: { term: { _id: 2 } }
+          query: { term: { _id: "2" } }
           script_fields:
             field:
               script:
@@ -160,7 +160,7 @@ setup:
      search:
        rest_total_hits_as_int: true
        body:
-          query: { term: { _id: 2 } }
+          query: { term: { _id: "2" } }
           script_fields:
             field:
               script:
@@ -171,7 +171,7 @@ setup:
      search:
        rest_total_hits_as_int: true
        body:
-          query: { term: { _id: 1 } }
+          query: { term: { _id: "1" } }
           script_fields:
             field:
               script:
@@ -182,7 +182,7 @@ setup:
      search:
        rest_total_hits_as_int: true
        body:
-          query: { term: { _id: 1 } }
+          query: { term: { _id: "1" } }
           script_fields:
             field:
               script:
@@ -193,7 +193,7 @@ setup:
      search:
        rest_total_hits_as_int: true
        body:
-          query: { term: { _id: 3 } }
+          query: { term: { _id: "3" } }
           script_fields:
             field:
               script:
@@ -204,7 +204,7 @@ setup:
      search:
        rest_total_hits_as_int: true
        body:
-          query: { term: { _id: 2 } }
+          query: { term: { _id: "2" } }
           script_fields:
             field:
               script:
@@ -221,7 +221,7 @@ setup:
      search:
        rest_total_hits_as_int: true
        body:
-          query: { term: { _id: 1 } }
+          query: { term: { _id: "1" } }
           script_fields:
             field:
               script:
@@ -232,7 +232,7 @@ setup:
      search:
        rest_total_hits_as_int: true
        body:
-          query: { term: { _id: 1 } }
+          query: { term: { _id: "1" } }
           script_fields:
             field:
               script:
@@ -243,7 +243,7 @@ setup:
      search:
        rest_total_hits_as_int: true
        body:
-          query: { term: { _id: 1 } }
+          query: { term: { _id: "1" } }
           script_fields:
             field:
               script:
@@ -254,7 +254,7 @@ setup:
      search:
        rest_total_hits_as_int: true
        body:
-          query: { term: { _id: 1 } }
+          query: { term: { _id: "1" } }
           script_fields:
             field:
               script:
@@ -353,7 +353,7 @@ setup:
      search:
        rest_total_hits_as_int: true
        body:
-          query: { term: { _id: 2 } }
+          query: { term: { _id: "2" } }
           script_fields:
             field:
               script:
@@ -364,7 +364,7 @@ setup:
      search:
        rest_total_hits_as_int: true
        body:
-          query: { term: { _id: 2 } }
+          query: { term: { _id: "2" } }
           script_fields:
             field:
               script:
@@ -375,7 +375,7 @@ setup:
      search:
        rest_total_hits_as_int: true
        body:
-          query: { term: { _id: 1 } }
+          query: { term: { _id: "1" } }
           script_fields:
             field:
               script:
@@ -386,7 +386,7 @@ setup:
      search:
        rest_total_hits_as_int: true
        body:
-          query: { term: { _id: 1 } }
+          query: { term: { _id: "1" } }
           script_fields:
             field:
               script:
@@ -397,7 +397,7 @@ setup:
      search:
        rest_total_hits_as_int: true
        body:
-          query: { term: { _id: 1 } }
+          query: { term: { _id: "1" } }
           script_fields:
             field:
               script:
@@ -408,7 +408,7 @@ setup:
      search:
        rest_total_hits_as_int: true
        body:
-          query: { term: { _id: 2 } }
+          query: { term: { _id: "2" } }
           script_fields:
             field:
               script:
@@ -419,7 +419,7 @@ setup:
      search:
        rest_total_hits_as_int: true
        body:
-          query: { term: { _id: 2 } }
+          query: { term: { _id: "2" } }
           script_fields:
             field:
               script:
@@ -430,7 +430,7 @@ setup:
      search:
        rest_total_hits_as_int: true
        body:
-          query: { term: { _id: 1 } }
+          query: { term: { _id: "1" } }
           script_fields:
             field:
               script:
@@ -441,7 +441,7 @@ setup:
      search:
        rest_total_hits_as_int: true
        body:
-          query: { term: { _id: 1 } }
+          query: { term: { _id: "1" } }
           script_fields:
             field:
               script:
@@ -452,7 +452,7 @@ setup:
      search:
        rest_total_hits_as_int: true
        body:
-          query: { term: { _id: 1 } }
+          query: { term: { _id: "1" } }
           script_fields:
             field:
               script:
@@ -463,7 +463,7 @@ setup:
      search:
        rest_total_hits_as_int: true
        body:
-          query: { term: { _id: 2 } }
+          query: { term: { _id: "2" } }
           script_fields:
             field:
               script:
@@ -474,7 +474,7 @@ setup:
      search:
        rest_total_hits_as_int: true
        body:
-          query: { term: { _id: 3 } }
+          query: { term: { _id: "3" } }
           script_fields:
             field:
               script:
@@ -485,7 +485,7 @@ setup:
      search:
        rest_total_hits_as_int: true
        body:
-          query: { term: { _id: 3 } }
+          query: { term: { _id: "3" } }
           script_fields:
             field:
               script:
@@ -496,7 +496,7 @@ setup:
      search:
        rest_total_hits_as_int: true
        body:
-          query: { term: { _id: 3 } }
+          query: { term: { _id: "3" } }
           script_fields:
             field:
               script:
@@ -507,7 +507,7 @@ setup:
      search:
        rest_total_hits_as_int: true
        body:
-          query: { term: { _id: 3 } }
+          query: { term: { _id: "3" } }
           script_fields:
             field:
               script:
@@ -520,7 +520,7 @@ setup:
      search:
        rest_total_hits_as_int: true
        body:
-          query: { term: { _id: 1 } }
+          query: { term: { _id: "1" } }
           script_fields:
             field:
               script:
@@ -532,7 +532,7 @@ setup:
      search:
        rest_total_hits_as_int: true
        body:
-          query: { term: { _id: 1 } }
+          query: { term: { _id: "1" } }
           script_fields:
             field:
               script:
@@ -544,7 +544,7 @@ setup:
      search:
        rest_total_hits_as_int: true
        body:
-          query: { term: { _id: 1 } }
+          query: { term: { _id: "1" } }
           script_fields:
             field:
               script:
@@ -556,7 +556,7 @@ setup:
      search:
        rest_total_hits_as_int: true
        body:
-          query: { term: { _id: 1 } }
+          query: { term: { _id: "1" } }
           script_fields:
             field:
               script:
@@ -568,7 +568,7 @@ setup:
      search:
        rest_total_hits_as_int: true
        body:
-          query: { term: { _id: 3 } }
+          query: { term: { _id: "3" } }
           script_fields:
             field:
               script:
@@ -580,7 +580,7 @@ setup:
      search:
        rest_total_hits_as_int: true
        body:
-          query: { term: { _id: 3 } }
+          query: { term: { _id: "3" } }
           script_fields:
             field:
               script:
@@ -592,7 +592,7 @@ setup:
      search:
        rest_total_hits_as_int: true
        body:
-          query: { term: { _id: 3 } }
+          query: { term: { _id: "3" } }
           script_fields:
             field:
               script:
@@ -604,7 +604,7 @@ setup:
      search:
        rest_total_hits_as_int: true
        body:
-          query: { term: { _id: 2 } }
+          query: { term: { _id: "2" } }
           script_fields:
             field:
               script:
@@ -616,7 +616,7 @@ setup:
      search:
        rest_total_hits_as_int: true
        body:
-          query: { term: { _id: 1 } }
+          query: { term: { _id: "1" } }
           script_fields:
             centroid:
               script:
@@ -628,7 +628,7 @@ setup:
      search:
        rest_total_hits_as_int: true
        body:
-          query: { term: { _id: 1 } }
+          query: { term: { _id: "1" } }
           script_fields:
             bbox:
               script:
@@ -642,7 +642,7 @@ setup:
      search:
        rest_total_hits_as_int: true
        body:
-          query: { term: { _id: 1 } }
+          query: { term: { _id: "1" } }
           script_fields:
             topLeft:
               script:
@@ -659,7 +659,7 @@ setup:
      search:
        rest_total_hits_as_int: true
        body:
-          query: { term: { _id: 1 } }
+          query: { term: { _id: "1" } }
           script_fields:
             type:
               script:
@@ -670,7 +670,7 @@ setup:
      search:
        rest_total_hits_as_int: true
        body:
-          query: { term: { _id: 1 } }
+          query: { term: { _id: "1" } }
           script_fields:
             width:
               script:
@@ -687,7 +687,7 @@ setup:
      search:
        rest_total_hits_as_int: true
        body:
-          query: { term: { _id: 1 } }
+          query: { term: { _id: "1" } }
           script_fields:
             field:
               script:
@@ -698,7 +698,7 @@ setup:
      search:
        rest_total_hits_as_int: true
        body:
-          query: { term: { _id: 1 } }
+          query: { term: { _id: "1" } }
           script_fields:
             field:
               script:
@@ -780,7 +780,7 @@ setup:
      search:
        rest_total_hits_as_int: true
        body:
-          query: { term: { _id: 1 } }
+          query: { term: { _id: "1" } }
           script_fields:
             field:
               script:
@@ -791,7 +791,7 @@ setup:
      search:
        rest_total_hits_as_int: true
        body:
-          query: { term: { _id: 1 } }
+          query: { term: { _id: "1" } }
           script_fields:
             field:
               script:
@@ -860,7 +860,7 @@ setup:
      search:
        rest_total_hits_as_int: true
        body:
-          query: { term: { _id: 1 } }
+          query: { term: { _id: "1" } }
           script_fields:
             field:
               script:
@@ -871,7 +871,7 @@ setup:
      search:
        rest_total_hits_as_int: true
        body:
-          query: { term: { _id: 1 } }
+          query: { term: { _id: "1" } }
           script_fields:
             field:
script: @@ -884,7 +884,7 @@ setup: search: rest_total_hits_as_int: true body: - query: { term: { _id: 1 } } + query: { term: { _id: "1" } } script_fields: field: script: @@ -895,7 +895,7 @@ setup: search: rest_total_hits_as_int: true body: - query: { term: { _id: 1 } } + query: { term: { _id: "1" } } script_fields: field: script: @@ -958,7 +958,7 @@ setup: search: rest_total_hits_as_int: true body: - query: { term: { _id: 1 } } + query: { term: { _id: "1" } } script_fields: field: script: @@ -969,7 +969,7 @@ setup: search: rest_total_hits_as_int: true body: - query: { term: { _id: 1 } } + query: { term: { _id: "1" } } script_fields: field: script: @@ -1069,7 +1069,7 @@ setup: search: rest_total_hits_as_int: true body: - query: { term: { _id: 1 } } + query: { term: { _id: "1" } } script_fields: field: script: @@ -1080,7 +1080,7 @@ setup: search: rest_total_hits_as_int: true body: - query: { term: { _id: 1 } } + query: { term: { _id: "1" } } script_fields: field: script: @@ -1223,7 +1223,7 @@ setup: search: rest_total_hits_as_int: true body: - query: { term: { _id: 1 } } + query: { term: { _id: "1" } } script_fields: field: script: @@ -1234,7 +1234,7 @@ setup: search: rest_total_hits_as_int: true body: - query: { term: { _id: 1 } } + query: { term: { _id: "1" } } script_fields: field: script: @@ -1312,7 +1312,7 @@ setup: search: rest_total_hits_as_int: true body: - query: { term: { _id: 1 } } + query: { term: { _id: "1" } } script_fields: field: script: @@ -1323,7 +1323,7 @@ setup: search: rest_total_hits_as_int: true body: - query: { term: { _id: 1 } } + query: { term: { _id: "1" } } script_fields: field: script: @@ -1404,7 +1404,7 @@ setup: search: rest_total_hits_as_int: true body: - query: { term: { _id: 1 } } + query: { term: { _id: "1" } } script_fields: field: script: @@ -1415,7 +1415,7 @@ setup: search: rest_total_hits_as_int: true body: - query: { term: { _id: 1 } } + query: { term: { _id: "1" } } script_fields: field: script: @@ -1493,7 +1493,7 @@ setup: search: rest_total_hits_as_int: true body: - query: { term: { _id: 1 } } + query: { term: { _id: "1" } } script_fields: field: script: @@ -1543,7 +1543,7 @@ setup: search: rest_total_hits_as_int: true body: - query: { term: { _id: 1 } } + query: { term: { _id: "1" } } script_fields: field: script: @@ -1556,7 +1556,7 @@ setup: search: rest_total_hits_as_int: true body: - query: { term: { _id: 1 } } + query: { term: { _id: "1" } } script_fields: field: script: @@ -1567,7 +1567,7 @@ setup: search: rest_total_hits_as_int: true body: - query: { term: { _id: 1 } } + query: { term: { _id: "1" } } script_fields: field: script: @@ -1622,7 +1622,7 @@ setup: - do: index: index: versiontest - id: 3000 + id: "3000" version: 50 version_type: external body: @@ -1636,7 +1636,7 @@ setup: index: versiontest rest_total_hits_as_int: true body: - query: { term: { _id: 3000 } } + query: { term: { _id: "3000" } } script_fields: ver: script: @@ -1650,7 +1650,7 @@ setup: - do: index: index: versiontest - id: 3000 + id: "3000" version: 60 version_type: external body: @@ -1662,7 +1662,7 @@ setup: catch: conflict index: index: versiontest - id: 3000 + id: "3000" version: 55 version_type: external body: @@ -1675,7 +1675,7 @@ setup: index: versiontest rest_total_hits_as_int: true body: - query: { term: { _id: 3000 } } + query: { term: { _id: "3000" } } script_fields: ver: script: diff --git a/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/60_script_doc_values_binary.yml 
b/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/60_script_doc_values_binary.yml index 614ce53fd0649..67457e64c874e 100644 --- a/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/60_script_doc_values_binary.yml +++ b/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/60_script_doc_values_binary.yml @@ -18,7 +18,7 @@ Content-Type: application/json index: index: test - id: 1 + id: "1" body: binary: "U29tZSBiaW5hcnkgYmxvYg==" @@ -28,7 +28,7 @@ Content-Type: application/json index: index: test - id: 2 + id: "2" body: binary: [ "U29tZSBiaW5hcnkgYmxvYg==", @@ -42,7 +42,7 @@ Content-Type: application/json index: index: test - id: 3 + id: "3" body: {} - do: diff --git a/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/70_mov_fn_agg.yml b/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/70_mov_fn_agg.yml index 57e7b4e31e057..d699b58607da3 100644 --- a/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/70_mov_fn_agg.yml +++ b/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/70_mov_fn_agg.yml @@ -18,32 +18,32 @@ setup: body: - index: _index: test - _id: 1 + _id: "1" - date: "2017-01-01T00:00:00" value_field: 1 - index: _index: test - _id: 2 + _id: "2" - date: "2017-01-02T00:00:00" value_field: 2 - index: _index: test - _id: 3 + _id: "3" - date: "2017-01-03T00:00:00" value_field: 3 - index: _index: test - _id: 4 + _id: "4" - date: "2017-01-04T00:00:00" value_field: 4 - index: _index: test - _id: 5 + _id: "5" - date: "2017-01-05T00:00:00" value_field: 5 - index: _index: test - _id: 6 + _id: "6" - date: "2017-01-06T00:00:00" value_field: 6 diff --git a/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/80_script_score.yml b/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/80_script_score.yml index 1af3cb3c622b6..1cf44a6b27d1c 100644 --- a/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/80_script_score.yml +++ b/modules/lang-painless/src/yamlRestTest/resources/rest-api-spec/test/painless/80_script_score.yml @@ -84,12 +84,12 @@ - do: index: index: test - id: 1 + id: "1" body: { "text-location": "location1", "location" : {"lat" : 40.24, "lon" : -70.24} } - do: index: index: test - id: 2 + id: "2" body: { "text-location": "location2", "location" : {"lat" : 40.12, "lon" : -70.12} } - do: indices.refresh: {} @@ -167,12 +167,12 @@ - do: index: index: test - id: 1 + id: "1" body: { "date": "2018-01-01T02:00:00Z"} - do: index: index: test - id: 2 + id: "2" body: { "date": "2018-01-01T01:00:00Z" } - do: indices.refresh: {} @@ -257,14 +257,14 @@ - do: index: index: test - id: 1 + id: "1" body: { "ival" : 40, "lval" : 40, "fval": 40.0, "dval": 40.0} # for this document, the smallest value in the array is chosen, which will be the closest to the origin - do: index: index: test - id: 2 + id: "2" body: { "ival" : [50, 40, 20], "lval" : [50, 40, 20], "fval" : [50.0, 40.0, 20.0], "dval" : [50.0, 40.0, 20.0] } - do: indices.refresh: {} @@ -565,7 +565,7 @@ - do: index: index: test - id: 1 + id: "1" body: object: field: "some text" diff --git a/modules/mapper-extras/src/yamlRestTest/resources/rest-api-spec/test/match_only_text/10_basic.yml b/modules/mapper-extras/src/yamlRestTest/resources/rest-api-spec/test/match_only_text/10_basic.yml index d58ef74ea6316..40342b52aeea2 100644 --- 
a/modules/mapper-extras/src/yamlRestTest/resources/rest-api-spec/test/match_only_text/10_basic.yml +++ b/modules/mapper-extras/src/yamlRestTest/resources/rest-api-spec/test/match_only_text/10_basic.yml @@ -16,25 +16,25 @@ setup: - do: index: index: test - id: 1 + id: "1" body: {} - do: index: index: test - id: 2 + id: "2" body: { "foo": "Apache Lucene powers Elasticsearch" } - do: index: index: test - id: 3 + id: "3" body: { "foo": "Elasticsearch is based on Apache Lucene" } - do: index: index: test - id: 4 + id: "4" body: { "foo": "The Apache Software Foundation manages many projects including Lucene" } - do: diff --git a/modules/mapper-extras/src/yamlRestTest/resources/rest-api-spec/test/rank_feature/10_basic.yml b/modules/mapper-extras/src/yamlRestTest/resources/rest-api-spec/test/rank_feature/10_basic.yml index 4fce6c84e817d..fcdf3f5a5fdf2 100644 --- a/modules/mapper-extras/src/yamlRestTest/resources/rest-api-spec/test/rank_feature/10_basic.yml +++ b/modules/mapper-extras/src/yamlRestTest/resources/rest-api-spec/test/rank_feature/10_basic.yml @@ -16,7 +16,7 @@ setup: - do: index: index: test - id: 1 + id: "1" body: pagerank: 10 url_length: 50 @@ -24,7 +24,7 @@ setup: - do: index: index: test - id: 2 + id: "2" body: pagerank: 100 url_length: 20 diff --git a/modules/mapper-extras/src/yamlRestTest/resources/rest-api-spec/test/rank_features/10_basic.yml b/modules/mapper-extras/src/yamlRestTest/resources/rest-api-spec/test/rank_features/10_basic.yml index 7ba892cc87183..4302e33e1cf54 100644 --- a/modules/mapper-extras/src/yamlRestTest/resources/rest-api-spec/test/rank_features/10_basic.yml +++ b/modules/mapper-extras/src/yamlRestTest/resources/rest-api-spec/test/rank_features/10_basic.yml @@ -17,7 +17,7 @@ setup: - do: index: index: test - id: 1 + id: "1" body: tags: foo: 3 @@ -29,7 +29,7 @@ setup: - do: index: index: test - id: 2 + id: "2" body: tags: bar: 6 diff --git a/modules/mapper-extras/src/yamlRestTest/resources/rest-api-spec/test/scaled_float/10_basic.yml b/modules/mapper-extras/src/yamlRestTest/resources/rest-api-spec/test/scaled_float/10_basic.yml index f68385d25114f..b7f810fa48206 100644 --- a/modules/mapper-extras/src/yamlRestTest/resources/rest-api-spec/test/scaled_float/10_basic.yml +++ b/modules/mapper-extras/src/yamlRestTest/resources/rest-api-spec/test/scaled_float/10_basic.yml @@ -14,25 +14,25 @@ setup: - do: index: index: test - id: 1 + id: "1" body: { "number" : 1 } - do: index: index: test - id: 2 + id: "2" body: { "number" : 1.53 } - do: index: index: test - id: 3 + id: "3" body: { "number" : -2.1 } - do: index: index: test - id: 4 + id: "4" body: { "number" : 1.53 } - do: diff --git a/modules/mapper-extras/src/yamlRestTest/resources/rest-api-spec/test/search-as-you-type/10_basic.yml b/modules/mapper-extras/src/yamlRestTest/resources/rest-api-spec/test/search-as-you-type/10_basic.yml index 21843dad1d177..15e44e5e7b73a 100644 --- a/modules/mapper-extras/src/yamlRestTest/resources/rest-api-spec/test/search-as-you-type/10_basic.yml +++ b/modules/mapper-extras/src/yamlRestTest/resources/rest-api-spec/test/search-as-you-type/10_basic.yml @@ -19,7 +19,7 @@ setup: - do: index: index: test - id: 1 + id: "1" body: a_field: "quick brown fox jump lazy dog" @@ -27,7 +27,7 @@ setup: - do: index: index: test - id: 2 + id: "2" body: a_field: "xylophone xylophone xylophone" @@ -39,7 +39,7 @@ setup: - do: get: index: test - id: 1 + id: "1" - is_true: found - match: { _source.a_field: "quick brown fox jump lazy dog" } diff --git 
a/modules/mapper-extras/src/yamlRestTest/resources/rest-api-spec/test/search-as-you-type/20_highlighting.yml b/modules/mapper-extras/src/yamlRestTest/resources/rest-api-spec/test/search-as-you-type/20_highlighting.yml index 58441abac8f88..f9be93ca8c110 100644 --- a/modules/mapper-extras/src/yamlRestTest/resources/rest-api-spec/test/search-as-you-type/20_highlighting.yml +++ b/modules/mapper-extras/src/yamlRestTest/resources/rest-api-spec/test/search-as-you-type/20_highlighting.yml @@ -22,7 +22,7 @@ setup: - do: index: index: test - id: 1 + id: "1" body: a_field: "quick brown fox jump lazy dog" text_field: "quick brown fox jump lazy dog" diff --git a/modules/parent-join/src/yamlRestTest/resources/rest-api-spec/test/11_parent_child.yml b/modules/parent-join/src/yamlRestTest/resources/rest-api-spec/test/11_parent_child.yml index a6a85fca47fdc..5df0d110d20c5 100644 --- a/modules/parent-join/src/yamlRestTest/resources/rest-api-spec/test/11_parent_child.yml +++ b/modules/parent-join/src/yamlRestTest/resources/rest-api-spec/test/11_parent_child.yml @@ -13,13 +13,13 @@ setup: - do: index: index: test - id: 1 + id: "1" body: {"foo": "bar", "join_field": {"name" : "parent"} } - do: index: index: test - id: 2 + id: "2" routing: "1" body: {"bar": "baz", "join_field": { "name" : "child", "parent": "1"} } diff --git a/modules/parent-join/src/yamlRestTest/resources/rest-api-spec/test/20_parent_join.yml b/modules/parent-join/src/yamlRestTest/resources/rest-api-spec/test/20_parent_join.yml index d92bea5a3c9a8..4f80e4bcb3b38 100644 --- a/modules/parent-join/src/yamlRestTest/resources/rest-api-spec/test/20_parent_join.yml +++ b/modules/parent-join/src/yamlRestTest/resources/rest-api-spec/test/20_parent_join.yml @@ -86,7 +86,7 @@ teardown: query: parent_id: type: child - id: 1 + id: "1" - match: { hits.total: 2 } - match: { hits.hits.0._index: "test" } @@ -143,4 +143,4 @@ teardown: query: parent_id: type: child - id: 1 + id: "1" diff --git a/modules/parent-join/src/yamlRestTest/resources/rest-api-spec/test/30_inner_hits.yml b/modules/parent-join/src/yamlRestTest/resources/rest-api-spec/test/30_inner_hits.yml index 21033baee3280..48cb6453b17bd 100644 --- a/modules/parent-join/src/yamlRestTest/resources/rest-api-spec/test/30_inner_hits.yml +++ b/modules/parent-join/src/yamlRestTest/resources/rest-api-spec/test/30_inner_hits.yml @@ -18,27 +18,27 @@ setup: - do: index: index: test - id: 1 + id: "1" body: { "join_field": { "name": "question" }, "entity_type": "question" } - do: index: index: test - id: 2 + id: "2" routing: "1" body: { "join_field": { "name": "answer", "parent": 1} , "entity_type": "answer" } - do: index: index: test - id: 3 + id: "3" body: { "join_field": { "name": "person" }, "entity_type": "person" } - do: index: index: test routing: "3" - id: 4 + id: "4" body: { "join_field": { "name": "address", "parent": 3 }, "entity_type": "address" } - do: diff --git a/modules/rank-eval/src/yamlRestTest/resources/rest-api-spec/test/rank_eval/40_rank_eval_templated.yml b/modules/rank-eval/src/yamlRestTest/resources/rest-api-spec/test/rank_eval/40_rank_eval_templated.yml index 08897e17ef900..193751467a8d1 100644 --- a/modules/rank-eval/src/yamlRestTest/resources/rest-api-spec/test/rank_eval/40_rank_eval_templated.yml +++ b/modules/rank-eval/src/yamlRestTest/resources/rest-api-spec/test/rank_eval/40_rank_eval_templated.yml @@ -10,37 +10,37 @@ setup: - do: index: index: test - id: 1 + id: "1" body: { "text": "berlin", "title" : "Berlin, Germany" } - do: index: index: test - id: 2 + id: "2" body: { "text": 
"amsterdam" } - do: index: index: test - id: 3 + id: "3" body: { "text": "amsterdam" } - do: index: index: test - id: 4 + id: "4" body: { "text": "amsterdam" } - do: index: index: test - id: 5 + id: "5" body: { "text": "amsterdam" } - do: index: index: test - id: 6 + id: "6" body: { "text": "amsterdam" } - do: diff --git a/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/delete_by_query/10_basic.yml b/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/delete_by_query/10_basic.yml index ab049d5bbc870..90c09c4a23741 100644 --- a/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/delete_by_query/10_basic.yml +++ b/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/delete_by_query/10_basic.yml @@ -3,7 +3,7 @@ - do: index: index: test - id: 1 + id: "1" body: { "text": "test" } - do: indices.refresh: {} @@ -41,7 +41,7 @@ - do: index: index: test - id: 1 + id: "1" body: { "text": "test" } - do: indices.refresh: {} @@ -101,7 +101,7 @@ - do: index: index: test - id: 1 + id: "1" body: { "text": "test" } - do: indices.refresh: {} @@ -109,7 +109,7 @@ - do: index: index: test - id: 1 + id: "1" body: { "text": "test2" } - do: @@ -153,7 +153,7 @@ - do: index: index: test - id: 1 + id: "1" body: { "text": "test" } - do: indices.refresh: {} @@ -161,7 +161,7 @@ - do: index: index: test - id: 1 + id: "1" body: { "text": "test2" } - do: @@ -204,7 +204,7 @@ - do: index: index: test - id: 1 + id: "1" body: { "text": "test" } - do: indices.refresh: {} @@ -212,7 +212,7 @@ - do: index: index: test - id: 1 + id: "1" body: { "text": "test2" } - do: @@ -245,12 +245,12 @@ - do: index: index: twitter - id: 1 + id: "1" body: { "user": "kimchy" } - do: index: index: twitter - id: 2 + id: "2" body: { "user": "junk" } - do: indices.refresh: {} @@ -284,12 +284,12 @@ - do: index: index: twitter - id: 1 + id: "1" body: { "user": "kimchy" } - do: index: index: twitter - id: 2 + id: "2" body: { "user": "kimchy" } - do: indices.refresh: {} @@ -327,12 +327,12 @@ - do: index: index: twitter - id: 1 + id: "1" body: { "user": "kimchy" } - do: index: index: twitter - id: 2 + id: "2" body: { "user": "kimchy" } - do: indices.refresh: {} @@ -370,12 +370,12 @@ - do: index: index: twitter - id: 1 + id: "1" body: { "user": "kimchy" } - do: index: index: twitter - id: 2 + id: "2" body: { "user": "kimchy" } - do: indices.refresh: {} diff --git a/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/delete_by_query/20_validation.yml b/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/delete_by_query/20_validation.yml index cd7a4e8bce12f..d07b61e32b9e9 100644 --- a/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/delete_by_query/20_validation.yml +++ b/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/delete_by_query/20_validation.yml @@ -19,7 +19,7 @@ - do: index: index: test - id: 1 + id: "1" body: { "text": "test" } - do: catch: /conflicts may only be .* but was \[cat\]/ @@ -39,7 +39,7 @@ - do: index: index: test - id: 1 + id: "1" body: { "text": "test" } - do: catch: /\[max_docs\] parameter cannot be negative, found \[-4\]/ @@ -59,7 +59,7 @@ - do: index: index: test - id: 1 + id: "1" body: { "text": "test" } - do: catch: /\[max_docs\] should be >= \[slices\]/ @@ -76,7 +76,7 @@ - do: index: index: test - id: 1 + id: "1" body: { "text": "test" } - do: catch: /Failed to parse int parameter \[scroll_size\] with value \[asdf\]/ diff --git a/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/delete_by_query/40_versioning.yml 
b/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/delete_by_query/40_versioning.yml index 8832b6a65c3dd..cb97e957e8394 100644 --- a/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/delete_by_query/40_versioning.yml +++ b/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/delete_by_query/40_versioning.yml @@ -7,7 +7,7 @@ - do: index: index: index1 - id: 1 + id: "1" version: 0 # Starting version is zero version_type: external body: {"delete": 0} @@ -27,5 +27,5 @@ - do: get: index: index1 - id: 1 + id: "1" - match: {_version: 0} diff --git a/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/delete_by_query/50_wait_for_active_shards.yml b/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/delete_by_query/50_wait_for_active_shards.yml index ea8ed4df3e748..0d4b4bc30673b 100644 --- a/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/delete_by_query/50_wait_for_active_shards.yml +++ b/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/delete_by_query/50_wait_for_active_shards.yml @@ -9,7 +9,7 @@ - do: index: index: test - id: 1 + id: "1" body: {"text": "test"} - do: indices.refresh: {} diff --git a/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/delete_by_query/80_slices.yml b/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/delete_by_query/80_slices.yml index efc669173328c..08eb80d789155 100644 --- a/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/delete_by_query/80_slices.yml +++ b/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/delete_by_query/80_slices.yml @@ -3,22 +3,22 @@ - do: index: index: test - id: 1 + id: "1" body: { "text": "test" } - do: index: index: test - id: 2 + id: "2" body: { "text": "test" } - do: index: index: test - id: 3 + id: "3" body: { "text": "test" } - do: index: index: test - id: 4 + id: "4" body: { "text": "test" } - do: indices.refresh: {} @@ -67,22 +67,22 @@ - do: index: index: test - id: 1 + id: "1" body: { "text": "test" } - do: index: index: test - id: 2 + id: "2" body: { "text": "test" } - do: index: index: test - id: 3 + id: "3" body: { "text": "test" } - do: index: index: test - id: 4 + id: "4" body: { "text": "test" } - do: indices.refresh: {} @@ -177,32 +177,32 @@ - do: index: index: test - id: 1 + id: "1" body: { "text": "test" } - do: index: index: test - id: 2 + id: "2" body: { "text": "test" } - do: index: index: test - id: 3 + id: "3" body: { "text": "test" } - do: index: index: test - id: 4 + id: "4" body: { "text": "test" } - do: index: index: test - id: 5 + id: "5" body: { "text": "test" } - do: index: index: test - id: 6 + id: "6" body: { "text": "test" } - do: indices.refresh: @@ -298,22 +298,22 @@ - do: index: index: test - id: 1 + id: "1" body: { "text": "test" } - do: index: index: test - id: 2 + id: "2" body: { "text": "test" } - do: index: index: test - id: 3 + id: "3" body: { "text": "test" } - do: index: index: test - id: 4 + id: "4" body: { "text": "test" } - do: indices.refresh: {} diff --git a/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/reindex/10_basic.yml b/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/reindex/10_basic.yml index 9ef6c1a90c400..4053b0bb9d6b1 100644 --- a/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/reindex/10_basic.yml +++ b/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/reindex/10_basic.yml @@ -3,7 +3,7 @@ - do: index: index: source - id: 1 + id: "1" body: { "text": "test" } - do: indices.refresh: {} @@ -37,12 +37,12 
@@ - do: index: index: source - id: 1 + id: "1" body: { "text": "test" } - do: index: index: dest - id: 1 + id: "1" body: { "text": "test" } - do: indices.refresh: {} @@ -76,7 +76,7 @@ - do: index: index: source - id: 1 + id: "1" body: { "text": "test" } - do: indices.refresh: {} @@ -132,12 +132,12 @@ - do: index: index: source - id: 1 + id: "1" body: { "text": "test" } - do: index: index: dest - id: 1 + id: "1" body: { "text": "test" } - do: indices.refresh: {} @@ -177,12 +177,12 @@ - do: index: index: source - id: 1 + id: "1" body: { "text": "test" } - do: index: index: dest - id: 1 + id: "1" body: { "text": "test" } - do: indices.refresh: {} @@ -216,7 +216,7 @@ - do: index: index: source - id: 1 + id: "1" body: {} - do: indices.refresh: {} @@ -233,7 +233,7 @@ - do: get: index: dest - id: 1 + id: "1" - match: { _source: {} } # Make sure reindex closed all the scroll contexts @@ -248,7 +248,7 @@ - do: index: index: source - id: 1 + id: "1" body: { "text": "test", "filtered": "removed" } refresh: true @@ -276,7 +276,7 @@ - do: get: index: dest - id: 1 + id: "1" - match: { _source.text: "test" } - is_false: _source.filtered diff --git a/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/reindex/20_validation.yml b/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/reindex/20_validation.yml index 4a1b9f9112e96..762e63fb329b4 100644 --- a/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/reindex/20_validation.yml +++ b/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/reindex/20_validation.yml @@ -44,7 +44,7 @@ - do: index: index: source - id: 1 + id: "1" body: { "text": "test" } - do: reindex: @@ -103,7 +103,7 @@ - do: index: index: test - id: 1 + id: "1" body: { "text": "test" } - do: @@ -125,7 +125,7 @@ - do: index: index: test - id: 1 + id: "1" body: { "text": "test" } - do: catch: /\[max_docs\] parameter cannot be negative, found \[-4\]/ @@ -146,7 +146,7 @@ - do: index: index: test - id: 1 + id: "1" body: { "text": "test" } - do: catch: /\[max_docs\] should be >= \[slices\]/ @@ -168,7 +168,7 @@ - do: index: index: test - id: 1 + id: "1" body: { "text": "test" } - do: catch: /\[max_docs\] parameter cannot be negative, found \[-4\]/ @@ -189,7 +189,7 @@ - do: index: index: test - id: 1 + id: "1" body: { "text": "test" } - do: catch: /\[max_docs\] set to two different values \[4\] and \[5\]/ @@ -262,7 +262,7 @@ - do: index: index: test - id: 1 + id: "1" body: { age: 23 } - do: indices.refresh: {} @@ -388,7 +388,7 @@ - do: index: index: source - id: 1 + id: "1" body: { "text": "test" } - do: catch: /_source:false is not supported in this context/ diff --git a/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/reindex/25_no_auto_create.yml b/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/reindex/25_no_auto_create.yml index b64da0831621d..844873f95b0f2 100644 --- a/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/reindex/25_no_auto_create.yml +++ b/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/reindex/25_no_auto_create.yml @@ -10,7 +10,7 @@ teardown: - do: index: index: test - id: 1 + id: "1" body: { "text": "test" } - do: cluster.put_settings: @@ -36,7 +36,7 @@ teardown: - do: index: index: test - id: 1 + id: "1" body: { "text": "test" } - do: catch: /no such index \[dest\] and \[action.auto_create_index\] \(\[test\]\) doesn't match/ @@ -57,7 +57,7 @@ teardown: - do: index: index: test - id: 1 + id: "1" body: { "text": "test" } - do: catch: /no such index \[dest\] and 
\[action.auto_create_index\] contains \[-dest\] which forbids automatic creation of the index/ diff --git a/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/reindex/30_search.yml b/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/reindex/30_search.yml index d021848216517..4aac33d52fb6f 100644 --- a/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/reindex/30_search.yml +++ b/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/reindex/30_search.yml @@ -3,12 +3,12 @@ - do: index: index: test - id: 1 + id: "1" body: { "text": "test" } - do: index: index: test - id: 2 + id: "2" body: { "text": "junk" } - do: indices.refresh: {} @@ -40,12 +40,12 @@ - do: index: index: test - id: 1 + id: "1" body: { "order": 1 } - do: index: index: test - id: 2 + id: "2" body: { "order": 2 } - do: indices.refresh: {} @@ -84,12 +84,12 @@ - do: index: index: test - id: 1 + id: "1" body: { "order": 1 } - do: index: index: test - id: 2 + id: "2" body: { "order": 2 } - do: indices.refresh: {} @@ -131,7 +131,7 @@ - do: index: index: test - id: 1 + id: "1" body: { "order": 1 } - do: indices.refresh: {} @@ -165,12 +165,12 @@ - do: index: index: test - id: 1 + id: "1" body: { "value": 17 } - do: index: index: test - id: 2 + id: "2" body: { "value": 17 } - do: indices.refresh: {} diff --git a/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/reindex/35_search_failures.yml b/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/reindex/35_search_failures.yml index 44b36119fbc76..fb9fb0302de6c 100644 --- a/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/reindex/35_search_failures.yml +++ b/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/reindex/35_search_failures.yml @@ -10,7 +10,7 @@ - do: index: index: source - id: 1 + id: "1" body: { "text": "test" } - do: indices.refresh: {} diff --git a/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/reindex/40_versioning.yml b/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/reindex/40_versioning.yml index 3d718831187b4..bfd722d2a8d96 100644 --- a/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/reindex/40_versioning.yml +++ b/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/reindex/40_versioning.yml @@ -7,19 +7,19 @@ - do: index: index: src - id: 1 + id: "1" body: { "company": "cat" } version: 2 version_type: external - do: index: index: src - id: 2 + id: "2" body: { "company": "cow" } - do: index: index: dest - id: 1 + id: "1" body: { "company": "dog" } - do: indices.refresh: {} @@ -51,19 +51,19 @@ - do: index: index: src - id: 1 + id: "1" body: { "company": "cat" } version: 2 version_type: external - do: index: index: src - id: 2 + id: "2" body: { "company": "cow" } - do: index: index: dest - id: 1 + id: "1" body: { "company": "dog" } - do: indices.refresh: {} @@ -97,19 +97,19 @@ - do: index: index: src - id: 1 + id: "1" body: { "company": "cat" } version: 2 version_type: external - do: index: index: src - id: 2 + id: "2" body: { "company": "cow" } - do: index: index: dest - id: 1 + id: "1" body: { "company": "dog" } - do: indices.refresh: {} @@ -142,17 +142,17 @@ - do: index: index: src - id: 1 + id: "1" body: { "company": "cat" } - do: index: index: src - id: 2 + id: "2" body: { "company": "cow" } - do: index: index: dest - id: 1 + id: "1" body: { "company": "dog" } - do: indices.refresh: {} diff --git a/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/reindex/50_routing.yml 
b/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/reindex/50_routing.yml index d7a0db5451a1d..feea88932f545 100644 --- a/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/reindex/50_routing.yml +++ b/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/reindex/50_routing.yml @@ -3,7 +3,7 @@ - do: index: index: src - id: 1 + id: "1" body: { "company": "cat" } - do: indices.refresh: {} @@ -21,7 +21,7 @@ - do: get: index: dest - id: 1 + id: "1" routing: cat - match: { _routing: cat } @@ -30,7 +30,7 @@ - do: index: index: src - id: 1 + id: "1" body: { "company": "cat" } routing: null - do: @@ -49,5 +49,5 @@ - do: get: index: dest - id: 1 + id: "1" - is_false: _routing diff --git a/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/reindex/60_wait_for_active_shards.yml b/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/reindex/60_wait_for_active_shards.yml index 3498e555d2879..58253459cc03a 100644 --- a/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/reindex/60_wait_for_active_shards.yml +++ b/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/reindex/60_wait_for_active_shards.yml @@ -9,7 +9,7 @@ - do: index: index: src - id: 1 + id: "1" body: {"text": "test"} - do: indices.refresh: {} @@ -42,4 +42,4 @@ - do: get: index: dest - id: 1 + id: "1" diff --git a/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/reindex/70_throttle.yml b/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/reindex/70_throttle.yml index 696fdd068c454..67c917f629db6 100644 --- a/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/reindex/70_throttle.yml +++ b/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/reindex/70_throttle.yml @@ -12,17 +12,17 @@ - do: index: index: source - id: 1 + id: "1" body: { "text": "test" } - do: index: index: source - id: 2 + id: "2" body: { "text": "test" } - do: index: index: source - id: 3 + id: "3" body: { "text": "test" } - do: indices.refresh: {} @@ -58,17 +58,17 @@ - do: index: index: source - id: 1 + id: "1" body: { "text": "test" } - do: index: index: source - id: 2 + id: "2" body: { "text": "test" } - do: index: index: source - id: 3 + id: "3" body: { "text": "test" } - do: indices.refresh: {} @@ -104,17 +104,17 @@ - do: index: index: source - id: 1 + id: "1" body: { "text": "test" } - do: index: index: source - id: 2 + id: "2" body: { "text": "test" } - do: index: index: source - id: 3 + id: "3" body: { "text": "test" } - do: indices.refresh: {} @@ -156,17 +156,17 @@ - do: index: index: source - id: 1 + id: "1" body: { "text": "test" } - do: index: index: source - id: 2 + id: "2" body: { "text": "test" } - do: index: index: source - id: 3 + id: "3" body: { "text": "test" } - do: indices.refresh: {} diff --git a/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/reindex/80_slices.yml b/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/reindex/80_slices.yml index d5beb06449a97..8441dfdc3b02a 100644 --- a/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/reindex/80_slices.yml +++ b/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/reindex/80_slices.yml @@ -3,22 +3,22 @@ - do: index: index: source - id: 1 + id: "1" body: { "text": "test" } - do: index: index: source - id: 2 + id: "2" body: { "text": "test" } - do: index: index: source - id: 3 + id: "3" body: { "text": "test" } - do: index: index: source - id: 4 + id: "4" body: { "text": "test" } - do: indices.refresh: {} @@ -63,22 +63,22 @@ - do: index: index: 
source - id: 1 + id: "1" body: { "text": "test" } - do: index: index: source - id: 2 + id: "2" body: { "text": "test" } - do: index: index: source - id: 3 + id: "3" body: { "text": "test" } - do: index: index: source - id: 4 + id: "4" body: { "text": "test" } - do: indices.refresh: {} @@ -182,32 +182,32 @@ - do: index: index: source - id: 1 + id: "1" body: { "text": "test" } - do: index: index: source - id: 2 + id: "2" body: { "text": "test" } - do: index: index: source - id: 3 + id: "3" body: { "text": "test" } - do: index: index: source - id: 4 + id: "4" body: { "text": "test" } - do: index: index: source - id: 5 + id: "5" body: { "text": "test" } - do: index: index: source - id: 6 + id: "6" body: { "text": "test" } - do: indices.refresh: {} @@ -306,22 +306,22 @@ - do: index: index: source - id: 1 + id: "1" body: { "text": "test" } - do: index: index: source - id: 2 + id: "2" body: { "text": "test" } - do: index: index: source - id: 3 + id: "3" body: { "text": "test" } - do: index: index: source - id: 4 + id: "4" body: { "text": "test" } - do: indices.refresh: {} diff --git a/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/reindex/85_scripting.yml b/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/reindex/85_scripting.yml index 8716ad303c57f..c0bfa464a0c04 100644 --- a/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/reindex/85_scripting.yml +++ b/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/reindex/85_scripting.yml @@ -3,7 +3,7 @@ - do: index: index: twitter - id: 1 + id: "1" body: { "user": "kimchy" } - do: indices.refresh: {} @@ -37,12 +37,12 @@ - do: index: index: twitter - id: 1 + id: "1" body: { "user": "kimchy" } - do: index: index: twitter - id: 2 + id: "2" body: { "user": "blort" } - do: indices.refresh: {} @@ -86,12 +86,12 @@ - do: index: index: twitter - id: 1 + id: "1" body: { "user": "kimchy" } - do: index: index: twitter - id: 2 + id: "2" body: { "user": "foo" } - do: indices.refresh: {} @@ -113,14 +113,14 @@ - do: get: index: new_twitter - id: 1 + id: "1" routing: kimchy - match: { _routing: kimchy } - do: get: index: new_twitter - id: 2 + id: "2" routing: foo - match: { _routing: foo } @@ -129,12 +129,12 @@ - do: index: index: twitter - id: 1 + id: "1" body: { "user": "kimchy" } - do: index: index: twitter - id: 2 + id: "2" body: { "user": "foo" } - do: indices.refresh: {} @@ -183,12 +183,12 @@ - do: index: index: twitter - id: 1 + id: "1" body: { "user": "kimchy" } - do: index: index: twitter - id: 2 + id: "2" body: { "user": "foo" } - do: indices.refresh: {} @@ -216,14 +216,14 @@ - do: index: index: twitter - id: 1 + id: "1" version: 1 version_type: external body: { "user": "kimchy" } - do: index: index: new_twitter - id: 1 + id: "1" version: 1 version_type: external body: { "user": "kimchy" } @@ -260,12 +260,12 @@ - do: index: index: twitter - id: 1 + id: "1" body: { "user": "kimchy" } - do: index: index: new_twitter - id: 1 + id: "1" body: { "user": "kimchy" } - do: indices.refresh: {} @@ -299,12 +299,12 @@ - do: index: index: twitter - id: 1 + id: "1" body: { "user": "kimchy" } - do: index: index: twitter - id: 2 + id: "2" body: { "user": "another" } - do: indices.refresh: {} @@ -349,17 +349,17 @@ - do: index: index: index1 - id: 1 + id: "1" body: { "lang": "en", "id": 123 } - do: index: index: index1 - id: 2 + id: "2" body: { "lang": "en", "id": 456 } - do: index: index: index1 - id: 3 + id: "3" body: { "lang": "fr", "id": 789 } # Destination index - do: @@ -422,7 +422,7 @@ - do: index: index: twitter - id: 1 
+ id: "1" body: { "user": "kimchy" } - do: indices.refresh: {} diff --git a/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/reindex/90_remote.yml b/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/reindex/90_remote.yml index 8354fc0aaf322..325f044466847 100644 --- a/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/reindex/90_remote.yml +++ b/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/reindex/90_remote.yml @@ -3,7 +3,7 @@ - do: index: index: source - id: 1 + id: "1" body: { "text": "test" } refresh: true @@ -58,12 +58,12 @@ - do: index: index: source - id: 1 + id: "1" body: { "text": "test" } - do: index: index: source - id: 2 + id: "2" body: { "text": "test2" } - do: indices.refresh: {} @@ -113,7 +113,7 @@ - do: index: index: source - id: 1 + id: "1" body: { "text": "test" } routing: foo refresh: true @@ -165,7 +165,7 @@ - do: index: index: source - id: 1 + id: "1" body: { "text": "test" } refresh: true @@ -226,13 +226,13 @@ - do: index: index: source - id: 1 + id: "1" body: { "text": "test" } refresh: true - do: index: index: source - id: 2 + id: "2" body: { "text": "test" } refresh: true @@ -288,7 +288,7 @@ - do: index: index: source - id: 1 + id: "1" body: { "text": "test" } refresh: true @@ -319,7 +319,7 @@ - do: index: index: source - id: 1 + id: "1" body: { "text": "test" } refresh: true @@ -340,7 +340,7 @@ - do: index: index: source - id: 1 + id: "1" body: { "text": "test", "filtered": "removed" } refresh: true @@ -379,7 +379,7 @@ - do: get: index: dest - id: 1 + id: "1" - match: { _source.text: "test" } - is_false: _source.filtered @@ -397,17 +397,17 @@ - do: index: index: source - id: 1 + id: "1" body: { "text": "test" } - do: index: index: source - id: 2 + id: "2" body: { "text": "test" } - do: index: index: source - id: 3 + id: "3" body: { "text": "test" } - do: indices.refresh: {} diff --git a/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/reindex/95_parent_join.yml b/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/reindex/95_parent_join.yml index e4861d35e781a..07aa512a1f4cc 100644 --- a/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/reindex/95_parent_join.yml +++ b/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/reindex/95_parent_join.yml @@ -18,20 +18,20 @@ setup: - do: index: index: source - id: 1 + id: "1" body: { "join_field": { "name": "parent" } } - do: index: index: source - id: 2 + id: "2" routing: "1" body: { "join_field": { "name": "child", "parent": "1" } } - do: index: index: source - id: 3 + id: "3" routing: "1" body: { "join_field": { "name": "grand_child", "parent": "2" } } @@ -59,7 +59,7 @@ setup: query: parent_id: type: child - id: 1 + id: "1" - match: {hits.total: 1} - match: {hits.hits.0._id: "2"} @@ -74,7 +74,7 @@ setup: query: parent_id: type: child - id: 1 + id: "1" - match: {hits.total: 1} - match: {hits.hits.0._id: "3"} @@ -117,7 +117,7 @@ setup: query: parent_id: type: child - id: 1 + id: "1" - match: {hits.total: 1} - match: {hits.hits.0._id: "2"} @@ -132,7 +132,7 @@ setup: query: parent_id: type: child - id: 1 + id: "1" - match: {hits.total: 1} - match: {hits.hits.0._id: "3"} diff --git a/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/update_by_query/10_basic.yml b/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/update_by_query/10_basic.yml index 02c4afd731531..a2334139c20e9 100644 --- a/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/update_by_query/10_basic.yml +++ 
b/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/update_by_query/10_basic.yml @@ -3,7 +3,7 @@ - do: index: index: test - id: 1 + id: "1" body: { "text": "test" } - do: indices.refresh: {} @@ -29,7 +29,7 @@ - do: index: index: test - id: 1 + id: "1" body: { "text": "test" } - do: indices.refresh: {} @@ -85,7 +85,7 @@ - do: index: index: test - id: 1 + id: "1" body: { "text": "test" } - do: indices.refresh: {} @@ -93,7 +93,7 @@ - do: index: index: test - id: 1 + id: "1" body: { "text": "test2" } - do: @@ -124,7 +124,7 @@ - do: index: index: test - id: 1 + id: "1" body: { "text": "test" } - do: indices.refresh: {} @@ -132,7 +132,7 @@ - do: index: index: test - id: 1 + id: "1" body: { "text": "test2" } - do: @@ -162,7 +162,7 @@ - do: index: index: test - id: 1 + id: "1" body: { "text": "test" } - do: indices.refresh: {} @@ -170,7 +170,7 @@ - do: index: index: test - id: 1 + id: "1" body: { "text": "test2" } - do: @@ -190,12 +190,12 @@ - do: index: index: twitter - id: 1 + id: "1" body: { "user": "kimchy" } - do: index: index: twitter - id: 2 + id: "2" body: { "user": "junk" } - do: indices.refresh: {} @@ -222,12 +222,12 @@ - do: index: index: twitter - id: 1 + id: "1" body: { "user": "kimchy" } - do: index: index: twitter - id: 2 + id: "2" body: { "user": "kimchy" } - do: indices.refresh: {} @@ -252,12 +252,12 @@ - do: index: index: twitter - id: 1 + id: "1" body: { "user": "kimchy" } - do: index: index: twitter - id: 2 + id: "2" body: { "user": "kimchy" } - do: indices.refresh: {} @@ -282,12 +282,12 @@ - do: index: index: twitter - id: 1 + id: "1" body: { "user": "kimchy" } - do: index: index: twitter - id: 2 + id: "2" body: { "user": "kimchy" } - do: indices.refresh: {} @@ -341,7 +341,7 @@ - do: index: index: test - id: 1 + id: "1" body: {} - do: indices.refresh: {} @@ -354,7 +354,7 @@ - do: get: index: test - id: 1 + id: "1" - match: { _source: {} } - match: { _version: 2 } diff --git a/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/update_by_query/20_validation.yml b/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/update_by_query/20_validation.yml index 46d1db0d74df4..df3d7d5ef5da4 100644 --- a/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/update_by_query/20_validation.yml +++ b/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/update_by_query/20_validation.yml @@ -3,7 +3,7 @@ - do: index: index: test - id: 1 + id: "1" body: { "text": "test" } - do: catch: /conflicts may only be .* but was \[cat\]/ @@ -20,7 +20,7 @@ - do: index: index: test - id: 1 + id: "1" body: { "text": "test" } - do: catch: /\[max_docs\] parameter cannot be negative, found \[-4\]/ @@ -37,7 +37,7 @@ - do: index: index: test - id: 1 + id: "1" body: { "text": "test" } - do: catch: /\[max_docs\] parameter cannot be negative, found \[-4\]/ @@ -55,7 +55,7 @@ - do: index: index: test - id: 1 + id: "1" body: { "text": "test" } - do: catch: /\[max_docs\] set to two different values \[4\] and \[5\]/ @@ -76,7 +76,7 @@ - do: index: index: test - id: 1 + id: "1" body: { "text": "test" } - do: catch: /\[max_docs\] should be >= \[slices\]/ @@ -90,7 +90,7 @@ - do: index: index: test - id: 1 + id: "1" body: { "text": "test" } - do: catch: /Failed to parse int parameter \[scroll_size\] with value \[asdf\]/ @@ -110,7 +110,7 @@ - do: index: index: test - id: 1 + id: "1" body: { age: 23 } - do: indices.refresh: {} diff --git a/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/update_by_query/30_new_fields.yml 
b/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/update_by_query/30_new_fields.yml index ba14b34cf0ef5..d5c3404b482e8 100644 --- a/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/update_by_query/30_new_fields.yml +++ b/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/update_by_query/30_new_fields.yml @@ -11,7 +11,7 @@ - do: index: index: test - id: 1 + id: "1" refresh: true body: { "name": "bob! house" } diff --git a/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/update_by_query/35_search_failure.yml b/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/update_by_query/35_search_failure.yml index 5a22eec88c0f7..1a28ec8b183b5 100644 --- a/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/update_by_query/35_search_failure.yml +++ b/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/update_by_query/35_search_failure.yml @@ -10,7 +10,7 @@ - do: index: index: source - id: 1 + id: "1" body: { "text": "test" } - do: indices.refresh: {} diff --git a/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/update_by_query/40_versioning.yml b/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/update_by_query/40_versioning.yml index 3aa6c0918800d..477e83452adf3 100644 --- a/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/update_by_query/40_versioning.yml +++ b/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/update_by_query/40_versioning.yml @@ -3,7 +3,7 @@ - do: index: index: test - id: 1 + id: "1" body: {"text": "test"} - do: indices.refresh: {} @@ -17,7 +17,7 @@ - do: get: index: test - id: 1 + id: "1" - match: {_version: 2} --- @@ -28,7 +28,7 @@ - do: index: index: index1 - id: 1 + id: "1" version: 0 # Starting version is zero version_type: external body: {"update": 0} @@ -45,5 +45,5 @@ - do: get: index: index1 - id: 1 + id: "1" - match: {_version: 0} diff --git a/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/update_by_query/50_consistency.yml b/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/update_by_query/50_consistency.yml index 4a067580b54d3..ef71dd36a18b8 100644 --- a/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/update_by_query/50_consistency.yml +++ b/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/update_by_query/50_consistency.yml @@ -9,7 +9,7 @@ - do: index: index: test - id: 1 + id: "1" body: {"text": "test"} - do: indices.refresh: {} @@ -34,4 +34,4 @@ - do: get: index: test - id: 1 + id: "1" diff --git a/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/update_by_query/70_slices.yml b/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/update_by_query/70_slices.yml index 29330f5b32f58..a6e4005e2db59 100644 --- a/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/update_by_query/70_slices.yml +++ b/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/update_by_query/70_slices.yml @@ -3,22 +3,22 @@ - do: index: index: test - id: 1 + id: "1" body: { "text": "test" } - do: index: index: test - id: 2 + id: "2" body: { "text": "test" } - do: index: index: test - id: 3 + id: "3" body: { "text": "test" } - do: index: index: test - id: 4 + id: "4" body: { "text": "test" } - do: indices.refresh: {} @@ -59,22 +59,22 @@ - do: index: index: test - id: 1 + id: "1" body: { "text": "test" } - do: index: index: test - id: 2 + id: "2" body: { "text": "test" } - do: index: index: test - id: 3 + id: "3" body: { "text": "test" } - do: index: index: test - id: 4 + id: "4" 
body: { "text": "test" } - do: indices.refresh: {} @@ -164,32 +164,32 @@ - do: index: index: test - id: 1 + id: "1" body: { "text": "test" } - do: index: index: test - id: 2 + id: "2" body: { "text": "test" } - do: index: index: test - id: 3 + id: "3" body: { "text": "test" } - do: index: index: test - id: 4 + id: "4" body: { "text": "test" } - do: index: index: test - id: 5 + id: "5" body: { "text": "test" } - do: index: index: test - id: 6 + id: "6" body: { "text": "test" } - do: indices.refresh: {} @@ -280,22 +280,22 @@ - do: index: index: test - id: 1 + id: "1" body: { "text": "test" } - do: index: index: test - id: 2 + id: "2" body: { "text": "test" } - do: index: index: test - id: 3 + id: "3" body: { "text": "test" } - do: index: index: test - id: 4 + id: "4" body: { "text": "test" } - do: indices.refresh: {} diff --git a/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/update_by_query/80_scripting.yml b/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/update_by_query/80_scripting.yml index 0c297b13dbd81..d73bdcc6b4de5 100644 --- a/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/update_by_query/80_scripting.yml +++ b/modules/reindex/src/yamlRestTest/resources/rest-api-spec/test/update_by_query/80_scripting.yml @@ -3,7 +3,7 @@ - do: index: index: twitter - id: 1 + id: "1" body: { "user": "kimchy" } - do: indices.refresh: {} @@ -34,7 +34,7 @@ - do: index: index: twitter - id: 1 + id: "1" body: { "user": "kimchy" } - do: indices.refresh: {} @@ -62,12 +62,12 @@ - do: index: index: twitter - id: 1 + id: "1" body: { "user": "kimchy" } - do: index: index: twitter - id: 2 + id: "2" body: { "user": "foo" } - do: indices.refresh: {} @@ -108,12 +108,12 @@ - do: index: index: twitter - id: 1 + id: "1" body: { "user": "kimchy" } - do: index: index: twitter - id: 2 + id: "2" body: { "user": "foo" } - do: indices.refresh: {} @@ -135,7 +135,7 @@ - do: index: index: twitter - id: 1 + id: "1" body: { "user": "kimchy" } - do: indices.refresh: {} @@ -154,7 +154,7 @@ - do: index: index: twitter - id: 1 + id: "1" body: { "user": "kimchy" } - do: indices.refresh: {} @@ -173,22 +173,22 @@ - do: index: index: twitter - id: 1 + id: "1" body: { "level": 9, "last_updated": "2016-01-01T12:10:30Z" } - do: index: index: twitter - id: 2 + id: "2" body: { "level": 10, "last_updated": "2016-01-01T12:10:30Z" } - do: index: index: twitter - id: 3 + id: "3" body: { "level": 11, "last_updated": "2016-01-01T12:10:30Z" } - do: index: index: twitter - id: 4 + id: "4" body: { "level": 12, "last_updated": "2016-01-01T12:10:30Z" } - do: indices.refresh: {} @@ -235,22 +235,22 @@ - do: index: index: twitter - id: 1 + id: "1" body: { "level": 9, "last_updated": "2016-01-01T12:10:30Z" } - do: index: index: twitter - id: 2 + id: "2" body: { "level": 10, "last_updated": "2016-01-01T12:10:30Z" } - do: index: index: twitter - id: 3 + id: "3" body: { "level": 11, "last_updated": "2016-01-01T12:10:30Z" } - do: index: index: twitter - id: 4 + id: "4" body: { "level": 12, "last_updated": "2016-01-01T12:10:30Z" } - do: indices.refresh: {} @@ -310,12 +310,12 @@ - do: index: index: twitter - id: 1 + id: "1" body: { "user": "kimchy" } - do: index: index: twitter - id: 2 + id: "2" body: { "user": "foo" } - do: indices.refresh: {} @@ -337,22 +337,22 @@ - do: index: index: twitter - id: 1 + id: "1" body: { "level": 9, "last_updated": "2016-01-01T12:10:30Z" } - do: index: index: twitter - id: 2 + id: "2" body: { "level": 10, "last_updated": "2016-01-01T12:10:30Z" } - do: index: index: twitter - id: 3 + id: 
"3" body: { "level": 11, "last_updated": "2016-01-01T12:10:30Z" } - do: index: index: twitter - id: 4 + id: "4" body: { "level": 12, "last_updated": "2016-01-01T12:10:30Z" } - do: indices.refresh: {} @@ -417,7 +417,7 @@ - do: index: index: twitter - id: 1 + id: "1" body: { "user": "kimchy" } - do: indices.refresh: {} diff --git a/modules/repository-azure/src/yamlRestTest/resources/rest-api-spec/test/repository_azure/20_repository.yml b/modules/repository-azure/src/yamlRestTest/resources/rest-api-spec/test/repository_azure/20_repository.yml index ffe3c4988f051..299183f26d9dc 100644 --- a/modules/repository-azure/src/yamlRestTest/resources/rest-api-spec/test/repository_azure/20_repository.yml +++ b/modules/repository-azure/src/yamlRestTest/resources/rest-api-spec/test/repository_azure/20_repository.yml @@ -45,15 +45,15 @@ setup: body: - index: _index: docs - _id: 1 + _id: "1" - snapshot: one - index: _index: docs - _id: 2 + _id: "2" - snapshot: one - index: _index: docs - _id: 3 + _id: "3" - snapshot: one - do: @@ -90,19 +90,19 @@ setup: body: - index: _index: docs - _id: 4 + _id: "4" - snapshot: two - index: _index: docs - _id: 5 + _id: "5" - snapshot: two - index: _index: docs - _id: 6 + _id: "6" - snapshot: two - index: _index: docs - _id: 7 + _id: "7" - snapshot: two - do: diff --git a/modules/repository-gcs/src/yamlRestTest/resources/rest-api-spec/test/repository_gcs/20_repository.yml b/modules/repository-gcs/src/yamlRestTest/resources/rest-api-spec/test/repository_gcs/20_repository.yml index 7ce4e44dada35..68d61be4983c5 100644 --- a/modules/repository-gcs/src/yamlRestTest/resources/rest-api-spec/test/repository_gcs/20_repository.yml +++ b/modules/repository-gcs/src/yamlRestTest/resources/rest-api-spec/test/repository_gcs/20_repository.yml @@ -45,15 +45,15 @@ setup: body: - index: _index: docs - _id: 1 + _id: "1" - snapshot: one - index: _index: docs - _id: 2 + _id: "2" - snapshot: one - index: _index: docs - _id: 3 + _id: "3" - snapshot: one - do: @@ -90,19 +90,19 @@ setup: body: - index: _index: docs - _id: 4 + _id: "4" - snapshot: two - index: _index: docs - _id: 5 + _id: "5" - snapshot: two - index: _index: docs - _id: 6 + _id: "6" - snapshot: two - index: _index: docs - _id: 7 + _id: "7" - snapshot: two - do: diff --git a/modules/repository-s3/src/yamlRestTest/resources/rest-api-spec/test/repository_s3/20_repository_permanent_credentials.yml b/modules/repository-s3/src/yamlRestTest/resources/rest-api-spec/test/repository_s3/20_repository_permanent_credentials.yml index 60a4133aa58c1..77870697f93ae 100644 --- a/modules/repository-s3/src/yamlRestTest/resources/rest-api-spec/test/repository_s3/20_repository_permanent_credentials.yml +++ b/modules/repository-s3/src/yamlRestTest/resources/rest-api-spec/test/repository_s3/20_repository_permanent_credentials.yml @@ -69,15 +69,15 @@ setup: body: - index: _index: docs - _id: 1 + _id: "1" - snapshot: one - index: _index: docs - _id: 2 + _id: "2" - snapshot: one - index: _index: docs - _id: 3 + _id: "3" - snapshot: one - do: @@ -130,15 +130,15 @@ setup: body: - index: _index: docs - _id: 1 + _id: "1" - snapshot: one - index: _index: docs - _id: 2 + _id: "2" - snapshot: one - index: _index: docs - _id: 3 + _id: "3" - snapshot: one - do: @@ -175,19 +175,19 @@ setup: body: - index: _index: docs - _id: 4 + _id: "4" - snapshot: two - index: _index: docs - _id: 5 + _id: "5" - snapshot: two - index: _index: docs - _id: 6 + _id: "6" - snapshot: two - index: _index: docs - _id: 7 + _id: "7" - snapshot: two - do: diff --git 
a/modules/repository-s3/src/yamlRestTest/resources/rest-api-spec/test/repository_s3/30_repository_temporary_credentials.yml b/modules/repository-s3/src/yamlRestTest/resources/rest-api-spec/test/repository_s3/30_repository_temporary_credentials.yml
index 148ac94b709fb..4a62d6183470d 100644
--- a/modules/repository-s3/src/yamlRestTest/resources/rest-api-spec/test/repository_s3/30_repository_temporary_credentials.yml
+++ b/modules/repository-s3/src/yamlRestTest/resources/rest-api-spec/test/repository_s3/30_repository_temporary_credentials.yml
@@ -41,15 +41,15 @@ setup:
         body:
           - index:
               _index: docs
-              _id: 1
+              _id: "1"
           - snapshot: one
           - index:
               _index: docs
-              _id: 2
+              _id: "2"
           - snapshot: one
           - index:
               _index: docs
-              _id: 3
+              _id: "3"
           - snapshot: one
 
   - do:
@@ -86,19 +86,19 @@ setup:
         body:
           - index:
               _index: docs
-              _id: 4
+              _id: "4"
           - snapshot: two
           - index:
               _index: docs
-              _id: 5
+              _id: "5"
           - snapshot: two
           - index:
               _index: docs
-              _id: 6
+              _id: "6"
           - snapshot: two
           - index:
               _index: docs
-              _id: 7
+              _id: "7"
           - snapshot: two
 
   - do:
diff --git a/modules/repository-s3/src/yamlRestTest/resources/rest-api-spec/test/repository_s3/40_repository_ec2_credentials.yml b/modules/repository-s3/src/yamlRestTest/resources/rest-api-spec/test/repository_s3/40_repository_ec2_credentials.yml
index 21112bc99defc..e24ff1ad0e559 100644
--- a/modules/repository-s3/src/yamlRestTest/resources/rest-api-spec/test/repository_s3/40_repository_ec2_credentials.yml
+++ b/modules/repository-s3/src/yamlRestTest/resources/rest-api-spec/test/repository_s3/40_repository_ec2_credentials.yml
@@ -41,15 +41,15 @@ setup:
         body:
           - index:
               _index: docs
-              _id: 1
+              _id: "1"
           - snapshot: one
           - index:
               _index: docs
-              _id: 2
+              _id: "2"
           - snapshot: one
           - index:
               _index: docs
-              _id: 3
+              _id: "3"
           - snapshot: one
 
   - do:
@@ -86,19 +86,19 @@ setup:
         body:
           - index:
               _index: docs
-              _id: 4
+              _id: "4"
           - snapshot: two
           - index:
               _index: docs
-              _id: 5
+              _id: "5"
           - snapshot: two
           - index:
               _index: docs
-              _id: 6
+              _id: "6"
           - snapshot: two
           - index:
               _index: docs
-              _id: 7
+              _id: "7"
           - snapshot: two
 
   - do:
diff --git a/modules/repository-s3/src/yamlRestTest/resources/rest-api-spec/test/repository_s3/50_repository_ecs_credentials.yml b/modules/repository-s3/src/yamlRestTest/resources/rest-api-spec/test/repository_s3/50_repository_ecs_credentials.yml
index daf5739f6720d..9c332cc7d9301 100644
--- a/modules/repository-s3/src/yamlRestTest/resources/rest-api-spec/test/repository_s3/50_repository_ecs_credentials.yml
+++ b/modules/repository-s3/src/yamlRestTest/resources/rest-api-spec/test/repository_s3/50_repository_ecs_credentials.yml
@@ -41,15 +41,15 @@ setup:
         body:
           - index:
               _index: docs
-              _id: 1
+              _id: "1"
           - snapshot: one
           - index:
               _index: docs
-              _id: 2
+              _id: "2"
           - snapshot: one
           - index:
               _index: docs
-              _id: 3
+              _id: "3"
           - snapshot: one
 
   - do:
@@ -86,19 +86,19 @@ setup:
         body:
           - index:
               _index: docs
-              _id: 4
+              _id: "4"
           - snapshot: two
           - index:
               _index: docs
-              _id: 5
+              _id: "5"
           - snapshot: two
           - index:
               _index: docs
-              _id: 6
+              _id: "6"
           - snapshot: two
           - index:
               _index: docs
-              _id: 7
+              _id: "7"
           - snapshot: two
 
   - do:
diff --git a/modules/repository-url/src/yamlRestTest/resources/rest-api-spec/test/repository_url/10_basic.yml b/modules/repository-url/src/yamlRestTest/resources/rest-api-spec/test/repository_url/10_basic.yml
index b932f0d53caad..4508dacbfe7e9 100644
--- a/modules/repository-url/src/yamlRestTest/resources/rest-api-spec/test/repository_url/10_basic.yml
+++ b/modules/repository-url/src/yamlRestTest/resources/rest-api-spec/test/repository_url/10_basic.yml
@@ -23,15 +23,15 @@ setup:
         body:
           - index:
               _index: docs
-              _id: 1
+              _id: "1"
           - snapshot: one
           - index:
               _index: docs
-              _id: 2
+              _id: "2"
           - snapshot: one
           - index:
               _index: docs
-              _id: 3
+              _id: "3"
           - snapshot: one
   # Create a first snapshot using the FS repository
   - do:
@@ -48,19 +48,19 @@ setup:
         body:
           - index:
               _index: docs
-              _id: 4
+              _id: "4"
           - snapshot: two
           - index:
               _index: docs
-              _id: 5
+              _id: "5"
           - snapshot: two
           - index:
               _index: docs
-              _id: 6
+              _id: "6"
           - snapshot: two
           - index:
               _index: docs
-              _id: 7
+              _id: "7"
           - snapshot: two
 
   # Create a second snapshot
diff --git a/plugins/analysis-icu/src/yamlRestTest/resources/rest-api-spec/test/analysis_icu/20_search.yml b/plugins/analysis-icu/src/yamlRestTest/resources/rest-api-spec/test/analysis_icu/20_search.yml
index 90aae30bbb5b5..4a73e628dadb7 100644
--- a/plugins/analysis-icu/src/yamlRestTest/resources/rest-api-spec/test/analysis_icu/20_search.yml
+++ b/plugins/analysis-icu/src/yamlRestTest/resources/rest-api-spec/test/analysis_icu/20_search.yml
@@ -27,7 +27,7 @@
   - do:
       index:
         index: test
-        id: 1
+        id: "1"
         body: { "text": "Bâton enflammé" }
   - do:
       indices.refresh: {}
diff --git a/plugins/analysis-kuromoji/src/yamlRestTest/resources/rest-api-spec/test/analysis_kuromoji/20_search.yml b/plugins/analysis-kuromoji/src/yamlRestTest/resources/rest-api-spec/test/analysis_kuromoji/20_search.yml
index 9a052469c2019..6e0b54d91222a 100644
--- a/plugins/analysis-kuromoji/src/yamlRestTest/resources/rest-api-spec/test/analysis_kuromoji/20_search.yml
+++ b/plugins/analysis-kuromoji/src/yamlRestTest/resources/rest-api-spec/test/analysis_kuromoji/20_search.yml
@@ -15,7 +15,7 @@
   - do:
       index:
         index: test
-        id: 1
+        id: "1"
         body: { "text": "JR新宿駅の近くにビールを飲みに行こうか" }
   - do:
       indices.refresh: {}
diff --git a/plugins/analysis-nori/src/yamlRestTest/resources/rest-api-spec/test/analysis_nori/20_search.yml b/plugins/analysis-nori/src/yamlRestTest/resources/rest-api-spec/test/analysis_nori/20_search.yml
index b7ecd933b6676..c75a182dad11c 100644
--- a/plugins/analysis-nori/src/yamlRestTest/resources/rest-api-spec/test/analysis_nori/20_search.yml
+++ b/plugins/analysis-nori/src/yamlRestTest/resources/rest-api-spec/test/analysis_nori/20_search.yml
@@ -15,7 +15,7 @@
   - do:
       index:
         index: test
-        id: 1
+        id: "1"
         body: { "text": "뿌리가 깊은 나무는" }
   - do:
       indices.refresh: {}
diff --git a/plugins/analysis-phonetic/src/yamlRestTest/resources/rest-api-spec/test/analysis_phonetic/40_search.yml b/plugins/analysis-phonetic/src/yamlRestTest/resources/rest-api-spec/test/analysis_phonetic/40_search.yml
index 2e6ee7ebd102a..4b664867a39c5 100644
--- a/plugins/analysis-phonetic/src/yamlRestTest/resources/rest-api-spec/test/analysis_phonetic/40_search.yml
+++ b/plugins/analysis-phonetic/src/yamlRestTest/resources/rest-api-spec/test/analysis_phonetic/40_search.yml
@@ -27,7 +27,7 @@
   - do:
       index:
         index: phonetic_sample
-        id: 1
+        id: "1"
         body: { "text": "hello world" }
   - do:
       indices.refresh: {}
diff --git a/plugins/analysis-smartcn/src/yamlRestTest/resources/rest-api-spec/test/analysis_smartcn/20_search.yml b/plugins/analysis-smartcn/src/yamlRestTest/resources/rest-api-spec/test/analysis_smartcn/20_search.yml
index 0a1f6e2af42bf..bfd9bda2a4836 100644
--- a/plugins/analysis-smartcn/src/yamlRestTest/resources/rest-api-spec/test/analysis_smartcn/20_search.yml
+++ b/plugins/analysis-smartcn/src/yamlRestTest/resources/rest-api-spec/test/analysis_smartcn/20_search.yml
@@ -15,7 +15,7 @@
   - do:
       index:
         index: test
-        id: 1
+        id: "1"
         body: { "text": "我购买了道具和服装" }
   - do:
       indices.refresh: {}
diff --git a/plugins/analysis-stempel/src/yamlRestTest/resources/rest-api-spec/test/analysis_stempel/20_search.yml b/plugins/analysis-stempel/src/yamlRestTest/resources/rest-api-spec/test/analysis_stempel/20_search.yml
index 7276b6744dfb5..d09b65296e431 100644
--- a/plugins/analysis-stempel/src/yamlRestTest/resources/rest-api-spec/test/analysis_stempel/20_search.yml
+++ b/plugins/analysis-stempel/src/yamlRestTest/resources/rest-api-spec/test/analysis_stempel/20_search.yml
@@ -15,7 +15,7 @@
   - do:
       index:
         index: test
-        id: 1
+        id: "1"
         body: { "text": "studenta był" }
   - do:
       indices.refresh: {}
diff --git a/plugins/analysis-ukrainian/src/yamlRestTest/resources/rest-api-spec/test/analysis_ukrainian/20_search.yml b/plugins/analysis-ukrainian/src/yamlRestTest/resources/rest-api-spec/test/analysis_ukrainian/20_search.yml
index ba860729ebf23..f8993414c96b3 100644
--- a/plugins/analysis-ukrainian/src/yamlRestTest/resources/rest-api-spec/test/analysis_ukrainian/20_search.yml
+++ b/plugins/analysis-ukrainian/src/yamlRestTest/resources/rest-api-spec/test/analysis_ukrainian/20_search.yml
@@ -15,7 +15,7 @@
   - do:
       index:
         index: test
-        id: 1
+        id: "1"
         body: { "text": "Ця п'єса у свою чергу рухається по колу." }
   - do:
       indices.refresh: {}
diff --git a/plugins/examples/painless-whitelist/src/yamlRestTest/resources/rest-api-spec/test/painless_whitelist/20_whitelist.yml b/plugins/examples/painless-whitelist/src/yamlRestTest/resources/rest-api-spec/test/painless_whitelist/20_whitelist.yml
index 51a440142fd5e..28740fc1844ed 100644
--- a/plugins/examples/painless-whitelist/src/yamlRestTest/resources/rest-api-spec/test/painless_whitelist/20_whitelist.yml
+++ b/plugins/examples/painless-whitelist/src/yamlRestTest/resources/rest-api-spec/test/painless_whitelist/20_whitelist.yml
@@ -4,7 +4,7 @@
   - do:
       index:
         index: test
-        id: 1
+        id: "1"
         body: { "num1": 1.0 }
   - do:
       indices.refresh: {}
diff --git a/plugins/examples/painless-whitelist/src/yamlRestTest/resources/rest-api-spec/test/painless_whitelist/30_static.yml b/plugins/examples/painless-whitelist/src/yamlRestTest/resources/rest-api-spec/test/painless_whitelist/30_static.yml
index c6d8048b97961..1430aa19e9ecc 100644
--- a/plugins/examples/painless-whitelist/src/yamlRestTest/resources/rest-api-spec/test/painless_whitelist/30_static.yml
+++ b/plugins/examples/painless-whitelist/src/yamlRestTest/resources/rest-api-spec/test/painless_whitelist/30_static.yml
@@ -4,7 +4,7 @@
   - do:
       index:
         index: test
-        id: 1
+        id: "1"
         body: { "num1": 1 }
   - do:
       indices.refresh: {}
diff --git a/plugins/examples/painless-whitelist/src/yamlRestTest/resources/rest-api-spec/test/painless_whitelist/40_instance.yml b/plugins/examples/painless-whitelist/src/yamlRestTest/resources/rest-api-spec/test/painless_whitelist/40_instance.yml
index 385d576ae48e9..faf1aa77ed51b 100644
--- a/plugins/examples/painless-whitelist/src/yamlRestTest/resources/rest-api-spec/test/painless_whitelist/40_instance.yml
+++ b/plugins/examples/painless-whitelist/src/yamlRestTest/resources/rest-api-spec/test/painless_whitelist/40_instance.yml
@@ -4,7 +4,7 @@
   - do:
       index:
         index: test
-        id: 1
+        id: "1"
         body: { "num1": 1 }
   - do:
       indices.refresh: {}
diff --git a/plugins/examples/rescore/src/yamlRestTest/resources/rest-api-spec/test/example-rescore/20_score.yml b/plugins/examples/rescore/src/yamlRestTest/resources/rest-api-spec/test/example-rescore/20_score.yml
index bcdc05b4c8612..9657c981977ad 100644
--- a/plugins/examples/rescore/src/yamlRestTest/resources/rest-api-spec/test/example-rescore/20_score.yml
+++ b/plugins/examples/rescore/src/yamlRestTest/resources/rest-api-spec/test/example-rescore/20_score.yml
@@ -11,12 +11,12 @@ setup:
   - do:
       index:
         index: test
-        id: 1
+        id: "1"
         body: { "test": 1 }
   - do:
       index:
         index: test
-        id: 2
+        id: "2"
         body: { "test": 2 }
   - do:
       indices.refresh: {}
diff --git a/plugins/examples/script-expert-scoring/src/yamlRestTest/resources/rest-api-spec/test/script_expert_scoring/20_score.yml b/plugins/examples/script-expert-scoring/src/yamlRestTest/resources/rest-api-spec/test/script_expert_scoring/20_score.yml
index c771ba82312a6..89194d162872d 100644
--- a/plugins/examples/script-expert-scoring/src/yamlRestTest/resources/rest-api-spec/test/script_expert_scoring/20_score.yml
+++ b/plugins/examples/script-expert-scoring/src/yamlRestTest/resources/rest-api-spec/test/script_expert_scoring/20_score.yml
@@ -9,17 +9,17 @@ setup:
   - do:
       index:
         index: test
-        id: 1
+        id: "1"
         body: { "important_field": "foo" }
   - do:
       index:
         index: test
-        id: 2
+        id: "2"
         body: { "important_field": "foo foo foo" }
   - do:
       index:
         index: test
-        id: 3
+        id: "3"
         body: { "important_field": "foo foo" }
 
   - do:
diff --git a/plugins/ingest-attachment/src/yamlRestTest/resources/rest-api-spec/test/ingest_attachment/20_attachment_processor.yml b/plugins/ingest-attachment/src/yamlRestTest/resources/rest-api-spec/test/ingest_attachment/20_attachment_processor.yml
index 5aba14690ee18..714a434775dbf 100644
--- a/plugins/ingest-attachment/src/yamlRestTest/resources/rest-api-spec/test/ingest_attachment/20_attachment_processor.yml
+++ b/plugins/ingest-attachment/src/yamlRestTest/resources/rest-api-spec/test/ingest_attachment/20_attachment_processor.yml
@@ -19,14 +19,14 @@
   - do:
       index:
         index: test
-        id: 1
+        id: "1"
         pipeline: "my_pipeline"
         body: { field1: "VGhpcyBpcyBhbiBlbmdsaXNoIHRleHQgdG8gdGVzdCBpZiB0aGUgcGlwZWxpbmUgd29ya3M=" }
 
   - do:
       get:
         index: test
-        id: 1
+        id: "1"
   - match: { _source.field1: "VGhpcyBpcyBhbiBlbmdsaXNoIHRleHQgdG8gdGVzdCBpZiB0aGUgcGlwZWxpbmUgd29ya3M=" }
   - length: { _source.attachment: 4 }
   - match: { _source.attachment.content: "This is an english text to test if the pipeline works" }
@@ -60,14 +60,14 @@
   - do:
       index:
         index: test
-        id: 1
+        id: "1"
         pipeline: "my_pipeline"
         body: { field1: "VGhpcyBpcyBhbiBlbmdsaXNoIHRleHQgdG8gdGVzdCBpZiB0aGUgcGlwZWxpbmUgd29ya3MK" }
 
   - do:
       get:
         index: test
-        id: 1
+        id: "1"
   - match: { _source.field1: "VGhpcyBpcyBhbiBlbmdsaXNoIHRleHQgdG8gdGVzdCBpZiB0aGUgcGlwZWxpbmUgd29ya3MK" }
   - length: { _source.attachment: 1 }
   - match: { _source.attachment.language: "en" }
@@ -94,14 +94,14 @@
   - do:
       index:
         index: test
-        id: 1
+        id: "1"
         pipeline: "my_pipeline"
         body: { field1: "VGhpcyBpcyBhbiBlbmdsaXNoIHRleHQgdG8gdGVzdCBpZiB0aGUgcGlwZWxpbmUgd29ya3M=" }
 
   - do:
       get:
         index: test
-        id: 1
+        id: "1"
   - length: { _source.attachment: 4 }
   - match: { _source.attachment.content: "This is an english text to tes" }
   - match: { _source.attachment.language: "en" }
@@ -130,14 +130,14 @@
   - do:
       index:
         index: test
-        id: 1
+        id: "1"
         pipeline: "my_pipeline"
         body: { field1: "VGhpcyBpcyBhbiBlbmdsaXNoIHRleHQgdG8gdGVzdCBpZiB0aGUgcGlwZWxpbmUgd29ya3M=" }
 
   - do:
       get:
         index: test
-        id: 1
+        id: "1"
   - length: { _source.attachment: 4 }
   - match: { _source.attachment.content: "This is an english text to tes" }
   - match: { _source.attachment.language: "en" }
@@ -146,14 +146,14 @@
   - do:
       index:
         index: test
-        id: 2
+        id: "2"
         pipeline: "my_pipeline"
         body: { field1: "VGhpcyBpcyBhbiBlbmdsaXNoIHRleHQgdG8gdGVzdCBpZiB0aGUgcGlwZWxpbmUgd29ya3M=", "max_size": 18 }
 
   - do:
       get:
         index: test
-        id: 2
+        id: "2"
   - length: { _source.attachment: 4 }
   - match: { _source.attachment.content: "This is an english" }
   - match: { _source.attachment.language: "en" }
@@ -162,14 +162,14 @@
   - do:
       index:
         index: test
-        id: 3
+        id: "3"
         pipeline: "my_pipeline"
         body: { field1: "VGhpcyBpcyBhbiBlbmdsaXNoIHRleHQgdG8gdGVzdCBpZiB0aGUgcGlwZWxpbmUgd29ya3M=", "max_size": 100000000 }
 
   - do:
       get:
         index: test
-        id: 3
+        id: "3"
   - length: { _source.attachment: 4 }
   - match: { _source.attachment.content: "This is an english text to test if the pipeline works" }
   - match: { _source.attachment.language: "en" }
diff --git a/plugins/ingest-attachment/src/yamlRestTest/resources/rest-api-spec/test/ingest_attachment/30_files_supported.yml b/plugins/ingest-attachment/src/yamlRestTest/resources/rest-api-spec/test/ingest_attachment/30_files_supported.yml
index 324776bc20f87..b16f8701f6364 100644
--- a/plugins/ingest-attachment/src/yamlRestTest/resources/rest-api-spec/test/ingest_attachment/30_files_supported.yml
+++ b/plugins/ingest-attachment/src/yamlRestTest/resources/rest-api-spec/test/ingest_attachment/30_files_supported.yml
@@ -22,14 +22,14 @@
   - do:
       index:
         index: test
-        id: 1
+        id: "1"
         pipeline: "my_pipeline"
         body: { field1: "0M8R4KGxGuEAAAAAAAAAAAAAAAAAAAAAPgADAP7/CQAGAAA [... base64-encoded Microsoft Word test fixture elided ...]" }
AAAAADzFAAAAAAAAPMUAAAAAAAA8xQAAAAAAAAAAAAAAAAAAP////8AAAAA/////wAAAAD/////AAAAAAAAAAAAAAAA/////wAAAAD/////AAAAAP////8AAAAA/////wAAAAD/////AAAAAP////8AAAAA/////wAAAAD/////AAAAAP////8AAAAA/////wAAAAD/////AAAAAP////8AAAAA/////wAAAAD/////AAAAAB4aAAAAAAAA8xQAAAAAAADzFAAAAAAAAPMUAAAAAAAA8xQAAAAAAADzFAAAAAAAAPMUAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADzFAAAAAAAAPMUAAAAAAAA8xQAAAAAAABQBwAAPQwAAI0TAAA6AQAABwAMAQ8ADQEAAAwEAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAFRlc3QgZWxhc3RpY3NlYXJjaA0AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAgAABIIAAATCAAA/PgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAYWaJVGuQAABhZo3wiGAAIACAAAEwgAAP0AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABAAAAATIAMZBoATpwpBeqAB+wfC4gsMhBIbCJBSKwiQUjkIkFJJCJBSWwAAAXsMQCGLDEAgyQxAIAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAALgYPABIAAQB8AQ8ACAADAAMAAwAAAAQACAAAAJgAAACeAAAAngAAAJ4AAACeAAAAngAAAJ4AAACeAAAAngAAADYGAAA2BgAANgYAADYGAAA2BgAANgYAADYGAAA2BgAANgYAAHYCAAB2AgAAdgIAAHYCAAB2AgAAdgIAAHYCAAB2AgAAdgIAADYGAAA2BgAANgYAADYGAAA2BgAANgYAAD4CAAA2BgAANgYAADYGAAA2BgAANgYAADYGAAA2BgAANgYAADYGAAA2BgAANgYAADYGAAA2BgAANgYAADYGAAA2BgAANgYAADYGAAA2BgAANgYAADYGAAA2BgAANgYAADYGAAA2BgAANgYAADYGAACoAAAANgYAADYGAAAWAAAANgYAADYGAAA2BgAANgYAADYGAAA2BgAANgYAADYGAAC4AAAANgYAADYGAAA2BgAANgYAADYGAAA2BgAANgYAADYGAAA2BgAANgYAADYGAAA2BgAAaAEAAEgBAAA2BgAANgYAADYGAAA2BgAANgYAADYGAAA2BgAANgYAADYGAAA2BgAANgYAADYGAAA2BgAANgYAADYGAAA2BgAANgYAADYGAAA2BgAANgYAADYGAAA2BgAANgYAADYGAAA2BgAANgYAADYGAAA2BgAANgYAADYGAAA2BgAANgYAADYGAAA2BgAANgYAADYGAAA2BgAANgYAADYGAAA2BgAANgYAADYGAAA2BgAANgYAADYGAAA2BgAANgYAADYGAAA2BgAANgYAADYGAAA2BgAANgYAADYGAAA2BgAANgYAADYGAAA2BgAANgYAADYGAAA2BgAANgYAADYGAAA2BgAANgYAAHACAAA2BgAANgYAADYGAAA2BgAANgYAADYGAAA2BgAANgYAADYGAAA2BgAAMgYAABgAAADGAwAA1gMAAOYDAAD2AwAABgQAABYEAAAmBAAANgQAAEYEAABWBAAAZgQAAHYEAACGBAAAlgQAAMYDAADWAwAA5gMAAPYDAAAGBAAAFgQAADIGAAAoAgAA2AEAAOgBAAAmBAAANgQAAEYEAABWBAAAZgQAAHYEAACGBAAAlgQAAMYDAADWAwAA5gMAAPYDAAAGBAAAFgQAACYEAAA2BAAARgQAAFYEAABmBAAAdgQAAIYEAACWBAAAxgMAANYDAADmAwAA9gMAAAYEAAAWBAAAJgQAADYEAABGBAAAVgQAAGYEAAB2BAAAhgQAAJYEAADGAwAA1gMAAOYDAAD2AwAABgQAABYEAAAmBAAANgQAAEYEAABWBAAAZgQAAHYEAACGBAAAlgQAAMYDAADWAwAA5gMAAPYDAAAGBAAAFgQAACYEAAA2BAAARgQAAFYEAABmBAAAdgQAAIYEAACWBAAAxgMAANYDAADmAwAA9gMAAAYEAAAWBAAAJgQAADYEAABGBAAAVgQAAGYEAAB2BAAAhgQAAJYEAAA4AQAAWAEAAPgBAAAIAgAAGAIAAFYCAAB+AgAAkAIAAKACAACwAgAAwAIAANACAACAAgAA4AIAAPACAAAAAwAAEAMAACADAAAwAwAAQAMAAOACAADwAgAAAAMAABADAAAgAwAAMAMAAEADAADgAgAA8AIAAAADAAAQAwAAIAMAADADAABAAwAA4AIAAPACAAAAAwAAEAMAACADAAAwAwAAQAMAAOACAADwAgAAAAMAABADAAAgAwAAMAMAAEADAADgAgAA8AIAAAADAAAQAwAAIAMAADADAABAAwAA4AIAAPACAAAAAwAAEAMAACADAAAwAwAAQAMAAOACAADwAgAAAAMAABADAAAgAwAAMAMAAEADAADgAgAA8AIAAAADAAAQAwAAIAMAADADAABAAwAA4AIAAPACAAAAAwAAEAMAACADAAAwAwAAQAMAAOACAADwAgAAAAMAABADAAAgAwAAMAMAAEADAADgAgAA8AIAAAADAAAQAwAAIAMAADADAABAAwAA4AIAAPACAAAAAwAAEAMAACADAAAwAwAAQAMAAOACAADwAgAAAAMAABADAAAgAwAAMAMAAEADAAAgAAAAT0oDAFBKAwBRSgMAX0gBBG1IDARuSAwEc0gMBHRIDAQAAAAAQAAAYPH/AgBAAAwQAAAAAAAAAAAGAE4AbwByAG0AYQBsAAAAAgAAABgAQ0oYAF9IAQRhShgAbUgMBHNIDAR0SAkEAAAAAAAAAAAAAAAAAAAAAAAAOgBBIPL/oQA6AAwNAAAAAAAAEAARAFAAbwBsAGkAYwBlACAAcABhAHIAIABkAOkAZgBhAHUAdAAAAAAAVgBpAPP/swBWAAwNAAAAAAAAMAYOAFQAYQBiAGwAZQBhAHUAIABOAG8AcgBtAGEAbAAAABwAF/YDAAA01gYAAQoDbAA01gYAAQUDAABh9gMAAAIACwAAADIAayD0/8EAMgAADQAAAAAAADAGDABBAHUAYwB1AG4AZQAgAGwAaQBzAHQAZQAAAAIADAAAAAAAUEsDBBQABgAIAAAAIQCb6HBP/AAAABwCAAATAAAAW0NvbnRlbnRfVHlwZXNdLnhtbKyRy2rDMBBF94X+g9C22HK6KKXYzqKPXR+L9AMGeWyL2CMhTULy9x07LpQSAoVuBNLMvffMqFwfxkHtMSbnqdKrvNAKyfrGUVfpz81Ldq9VYqAGBk9Y6SMmva6vr8rNMWBSoqZU6Z45PBiTbI8jpNwHJKm0Po7Aco2dCWC30KG5LYo7Yz0xEmc8eei6fMIWdgOr54M8n0hErtXjqW+KqjSEMDgLLKBmqpqzuohDuiDcU/OLLlvIclHO5ql3Id0sCe+ymugaVB8Q+Q1G4TAsQ+LP8xVIRov5ZeYz0b5tncXG290o68hn48XsTwCr/4n+zjTz39ZfAAAA//8DAFBLAwQUAAYACAAAACEApdan58AAAAA2AQAACwAAAF9yZWxzLy5yZWxzhI/PasMwDIfvhb2D0X1R0sMYJXYvpZBDL6N9AOEof2giG9sb69tPxwYKuwiEpO/3qT3+rov54ZTnIBaaqgbD4kM/y2jhdj2/f4LJhaSnJQhbeHCGo3vbtV+8UNGjPM0xG6VItjCVEg+I2U+8Uq5CZNHJENJKRds0YiR/p5FxX9cfmJ4Z4DZM0/UWUtc3YK6PqMn/s8MwzJ5PwX+vLOVFBG43lExp5GKhqC/jU72QqGWq1B7Qtbj51v0BAAD//wMAUEsDBBQABgAIAAAAIQBreZYWgwAAAIoAAAAcAAAAdGhlbWUvdGhlbWUvdGhlbWVNYW5hZ2VyLnhtbAzMTQrDIBBA4X2hd5DZN2O7KEVissuuu/YAQ5waQceg0p/b1+XjgzfO3xTVm0sNWSycBw2KZc0uiLf
wfCynG6jaSBzFLGzhxxXm6XgYybSNE99JyHNRfSPVkIWttd0g1rUr1SHvLN1euSRqPYtHV+jT9yniResrJgoCOP0BAAD//wMAUEsDBBQABgAIAAAAIQBtTVmryAYAAI4aAAAWAAAAdGhlbWUvdGhlbWUvdGhlbWUxLnhtbOxZ3YrbRhS+L/QdhO4d/0n+WeINtmxv2uwmIXbS5nJWHkuTHWmMZrwbEwJ9gkIhLb0p9K6F3gTaN+i7pLTpQ/TMSJZn7HH2hy2E0jUs8vg7Z7455+g7I83dey8T6pzjjBOW9tz6nZrr4DRkM5JGPffpdFzpuA4XKJ0hylLcc1eYu/cOP/3kLjoQMU6wA/YpP0A9NxZicVCt8hCGEb/DFjiF3+YsS5CAr1lUnWXoAvwmtNqo1VrVBJHUdVKUgNtp/PvP4OzRfE5C7B6uvY8oTJEKLgdCmk2kb1yYDJYZRkuFnZ3VJYKveEAz5xzRngsTzdjFFL8UrkMRF/BDz62pP7d6eLeKDgojKvbYanZj9VfYFQazs4aaM4tOy0k9z/da/dK/AlCxixu1R61Rq/SnACgMYaU5F92nP+gOhn6B1UD5pcX3sD1s1g285r+5w7nvy4+BV6Dcv7eDH48DiKKBV6Ac7+/gPa/dCDwDr0A5vrWDb9f6Q69t4BUopiQ920HX/FYzWK+2hMwZvW+Fd31v3G4UzjcoqIayuuQUc5aKfbWWoBcsGwNAAikSJHXEaoHnKIQyDhAlpxlxjkkUQ+EtUMo4DNcatXGtCf/lx1NXKiLoACPNWvICJnxnSPJxeJiRhei5n4NXV4M8XzpHTMQkLGZVTgyL+yiNdIv3P33z9w9fOX/9+uP7N9/mk27juY4f4jT6kqD0QxPAajdhePfd2z9+e/vu+6///OWNxX8/Q6c6fEoSzJ2H+MJ5whJYnGUF+DS7nsU0RkS36KcRRymSs1j8jyB+OvrhClFkwQ0gEjruWQYyYwMeLV8YhCdxthTE4vFBnBjAE8bogGXWKDyQc2lhni7TyD55ttRxTxA6t80doNTI82i5AH0lNpdBjA2ajylKBYpwioUjf2NnGFtW95wQI64nJMwYZ3PhPCfOABFrSKbk1KimjdF9kkBeVjaCkG8jNifPnAGjtlUP8bmJhLsDUQv5KaZGGI/QUqDE5nKKEqoH/BiJ2EZysspCHTfiAjIdYcqc0QxzbrN5lMF6taQ/AImxp/2ErhITmQlyZvN5jBjTkUN2FsQoWdiwE5LGOvYzfgYlipzHTNjgJ8y8Q+R3yAOIx750PyPYSPflavAU1FWntCkQ+csys+TyCDOjficrOkdYSQ2Iv6HpCUkvFfgtaff/PWk/IWkYM8uKbkvU7a6NjFxTzvsZsd5P97dEfB9uW7oDls3Ix6/cQ7RMH2O4WXbb1//C/b9wu/954d53P9++XG8UGsRbbl3zzbrauid7d+5zQulErCg+5mrzzqEvzcYwKO3UYysun+QWMVzKOxkmMHBRhpSNkzHxBRHxJEYL2OHXXekk4oXriDsLxmHjr4atviWeLpMTNssfWOt1+XCaiwdHYjNe88txeNgQObrV3jyEle4V20g9LK8JSNvrkNAmM0k0LSTa60EZJPVoDkGzkFAruxUWXQuLjnS/TtUOC6BWZgU2Tg5st3qu74EJGMEzFaJ4JvOUp3qdXZXM28z0vmAaFQC7iHUFbDLdlVz3Lk+uLi+1K2TaIKGVm0lCRUb1MB6jGS6qU45ehcZ1c93dpNSgJ0Oh5oPS2tBodz7E4qa5BrttbaCprhQ0dS56bqvpQ8mEaNFz5/DgD5fJAmqHyw0vohG8PgtFlt/wN1GWRcbFEPE4D7gSnVwNEiJw5lCS9Fy5/DINNFUaorjVGyAIHy25LsjKx0YOkm4mGc/nOBR62rURGen8Kyh8rhXWX5X5zcHSki0h3ZN4duGc0mX2BEGJ+e26DOCMcHj/U8+jOSPwQrMUsk39bTWmQnb1N4qqhvJxRBcxKjqKLuY5XEl5SUd9K2OgfSvWDAHVQlI0wtNINlg9qEY3LbtGzmFv173cSEZOE81NzzRURXZNu4oZM6zbwFYsb9bkNVbrEIOm6R0+l+5tye2utW5rn1B2CQh4GT9L171CQ9CobSYzqEnGuzIsNbsYNXvHeoGXULtKk9BUv7V2uxW3skdYp4PBG3V+sNuuWhiar/eVKtLq6EM/nGCnL0A8hvAaeEkFV6mEo4cMwYZoovYkuWzALfJSFLcGXDnLjPTcVzW/7wUNP6jUOv6o4jW9WqXj95uVvu836yO/XhsOGq+hsYg4qfv5scsYXkTRVXH4osZ3DmCS9bu2OyFLqkydrFQVcXUAU28YBzD5yYszlQcsrkNAdF61GuNusztoVbrN/rjiDQedSjdoDSrDVtAejoeB3+mOX7vOuQJ7/WbgtUadSqseBBWvVZP0O91K22s0+l673xl5/dfFNgZWnstHEQsIr+J1+A8AAAD//wMAUEsDBBQABgAIAAAAIQAN0ZCftgAAABsBAAAnAAAAdGhlbWUvdGhlbWUvX3JlbHMvdGhlbWVNYW5hZ2VyLnhtbC5yZWxzhI9NCsIwFIT3gncIb2/TuhCRJt2I0K3UA4TkNQ02PyRR7O0NriwILodhvplpu5edyRNjMt4xaKoaCDrplXGawW247I5AUhZOidk7ZLBggo5vN+0VZ5FLKE0mJFIoLjGYcg4nSpOc0IpU+YCuOKOPVuQio6ZByLvQSPd1faDxmwF8xSS9YhB71QAZllCa/7P9OBqJZy8fFl3+UUFz2YUFKKLGzOAjm6pMBMpburrE3wAAAP//AwBQSwECLQAUAAYACAAAACEAm+hwT/wAAAAcAgAAEwAAAAAAAAAAAAAAAAAAAAAAW0NvbnRlbnRfVHlwZXNdLnhtbFBLAQItABQABgAIAAAAIQCl1qfnwAAAADYBAAALAAAAAAAAAAAAAAAAAC0BAABfcmVscy8ucmVsc1BLAQItABQABgAIAAAAIQBreZYWgwAAAIoAAAAcAAAAAAAAAAAAAAAAABYCAAB0aGVtZS90aGVtZS90aGVtZU1hbmFnZXIueG1sUEsBAi0AFAAGAAgAAAAhAG1NWavIBgAAjhoAABYAAAAAAAAAAAAAAAAA0wIAAHRoZW1lL3RoZW1lL3RoZW1lMS54bWxQSwECLQAUAAYACAAAACEADdGQn7YAAAAbAQAAJwAAAAAAAAAAAAAAAADPCQAAdGhlbWUvdGhlbWUvX3JlbHMvdGhlbWVNYW5hZ2VyLnhtbC5yZWxzUEsFBgAAAAAFAAUAXQEAAMoKAAAAADw/eG1sIHZlcnNpb249IjEuMCIgZW5jb2Rpbmc9IlVURi04IiBzdGFuZGFsb25lPSJ5ZXMiPz4NCjxhOmNsck1hcCB4bWxuczphPSJodHRwOi8vc2NoZW1hcy5vcGVueG1sZm9ybWF0cy5vcmcvZHJhd2luZ21sLzIwMDYvbWFpbiIgYmcxPSJsdDEiIHR4MT0iZGsxIiBiZzI9Imx0MiIgdHgyPSJkazIiIGFjY2VudDE9ImFjY2
VudDEiIGFjY2VudDI9ImFjY2VudDIiIGFjY2VudDM9ImFjY2VudDMiIGFjY2VudDQ9ImFjY2VudDQiIGFjY2VudDU9ImFjY2VudDUiIGFjY2VudDY9ImFjY2VudDYiIGhsaW5rPSJobGluayIgZm9sSGxpbms9ImZvbEhsaW5rIi8+AAAAABMAAAAUAAAOAAAIAP////8ACAAAEwgAAAUAAAAACAAAEwgAAAYAAAAAAAAABQAAABIAAAAVAAAABwAEAAcAAAAAABIAAAAVAAAABAAHAAQAAAAEAAAACAAAAOUAAAAAAAAAAwAAAN8IhgCkF6oAlUa5AH419AAAAAAAEwAAABUAAAAAAAAAAQAAAP9AAIABABIAAAASAAAAAEBDewEAAQASAAAAAAAAABIAAAAAAAAAAAAAAAAAAAACEAAAAAAAAAATAAAAoAAAEABAAAD//wEAAAAHAFUAbgBrAG4AbwB3AG4A//8BAAgAAAAAAAAAAAAAAP//AQAAAAAA//8AAAIA//8AAAAA//8AAAIA//8AAAAABQAAAEcOkAEAAAICBgMFBAUCAwTvKgDgQXgAwAkAAAAAAAAA/wEAAAAAAABUAGkAbQBlAHMAIABOAGUAdwAgAFIAbwBtAGEAbgAAADUOkAECAAAAAAAAAAAAAAAAAAAAAAAAEAAAAAAAAAAAAAAAgAAAAABTAHkAbQBiAG8AbAAAADMOkAEAAAILBgQCAgICAgT/KgDgQ3gAwAkAAAAAAAAA/wEAAAAAAABBAHIAaQBhAGwAAAA3DpABAAACDwUCAgIEAwIE/wIA4P+sAEABAAAAAAAAAJ8BAAAAAAAAQwBhAGwAaQBiAHIAaQAAAEESkAEBAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABDAGEAbQBiAHIAaQBhACAATQBhAHQAaAAAACAABADxCIgIAPDEAgAAqQEAAAAAWVJDh1lSQ4cAAAAAAgABAAAAAgAAABEAAAABAAEAAAAEAAOQAQAAAAIAAAARAAAAAQABAAAAAQAAAAAAAAAhAwDwEAAAAAEAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAClBsAHtAC0AIGBcjAAAAAAAAAAAAAAAAAAABIAAAASAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAgAAAAAAAAAAAABAAAAA8BAACAD8/QEAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACSFAAAAAACfH/DwAAJFAAABAnAAD///9/////f////3////9/////f////3////9/3wiGAAAEAAAyAAAAAAAAAAAAAAAAAAAAAAAAAAAAIQQAAAAAAAAAAAAAAAAAAAAAAAAQHAAABAAAAAAAAAAAAHgAAAB4AAAAAAAAAAAAAACgBQAAGkjOCAsAAAAAAAAA3AAAAAEAAAD//xIAAAAAAAAAAAAAAAAAAAAMAEQAYQB2AGkAZAAgAFAAaQBsAGEAdABvAAwARABhAHYAaQBkACAAUABpAGwAYQB0AG8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAP7/AAADCgEAAAAAAAAAAAAAAAAAAAAAAAEAAADghZ/y+U9oEKuRCAArJ7PZMAAAANzSAgASAAAAAQAAAJgAAAACAAAAoAAAAAMAAACsAAAABAAAALgAAAAFAAAA0AAAAAYAAADcAAAABwAAAOgAAAAIAAAA/AAAAAkAAAAUAQAAEgAAACABAAAKAAAARAEAAAwAAABQAQAADQAAAFwBAAAOAAAAaAEAAA8AAABwAQAAEAAAAHgBAAATAAAAgAEAABEAAACIAQAAAgAAABAnAAAeAAAABAAAAAAAAAAeAAAABAAAAAAAAAAeAAAAEAAAAERhdmlkIFBpbGF0bwAAAAAeAAAABAAAAAAAAAAeAAAABAAAAAAAAAAeAAAADAAAAE5vcm1hbC5kb3RtAB4AAAAQAAAARGF2aWQgUGlsYXRvAAAAAB4AAAAEAAAAMgAAAB4AAAAcAAAATWljcm9zb2Z0IE1hY2ludG9zaCBXb3JkAAAAAEAAAAAARsMjAAAAAEAAAAAAFjZWpnrRAUAAAAAAFjZWpnrRAQMAAAABAAAAAwAAAAIAAAADAAAAEQAAAAMAAAAAAAAARwAAAEzRAgD/////DgAAAAEAAABsAAAAAAAAAAAAAAD/AAAAswAAAAAAAAAAAAAAZhkAANsRAAAgRU1GAAABAETRAgAIAAAAAQAAAAAAAAAAAAAAAAAAAOwEAACxAwAAQAEAAPAAAAAAAAAAAAAAAAAAAAAA4gQAgKkDABEAAAAMAAAACAAAAAoAAAAQAAAAAAAAAAAAAAAJAAAAEAAAAAABAAC0AAAADAAAABAAAAAAAAAAAAAAAAsAAAAQAAAAAAEAALQAAABRAAAAeNACAAAAAAAAAAAA/wAAALMAAAAAAAAAAAAAAAAAAAAAAAAAAAEAALQAA
ABQAAAAKAAAAHgAAAAA0AIAAAAAACAAzAAAAQAAtAAAACgAAAAAAQAAtAAAAAEAIAAAAAAAANACAAAAAAAAAAAAAAAAAAAAAAD/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////vr6+/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/+Tk5P/k5OT/5OTk/76+vv///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/////////Ozs7////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O/////////////
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv//////////////////////////////////////
/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////////Ozs7/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7////////////////////////////////////////////////////////////////
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/////////////////////////////////////////////////////zs7O/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O//////////////////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/////////////////////////////////////////////////////////////zs7O////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7///////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
///////////////////////////////////87Ozv///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O/////////////////////////////////////////////////////////////////////////////////////////////////
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////Ozs7/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////zs7O/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////87Ozv///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////8BAP7/AwoAAP////8GCQIAAAAAAMAAAAAAAABGIAAAAERvY3VtZW50IE1pY3Jvc29mdCBXb3JkIDk3LTIwMDQACgAAAE1TV29yZERvYwAQAAAAV29yZC5Eb2N1bWVudC44APQ5snEAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA==" } - do: get: index: test - id: 1 + id: "1" - length: { _source.attachment: 8 } - match: { _source.attachment.content: "Test elasticsearch" } - match: { _source.attachment.language: "et" } @@ -65,14 +65,14 @@ - do: index: index: test - id: 1 + id: "1" pipeline: "my_pipeline" body: { field1: "UEsDBBQABgAIAAAAIQBtiidLZgEAAFQFAAATAAgCW0NvbnRlbnRfVHlwZXNdLnhtbCCiBAIooAACAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAC0lMtugzAQRfeV+g/I2wqcdFFVVUgWfSzbSE0/wLEH4tYv2c7r7ztAgqooAalJNkgwc+89A3hGk41WyQp8kNbkZJgNSAKGWyFNmZOv2Vv6SJIQmRFMWQM52UIgk/HtzWi2dRASVJuQk0WM7onSwBegWcisA4OVwnrNIt76kjrGf1gJ9H4weKDcmggmprHyIOPRCxRsqWLyusHHDQnKSfLc9FVROWHOKclZxDKtqvSozoMKHcKVEQd06Y4sQ2XdExbShbvTCd8OyoMEqavR6gJqPvB1eikgmTIf35nGBrq2XlBh+VKjKOse7gijLQrJodVXbs5bDiHgd9IqayuaSbNnP8kR4lZBuDxF49sfDzGi4BoAO+dehDXMP69G8ce8F6TA3BmbK7g8RmvdCxHx1EJzHZ7NUdt0RWLn1FsXcAv4f4y9P66VOsWBHfgou/+6NhGtz54Pqk0gQBzJpvVOHP8CAAD//wMAUEsDBBQABgAIAAAAIQDHwie8/wAAAN8CAAALAAgCX3JlbHMvLnJlbHMgogQCKKAAAgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAArJLNSgMxEIDvgu8Q5t7NtoqINNuLCL2JrA8wJtPd6OaHZKrt2xtF1IVlEexx/j6+SWa9ObhBvFLKNngFy6oGQV4HY32n4LG9W1yDyI
   - do:
       get:
         index: test
-        id: 1
+        id: "1"
   - length: { _source.attachment: 8 }
   - match: { _source.attachment.content: "Test elasticsearch" }
   - match: { _source.attachment.language: "et" }
diff --git a/plugins/mapper-annotated-text/src/yamlRestTest/resources/rest-api-spec/test/mapper_annotatedtext/10_basic.yml b/plugins/mapper-annotated-text/src/yamlRestTest/resources/rest-api-spec/test/mapper_annotatedtext/10_basic.yml
index a876a4babe328..6ef35999307c4 100644
--- a/plugins/mapper-annotated-text/src/yamlRestTest/resources/rest-api-spec/test/mapper_annotatedtext/10_basic.yml
+++ b/plugins/mapper-annotated-text/src/yamlRestTest/resources/rest-api-spec/test/mapper_annotatedtext/10_basic.yml
@@ -91,13 +91,13 @@
   - do:
       index:
         index: annotated
-        id: 1
+        id: "1"
         body:
           "my_field" : "[A](~MARK0&~MARK0) [B](~MARK1)"
   - do:
       index:
         index: annotated
-        id: 2
+        id: "2"
         body:
           "my_field" : "[A](~MARK0) [C](~MARK2)"
         refresh: true
@@ -157,13 +157,13 @@
   - do:
       index:
         index: annotated
-        id: 1
+        id: "1"
         body:
           "my_field" : "[Jeff Beck](Beck) plays a strat"
   - do:
       index:
         index: annotated
-        id: 2
+        id: "2"
         body:
           "my_field" : "[Kimchy](Beck) plays a strat"
         refresh: true
@@ -190,19 +190,19 @@
   - do:
       index:
         index: annotated
-        id: 1
+        id: "1"
         body:
           "my_field" : "[Apple](Apple+Inc) launched the iphone 12"
   - do:
       index:
         index: annotated
-        id: 2
+        id: "2"
         body:
           "my_field" : "[They](Apple+Inc) make iphone accessories"
   - do:
       index:
         index: annotated
-        id: 3
+        id: "3"
         body:
           "my_field" : "[Apple](Apple+Inc) have a new iphone
             coming"
         refresh: true
diff --git a/plugins/mapper-murmur3/src/yamlRestTest/resources/rest-api-spec/test/mapper_murmur3/10_basic.yml b/plugins/mapper-murmur3/src/yamlRestTest/resources/rest-api-spec/test/mapper_murmur3/10_basic.yml
index 3ed6e6a97c2c2..f82de6fc21110 100644
--- a/plugins/mapper-murmur3/src/yamlRestTest/resources/rest-api-spec/test/mapper_murmur3/10_basic.yml
+++ b/plugins/mapper-murmur3/src/yamlRestTest/resources/rest-api-spec/test/mapper_murmur3/10_basic.yml
@@ -12,7 +12,7 @@ setup:
   - do:
       index:
         index: test
-        id: 0
+        id: "0"
         body: { "foo": null }
   - do:
@@ -30,25 +30,25 @@ setup:
   - do:
       index:
         index: test
-        id: 1
+        id: "1"
         body: { "foo": "bar" }
   - do:
       index:
         index: test
-        id: 2
+        id: "2"
         body: { "foo": "baz" }
   - do:
       index:
         index: test
-        id: 3
+        id: "3"
         body: { "foo": "quux" }
   - do:
       index:
         index: test
-        id: 4
+        id: "4"
         body: { "foo": "bar" }
   - do:
@@ -67,7 +67,7 @@ setup:
   - do:
       index:
         index: test
-        id: 1
+        id: "1"
         body: { "foo": "foo" }
   - do:
diff --git a/plugins/mapper-size/src/yamlRestTest/resources/rest-api-spec/test/mapper_size/10_basic.yml b/plugins/mapper-size/src/yamlRestTest/resources/rest-api-spec/test/mapper_size/10_basic.yml
index bc33818e10e62..434368ed2f5b2 100644
--- a/plugins/mapper-size/src/yamlRestTest/resources/rest-api-spec/test/mapper_size/10_basic.yml
+++ b/plugins/mapper-size/src/yamlRestTest/resources/rest-api-spec/test/mapper_size/10_basic.yml
@@ -14,7 +14,7 @@
   - do:
       index:
         index: test
-        id: 1
+        id: "1"
         body: { "foo": "bar" }
   - do:
@@ -23,7 +23,7 @@
   - do:
       get:
         index: test
-        id: 1
+        id: "1"
         stored_fields: "_size"
   - gt: { _size: 0 }
diff --git a/plugins/store-smb/src/yamlRestTest/resources/rest-api-spec/test/store_smb/15_index_creation.yml b/plugins/store-smb/src/yamlRestTest/resources/rest-api-spec/test/store_smb/15_index_creation.yml
index fbbdcb8f153e0..0d91a931519ed 100644
--- a/plugins/store-smb/src/yamlRestTest/resources/rest-api-spec/test/store_smb/15_index_creation.yml
+++ b/plugins/store-smb/src/yamlRestTest/resources/rest-api-spec/test/store_smb/15_index_creation.yml
@@ -10,13 +10,13 @@
   - do:
       index:
         index: smb-test
-        id: 1
+        id: "1"
         body: { foo: bar }
   - do:
       get:
         index: smb-test
-        id: 1
+        id: "1"
   - match: { _index: smb-test }
   - match: { _id: "1"}
diff --git a/qa/smoke-test-ingest-disabled/src/test/resources/rest-api-spec/test/ingest_mustache/10_ingest_disabled.yml b/qa/smoke-test-ingest-disabled/src/test/resources/rest-api-spec/test/ingest_mustache/10_ingest_disabled.yml
index 7a0cdcbef0786..ed3c5f6f9228c 100644
--- a/qa/smoke-test-ingest-disabled/src/test/resources/rest-api-spec/test/ingest_mustache/10_ingest_disabled.yml
+++ b/qa/smoke-test-ingest-disabled/src/test/resources/rest-api-spec/test/ingest_mustache/10_ingest_disabled.yml
@@ -73,7 +73,7 @@
       catch: /There are no ingest nodes in this cluster, unable to forward request to an ingest node./
       index:
         index: test
-        id: 1
+        id: "1"
         pipeline: "my_pipeline_1"
         body: {
           field1: "1",
diff --git a/qa/smoke-test-ingest-with-all-dependencies/src/test/resources/rest-api-spec/test/ingest/10_pipeline_with_mustache_templates.yml b/qa/smoke-test-ingest-with-all-dependencies/src/test/resources/rest-api-spec/test/ingest/10_pipeline_with_mustache_templates.yml
index e6a2a3d52e116..a8f7e1e5877c8 100644
--- a/qa/smoke-test-ingest-with-all-dependencies/src/test/resources/rest-api-spec/test/ingest/10_pipeline_with_mustache_templates.yml
+++ b/qa/smoke-test-ingest-with-all-dependencies/src/test/resources/rest-api-spec/test/ingest/10_pipeline_with_mustache_templates.yml
@@ -30,14 +30,14 @@
   - do:
       index:
         index: test
-        id: 1
+        id: "1"
         pipeline: "my_pipeline_1"
         body: {}
   - do:
       get:
         index: test
-        id: 1
+        id: "1"
   - length: { _source: 2 }
   - match: { _source.index_type_id: "test/1" }
   - match: { _source.metadata: ["test", "1"] }
@@ -108,7 +108,7 @@
   - do:
       index:
         index: test
-        id: 1
+        id: "1"
         pipeline: "my_pipeline_1"
         body: {
           metadata: "0",
@@ -120,7 +120,7 @@
   - do:
       get:
         index: test
-        id: 1
+        id: "1"
   - length: { _source: 5 }
   - match: { _source.field1: "1" }
   - match: { _source.field2: "2" }
@@ -131,7 +131,7 @@
   - do:
       index:
         index: test
-        id: 1
+        id: "1"
         pipeline: "my_pipeline_2"
         body: {
           field1: "field2"
@@ -140,7 +140,7 @@
   - do:
       get:
         index: test
-        id: 1
+        id: "1"
   - length: { _source: 2 }
   - match: { _source.field1: "field2" }
   - match: { _source.field2: "value" }
@@ -148,7 +148,7 @@
   - do:
       index:
         index: test
-        id: 1
+        id: "1"
         pipeline: "my_pipeline_3"
         body: {
           field_to_remove: "field2",
@@ -158,7 +158,7 @@
   - do:
       get:
         index: test
-        id: 1
+        id: "1"
   - length: { _source: 1 }
   - match: { _source.field_to_remove: "field2" }
@@ -196,7 +196,7 @@
   - do:
       index:
         index: test
-        id: 1
+        id: "1"
         pipeline: "my_handled_pipeline"
         body: {
           do_nothing: "foo",
@@ -205,7 +205,7 @@
   - do:
       get:
         index: test
-        id: 1
+        id: "1"
   - length: { _source: 2 }
   - match: { _source.do_nothing: "foo" }
   - match: { _source.error: "processor first_processor [remove]: field [field_to_remove] not present as part of path [field_to_remove]" }
@@ -236,7 +236,7 @@
   - do:
       index:
         index: test
-        id: 1
+        id: "1"
         pipeline: "_id"
         body: {
          values_flat : [],
@@ -257,7 +257,7 @@
   - do:
       get:
         index: test
-        id: 1
+        id: "1"
   - length: { _source: 2 }
   - match: { _source.values_flat: ["foo_bar", "foo_baz"] }
diff --git a/qa/smoke-test-ingest-with-all-dependencies/src/test/resources/rest-api-spec/test/ingest/20_combine_processors.yml b/qa/smoke-test-ingest-with-all-dependencies/src/test/resources/rest-api-spec/test/ingest/20_combine_processors.yml
index 14a1c71bed52d..9a7444c4ffc6c 100644
--- a/qa/smoke-test-ingest-with-all-dependencies/src/test/resources/rest-api-spec/test/ingest/20_combine_processors.yml
+++ b/qa/smoke-test-ingest-with-all-dependencies/src/test/resources/rest-api-spec/test/ingest/20_combine_processors.yml
@@ -43,7 +43,7 @@
   - do:
       index:
         index: test
-        id: 1
+        id: "1"
         pipeline: "_id"
         body: {
           log: "89.160.20.128 - - [08/Sep/2014:02:54:42 +0000] \"GET /presentations/logstash-scale11x/images/ahhh___rage_face_by_samusmmx-d5g5zap.png HTTP/1.1\" 200 175208 \"http://mobile.rivals.com/board_posts.asp?SID=880&mid=198829575&fid=2208&tid=198829575&Team=&TeamId=&SiteId=\" \"Mozilla/5.0 (Linux; Android 4.2.2; VS980 4G Build/JDQ39B) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/33.0.1750.135 Mobile Safari/537.36\""
@@ -52,7 +52,7 @@
   - do:
       get:
         index: test
-        id: 1
+        id: "1"
   - length: { _source: 13 }
   - match: { _source.request: "/presentations/logstash-scale11x/images/ahhh___rage_face_by_samusmmx-d5g5zap.png" }
   - match: { _source.agent: "\"Mozilla/5.0 (Linux; Android 4.2.2; VS980 4G Build/JDQ39B) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/33.0.1750.135 Mobile Safari/537.36\"" }
@@ -101,7 +101,7 @@
   - do:
       index:
         index: test
-        id: 1
+        id: "1"
         pipeline: "_id"
         body: {
           log: "89.160.20.128 - - [08/Sep/2014:02:54:42 +0000] \"GET /presentations/logstash-scale11x/images/ahhh___rage_face_by_samusmmx-d5g5zap.png HTTP/1.1\" 200 175208 \"http://mobile.rivals.com/board_posts.asp?SID=880&mid=198829575&fid=2208&tid=198829575&Team=&TeamId=&SiteId=\" \"Mozilla/5.0 (Linux; Android 4.2.2; VS980 4G Build/JDQ39B) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/33.0.1750.135 Mobile Safari/537.36\""
@@ -110,7 +110,7 @@
   - do:
       get:
         index: test
-        id: 1
+        id: "1"
   - length: { _source: 7 }
   - match: { _source.url.original: "/presentations/logstash-scale11x/images/ahhh___rage_face_by_samusmmx-d5g5zap.png" }
   - match: { _source.user_agent.original: "Mozilla/5.0 (Linux; Android 4.2.2; VS980 4G Build/JDQ39B) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/33.0.1750.135 Mobile Safari/537.36" }
@@ -187,7 +187,7 @@
   - do:
       index:
         index: test
-        id: 1
+        id: "1"
         pipeline: "_id"
         body: {
           "age" : 33,
@@ -227,7 +227,7 @@
   - do:
       get:
         index: test
-        id: 1
+        id: "1"
   - length: { _source: 11 }
   - is_false: _source.friends.0.id
   - is_false: _source.friends.1.id
diff --git a/qa/smoke-test-ingest-with-all-dependencies/src/test/resources/rest-api-spec/test/ingest/30_update_by_query_with_ingest.yml b/qa/smoke-test-ingest-with-all-dependencies/src/test/resources/rest-api-spec/test/ingest/30_update_by_query_with_ingest.yml
index 5ba68cb932a17..255918261a896 100644
--- a/qa/smoke-test-ingest-with-all-dependencies/src/test/resources/rest-api-spec/test/ingest/30_update_by_query_with_ingest.yml
+++ b/qa/smoke-test-ingest-with-all-dependencies/src/test/resources/rest-api-spec/test/ingest/30_update_by_query_with_ingest.yml
@@ -18,7 +18,7 @@
   - do:
       index:
         index: twitter
-        id: 1
+        id: "1"
         body: { "user": "kimchy" }
   - do:
       indices.refresh: {}
diff --git a/qa/smoke-test-ingest-with-all-dependencies/src/test/resources/rest-api-spec/test/ingest/40_reindex_with_ingest.yml b/qa/smoke-test-ingest-with-all-dependencies/src/test/resources/rest-api-spec/test/ingest/40_reindex_with_ingest.yml
index 61f290f91bc42..3fca85dbe1eeb 100644
--- a/qa/smoke-test-ingest-with-all-dependencies/src/test/resources/rest-api-spec/test/ingest/40_reindex_with_ingest.yml
+++ b/qa/smoke-test-ingest-with-all-dependencies/src/test/resources/rest-api-spec/test/ingest/40_reindex_with_ingest.yml
@@ -18,7 +18,7 @@
   - do:
       index:
         index: twitter
-        id: 1
+        id: "1"
         body: { "user": "kimchy" }
   - do:
       indices.refresh: {}
diff --git a/qa/smoke-test-ingest-with-all-dependencies/src/test/resources/rest-api-spec/test/ingest/50_script_processor_using_painless.yml b/qa/smoke-test-ingest-with-all-dependencies/src/test/resources/rest-api-spec/test/ingest/50_script_processor_using_painless.yml
index eaf6b24030a06..6b7114d572ac2 100644
--- a/qa/smoke-test-ingest-with-all-dependencies/src/test/resources/rest-api-spec/test/ingest/50_script_processor_using_painless.yml
+++ b/qa/smoke-test-ingest-with-all-dependencies/src/test/resources/rest-api-spec/test/ingest/50_script_processor_using_painless.yml
@@ -22,14 +22,14 @@
   - do:
       index:
         index: test
-        id: 1
+        id: "1"
        pipeline: "my_pipeline"
        body: { bytes_in: 1234, bytes_out: 4321 }
   - do:
       get:
         index: test
-        id: 1
+        id: "1"
   - match: { _source.bytes_in: 1234 }
   - match: { _source.bytes_out: 4321 }
   - match: { _source.bytes_total: 55550 }
@@ -70,14 +70,14 @@
   - do:
       index:
         index: test
-        id: 1
+        id: "1"
        pipeline: "my_pipeline"
        body: { bytes_in: 1234, bytes_out: 4321 }
   - do:
       get:
         index: test
-        id: 1
+        id: "1"
   - match: { _source.bytes_in: 1234 }
   - match: { _source.bytes_out: 4321 }
   - match: { _source.bytes_total: 5555 }
diff --git a/qa/smoke-test-ingest-with-all-dependencies/src/test/resources/rest-api-spec/test/ingest/60_pipeline_timestamp_date_mapping.yml b/qa/smoke-test-ingest-with-all-dependencies/src/test/resources/rest-api-spec/test/ingest/60_pipeline_timestamp_date_mapping.yml
index 0f514f2213492..90c5f5f9a837a 100644
--- a/qa/smoke-test-ingest-with-all-dependencies/src/test/resources/rest-api-spec/test/ingest/60_pipeline_timestamp_date_mapping.yml
+++ b/qa/smoke-test-ingest-with-all-dependencies/src/test/resources/rest-api-spec/test/ingest/60_pipeline_timestamp_date_mapping.yml
@@ -31,7 +31,7 @@
   - do:
       index:
         index: timetest
-        id: 1
+        id: "1"
         pipeline: "my_timely_pipeline"
         body: {}
diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/bulk/80_cas.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/bulk/80_cas.yml
index 87d3d237d42cb..6cec656e543ed 100644
--- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/bulk/80_cas.yml
+++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/bulk/80_cas.yml
@@ -3,7 +3,7 @@
   - do:
       index:
         index: test_1
-        id: 1
+        id: "1"
         body: { foo: bar }
   - match: { _version: 1}
   - set: { _seq_no: seqno }
@@ -14,7 +14,7 @@
         body:
           - index:
               _index: test_1
-              _id: 1
+              _id: "1"
               if_seq_no: 10000
               if_primary_term: $primary_term
           - foo: bar2
@@ -28,7 +28,7 @@
         body:
           - index:
               _index: test_1
-              _id: 1
+              _id: "1"
               if_seq_no: $seqno
               if_primary_term: $primary_term
           - foo: bar2
diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cat.count/10_basic.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cat.count/10_basic.yml
index 7a6a29032cf74..9cbec3e33e589 100644
--- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cat.count/10_basic.yml
+++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cat.count/10_basic.yml
@@ -24,7 +24,7 @@
   - do:
       index:
         index: index1
-        id: 1
+        id: "1"
         body: { foo: bar }
         refresh: true
@@ -39,7 +39,7 @@
   - do:
       index:
         index: index2
-        id: 1
+        id: "1"
         body: { foo: bar }
         refresh: true
diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cat.recovery/10_basic.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cat.recovery/10_basic.yml
index c00837e47f81c..a8c0808782272 100644
--- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cat.recovery/10_basic.yml
+++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cat.recovery/10_basic.yml
@@ -14,7 +14,7 @@
   - do:
       index:
         index: index1
-        id: 1
+        id: "1"
         body: { foo: bar }
         refresh: true
   - do:
diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/count/10_basic.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/count/10_basic.yml
index a8d40276c9800..22b0eb48c8877 100644
--- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/count/10_basic.yml
+++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/count/10_basic.yml
@@ -5,7 +5,7 @@ setup:
   - do:
       index:
         index: test
-        id: 1
+        id: "1"
         body: { foo: bar }
   - do:
diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/count/20_query_string.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/count/20_query_string.yml
index 66b0699a184d2..7b109cd104a10 100644
--- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/count/20_query_string.yml
+++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/count/20_query_string.yml
@@ -12,7 +12,7 @@
   - do:
       index:
         index: test
-        id: 1
+        id: "1"
         body: { field: foo bar}
   - do:
diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/create/10_with_id.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/create/10_with_id.yml
index f69e3600a43d3..f114805207787 100644
--- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/create/10_with_id.yml
+++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/create/10_with_id.yml
@@ -4,7 +4,7 @@
   - do:
       create:
         index: test_1
-        id: 1
+        id: "1"
         body: { foo: bar }
   - match: { _index: test_1 }
@@ -14,7 +14,7 @@
   - do:
       get:
         index: test_1
-        id: 1
+        id: "1"
   - match: { _index: test_1 }
   - match: { _id: "1"}
@@ -25,5 +25,5 @@
       catch: conflict
       create:
         index: test_1
-        id: 1
+        id: "1"
         body: { foo: bar }
diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/create/35_external_version.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/create/35_external_version.yml
index 86d0d4b59e06b..65f81b3247bd1 100644
--- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/create/35_external_version.yml
+++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/create/35_external_version.yml
@@ -5,7 +5,7 @@
       catch: bad_request
       create:
        index: test
-        id: 1
+        id: "1"
        body: { foo: bar }
        version_type: external
        version: 0
@@ -18,7 +18,7 @@
       catch: bad_request
       create:
        index: test
-        id: 2
+        id: "2"
        body: { foo: bar }
        version_type: external
        version: 5
diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/create/40_routing.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/create/40_routing.yml
index 6fb845f4fa869..5731da79a04fd 100644
--- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/create/40_routing.yml
+++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/create/40_routing.yml
@@ -18,14 +18,14 @@
   - do:
       create:
         index: test_1
-        id: 1
+        id: "1"
         routing: "5"
         body: { foo: bar }
   - do:
       get:
         index: test_1
-        id: 1
+        id: "1"
         routing: "5"
         stored_fields: [_routing]
@@ -36,5 +36,5 @@
       catch: missing
       get:
         index: test_1
-        id: 1
+        id: "1"
diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/create/60_refresh.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/create/60_refresh.yml
index d9c4ab4602d1c..2659b4650f14f 100644
--- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/create/60_refresh.yml
+++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/create/60_refresh.yml
@@ -13,7 +13,7 @@
   - do:
       create:
         index: test_1
-        id: 1
+        id: "1"
         body: { foo: bar }
   - do:
@@ -21,14 +21,14 @@
         rest_total_hits_as_int: true
         index: test_1
         body:
-          query: { term: { _id: 1 }}
+          query: { term: { _id: "1" }}
   - match: { hits.total: 0 }
   - do:
       create:
         index: test_1
-        id: 2
+        id: "2"
         refresh: true
         body: { foo: bar }
   - is_true: forced_refresh
@@ -38,7 +38,7 @@
         rest_total_hits_as_int: true
         index: test_1
         body:
-          query: { term: { _id: 2 }}
+          query: { term: { _id: "2" }}
   - match: { hits.total: 1 }
@@ -48,7 +48,7 @@
   - do:
       create:
         index: test_1
-        id: 1
+        id: "1"
         refresh: ""
         body: { foo: bar }
   - is_true: forced_refresh
@@ -58,7 +58,7 @@
         rest_total_hits_as_int: true
         index: test_1
         body:
-          query: { term: { _id: 1 }}
+          query: { term: { _id: "1" }}
   - match: { hits.total: 1 }
diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/create/70_nested.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/create/70_nested.yml
index cc9a82cbcbc9e..858ba8840c83f 100644
--- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/create/70_nested.yml
+++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/create/70_nested.yml
@@ -18,7 +18,7 @@ setup:
   - do:
       create:
         index: test_1
-        id: 1
+        id: "1"
         body:
           "nested1" : [ { "foo": "bar" }, { "foo": "bar2" } ]
   - match: { _version: 1}
@@ -30,6 +30,6 @@ setup:
       catch: /The number of nested documents has exceeded the allowed limit of \[2\]. This limit can be set by changing the \[index.mapping.nested_objects.limit\] index level setting\./
       create:
         index: test_1
-        id: 1
+        id: "1"
         body:
           "nested1" : [ { "foo": "bar" }, { "foo": "bar2" }, { "foo": "bar3" } ]
diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/delete/10_basic.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/delete/10_basic.yml
index f58f1435046fc..d656aff035916 100644
--- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/delete/10_basic.yml
+++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/delete/10_basic.yml
@@ -6,7 +6,7 @@
   - do:
       index:
         index: test_1
-        id: 1
+        id: "1"
         body: { foo: bar }
   - match: { _version: 1 }
@@ -14,6 +14,6 @@
   - do:
       delete:
         index: test_1
-        id: 1
+        id: "1"
   - match: { _version: 2 }
diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/delete/11_shard_header.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/delete/11_shard_header.yml
index fea1779b99d21..4f7493c42f61e 100644
--- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/delete/11_shard_header.yml
+++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/delete/11_shard_header.yml
@@ -18,13 +18,13 @@
   - do:
       index:
         index: foobar
-        id: 1
+        id: "1"
         body: { foo: bar }
   - do:
       delete:
         index: foobar
-        id: 1
+        id: "1"
   - match: { _index: foobar }
   - match: { _id: "1"}
diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/delete/12_result.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/delete/12_result.yml
index dba565179cded..081477532508c 100644
--- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/delete/12_result.yml
+++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/delete/12_result.yml
@@ -6,13 +6,13 @@
   - do:
       index:
         index: test_1
-        id: 1
+        id: "1"
         body: { foo: bar }
   - do:
       delete:
         index: test_1
-        id: 1
+        id: "1"
   - match: { result: deleted }
@@ -20,6 +20,6 @@
       catch: missing
       delete:
         index: test_1
-        id: 1
+        id: "1"
   - match: { result: not_found }
diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/delete/20_cas.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/delete/20_cas.yml
index a739e3f53cd44..04f009da788b6 100644
--- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/delete/20_cas.yml
+++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/delete/20_cas.yml
@@ -6,7 +6,7 @@
   - do:
       index:
         index: test_1
-        id: 1
+        id: "1"
         body: { foo: bar }
   - match: { _seq_no: 0 }
@@ -15,14 +15,14 @@
       catch: conflict
       delete:
         index: test_1
-        id: 1
+        id: "1"
         if_seq_no: 2
         if_primary_term: 1
   - do:
       delete:
         index: test_1
-        id: 1
+        id: "1"
         if_seq_no: 0
         if_primary_term: 1
diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/delete/25_external_version.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/delete/25_external_version.yml
index e076dbded6f0c..54e5df1f2ed8f 100644
--- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/delete/25_external_version.yml
+++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/delete/25_external_version.yml
@@ -6,7 +6,7 @@
   - do:
       index:
         index: test_1
-        id: 1
+        id: "1"
         body: { foo: bar }
         version_type: external
         version: 5
@@ -17,14 +17,14 @@
       catch: conflict
       delete:
         index: test_1
-        id: 1
+        id: "1"
         version_type: external
         version: 4
   - do:
       delete:
         index: test_1
-        id: 1
+        id: "1"
         version_type: external
         version: 6
diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/delete/26_external_gte_version.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/delete/26_external_gte_version.yml
index 03adef4a75fa9..e85c61a436bb0 100644
--- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/delete/26_external_gte_version.yml
+++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/delete/26_external_gte_version.yml
@@ -6,7 +6,7 @@
   - do:
       index:
         index: test_1
-        id: 1
+        id: "1"
         body: { foo: bar }
         version_type: external_gte
         version: 5
@@ -17,14 +17,14 @@
       catch: conflict
       delete:
         index: test_1
-        id: 1
+        id: "1"
         version_type: external_gte
         version: 4
   - do:
       delete:
         index: test_1
-        id: 1
+        id: "1"
         version_type: external_gte
         version: 6
@@ -33,7 +33,7 @@
   - do:
       index:
         index: test_1
-        id: 1
+        id: "1"
         body: { foo: bar }
         version_type: external_gte
         version: 6
@@ -43,7 +43,7 @@
   - do:
       delete:
         index: test_1
-        id: 1
+        id: "1"
         version_type: external_gte
         version: 6
diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/delete/30_routing.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/delete/30_routing.yml
index 2bbafe5e04416..122f29a402646 100644
--- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/delete/30_routing.yml
+++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/delete/30_routing.yml
@@ -12,7 +12,7 @@
   - do:
       index:
         index: test_1
-        id: 1
+        id: "1"
         routing: "5"
         body: { foo: bar }
@@ -20,12 +20,12 @@
       catch: missing
       delete:
         index: test_1
-        id: 1
+        id: "1"
         routing: "4"
   - do:
       delete:
         index: test_1
-        id: 1
+        id: "1"
         routing: "5"
diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/delete/50_refresh.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/delete/50_refresh.yml
index 3aea111f07a66..6577f9a6cebf0 100644
--- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/delete/50_refresh.yml
+++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/delete/50_refresh.yml
@@ -21,7 +21,7 @@
   - do:
       index:
         index: test_1
-        id: 1
+        id: "1"
         body: { foo: bar }
         refresh: true
@@ -31,7 +31,7 @@
   - do:
       index:
         index: test_1
-        id: 3
+        id: "3"
         body: { foo: bar }
         refresh: true
   - is_true: forced_refresh
@@ -48,7 +48,7 @@
   - do:
       delete:
         index: test_1
-        id: 1
+        id: "1"
   - do:
       search:
@@ -62,7 +62,7 @@
   - do:
       delete:
         index: test_1
-        id: 3
+        id: "3"
         refresh: true
 # If a replica shard where doc 1 is located gets initialized at this point, doc 1
@@ -86,7 +86,7 @@
   - do:
       index:
         index: test_1
-        id: 1
+        id: "1"
         body: { foo: bar }
         refresh: true
   - is_true: forced_refresh
@@ -96,13 +96,13 @@
         rest_total_hits_as_int: true
         index: test_1
         body:
-          query: { term: { _id: 1 }}
+          query: { term: { _id: "1" }}
   - match: { hits.total: 1 }
   - do:
       delete:
         index: test_1
-        id: 1
+        id: "1"
         refresh: ""
   - do:
@@ -110,7 +110,7 @@
         rest_total_hits_as_int: true
         index: test_1
         body:
-          query: { term: { _id: 1 }}
+          query: { term: { _id: "1" }}
   - match: { hits.total: 0 }
 ---
diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/delete/60_missing.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/delete/60_missing.yml
index ae4e61c075ae8..991bd12bc989f 100644
--- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/delete/60_missing.yml
+++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/delete/60_missing.yml
@@ -7,7 +7,7 @@
       catch: missing
       delete:
         index: test_1
-        id: 1
+        id: "1"
 ---
 "Missing document with ignore":
@@ -17,5 +17,5 @@
   - do:
       delete:
         index: test_1
-        id: 1
+        id: "1"
         ignore: 404
--git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/exists/10_basic.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/exists/10_basic.yml index 141e2898bb21b..e1e366b0a4059 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/exists/10_basic.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/exists/10_basic.yml @@ -5,14 +5,14 @@ - do: exists: index: test_1 - id: 1 + id: "1" - is_false: '' - do: index: index: test_1 - id: 1 + id: "1" body: { "foo": "bar" } - is_true: '' @@ -20,14 +20,14 @@ - do: exists: index: test_1 - id: 1 + id: "1" - is_true: '' - do: exists: index: test_1 - id: 1 + id: "1" version: 1 - is_true: '' diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/exists/40_routing.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/exists/40_routing.yml index 039eeb87d1f0e..e23a71ae42301 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/exists/40_routing.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/exists/40_routing.yml @@ -19,14 +19,14 @@ - do: index: index: test_1 - id: 1 + id: "1" routing: "5" body: { foo: bar } - do: exists: index: test_1 - id: 1 + id: "1" routing: "5" - is_true: '' @@ -34,6 +34,6 @@ - do: exists: index: test_1 - id: 1 + id: "1" - is_false: '' diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/exists/60_realtime_refresh.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/exists/60_realtime_refresh.yml index 6aebaa78b8a0a..3e60cbbf4968f 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/exists/60_realtime_refresh.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/exists/60_realtime_refresh.yml @@ -18,13 +18,13 @@ - do: index: index: test_1 - id: 1 + id: "1" body: { foo: bar } - do: exists: index: test_1 - id: 1 + id: "1" realtime: false - is_false: '' @@ -32,7 +32,7 @@ - do: exists: index: test_1 - id: 1 + id: "1" realtime: true - is_true: '' @@ -40,7 +40,7 @@ - do: exists: index: test_1 - id: 1 + id: "1" realtime: false refresh: true diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/exists/70_defaults.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/exists/70_defaults.yml index a042888d66d1c..771767abfcb95 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/exists/70_defaults.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/exists/70_defaults.yml @@ -5,12 +5,12 @@ - do: index: index: test_1 - id: 1 + id: "1" body: { "foo": "bar" } - do: exists: index: test_1 - id: 1 + id: "1" - is_true: '' diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/explain/20_source_filtering.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/explain/20_source_filtering.yml index 1708034df8d4d..f9e185a6c2a59 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/explain/20_source_filtering.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/explain/20_source_filtering.yml @@ -5,40 +5,40 @@ - do: index: index: test_1 - id: 1 + id: "1" body: { "include": { "field1": "v1", "field2": "v2" }, "count": 1 } - do: indices.refresh: index: test_1 - do: - explain: { index: test_1, id: 1, _source: false, body: { query: { match_all: {}} } } + explain: { index: test_1, id: "1", _source: false, body: { query: { match_all: {}} } } - match: { _index: test_1 } - match: { _id: "1" } - is_false: get._source - do: - explain: { 
index: test_1, id: 1, _source: true, body: { query: { match_all: {}} } } + explain: { index: test_1, id: "1", _source: true, body: { query: { match_all: {}} } } - match: { get._source.include.field1: v1 } - do: - explain: { index: test_1, id: 1, _source: include.field1, body: { query: { match_all: {}} } } + explain: { index: test_1, id: "1", _source: include.field1, body: { query: { match_all: {}} } } - match: { get._source.include.field1: v1 } - is_false: get._source.include.field2 - do: - explain: { index: test_1, id: 1, _source_includes: include.field1, body: { query: { match_all: {}} } } + explain: { index: test_1, id: "1", _source_includes: include.field1, body: { query: { match_all: {}} } } - match: { get._source.include.field1: v1 } - is_false: get._source.include.field2 - do: - explain: { index: test_1, id: 1, _source_includes: "include.field1,include.field2", body: { query: { match_all: {}} } } + explain: { index: test_1, id: "1", _source_includes: "include.field1,include.field2", body: { query: { match_all: {}} } } - match: { get._source.include.field1: v1 } - match: { get._source.include.field2: v2 } - is_false: get._source.count - do: - explain: { index: test_1, id: 1, _source_includes: include, _source_excludes: "*.field2", body: { query: { match_all: {}} } } + explain: { index: test_1, id: "1", _source_includes: include, _source_excludes: "*.field2", body: { query: { match_all: {}} } } - match: { get._source.include.field1: v1 } - is_false: get._source.include.field2 - is_false: get._source.count diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/explain/30_query_string.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/explain/30_query_string.yml index 61321c05548ce..a78735641226c 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/explain/30_query_string.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/explain/30_query_string.yml @@ -14,7 +14,7 @@ - do: index: index: test - id: 1 + id: "1" body: { field: foo bar} - do: @@ -24,7 +24,7 @@ - do: explain: index: test - id: 1 + id: "1" q: bar df: field @@ -33,7 +33,7 @@ - do: explain: index: test - id: 1 + id: "1" q: field:foo field:xyz - is_true: matched @@ -41,7 +41,7 @@ - do: explain: index: test - id: 1 + id: "1" q: field:foo field:xyz default_operator: AND @@ -50,7 +50,7 @@ - do: explain: index: test - id: 1 + id: "1" q: field:BA* - is_true: matched @@ -58,7 +58,7 @@ - do: explain: index: test - id: 1 + id: "1" q: number:foo lenient: true diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/get/15_default_values.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/get/15_default_values.yml index 2717a365ff328..5908fa69478fe 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/get/15_default_values.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/get/15_default_values.yml @@ -5,13 +5,13 @@ - do: index: index: test_1 - id: 1 + id: "1" body: { "foo": "bar" } - do: get: index: test_1 - id: 1 + id: "1" - match: { _index: test_1 } - match: { _id: '1' } diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/get/20_stored_fields.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/get/20_stored_fields.yml index 69383a0b7b2b4..e67a167916eb4 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/get/20_stored_fields.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/get/20_stored_fields.yml @@ -17,12 +17,12 @@ - do: 
      index:
        index: test_1
-        id: 1
+        id: "1"
        body: { "foo": "bar", "count": 1 }
   - do:
      get:
        index: test_1
-        id: 1
+        id: "1"
        stored_fields: foo
   - match: { _index: test_1 }
@@ -33,7 +33,7 @@
   - do:
      get:
        index: test_1
-        id: 1
+        id: "1"
        stored_fields: [foo, count]
   - match: { fields.foo: [bar] }
@@ -43,7 +43,7 @@
   - do:
      get:
        index: test_1
-        id: 1
+        id: "1"
        stored_fields: [foo, count, _source]
   - match: { fields.foo: [bar] }
diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/get/40_routing.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/get/40_routing.yml
index 78102c4411ca8..defd738b9c205 100644
--- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/get/40_routing.yml
+++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/get/40_routing.yml
@@ -20,14 +20,14 @@
   - do:
      index:
        index: test_1
-        id: 1
+        id: "1"
        routing: "5"
        body: { foo: bar }
   - do:
      get:
        index: test_1
-        id: 1
+        id: "1"
        routing: "5"
        stored_fields: [_routing]
@@ -38,5 +38,5 @@
      catch: missing
      get:
        index: test_1
-        id: 1
+        id: "1"
diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/get/50_with_headers.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/get/50_with_headers.yml
index 9e143de5d1b63..c23119e895a2d 100644
--- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/get/50_with_headers.yml
+++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/get/50_with_headers.yml
@@ -5,7 +5,7 @@
   - do:
      index:
        index: test_1
-        id: 1
+        id: "1"
        body: { "body": "foo" }
   - do:
@@ -13,7 +13,7 @@
        Accept: application/yaml
      get:
        index: test_1
-        id: 1
+        id: "1"
   - match: {_index: "test_1"}
   - match: {_id: "1"}
diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/get/60_realtime_refresh.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/get/60_realtime_refresh.yml
index 7f35bcae063df..3b56471134b93 100644
--- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/get/60_realtime_refresh.yml
+++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/get/60_realtime_refresh.yml
@@ -19,20 +19,20 @@
   - do:
      index:
        index: test_1
-        id: 1
+        id: "1"
        body: { foo: bar }
   - do:
      catch: missing
      get:
        index: test_1
-        id: 1
+        id: "1"
        realtime: false
   - do:
      get:
        index: test_1
-        id: 1
+        id: "1"
        realtime: true
   - is_true: found
@@ -40,7 +40,7 @@
   - do:
      get:
        index: test_1
-        id: 1
+        id: "1"
        realtime: false
        refresh: true
diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/get/70_source_filtering.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/get/70_source_filtering.yml
index b154f020c5b60..60895d24a7061 100644
--- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/get/70_source_filtering.yml
+++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/get/70_source_filtering.yml
@@ -15,37 +15,37 @@
   - do:
      index:
        index: test_1
-        id: 1
+        id: "1"
        body: { "include": { "field1": "v1", "field2": "v2" }, "count": 1 }
   - do:
-      get: { index: test_1, id: 1, _source: false }
+      get: { index: test_1, id: "1", _source: false }
   - match: { _index: test_1 }
   - match: { _id: "1" }
   - is_false: _source
   - do:
-      get: { index: test_1, id: 1, _source: true }
+      get: { index: test_1, id: "1", _source: true }
   - match: { _source.include.field1: v1 }
   - do:
-      get: { index: test_1, id: 1, _source: include.field1 }
+      get: { index: test_1, id: "1", _source: include.field1 }
   - match: { _source.include.field1: v1 }
   - is_false: _source.include.field2
   - do:
-      get: { index: test_1, id: 1, _source_includes: include.field1 }
+      get: { index: test_1, id: "1", _source_includes: include.field1 }
   - match: { _source.include.field1: v1 }
   - is_false: _source.include.field2
   - do:
-      get: { index: test_1, id: 1, _source_includes: "include.field1,include.field2" }
+      get: { index: test_1, id: "1", _source_includes: "include.field1,include.field2" }
   - match: { _source.include.field1: v1 }
   - match: { _source.include.field2: v2 }
   - is_false: _source.count
   - do:
-      get: { index: test_1, id: 1, _source_includes: include, _source_excludes: "*.field2" }
+      get: { index: test_1, id: "1", _source_includes: include, _source_excludes: "*.field2" }
   - match: { _source.include.field1: v1 }
   - is_false: _source.include.field2
   - is_false: _source.count
@@ -54,7 +54,7 @@
   - do:
      get:
        index: test_1
-        id: 1
+        id: "1"
        stored_fields: count
        _source: true
diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/get/80_missing.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/get/80_missing.yml
index 1e60246f97941..e2a2413027ec9 100644
--- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/get/80_missing.yml
+++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/get/80_missing.yml
@@ -5,7 +5,7 @@
      catch: missing
      get:
        index: test_1
-        id: 1
+        id: "1"
 ---
 "Missing document with ignore":
@@ -13,5 +13,5 @@
   - do:
      get:
        index: test_1
-        id: 1
+        id: "1"
        ignore: 404
diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/get/90_versions.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/get/90_versions.yml
index cafe6f86193f3..45d6bfe69cbce 100644
--- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/get/90_versions.yml
+++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/get/90_versions.yml
@@ -6,21 +6,21 @@
   - do:
      index:
        index: test_1
-        id: 1
+        id: "1"
        body: { foo: bar }
   - match: { _version: 1}
   - do:
      index:
        index: test_1
-        id: 1
+        id: "1"
        body: { foo: bar }
   - match: { _version: 2}
   - do:
      get:
        index: test_1
-        id: 1
+        id: "1"
        version: 2
   - match: { _id: "1" }
@@ -28,13 +28,13 @@
      catch: conflict
      get:
        index: test_1
-        id: 1
+        id: "1"
        version: 1
   - do:
      get:
        index: test_1
-        id: 1
+        id: "1"
        version: 2
        version_type: external
   - match: { _id: "1" }
@@ -43,7 +43,7 @@
      catch: conflict
      get:
        index: test_1
-        id: 1
+        id: "1"
        version: 10
        version_type: external
@@ -51,14 +51,14 @@
      catch: conflict
      get:
        index: test_1
-        id: 1
+        id: "1"
        version: 1
        version_type: external
   - do:
      get:
        index: test_1
-        id: 1
+        id: "1"
        version: 2
        version_type: external_gte
   - match: { _id: "1" }
@@ -67,7 +67,7 @@
      catch: conflict
      get:
        index: test_1
-        id: 1
+        id: "1"
        version: 10
        version_type: external_gte
@@ -75,7 +75,7 @@
      catch: conflict
      get:
        index: test_1
-        id: 1
+        id: "1"
        version: 1
        version_type: external_gte
diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/get_source/10_basic.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/get_source/10_basic.yml
index 7318602bb66d7..2969f2eb65e85 100644
--- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/get_source/10_basic.yml
+++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/get_source/10_basic.yml
@@ -6,19 +6,19 @@
   - do:
      index:
        index: test_1
-        id: 1
+        id: "1"
        body: { "foo": "bar" }
   - do:
      get_source:
        index: test_1
-        id: 1
+        id: "1"
   - match: { '': { foo: bar } }
   - do:
      get_source:
        index: test_1
-        id: 1
+        id: "1"
   - match: { '': { foo: bar } }
diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/get_source/15_default_values.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/get_source/15_default_values.yml
index 0e53f92ce4eaa..8a1e453acb721 100644
--- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/get_source/15_default_values.yml
+++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/get_source/15_default_values.yml
@@ -7,12 +7,12 @@
   - do:
      index:
        index: test_1
-        id: 1
+        id: "1"
        body: { "foo": "bar" }
   - do:
      get_source:
        index: test_1
-        id: 1
+        id: "1"
   - match: { '': { foo: bar } }
diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/get_source/40_routing.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/get_source/40_routing.yml
index 74752f1d95f56..6046ac2abbe37 100644
--- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/get_source/40_routing.yml
+++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/get_source/40_routing.yml
@@ -21,14 +21,14 @@
   - do:
      index:
        index: test_1
-        id: 1
+        id: "1"
        routing: "5"
        body: { foo: bar }
   - do:
      get_source:
        index: test_1
-        id: 1
+        id: "1"
        routing: "5"
   - match: { '': {foo: bar}}
@@ -37,4 +37,4 @@
      catch: missing
      get_source:
        index: test_1
-        id: 1
+        id: "1"
diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/get_source/60_realtime_refresh.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/get_source/60_realtime_refresh.yml
index 7891e7f84d92f..8820a8921ceec 100644
--- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/get_source/60_realtime_refresh.yml
+++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/get_source/60_realtime_refresh.yml
@@ -18,20 +18,20 @@
   - do:
      index:
        index: test_1
-        id: 1
+        id: "1"
        body: { foo: bar }
   - do:
      catch: missing
      get_source:
        index: test_1
-        id: 1
+        id: "1"
        realtime: false
   - do:
      get_source:
        index: test_1
-        id: 1
+        id: "1"
        realtime: true
   - match: { '': {foo: bar}}
@@ -39,7 +39,7 @@
   - do:
      get_source:
        index: test_1
-        id: 1
+        id: "1"
        realtime: false
        refresh: true
diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/get_source/70_source_filtering.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/get_source/70_source_filtering.yml
index 6570524630e5a..869b15dd1ab96 100644
--- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/get_source/70_source_filtering.yml
+++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/get_source/70_source_filtering.yml
@@ -7,22 +7,22 @@
   - do:
      index:
        index: test_1
-        id: 1
+        id: "1"
        body: { "include": { "field1": "v1", "field2": "v2" }, "count": 1 }
   - do:
-      get_source: { index: test_1, id: 1, _source_includes: include.field1 }
+      get_source: { index: test_1, id: "1", _source_includes: include.field1 }
   - match: { include.field1: v1 }
   - is_false: include.field2
   - do:
-      get_source: { index: test_1, id: 1, _source_includes: "include.field1,include.field2" }
+      get_source: { index: test_1, id: "1", _source_includes: "include.field1,include.field2" }
   - match: { include.field1: v1 }
   - match: { include.field2: v2 }
   - is_false: count
   - do:
-      get_source: { index: test_1, id: 1, _source_includes: include, _source_excludes: "*.field2" }
+      get_source: { index: test_1, id: "1", _source_includes: include, _source_excludes: "*.field2" }
   - match: { include.field1: v1 }
   - is_false: include.field2
   - is_false: count
diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/get_source/80_missing.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/get_source/80_missing.yml
index d7d2975790606..661bdadd2b464 100644
--- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/get_source/80_missing.yml
+++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/get_source/80_missing.yml
@@ -8,7 +8,7 @@
      catch: missing
      get_source:
        index: test_1
-        id: 1
+        id: "1"
 ---
 "Missing document with ignore":
@@ -19,5 +19,5 @@
   - do:
      get_source:
        index: test_1
-        id: 1
+        id: "1"
        ignore: 404
diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/get_source/85_source_missing.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/get_source/85_source_missing.yml
index 2ec0585b0f7bc..dfbc4e0ea1fe7 100644
--- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/get_source/85_source_missing.yml
+++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/get_source/85_source_missing.yml
@@ -13,7 +13,7 @@ setup:
   - do:
      index:
        index: test_1
-        id: 1
+        id: "1"
        body: { foo: bar }
@@ -24,7 +24,7 @@ setup:
      catch: missing
      get_source:
        index: test_1
-        id: 1
+        id: "1"
 ---
 "Missing document source with ignore":
@@ -32,5 +32,5 @@ setup:
   - do:
      get_source:
        index: test_1
-        id: 1
+        id: "1"
        ignore: 404
diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/index/10_with_id.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/index/10_with_id.yml
index 06d9eda9b2732..e48bd63079f17 100644
--- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/index/10_with_id.yml
+++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/index/10_with_id.yml
@@ -6,7 +6,7 @@
   - do:
      index:
        index: test-weird-index-中文
-        id: 1
+        id: "1"
        body: { foo: bar }
   - match: { _index: test-weird-index-中文 }
@@ -16,7 +16,7 @@
   - do:
      get:
        index: test-weird-index-中文
-        id: 1
+        id: "1"
   - match: { _index: test-weird-index-中文 }
   - match: { _id: "1"}
diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/index/12_result.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/index/12_result.yml
index 626ff1f443d64..cce77e794b4ce 100644
--- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/index/12_result.yml
+++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/index/12_result.yml
@@ -5,7 +5,7 @@
   - do:
      index:
        index: test_index
-        id: 1
+        id: "1"
        body: { foo: bar }
   - match: { result: created }
@@ -13,7 +13,7 @@
   - do:
      index:
        index: test_index
-        id: 1
+        id: "1"
        body: { foo: bar }
        op_type: index
diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/index/20_optype.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/index/20_optype.yml
index aea2af3860365..4c2865458a7ed 100644
--- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/index/20_optype.yml
+++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/index/20_optype.yml
@@ -6,7 +6,7 @@
   - do:
      index:
        index: test_1
-        id: 1
+        id: "1"
        op_type: create
        body: { foo: bar }
@@ -14,14 +14,14 @@
      catch: conflict
      index:
        index: test_1
-        id: 1
+        id: "1"
        op_type: create
        body: { foo: bar }
   - do:
      index:
        index: test_1
-        id: 1
+        id: "1"
        op_type: index
        body: { foo: bar }
diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/index/30_cas.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/index/30_cas.yml
index 27534131782a5..41f1dfd369b51 100644
--- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/index/30_cas.yml
+++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/index/30_cas.yml
@@ -3,7 +3,7 @@
   - do:
      index:
        index: test_1
-        id: 1
+        id: "1"
        body: { foo: bar }
   - match: { _version: 1}
   - set: { _seq_no: seqno }
@@ -12,7 +12,7 @@
   - do:
      get:
        index: test_1
-        id: 1
+        id: "1"
   - match: { _seq_no: $seqno }
   - match: { _primary_term: $primary_term }
@@ -20,7 +20,7 @@
      catch: conflict
      index:
        index: test_1
-        id: 1
+        id: "1"
        if_seq_no: 10000
        if_primary_term: $primary_term
        body: { foo: bar2 }
@@ -29,7 +29,7 @@
      catch: conflict
      index:
        index: test_1
-        id: 1
+        id: "1"
        if_seq_no: $seqno
        if_primary_term: 1000
        body: { foo: bar2 }
@@ -37,7 +37,7 @@
   - do:
      index:
        index: test_1
-        id: 1
+        id: "1"
        if_seq_no: $seqno
        if_primary_term: $primary_term
        body: { foo: bar2 }
diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/index/35_external_version.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/index/35_external_version.yml
index 857c9d3c39c92..3f00bd449dd51 100644
--- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/index/35_external_version.yml
+++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/index/35_external_version.yml
@@ -6,7 +6,7 @@
   - do:
      index:
        index: test_1
-        id: 1
+        id: "1"
        body: { foo: bar }
        version_type: external
        version: 0
@@ -16,7 +16,7 @@
   - do:
      index:
        index: test_1
-        id: 1
+        id: "1"
        body: { foo: bar }
        version_type: external
        version: 5
@@ -27,7 +27,7 @@
      catch: conflict
      index:
        index: test_1
-        id: 1
+        id: "1"
        body: { foo: bar }
        version_type: external
        version: 5
@@ -36,7 +36,7 @@
      catch: conflict
      index:
        index: test_1
-        id: 1
+        id: "1"
        body: { foo: bar }
        version_type: external
        version: 0
@@ -44,7 +44,7 @@
   - do:
      index:
        index: test_1
-        id: 1
+        id: "1"
        body: { foo: bar }
        version_type: external
        version: 6
diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/index/36_external_gte_version.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/index/36_external_gte_version.yml
index 30a8cf453a7d1..eed80fea3b884 100644
--- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/index/36_external_gte_version.yml
+++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/index/36_external_gte_version.yml
@@ -6,7 +6,7 @@
   - do:
      index:
        index: test_1
-        id: 1
+        id: "1"
        body: { foo: bar }
        version_type: external_gte
        version: 0
@@ -16,7 +16,7 @@
   - do:
      index:
        index: test_1
-        id: 1
+        id: "1"
        body: { foo: bar }
        version_type: external_gte
        version: 5
@@ -27,7 +27,7 @@
      catch: conflict
      index:
        index: test_1
-        id: 1
+        id: "1"
        body: { foo: bar }
        version_type: external_gte
        version: 0
@@ -35,7 +35,7 @@
   - do:
      index:
        index: test_1
-        id: 1
+        id: "1"
        body: { foo: bar2 }
        version_type: external_gte
        version: 5
@@ -45,7 +45,7 @@
   - do:
      index:
        index: test_1
-        id: 1
+        id: "1"
        body: { foo: bar2 }
        version_type: external_gte
        version: 6
diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/index/40_routing.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/index/40_routing.yml
index dcf2224e5807d..3229701365a57 100644
--- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/index/40_routing.yml
+++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/index/40_routing.yml
@@ -19,14 +19,14 @@
   - do:
      index:
        index: test_1
-        id: 1
+        id: "1"
        routing: "5"
        body: { foo: bar }
   - do:
      get:
        index: test_1
-        id: 1
+        id: "1"
        routing: "5"
        stored_fields: [_routing]
@@ -37,5 +37,5 @@
      catch: missing
      get:
        index: test_1
-        id: 1
+        id: "1"
diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/index/60_refresh.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/index/60_refresh.yml
index 8a76930ac266c..290e7910dd6de 100644
--- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/index/60_refresh.yml
+++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/index/60_refresh.yml
@@ -14,7 +14,7 @@
   - do:
      index:
        index: test_1
-        id: 1
+        id: "1"
        body: { foo: bar }
   - do:
@@ -22,14 +22,14 @@
        rest_total_hits_as_int: true
        index: test_1
        body:
-          query: { term: { _id: 1 }}
+          query: { term: { _id: "1" }}
   - match: { hits.total: 0 }
   - do:
      index:
        index: test_1
-        id: 2
+        id: "2"
        refresh: true
        body: { foo: bar }
   - is_true: forced_refresh
@@ -39,7 +39,7 @@
        rest_total_hits_as_int: true
        index: test_1
        body:
-          query: { term: { _id: 2 }}
+          query: { term: { _id: "2" }}
   - match: { hits.total: 1 }
@@ -51,7 +51,7 @@
   - do:
      index:
        index: test_1
-        id: 1
+        id: "1"
        refresh: ""
        body: { foo: bar }
   - is_true: forced_refresh
@@ -61,7 +61,7 @@
        rest_total_hits_as_int: true
        index: test_1
        body:
-          query: { term: { _id: 1 }}
+          query: { term: { _id: "1" }}
   - match: { hits.total: 1 }
diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.flush/10_basic.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.flush/10_basic.yml
index 89b8236225c0a..6df2b6acf4e55 100644
--- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.flush/10_basic.yml
+++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.flush/10_basic.yml
@@ -18,7 +18,7 @@
   - do:
      index:
        index: test
-        id: 1
+        id: "1"
        body: { "message": "a long message to make a periodic flush happen after this index operation" }
   - do:
      indices.stats: { index: test }
diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.stats/10_index.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.stats/10_index.yml
index a00282e586f49..db23c4fad375f 100644
--- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.stats/10_index.yml
+++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.stats/10_index.yml
@@ -18,13 +18,13 @@ setup:
   - do:
      index:
        index: test1
-        id: 1
+        id: "1"
        body: { "foo": "bar" }
   - do:
      index:
        index: test2
-        id: 1
+        id: "1"
        body: { "foo": "baz" }
 ---
diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.stats/11_metric.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.stats/11_metric.yml
index a8e8a3d0b0db5..ba8c61a4aa607 100644
--- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.stats/11_metric.yml
+++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.stats/11_metric.yml
@@ -4,13 +4,13 @@ setup:
   - do:
      index:
        index: test1
-        id: 1
+        id: "1"
        body: { "foo": "bar" }
   - do:
      index:
        index: test2
-        id: 1
+        id: "1"
        body: { "foo": "baz" }
 ---
diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.stats/12_level.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.stats/12_level.yml
index e9bd219a3e0ab..268c5d40a012b 100644
--- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.stats/12_level.yml
+++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.stats/12_level.yml
@@ -4,13 +4,13 @@ setup:
   - do:
      index:
        index: test1
-        id: 1
+        id: "1"
        body: { "foo": "bar" }
   - do:
      index:
        index: test2
-        id: 1
+        id: "1"
        body: { "foo": "baz" }
 ---
diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.stats/13_fields.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.stats/13_fields.yml
index 42a11e467ccb3..f21103f8c70bf 100644
--- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.stats/13_fields.yml
+++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.stats/13_fields.yml
@@ -34,13 +34,13 @@ setup:
   - do:
      index:
        index: test1
-        id: 1
+        id: "1"
        body: { "bar": "bar", "baz": "baz" }
   - do:
      index:
        index: test1
-        id: 2
+        id: "2"
        body: { "bar": "foo", "baz": "foo" }
   - do:
diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.stats/14_groups.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.stats/14_groups.yml
index daf55b38919b2..8905bcee2465a 100644
--- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.stats/14_groups.yml
+++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.stats/14_groups.yml
@@ -4,7 +4,7 @@ setup:
   - do:
      index:
        index: test1
-        id: 1
+        id: "1"
        body: { "bar": "bar", "baz": "baz" }
   - do:
diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.stats/20_translog.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.stats/20_translog.yml
index 92506334a1f7b..49f57ac5df03f 100644
--- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.stats/20_translog.yml
+++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.stats/20_translog.yml
@@ -25,7 +25,7 @@
   - do:
      index:
        index: test
-        id: 1
+        id: "1"
        body: { "foo": "bar" }
   - do:
@@ -77,7 +77,7 @@
   - do:
      index:
        index: test
-        id: 1
+        id: "1"
        body: { "foo": "bar" }
   - do:
@@ -101,7 +101,7 @@
   - do:
      index:
        index: test
-        id: 1
+        id: "1"
        body: { "foo": "bar" }
   - do:
      indices.stats:
diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.stats/30_segments.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.stats/30_segments.yml
index 1a105bc00e51b..72fab59cc6cd7 100644
--- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.stats/30_segments.yml
+++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.stats/30_segments.yml
@@ -27,7 +27,7 @@ setup:
   - do:
      index:
        index: test
-        id: 1
+        id: "1"
        body: { "foo": "bar" }
   - do:
@@ -73,7 +73,7 @@ setup:
   - do:
      index:
        index: test
-        id: 1
+        id: "1"
        body: { "foo": "bar" }
   - do:
diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.stats/40_updates_on_refresh.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.stats/40_updates_on_refresh.yml
index 73c58211c189e..0ee1bb78a95b4 100644
--- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.stats/40_updates_on_refresh.yml
+++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.stats/40_updates_on_refresh.yml
@@ -29,13 +29,13 @@ setup:
   - do:
      index:
        index: test1
-        id: 1
+        id: "1"
        body: { "bar": "bar" }
   - do:
      index:
        index: test1
-        id: 2
+        id: "2"
        body: { "bar": "foo" }
   - do:
@@ -54,7 +54,7 @@ setup:
   - do:
      index:
        index: test1
-        id: 3
+        id: "3"
        body: { "bar": "foo", "baz": "foo" }
   - do:
diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/mget/10_basic.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/mget/10_basic.yml
index dc14cac06227f..ea5b43435ad2c 100644
--- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/mget/10_basic.yml
+++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/mget/10_basic.yml
@@ -8,7 +8,7 @@
   - do:
      index:
        index: test_1
-        id: 1
+        id: "1"
        body: { foo: bar }
   - do:
@@ -18,9 +18,9 @@
      mget:
        body:
          docs:
-            - { _index: test_2, _id: 1}
-            - { _index: test_1, _id: 2}
-            - { _index: test_1, _id: 1}
+            - { _index: test_2, _id: "1"}
+            - { _index: test_1, _id: "2"}
+            - { _index: test_1, _id: "1"}
   - is_false: docs.0.found
   - match: { docs.0._index: test_2 }
diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/mget/12_non_existent_index.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/mget/12_non_existent_index.yml
index 49d75fbc739e2..5ac47ba38a466 100644
--- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/mget/12_non_existent_index.yml
+++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/mget/12_non_existent_index.yml
@@ -5,14 +5,14 @@
   - do:
      index:
        index: test_1
-        id: 1
+        id: "1"
        body: { foo: bar }
   - do:
      mget:
        body:
          docs:
-            - { _index: test_2, _id: 1}
+            - { _index: test_2, _id: "1"}
   - is_false: docs.0.found
   - match: { docs.0._index: test_2 }
@@ -22,7 +22,7 @@
      mget:
        body:
          docs:
-            - { _index: test_1, _id: 1}
+            - { _index: test_1, _id: "1"}
   - is_true: docs.0.found
   - match: { docs.0._index: test_1 }
diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/mget/13_missing_metadata.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/mget/13_missing_metadata.yml
index 7ef1f11df1cae..dc6b5c639c2d2 100644
--- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/mget/13_missing_metadata.yml
+++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/mget/13_missing_metadata.yml
@@ -5,7 +5,7 @@
   - do:
      index:
        index: test_1
-        id: 1
+        id: "1"
        body: { foo: bar }
   - do:
@@ -20,7 +20,7 @@
      mget:
        body:
          docs:
-            - { _id: 1 }
+            - { _id: "1" }
   - do:
      catch: /action_request_validation_exception.+ no documents to get/
@@ -37,7 +37,7 @@
      mget:
        body:
          docs:
-            - { _index: test_1, _id: 1}
+            - { _index: test_1, _id: "1"}
   - is_true: docs.0.found
   - match: { docs.0._index: test_1 }
diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/mget/14_alias_to_multiple_indices.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/mget/14_alias_to_multiple_indices.yml
index 825dc256d786a..8e0bd87905e0a 100644
--- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/mget/14_alias_to_multiple_indices.yml
+++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/mget/14_alias_to_multiple_indices.yml
@@ -29,8 +29,8 @@
      mget:
        body:
          docs:
-            - { _index: test_1, _id: 1}
-            - { _index: test_two_and_three, _id: 2}
+            - { _index: test_1, _id: "1"}
+            - { _index: test_two_and_three, _id: "2"}
   - is_true: docs.0.found
   - match: { docs.0._index: test_1 }
diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/mget/15_ids.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/mget/15_ids.yml
index cf8a79223dfba..e8e1bc3c64e19 100644
--- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/mget/15_ids.yml
+++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/mget/15_ids.yml
@@ -9,13 +9,13 @@
   - do:
      index:
        index: test_1
-        id: 1
+        id: "1"
        body: { foo: bar }
   - do:
      index:
        index: test_1
-        id: 2
+        id: "2"
        body: { foo: baz }
   - do:
diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/mget/17_default_index.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/mget/17_default_index.yml
index 8ff660cf4ce33..4af1a9a9c4c51 100644
--- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/mget/17_default_index.yml
+++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/mget/17_default_index.yml
@@ -8,7 +8,7 @@
   - do:
      index:
        index: test_1
-        id: 1
+        id: "1"
        body: { foo: bar }
   - do:
@@ -16,9 +16,9 @@
        index: test_1
        body:
          docs:
-            - { _index: test_2, _id: 1}
-            - { _id: 2}
-            - { _id: 1}
+            - { _index: test_2, _id: "1"}
+            - { _id: "2"}
+            - { _id: "1"}
   - is_false: docs.0.found
   - match: { docs.0._index: test_2 }
diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/mget/20_stored_fields.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/mget/20_stored_fields.yml
index 01548972f9604..b93ed5b731755 100644
--- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/mget/20_stored_fields.yml
+++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/mget/20_stored_fields.yml
@@ -18,7 +18,7 @@
   - do:
      index:
        index: test_1
-        id: 1
+        id: "1"
        body: { foo: bar }
   - do:
@@ -26,10 +26,10 @@
        index: test_1
        body:
          docs:
-            - { _id: 1 }
-            - { _id: 1, stored_fields: foo }
-            - { _id: 1, stored_fields: [foo] }
-            - { _id: 1, stored_fields: [foo, _source] }
+            - { _id: "1" }
+            - { _id: "1", stored_fields: foo }
+            - { _id: "1", stored_fields: [foo] }
+            - { _id: "1", stored_fields: [foo, _source] }
   - is_false: docs.0.fields
   - match: { docs.0._source: { foo: bar }}
@@ -49,10 +49,10 @@
        stored_fields: foo
        body:
          docs:
-            - { _id: 1 }
-            - { _id: 1, stored_fields: foo }
-            - { _id: 1, stored_fields: [foo] }
-            - { _id: 1, stored_fields: [foo, _source] }
+            - { _id: "1" }
+            - { _id: "1", stored_fields: foo }
+            - { _id: "1", stored_fields: [foo] }
+            - { _id: "1", stored_fields: [foo, _source] }
   - match: { docs.0.fields.foo: [bar] }
   - is_false: docs.0._source
@@ -72,10 +72,10 @@
        stored_fields: [foo]
        body:
          docs:
-            - { _id: 1 }
-            - { _id: 1, stored_fields: foo }
-            - { _id: 1, stored_fields: [foo] }
-            - { _id: 1, stored_fields: [foo, _source] }
+            - { _id: "1" }
+            - { _id: "1", stored_fields: foo }
+            - { _id: "1", stored_fields: [foo] }
+            - { _id: "1", stored_fields: [foo, _source] }
   - match: { docs.0.fields.foo: [bar] }
   - is_false: docs.0._source
@@ -95,10 +95,10 @@
        stored_fields: [foo, _source]
        body:
          docs:
-            - { _id: 1 }
-            - { _id: 1, stored_fields: foo }
-            - { _id: 1, stored_fields: [foo] }
-            - { _id: 1, stored_fields: [foo, _source] }
+            - { _id: "1" }
+            - { _id: "1", stored_fields: foo }
+            - { _id: "1", stored_fields: [foo] }
+            - { _id: "1", stored_fields: [foo, _source] }
   - match: { docs.0.fields.foo: [bar] }
   - match: { docs.0._source: { foo: bar }}
diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/mget/40_routing.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/mget/40_routing.yml
index 45f107d0ef1df..7169c0ec25001 100644
--- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/mget/40_routing.yml
+++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/mget/40_routing.yml
@@ -17,7 +17,7 @@ routing:
   - do:
      index:
        index: test_1
-        id: 1
+        id: "1"
        routing: "5"
        body: { foo: bar }
@@ -27,9 +27,9 @@ routing:
        stored_fields: [_routing]
        body:
          docs:
-            - { _id: 1 }
-            - { _id: 1, routing: "4" }
-            - { _id: 1, routing: "5" }
+            - { _id: "1" }
+            - { _id: "1", routing: "4" }
+            - { _id: "1", routing: "5" }
   - is_false: docs.0.found
   - is_false: docs.1.found
@@ -59,7 +59,7 @@ requires routing:
   - do:
      index:
        index: test_1
-        id: 1
+        id: "1"
        routing: "5"
        body: { foo: bar }
@@ -73,9 +73,9 @@ requires routing:
        stored_fields: [_routing]
        body:
          docs:
-            - { _id: 1, _index: test_1 }
-            - { _id: 1, _index: alias }
-            - { _id: 1, _index: test_1, routing: "5" }
+            - { _id: "1", _index: test_1 }
+            - { _id: "1", _index: alias }
+            - { _id: "1", _index: test_1, routing: "5" }
   - is_false: docs.0.found
   - match: { docs.0.error.reason: "routing is required for [test_1]/[1]" }
[test_1]/[1]" } diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/mget/60_realtime_refresh.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/mget/60_realtime_refresh.yml index 8ae390943c6b5..2f2036217d8dc 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/mget/60_realtime_refresh.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/mget/60_realtime_refresh.yml @@ -18,7 +18,7 @@ - do: index: index: test_1 - id: 1 + id: "1" body: { foo: bar } - do: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/mget/70_source_filtering.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/mget/70_source_filtering.yml index a70151fd2e756..b9c720436a62a 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/mget/70_source_filtering.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/mget/70_source_filtering.yml @@ -4,12 +4,12 @@ setup: - do: index: index: test_1 - id: 1 + id: "1" body: { "include": { "field1": "v1", "field2": "v2" }, "count": 1 } - do: index: index: test_1 - id: 2 + id: "2" body: { "include": { "field1": "v1", "field2": "v2" }, "count": 1 } --- diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/mget/80_deprecated.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/mget/80_deprecated.yml index b6b7a84100dd7..f57d9500375f1 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/mget/80_deprecated.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/mget/80_deprecated.yml @@ -7,13 +7,13 @@ - do: index: index: test_1 - id: 1 + id: "1" body: { foo: bar } - do: index: index: test_1 - id: 2 + id: "2" body: { foo: baz } - do: @@ -21,13 +21,13 @@ mget: body: docs: - - { _index: test_1, _id: 1, _routing : test1 } - - { _index: test_1, _id: 2, _routing : test1 } + - { _index: test_1, _id: "1", _routing : test1 } + - { _index: test_1, _id: "2", _routing : test1 } - do: catch: bad_request mget: body: docs: - - { _index: test_1, _id: 1, _version : 1 } - - { _index: test_1, _id: 2, _version : 1 } + - { _index: test_1, _id: "1", _version : 1 } + - { _index: test_1, _id: "2", _version : 1 } diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/mlt/10_basic.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/mlt/10_basic.yml index 243d953811336..47679b7f1058a 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/mlt/10_basic.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/mlt/10_basic.yml @@ -17,7 +17,7 @@ - do: index: index: test_1 - id: 1 + id: "1" body: { foo: bar, title: howdy } - do: @@ -36,7 +36,7 @@ more_like_this: like: - - _id: 1 + _id: "1" fields: ["title"] - match: {hits.total: 0} diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/mlt/20_docs.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/mlt/20_docs.yml index 50eb344d99048..188e817ce7592 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/mlt/20_docs.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/mlt/20_docs.yml @@ -9,19 +9,19 @@ - do: index: index: test_1 - id: 1 + id: "1" body: { foo: bar } - do: index: index: test_1 - id: 2 + id: "2" body: { foo: baz } - do: index: index: test_1 - id: 3 + id: "3" body: { foo: foo } - do: @@ -45,9 +45,9 @@ foo: bar - _index: test_1 - _id: 2 + _id: "2" - - _id: 3 + _id: "3" include: true min_doc_freq: 0 min_term_freq: 0 diff --git 
diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/mlt/30_unlike.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/mlt/30_unlike.yml
index a0f96eb6b2d1f..c913268d807dd 100644
--- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/mlt/30_unlike.yml
+++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/mlt/30_unlike.yml
@@ -9,19 +9,19 @@
   - do:
      index:
        index: test_1
-        id: 1
+        id: "1"
        body: { foo: bar baz selected }
   - do:
      index:
        index: test_1
-        id: 2
+        id: "2"
        body: { foo: bar }
   - do:
      index:
        index: test_1
-        id: 3
+        id: "3"
        body: { foo: bar baz }
   - do:
@@ -40,10 +40,10 @@
          more_like_this:
            like:
              _index: test_1
-              _id: 1
+              _id: "1"
            unlike:
              _index: test_1
-              _id: 3
+              _id: "3"
            include: true
            min_doc_freq: 0
            min_term_freq: 0
diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/msearch/10_basic.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/msearch/10_basic.yml
index 9f6f2e70ae46d..1052508ca2b88 100644
--- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/msearch/10_basic.yml
+++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/msearch/10_basic.yml
@@ -4,25 +4,25 @@ setup:
   - do:
      index:
        index: index_1
-        id: 1
+        id: "1"
        body: { foo: bar }
   - do:
      index:
        index: index_1
-        id: 2
+        id: "2"
        body: { foo: baz }
   - do:
      index:
        index: index_1
-        id: 3
+        id: "3"
        body: { foo: foo }
   - do:
      index:
        index: index_2
-        id: 1
+        id: "1"
        body: { foo: foo }
   - do:
diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/mtermvectors/30_routing.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/mtermvectors/30_routing.yml
index 36374cfa2daac..a9e1893ea764c 100644
--- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/mtermvectors/30_routing.yml
+++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/mtermvectors/30_routing.yml
@@ -16,7 +16,7 @@ routing:
   - do:
      index:
        index: test_1
-        id: 1
+        id: "1"
        routing: "5"
        body: { foo: bar baz }
@@ -26,9 +26,9 @@ routing:
        fields: foo
        body:
          docs:
-            - { _id: 1 }
-            - { _id: 1, routing: "4" }
-            - { _id: 1, routing: "5" }
+            - { _id: "1" }
+            - { _id: "1", routing: "4" }
+            - { _id: "1", routing: "5" }
   - is_false: docs.0.found
   - is_false: docs.1.found
@@ -60,7 +60,7 @@ requires routing:
   - do:
      index:
        index: test_1
-        id: 1
+        id: "1"
        routing: "5"
        body: { foo: bar baz }
@@ -74,9 +74,9 @@ requires routing:
        fields: foo
        body:
          docs:
-            - { _id: 1, _index: test_1 }
-            - { _id: 1, _index: alias }
-            - { _id: 1, _index: test_1, routing: "5" }
+            - { _id: "1", _index: test_1 }
+            - { _id: "1", _index: alias }
+            - { _id: "1", _index: test_1, routing: "5" }
   - is_false: docs.0.found
   - match: { docs.0.error.reason: "routing is required for [test_1]/[1]" }
diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/range/10_basic.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/range/10_basic.yml
index 20dd6fc614694..6dc62b24a39df 100644
--- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/range/10_basic.yml
+++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/range/10_basic.yml
@@ -26,25 +26,25 @@ setup:
   - do:
      index:
        index: test
-        id: 1
+        id: "1"
        body: { "integer_range" : { "gte": 1, "lte": 5 } }
   - do:
      index:
        index: test
-        id: 2
+        id: "2"
        body: { "integer_range" : { "gte": 1, "lte": 3 } }
   - do:
      index:
        index: test
-        id: 3
+        id: "3"
        body: { "integer_range" : { "gte": 4, "lte": 5 } }
   - do:
      index:
        index: test
-        id: 4
+        id: "4"
        body: { "integer_range" : null }
   - do:
@@ -98,19 +98,19 @@ setup:
   - do:
      index:
        index: test
-        id: 1
+        id: "1"
        body: { "long_range" : { "gte": 1, "lte": 5 } }
   - do:
      index:
        index: test
-        id: 2
+        id: "2"
        body: { "long_range" : { "gte": 1, "lte": 3 } }
   - do:
      index:
        index: test
-        id: 3
+        id: "3"
        body: { "long_range" : { "gte": 4, "lte": 5 } }
@@ -158,19 +158,19 @@ setup:
   - do:
      index:
        index: test
-        id: 1
+        id: "1"
        body: { "float_range" : { "gte": 1, "lte": 5 } }
   - do:
      index:
        index: test
-        id: 2
+        id: "2"
        body: { "float_range" : { "gte": 1, "lte": 3 } }
   - do:
      index:
        index: test
-        id: 3
+        id: "3"
        body: { "float_range" : { "gte": 4, "lte": 5 } }
@@ -218,19 +218,19 @@ setup:
   - do:
      index:
        index: test
-        id: 1
+        id: "1"
        body: { "double_range" : { "gte": 1, "lte": 5 } }
   - do:
      index:
        index: test
-        id: 2
+        id: "2"
        body: { "double_range" : { "gte": 1, "lte": 3 } }
   - do:
      index:
        index: test
-        id: 3
+        id: "3"
        body: { "double_range" : { "gte": 4, "lte": 5 } }
@@ -278,19 +278,19 @@ setup:
   - do:
      index:
        index: test
-        id: 1
+        id: "1"
        body: { "ip_range" : { "gte": "192.168.0.1", "lte": "192.168.0.5" } }
   - do:
      index:
        index: test
-        id: 2
+        id: "2"
        body: { "ip_range" : { "gte": "192.168.0.1", "lte": "192.168.0.3" } }
   - do:
      index:
        index: test
-        id: 3
+        id: "3"
        body: { "ip_range" : { "gte": "192.168.0.4", "lte": "192.168.0.5" } }
@@ -338,19 +338,19 @@ setup:
   - do:
      index:
        index: test
-        id: 1
+        id: "1"
        body: { "date_range" : { "gte": "2017-09-01", "lte": "2017-09-05" } }
   - do:
      index:
        index: test
-        id: 2
+        id: "2"
        body: { "date_range" : { "gte": "2017-09-01", "lte": "2017-09-03" } }
   - do:
      index:
        index: test
-        id: 3
+        id: "3"
        body: { "date_range" : { "gte": "2017-09-04", "lte": "2017-09-05" } }
@@ -401,19 +401,19 @@ setup:
   - do:
      index:
        index: test
-        id: 1
+        id: "1"
        body: { "date_range" : { "gte": "2019-12-14T12:00:00.000Z", "lte": "2019-12-14T13:00:00.000Z" } }
   - do:
      index:
        index: test
-        id: 2
+        id: "2"
        body: { "date_range" : { "gte": "2019-12-15T12:00:00.000Z", "lte": "2019-12-15T13:00:00.000Z" } }
   - do:
      index:
        index: test
-        id: 3
+        id: "3"
        body: { "date_range" : { "gte": "2019-12-16T12:00:00.000Z", "lte": "2019-12-16T13:00:00.000Z" } }
diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/scroll/10_basic.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/scroll/10_basic.yml
index 957d1dc20fbb7..d0ab5783d96a6 100644
--- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/scroll/10_basic.yml
+++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/scroll/10_basic.yml
@@ -6,13 +6,13 @@
   - do:
      index:
        index: test_scroll
-        id: 42
+        id: "42"
        body: { foo: 1 }
   - do:
      index:
        index: test_scroll
-        id: 43
+        id: "43"
        body: { foo: 2 }
   - do:
@@ -37,7 +37,7 @@
   - do:
      index:
        index: test_scroll
-        id: 44
+        id: "44"
        body: { foo: 3 }
   - do:
@@ -78,13 +78,13 @@
   - do:
      index:
        index: test_scroll
-        id: 42
+        id: "42"
        body: { foo: 1 }
   - do:
      index:
        index: test_scroll
-        id: 43
+        id: "43"
        body: { foo: 2 }
   - do:
@@ -109,7 +109,7 @@
   - do:
      index:
        index: test_scroll
-        id: 44
+        id: "44"
        body: { foo: 3 }
   - do:
@@ -145,13 +145,13 @@
   - do:
      index:
        index: test_scroll
-        id: 42
+        id: "42"
        body: { foo: 1 }
   - do:
      index:
        index: test_scroll
-        id: 43
+        id: "43"
        body: { foo: 2 }
   - do:
@@ -176,7 +176,7 @@
   - do:
      index:
        index: test_scroll
-        id: 44
+        id: "44"
        body: { foo: 3 }
   - do:
@@ -240,13 +240,13 @@
   - do:
      index:
        index: test_scroll
-        id: 42
+        id: "42"
        body: { foo: 1 }
   - do:
      index:
        index: test_scroll
-        id: 43
+        id: "43"
        body: { foo: 2 }
   - do:
@@ -285,13 +285,13 @@
   - do:
      index:
        index: test_scroll
-        id: 42
+        id: "42"
        body: { foo: 1 }
   - do:
      index:
        index: test_scroll
-        id: 43
+        id: "43"
        body: { foo: 2 }
   - do:
diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/scroll/11_clear.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/scroll/11_clear.yml
index 97a13dd0c2c5f..dbc3cdc62171f 100644
--- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/scroll/11_clear.yml
+++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/scroll/11_clear.yml
@@ -6,7 +6,7 @@
   - do:
      index:
        index: test_scroll
-        id: 42
+        id: "42"
        body: { foo: bar }
   - do:
@@ -46,7 +46,7 @@
   - do:
      index:
        index: test_scroll
-        id: 42
+        id: "42"
        body: { foo: bar }
   - do:
@@ -87,7 +87,7 @@
   - do:
      index:
        index: test_scroll
-        id: 42
+        id: "42"
        body: { foo: bar }
   - do:
diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/scroll/12_slices.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/scroll/12_slices.yml
index e7b1086499b5a..6cc590a36d6c1 100644
--- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/scroll/12_slices.yml
+++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/scroll/12_slices.yml
@@ -11,25 +11,25 @@ setup:
   - do:
      index:
        index: test_sliced_scroll
-        id: 1
+        id: "1"
        body: { foo: 1 }
   - do:
      index:
        index: test_sliced_scroll
-        id: 2
+        id: "2"
        body: { foo: 2 }
   - do:
      index:
        index: test_sliced_scroll
-        id: 3
+        id: "3"
        body: { foo: 3 }
   - do:
      index:
        index: test_sliced_scroll
-        id: 4
+        id: "4"
        body: { foo: 4 }
   - do:
@@ -45,7 +45,7 @@ setup:
        sort: foo
        body:
          slice:
-            id: 0
+            id: "0"
            max: 2
          query:
            match_all: {}
@@ -78,7 +78,7 @@ setup:
        sort: foo
        body:
          slice:
-            id: 1
+            id: "1"
            max: 2
          query:
            match_all: {}
@@ -113,7 +113,7 @@ setup:
        scroll: 1m
        body:
          slice:
-            id: 0
+            id: "0"
            max: 1025
          query:
            match_all: {}
@@ -132,7 +132,7 @@ setup:
        scroll: 1m
        body:
          slice:
-            id: 0
+            id: "0"
            max: 1025
          query:
            match_all: {}
@@ -154,7 +154,7 @@ setup:
        body:
          slice:
            field: foo
-            id: 0
+            id: "0"
            max: 2
          query:
            match_all: {}
diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/scroll/20_keep_alive.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/scroll/20_keep_alive.yml
index 40c91128d1c76..16d934041c5c0 100644
--- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/scroll/20_keep_alive.yml
+++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/scroll/20_keep_alive.yml
@@ -14,13 +14,13 @@
   - do:
      index:
        index: test_scroll
-        id: 1
+        id: "1"
        body: { foo: 1 }
   - do:
      index:
        index: test_scroll
-        id: 2
+        id: "2"
        body: { foo: 1 }
   - do:
diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/100_avg_metric.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/100_avg_metric.yml
index 0e57bb9abd667..6656fced57cd0 100644
--- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/100_avg_metric.yml
+++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/100_avg_metric.yml
@@ -20,25 +20,25 @@ setup:
        body:
          - index:
              _index: test_1
-              _id: 1
+              _id: "1"
          - int_field: 1
            double_field: 1.0
            string_field: foo
          - index:
              _index: test_1
-              _id: 2
+              _id: "2"
          - int_field: 51
            double_field: 51.0
            string_field: foo
          - index:
              _index: test_1
-              _id: 3
+              _id: "3"
          - int_field: 101
            double_field: 101.0
            string_field: foo
          - index:
              _index: test_1
-              _id: 4
+              _id: "4"
          - int_field: 151
            double_field: 151.0
            string_field: foo
diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/110_max_metric.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/110_max_metric.yml
index 4235679746115..466dc0cf9461b 100644
--- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/110_max_metric.yml
+++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/110_max_metric.yml
@@ -20,25 +20,25 @@ setup:
        body:
          - index:
              _index: test_1
-              _id: 1
+              _id: "1"
          - int_field: 1
            double_field: 1.0
            string_field: foo
          - index:
              _index: test_1
-              _id: 2
+              _id: "2"
          - int_field: 51
            double_field: 51.0
            string_field: foo
          - index:
              _index: test_1
-              _id: 3
+              _id: "3"
          - int_field: 101
            double_field: 101.0
            string_field: foo
          - index:
              _index: test_1
-              _id: 4
+              _id: "4"
          - int_field: 151
            double_field: 151.0
            string_field: foo
diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/120_min_metric.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/120_min_metric.yml
index eb68357258507..30c226f554c3d 100644
--- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/120_min_metric.yml
+++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/120_min_metric.yml
@@ -20,25 +20,25 @@ setup:
        body:
          - index:
              _index: test_1
-              _id: 1
+              _id: "1"
          - int_field: 1
            double_field: 1.0
            string_field: foo
          - index:
              _index: test_1
-              _id: 2
+              _id: "2"
          - int_field: 51
            double_field: 51.0
            string_field: foo
          - index:
              _index: test_1
-              _id: 3
+              _id: "3"
          - int_field: 101
            double_field: 101.0
            string_field: foo
          - index:
              _index: test_1
-              _id: 4
+              _id: "4"
          - int_field: 151
            double_field: 151.0
            string_field: foo
diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/130_sum_metric.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/130_sum_metric.yml
index 3221543276115..218168ac4cb80 100644
--- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/130_sum_metric.yml
+++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/130_sum_metric.yml
@@ -20,25 +20,25 @@ setup:
        body:
          - index:
              _index: test_1
-              _id: 1
+              _id: "1"
          - int_field: 1
            double_field: 1.0
            string_field: foo
          - index:
              _index: test_1
-              _id: 2
+              _id: "2"
          - int_field: 51
            double_field: 51.0
            string_field: foo
          - index:
              _index: test_1
-              _id: 3
+              _id: "3"
          - int_field: 101
            double_field: 101.0
            string_field: foo
          - index:
              _index: test_1
-              _id: 4
+              _id: "4"
          - int_field: 151
            double_field: 151.0
            string_field: foo
diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/140_value_count_metric.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/140_value_count_metric.yml
index b5ac7d2e5db01..2c46a2035b386 100644
--- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/140_value_count_metric.yml
+++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/140_value_count_metric.yml
@@ -20,25 +20,25 @@ setup:
        body:
          - index:
              _index: test_1
-              _id: 1
+              _id: "1"
          - int_field: 1
            double_field: 1.0
            string_field: foo
          - index:
              _index: test_1
-              _id: 2
+              _id: "2"
          - int_field: 51
            double_field: 51.0
            string_field: foo
          - index:
              _index: test_1
-              _id: 3
+              _id: "3"
          - int_field: 101
            double_field: 101.0
            string_field: foo
          - index:
              _index: test_1
-              _id: 4
+              _id: "4"
          - int_field: 151
            double_field: 151.0
            string_field: foo
diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/150_stats_metric.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/150_stats_metric.yml
index 2afad21e61421..74440e039eb6e 100644
--- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/150_stats_metric.yml
+++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/150_stats_metric.yml
@@ -20,25 +20,25 @@ setup:
        body:
          - index:
              _index: test_1
-              _id: 1
+              _id: "1"
          - int_field: 1
            double_field: 1.0
            string_field: foo
          - index:
              _index: test_1
-              _id: 2
+              _id: "2"
          - int_field: 51
            double_field: 51.0
            string_field: foo
          - index:
              _index: test_1
-              _id: 3
+              _id: "3"
          - int_field: 101
            double_field: 101.0
            string_field: foo
          - index:
              _index: test_1
-              _id: 4
+              _id: "4"
          - int_field: 151
            double_field: 151.0
            string_field: foo
diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/160_extended_stats_metric.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/160_extended_stats_metric.yml
index c70ca3356767a..b5a6046b4d7a1 100644
--- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/160_extended_stats_metric.yml
+++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/160_extended_stats_metric.yml
@@ -20,25 +20,25 @@ setup:
        body:
          - index:
              _index: test_1
-              _id: 1
+              _id: "1"
          - int_field: 1
            double_field: 1.0
            string_field: foo
          - index:
              _index: test_1
-              _id: 2
+              _id: "2"
          - int_field: 51
            double_field: 51.0
            string_field: foo
          - index:
              _index: test_1
-              _id: 3
+              _id: "3"
          - int_field: 101
            double_field: 101.0
            string_field: foo
          - index:
              _index: test_1
-              _id: 4
+              _id: "4"
          - int_field: 151
            double_field: 151.0
            string_field: foo
diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/170_cardinality_metric.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/170_cardinality_metric.yml
index d7ca13eef4008..f35c85034f0b8 100644
--- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/170_cardinality_metric.yml
+++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/170_cardinality_metric.yml
@@ -20,25 +20,25 @@ setup:
        body:
          - index:
              _index: test_1
-              _id: 1
+              _id: "1"
          - int_field: 1
            double_field: 1.0
            string_field: foo
          - index:
              _index: test_1
-              _id: 2
+              _id: "2"
          - int_field: 51
            double_field: 51.0
            string_field: foo
          - index:
              _index: test_1
-              _id: 3
+              _id: "3"
          - int_field: 101
            double_field: 101.0
            string_field: foo
          - index:
              _index: test_1
-              _id: 4
+              _id: "4"
          - int_field: 151
            double_field: 151.0
            string_field: foo
diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/180_percentiles_tdigest_metric.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/180_percentiles_tdigest_metric.yml
index 9ed414f6b8439..9a126730c63c6 100644
--- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/180_percentiles_tdigest_metric.yml
+++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/180_percentiles_tdigest_metric.yml
@@ -20,25 +20,25 @@ setup:
        body:
          - index:
              _index: test_1
-              _id: 1
+              _id: "1"
          - int_field: 1
            double_field: 1.0
            string_field: foo
          - index:
              _index: test_1
-              _id: 2
+              _id: "2"
          - int_field: 51
            double_field: 51.0
            string_field: foo
          - index:
              _index: test_1
-              _id: 3
+              _id: "3"
          - int_field: 101
            double_field: 101.0
            string_field: foo
          - index:
              _index: test_1
-              _id: 4
+              _id: "4"
          - int_field: 151
            double_field: 151.0
            string_field: foo
diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/190_percentiles_hdr_metric.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/190_percentiles_hdr_metric.yml
index 32c349c5e46b6..21f3ad31558af 100644
--- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/190_percentiles_hdr_metric.yml
+++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/190_percentiles_hdr_metric.yml
@@ -22,25 +22,25 @@ setup:
        body:
          - index:
              _index: test_1
-              _id: 1
+              _id: "1"
          - int_field: 1
            double_field: 1.0
            string_field: foo
          - index:
              _index: test_1
-              _id: 2
+              _id: "2"
          - int_field: 51
            double_field: 51.0
            string_field: foo
          - index:
              _index: test_1
-              _id: 3
+              _id: "3"
          - int_field: 101
            double_field: 101.0
            string_field: foo
          - index:
              _index: test_1
-              _id: 4
+              _id: "4"
          - int_field: 151
            double_field: 151.0
            string_field: foo
@@ -422,7 +422,7 @@ setup:
   - do:
      index:
        index: test_1
-        id: 5
+        id: "5"
        refresh: true
        body: { int_field: -10 }
diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/200_top_hits_metric.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/200_top_hits_metric.yml
index f7d8619a48de1..5b05382eaa292 100644
--- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/200_top_hits_metric.yml
+++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/200_top_hits_metric.yml
@@ -14,7 +14,7 @@ setup:
   - do:
      index:
        index: my-index
-        id: 1
+        id: "1"
        refresh: true
        body: |
          {
@@ -34,7 +34,7 @@ setup:
   - do:
      index:
        index: my-index
-        id: 2
+        id: "2"
        refresh: true
        body: |
          {
@@ -100,7 +100,7 @@ setup:
   - do:
      index:
        index: disabled-source
-        id: 1
+        id: "1"
        refresh: true
        body:
          users:
diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/20_terms.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/20_terms.yml
index eb871da38db0b..119c5c8234441 100644
--- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/20_terms.yml
+++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/20_terms.yml
@@ -43,19 +43,19 @@ setup:
   - do:
      index:
        index: test_1
-        id: 1
+        id: "1"
        body: { "str" : "abc" }
   - do:
      index:
        index: test_1
-        id: 2
+        id: "2"
        body: { "str": "abc" }
   - do:
      index:
        index: test_1
-        id: 3
+        id: "3"
        body: { "str": "bcd" }
   - do:
@@ -87,19 +87,19 @@ setup:
   - do:
      index:
        index: test_1
-        id: 1
+        id: "1"
        body: { "ip": "::1" }
   - do:
      index:
        index: test_1
-        id: 2
+        id: "2"
        body: { "ip": "127.0.0.1" }
   - do:
      index:
        index: test_1
-        id: 3
+        id: "3"
        body: { "ip": "::1" }
   - do:
@@ -162,19 +162,19 @@ setup:
   - do:
      index:
        index: test_1
-        id: 1
+        id: "1"
        body: { "boolean": true }
   - do:
      index:
        index: test_1
-        id: 2
+        id: "2"
        body: { "boolean": false }
   - do:
      index:
        index: test_1
-        id: 3
+        id: "3"
        body: { "boolean": true }
   - do:
@@ -206,19 +206,19 @@ setup:
   - do:
      index:
        index: test_1
-        id: 1
+        id: "1"
        body: { "integer": 1234 }
   - do:
      index:
        index: test_1
-        id: 2
+        id: "2"
        body: { "integer": 5678 }
   - do:
      index:
        index: test_1
-        id: 3
+        id: "3"
        body: { "integer": 1234 }
   - do:
@@ -250,19 +250,19 @@ setup:
   - do:
      index:
        index: test_1
-        id: 1
+        id: "1"
        body: { "double": 1234.5 }
   - do:
      index:
        index: test_1
-        id: 2
+        id: "2"
        body: { "double": 5678.5 }
   - do:
      index:
        index: test_1
-        id: 3
+        id: "3"
        body: { "double": 1234.5 }
   - do:
@@ -294,19 +294,19 @@ setup:
   - do:
      index:
        index: test_1
-        id: 1
+        id: "1"
        body: { "date": "2016-05-03" }
   - do:
      index:
        index: test_1
-        id: 2
+        id: "2"
        body: { "date": "2014-09-01" }
   - do:
      index:
        index: test_1
-        id: 3
+        id: "3"
        body: { "date": "2016-05-03" }
   - do:
@@ -365,19 +365,19 @@ setup:
   - do:
      index:
        index: test_1
-        id: 1
+        id: "1"
        body: { "str" : "abc" }
   - do:
      index:
        index: test_1
-        id: 2
+        id: "2"
        body: { "str": "abc" }
   - do:
      index:
        index: test_1
-        id: 3
+        id: "3"
        body: { "str": "bcd" }
   - do:
@@ -419,19 +419,19 @@ setup:
   - do:
      index:
        index: test_1
-        id: 1
+        id: "1"
        body: { "integer": 1234 }
   - do:
      index:
        index: test_1
-        id: 2
+        id: "2"
        body: { "integer": 5678 }
   - do:
      index:
        index: test_1
-        id: 3
+        id: "3"
        body: { "integer": 1234 }
   - do:
@@ -469,7 +469,7 @@ setup:
   - do:
      index:
        index: test_1
-        id: 1
+        id: "1"
        body: {}
   - do:
@@ -494,7 +494,7 @@ setup:
   - do:
      index:
        index: test_1
-        id: 1
+        id: "1"
        body: {}
   - do:
@@ -521,7 +521,7 @@ setup:
   - do:
      index:
        index: test_1
-        id: 1
+        id: "1"
        body: {}
   - do:
@@ -548,7 +548,7 @@ setup:
   - do:
      index:
        index: test_1
-        id: 1
+        id: "1"
        body: {}
   - do:
@@ -573,7 +573,7 @@ setup:
   - do:
      index:
        index: test_1
-        id: 1
+        id: "1"
        body: {}
   - do:
@@ -598,31 +598,31 @@ setup:
   - do:
      index:
        index: test_1
-        id: 1
+        id: "1"
        body: {"number": 100}
   - do:
      index:
        index: test_1
-        id: 2
+        id: "2"
        body: {"number": 10}
   - do:
      index:
        index: test_2
-        id: 3
+        id: "3"
        body: {"number": 100.0}
   - do:
      index:
        index: test_2
-        id: 1
+        id: "1"
        body: {"number": 10.0}
   - do:
      index:
        index: test_2
-        id: 2
+        id: "2"
        body: {"number": 14.6}
   - do:
@@ -656,7 +656,7 @@ setup:
      index:
        refresh: true
        index: test_1
-        id: 1
+        id: "1"
        routing: "1"
        body: { "str": "abc" }
@@ -664,7 +664,7 @@ setup:
      index:
        refresh: true
        index: test_1
-        id: 2
+        id: "2"
        routing: "1"
        body: { "str": "abc" }
@@ -672,7 +672,7 @@ setup:
      index:
        refresh: true
        index: test_1
-        id: 3
+        id: "3"
        routing: "1"
        body: { "str": "bcd" }
@@ -702,7 +702,7 @@ setup:
      index:
        refresh: true
        index: test_1
-        id: 1
+        id: "1"
        routing: "1"
        body: { "str": "abc" }
@@ -710,7 +710,7 @@ setup:
      index:
        refresh: true
        index: test_1
-        id: 2
+        id: "2"
        routing: "1"
        body: { "str": "abc" }
@@ -718,7 +718,7 @@ setup:
      index:
        refresh: true
        index: test_1
-        id: 3
+        id: "3"
        routing: "1"
        body: { "str": "bcd" }
diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/220_filters_bucket.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/220_filters_bucket.yml
index 2d691ba3e73e9..b8f90bc836691 100644
--- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/220_filters_bucket.yml
+++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/220_filters_bucket.yml
@@ -20,25 +20,25 @@ setup:
        body:
          - index:
              _index: test_1
-              _id: 1
+              _id: "1"
          - int_field: 1
            double_field: 1.0
            string_field: foo
          - index:
              _index: test_1
-              _id: 2
+              _id: "2"
          - int_field: 51
            double_field: 51.0
            string_field: foo
          - index:
              _index: test_1
-              _id: 3
+              _id: "3"
          - int_field: 101
            double_field: 101.0
            string_field: foo
          - index:
              _index: test_1
-              _id: 4
+              _id: "4"
          - int_field: 151
            double_field: 151.0
            string_field: foo
@@ -287,7 +287,7 @@ setup:
        body:
          - index:
              _index: test_1
-              _id: 100
+              _id: "100"
          - int_field: 1
            double_field: 1.0
            string_field: foo bar
diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/230_composite.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/230_composite.yml
index f405c985f9ee4..cf38f2a6dcfe5 100644
a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/230_composite.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/230_composite.yml @@ -65,25 +65,25 @@ setup: - do: index: index: nonesting - id: 1 + id: "1" body: { "kw": "one", "num": 1 } - do: index: index: nonesting - id: 2 + id: "2" body: { "kw": "two", "num": 2 } - do: index: index: nonesting - id: 3 + id: "3" body: { "kw": "three", "num": 3 } - do: index: index: verynested - id: 1 + id: "1" body: { "department": "compsci", "staff": 12, @@ -122,7 +122,7 @@ setup: - do: index: index: verynested - id: 2 + id: "2" body: { "department": "math", "staff": 20, @@ -161,43 +161,43 @@ setup: - do: index: index: test - id: 1 + id: "1" body: { "keyword": "foo", "long": [10, 20], "geo_point": "37.2343,-115.8067"} - do: index: index: test - id: 2 + id: "2" body: { "keyword": ["foo", "bar"], "geo_point": "41.12,-71.34" } - do: index: index: test - id: 3 + id: "3" body: { "keyword": "bar", "long": [100, 0], "geo_point": "90.0,0.0"} - do: index: index: test - id: 4 + id: "4" body: { "keyword": "bar", "long": [1000, 0], "geo_point": "41.12,-71.34"} - do: index: index: test - id: 5 + id: "5" body: { "date": "2017-10-20T03:08:45" } - do: index: index: test - id: 6 + id: "6" body: { "date": "2017-10-21T07:00:00" } - do: index: index: other - id: 0 + id: "0" body: { "date": "2017-10-20T03:08:45" } - do: @@ -808,7 +808,7 @@ setup: - do: index: index: test_2 - id: 1 + id: "1" body: { "f": "192.168.0.1" } refresh: true @@ -841,7 +841,7 @@ setup: - do: index: index: test - id: 7 + id: "7" body: { "date": "2017-10-22T01:00:00" } refresh: true - do: @@ -911,7 +911,7 @@ setup: - do: index: index: test - id: 7 + id: "7" body: { "date": "2017-10-22T01:00:00" } refresh: true - do: @@ -979,19 +979,19 @@ setup: - do: index: index: test - id: 7 + id: "7" body: { "date_nanos": "2017-11-21T01:00:00" } refresh: true - do: index: index: test - id: 8 + id: "8" body: { "date_nanos": "2017-11-22T01:00:00" } refresh: true - do: index: index: test - id: 9 + id: "9" body: { "date_nanos": "2017-11-22T02:00:00" } refresh: true - do: @@ -1041,7 +1041,7 @@ setup: - do: index: index: sorted_test - id: 2 + id: "2" refresh: true body: { "keyword": "foo", "long": 1 } @@ -1085,7 +1085,7 @@ setup: - do: index: index: sorted_test - id: 2 + id: "2" refresh: true body: { "keyword": "foo", "long": 1 } diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/240_max_buckets.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/240_max_buckets.yml index 7f94aeac64de3..bc52f78bbcccc 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/240_max_buckets.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/240_max_buckets.yml @@ -14,49 +14,49 @@ setup: - do: index: index: test - id: 1 + id: "1" body: { "date": "2014-03-03T00:00:00", "keyword": "dgx" } - do: index: index: test - id: 2 + id: "2" body: { "date": "2015-03-03T00:00:00", "keyword": "dfs" } - do: index: index: test - id: 3 + id: "3" body: { "date": "2016-03-03T00:00:00", "keyword": "foobar" } - do: index: index: test - id: 4 + id: "4" body: { "date": "2017-03-03T00:00:00", "keyword": "foo" } - do: index: index: test - id: 5 + id: "5" body: { "date": "2018-03-03T00:00:00", "keyword": "bar" } - do: index: index: test - id: 6 + id: "6" body: { "date": "2019-03-03T00:00:00", "keyword": "baz" } - do: index: index: test - id: 7 + id: "7" 
body: { "date": "2020-03-03T00:00:00", "keyword": "qux" } - do: index: index: test - id: 8 + id: "8" body: { "date": "2021-03-03T00:00:00", "keyword": "quux" } diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/260_weighted_avg.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/260_weighted_avg.yml index 6b17132c751de..019dffa81960e 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/260_weighted_avg.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/260_weighted_avg.yml @@ -20,22 +20,22 @@ setup: body: - index: _index: test_1 - _id: 1 + _id: "1" - int_field: 1 double_field: 1.0 - index: _index: test_1 - _id: 2 + _id: "2" - int_field: 2 double_field: 2.0 - index: _index: test_1 - _id: 3 + _id: "3" - int_field: 3 double_field: 3.0 - index: _index: test_1 - _id: 4 + _id: "4" - int_field: 4 double_field: 4.0 diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/280_geohash_grid.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/280_geohash_grid.yml index 1368c87a77d7e..3257db80c7d49 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/280_geohash_grid.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/280_geohash_grid.yml @@ -18,27 +18,27 @@ setup: body: - index: _index: test_1 - _id: 1 + _id: "1" - location: "52.374081,4.912350" - index: _index: test_1 - _id: 2 + _id: "2" - location: "52.369219,4.901618" - index: _index: test_1 - _id: 3 + _id: "3" - location: "52.371667,4.914722" - index: _index: test_1 - _id: 4 + _id: "4" - location: "51.222900,4.405200" - index: _index: test_1 - _id: 5 + _id: "5" - location: "48.861111,2.336389" - index: _index: test_1 - _id: 6 + _id: "6" - location: "48.860000,2.327000" - do: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/280_rare_terms.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/280_rare_terms.yml index 9048914174133..a2e74fdbd58b7 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/280_rare_terms.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/280_rare_terms.yml @@ -33,19 +33,19 @@ setup: - do: index: index: test_1 - id: 1 + id: "1" body: { "str" : "abc" } - do: index: index: test_1 - id: 2 + id: "2" body: { "str": "abc" } - do: index: index: test_1 - id: 3 + id: "3" body: { "str": "bcd" } - do: @@ -66,19 +66,19 @@ setup: - do: index: index: test_1 - id: 1 + id: "1" body: { "ip": "::1" } - do: index: index: test_1 - id: 2 + id: "2" body: { "ip": "127.0.0.1" } - do: index: index: test_1 - id: 3 + id: "3" body: { "ip": "::1" } - do: @@ -124,19 +124,19 @@ setup: - do: index: index: test_1 - id: 1 + id: "1" body: { "boolean": true } - do: index: index: test_1 - id: 2 + id: "2" body: { "boolean": false } - do: index: index: test_1 - id: 3 + id: "3" body: { "boolean": true } - do: @@ -157,19 +157,19 @@ setup: - do: index: index: test_1 - id: 1 + id: "1" body: { "integer": 1234 } - do: index: index: test_1 - id: 2 + id: "2" body: { "integer": 5678 } - do: index: index: test_1 - id: 3 + id: "3" body: { "integer": 1234 } - do: @@ -192,19 +192,19 @@ setup: - do: index: index: test_1 - id: 1 + id: "1" body: { "date": "2016-05-03" } - do: index: index: test_1 - id: 2 + id: "2" body: { "date": "2014-09-01" 
} - do: index: index: test_1 - id: 3 + id: "3" body: { "date": "2016-05-03" } - do: @@ -243,7 +243,7 @@ setup: - do: index: index: test_1 - id: 1 + id: "1" body: {} - do: @@ -262,7 +262,7 @@ setup: - do: index: index: test_1 - id: 1 + id: "1" body: {} - do: @@ -281,7 +281,7 @@ setup: - do: index: index: test_1 - id: 1 + id: "1" body: {} - do: @@ -300,7 +300,7 @@ setup: - do: index: index: test_1 - id: 1 + id: "1" body: {} - do: @@ -323,21 +323,21 @@ setup: index: refresh: true index: test_1 - id: 1 + id: "1" body: { "str" : "abc", "number": 1 } - do: index: refresh: true index: test_1 - id: 2 + id: "2" body: { "str": "abc", "number": 2 } - do: index: refresh: true index: test_1 - id: 3 + id: "3" body: { "str": "bcd", "number": 3 } - do: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/290_geotile_grid.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/290_geotile_grid.yml index 35a2330819034..e4054979656c7 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/290_geotile_grid.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/290_geotile_grid.yml @@ -19,27 +19,27 @@ setup: body: - index: _index: test_1 - _id: 1 + _id: "1" - location: "52.374081,4.912350" - index: _index: test_1 - _id: 2 + _id: "2" - location: "52.369219,4.901618" - index: _index: test_1 - _id: 3 + _id: "3" - location: "52.371667,4.914722" - index: _index: test_1 - _id: 4 + _id: "4" - location: "51.222900,4.405200" - index: _index: test_1 - _id: 5 + _id: "5" - location: "48.861111,2.336389" - index: _index: test_1 - _id: 6 + _id: "6" - location: "48.860000,2.327000" - do: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/300_pipeline.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/300_pipeline.yml index a5c575ab5acee..d826a6a7debaa 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/300_pipeline.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/300_pipeline.yml @@ -20,19 +20,19 @@ setup: body: - index: _index: test_1 - _id: 1 + _id: "1" - int_field: 1 - index: _index: test_1 - _id: 2 + _id: "2" - int_field: 2 - index: _index: test_1 - _id: 3 + _id: "3" - int_field: 3 - index: _index: test_1 - _id: 4 + _id: "4" - int_field: 4 --- diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/30_sig_terms.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/30_sig_terms.yml index 62bfffb87dadf..147048c8dce93 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/30_sig_terms.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/30_sig_terms.yml @@ -17,37 +17,37 @@ - do: index: index: goodbad - id: 1 + id: "1" body: { text: "good", class: "good" } - do: index: index: goodbad - id: 2 + id: "2" body: { text: "good", class: "good" } - do: index: index: goodbad - id: 3 + id: "3" body: { text: "bad", class: "bad" } - do: index: index: goodbad - id: 4 + id: "4" body: { text: "bad", class: "bad" } - do: index: index: goodbad - id: 5 + id: "5" body: { text: "good bad", class: "good" } - do: index: index: goodbad - id: 6 + id: "6" body: { text: "good bad", class: "bad" } - do: index: index: goodbad - id: 7 + id: "7" body: { text: "bad", class: "bad" } @@ -108,33 +108,33 @@ - do: index: index: 
goodbad-2 - id: 1 + id: "1" body: { class: "bad" } - do: index: index: goodbad-2 - id: 2 + id: "2" body: { class: "bad" } - do: index: index: goodbad - id: 1 + id: "1" body: { text: "good", class: "good" } - do: index: index: goodbad - id: 2 + id: "2" body: { text: "good", class: "good" } - do: index: index: goodbad - id: 3 + id: "3" body: { text: "bad", class: "bad" } - do: index: index: goodbad - id: 4 + id: "4" body: { text: "bad", class: "bad" } - do: @@ -167,12 +167,12 @@ - do: index: index: ip_index - id: 1 + id: "1" body: { ip: "::1" } - do: index: index: ip_index - id: 2 + id: "2" body: { } - do: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/310_date_agg_per_day_of_week.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/310_date_agg_per_day_of_week.yml index 4d0ae1e56df38..b7e16109826b4 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/310_date_agg_per_day_of_week.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/310_date_agg_per_day_of_week.yml @@ -16,7 +16,7 @@ setup: - do: index: index: test - id: 1 + id: "1" body: { "date": "2009-11-15T14:12:12" } - do: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/380_nested.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/380_nested.yml index 43623846c87c3..7502f73fa6e2d 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/380_nested.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/380_nested.yml @@ -26,13 +26,13 @@ setup: - do: index: index: test - id: 1 + id: "1" body: { "department": "compsci", "staff": 12, "courses": [ { "name": "Object Oriented Programming", "credits": 3, "sessions": [ { "semester": "spr2021", "students": 37 }, { "semester": "fall2020", "students": 45} ] }, { "name": "Theory of Computation", "credits": 4, "sessions": [ { "semester": "spr2021", "students": 19 }, { "semester": "fall2020", "students": 14 } ] } ] } - do: index: index: test - id: 2 + id: "2" body: { "department": "math", "staff": 20, "courses": [ { "name": "Precalculus", "credits": 1, "sessions": [ { "semester": "spr2021", "students": 100 }, { "semester": "fall2020", "students": 134 } ] }, { "name": "Linear Algebra", "credits": 3, "sessions": [ { "semester": "spr2021", "students": 29 }, { "semester": "fall2020", "students": 23 } ] } ] } - do: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/390_geo_bounds_centroid.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/390_geo_bounds_centroid.yml index fe80dc411f308..bfd1763aaba8f 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/390_geo_bounds_centroid.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/390_geo_bounds_centroid.yml @@ -21,27 +21,27 @@ setup: body: - index: _index: test_1 - _id: 1 + _id: "1" - location: "52.374081,4.912350" - index: _index: test_1 - _id: 2 + _id: "2" - location: "52.369219,4.901618" - index: _index: test_1 - _id: 3 + _id: "3" - location: "52.371667,4.914722" - index: _index: test_1 - _id: 4 + _id: "4" - location: "51.222900,4.405200" - index: _index: test_1 - _id: 5 + _id: "5" - location: "48.861111,2.336389" - index: _index: test_1 - _id: 6 + _id: "6" - location: "48.860000,2.327000" - do: diff --git 
a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/49_range_timezone_bug.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/49_range_timezone_bug.yml index 481c32f688be6..dba480e31985a 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/49_range_timezone_bug.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/49_range_timezone_bug.yml @@ -18,7 +18,7 @@ setup: - do: index: index: test - id: 1 + id: "1" body: { "mydate": "2021-08-12T01:00:00.000000000+02:00" } - do: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.highlight/10_unified.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.highlight/10_unified.yml index 54d176f7675ed..3916386abc244 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.highlight/10_unified.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.highlight/10_unified.yml @@ -17,7 +17,7 @@ setup: - do: index: index: test - id: 1 + id: "1" body: "text" : "The quick brown fox is brown." - do: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.highlight/20_fvh.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.highlight/20_fvh.yml index a7f4ea6248c9f..adb361098c942 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.highlight/20_fvh.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.highlight/20_fvh.yml @@ -47,7 +47,7 @@ setup: body: query: term: - id: 1 + id: "1" highlight: type: fvh fields: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.highlight/30_max_analyzed_offset.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.highlight/30_max_analyzed_offset.yml index 828293be114e6..edc69b4e22a35 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.highlight/30_max_analyzed_offset.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.highlight/30_max_analyzed_offset.yml @@ -18,7 +18,7 @@ setup: - do: index: index: test1 - id: 1 + id: "1" body: "field1" : "The quick brown fox went to the forest and saw another fox." "field2" : "The quick brown fox went to the forest and saw another fox." 
diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.inner_hits/10_basic.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.inner_hits/10_basic.yml index cd4e7909cc92f..ee81e9b90341d 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.inner_hits/10_basic.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.inner_hits/10_basic.yml @@ -15,7 +15,7 @@ setup: - do: index: index: test - id: 1 + id: "1" body: "nested_field" : [ { "foo": "bar" } ] @@ -42,7 +42,7 @@ setup: - do: index: index: test - id: 1 + id: "1" body: "nested_field" : [ { "foo": "bar" } ] - do: @@ -64,7 +64,7 @@ setup: - do: index: index: test - id: 1 + id: "1" body: "nested_field" : [ { "foo": "baz" } ] - do: @@ -102,7 +102,7 @@ setup: - do: index: index: disabled_source - id: 1 + id: "1" body: nested_field: field: value diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.inner_hits/20_highlighting.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.inner_hits/20_highlighting.yml index 9f12c31b04c75..17f328046833e 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.inner_hits/20_highlighting.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.inner_hits/20_highlighting.yml @@ -28,7 +28,7 @@ setup: - do: index: index: test - id: 1 + id: "1" refresh: true body: nested: @@ -108,7 +108,7 @@ setup: - do: index: index: disabled_source - id: 1 + id: "1" refresh: true body: nested: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/100_stored_fields.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/100_stored_fields.yml index d2933a44e586d..3f8697e899400 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/100_stored_fields.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/100_stored_fields.yml @@ -5,7 +5,7 @@ setup: - do: index: index: test - id: 1 + id: "1" body: { foo: bar } - do: indices.refresh: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/10_source_filtering.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/10_source_filtering.yml index 1d740f192447d..86a87adc35e89 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/10_source_filtering.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/10_source_filtering.yml @@ -13,7 +13,7 @@ setup: - do: index: index: test_1 - id: 1 + id: "1" body: { "include": { "field1": "v1", "field2": "v2" }, "count": 1, "bigint": 72057594037927936, d: 3.14 } - do: indices.refresh: {} diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/110_field_collapsing.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/110_field_collapsing.yml index d476426312147..a2d19a16ab85f 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/110_field_collapsing.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/110_field_collapsing.yml @@ -11,42 +11,42 @@ setup: - do: index: index: test - id: 1 + id: "1" version_type: external version: 11 body: { numeric_group: 1, tag: A, sort: 10 } - do: index: index: test - id: 2 + id: "2" version_type: external version: 22 body: { numeric_group: 1, tag: B, sort: 6 } - do: index: index: test - id: 3 + id: "3" version_type: external version: 33 body: { numeric_group: 1, tag: A, 
sort: 24 } - do: index: index: test - id: 4 + id: "4" version_type: external version: 44 body: { numeric_group: 25, tag: B, sort: 10 } - do: index: index: test - id: 5 + id: "5" version_type: external version: 55 body: { numeric_group: 25, tag: A, sort: 5 } - do: index: index: test - id: 6 + id: "6" version_type: external version: 66 body: { numeric_group: 3, tag: B, sort: 36 } @@ -438,12 +438,12 @@ setup: - do: index: index: alias-test - id: 1 + id: "1" body: { other_numeric_group: 1, sort: 6 } - do: index: index: alias-test - id: 2 + id: "2" body: { other_numeric_group: 25, sort: 10 } - do: indices.refresh: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/120_batch_reduce_size.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/120_batch_reduce_size.yml index 9c23899fc12dc..2177668c81ced 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/120_batch_reduce_size.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/120_batch_reduce_size.yml @@ -26,19 +26,19 @@ setup: - do: index: index: test_1 - id: 1 + id: "1" body: { "str" : "abc" } - do: index: index: test_1 - id: 2 + id: "2" body: { "str": "abc" } - do: index: index: test_1 - id: 3 + id: "3" body: { "str": "bcd" } - do: indices.refresh: {} diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/140_pre_filter_search_shards.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/140_pre_filter_search_shards.yml index c6509546ca94b..ac8194cdff7dd 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/140_pre_filter_search_shards.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/140_pre_filter_search_shards.yml @@ -60,18 +60,18 @@ setup: - do: index: index: index_1 - id: 1 + id: "1" body: { "created_at": "2016-01-01"} - do: index: index: index_2 - id: 2 + id: "2" body: { "created_at": "2017-01-01" } - do: index: index: index_3 - id: 3 + id: "3" body: { "created_at": "2018-01-01" } - do: indices.refresh: {} @@ -244,18 +244,18 @@ setup: - do: index: index: index_1 - id: 1 + id: "1" body: { "created_at_not_indexed": "2016-01-01"} - do: index: index: index_2 - id: 2 + id: "2" body: { "created_at_not_indexed": "2017-01-01" } - do: index: index: index_3 - id: 3 + id: "3" body: { "created_at_not_indexed": "2018-01-01" } - do: indices.refresh: {} diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/150_rewrite_on_coordinator.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/150_rewrite_on_coordinator.yml index 84ec1ebee4b7d..7e402f14d6809 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/150_rewrite_on_coordinator.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/150_rewrite_on_coordinator.yml @@ -12,19 +12,19 @@ - do: index: index: search_index - id: 1 + id: "1" body: { "user": "1" } - do: index: index: search_index - id: 2 + id: "2" body: { "user": "2" } - do: index: index: search_index - id: 3 + id: "3" body: { "user": "3" } - do: @@ -49,7 +49,7 @@ - do: index: index: lookup_index - id: 1 + id: "1" body: { "followers" : ["1", "3"] } - do: indices.refresh: {} diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/160_exists_query.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/160_exists_query.yml index 0c4ab603420f2..7efdf521d3c3c 100644 --- 
a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/160_exists_query.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/160_exists_query.yml @@ -51,7 +51,7 @@ setup: Content-Type: application/json index: index: "test" - id: 1 + id: "1" body: binary: "YWJjZGUxMjM0" boolean: true @@ -76,7 +76,7 @@ setup: Content-Type: application/json index: index: "test" - id: 2 + id: "2" body: binary: "YWJjZGUxMjM0" boolean: false @@ -100,7 +100,7 @@ setup: Content-Type: application/json index: index: "test" - id: 3 + id: "3" routing: "route_me" body: binary: "YWJjZGUxMjM0" @@ -123,7 +123,7 @@ setup: - do: index: index: "test" - id: 4 + id: "4" body: {} - do: @@ -190,7 +190,7 @@ setup: Content-Type: application/json index: index: "test-no-dv" - id: 1 + id: "1" body: binary: "YWJjZGUxMjM0" boolean: true @@ -215,7 +215,7 @@ setup: Content-Type: application/json index: index: "test-no-dv" - id: 2 + id: "2" body: binary: "YWJjZGUxMjM0" boolean: false @@ -239,7 +239,7 @@ setup: Content-Type: application/json index: index: "test-no-dv" - id: 3 + id: "3" routing: "route_me" body: binary: "YWJjZGUxMjM0" @@ -262,7 +262,7 @@ setup: - do: index: index: "test-no-dv" - id: 4 + id: "4" body: {} - do: @@ -278,7 +278,7 @@ setup: - do: index: index: "test-unmapped" - id: 1 + id: "1" body: unrelated: "foo" diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/161_exists_query_within_nested_query.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/161_exists_query_within_nested_query.yml index 592147c0c1d93..4dfba3652d7db 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/161_exists_query_within_nested_query.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/161_exists_query_within_nested_query.yml @@ -54,7 +54,7 @@ setup: Content-Type: application/json index: index: "test" - id: 1 + id: "1" body: nested: - binary: "YWJjZGUxMjM0" @@ -80,7 +80,7 @@ setup: Content-Type: application/json index: index: "test" - id: 2 + id: "2" body: nested: - binary: "YWJjZGUxMjM0" @@ -105,7 +105,7 @@ setup: Content-Type: application/json index: index: "test" - id: 3 + id: "3" routing: "route_me" body: nested: @@ -129,7 +129,7 @@ setup: - do: index: index: "test" - id: 4 + id: "4" body: {} - do: @@ -199,7 +199,7 @@ setup: Content-Type: application/json index: index: "test-no-dv" - id: 1 + id: "1" body: nested: - binary: "YWJjZGUxMjM0" @@ -225,7 +225,7 @@ setup: Content-Type: application/json index: index: "test-no-dv" - id: 2 + id: "2" body: nested: - binary: "YWJjZGUxMjM0" @@ -250,7 +250,7 @@ setup: Content-Type: application/json index: index: "test-no-dv" - id: 3 + id: "3" routing: "route_me" body: nested: @@ -274,7 +274,7 @@ setup: - do: index: index: "test-no-dv" - id: 4 + id: "4" body: {} - do: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/190_index_prefix_search.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/190_index_prefix_search.yml index f667786f557f9..085a0396d5576 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/190_index_prefix_search.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/190_index_prefix_search.yml @@ -14,7 +14,7 @@ setup: - do: index: index: test - id: 1 + id: "1" body: { text: some short words with a stupendously long one } - do: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/200_ignore_malformed.yml 
b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/200_ignore_malformed.yml index e48289a53909d..b08f532585fde 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/200_ignore_malformed.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/200_ignore_malformed.yml @@ -18,19 +18,19 @@ setup: - do: index: index: test - id: 1 + id: "1" body: { "my_date": "2018-05-11", "my_ip": ":::1" } - do: index: index: test - id: 2 + id: "2" body: { "my_date": "bar", "my_ip": "192.168.1.42" } - do: index: index: test - id: 3 + id: "3" body: { "my_date": "bar", "my_ip": "quux" } - do: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/200_index_phrase_search.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/200_index_phrase_search.yml index d37c0c8cb3f72..1aa776c4709b2 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/200_index_phrase_search.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/200_index_phrase_search.yml @@ -14,7 +14,7 @@ - do: index: index: test - id: 1 + id: "1" body: { text: "peter piper picked a peck of pickled peppers" } - do: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/20_default_values.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/20_default_values.yml index b2ec345a6fe8f..d775c47926fc6 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/20_default_values.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/20_default_values.yml @@ -8,13 +8,13 @@ setup: - do: index: index: test_1 - id: 1 + id: "1" body: { foo: bar } - do: index: index: test_2 - id: 42 + id: "42" body: { foo: bar } - do: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/220_total_hits_object.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/220_total_hits_object.yml index 965d18844e3cc..0f352de9c414b 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/220_total_hits_object.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/220_total_hits_object.yml @@ -10,43 +10,43 @@ setup: - do: index: index: test_1 - id: 1 + id: "1" body: { foo: bar } - do: index: index: test_1 - id: 3 + id: "3" body: { foo: baz } - do: index: index: test_1 - id: 2 + id: "2" body: { foo: bar } - do: index: index: test_1 - id: 4 + id: "4" body: { foo: bar } - do: index: index: test_2 - id: 42 + id: "42" body: { foo: bar } - do: index: index: test_2 - id: 24 + id: "24" body: { foo: baz } - do: index: index: test_2 - id: 36 + id: "36" body: { foo: bar } - do: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/300_sequence_numbers.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/300_sequence_numbers.yml index 56871bfe02645..60ce254169155 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/300_sequence_numbers.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/300_sequence_numbers.yml @@ -6,14 +6,14 @@ setup: - do: index: index: test_1 - id: 1 + id: "1" body: { foo: foo } ## we index again in order to make the seq# 1 (so we can check for the field existence with is_false) - do: index: index: test_1 - id: 1 + id: "1" body: { foo: bar } - do: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/30_limits.yml 
b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/30_limits.yml index b4ff84e13353f..286e203ef5c7d 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/30_limits.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/30_limits.yml @@ -10,7 +10,7 @@ setup: - do: index: index: test_1 - id: 1 + id: "1" body: { foo: bar, foo2: bar, foo3: bar } - do: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/310_match_bool_prefix.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/310_match_bool_prefix.yml index f92b0ffda80e3..e9e39ed15ea56 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/310_match_bool_prefix.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/310_match_bool_prefix.yml @@ -17,7 +17,7 @@ setup: - do: index: index: test - id: 1 + id: "1" body: my_field1: "brown fox jump" my_field2: "xylophone" @@ -25,7 +25,7 @@ setup: - do: index: index: test - id: 2 + id: "2" body: my_field1: "brown emu jump" my_field2: "xylophone" @@ -33,7 +33,7 @@ setup: - do: index: index: test - id: 3 + id: "3" body: my_field1: "jumparound" my_field2: "emu" @@ -41,7 +41,7 @@ setup: - do: index: index: test - id: 4 + id: "4" body: my_field1: "dog" my_field2: "brown fox jump lazy" diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/330_fetch_fields.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/330_fetch_fields.yml index c18af5d7d4188..14a24d5f20933 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/330_fetch_fields.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/330_fetch_fields.yml @@ -19,7 +19,7 @@ setup: - do: index: index: test - id: 1 + id: "1" body: keyword: [ "a", "b" ] integer_range: @@ -63,7 +63,7 @@ setup: - do: index: index: test - id: 1 + id: "1" body: keyword: "value" date: "1990-12-29T22:30:00.000Z" @@ -111,7 +111,7 @@ setup: - do: index: index: test - id: 1 + id: "1" body: keyword: [ "a" ] @@ -148,7 +148,7 @@ setup: - do: index: index: test - id: 1 + id: "1" body: keyword: "a" integer: 42 @@ -156,7 +156,7 @@ setup: - do: index: index: test - id: 2 + id: "2" body: keyword: "b" integer: "not an integer" @@ -194,7 +194,7 @@ setup: - do: index: index: test - id: 1 + id: "1" refresh: true body: keyword: "a" @@ -236,7 +236,7 @@ setup: - do: index: index: test - id: 1 + id: "1" body: date: "1990-12-29T22:30:00.000Z" @@ -282,7 +282,7 @@ setup: - do: index: index: test - id: 1 + id: "1" refresh: true body: count: "some text" @@ -317,7 +317,7 @@ Test unmapped field: - do: index: index: test - id: 1 + id: "1" refresh: true body: f1: some text @@ -378,7 +378,7 @@ Test unmapped fields inside disabled objects: - do: index: index: test - id: 1 + id: "1" refresh: true body: f1: @@ -426,7 +426,7 @@ Test nested fields: - do: index: index: test - id: 1 + id: "1" refresh: true body: products: @@ -505,7 +505,7 @@ Test nested field inside object structure: - do: index: index: test - id: 1 + id: "1" refresh: true body: obj: @@ -516,7 +516,7 @@ Test nested field inside object structure: - do: index: index: test - id: 2 + id: "2" refresh: true body: obj: @@ -619,7 +619,7 @@ Test doubly nested fields: - do: index: index: test - id: 1 + id: "1" refresh: true body: id: abcd1234 @@ -691,7 +691,7 @@ Test nested fields with unmapped subfields: - do: index: index: test - id: 1 + id: "1" refresh: true body: id: abcd1234 @@ -752,7 +752,7 @@ Test nested 
fields with ignored subfields: - do: index: index: test - id: 1 + id: "1" refresh: true body: malformed_outside : "bad_value_1" @@ -796,7 +796,7 @@ Test nested field with sibling field resolving to DocValueFetcher: - do: index: index: test - id: 1 + id: "1" refresh: true body: owner: "Anna Ott" @@ -838,7 +838,7 @@ Test nested field with sibling field resolving to DocValueFetcher: - do: index: index: test - id: 1 + id: "1" refresh: true body: number: [ 1, 2, "3", "four", 5, 6 ] @@ -880,7 +880,7 @@ Test token_count inside nested field doesn't fail: - do: index: index: test - id: 1 + id: "1" refresh: true body: user: @@ -916,7 +916,7 @@ error includes field name: - do: index: index: test - id: 1 + id: "1" refresh: true body: keyword: "value" @@ -953,7 +953,7 @@ error includes glob pattern: - do: index: index: test - id: 1 + id: "1" refresh: true body: dkeyword: "value" @@ -992,7 +992,7 @@ error for flattened includes whole path: - do: index: index: test - id: 1 + id: "1" refresh: true body: flattened: @@ -1031,7 +1031,7 @@ test fetching metadata fields: - do: index: index: test - id: 1 + id: "1" refresh: true body: field: foo diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/340_flattened.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/340_flattened.yml index d7a9020e7e9f6..b461a6c4defb2 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/340_flattened.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/340_flattened.yml @@ -16,7 +16,7 @@ setup: - do: index: index: flattened_test - id: 1 + id: "1" body: flattened: key: some_value @@ -66,7 +66,7 @@ setup: - do: index: index: test - id: 1 + id: "1" body: headers: content-type: application/javascript @@ -76,7 +76,7 @@ setup: - do: index: index: test - id: 2 + id: "2" body: headers: content-type: text/plain @@ -122,7 +122,7 @@ setup: - do: index: index: test - id: 1 + id: "1" body: flattened: some_field: some_value @@ -164,7 +164,7 @@ setup: - do: index: index: test - id: 1 + id: "1" body: flattened: some_field: some_value @@ -198,7 +198,7 @@ setup: - do: index: index: test - id: 1 + id: "1" body: flattened: some_field: some_value diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/350_binary_field.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/350_binary_field.yml index 6fbdb575fccb5..d58e1f7dc2c18 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/350_binary_field.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/350_binary_field.yml @@ -21,7 +21,7 @@ index: index: test refresh: true - id: 1 + id: "1" body: binary: U29tZSBiaW5hcnkgYmxvYg== diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/350_point_in_time.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/350_point_in_time.yml index b3ad192710cdf..bc3479b705180 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/350_point_in_time.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/350_point_in_time.yml @@ -5,19 +5,19 @@ setup: - do: index: index: test - id: 1 + id: "1" body: { id: 1, foo: bar, age: 18 } - do: index: index: test - id: 42 + id: "42" body: { id: 42, foo: bar, age: 18 } - do: index: index: test - id: 172 + id: "172" body: { id: 172, foo: bar, age: 24 } - do: @@ -27,7 +27,7 @@ setup: - do: index: index: test2 - id: 45 + id: "45" body: { id: 45, foo: bar, age: 19 } 
- do: @@ -67,7 +67,7 @@ setup: - do: index: index: test - id: 100 + id: "100" body: { id: 100, foo: bar, age: 23 } - do: indices.refresh: @@ -148,7 +148,7 @@ setup: search: body: slice: - id: 0 + id: "0" max: 2 size: 1 query: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/360_combined_fields.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/360_combined_fields.yml index de1baae05097d..2b38a72c70a06 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/360_combined_fields.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/360_combined_fields.yml @@ -15,7 +15,7 @@ setup: - do: index: index: test - id: 1 + id: "1" body: title: "Time, Clocks and the Ordering of Events in a Distributed System" abstract: "The concept of one event happening before another..." diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/370_profile.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/370_profile.yml index 0c4b1089b8122..f16b1dd71f188 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/370_profile.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/370_profile.yml @@ -14,7 +14,7 @@ setup: - do: index: index: test - id: 1 + id: "1" refresh: true body: keyword: [ "a", "b" ] @@ -103,7 +103,7 @@ fetch nested source: - do: index: index: test_nested - id: 1 + id: "1" refresh: true body: keyword: [ "a", "b" ] diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/390_doc_values_search.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/390_doc_values_search.yml index b817c5dfdb2cb..cef4bbc187c84 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/390_doc_values_search.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/390_doc_values_search.yml @@ -52,7 +52,7 @@ setup: - do: index: index: test - id: 1 + id: "1" body: byte : 1 double : 1.0 @@ -70,7 +70,7 @@ setup: - do: index: index: test - id: 2 + id: "2" body: byte : 2 double : 2.0 diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/40_indices_boost.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/40_indices_boost.yml index 42951e868c6b8..9e6b4582d8f22 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/40_indices_boost.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/40_indices_boost.yml @@ -19,13 +19,13 @@ setup: - do: index: index: test_1 - id: 1 + id: "1" body: { foo: bar } - do: index: index: test_2 - id: 1 + id: "1" body: { foo: bar } - do: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/60_query_string.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/60_query_string.yml index 131c8f92a231e..a264f6c699a41 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/60_query_string.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/60_query_string.yml @@ -12,7 +12,7 @@ - do: index: index: test - id: 1 + id: "1" body: { field: foo bar} - do: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/70_response_filtering.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/70_response_filtering.yml index e38f5f862a273..825bd9ce6de8e 100644 --- 
a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/70_response_filtering.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/70_response_filtering.yml @@ -6,13 +6,13 @@ - do: index: index: test - id: 1 + id: "1" body: { foo: bar } - do: index: index: test - id: 2 + id: "2" body: { foo: bar } - do: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/90_search_after.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/90_search_after.yml index 3d326d75fbf70..e1f367e8c217e 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/90_search_after.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/90_search_after.yml @@ -5,19 +5,19 @@ - do: index: index: test - id: 1 + id: "1" body: { id: 1, foo: bar, age: 18 } - do: index: index: test - id: 42 + id: "42" body: { id: 42, foo: bar, age: 18 } - do: index: index: test - id: 172 + id: "172" body: { id: 172, foo: bar, age: 24 } - do: @@ -231,7 +231,7 @@ - do: index: index: test - id: 1 + id: "1" body: { id: 1, foo: bar, age: 18 } - do: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/issue4895.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/issue4895.yml index 4d8b1484c74ac..cc032a400060f 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/issue4895.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search/issue4895.yml @@ -7,7 +7,7 @@ setup: - do: index: index: test - id: 1 + id: "1" body: user : foo amount : 35 diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/suggest/20_completion.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/suggest/20_completion.yml index ff6ecfb0c34cf..cf88ced4dd740 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/suggest/20_completion.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/suggest/20_completion.yml @@ -32,14 +32,14 @@ setup: - do: index: index: test - id: 1 + id: "1" body: suggest_1: "bar" - do: index: index: test - id: 2 + id: "2" body: suggest_1: "baz" @@ -65,7 +65,7 @@ setup: - do: index: index: test - id: 1 + id: "1" body: suggest_2: ["bar", "foo"] @@ -106,7 +106,7 @@ setup: - do: index: index: test - id: 1 + id: "1" body: suggest_3: input: "bar" @@ -115,7 +115,7 @@ setup: - do: index: index: test - id: 2 + id: "2" body: suggest_3: input: "baz" @@ -145,7 +145,7 @@ setup: - do: index: index: test - id: 1 + id: "1" body: suggest_4: - input: "bar" @@ -156,7 +156,7 @@ setup: - do: index: index: test - id: 2 + id: "2" body: suggest_4: - input: "baz" @@ -203,7 +203,7 @@ setup: - do: index: index: test - id: 1 + id: "1" body: suggest_5a: "bar" suggest_5b: "baz" @@ -245,7 +245,7 @@ setup: - do: index: index: test - id: 1 + id: "1" body: suggest_6: input: "bar" @@ -256,7 +256,7 @@ setup: - do: index: index: test - id: 2 + id: "2" body: suggest_6: input: "baz" @@ -294,14 +294,14 @@ setup: - do: index: index: test - id: 1 + id: "1" body: suggest_1: "bar" - do: index: index: test - id: 2 + id: "2" body: suggest_1: "bar" diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/suggest/30_context.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/suggest/30_context.yml index 6f3bae7f8e46f..f88726469f51c 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/suggest/30_context.yml +++ 
b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/suggest/30_context.yml @@ -50,7 +50,7 @@ setup: - do: index: index: test - id: 1 + id: "1" body: suggest_context: input: "foo red" @@ -60,7 +60,7 @@ setup: - do: index: index: test - id: 2 + id: "2" body: suggest_context: input: "foo blue" @@ -92,7 +92,7 @@ setup: - do: index: index: test - id: 1 + id: "1" body: suggest_context_with_path: input: "Foo red" @@ -102,7 +102,7 @@ setup: - do: index: index: test - id: 2 + id: "2" body: suggest_context_with_path: "Foo blue" color: "blue" @@ -163,7 +163,7 @@ setup: - do: index: index: test - id: 1 + id: "1" body: suggest_geo: input: "Marriot in Amsterdam" @@ -175,7 +175,7 @@ setup: - do: index: index: test - id: 2 + id: "2" body: suggest_geo: input: "Marriot in Berlin" @@ -214,7 +214,7 @@ setup: - do: index: index: test - id: 1 + id: "1" body: suggest_multi_contexts: "Marriot in Amsterdam" location: @@ -225,7 +225,7 @@ setup: - do: index: index: test - id: 2 + id: "2" body: suggest_multi_contexts: "Marriot in Berlin" location: @@ -281,7 +281,7 @@ setup: - do: index: index: test - id: 1 + id: "1" body: suggest_context: input: "foo" @@ -291,7 +291,7 @@ setup: - do: index: index: test - id: 1 + id: "1" body: suggest_context: input: "foo" @@ -301,7 +301,7 @@ setup: - do: index: index: test - id: 2 + id: "2" body: suggest_context: input: "foo" @@ -334,7 +334,7 @@ setup: - do: index: index: test - id: 1 + id: "1" body: suggest_context: input: "foo" @@ -349,7 +349,7 @@ setup: catch: /Contexts are mandatory in context enabled completion field \[suggest_context\]/ index: index: test - id: 2 + id: "2" body: suggest_context: input: "foo" diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/suggest/50_completion_with_multi_fields.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/suggest/50_completion_with_multi_fields.yml index 8e793e4beb6f0..8bbda56db7e53 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/suggest/50_completion_with_multi_fields.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/suggest/50_completion_with_multi_fields.yml @@ -17,14 +17,14 @@ - do: index: index: completion_with_sub_keyword - id: 1 + id: "1" body: suggest_1: "bar" - do: index: index: completion_with_sub_keyword - id: 2 + id: "2" body: suggest_1: "baz" @@ -75,14 +75,14 @@ - do: index: index: completion_with_sub_completion - id: 1 + id: "1" body: suggest_1: "bar" - do: index: index: completion_with_sub_completion - id: 2 + id: "2" body: suggest_1: "baz" @@ -130,7 +130,7 @@ - do: index: index: completion_with_context - id: 1 + id: "1" body: suggest_1: input: "foo red" @@ -140,7 +140,7 @@ - do: index: index: completion_with_context - id: 2 + id: "2" body: suggest_1: input: "foo blue" @@ -186,7 +186,7 @@ - do: index: index: completion_with_weight - id: 1 + id: "1" body: suggest_1: input: "bar" @@ -195,7 +195,7 @@ - do: index: index: completion_with_weight - id: 2 + id: "2" body: suggest_1: input: "baz" @@ -238,7 +238,7 @@ - do: index: index: geofield_with_completion - id: 1 + id: "1" body: geofield: "hgjhrwysvqw7" #41.12,-72.34,12 @@ -246,7 +246,7 @@ - do: index: index: geofield_with_completion - id: 1 + id: "1" body: geofield: "hgm4psywmkn7" #41.12,-71.34,12 diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/termvectors/20_issue7121.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/termvectors/20_issue7121.yml index e3ba834de074f..6b03428332932 100644 --- 
a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/termvectors/20_issue7121.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/termvectors/20_issue7121.yml @@ -22,14 +22,14 @@ - do: index: index: testidx - id: 1 + id: "1" body: text : "foo bar" - do: termvectors: index: testidx - id: 1 + id: "1" realtime: false - match: { _index: "testidx" } diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/termvectors/30_realtime.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/termvectors/30_realtime.yml index 8bfbee483690c..cc2272f813f32 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/termvectors/30_realtime.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/termvectors/30_realtime.yml @@ -16,13 +16,13 @@ - do: index: index: test_1 - id: 1 + id: "1" body: { foo: bar } - do: termvectors: index: test_1 - id: 1 + id: "1" realtime: false - is_false: found @@ -30,7 +30,7 @@ - do: termvectors: index: test_1 - id: 1 + id: "1" realtime: true - is_true: found diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/90_unsupported_operations.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/90_unsupported_operations.yml index a742d0a8148a8..af48b28c75c4a 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/90_unsupported_operations.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/90_unsupported_operations.yml @@ -111,7 +111,7 @@ delete: catch: /delete is not supported because the destination index \[test\] is in time series mode/ delete: index: test - id: 1 + id: "1" --- delete over _bulk: @@ -144,7 +144,7 @@ noop update: catch: /update is not supported because the destination index \[test\] is in time series mode/ update: index: test - id: 1 + id: "1" body: doc: {} @@ -160,7 +160,7 @@ update: catch: /update is not supported because the destination index \[test\] is in time series mode/ update: index: test - id: 1 + id: "1" body: doc: "@timestamp": "2021-04-28T18:35:24.467Z" diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/update/10_doc.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/update/10_doc.yml index dda545d56e350..2a8734210432e 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/update/10_doc.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/update/10_doc.yml @@ -6,7 +6,7 @@ - do: index: index: test_1 - id: 1 + id: "1" body: foo: bar count: 1 @@ -15,7 +15,7 @@ - do: update: index: test_1 - id: 1 + id: "1" body: doc: foo: baz @@ -29,7 +29,7 @@ - do: get: index: test_1 - id: 1 + id: "1" - match: { _source.foo: baz } - match: { _source.count: 1 } diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/update/11_shard_header.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/update/11_shard_header.yml index 5a0dc0485b103..85ba488ac9e98 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/update/11_shard_header.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/update/11_shard_header.yml @@ -18,13 +18,13 @@ - do: index: index: foobar - id: 1 + id: "1" body: { foo: bar } - do: update: index: foobar - id: 1 + id: "1" body: doc: foo: baz diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/update/12_result.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/update/12_result.yml index 6c69bc2aa993b..fe5371dfeea96 100644 --- 
a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/update/12_result.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/update/12_result.yml @@ -5,7 +5,7 @@ - do: update: index: test_1 - id: 1 + id: "1" body: doc: { foo: bar } doc_as_upsert: true @@ -16,7 +16,7 @@ - do: update: index: test_1 - id: 1 + id: "1" body: doc: { foo: bar } doc_as_upsert: true @@ -27,7 +27,7 @@ - do: update: index: test_1 - id: 1 + id: "1" body: doc: { foo: bar } doc_as_upsert: true @@ -39,7 +39,7 @@ - do: update: index: test_1 - id: 1 + id: "1" body: doc: { foo: baz } doc_as_upsert: true diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/update/13_legacy_doc.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/update/13_legacy_doc.yml index a97c68ba6ee3f..e4ebf83608b60 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/update/13_legacy_doc.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/update/13_legacy_doc.yml @@ -4,7 +4,7 @@ - do: index: index: test_1 - id: 1 + id: "1" body: foo: bar count: 1 @@ -13,7 +13,7 @@ - do: update: index: test_1 - id: 1 + id: "1" body: doc: foo: baz @@ -27,7 +27,7 @@ - do: get: index: test_1 - id: 1 + id: "1" - match: { _source.foo: baz } - match: { _source.count: 1 } diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/update/16_noop.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/update/16_noop.yml index 12f118ac28d01..ae21e68b160f5 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/update/16_noop.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/update/16_noop.yml @@ -6,7 +6,7 @@ - do: index: index: test_1 - id: 1 + id: "1" body: { foo: bar } - match: { _seq_no: 0 } @@ -17,7 +17,7 @@ - do: update: index: test_1 - id: 1 + id: "1" body: doc: { foo: bar } @@ -29,7 +29,7 @@ - do: update: index: test_1 - id: 1 + id: "1" body: doc: { foo: bar } detect_noop: false diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/update/20_doc_upsert.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/update/20_doc_upsert.yml index 39e2273d5cafb..5f649ed497475 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/update/20_doc_upsert.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/update/20_doc_upsert.yml @@ -5,7 +5,7 @@ - do: update: index: test_1 - id: 1 + id: "1" body: doc: { foo: bar, count: 1 } upsert: { foo: baz } @@ -13,7 +13,7 @@ - do: get: index: test_1 - id: 1 + id: "1" - match: { _source.foo: baz } - is_false: _source.count @@ -22,7 +22,7 @@ - do: update: index: test_1 - id: 1 + id: "1" body: doc: { foo: bar, count: 1 } upsert: { foo: baz } @@ -30,7 +30,7 @@ - do: get: index: test_1 - id: 1 + id: "1" - match: { _source.foo: bar } - match: { _source.count: 1 } diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/update/22_doc_as_upsert.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/update/22_doc_as_upsert.yml index 0d695cb754056..9d94f7cf8e93e 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/update/22_doc_as_upsert.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/update/22_doc_as_upsert.yml @@ -5,7 +5,7 @@ - do: update: index: test_1 - id: 1 + id: "1" body: doc: { foo: bar, count: 1 } doc_as_upsert: true @@ -13,7 +13,7 @@ - do: get: index: test_1 - id: 1 + id: "1" - match: { _source.foo: bar } - match: { _source.count: 1 } @@ -22,7 +22,7 @@ - do: 
update: index: test_1 - id: 1 + id: "1" body: doc: { count: 2 } doc_as_upsert: true @@ -30,7 +30,7 @@ - do: get: index: test_1 - id: 1 + id: "1" - match: { _source.foo: bar } - match: { _source.count: 2 } diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/update/35_if_seq_no.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/update/35_if_seq_no.yml index 318ecd3a7c0ce..12a0c88e5a103 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/update/35_if_seq_no.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/update/35_if_seq_no.yml @@ -4,7 +4,7 @@ catch: missing update: index: test_1 - id: 1 + id: "1" if_seq_no: 1 if_primary_term: 1 body: @@ -13,7 +13,7 @@ - do: index: index: test_1 - id: 1 + id: "1" body: foo: baz @@ -21,7 +21,7 @@ catch: conflict update: index: test_1 - id: 1 + id: "1" if_seq_no: 234 if_primary_term: 1 body: @@ -30,7 +30,7 @@ - do: update: index: test_1 - id: 1 + id: "1" if_seq_no: 0 if_primary_term: 1 body: @@ -39,7 +39,7 @@ - do: get: index: test_1 - id: 1 + id: "1" - match: { _source: { foo: bar } } @@ -48,7 +48,7 @@ body: - update: _index: test_1 - _id: 1 + _id: "1" if_seq_no: 100 if_primary_term: 200 - doc: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/update/40_routing.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/update/40_routing.yml index 8b67ca512f326..403debb08a64a 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/update/40_routing.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/update/40_routing.yml @@ -20,7 +20,7 @@ - do: update: index: test_1 - id: 1 + id: "1" routing: "5" body: doc: { foo: baz } @@ -29,7 +29,7 @@ - do: get: index: test_1 - id: 1 + id: "1" routing: "5" stored_fields: _routing @@ -39,14 +39,14 @@ catch: missing update: index: test_1 - id: 1 + id: "1" body: doc: { foo: baz } - do: update: index: test_1 - id: 1 + id: "1" routing: "5" _source: foo body: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/update/60_refresh.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/update/60_refresh.yml index 77888fcbb2710..873cdfde05b76 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/update/60_refresh.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/update/60_refresh.yml @@ -14,7 +14,7 @@ - do: update: index: test_1 - id: 1 + id: "1" body: doc: { foo: baz } upsert: { foo: bar } @@ -24,14 +24,14 @@ rest_total_hits_as_int: true index: test_1 body: - query: { term: { _id: 1 }} + query: { term: { _id: "1" }} - match: { hits.total: 0 } - do: update: index: test_1 - id: 2 + id: "2" refresh: true body: doc: { foo: baz } @@ -43,7 +43,7 @@ rest_total_hits_as_int: true index: test_1 body: - query: { term: { _id: 2 }} + query: { term: { _id: "2" }} - match: { hits.total: 1 } @@ -55,7 +55,7 @@ - do: index: index: test_1 - id: 1 + id: "1" refresh: true body: { foo: bar } - is_true: forced_refresh @@ -63,7 +63,7 @@ - do: update: index: test_1 - id: 1 + id: "1" refresh: "" body: doc: {cat: dog} diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/update/80_source_filtering.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/update/80_source_filtering.yml index c8eeba967a6f7..866bf3c6c195a 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/update/80_source_filtering.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/update/80_source_filtering.yml @@ -5,7 
+5,7 @@ - do: update: index: test_1 - id: 1 + id: "1" _source: [foo, bar] body: doc: { foo: baz } diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/update/85_fields_meta.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/update/85_fields_meta.yml index fe76ab5299cda..d9a0f65f36170 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/update/85_fields_meta.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/update/85_fields_meta.yml @@ -12,7 +12,7 @@ - do: update: index: test_1 - id: 1 + id: "1" parent: 5 fields: [ _routing ] body: @@ -24,7 +24,7 @@ - do: get: index: test_1 - id: 1 + id: "1" parent: 5 stored_fields: [ _routing ] diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/update/90_error.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/update/90_error.yml index 9a48d24783b44..4dd48f3462a4f 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/update/90_error.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/update/90_error.yml @@ -7,7 +7,7 @@ catch: /\[UpdateRequest\] unknown field \[dac\] did you mean \[doc\]\?/ update: index: test - id: 1 + id: "1" body: dac: { foo: baz } upsert: { foo: bar } diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/update/95_require_alias.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/update/95_require_alias.yml index f6c95ebd2463e..b8cd9c6eb0a38 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/update/95_require_alias.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/update/95_require_alias.yml @@ -7,7 +7,7 @@ catch: missing update: index: test_require_alias - id: 1 + id: "1" require_alias: true body: doc: { foo: bar, count: 1 } @@ -28,7 +28,7 @@ - do: update: index: test_require_alias - id: 1 + id: "1" require_alias: true body: doc: { foo: bar, count: 1 } diff --git a/rest-api-spec/src/yamlRestTestV7Compat/resources/rest-api-spec/test/search.sort/10_nested_path_filter.yml b/rest-api-spec/src/yamlRestTestV7Compat/resources/rest-api-spec/test/search.sort/10_nested_path_filter.yml index 536ad86378e69..323a5b9abbf1e 100644 --- a/rest-api-spec/src/yamlRestTestV7Compat/resources/rest-api-spec/test/search.sort/10_nested_path_filter.yml +++ b/rest-api-spec/src/yamlRestTestV7Compat/resources/rest-api-spec/test/search.sort/10_nested_path_filter.yml @@ -18,7 +18,7 @@ setup: - do: index: index: "my-index" - id: 1 + id: "1" refresh: true body: offer: @@ -51,7 +51,7 @@ setup: - do: index: index: "my-locations" - id: 1 + id: "1" refresh: true body: offer: diff --git a/rest-api-spec/src/yamlRestTestV7Compat/resources/rest-api-spec/test/search/10_cutoff_frequency.yml b/rest-api-spec/src/yamlRestTestV7Compat/resources/rest-api-spec/test/search/10_cutoff_frequency.yml index 2d645a9419171..591f60578b819 100644 --- a/rest-api-spec/src/yamlRestTestV7Compat/resources/rest-api-spec/test/search/10_cutoff_frequency.yml +++ b/rest-api-spec/src/yamlRestTestV7Compat/resources/rest-api-spec/test/search/10_cutoff_frequency.yml @@ -24,7 +24,7 @@ setup: - do: index: index: "test" - id: 1 + id: "1" body: my_field1: "brown fox jump" my_field2: "xylophone" diff --git a/rest-api-spec/src/yamlRestTestV7Compat/resources/rest-api-spec/test/search/10_type_query.yml b/rest-api-spec/src/yamlRestTestV7Compat/resources/rest-api-spec/test/search/10_type_query.yml index fa4e20fdfa6fe..fdaebbb2b81e7 100644 --- 
a/rest-api-spec/src/yamlRestTestV7Compat/resources/rest-api-spec/test/search/10_type_query.yml +++ b/rest-api-spec/src/yamlRestTestV7Compat/resources/rest-api-spec/test/search/10_type_query.yml @@ -9,7 +9,7 @@ type query throws exception when used: - do: index: index: "test1" - id: 1 + id: "1" type: "cat" refresh: true body: diff --git a/test/framework/src/test/resources/rest-api-spec/test/suite1/20_another_test.yml b/test/framework/src/test/resources/rest-api-spec/test/suite1/20_another_test.yml index 5e08112253ef0..053370bad7e50 100644 --- a/test/framework/src/test/resources/rest-api-spec/test/suite1/20_another_test.yml +++ b/test/framework/src/test/resources/rest-api-spec/test/suite1/20_another_test.yml @@ -5,14 +5,14 @@ index: index: test_1 type: test - id: 1 + id: "1" body: { "foo": "bar" } - do: get: index: test_1 type: _all - id: 1 + id: "1" - match: { _index: test_1 } - match: { _type: test } diff --git a/test/framework/src/test/resources/rest-api-spec/test/suite2/10_basic.yml b/test/framework/src/test/resources/rest-api-spec/test/suite2/10_basic.yml index 745e111740285..7594da3703fff 100644 --- a/test/framework/src/test/resources/rest-api-spec/test/suite2/10_basic.yml +++ b/test/framework/src/test/resources/rest-api-spec/test/suite2/10_basic.yml @@ -5,7 +5,7 @@ index: index: test-weird-index-中文 type: weird.type - id: 1 + id: "1" body: { foo: bar } - match: { _index: test-weird-index-中文 } @@ -17,7 +17,7 @@ get: index: test-weird-index-中文 type: weird.type - id: 1 + id: "1" - match: { _index: test-weird-index-中文 } - match: { _type: weird.type } diff --git a/x-pack/plugin/eql/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/eql/10_basic.yml b/x-pack/plugin/eql/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/eql/10_basic.yml index 866b9dfda214c..e35282bb6bfde 100644 --- a/x-pack/plugin/eql/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/eql/10_basic.yml +++ b/x-pack/plugin/eql/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/eql/10_basic.yml @@ -20,7 +20,7 @@ setup: body: - index: _index: eql_test - _id: 1 + _id: "1" - event: - category: process "@timestamp": 2020-02-03T12:34:56Z @@ -30,7 +30,7 @@ setup: some_keyword: longer than normal - index: _index: eql_test - _id: 2 + _id: "2" - event: - category: process "@timestamp": 2020-02-04T12:34:56Z @@ -39,7 +39,7 @@ setup: valid: true - index: _index: eql_test - _id: 3 + _id: "3" - event: - category: process "@timestamp": 2020-02-05T12:34:56Z @@ -48,7 +48,7 @@ setup: valid: true - index: _index: eql_test - _id: 4 + _id: "4" - event: - category: network "@timestamp": 2020-02-06T12:34:56Z @@ -57,7 +57,7 @@ setup: valid: true - index: _index: eql_test - _id: 5 + _id: "5" - event: - category: network "@timestamp": 2020-02-07T12:34:56Z @@ -66,7 +66,7 @@ setup: valid: true - index: _index: eql_test - _id: 6 + _id: "6" - event: - category: network "@timestamp": 2020-02-08T12:34:56Z @@ -75,7 +75,7 @@ setup: valid: true - index: _index: eql_test - _id: 7 + _id: "7" - event: - category: network "@timestamp": 2020-02-09T12:34:56Z diff --git a/x-pack/plugin/eql/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/eql/20_runtime_mappings.yml b/x-pack/plugin/eql/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/eql/20_runtime_mappings.yml index e5cbebbb7af76..292f69e3d6bef 100644 --- a/x-pack/plugin/eql/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/eql/20_runtime_mappings.yml +++ b/x-pack/plugin/eql/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/eql/20_runtime_mappings.yml @@ -16,7 +16,7 @@ setup: body: - index: 
_index: eql_test - _id: 1 + _id: "1" - event: - category: process "@timestamp": 2020-02-03T12:34:56Z @@ -26,7 +26,7 @@ setup: raw_message: "199.72.81.55 - - [01/Jul/1995:00:00:01 -0400] GET /history/apollo/ HTTP/1.0 200 6245" - index: _index: eql_test - _id: 2 + _id: "2" - event: - category: process "@timestamp": 2020-02-04T12:34:56Z @@ -36,7 +36,7 @@ setup: raw_message: "199.72.81.123 - - [01/Jul/1995:00:00:02 -0400] GET /history/apollo/a HTTP/1.0 200 500" - index: _index: eql_test - _id: 3 + _id: "3" - event: - category: process "@timestamp": 2020-02-05T12:34:56Z @@ -46,7 +46,7 @@ setup: raw_message: "199.72.81.34 - - [01/Jul/1995:00:00:03 -0400] GET /history/apollo/b HTTP/1.0 200 1500" - index: _index: eql_test - _id: 4 + _id: "4" - event: - category: process "@timestamp": 2020-02-05T12:34:57Z @@ -88,7 +88,7 @@ setup: query: 'process where true' fields: ["address"] runtime_mappings: {"address": {"type": "ip","script": "if (doc[\"raw_message.keyword\"].size() == 0) return; else {Matcher m = /\\d+\\.\\d+\\.\\d+\\.\\d+/.matcher(doc[\"raw_message.keyword\"].value);if (m.find()) emit(m.group());}"}} - + - match: {hits.events.0._id: "1"} - match: {hits.events.0.fields.address: ["199.72.81.55"]} - match: {hits.events.0._source.raw_message: "199.72.81.55 - - [01/Jul/1995:00:00:01 -0400] GET /history/apollo/ HTTP/1.0 200 6245"} @@ -165,7 +165,7 @@ setup: - match: {hits.sequences.1.events.2._id: "4"} - match: {hits.sequences.1.events.2._source.@timestamp: "2020-02-05T12:34:57Z"} - match: {hits.sequences.1.events.2.fields.day_of_week: [3]} - + --- "Validate valid runtime mappings request": - do: diff --git a/x-pack/plugin/rollup/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/rollup/10_basic.yml b/x-pack/plugin/rollup/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/rollup/10_basic.yml index 1bab67e6c55e7..22b3da13b5e72 100644 --- a/x-pack/plugin/rollup/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/rollup/10_basic.yml +++ b/x-pack/plugin/rollup/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/rollup/10_basic.yml @@ -23,25 +23,25 @@ setup: body: - index: _index: docs - _id: 1 + _id: "1" - timestamp: "2020-01-01T05:10:00Z" color: "blue" price: 10 - index: _index: docs - _id: 2 + _id: "2" - timestamp: "2020-01-01T05:30:00Z" color: "blue" price: 20 - index: _index: docs - _id: 3 + _id: "3" - timestamp: "2020-01-01T06:10:00Z" color: "red" price: 30 - index: _index: docs - _id: 4 + _id: "4" - timestamp: "2020-01-01T06:30:00Z" color: "green" price: 40 diff --git a/x-pack/plugin/searchable-snapshots/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/clear_cache.yml b/x-pack/plugin/searchable-snapshots/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/clear_cache.yml index fd0f931d84f0d..907ee4c975877 100644 --- a/x-pack/plugin/searchable-snapshots/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/clear_cache.yml +++ b/x-pack/plugin/searchable-snapshots/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/clear_cache.yml @@ -14,15 +14,15 @@ setup: body: - index: _index: docs - _id: 1 + _id: "1" - field: doc - index: _index: docs - _id: 2 + _id: "2" - field: doc - index: _index: docs - _id: 3 + _id: "3" - field: other - do: diff --git a/x-pack/plugin/searchable-snapshots/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/indices_stats.yml b/x-pack/plugin/searchable-snapshots/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/indices_stats.yml index 648c6cc9f467f..3ca2f7238beec 100644 --- 
a/x-pack/plugin/searchable-snapshots/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/indices_stats.yml +++ b/x-pack/plugin/searchable-snapshots/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/indices_stats.yml @@ -17,15 +17,15 @@ setup: body: - index: _index: docs - _id: 1 + _id: "1" - field: foo - index: _index: docs - _id: 2 + _id: "2" - field: bar - index: _index: docs - _id: 3 + _id: "3" - field: baz - do: diff --git a/x-pack/plugin/searchable-snapshots/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/pit.yml b/x-pack/plugin/searchable-snapshots/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/pit.yml index d2172e49031bc..de7ec7619df62 100644 --- a/x-pack/plugin/searchable-snapshots/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/pit.yml +++ b/x-pack/plugin/searchable-snapshots/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/pit.yml @@ -14,15 +14,15 @@ setup: body: - index: _index: docs - _id: 1 + _id: "1" - field: foo - index: _index: docs - _id: 2 + _id: "2" - field: bar - index: _index: docs - _id: 3 + _id: "3" - field: baz - do: diff --git a/x-pack/plugin/searchable-snapshots/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/repository.yml b/x-pack/plugin/searchable-snapshots/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/repository.yml index e620301dc3fae..8c43c0682c33b 100644 --- a/x-pack/plugin/searchable-snapshots/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/repository.yml +++ b/x-pack/plugin/searchable-snapshots/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/repository.yml @@ -14,15 +14,15 @@ setup: body: - index: _index: docs - _id: 1 + _id: "1" - field: foo - index: _index: docs - _id: 2 + _id: "2" - field: bar - index: _index: docs - _id: 3 + _id: "3" - field: baz - do: diff --git a/x-pack/plugin/searchable-snapshots/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/shared_cache_stats.yml b/x-pack/plugin/searchable-snapshots/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/shared_cache_stats.yml index 2d595e8983711..809a45bf938b1 100644 --- a/x-pack/plugin/searchable-snapshots/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/shared_cache_stats.yml +++ b/x-pack/plugin/searchable-snapshots/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/shared_cache_stats.yml @@ -17,15 +17,15 @@ setup: body: - index: _index: docs - _id: 1 + _id: "1" - field: foo - index: _index: docs - _id: 2 + _id: "2" - field: bar - index: _index: docs - _id: 3 + _id: "3" - field: baz - do: diff --git a/x-pack/plugin/searchable-snapshots/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/stats.yml b/x-pack/plugin/searchable-snapshots/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/stats.yml index 9016644790490..11e5e052b871c 100644 --- a/x-pack/plugin/searchable-snapshots/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/stats.yml +++ b/x-pack/plugin/searchable-snapshots/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/stats.yml @@ -14,15 +14,15 @@ setup: body: - index: _index: docs - _id: 1 + _id: "1" - field: foo - index: _index: docs - _id: 2 + _id: "2" - field: bar - index: _index: docs - _id: 3 + _id: "3" - field: baz - do: diff --git a/x-pack/plugin/spatial/src/test/resources/rest-api-spec/test/50_geoline.yml b/x-pack/plugin/spatial/src/test/resources/rest-api-spec/test/50_geoline.yml index b2593f92290d4..ca4a0873c33c4 100644 --- a/x-pack/plugin/spatial/src/test/resources/rest-api-spec/test/50_geoline.yml +++ b/x-pack/plugin/spatial/src/test/resources/rest-api-spec/test/50_geoline.yml @@ -17,15 +17,15 @@ 
body: - index: _index: locations - _id: 1 + _id: "1" - '{"location": [13.37139831, 47.82930284], "rank": 2.0 }' - index: _index: locations - _id: 2 + _id: "2" - '{"location": [13.3784208402, 47.88832084022], "rank": 0.0 }' - index: _index: locations - _id: 3 + _id: "3" - '{"location": [13.371830148701, 48.2084200148], "rank": 1.2 }' - do: diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/aggregate-metrics/10_basic.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/aggregate-metrics/10_basic.yml index aef14870d7cc5..5e6307774d91d 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/aggregate-metrics/10_basic.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/aggregate-metrics/10_basic.yml @@ -18,7 +18,7 @@ - do: index: index: aggregate_metric_test - id: 1 + id: "1" body: metric: min: 18.2 @@ -75,7 +75,7 @@ - do: index: index: test - id: 1 + id: "1" body: metric: min: 18.2 @@ -85,7 +85,7 @@ - do: index: index: test - id: 2 + id: "2" body: metric: min: 50 @@ -151,7 +151,7 @@ - do: index: index: test - id: 1 + id: "1" body: metric: min: 18.2 @@ -161,7 +161,7 @@ - do: index: index: test - id: 2 + id: "2" body: metric: min: 50 @@ -228,7 +228,7 @@ - do: index: index: test - id: 1 + id: "1" body: metric: min: 18.2 @@ -238,7 +238,7 @@ - do: index: index: test - id: 2 + id: "2" body: metric: min: 50 @@ -248,7 +248,7 @@ - do: index: index: test - id: 3 + id: "3" body: metric: min: 150 diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/constant_keyword/10_basic.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/constant_keyword/10_basic.yml index 7a6c2a0d53b0d..be971c00005d9 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/constant_keyword/10_basic.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/constant_keyword/10_basic.yml @@ -31,19 +31,19 @@ setup: - do: index: index: test1 - id: 1 + id: "1" body: {} - do: index: index: test1 - id: 2 + id: "2" body: { "foo": "bar" } - do: index: index: test2 - id: 1 + id: "1" body: {} - do: diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/constant_keyword/20_dynamic_mapping.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/constant_keyword/20_dynamic_mapping.yml index 4357b4ecf171a..09dd1c1bdb6e5 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/constant_keyword/20_dynamic_mapping.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/constant_keyword/20_dynamic_mapping.yml @@ -13,7 +13,7 @@ - do: index: index: test1 - id: 1 + id: "1" body: {} - do: @@ -26,7 +26,7 @@ - do: index: index: test1 - id: 1 + id: "1" body: {} - do: @@ -67,7 +67,7 @@ - do: index: index: test1 - id: 1 + id: "1" body: foo: bar diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/data_streams/10_data_stream_resolvability.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/data_streams/10_data_stream_resolvability.yml index eacbb90bcd058..4d464cfcb4548 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/data_streams/10_data_stream_resolvability.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/data_streams/10_data_stream_resolvability.yml @@ -280,14 +280,14 @@ - do: index: index: simple-data-stream1 - id: 1 + id: "1" op_type: create body: { keys: [1,2,3], '@timestamp': '2020-12-12' } - do: index: index: simple-data-stream1 - id: 2 + id: "2" op_type: create body: { keys: [4,5,6], 
'@timestamp': '2020-12-12' } @@ -526,7 +526,7 @@ body: - create: _index: simple-data-stream1 - _id: 1 + _id: "1" - event: - category: process "@timestamp": 2020-02-03T12:34:56Z diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/graph/10_basic.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/graph/10_basic.yml index 6c691f4f60e14..29e67b3f53479 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/graph/10_basic.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/graph/10_basic.yml @@ -18,13 +18,13 @@ setup: - do: index: index: test_1 - id: 1 + id: "1" body: { keys: [1,2,3] } - do: index: index: test_1 - id: 2 + id: "2" body: { keys: [4,5,6] } - do: diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/preview_datafeed.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/preview_datafeed.yml index bd8b567c2a514..4cff060d25356 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/preview_datafeed.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/preview_datafeed.yml @@ -17,7 +17,7 @@ setup: - do: index: index: airline-data - id: 1 + id: "1" body: > { "time": "2017-02-18T00:00:00Z", @@ -29,7 +29,7 @@ setup: - do: index: index: airline-data - id: 2 + id: "2" body: > { "time": "2017-02-18T00:30:00Z", @@ -41,7 +41,7 @@ setup: - do: index: index: airline-data - id: 3 + id: "3" body: > { "time": "2017-02-18T01:00:00Z", @@ -53,7 +53,7 @@ setup: - do: index: index: airline-data - id: 4 + id: "4" body: > { "time": "2017-02-18T01:01:00Z", @@ -774,7 +774,7 @@ setup: - do: index: index: index-airline-data-date-nanos - id: 1 + id: "1" body: > { "time": "2017-02-18T00:00:00Z", @@ -786,7 +786,7 @@ setup: - do: index: index: index-airline-data-date-nanos - id: 2 + id: "2" body: > { "time": "2017-02-18T00:30:00Z", @@ -798,7 +798,7 @@ setup: - do: index: index: index-airline-data-date-nanos - id: 3 + id: "3" body: > { "time": "2017-02-18T01:00:00Z", @@ -810,7 +810,7 @@ setup: - do: index: index: index-airline-data-date-nanos - id: 4 + id: "4" body: > { "time": "2017-02-18T01:01:00Z", diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/roles/11_idx_arrays.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/roles/11_idx_arrays.yml index 53afc52043d19..21badf967b7aa 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/roles/11_idx_arrays.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/roles/11_idx_arrays.yml @@ -21,7 +21,7 @@ teardown: - do: delete: index: foo - id: 1 + id: "1" ignore: 404 --- @@ -55,7 +55,7 @@ teardown: - do: index: index: foo - id: 1 + id: "1" body: { foo: bar } - do: @@ -63,7 +63,7 @@ teardown: Authorization: "Basic am9lOnMza3JpdC1wYXNzd29yZA==" get: index: foo - id: 1 + id: "1" - match: { _index: foo } - match: { _id: "1"} - match: { _source: { foo: bar }} diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/searchable_snapshots/10_usage.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/searchable_snapshots/10_usage.yml index 4c280d5313c76..1f56a8c373968 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/searchable_snapshots/10_usage.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/searchable_snapshots/10_usage.yml @@ -14,15 +14,15 @@ setup: body: - index: _index: docs - _id: 1 + _id: "1" - field: foo - index: _index: docs - _id: 2 + _id: "2" - field: bar - index: _index: docs - 
_id: 3 + _id: "3" - field: baz - do: diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/security/authz/10_index_doc.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/security/authz/10_index_doc.yml index 5732b19f93665..dab154917b17b 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/security/authz/10_index_doc.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/security/authz/10_index_doc.yml @@ -98,7 +98,7 @@ teardown: - do: headers: { Authorization: "Basic dGVzdF91c2VyOngtcGFjay10ZXN0LXBhc3N3b3Jk" } # test_user create: - id: 1 + id: "1" index: only_index body: > { @@ -108,7 +108,7 @@ teardown: - do: headers: { Authorization: "Basic dGVzdF91c2VyOngtcGFjay10ZXN0LXBhc3N3b3Jk" } # test_user create: - id: 2 + id: "2" index: everything body: > { @@ -162,7 +162,7 @@ teardown: headers: { Authorization: "Basic dGVzdF91c2VyOngtcGFjay10ZXN0LXBhc3N3b3Jk" } # test_user create: refresh: true - id: 7 + id: "7" index: only_read body: > { @@ -174,7 +174,7 @@ teardown: headers: { Authorization: "Basic dGVzdF91c2VyOngtcGFjay10ZXN0LXBhc3N3b3Jk" } # test_user create: refresh: true - id: 8 + id: "8" index: only_delete body: > { diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/security/authz/11_delete_doc.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/security/authz/11_delete_doc.yml index 32e4694311f88..5c19aa3bbfcad 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/security/authz/11_delete_doc.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/security/authz/11_delete_doc.yml @@ -140,14 +140,14 @@ teardown: delete: refresh: true index: only_delete - id: 3 + id: "3" - do: headers: { Authorization: "Basic dGVzdF91c2VyOngtcGFjay10ZXN0LXBhc3N3b3Jk" } # test_user delete: refresh: true index: everything - id: 8 + id: "8" - do: headers: { Authorization: "Basic dGVzdF91c2VyOngtcGFjay10ZXN0LXBhc3N3b3Jk" } # test_user @@ -172,10 +172,10 @@ teardown: body: # The rest test won't send streaming content unless it has multiple bodies, so we send the same delete twice - delete: _index: everything - _id: 10 + _id: "10" - delete: _index: everything - _id: 10 + _id: "10" - do: # superuser search: @@ -199,7 +199,7 @@ teardown: delete: refresh: true index: only_read - id: 1 + id: "1" - do: catch: forbidden @@ -207,7 +207,7 @@ teardown: delete: refresh: true index: only_index - id: 2 + id: "2" - do: headers: { Authorization: "Basic dGVzdF91c2VyOngtcGFjay10ZXN0LXBhc3N3b3Jk" } # test_user diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/security/authz/12_index_alias.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/security/authz/12_index_alias.yml index 1f490ec08dac1..e1901ced2817e 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/security/authz/12_index_alias.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/security/authz/12_index_alias.yml @@ -127,7 +127,7 @@ teardown: - do: headers: { Authorization: "Basic dGVzdF91c2VyOngtcGFjay10ZXN0LXBhc3N3b3Jk" } # test_user create: - id: 1 + id: "1" index: can_write_1 body: > { @@ -137,7 +137,7 @@ teardown: - do: headers: { Authorization: "Basic dGVzdF91c2VyOngtcGFjay10ZXN0LXBhc3N3b3Jk" } # test_user create: - id: 2 + id: "2" index: can_write_2 body: > { @@ -195,7 +195,7 @@ teardown: headers: { Authorization: "Basic dGVzdF91c2VyOngtcGFjay10ZXN0LXBhc3N3b3Jk" } # test_user create: refresh: true - id: 7 + id: "7" index: can_read_1 body: > { @@ -207,7 
+207,7 @@ teardown: headers: { Authorization: "Basic dGVzdF91c2VyOngtcGFjay10ZXN0LXBhc3N3b3Jk" } # test_user create: refresh: true - id: 8 + id: "8" index: can_read_2 body: > { diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/security/authz/13_index_datemath.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/security/authz/13_index_datemath.yml index 2651519e5f785..db1dd72553b3f 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/security/authz/13_index_datemath.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/security/authz/13_index_datemath.yml @@ -46,7 +46,7 @@ teardown: - do: headers: { Authorization: "Basic dGVzdF91c2VyOngtcGFjay10ZXN0LXBhc3N3b3Jk" } # test_user index: - id: 1 + id: "1" index: "" body: > { @@ -81,7 +81,7 @@ teardown: catch: forbidden headers: { Authorization: "Basic dGVzdF91c2VyOngtcGFjay10ZXN0LXBhc3N3b3Jk" } # test_user index: - id: 4 + id: "4" index: "" body: > { diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/security/authz/14_cat_indices.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/security/authz/14_cat_indices.yml index 63ab3527c58b5..09d0d416e54da 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/security/authz/14_cat_indices.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/security/authz/14_cat_indices.yml @@ -135,19 +135,19 @@ teardown: - do: index: index: index_to_monitor - id: 0 + id: "0" body: { foo: bar } - do: index: index: index_to_monitor - id: 1 + id: "1" body: { foo: bar } - do: index: index: index_to_monitor - id: 2 + id: "2" body: { foo: bar } - do: diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/security/authz/20_get_doc.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/security/authz/20_get_doc.yml index 4e9367c238ae7..6e22bb4b8b43e 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/security/authz/20_get_doc.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/security/authz/20_get_doc.yml @@ -126,7 +126,7 @@ teardown: - do: headers: { Authorization: "Basic dGVzdF91c2VyOngtcGFjay10ZXN0LXBhc3N3b3Jk" } # test_user get: - id: 1 + id: "1" index: only_read - match: { _index: only_read } @@ -136,7 +136,7 @@ teardown: - do: headers: { Authorization: "Basic dGVzdF91c2VyOngtcGFjay10ZXN0LXBhc3N3b3Jk" } # test_user get: - id: 4 + id: "4" index: read_write - match: { _index: read_write } - match: { _id: "4" } @@ -145,7 +145,7 @@ teardown: - do: headers: { Authorization: "Basic dGVzdF91c2VyOngtcGFjay10ZXN0LXBhc3N3b3Jk" } # test_user get: - id: 5 + id: "5" index: everything - match: { _index: everything } - match: { _id: "5" } @@ -206,14 +206,14 @@ teardown: catch: forbidden headers: { Authorization: "Basic dGVzdF91c2VyOngtcGFjay10ZXN0LXBhc3N3b3Jk" } # test_user get: - id: 2 + id: "2" index: only_index - do: catch: forbidden headers: { Authorization: "Basic dGVzdF91c2VyOngtcGFjay10ZXN0LXBhc3N3b3Jk" } # test_user get: - id: 3 + id: "3" index: only_delete - do: diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/security/authz/30_dynamic_put_mapping.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/security/authz/30_dynamic_put_mapping.yml index 8176a276b0301..7b50942478751 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/security/authz/30_dynamic_put_mapping.yml +++ 
b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/security/authz/30_dynamic_put_mapping.yml @@ -70,7 +70,7 @@ teardown: - do: headers: { Authorization: "Basic dGVzdF91c2VyOngtcGFjay10ZXN0LXBhc3N3b3Jk" } # test_user create: - id: 1 + id: "1" index: write_alias body: > { @@ -80,7 +80,7 @@ teardown: - do: headers: { Authorization: "Basic dGVzdF91c2VyOngtcGFjay10ZXN0LXBhc3N3b3Jk" } # test_user create: - id: 2 + id: "2" index: write_alias body: > { diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/security/authz/31_rollover_using_alias.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/security/authz/31_rollover_using_alias.yml index fd9f6d1d46050..73979883291c3 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/security/authz/31_rollover_using_alias.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/security/authz/31_rollover_using_alias.yml @@ -72,7 +72,7 @@ teardown: - do: headers: { Authorization: "Basic dGVzdF91c2VyOngtcGFjay10ZXN0LXBhc3N3b3Jk" } # test_user create: - id: 1 + id: "1" index: write_manage_alias body: > { @@ -82,7 +82,7 @@ teardown: - do: headers: { Authorization: "Basic dGVzdF91c2VyOngtcGFjay10ZXN0LXBhc3N3b3Jk" } # test_user create: - id: 2 + id: "2" index: write_manage_alias body: > { @@ -120,7 +120,7 @@ teardown: - do: headers: { Authorization: "Basic dGVzdF91c2VyOngtcGFjay10ZXN0LXBhc3N3b3Jk" } # test_user create: - id: 3 + id: "3" index: write_manage_alias body: > { diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/security/authz/50_data_streams.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/security/authz/50_data_streams.yml index e1e6f86ef51aa..4757247a946d3 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/security/authz/50_data_streams.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/security/authz/50_data_streams.yml @@ -118,7 +118,7 @@ teardown: - do: # superuser index: index: simple-data-stream1 - id: 1 + id: "1" op_type: create body: { foo: bar, "@timestamp": "2020-12-12" } @@ -158,7 +158,7 @@ teardown: headers: { Authorization: "Basic dGVzdF91c2VyOngtcGFjay10ZXN0LXBhc3N3b3Jk" } # test_user index: index: $idx0name - id: 1 + id: "1" if_seq_no: $seqno if_primary_term: $primary_term op_type: index @@ -257,7 +257,7 @@ teardown: headers: { Authorization: "Basic dGVzdF91c2VyOngtcGFjay10ZXN0LXBhc3N3b3Jk" } # test_user index: index: $idx0name - id: 1 + id: "1" op_type: index if_seq_no: $seqno if_primary_term: $primary_term @@ -267,7 +267,7 @@ teardown: - do: headers: { Authorization: "Basic dGVzdF91c2VyOngtcGFjay10ZXN0LXBhc3N3b3Jk" } # test_user index: - id: 2 + id: "2" op_type: create index: write-data-stream1 body: { foo: bar, bar: baz, "@timestamp": "2000-12-12" } @@ -287,7 +287,7 @@ teardown: headers: { Authorization: "Basic dGVzdF91c2VyOngtcGFjay10ZXN0LXBhc3N3b3Jk" } # test_user index: index: $idx1name - id: 2 + id: "2" op_type: index if_seq_no: $seqno if_primary_term: $primary_term @@ -526,7 +526,7 @@ teardown: headers: { Authorization: "Basic dGVzdF91c2VyOngtcGFjay10ZXN0LXBhc3N3b3Jk" } # test_user index: index: simple-allows-auto-configure - id: 1 + id: "1" op_type: create body: { foo: bar, "@timestamp": "2020-12-12" } @@ -536,7 +536,7 @@ teardown: headers: { Authorization: "Basic dGVzdF91c2VyOngtcGFjay10ZXN0LXBhc3N3b3Jk" } # test_user index: index: simple-data-stream1 - id: 1 + id: "1" op_type: create body: { foo: bar, "@timestamp": "2020-12-12" } diff --git 
a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/security/authz/55_auto_configure.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/security/authz/55_auto_configure.yml index ab818c3f12e64..85c455c41277e 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/security/authz/55_auto_configure.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/security/authz/55_auto_configure.yml @@ -67,7 +67,7 @@ teardown: headers: { Authorization: "Basic dGVzdF91c2VyOngtcGFjay10ZXN0LXBhc3N3b3Jk" } # test_user index: index: index-auto-configure - id: 1 + id: "1" op_type: create body: { foo: bar, "@timestamp": "2020-12-12" } @@ -77,6 +77,6 @@ teardown: headers: { Authorization: "Basic dGVzdF91c2VyOngtcGFjay10ZXN0LXBhc3N3b3Jk" } # test_user index: index: index-limited - id: 1 + id: "1" op_type: create body: { "@timestamp": "2020-12-12" } diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/set_security_user/10_small_users_one_index.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/set_security_user/10_small_users_one_index.yml index 7442c74a9eae6..7644764bc518a 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/set_security_user/10_small_users_one_index.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/set_security_user/10_small_users_one_index.yml @@ -97,7 +97,7 @@ teardown: Authorization: "Basic am9lOngtcGFjay10ZXN0LXBhc3N3b3Jk" index: index: shared_logs - id: 1 + id: "1" pipeline: "my_pipeline" body: > { @@ -108,7 +108,7 @@ teardown: Authorization: "Basic am9objp4LXBhY2stdGVzdC1wYXNzd29yZA==" index: index: shared_logs - id: 2 + id: "2" pipeline: "my_pipeline" body: > { diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/set_security_user/20_api_key.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/set_security_user/20_api_key.yml index c30192d7f5d7c..7576dfe7ce250 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/set_security_user/20_api_key.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/set_security_user/20_api_key.yml @@ -48,7 +48,7 @@ teardown: Authorization: ApiKey ${login_creds} index: index: index - id: 1 + id: "1" pipeline: "my_pipeline" body: > { @@ -69,7 +69,7 @@ teardown: Authorization: ApiKey ${login_creds} index: index: index - id: 2 + id: "2" pipeline: "my_pipeline" body: > { @@ -91,7 +91,7 @@ teardown: Authorization: ApiKey ${login_creds} index: index: index - id: 3 + id: "3" pipeline: "my_pipeline" body: > { @@ -105,7 +105,7 @@ teardown: - do: get: index: index - id: 1 + id: "1" - match: { _source.user.api_key.name: "with-metadata" } - match: { _source.user.api_key.id: $id_with_metadata } - match: { _source.user.api_key.metadata: { "string": "hello", "number": 42, "complex": {"foo": "bar", "values": [1, 3, 5]} } } @@ -113,7 +113,7 @@ teardown: - do: get: index: index - id: 2 + id: "2" - match: { _source.user.api_key.name: "no-metadata" } - match: { _source.user.api_key.id: $id_no_metadata } - is_false: _source.user.api_key.metadata @@ -121,7 +121,7 @@ teardown: - do: get: index: index - id: 3 + id: "3" - match: { _source.user.api_key.name: "empty-metadata" } - match: { _source.user.api_key.id: $id_empty_metadata } - is_false: _source.user.api_key.metadata diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/snapshot/10_basic.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/snapshot/10_basic.yml index 19484e4dd7efe..1d370082c8e48 
100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/snapshot/10_basic.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/snapshot/10_basic.yml @@ -30,7 +30,7 @@ setup: - do: index: index: test_index - id: 1 + id: "1" body: { foo: bar } - do: indices.flush: diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/spatial/20_geo_centroid.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/spatial/20_geo_centroid.yml index a792ee33ff371..10ab8bed65d34 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/spatial/20_geo_centroid.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/spatial/20_geo_centroid.yml @@ -15,27 +15,27 @@ body: - index: _index: locations - _id: 1 + _id: "1" - '{"location": "POINT(4.912350 52.374081)", "city": "Amsterdam", "name": "NEMO Science Museum"}' - index: _index: locations - _id: 2 + _id: "2" - '{"location": "POINT(4.901618 52.369219)", "city": "Amsterdam", "name": "Museum Het Rembrandthuis"}' - index: _index: locations - _id: 3 + _id: "3" - '{"location": "POINT(4.914722 52.371667)", "city": "Amsterdam", "name": "Nederlands Scheepvaartmuseum"}' - index: _index: locations - _id: 4 + _id: "4" - '{"location": "POINT(4.405200 51.222900)", "city": "Antwerp", "name": "Letterenhuis"}' - index: _index: locations - _id: 5 + _id: "5" - '{"location": "POINT(2.336389 48.861111)", "city": "Paris", "name": "Musée du Louvre"}' - index: _index: locations - _id: 6 + _id: "6" - '{"location": "POINT(2.327000 48.860000)", "city": "Paris", "name": "Musée dOrsay"}' - do: diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/spatial/30_geotile_grid.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/spatial/30_geotile_grid.yml index 2b7471bdf858e..7696e8ab6f16d 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/spatial/30_geotile_grid.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/spatial/30_geotile_grid.yml @@ -15,27 +15,27 @@ body: - index: _index: locations - _id: 1 + _id: "1" - '{"location": "POINT(4.912350 52.374081)", "city": "Amsterdam", "name": "NEMO Science Museum"}' - index: _index: locations - _id: 2 + _id: "2" - '{"location": "POINT(4.901618 52.369219)", "city": "Amsterdam", "name": "Museum Het Rembrandthuis"}' - index: _index: locations - _id: 3 + _id: "3" - '{"location": "POINT(4.914722 52.371667)", "city": "Amsterdam", "name": "Nederlands Scheepvaartmuseum"}' - index: _index: locations - _id: 4 + _id: "4" - '{"location": "POINT(4.405200 51.222900)", "city": "Antwerp", "name": "Letterenhuis"}' - index: _index: locations - _id: 5 + _id: "5" - '{"location": "POINT(2.336389 48.861111)", "city": "Paris", "name": "Musée du Louvre"}' - index: _index: locations - _id: 6 + _id: "6" - '{"location": "POINT(2.327000 48.860000)", "city": "Paris", "name": "Musée dOrsay"}' - do: diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/spatial/40_geohash_grid.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/spatial/40_geohash_grid.yml index 4f41f6b75c481..809d8697f505c 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/spatial/40_geohash_grid.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/spatial/40_geohash_grid.yml @@ -15,27 +15,27 @@ body: - index: _index: locations - _id: 1 + _id: "1" - '{"location": "POINT(4.912350 52.374081)", "city": "Amsterdam", "name": "NEMO Science Museum"}' - index: _index: locations - _id: 2 + _id: "2" 
- '{"location": "POINT(4.901618 52.369219)", "city": "Amsterdam", "name": "Museum Het Rembrandthuis"}' - index: _index: locations - _id: 3 + _id: "3" - '{"location": "POINT(4.914722 52.371667)", "city": "Amsterdam", "name": "Nederlands Scheepvaartmuseum"}' - index: _index: locations - _id: 4 + _id: "4" - '{"location": "POINT(4.405200 51.222900)", "city": "Antwerp", "name": "Letterenhuis"}' - index: _index: locations - _id: 5 + _id: "5" - '{"location": "POINT(2.336389 48.861111)", "city": "Paris", "name": "Musée du Louvre"}' - index: _index: locations - _id: 6 + _id: "6" - '{"location": "POINT(2.327000 48.860000)", "city": "Paris", "name": "Musée dOrsay"}' - do: diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/spatial/60_geo_line.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/spatial/60_geo_line.yml index f62eb6f10bec5..1e146abef8582 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/spatial/60_geo_line.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/spatial/60_geo_line.yml @@ -17,15 +17,15 @@ body: - index: _index: races - _id: 1 + _id: "1" - '{"position": "POINT(4.912350 52.374081)", "race_id": "Amsterdam", "timestamp": 4}' - index: _index: races - _id: 2 + _id: "2" - '{"position": "POINT(4.901618 52.369219)", "race_id": "Amsterdam", "timestamp": 3}' - index: _index: races - _id: 3 + _id: "3" - '{"position": "POINT(4.914722 52.371667)", "race_id": "Amsterdam", "timestamp": 10}' - do: @@ -84,27 +84,27 @@ body: - index: _index: test1 - _id: 1 + _id: "1" - '{ "date" : "2020-01-01T01:00:00.0Z", "entity" : "e1", "location" : { "lat" : 50.3, "lon" : 0.13 }}' - index: _index: test1 - _id: 2 + _id: "2" - '{ "date" : "2020-01-01T01:00:01.0Z", "entity" : "e1", "location" : { "lat" : 50.4, "lon" : 0.13 } }' - index: _index: test1 - _id: 3 + _id: "3" - '{ "date" : "2020-01-01T01:00:03.0Z", "entity" : "e1", "location" : { "lat" : 50.5, "lon" : 0.13 }}' - index: _index: test2 - _id: 1 + _id: "1" - '{ "date" : "2020-01-02T02:00:01.0Z", "entity" : "e2", "location" : { "lat" : 51.3, "lon" : 0.13 }}' - index: _index: test2 - _id: 2 + _id: "2" - '{ "date" : "2020-01-02T02:00:02.0Z", "entity" : "e2", "location" : { "lat" : 51.4, "lon" : 0.13 }}' - index: _index: test2 - _id: 3 + _id: "3" - '{ "date" : "2020-01-02T02:00:03.0Z", "entity" : "e2", "location" : { "lat" : 51.5, "lon" : 0.13 }}' - do: diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/spatial/70_script_doc_values.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/spatial/70_script_doc_values.yml index 3831268c4f783..422afd9522aef 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/spatial/70_script_doc_values.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/spatial/70_script_doc_values.yml @@ -13,7 +13,7 @@ setup: - do: index: index: test - id: 1 + id: "1" body: geo_shape: "POLYGON((24.04725 59.942,24.04825 59.94125,24.04875 59.94125,24.04875 59.94175,24.048 59.9425,24.0475 59.94275,24.0465 59.94225,24.046 59.94225,24.04575 59.9425,24.04525 59.94225,24.04725 59.942))" - do: diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/sql/sql.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/sql/sql.yml index af45542eefb11..94de0e786a019 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/sql/sql.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/sql/sql.yml @@ -6,17 +6,17 @@ setup: body: - index: _index: test - _id: 1 
+ _id: "1" - str: test1 int: 1 - index: _index: test - _id: 2 + _id: "2" - str: test2 int: 2 - index: _index: test - _id: 3 + _id: "3" - str: test3 int: 3 diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/sql/translate.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/sql/translate.yml index 6439b6f1be92b..21ba2e0cf2883 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/sql/translate.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/sql/translate.yml @@ -6,7 +6,7 @@ body: - index: _index: test - _id: 1 + _id: "1" - str: test1 int: 1 diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/terms_enum/10_basic.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/terms_enum/10_basic.yml index d29701277d12b..a0d2caa17a946 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/terms_enum/10_basic.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/terms_enum/10_basic.yml @@ -186,24 +186,24 @@ setup: - do: index: index: test_k - id: 1 + id: "1" body: { foo: "bar_k", foo_non_indexed: "bar_k", "timestamp":"2021-01-01T01:01:01.000Z" } - do: index: index: test_ck - id: 2 + id: "2" body: { other: "foo", "timestamp":"2020-01-01T01:01:01.000Z" } - do: index: index: test_f - id: 3 + id: "3" body: { foo: { bar: "bar_f" }, "timestamp":"2019-01-01T01:01:01.000Z" } - do: index: index: test_security - id: 4 + id: "4" body: { foo: "bar_dls", foo_non_indexed: "bar_dls"} - do: #superuser diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/transform/preview_transforms.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/transform/preview_transforms.yml index 06ddbb21d89f3..08b6bcd978f1e 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/transform/preview_transforms.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/transform/preview_transforms.yml @@ -16,7 +16,7 @@ setup: - do: index: index: airline-data - id: 1 + id: "1" body: > { "time": "2017-02-18T00:00:00Z", @@ -28,7 +28,7 @@ setup: - do: index: index: airline-data - id: 2 + id: "2" body: > { "time": "2017-02-18T00:30:00Z", @@ -40,7 +40,7 @@ setup: - do: index: index: airline-data - id: 3 + id: "3" body: > { "time": "2017-02-18T01:00:00Z", @@ -52,7 +52,7 @@ setup: - do: index: index: airline-data - id: 4 + id: "4" body: > { "time": "2017-02-18T01:01:00Z", diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/transform/transforms_cat_apis.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/transform/transforms_cat_apis.yml index ae3554ce9e634..640f5af7b58c7 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/transform/transforms_cat_apis.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/transform/transforms_cat_apis.yml @@ -57,7 +57,7 @@ setup: - do: index: index: airline-data - id: 1 + id: "1" body: > { "time": "2017-02-18T00:00:00Z", @@ -69,7 +69,7 @@ setup: - do: index: index: airline-data - id: 2 + id: "2" body: > { "time": "2017-02-18T00:30:00Z", @@ -81,7 +81,7 @@ setup: - do: index: index: airline-data - id: 3 + id: "3" body: > { "time": "2017-02-18T01:00:00Z", @@ -93,7 +93,7 @@ setup: - do: index: index: airline-data - id: 4 + id: "4" body: > { "time": "2017-02-18T01:01:00Z", diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/vectors/10_dense_vector_basic.yml 
b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/vectors/10_dense_vector_basic.yml index d95f249ae0a26..4418a7a602eae 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/vectors/10_dense_vector_basic.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/vectors/10_dense_vector_basic.yml @@ -21,7 +21,7 @@ setup: - do: index: index: test-index - id: 1 + id: "1" body: vector: [230.0, 300.33, -34.8988, 15.555, -200.0] indexed_vector: [230.0, 300.33, -34.8988, 15.555, -200.0] @@ -29,7 +29,7 @@ setup: - do: index: index: test-index - id: 2 + id: "2" body: vector: [-0.5, 100.0, -13, 14.8, -156.0] indexed_vector: [-0.5, 100.0, -13, 14.8, -156.0] @@ -37,7 +37,7 @@ setup: - do: index: index: test-index - id: 3 + id: "3" body: vector: [0.5, 111.3, -13.0, 14.8, -156.0] indexed_vector: [0.5, 111.3, -13.0, 14.8, -156.0] diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/vectors/15_dense_vector_l1l2.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/vectors/15_dense_vector_l1l2.yml index a1a59f77aece7..d0e3d4dcdf0c5 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/vectors/15_dense_vector_l1l2.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/vectors/15_dense_vector_l1l2.yml @@ -16,21 +16,21 @@ setup: - do: index: index: test-index - id: 1 + id: "1" body: my_dense_vector: [230.0, 300.33, -34.8988, 15.555, -200.0] - do: index: index: test-index - id: 2 + id: "2" body: my_dense_vector: [-0.5, 100.0, -13, 14.8, -156.0] - do: index: index: test-index - id: 3 + id: "3" body: my_dense_vector: [0.5, 111.3, -13.0, 14.8, -156.0] diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/vectors/20_dense_vector_special_cases.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/vectors/20_dense_vector_special_cases.yml index eb7b9850f4399..64ec4bf3639d2 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/vectors/20_dense_vector_special_cases.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/vectors/20_dense_vector_special_cases.yml @@ -31,7 +31,7 @@ setup: catch: bad_request index: index: test-index - id: 1 + id: "1" body: vector: [10, 2] - match: { error.type: "mapper_parsing_exception" } @@ -40,7 +40,7 @@ setup: catch: bad_request index: index: test-index - id: 1 + id: "1" body: indexed_vector: [10, 2] - match: { error.type: "mapper_parsing_exception" } @@ -50,14 +50,14 @@ setup: - do: index: index: test-index - id: 1 + id: "1" body: vector: [10, 10, 10] - do: index: index: test-index - id: 2 + id: "2" body: vector: [10.5, 10.9, 10.4] @@ -110,7 +110,7 @@ setup: - do: index: index: test-index - id: 1 + id: "1" body: vector: [1, 2, 3] @@ -152,14 +152,14 @@ setup: - do: index: index: test-index - id: 1 + id: "1" body: vector: [10, 10, 10] - do: index: index: test-index - id: 2 + id: "2" body: some_other_field: "random_value" diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/vectors/40_knn_search.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/vectors/40_knn_search.yml index d0711bf8a6514..18aaf2ab8264e 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/vectors/40_knn_search.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/vectors/40_knn_search.yml @@ -25,7 +25,7 @@ setup: - do: index: index: test - id: 2 + id: "2" body: name: moose.jpg vector: [-0.5, 100.0, -13, 14.8, -156.0] @@ -33,7 +33,7 @@ setup: - do: index: index: test - id: 3 + id: 
"3" body: name: rabbit.jpg vector: [0.5, 111.3, -13.0, 14.8, -156.0] diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/vectors/50_dense_vector_field_usage.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/vectors/50_dense_vector_field_usage.yml index 004c815556fe1..610c381eba2cf 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/vectors/50_dense_vector_field_usage.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/vectors/50_dense_vector_field_usage.yml @@ -30,7 +30,7 @@ setup: - do: index: index: futest - id: 2 + id: "2" body: name: moose.jpg vector: [ -0.5, 100.0, -13, 14.8, -156.0 ] @@ -38,7 +38,7 @@ setup: - do: index: index: futest - id: 3 + id: "3" body: name: rabbit.jpg vector: [ 0.5, 111.3, -13.0, 14.8, -156.0 ] diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/wildcard/10_wildcard_basic.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/wildcard/10_wildcard_basic.yml index 2e9bcc5dd6b29..67444b09a8d32 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/wildcard/10_wildcard_basic.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/wildcard/10_wildcard_basic.yml @@ -20,20 +20,20 @@ setup: - do: index: index: test-index - id: 1 + id: "1" body: my_wildcard: hello world null_wildcard: null - do: index: index: test-index - id: 2 + id: "2" body: my_wildcard: goodbye world - do: index: index: test-index - id: 3 + id: "3" body: my_wildcard: cAsE iNsEnSiTiVe World null_wildcard: HAS_VALUE diff --git a/x-pack/plugin/src/yamlRestTestV7Compat/resources/rest-api-spec/test/freeze.gone/10_basic_compat.yml b/x-pack/plugin/src/yamlRestTestV7Compat/resources/rest-api-spec/test/freeze.gone/10_basic_compat.yml index 234e4e0b9e5f7..26cf471ea9db7 100644 --- a/x-pack/plugin/src/yamlRestTestV7Compat/resources/rest-api-spec/test/freeze.gone/10_basic_compat.yml +++ b/x-pack/plugin/src/yamlRestTestV7Compat/resources/rest-api-spec/test/freeze.gone/10_basic_compat.yml @@ -9,7 +9,7 @@ setup: - do: index: index: some-test-index-1 - id: 1 + id: "1" body: { foo: bar } --- diff --git a/x-pack/plugin/watcher/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/mustache/25_array_compare.yml b/x-pack/plugin/watcher/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/mustache/25_array_compare.yml index 4cdf66d749aa7..28cfa43fe8abe 100644 --- a/x-pack/plugin/watcher/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/mustache/25_array_compare.yml +++ b/x-pack/plugin/watcher/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/mustache/25_array_compare.yml @@ -8,25 +8,25 @@ - do: index: index: test_1 - id: 1 + id: "1" body: { level: 0 } - do: index: index: test_1 - id: 2 + id: "2" body: { level: 0 } - do: index: index: test_1 - id: 3 + id: "3" body: { level: 0 } - do: index: index: test_1 - id: 4 + id: "4" body: { level: 1 } - do: indices.refresh: {} diff --git a/x-pack/plugin/watcher/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/mustache/30_search_input.yml b/x-pack/plugin/watcher/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/mustache/30_search_input.yml index 954f0d3e3f214..842a340be1bd5 100644 --- a/x-pack/plugin/watcher/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/mustache/30_search_input.yml +++ b/x-pack/plugin/watcher/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/mustache/30_search_input.yml @@ -7,7 +7,7 @@ setup: - do: index: index: idx - id: 1 + id: "1" body: > { "date" : "2015-01-01T00:00:00", @@ -16,7 +16,7 @@ setup: - 
do: index: index: idx - id: 2 + id: "2" body: > { "date" : "2015-01-02T00:00:00", @@ -25,7 +25,7 @@ setup: - do: index: index: idx - id: 3 + id: "3" body: > { "date" : "2015-01-03T00:00:00", @@ -34,7 +34,7 @@ setup: - do: index: index: idx - id: 4 + id: "4" body: > { "date" : "2015-01-04T00:00:00", diff --git a/x-pack/plugin/watcher/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/mustache/40_search_transform.yml b/x-pack/plugin/watcher/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/mustache/40_search_transform.yml index 91b76e5c66eae..8fd23c455bec4 100644 --- a/x-pack/plugin/watcher/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/mustache/40_search_transform.yml +++ b/x-pack/plugin/watcher/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/mustache/40_search_transform.yml @@ -7,7 +7,7 @@ setup: - do: index: index: idx - id: 1 + id: "1" body: > { "date" : "2015-01-01T00:00:00", @@ -16,7 +16,7 @@ setup: - do: index: index: idx - id: 2 + id: "2" body: > { "date" : "2015-01-02T00:00:00", @@ -25,7 +25,7 @@ setup: - do: index: index: idx - id: 3 + id: "3" body: > { "date" : "2015-01-03T00:00:00", @@ -34,7 +34,7 @@ setup: - do: index: index: idx - id: 4 + id: "4" body: > { "date" : "2015-01-04T00:00:00", diff --git a/x-pack/plugin/watcher/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/mustache/50_webhook_url_escaping.yml b/x-pack/plugin/watcher/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/mustache/50_webhook_url_escaping.yml index 0ed3cfe04480f..c2da572490c68 100644 --- a/x-pack/plugin/watcher/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/mustache/50_webhook_url_escaping.yml +++ b/x-pack/plugin/watcher/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/mustache/50_webhook_url_escaping.yml @@ -3,7 +3,7 @@ - do: index: index: - id: 1 + id: "1" refresh: true body: { foo: bar } diff --git a/x-pack/plugin/watcher/qa/with-security/src/yamlRestTest/resources/rest-api-spec/test/watcher/security/20_test_run_as_execute_watch.yml b/x-pack/plugin/watcher/qa/with-security/src/yamlRestTest/resources/rest-api-spec/test/watcher/security/20_test_run_as_execute_watch.yml index b50f20afd0358..19bf7b940a662 100644 --- a/x-pack/plugin/watcher/qa/with-security/src/yamlRestTest/resources/rest-api-spec/test/watcher/security/20_test_run_as_execute_watch.yml +++ b/x-pack/plugin/watcher/qa/with-security/src/yamlRestTest/resources/rest-api-spec/test/watcher/security/20_test_run_as_execute_watch.yml @@ -8,7 +8,7 @@ setup: - do: index: index: my_test_index - id: 1 + id: "1" refresh: true body: > { @@ -333,7 +333,7 @@ teardown: - do: get: index: my_test_index - id: 1 + id: "1" - match: { _id: "1" } @@ -382,6 +382,6 @@ teardown: - do: get: index: index_not_allowed_to_read - id: 1 + id: "1" catch: forbidden diff --git a/x-pack/qa/reindex-tests-with-security/src/test/resources/rest-api-spec/test/10_reindex.yml b/x-pack/qa/reindex-tests-with-security/src/test/resources/rest-api-spec/test/10_reindex.yml index e63cdefa0bee4..6bcb8c1127394 100644 --- a/x-pack/qa/reindex-tests-with-security/src/test/resources/rest-api-spec/test/10_reindex.yml +++ b/x-pack/qa/reindex-tests-with-security/src/test/resources/rest-api-spec/test/10_reindex.yml @@ -7,7 +7,7 @@ setup: - do: index: index: source - id: 1 + id: "1" body: { "text": "test" } - do: indices.refresh: {} @@ -27,7 +27,7 @@ setup: - do: index: index: source - id: 1 + id: "1" body: { "text": "test" } - do: indices.refresh: {} @@ -59,7 +59,7 @@ setup: - do: index: index: source - id: 1 + id: "1" body: { "text": "test" } - do: 
indices.refresh: {} @@ -91,7 +91,7 @@ setup: - do: index: index: source - id: 1 + id: "1" body: { "text": "test" } - do: indices.refresh: {} @@ -112,7 +112,7 @@ setup: - do: index: index: source - id: 1 + id: "1" body: { "text": "test" } - do: indices.refresh: {} @@ -132,12 +132,12 @@ setup: - do: index: index: source - id: 1 + id: "1" body: { "user": "kimchy" } - do: index: index: source - id: 2 + id: "2" body: { "user": "another" } - do: indices.refresh: {} @@ -177,12 +177,12 @@ setup: - do: index: index: source - id: 1 + id: "1" body: { "text": "test" } - do: index: index: source - id: 2 + id: "2" body: { "text": "test", "hidden": true } - do: indices.refresh: {} @@ -226,7 +226,7 @@ setup: - do: index: index: source - id: 1 + id: "1" body: { "text": "test", "foo": "z", "bar": "z" } - do: indices.refresh: {} @@ -278,7 +278,7 @@ setup: - do: index: index: dest - id: 1 + id: "1" body: { "text": "test" } - do: indices.refresh: {} @@ -298,7 +298,7 @@ setup: - do: index: index: dest - id: 1 + id: "1" body: { "text": "test" } - do: indices.refresh: {} @@ -318,7 +318,7 @@ setup: - do: index: index: source - id: 1 + id: "1" body: { "text": "test" } - do: indices.refresh: {} diff --git a/x-pack/qa/reindex-tests-with-security/src/test/resources/rest-api-spec/test/15_reindex_from_remote.yml b/x-pack/qa/reindex-tests-with-security/src/test/resources/rest-api-spec/test/15_reindex_from_remote.yml index 773c8db88991c..06bb898a99944 100644 --- a/x-pack/qa/reindex-tests-with-security/src/test/resources/rest-api-spec/test/15_reindex_from_remote.yml +++ b/x-pack/qa/reindex-tests-with-security/src/test/resources/rest-api-spec/test/15_reindex_from_remote.yml @@ -6,7 +6,7 @@ - do: index: index: source - id: 1 + id: "1" body: { "text": "test" } - do: indices.refresh: {} @@ -41,7 +41,7 @@ - do: index: index: source - id: 1 + id: "1" body: { "text": "test" } - do: indices.refresh: {} @@ -87,7 +87,7 @@ - do: index: index: source - id: 1 + id: "1" body: { "text": "test" } - do: indices.refresh: {} @@ -132,7 +132,7 @@ - do: index: index: source - id: 1 + id: "1" body: { "text": "test" } - do: indices.refresh: {} @@ -166,12 +166,12 @@ - do: index: index: source - id: 1 + id: "1" body: { "user": "kimchy" } - do: index: index: source - id: 2 + id: "2" body: { "user": "another" } - do: indices.refresh: {} @@ -226,12 +226,12 @@ - do: index: index: source - id: 1 + id: "1" body: { "text": "test" } - do: index: index: source - id: 2 + id: "2" body: { "text": "test", "hidden": true } - do: indices.refresh: {} @@ -289,7 +289,7 @@ - do: index: index: source - id: 1 + id: "1" body: { "text": "test", "foo": "z", "bar": "z" } - do: indices.refresh: {} @@ -356,7 +356,7 @@ - do: index: index: source - id: 1 + id: "1" body: { "text": "test" } - do: indices.refresh: {} @@ -391,7 +391,7 @@ - do: index: index: source - id: 1 + id: "1" body: { "text": "test" } - do: indices.refresh: {} diff --git a/x-pack/qa/reindex-tests-with-security/src/test/resources/rest-api-spec/test/20_update_by_query.yml b/x-pack/qa/reindex-tests-with-security/src/test/resources/rest-api-spec/test/20_update_by_query.yml index 8512e4e6308b1..c2177cce77c52 100644 --- a/x-pack/qa/reindex-tests-with-security/src/test/resources/rest-api-spec/test/20_update_by_query.yml +++ b/x-pack/qa/reindex-tests-with-security/src/test/resources/rest-api-spec/test/20_update_by_query.yml @@ -7,7 +7,7 @@ setup: - do: index: index: source - id: 1 + id: "1" body: { "text": "test" } - do: indices.refresh: {} @@ -37,7 +37,7 @@ setup: - do: index: index: source - id: 1 + id: "1" body: { 
"text": "test" } - do: indices.refresh: {} @@ -68,7 +68,7 @@ setup: - do: index: index: source - id: 1 + id: "1" body: { "text": "test" } - do: indices.refresh: {} @@ -99,7 +99,7 @@ setup: - do: index: index: source - id: 1 + id: "1" body: { "text": "test" } - do: indices.refresh: {} @@ -116,7 +116,7 @@ setup: - do: index: index: source - id: 1 + id: "1" body: { "text": "test" } - do: indices.refresh: {} @@ -133,12 +133,12 @@ setup: - do: index: index: source - id: 1 + id: "1" body: { "text": "test" } - do: index: index: source - id: 2 + id: "2" body: { "text": "test", "hidden": true } - do: indices.refresh: {} @@ -185,7 +185,7 @@ setup: - do: index: index: source - id: 1 + id: "1" body: { "text": "test", "foo": "z", "bar": "z" } - do: indices.refresh: {} @@ -202,7 +202,7 @@ setup: - do: get: index: source - id: 1 + id: "1" # These were visible to the user running the update_by_query so they stayed. - match: { _source.foo: z } - match: { _source.bar: z } diff --git a/x-pack/qa/reindex-tests-with-security/src/test/resources/rest-api-spec/test/30_delete_by_query.yml b/x-pack/qa/reindex-tests-with-security/src/test/resources/rest-api-spec/test/30_delete_by_query.yml index 827bd8364c073..64591ec47bfba 100644 --- a/x-pack/qa/reindex-tests-with-security/src/test/resources/rest-api-spec/test/30_delete_by_query.yml +++ b/x-pack/qa/reindex-tests-with-security/src/test/resources/rest-api-spec/test/30_delete_by_query.yml @@ -7,7 +7,7 @@ setup: - do: index: index: source - id: 1 + id: "1" body: { "text": "test" } - do: indices.refresh: {} @@ -32,7 +32,7 @@ setup: - do: index: index: source - id: 1 + id: "1" body: { "text": "test" } - do: indices.refresh: {} @@ -58,7 +58,7 @@ setup: - do: index: index: source - id: 1 + id: "1" body: { "text": "test" } - do: indices.refresh: {} @@ -84,7 +84,7 @@ setup: - do: index: index: source - id: 1 + id: "1" body: { "text": "test" } - do: indices.refresh: {} @@ -110,7 +110,7 @@ setup: - do: index: index: source - id: 1 + id: "1" body: { "text": "test" } - do: indices.refresh: {} @@ -136,12 +136,12 @@ setup: - do: index: index: source - id: 1 + id: "1" body: { "text": "test", "hidden": false } - do: index: index: source - id: 2 + id: "2" body: { "text": "test", "hidden": true } - do: indices.refresh: {} @@ -204,7 +204,7 @@ setup: - do: index: index: source - id: 1 + id: "1" body: { "text": "test", "foo": "z", "bar": "z" } - do: indices.refresh: {} diff --git a/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/mixed_cluster/10_basic.yml b/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/mixed_cluster/10_basic.yml index 265f3547b6d65..ff94bb5745326 100644 --- a/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/mixed_cluster/10_basic.yml +++ b/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/mixed_cluster/10_basic.yml @@ -15,13 +15,13 @@ - do: index: index: upgraded_scroll - id: 42 + id: "42" body: { foo: 1 } - do: index: index: upgraded_scroll - id: 43 + id: "43" body: { foo: 2 } - do: @@ -46,5 +46,5 @@ - do: index: index: scroll_index - id: 1 + id: "1" body: { value: $scroll_id } diff --git a/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/upgraded_cluster/10_basic.yml b/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/upgraded_cluster/10_basic.yml index 0c5deab19068d..d4aec6ac1f0ab 100644 --- a/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/upgraded_cluster/10_basic.yml +++ 
b/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/upgraded_cluster/10_basic.yml @@ -3,7 +3,7 @@ - do: get: index: scroll_index - id: 1 + id: "1" - set: {_source.value: scroll_id} diff --git a/x-pack/qa/smoke-test-security-with-mustache/src/test/resources/rest-api-spec/test/10_templated_role_query.yml b/x-pack/qa/smoke-test-security-with-mustache/src/test/resources/rest-api-spec/test/10_templated_role_query.yml index 4dcc8c847c464..39e7239cba4d3 100644 --- a/x-pack/qa/smoke-test-security-with-mustache/src/test/resources/rest-api-spec/test/10_templated_role_query.yml +++ b/x-pack/qa/smoke-test-security-with-mustache/src/test/resources/rest-api-spec/test/10_templated_role_query.yml @@ -110,7 +110,7 @@ setup: - do: index: index: foobar - id: 1 + id: "1" body: > { "username": "inline_template_user" @@ -118,7 +118,7 @@ setup: - do: index: index: foobar - id: 2 + id: "2" body: > { "username": "stored_template_user" diff --git a/x-pack/qa/smoke-test-security-with-mustache/src/test/resources/rest-api-spec/test/11_templated_role_query_runas.yml b/x-pack/qa/smoke-test-security-with-mustache/src/test/resources/rest-api-spec/test/11_templated_role_query_runas.yml index b3948028f4144..11f93ee9909cd 100644 --- a/x-pack/qa/smoke-test-security-with-mustache/src/test/resources/rest-api-spec/test/11_templated_role_query_runas.yml +++ b/x-pack/qa/smoke-test-security-with-mustache/src/test/resources/rest-api-spec/test/11_templated_role_query_runas.yml @@ -110,7 +110,7 @@ setup: - do: index: index: foobar - id: 1 + id: "1" body: > { "username": "inline_template_user" @@ -118,7 +118,7 @@ setup: - do: index: index: foobar - id: 2 + id: "2" body: > { "username": "stored_template_user" diff --git a/x-pack/qa/smoke-test-security-with-mustache/src/test/resources/rest-api-spec/test/30_search_template.yml b/x-pack/qa/smoke-test-security-with-mustache/src/test/resources/rest-api-spec/test/30_search_template.yml index 1ce18208a1085..9b4221e50f8d3 100644 --- a/x-pack/qa/smoke-test-security-with-mustache/src/test/resources/rest-api-spec/test/30_search_template.yml +++ b/x-pack/qa/smoke-test-security-with-mustache/src/test/resources/rest-api-spec/test/30_search_template.yml @@ -32,14 +32,14 @@ setup: - do: index: index: foobar - id: 1 + id: "1" body: title: "contains some words" - do: index: index: unauthorized_index - id: 2 + id: "2" body: title: "contains some words too" From 9d867cd5ca06f6a8f0a9de96b716895df4becc59 Mon Sep 17 00:00:00 2001 From: Ioannis Kakavas Date: Thu, 10 Feb 2022 10:47:40 +0200 Subject: [PATCH 031/167] Adds known issue for aarch64 pwd generation (#83654) We figured out that on Linux and macOS aarch64, we can't determine whether a terminal is attached to elasticsearch and as such we don't print the elastic password and enrollment token on node first startup. This is resolved in #83566 by updating the underlying library we use to detect the terminal. Co-authored-by: Adam Locke --- .../release-notes/8.0.0-rc2.asciidoc | 21 +++++++++++++++++++ 1 file changed, 21 insertions(+) diff --git a/docs/reference/release-notes/8.0.0-rc2.asciidoc b/docs/reference/release-notes/8.0.0-rc2.asciidoc index 1e57b09fda5d2..9d3f93e006847 100644 --- a/docs/reference/release-notes/8.0.0-rc2.asciidoc +++ b/docs/reference/release-notes/8.0.0-rc2.asciidoc @@ -15,6 +15,27 @@ data loss. If you upgrade from a released version, such as 7.16, to a pre-release version for testing, discard the contents of the cluster when you are done. Do not attempt to upgrade to the final 8.0 release. 
+* If you installed {es} from an archive on an aarch64 platform like Linux ARM or macOS M1, the +`elastic` user password and {kib} enrollment token are not generated +automatically when starting your node for the first time. ++ +-- +After the node starts, generate the `elastic` password with the +<> tool: + +[source,bash] +---- +bin/elasticsearch-reset-password -u elastic +---- + +Then, create an enrollment token for {kib} with the +<> tool: + +[source,bash] +---- +bin/elasticsearch-create-enrollment-token -s kibana +---- +-- [[deprecation-8.0.0-rc2]] [float] === Deprecations From fed07a54ba6dc94e723267dd357a7bd4bd22f8c5 Mon Sep 17 00:00:00 2001 From: Alan Woodward Date: Thu, 10 Feb 2022 09:24:57 +0000 Subject: [PATCH 032/167] Test bug in TimeSeriesIndexSearcherTests (#83730) This test checks that documents are presented in tsid and then timestamp order to collectors; it was not taking a deep copy of the tsid and so when a change in tsid stayed on the same segment we could get inaccurate comparisons as the BytesRef would change out from underneath us and would compare as equal to the old timestamp, meaning that the check for strictly increasing timestamp would fail. Fixes #83647 --- .../aggregations/timeseries/TimeSeriesIndexSearcher.java | 9 ++++++++- .../timeseries/TimeSeriesIndexSearcherTests.java | 8 +++----- 2 files changed, 11 insertions(+), 6 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/timeseries/TimeSeriesIndexSearcher.java b/server/src/main/java/org/elasticsearch/search/aggregations/timeseries/TimeSeriesIndexSearcher.java index 4999346310f52..4837a291df98f 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/timeseries/TimeSeriesIndexSearcher.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/timeseries/TimeSeriesIndexSearcher.java @@ -20,6 +20,7 @@ import org.apache.lucene.search.Weight; import org.apache.lucene.util.Bits; import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.BytesRefBuilder; import org.apache.lucene.util.PriorityQueue; import org.elasticsearch.cluster.metadata.DataStream; import org.elasticsearch.index.mapper.TimeSeriesIdFieldMapper; @@ -99,7 +100,7 @@ private boolean populateQueue(List leafWalkers, PriorityQueue= 0); + assertTrue(currentTSID + "->" + latestTSID.utf8ToString(), latestTSID.compareTo(currentTSID) >= 0); if (latestTSID.equals(currentTSID)) { - assertTrue(latestTimestamp >= currentTimestamp); + assertTrue(currentTimestamp + "->" + latestTimestamp, latestTimestamp >= currentTimestamp); } } currentTimestamp = latestTimestamp; - currentTSID = latestTSID; + currentTSID = BytesRef.deepCopyOf(latestTSID); total++; } }; From d4caeea1f7b494036251e87d3caa5ce40788441c Mon Sep 17 00:00:00 2001 From: Mary Gouseti Date: Thu, 10 Feb 2022 10:31:30 +0100 Subject: [PATCH 033/167] Remove Cluster & XPack clients from HLRC (#83593) Part of #83423 --- .../elasticsearch/client/ClusterClient.java | 389 ------------------ .../client/RestHighLevelClient.java | 24 -- .../org/elasticsearch/client/XPackClient.java | 115 ------ .../upgrades/SearchStatesIT.java | 15 +- .../AbstractMultiClusterRemoteTestCase.java | 12 +- .../cluster/remote/test/RemoteClustersIT.java | 111 +++-- .../test/rest/ESRestTestCase.java | 28 +- .../oldrepos/OldRepositoryAccessIT.java | 22 +- .../SmokeTestMonitoringWithSecurityIT.java | 34 +- 9 files changed, 103 insertions(+), 647 deletions(-) delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ClusterClient.java delete mode 100644 
client/rest-high-level/src/main/java/org/elasticsearch/client/XPackClient.java diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ClusterClient.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ClusterClient.java deleted file mode 100644 index a9a119da79bab..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ClusterClient.java +++ /dev/null @@ -1,389 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ - -package org.elasticsearch.client; - -import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.admin.cluster.health.ClusterHealthRequest; -import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; -import org.elasticsearch.action.admin.cluster.settings.ClusterGetSettingsRequest; -import org.elasticsearch.action.admin.cluster.settings.ClusterGetSettingsResponse; -import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsRequest; -import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsResponse; -import org.elasticsearch.action.support.master.AcknowledgedResponse; -import org.elasticsearch.client.cluster.RemoteInfoRequest; -import org.elasticsearch.client.cluster.RemoteInfoResponse; -import org.elasticsearch.client.indices.ComponentTemplatesExistRequest; -import org.elasticsearch.client.indices.DeleteComponentTemplateRequest; -import org.elasticsearch.client.indices.GetComponentTemplatesRequest; -import org.elasticsearch.client.indices.GetComponentTemplatesResponse; -import org.elasticsearch.client.indices.PutComponentTemplateRequest; -import org.elasticsearch.rest.RestStatus; - -import java.io.IOException; - -import static java.util.Collections.emptySet; -import static java.util.Collections.singleton; - -/** - * A wrapper for the {@link RestHighLevelClient} that provides methods for accessing the Cluster API. - *
    - * See Cluster API on elastic.co - * - * @deprecated The High Level Rest Client is deprecated in favor of the - * - * Elasticsearch Java API Client - */ -@Deprecated(since = "7.16.0", forRemoval = true) -@SuppressWarnings("removal") -public final class ClusterClient { - private final RestHighLevelClient restHighLevelClient; - - ClusterClient(RestHighLevelClient restHighLevelClient) { - this.restHighLevelClient = restHighLevelClient; - } - - /** - * Updates cluster wide specific settings using the Cluster Update Settings API. - * See Cluster Update Settings - * API on elastic.co - * @param clusterUpdateSettingsRequest the request - * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return the response - * @throws IOException in case there is a problem sending the request or parsing back the response - */ - public ClusterUpdateSettingsResponse putSettings(ClusterUpdateSettingsRequest clusterUpdateSettingsRequest, RequestOptions options) - throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - clusterUpdateSettingsRequest, - ClusterRequestConverters::clusterPutSettings, - options, - ClusterUpdateSettingsResponse::fromXContent, - emptySet() - ); - } - - /** - * Asynchronously updates cluster wide specific settings using the Cluster Update Settings API. - * See Cluster Update Settings - * API on elastic.co - * @param clusterUpdateSettingsRequest the request - * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener the listener to be notified upon request completion - * @return cancellable that may be used to cancel the request - */ - public Cancellable putSettingsAsync( - ClusterUpdateSettingsRequest clusterUpdateSettingsRequest, - RequestOptions options, - ActionListener listener - ) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - clusterUpdateSettingsRequest, - ClusterRequestConverters::clusterPutSettings, - options, - ClusterUpdateSettingsResponse::fromXContent, - listener, - emptySet() - ); - } - - /** - * Get the cluster wide settings using the Cluster Get Settings API. - * See Cluster Get Settings - * API on elastic.co - * @param clusterGetSettingsRequest the request - * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return the response - * @throws IOException in case there is a problem sending the request or parsing back the response - */ - public ClusterGetSettingsResponse getSettings(ClusterGetSettingsRequest clusterGetSettingsRequest, RequestOptions options) - throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - clusterGetSettingsRequest, - ClusterRequestConverters::clusterGetSettings, - options, - ClusterGetSettingsResponse::fromXContent, - emptySet() - ); - } - - /** - * Asynchronously get the cluster wide settings using the Cluster Get Settings API. - * See Cluster Get Settings - * API on elastic.co - * @param clusterGetSettingsRequest the request - * @param options the request options (e.g. 
headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener the listener to be notified upon request completion - * @return cancellable that may be used to cancel the request - */ - public Cancellable getSettingsAsync( - ClusterGetSettingsRequest clusterGetSettingsRequest, - RequestOptions options, - ActionListener listener - ) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - clusterGetSettingsRequest, - ClusterRequestConverters::clusterGetSettings, - options, - ClusterGetSettingsResponse::fromXContent, - listener, - emptySet() - ); - } - - /** - * Get cluster health using the Cluster Health API. - * See - * Cluster Health API on elastic.co - *
    - * If timeout occurred, {@link ClusterHealthResponse} will have isTimedOut() == true and status() == RestStatus.REQUEST_TIMEOUT - * @param healthRequest the request - * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return the response - * @throws IOException in case there is a problem sending the request or parsing back the response - */ - public ClusterHealthResponse health(ClusterHealthRequest healthRequest, RequestOptions options) throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - healthRequest, - ClusterRequestConverters::clusterHealth, - options, - ClusterHealthResponse::fromXContent, - singleton(RestStatus.REQUEST_TIMEOUT.getStatus()) - ); - } - - /** - * Asynchronously get cluster health using the Cluster Health API. - * See - * Cluster Health API on elastic.co - * If timeout occurred, {@link ClusterHealthResponse} will have isTimedOut() == true and status() == RestStatus.REQUEST_TIMEOUT - * @param healthRequest the request - * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener the listener to be notified upon request completion - * @return cancellable that may be used to cancel the request - */ - public Cancellable healthAsync( - ClusterHealthRequest healthRequest, - RequestOptions options, - ActionListener listener - ) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - healthRequest, - ClusterRequestConverters::clusterHealth, - options, - ClusterHealthResponse::fromXContent, - listener, - singleton(RestStatus.REQUEST_TIMEOUT.getStatus()) - ); - } - - /** - * Get the remote cluster information using the Remote cluster info API. - * See Remote cluster info - * API on elastic.co - * @param request the request - * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return the response - * @throws IOException in case there is a problem sending the request or parsing back the response - */ - public RemoteInfoResponse remoteInfo(RemoteInfoRequest request, RequestOptions options) throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - request, - ClusterRequestConverters::remoteInfo, - options, - RemoteInfoResponse::fromXContent, - singleton(RestStatus.REQUEST_TIMEOUT.getStatus()) - ); - } - - /** - * Asynchronously get remote cluster information using the Remote cluster info API. - * See Remote cluster info - * API on elastic.co - * @param request the request - * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener the listener to be notified upon request completion - * @return cancellable that may be used to cancel the request - */ - public Cancellable remoteInfoAsync(RemoteInfoRequest request, RequestOptions options, ActionListener listener) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - request, - ClusterRequestConverters::remoteInfo, - options, - RemoteInfoResponse::fromXContent, - listener, - singleton(RestStatus.REQUEST_TIMEOUT.getStatus()) - ); - } - - /** - * Delete a component template using the Component Templates API - * - * @param req the request - * @param options the request options (e.g. 
headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @throws IOException in case there is a problem sending the request or parsing back the response - */ - public AcknowledgedResponse deleteComponentTemplate(DeleteComponentTemplateRequest req, RequestOptions options) throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - req, - ClusterRequestConverters::deleteComponentTemplate, - options, - AcknowledgedResponse::fromXContent, - emptySet() - ); - } - - /** - * Asynchronously delete a component template using the Component Templates API - * - * @param request the request - * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener the listener to be notified upon request completion - * @return cancellable that may be used to cancel the request - */ - public Cancellable deleteComponentTemplateAsync( - DeleteComponentTemplateRequest request, - RequestOptions options, - ActionListener listener - ) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - request, - ClusterRequestConverters::deleteComponentTemplate, - options, - AcknowledgedResponse::fromXContent, - listener, - emptySet() - ); - } - - /** - * Puts a component template using the Component Templates API. - * - * @param putComponentTemplateRequest the request - * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return the response - * @throws IOException in case there is a problem sending the request or parsing back the response - */ - public AcknowledgedResponse putComponentTemplate(PutComponentTemplateRequest putComponentTemplateRequest, RequestOptions options) - throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - putComponentTemplateRequest, - ClusterRequestConverters::putComponentTemplate, - options, - AcknowledgedResponse::fromXContent, - emptySet() - ); - } - - /** - * Asynchronously puts a component template using the Component Templates API. - * - * @param putComponentTemplateRequest the request - * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener the listener to be notified upon request completion - * @return cancellable that may be used to cancel the request - */ - public Cancellable putComponentTemplateAsync( - PutComponentTemplateRequest putComponentTemplateRequest, - RequestOptions options, - ActionListener listener - ) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - putComponentTemplateRequest, - ClusterRequestConverters::putComponentTemplate, - options, - AcknowledgedResponse::fromXContent, - listener, - emptySet() - ); - } - - /** - * Gets component templates using the Components Templates API - * @param options the request options (e.g. 
headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param getComponentTemplatesRequest the request - * @return the response - * @throws IOException in case there is a problem sending the request or parsing back the response - */ - public GetComponentTemplatesResponse getComponentTemplate( - GetComponentTemplatesRequest getComponentTemplatesRequest, - RequestOptions options - ) throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - getComponentTemplatesRequest, - ClusterRequestConverters::getComponentTemplates, - options, - GetComponentTemplatesResponse::fromXContent, - emptySet() - ); - } - - /** - * Asynchronously gets component templates using the Components Templates API - * @param getComponentTemplatesRequest the request - * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener the listener to be notified upon request completion - * @return cancellable that may be used to cancel the request - */ - public Cancellable getComponentTemplateAsync( - GetComponentTemplatesRequest getComponentTemplatesRequest, - RequestOptions options, - ActionListener listener - ) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - getComponentTemplatesRequest, - ClusterRequestConverters::getComponentTemplates, - options, - GetComponentTemplatesResponse::fromXContent, - listener, - emptySet() - ); - } - - /** - * Uses the Component Templates API to determine if component templates exist - * - * @param componentTemplatesRequest the request - * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return true if any index templates in the request exist, false otherwise - * @throws IOException in case there is a problem sending the request or parsing back the response - */ - public boolean existsComponentTemplate(ComponentTemplatesExistRequest componentTemplatesRequest, RequestOptions options) - throws IOException { - return restHighLevelClient.performRequest( - componentTemplatesRequest, - ClusterRequestConverters::componentTemplatesExist, - options, - RestHighLevelClient::convertExistsResponse, - emptySet() - ); - } - - /** - * Uses the Index Templates API to determine if index templates exist - * @param componentTemplatesRequest the request - * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener the listener to be notified upon request completion. 
The listener will be called with the value {@code true} - * @return cancellable that may be used to cancel the request - */ - public Cancellable existsComponentTemplateAsync( - ComponentTemplatesExistRequest componentTemplatesRequest, - RequestOptions options, - ActionListener listener - ) { - - return restHighLevelClient.performRequestAsync( - componentTemplatesRequest, - ClusterRequestConverters::componentTemplatesExist, - options, - RestHighLevelClient::convertExistsResponse, - listener, - emptySet() - ); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java index 695ae9b69aea1..d94071ccac1d5 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java @@ -276,10 +276,8 @@ public class RestHighLevelClient implements Closeable { private volatile ListenableFuture> versionValidationFuture; private final IndicesClient indicesClient = new IndicesClient(this); - private final ClusterClient clusterClient = new ClusterClient(this); private final IngestClient ingestClient = new IngestClient(this); private final SnapshotClient snapshotClient = new SnapshotClient(this); - private final XPackClient xPackClient = new XPackClient(this); private final MachineLearningClient machineLearningClient = new MachineLearningClient(this); private final SecurityClient securityClient = new SecurityClient(this); private final TransformClient transformClient = new TransformClient(this); @@ -365,15 +363,6 @@ public final IndicesClient indices() { return indicesClient; } - /** - * Provides a {@link ClusterClient} which can be used to access the Cluster API. - * - * See Cluster API on elastic.co - */ - public final ClusterClient cluster() { - return clusterClient; - } - /** * Provides a {@link IngestClient} which can be used to access the Ingest API. * @@ -392,19 +381,6 @@ public final SnapshotClient snapshot() { return snapshotClient; } - /** - * Provides methods for accessing the Elastic Licensed X-Pack Info - * and Usage APIs that are shipped with the default distribution of - * Elasticsearch. All of these APIs will 404 if run against the OSS - * distribution of Elasticsearch. - *
    - * See the - * Info APIs on elastic.co for more information. - */ - public final XPackClient xpack() { - return xPackClient; - } - /** * A wrapper for the {@link RestHighLevelClient} that provides methods for accessing the Searchable Snapshots APIs. *
    diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/XPackClient.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/XPackClient.java deleted file mode 100644 index f019a262b607a..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/XPackClient.java +++ /dev/null @@ -1,115 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ - -package org.elasticsearch.client; - -import org.elasticsearch.action.ActionListener; -import org.elasticsearch.client.xpack.XPackInfoRequest; -import org.elasticsearch.client.xpack.XPackInfoResponse; -import org.elasticsearch.client.xpack.XPackUsageRequest; -import org.elasticsearch.client.xpack.XPackUsageResponse; - -import java.io.IOException; - -import static java.util.Collections.emptySet; - -/** - * A wrapper for the {@link RestHighLevelClient} that provides methods for - * accessing the Elastic Licensed X-Pack APIs that are shipped with the - * default distribution of Elasticsearch. All of these APIs will 404 if run - * against the OSS distribution of Elasticsearch. - *
    - * See the - * REST APIs on elastic.co for more information. - * - * @deprecated The High Level Rest Client is deprecated in favor of the - * - * Elasticsearch Java API Client - */ -@Deprecated(since = "7.16.0", forRemoval = true) -@SuppressWarnings("removal") -public final class XPackClient { - - private final RestHighLevelClient restHighLevelClient; - - XPackClient(RestHighLevelClient restHighLevelClient) { - this.restHighLevelClient = restHighLevelClient; - } - - /** - * Fetch information about X-Pack from the cluster. - * See - * the docs for more. - * @param request the request - * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return the response - * @throws IOException in case there is a problem sending the request or parsing back the response - */ - public XPackInfoResponse info(XPackInfoRequest request, RequestOptions options) throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - request, - XPackRequestConverters::info, - options, - XPackInfoResponse::fromXContent, - emptySet() - ); - } - - /** - * Asynchronously fetch information about X-Pack from the cluster. - * See - * the docs for more. - * @param request the request - * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener the listener to be notified upon request completion - * @return cancellable that may be used to cancel the request - */ - public Cancellable infoAsync(XPackInfoRequest request, RequestOptions options, ActionListener listener) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - request, - XPackRequestConverters::info, - options, - XPackInfoResponse::fromXContent, - listener, - emptySet() - ); - } - - /** - * Fetch usage information about X-Pack features from the cluster. - * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return the response - * @throws IOException in case there is a problem sending the request or parsing back the response - */ - public XPackUsageResponse usage(XPackUsageRequest request, RequestOptions options) throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - request, - XPackRequestConverters::usage, - options, - XPackUsageResponse::fromXContent, - emptySet() - ); - } - - /** - * Asynchronously fetch usage information about X-Pack features from the cluster. - * @param options the request options (e.g. 
headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener the listener to be notified upon request completion - * @return cancellable that may be used to cancel the request - */ - public Cancellable usageAsync(XPackUsageRequest request, RequestOptions options, ActionListener listener) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - request, - XPackRequestConverters::usage, - options, - XPackUsageResponse::fromXContent, - listener, - emptySet() - ); - } -} diff --git a/qa/ccs-rolling-upgrade-remote-cluster/src/test/java/org/elasticsearch/upgrades/SearchStatesIT.java b/qa/ccs-rolling-upgrade-remote-cluster/src/test/java/org/elasticsearch/upgrades/SearchStatesIT.java index c1db9d77f61dd..6dec927308c32 100644 --- a/qa/ccs-rolling-upgrade-remote-cluster/src/test/java/org/elasticsearch/upgrades/SearchStatesIT.java +++ b/qa/ccs-rolling-upgrade-remote-cluster/src/test/java/org/elasticsearch/upgrades/SearchStatesIT.java @@ -31,7 +31,6 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.Version; -import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsRequest; import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest; import org.elasticsearch.action.admin.indices.refresh.RefreshRequest; import org.elasticsearch.action.index.IndexRequest; @@ -114,7 +113,7 @@ static List parseHosts(String props) { public static void configureRemoteClusters(List remoteNodes) throws Exception { assertThat(remoteNodes, hasSize(3)); final String remoteClusterSettingPrefix = "cluster.remote." + CLUSTER_ALIAS + "."; - try (RestHighLevelClient localClient = newLocalClient()) { + try (RestClient localClient = newLocalClient().getLowLevelClient()) { final Settings remoteConnectionSettings; if (randomBoolean()) { final List seeds = remoteNodes.stream() @@ -137,13 +136,9 @@ public static void configureRemoteClusters(List remoteNodes) throws Except .put(remoteClusterSettingPrefix + "proxy_address", proxyNode.transportAddress) .build(); } - assertTrue( - localClient.cluster() - .putSettings(new ClusterUpdateSettingsRequest().persistentSettings(remoteConnectionSettings), RequestOptions.DEFAULT) - .isAcknowledged() - ); + updateClusterSettings(localClient, remoteConnectionSettings); assertBusy(() -> { - final Response resp = localClient.getLowLevelClient().performRequest(new Request("GET", "/_remote/info")); + final Response resp = localClient.performRequest(new Request("GET", "/_remote/info")); assertOK(resp); final ObjectPath objectPath = ObjectPath.createFromResponse(resp); assertNotNull(objectPath.evaluate(CLUSTER_ALIAS)); @@ -172,7 +167,7 @@ static int indexDocs(RestHighLevelClient client, String index, int numDocs) thro } void verifySearch(String localIndex, int localNumDocs, String remoteIndex, int remoteNumDocs, Integer preFilterShardSize) { - try (RestHighLevelClient localClient = newLocalClient()) { + try (RestClient localClient = newLocalClient().getLowLevelClient()) { Request request = new Request("POST", "/_search"); final int expectedDocs; if (randomBoolean()) { @@ -193,7 +188,7 @@ void verifySearch(String localIndex, int localNumDocs, String remoteIndex, int r } int size = between(1, 100); request.setJsonEntity("{\"sort\": \"f\", \"size\": " + size + "}"); - Response response = localClient.getLowLevelClient().performRequest(request); + Response response = localClient.performRequest(request); try ( XContentParser parser = JsonXContent.jsonXContent.createParser( 
NamedXContentRegistry.EMPTY, diff --git a/qa/remote-clusters/src/test/java/org/elasticsearch/cluster/remote/test/AbstractMultiClusterRemoteTestCase.java b/qa/remote-clusters/src/test/java/org/elasticsearch/cluster/remote/test/AbstractMultiClusterRemoteTestCase.java index c5bee9694a275..c778d6fe4c512 100644 --- a/qa/remote-clusters/src/test/java/org/elasticsearch/cluster/remote/test/AbstractMultiClusterRemoteTestCase.java +++ b/qa/remote-clusters/src/test/java/org/elasticsearch/cluster/remote/test/AbstractMultiClusterRemoteTestCase.java @@ -9,8 +9,7 @@ import org.apache.http.HttpHost; import org.elasticsearch.ElasticsearchException; -import org.elasticsearch.action.admin.cluster.health.ClusterHealthRequest; -import org.elasticsearch.client.RequestOptions; +import org.elasticsearch.client.Request; import org.elasticsearch.client.RestClient; import org.elasticsearch.client.RestHighLevelClient; import org.elasticsearch.common.settings.SecureString; @@ -28,6 +27,7 @@ import java.nio.file.Files; import java.nio.file.Path; import java.util.Collections; +import java.util.function.Consumer; @SuppressWarnings("removal") public abstract class AbstractMultiClusterRemoteTestCase extends ESRestTestCase { @@ -58,8 +58,12 @@ public void initClientsAndConfigureClusters() throws Exception { cluster1Client = buildClient("localhost:" + getProperty("test.fixtures.elasticsearch-" + getDistribution() + "-1.tcp.9200")); cluster2Client = buildClient("localhost:" + getProperty("test.fixtures.elasticsearch-" + getDistribution() + "-2.tcp.9200")); - cluster1Client().cluster().health(new ClusterHealthRequest().waitForNodes("1").waitForYellowStatus(), RequestOptions.DEFAULT); - cluster2Client().cluster().health(new ClusterHealthRequest().waitForNodes("1").waitForYellowStatus(), RequestOptions.DEFAULT); + Consumer waitForYellowRequest = request -> { + request.addParameter("wait_for_status", "yellow"); + request.addParameter("wait_for_nodes", "1"); + }; + ensureHealth(cluster1Client().getLowLevelClient(), waitForYellowRequest); + ensureHealth(cluster2Client().getLowLevelClient(), waitForYellowRequest); initialized = true; } diff --git a/qa/remote-clusters/src/test/java/org/elasticsearch/cluster/remote/test/RemoteClustersIT.java b/qa/remote-clusters/src/test/java/org/elasticsearch/cluster/remote/test/RemoteClustersIT.java index 6e4d83873f9df..78aa2b7e1c5de 100644 --- a/qa/remote-clusters/src/test/java/org/elasticsearch/cluster/remote/test/RemoteClustersIT.java +++ b/qa/remote-clusters/src/test/java/org/elasticsearch/cluster/remote/test/RemoteClustersIT.java @@ -7,14 +7,12 @@ */ package org.elasticsearch.cluster.remote.test; -import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsRequest; import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.support.WriteRequest; import org.elasticsearch.client.RequestOptions; -import org.elasticsearch.client.cluster.RemoteConnectionInfo; -import org.elasticsearch.client.cluster.RemoteInfoRequest; +import org.elasticsearch.client.RestClient; import org.elasticsearch.client.indices.CreateIndexRequest; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.xcontent.XContentFactory; @@ -22,6 +20,8 @@ import org.junit.Before; import java.io.IOException; +import java.util.Map; +import java.util.Optional; import static org.hamcrest.Matchers.equalTo; import static org.junit.Assume.assumeThat; @@ 
-74,27 +74,22 @@ public void clearIndices() throws IOException { @After public void clearRemoteClusterSettings() throws IOException { - ClusterUpdateSettingsRequest request = new ClusterUpdateSettingsRequest().persistentSettings( - Settings.builder().putNull("cluster.remote.*").build() - ); - assertTrue(cluster1Client().cluster().putSettings(request, RequestOptions.DEFAULT).isAcknowledged()); - assertTrue(cluster2Client().cluster().putSettings(request, RequestOptions.DEFAULT).isAcknowledged()); + Settings setting = Settings.builder().putNull("cluster.remote.*").build(); + updateClusterSettings(cluster1Client().getLowLevelClient(), setting); + updateClusterSettings(cluster2Client().getLowLevelClient(), setting); } public void testProxyModeConnectionWorks() throws IOException { String cluster2RemoteClusterSeed = "elasticsearch-" + getDistribution() + "-2:9300"; logger.info("Configuring remote cluster [{}]", cluster2RemoteClusterSeed); - ClusterUpdateSettingsRequest request = new ClusterUpdateSettingsRequest().persistentSettings( - Settings.builder() - .put("cluster.remote.cluster2.mode", "proxy") - .put("cluster.remote.cluster2.proxy_address", cluster2RemoteClusterSeed) - .build() - ); - assertTrue(cluster1Client().cluster().putSettings(request, RequestOptions.DEFAULT).isAcknowledged()); + Settings settings = Settings.builder() + .put("cluster.remote.cluster2.mode", "proxy") + .put("cluster.remote.cluster2.proxy_address", cluster2RemoteClusterSeed) + .build(); + + updateClusterSettings(cluster1Client().getLowLevelClient(), settings); - RemoteConnectionInfo rci = cluster1Client().cluster().remoteInfo(new RemoteInfoRequest(), RequestOptions.DEFAULT).getInfos().get(0); - logger.info("Connection info: {}", rci); - assertTrue(rci.isConnected()); + assertTrue(isConnected(cluster1Client().getLowLevelClient())); assertEquals( 2L, @@ -105,33 +100,25 @@ public void testProxyModeConnectionWorks() throws IOException { public void testSniffModeConnectionFails() throws IOException { String cluster2RemoteClusterSeed = "elasticsearch-" + getDistribution() + "-2:9300"; logger.info("Configuring remote cluster [{}]", cluster2RemoteClusterSeed); - ClusterUpdateSettingsRequest request = new ClusterUpdateSettingsRequest().persistentSettings( - Settings.builder() - .put("cluster.remote.cluster2alt.mode", "sniff") - .put("cluster.remote.cluster2alt.seeds", cluster2RemoteClusterSeed) - .build() - ); - assertTrue(cluster1Client().cluster().putSettings(request, RequestOptions.DEFAULT).isAcknowledged()); + Settings settings = Settings.builder() + .put("cluster.remote.cluster2alt.mode", "sniff") + .put("cluster.remote.cluster2alt.seeds", cluster2RemoteClusterSeed) + .build(); + updateClusterSettings(cluster1Client().getLowLevelClient(), settings); - RemoteConnectionInfo rci = cluster1Client().cluster().remoteInfo(new RemoteInfoRequest(), RequestOptions.DEFAULT).getInfos().get(0); - logger.info("Connection info: {}", rci); - assertFalse(rci.isConnected()); + assertFalse(isConnected(cluster1Client().getLowLevelClient())); } public void testHAProxyModeConnectionWorks() throws IOException { String proxyAddress = "haproxy:9600"; logger.info("Configuring remote cluster [{}]", proxyAddress); - ClusterUpdateSettingsRequest request = new ClusterUpdateSettingsRequest().persistentSettings( - Settings.builder() - .put("cluster.remote.haproxynosn.mode", "proxy") - .put("cluster.remote.haproxynosn.proxy_address", proxyAddress) - .build() - ); - assertTrue(cluster1Client().cluster().putSettings(request, 
RequestOptions.DEFAULT).isAcknowledged()); + Settings settings = Settings.builder() + .put("cluster.remote.haproxynosn.mode", "proxy") + .put("cluster.remote.haproxynosn.proxy_address", proxyAddress) + .build(); + updateClusterSettings(cluster1Client().getLowLevelClient(), settings); - RemoteConnectionInfo rci = cluster1Client().cluster().remoteInfo(new RemoteInfoRequest(), RequestOptions.DEFAULT).getInfos().get(0); - logger.info("Connection info: {}", rci); - assertTrue(rci.isConnected()); + assertTrue(isConnected(cluster1Client().getLowLevelClient())); assertEquals( 2L, @@ -142,18 +129,14 @@ public void testHAProxyModeConnectionWorks() throws IOException { public void testHAProxyModeConnectionWithSNIToCluster1Works() throws IOException { assumeThat("test is only supported if the distribution contains xpack", getDistribution(), equalTo("default")); - ClusterUpdateSettingsRequest request = new ClusterUpdateSettingsRequest().persistentSettings( - Settings.builder() - .put("cluster.remote.haproxysni1.mode", "proxy") - .put("cluster.remote.haproxysni1.proxy_address", "haproxy:9600") - .put("cluster.remote.haproxysni1.server_name", "application1.example.com") - .build() - ); - assertTrue(cluster2Client().cluster().putSettings(request, RequestOptions.DEFAULT).isAcknowledged()); + Settings settings = Settings.builder() + .put("cluster.remote.haproxysni1.mode", "proxy") + .put("cluster.remote.haproxysni1.proxy_address", "haproxy:9600") + .put("cluster.remote.haproxysni1.server_name", "application1.example.com") + .build(); + updateClusterSettings(cluster2Client().getLowLevelClient(), settings); - RemoteConnectionInfo rci = cluster2Client().cluster().remoteInfo(new RemoteInfoRequest(), RequestOptions.DEFAULT).getInfos().get(0); - logger.info("Connection info: {}", rci); - assertTrue(rci.isConnected()); + assertTrue(isConnected(cluster2Client().getLowLevelClient())); assertEquals( 1L, @@ -164,22 +147,30 @@ public void testHAProxyModeConnectionWithSNIToCluster1Works() throws IOException public void testHAProxyModeConnectionWithSNIToCluster2Works() throws IOException { assumeThat("test is only supported if the distribution contains xpack", getDistribution(), equalTo("default")); - ClusterUpdateSettingsRequest request = new ClusterUpdateSettingsRequest().persistentSettings( - Settings.builder() - .put("cluster.remote.haproxysni2.mode", "proxy") - .put("cluster.remote.haproxysni2.proxy_address", "haproxy:9600") - .put("cluster.remote.haproxysni2.server_name", "application2.example.com") - .build() - ); - assertTrue(cluster1Client().cluster().putSettings(request, RequestOptions.DEFAULT).isAcknowledged()); + Settings settings = Settings.builder() + .put("cluster.remote.haproxysni2.mode", "proxy") + .put("cluster.remote.haproxysni2.proxy_address", "haproxy:9600") + .put("cluster.remote.haproxysni2.server_name", "application2.example.com") + .build(); + updateClusterSettings(cluster1Client().getLowLevelClient(), settings); - RemoteConnectionInfo rci = cluster1Client().cluster().remoteInfo(new RemoteInfoRequest(), RequestOptions.DEFAULT).getInfos().get(0); - logger.info("Connection info: {}", rci); - assertTrue(rci.isConnected()); + assertTrue(isConnected(cluster1Client().getLowLevelClient())); assertEquals( 2L, cluster1Client().search(new SearchRequest("haproxysni2:test2"), RequestOptions.DEFAULT).getHits().getTotalHits().value ); } + + @SuppressWarnings("unchecked") + private boolean isConnected(RestClient restClient) throws IOException { + Optional remoteConnectionInfo = getAsMap(restClient, 
"/_remote/info").values().stream().findFirst(); + if (remoteConnectionInfo.isPresent()) { + logger.info("Connection info: {}", remoteConnectionInfo); + if (((Map) remoteConnectionInfo.get()).get("connected")instanceof Boolean connected) { + return connected; + } + } + return false; + } } diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java index a4e338626191d..245593cc5e76d 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java @@ -1407,6 +1407,24 @@ public static void assertAcknowledged(Response response) throws IOException { assertThat(jsonBody, containsString("\"acknowledged\":true")); } + /** + * Updates the cluster with the provided settings (as persistent settings) + **/ + public static void updateClusterSettings(Settings settings) throws IOException { + updateClusterSettings(client(), settings); + } + + /** + * Updates the cluster with the provided settings (as persistent settings) + **/ + public static void updateClusterSettings(RestClient client, Settings settings) throws IOException { + Request request = new Request("PUT", "/_cluster/settings"); + String entity = "{ \"persistent\":" + Strings.toString(settings) + "}"; + request.setJsonEntity(entity); + Response response = client.performRequest(request); + assertOK(response); + } + /** * Permits subclasses to increase the default timeout when waiting for green health */ @@ -1440,6 +1458,10 @@ public static void ensureHealth(String index, Consumer requestConsumer) ensureHealth(client(), index, requestConsumer); } + public static void ensureHealth(RestClient restClient, Consumer requestConsumer) throws IOException { + ensureHealth(restClient, "", requestConsumer); + } + protected static void ensureHealth(RestClient restClient, String index, Consumer requestConsumer) throws IOException { Request request = new Request("GET", "/_cluster/health" + (index.isBlank() ? 
"" : "/" + index)); requestConsumer.accept(request); @@ -1604,7 +1626,11 @@ protected static Map getAlias(final String index, final String a } protected static Map getAsMap(final String endpoint) throws IOException { - Response response = client().performRequest(new Request("GET", endpoint)); + return getAsMap(client(), endpoint); + } + + protected static Map getAsMap(RestClient client, final String endpoint) throws IOException { + Response response = client.performRequest(new Request("GET", endpoint)); return responseAsMap(response); } diff --git a/x-pack/qa/repository-old-versions/src/test/java/org/elasticsearch/oldrepos/OldRepositoryAccessIT.java b/x-pack/qa/repository-old-versions/src/test/java/org/elasticsearch/oldrepos/OldRepositoryAccessIT.java index a2e12e6046f06..6174c029c47cb 100644 --- a/x-pack/qa/repository-old-versions/src/test/java/org/elasticsearch/oldrepos/OldRepositoryAccessIT.java +++ b/x-pack/qa/repository-old-versions/src/test/java/org/elasticsearch/oldrepos/OldRepositoryAccessIT.java @@ -10,7 +10,6 @@ import org.apache.http.HttpHost; import org.elasticsearch.Build; import org.elasticsearch.Version; -import org.elasticsearch.action.admin.cluster.health.ClusterHealthRequest; import org.elasticsearch.action.admin.cluster.repositories.put.PutRepositoryRequest; import org.elasticsearch.action.admin.cluster.snapshots.get.GetSnapshotsRequest; import org.elasticsearch.action.admin.cluster.snapshots.restore.RestoreSnapshotRequest; @@ -29,7 +28,6 @@ import org.elasticsearch.client.indices.PutMappingRequest; import org.elasticsearch.client.searchable_snapshots.MountSnapshotRequest; import org.elasticsearch.cluster.SnapshotsInProgress; -import org.elasticsearch.cluster.health.ClusterHealthStatus; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.metadata.MappingMetadata; import org.elasticsearch.cluster.routing.Murmur3HashFunction; @@ -342,15 +340,7 @@ private void restoreMountAndVerify( assertEquals(numberOfShards, restoreSnapshotResponse.getRestoreInfo().totalShards()); assertEquals(numberOfShards, restoreSnapshotResponse.getRestoreInfo().successfulShards()); - assertEquals( - ClusterHealthStatus.GREEN, - client.cluster() - .health( - new ClusterHealthRequest("restored_" + indexName).waitForGreenStatus().waitForNoRelocatingShards(true), - RequestOptions.DEFAULT - ) - .getStatus() - ); + ensureGreen("restored_" + indexName); MappingMetadata mapping = client.indices() .getMapping(new GetMappingsRequest().indices("restored_" + indexName), RequestOptions.DEFAULT) @@ -401,15 +391,7 @@ private void restoreMountAndVerify( assertEquals(numberOfShards, mountSnapshotResponse.getRestoreInfo().totalShards()); assertEquals(numberOfShards, mountSnapshotResponse.getRestoreInfo().successfulShards()); - assertEquals( - ClusterHealthStatus.GREEN, - client.cluster() - .health( - new ClusterHealthRequest("mounted_full_copy_" + indexName).waitForGreenStatus().waitForNoRelocatingShards(true), - RequestOptions.DEFAULT - ) - .getStatus() - ); + ensureGreen("mounted_full_copy_" + indexName); // run a search against the index assertDocs("mounted_full_copy_" + indexName, numDocs, expectedIds, client, sourceOnlyRepository, oldVersion); diff --git a/x-pack/qa/smoke-test-plugins-ssl/src/test/java/org/elasticsearch/smoketest/SmokeTestMonitoringWithSecurityIT.java b/x-pack/qa/smoke-test-plugins-ssl/src/test/java/org/elasticsearch/smoketest/SmokeTestMonitoringWithSecurityIT.java index df6189b5ca681..e0a72946be332 100644 --- 
a/x-pack/qa/smoke-test-plugins-ssl/src/test/java/org/elasticsearch/smoketest/SmokeTestMonitoringWithSecurityIT.java +++ b/x-pack/qa/smoke-test-plugins-ssl/src/test/java/org/elasticsearch/smoketest/SmokeTestMonitoringWithSecurityIT.java @@ -10,10 +10,6 @@ import io.netty.util.concurrent.GlobalEventExecutor; import org.elasticsearch.ElasticsearchException; -import org.elasticsearch.action.admin.cluster.health.ClusterHealthRequest; -import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; -import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsRequest; -import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsResponse; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.client.Request; import org.elasticsearch.client.RequestOptions; @@ -23,10 +19,6 @@ import org.elasticsearch.client.indices.GetIndexRequest; import org.elasticsearch.client.indices.GetIndexTemplatesRequest; import org.elasticsearch.client.indices.GetIndexTemplatesResponse; -import org.elasticsearch.client.xpack.XPackUsageRequest; -import org.elasticsearch.client.xpack.XPackUsageResponse; -import org.elasticsearch.cluster.health.ClusterHealthStatus; -import org.elasticsearch.common.Priority; import org.elasticsearch.common.settings.MockSecureSettings; import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.settings.Settings; @@ -165,9 +157,7 @@ public void enableExporter() throws Exception { .put("xpack.monitoring.exporters._http.ssl.certificate_authorities", "testnode.crt") .setSecureSettings(secureSettings) .build(); - ClusterUpdateSettingsResponse response = newHighLevelClient().cluster() - .putSettings(new ClusterUpdateSettingsRequest().transientSettings(exporterSettings), RequestOptions.DEFAULT); - assertTrue(response.isAcknowledged()); + updateClusterSettings(exporterSettings); } @After @@ -181,15 +171,12 @@ public void disableExporter() throws IOException { .putNull("xpack.monitoring.exporters._http.ssl.verification_mode") .putNull("xpack.monitoring.exporters._http.ssl.certificate_authorities") .build(); - ClusterUpdateSettingsResponse response = newHighLevelClient().cluster() - .putSettings(new ClusterUpdateSettingsRequest().transientSettings(exporterSettings), RequestOptions.DEFAULT); - assertTrue(response.isAcknowledged()); + updateClusterSettings(exporterSettings); } + @SuppressWarnings("unchecked") private boolean getMonitoringUsageExportersDefined() throws Exception { - RestHighLevelClient client = newHighLevelClient(); - final XPackUsageResponse usageResponse = client.xpack().usage(new XPackUsageRequest(), RequestOptions.DEFAULT); - Map monitoringUsage = usageResponse.getUsages().get("monitoring"); + Map monitoringUsage = (Map) getAsMap("/_xpack/usage").get("monitoring"); assertThat("Monitoring feature set does not exist", monitoringUsage, notNullValue()); @SuppressWarnings("unchecked") @@ -225,13 +212,12 @@ public void testHTTPExporterWithSSL() throws Exception { }); // Waits for indices to be ready - ClusterHealthRequest healthRequest = new ClusterHealthRequest(MONITORING_PATTERN); - healthRequest.waitForStatus(ClusterHealthStatus.YELLOW); - healthRequest.waitForEvents(Priority.LANGUID); - healthRequest.waitForNoRelocatingShards(true); - healthRequest.waitForNoInitializingShards(true); - ClusterHealthResponse response = client.cluster().health(healthRequest, RequestOptions.DEFAULT); - assertThat(response.isTimedOut(), is(false)); + ensureHealth(MONITORING_PATTERN, (request) -> { + 
request.addParameter("wait_for_status", "yellow"); + request.addParameter("wait_for_events", "languid"); + request.addParameter("wait_for_no_relocating_shards", "true"); + request.addParameter("wait_for_no_initializing_shards", "true"); + }); // Checks that the HTTP exporter has successfully exported some data SearchRequest searchRequest = new SearchRequest(new String[] { MONITORING_PATTERN }, new SearchSourceBuilder().size(0)); From d4655e880185e1ca277d0b097dcfbdb6c669b89f Mon Sep 17 00:00:00 2001 From: Mary Gouseti Date: Thu, 10 Feb 2022 11:58:10 +0100 Subject: [PATCH 034/167] Discard intermediate node results when a request is cancelled (#82685) Resolves #82337 --- docs/changelog/82685.yaml | 6 ++ .../action/support/NodeResponseTracker.java | 97 +++++++++++++++++++ .../node/TransportBroadcastByNodeAction.java | 76 +++++++++------ .../support/nodes/TransportNodesAction.java | 65 +++++++++---- .../elasticsearch/tasks/CancellableTask.java | 24 +++++ .../node/tasks/CancellableTasksTests.java | 54 +++++++---- .../support/NodeResponseTrackerTests.java | 61 ++++++++++++ .../TransportBroadcastByNodeActionTests.java | 13 ++- .../nodes/TransportNodesActionTests.java | 24 +++-- 9 files changed, 341 insertions(+), 79 deletions(-) create mode 100644 docs/changelog/82685.yaml create mode 100644 server/src/main/java/org/elasticsearch/action/support/NodeResponseTracker.java create mode 100644 server/src/test/java/org/elasticsearch/action/support/NodeResponseTrackerTests.java diff --git a/docs/changelog/82685.yaml b/docs/changelog/82685.yaml new file mode 100644 index 0000000000000..3ef9e7841ba6e --- /dev/null +++ b/docs/changelog/82685.yaml @@ -0,0 +1,6 @@ +pr: 82685 +summary: Discard intermediate results upon cancellation for stats endpoints +area: Stats +type: bug +issues: + - 82337 diff --git a/server/src/main/java/org/elasticsearch/action/support/NodeResponseTracker.java b/server/src/main/java/org/elasticsearch/action/support/NodeResponseTracker.java new file mode 100644 index 0000000000000..aafd6166cb364 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/action/support/NodeResponseTracker.java @@ -0,0 +1,97 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.action.support; + +import java.util.Collection; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.concurrent.atomic.AtomicReferenceArray; + +/** + * This class tracks the intermediate responses that will be used to create aggregated cluster response to a request. It also gives the + * possibility to discard the intermediate results when asked, for example when the initial request is cancelled, in order to release the + * resources. 
+ */
+public class NodeResponseTracker {
+
+    private final AtomicInteger counter = new AtomicInteger();
+    private final int expectedResponsesCount;
+    private volatile AtomicReferenceArray<Object> responses;
+    private volatile Exception causeOfDiscarding;
+
+    public NodeResponseTracker(int size) {
+        this.expectedResponsesCount = size;
+        this.responses = new AtomicReferenceArray<>(size);
+    }
+
+    public NodeResponseTracker(Collection<Object> array) {
+        this.expectedResponsesCount = array.size();
+        this.responses = new AtomicReferenceArray<>(array.toArray());
+    }
+
+    /**
+     * This method discards the results collected so far to free up the resources.
+     * @param cause the cause of the discarding; this cause will be communicated if the discarded results are accessed later
+     */
+    public void discardIntermediateResponses(Exception cause) {
+        if (responses != null) {
+            this.causeOfDiscarding = cause;
+            responses = null;
+        }
+    }
+
+    public boolean responsesDiscarded() {
+        return responses == null;
+    }
+
+    /**
+     * This method stores a new node response if the intermediate responses haven't been discarded yet. If the responses are not discarded
+     * the method asserts that this is the first response encountered from this node to protect from miscounting the responses in case of a
+     * double invocation. If the responses have been discarded we accept this risk for simplicity.
+     * @param nodeIndex the index that represents a single node of the cluster
+     * @param response a response, which can be either a NodeResponse or an error
+     * @return true if all the nodes' responses have been received, else false
+     */
+    public boolean trackResponseAndCheckIfLast(int nodeIndex, Object response) {
+        AtomicReferenceArray<Object> responses = this.responses;
+
+        if (responsesDiscarded() == false) {
+            boolean firstEncounter = responses.compareAndSet(nodeIndex, null, response);
+            assert firstEncounter : "a response should be tracked only once";
+        }
+        return counter.incrementAndGet() == getExpectedResponseCount();
+    }
+
+    /**
+     * Returns the tracked response, or null if the response hasn't been received yet, for a specific index that represents a node of the
+     * cluster.
+     * @throws DiscardedResponsesException if the responses have been discarded
+     */
+    public Object getResponse(int nodeIndex) throws DiscardedResponsesException {
+        AtomicReferenceArray<Object> responses = this.responses;
+        if (responsesDiscarded()) {
+            throw new DiscardedResponsesException(causeOfDiscarding);
+        }
+        return responses.get(nodeIndex);
+    }
+
+    public int getExpectedResponseCount() {
+        return expectedResponsesCount;
+    }
+
+    /**
+     * This exception is thrown when the {@link NodeResponseTracker} is asked to give information about the responses after they have been
+     * discarded.
+ */ + public static class DiscardedResponsesException extends Exception { + + public DiscardedResponsesException(Exception cause) { + super(cause); + } + } +} diff --git a/server/src/main/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeAction.java b/server/src/main/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeAction.java index 5c5594aa094d6..382c9cf01693e 100644 --- a/server/src/main/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeAction.java +++ b/server/src/main/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeAction.java @@ -16,6 +16,7 @@ import org.elasticsearch.action.support.DefaultShardOperationFailedException; import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.action.support.IndicesOptions; +import org.elasticsearch.action.support.NodeResponseTracker; import org.elasticsearch.action.support.TransportActions; import org.elasticsearch.action.support.broadcast.BroadcastRequest; import org.elasticsearch.action.support.broadcast.BroadcastResponse; @@ -51,7 +52,6 @@ import java.util.List; import java.util.Map; import java.util.concurrent.atomic.AtomicInteger; -import java.util.concurrent.atomic.AtomicReferenceArray; import java.util.function.Consumer; /** @@ -118,28 +118,29 @@ public TransportBroadcastByNodeAction( private Response newResponse( Request request, - AtomicReferenceArray responses, + NodeResponseTracker nodeResponseTracker, int unavailableShardCount, Map> nodes, ClusterState clusterState - ) { + ) throws NodeResponseTracker.DiscardedResponsesException { int totalShards = 0; int successfulShards = 0; List broadcastByNodeResponses = new ArrayList<>(); List exceptions = new ArrayList<>(); - for (int i = 0; i < responses.length(); i++) { - if (responses.get(i)instanceof FailedNodeException exception) { + for (int i = 0; i < nodeResponseTracker.getExpectedResponseCount(); i++) { + Object response = nodeResponseTracker.getResponse(i); + if (response instanceof FailedNodeException exception) { totalShards += nodes.get(exception.nodeId()).size(); for (ShardRouting shard : nodes.get(exception.nodeId())) { exceptions.add(new DefaultShardOperationFailedException(shard.getIndexName(), shard.getId(), exception)); } } else { @SuppressWarnings("unchecked") - NodeResponse response = (NodeResponse) responses.get(i); - broadcastByNodeResponses.addAll(response.results); - totalShards += response.getTotalShards(); - successfulShards += response.getSuccessfulShards(); - for (BroadcastShardOperationFailedException throwable : response.getExceptions()) { + NodeResponse nodeResponse = (NodeResponse) response; + broadcastByNodeResponses.addAll(nodeResponse.results); + totalShards += nodeResponse.getTotalShards(); + successfulShards += nodeResponse.getSuccessfulShards(); + for (BroadcastShardOperationFailedException throwable : nodeResponse.getExceptions()) { if (TransportActions.isShardNotAvailableException(throwable) == false) { exceptions.add( new DefaultShardOperationFailedException( @@ -256,16 +257,15 @@ protected void doExecute(Task task, Request request, ActionListener li new AsyncAction(task, request, listener).start(); } - protected class AsyncAction { + protected class AsyncAction implements CancellableTask.CancellationListener { private final Task task; private final Request request; private final ActionListener listener; private final ClusterState clusterState; private final DiscoveryNodes nodes; private final Map> nodeIds; - private 
final AtomicReferenceArray<Object> responses;
-        private final AtomicInteger counter = new AtomicInteger();
         private final int unavailableShardCount;
+        private final NodeResponseTracker nodeResponseTracker;
 
         protected AsyncAction(Task task, Request request, ActionListener<Response> listener) {
             this.task = task;
@@ -312,10 +312,13 @@ protected AsyncAction(Task task, Request request, ActionListener liste
             }
             this.unavailableShardCount = unavailableShardCount;
-            responses = new AtomicReferenceArray<>(nodeIds.size());
+            nodeResponseTracker = new NodeResponseTracker(nodeIds.size());
         }
 
         public void start() {
+            if (task instanceof CancellableTask cancellableTask) {
+                cancellableTask.addListener(this);
+            }
             if (nodeIds.size() == 0) {
                 try {
                     onCompletion();
@@ -373,38 +376,34 @@ protected void onNodeResponse(DiscoveryNode node, int nodeIndex, NodeResponse re
                 logger.trace("received response for [{}] from node [{}]", actionName, node.getId());
             }
 
-            // this is defensive to protect against the possibility of double invocation
-            // the current implementation of TransportService#sendRequest guards against this
-            // but concurrency is hard, safety is important, and the small performance loss here does not matter
-            if (responses.compareAndSet(nodeIndex, null, response)) {
-                if (counter.incrementAndGet() == responses.length()) {
-                    onCompletion();
-                }
+            if (nodeResponseTracker.trackResponseAndCheckIfLast(nodeIndex, response)) {
+                onCompletion();
             }
         }
 
         protected void onNodeFailure(DiscoveryNode node, int nodeIndex, Throwable t) {
             String nodeId = node.getId();
             logger.debug(new ParameterizedMessage("failed to execute [{}] on node [{}]", actionName, nodeId), t);
-
-            // this is defensive to protect against the possibility of double invocation
-            // the current implementation of TransportService#sendRequest guards against this
-            // but concurrency is hard, safety is important, and the small performance loss here does not matter
-            if (responses.compareAndSet(nodeIndex, null, new FailedNodeException(nodeId, "Failed node [" + nodeId + "]", t))) {
-                if (counter.incrementAndGet() == responses.length()) {
-                    onCompletion();
-                }
+            if (nodeResponseTracker.trackResponseAndCheckIfLast(
+                nodeIndex,
+                new FailedNodeException(nodeId, "Failed node [" + nodeId + "]", t)
+            )) {
+                onCompletion();
             }
         }
 
         protected void onCompletion() {
-            if (task instanceof CancellableTask && ((CancellableTask) task).notifyIfCancelled(listener)) {
+            if ((task instanceof CancellableTask t) && t.notifyIfCancelled(listener)) {
                 return;
             }
 
             Response response = null;
             try {
-                response = newResponse(request, responses, unavailableShardCount, nodeIds, clusterState);
+                response = newResponse(request, nodeResponseTracker, unavailableShardCount, nodeIds, clusterState);
+            } catch (NodeResponseTracker.DiscardedResponsesException e) {
+                // We propagate the reason that the results were discarded, in this case the task cancellation, in case the
+                // listener needs to take follow-up actions
+                listener.onFailure((Exception) e.getCause());
             } catch (Exception e) {
                 logger.debug("failed to combine responses from nodes", e);
                 listener.onFailure(e);
@@ -417,6 +416,21 @@ protected void onCompletion() {
                 }
             }
         }
+
+        @Override
+        public void onCancelled() {
+            assert task instanceof CancellableTask : "task must be cancellable";
+            try {
+                ((CancellableTask) task).ensureNotCancelled();
+            } catch (TaskCancelledException e) {
+                nodeResponseTracker.discardIntermediateResponses(e);
+            }
+        }
+
+        // For testing purposes
+        public NodeResponseTracker getNodeResponseTracker() {
+            return nodeResponseTracker;
+        }
    }

    class
BroadcastByNodeTransportRequestHandler implements TransportRequestHandler { diff --git a/server/src/main/java/org/elasticsearch/action/support/nodes/TransportNodesAction.java b/server/src/main/java/org/elasticsearch/action/support/nodes/TransportNodesAction.java index 5b13f3aab917d..c93f688b5a16d 100644 --- a/server/src/main/java/org/elasticsearch/action/support/nodes/TransportNodesAction.java +++ b/server/src/main/java/org/elasticsearch/action/support/nodes/TransportNodesAction.java @@ -13,6 +13,7 @@ import org.elasticsearch.action.FailedNodeException; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; +import org.elasticsearch.action.support.NodeResponseTracker; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.service.ClusterService; @@ -20,6 +21,7 @@ import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.tasks.CancellableTask; import org.elasticsearch.tasks.Task; +import org.elasticsearch.tasks.TaskCancelledException; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportChannel; import org.elasticsearch.transport.TransportException; @@ -34,8 +36,6 @@ import java.util.Arrays; import java.util.List; import java.util.Objects; -import java.util.concurrent.atomic.AtomicInteger; -import java.util.concurrent.atomic.AtomicReferenceArray; public abstract class TransportNodesAction< NodesRequest extends BaseNodesRequest, @@ -128,14 +128,15 @@ protected void doExecute(Task task, NodesRequest request, ActionListener nodesResponses, ActionListener listener) { + void newResponse(Task task, NodesRequest request, NodeResponseTracker nodeResponseTracker, ActionListener listener) + throws NodeResponseTracker.DiscardedResponsesException { - if (nodesResponses == null) { + if (nodeResponseTracker == null) { listener.onFailure(new NullPointerException("nodesResponses")); return; } @@ -143,11 +144,10 @@ void newResponse(Task task, NodesRequest request, AtomicReferenceArray nodesR final List responses = new ArrayList<>(); final List failures = new ArrayList<>(); - for (int i = 0; i < nodesResponses.length(); ++i) { - Object response = nodesResponses.get(i); - - if (response instanceof FailedNodeException) { - failures.add((FailedNodeException) response); + for (int i = 0; i < nodeResponseTracker.getExpectedResponseCount(); ++i) { + Object response = nodeResponseTracker.getResponse(i); + if (nodeResponseTracker.getResponse(i)instanceof FailedNodeException failedNodeException) { + failures.add(failedNodeException); } else { responses.add(nodeResponseClass.cast(response)); } @@ -203,12 +203,11 @@ protected String getTransportNodeAction(DiscoveryNode node) { return transportNodeAction; } - class AsyncAction { + class AsyncAction implements CancellableTask.CancellationListener { private final NodesRequest request; private final ActionListener listener; - private final AtomicReferenceArray responses; - private final AtomicInteger counter = new AtomicInteger(); + private final NodeResponseTracker nodeResponseTracker; private final Task task; AsyncAction(Task task, NodesRequest request, ActionListener listener) { @@ -219,10 +218,13 @@ class AsyncAction { resolveRequest(request, clusterService.state()); assert request.concreteNodes() != null; } - this.responses = new AtomicReferenceArray<>(request.concreteNodes().length); + this.nodeResponseTracker = new NodeResponseTracker(request.concreteNodes().length); } 
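        // A minimal usage sketch of the tracker life-cycle this AsyncAction drives (an illustrative aside, assuming
        // two nodes and placeholder response objects; the calls themselves are the NodeResponseTracker API added above):
        //
        //     NodeResponseTracker tracker = new NodeResponseTracker(2);   // expect responses from two nodes
        //     tracker.trackResponseAndCheckIfLast(0, response0);          // false: one node still pending
        //     tracker.discardIntermediateResponses(new TaskCancelledException("task cancelled [simulated]"));
        //     tracker.trackResponseAndCheckIfLast(1, response1);          // true: last response arrived, completion runs
        //     tracker.getResponse(0);                                     // now throws DiscardedResponsesException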
void start() {
+            if (task instanceof CancellableTask cancellableTask) {
+                cancellableTask.addListener(this);
+            }
             final DiscoveryNode[] nodes = request.concreteNodes();
             if (nodes.length == 0) {
                 finishHim();
@@ -267,28 +269,49 @@ public void handleException(TransportException exp) {
             }
         }
 
+        // For testing purposes
+        NodeResponseTracker getNodeResponseTracker() {
+            return nodeResponseTracker;
+        }
+
         private void onOperation(int idx, NodeResponse nodeResponse) {
-            responses.set(idx, nodeResponse);
-            if (counter.incrementAndGet() == responses.length()) {
+            if (nodeResponseTracker.trackResponseAndCheckIfLast(idx, nodeResponse)) {
                 finishHim();
             }
         }
 
         private void onFailure(int idx, String nodeId, Throwable t) {
             logger.debug(new ParameterizedMessage("failed to execute on node [{}]", nodeId), t);
-            responses.set(idx, new FailedNodeException(nodeId, "Failed node [" + nodeId + "]", t));
-            if (counter.incrementAndGet() == responses.length()) {
+            if (nodeResponseTracker.trackResponseAndCheckIfLast(idx, new FailedNodeException(nodeId, "Failed node [" + nodeId + "]", t))) {
                 finishHim();
             }
         }
 
         private void finishHim() {
-            if (task instanceof CancellableTask && ((CancellableTask) task).notifyIfCancelled(listener)) {
+            if ((task instanceof CancellableTask t) && t.notifyIfCancelled(listener)) {
                 return;
             }
             final String executor = finalExecutor.equals(ThreadPool.Names.SAME) ? ThreadPool.Names.GENERIC : finalExecutor;
-            threadPool.executor(executor).execute(() -> newResponse(task, request, responses, listener));
+            threadPool.executor(executor).execute(() -> {
+                try {
+                    newResponse(task, request, nodeResponseTracker, listener);
+                } catch (NodeResponseTracker.DiscardedResponsesException e) {
+                    // We propagate the reason that the results were discarded, in this case the task cancellation, in case the
+                    // listener needs to take follow-up actions
+                    listener.onFailure((Exception) e.getCause());
+                }
+            });
+        }
+
+        @Override
+        public void onCancelled() {
+            assert task instanceof CancellableTask : "task must be cancellable";
+            try {
+                ((CancellableTask) task).ensureNotCancelled();
+            } catch (TaskCancelledException e) {
+                nodeResponseTracker.discardIntermediateResponses(e);
+            }
        }
    }
diff --git a/server/src/main/java/org/elasticsearch/tasks/CancellableTask.java b/server/src/main/java/org/elasticsearch/tasks/CancellableTask.java
index b318d485317b7..9010a9d99d3c4 100644
--- a/server/src/main/java/org/elasticsearch/tasks/CancellableTask.java
+++ b/server/src/main/java/org/elasticsearch/tasks/CancellableTask.java
@@ -12,6 +12,7 @@
 import org.elasticsearch.core.Nullable;
 
 import java.util.Map;
+import java.util.concurrent.ConcurrentLinkedQueue;
 
 /**
  * A task that can be cancelled
@@ -20,6 +21,7 @@ public class CancellableTask extends Task {
 
     private volatile String reason;
     private volatile boolean isCancelled;
+    private final ConcurrentLinkedQueue<CancellationListener> listeners = new ConcurrentLinkedQueue<>();
 
     public CancellableTask(long id, String type, String action, String description, TaskId parentTaskId, Map<String, String> headers) {
         super(id, type, action, description, parentTaskId, headers);
@@ -37,6 +39,7 @@ final void cancel(String reason) {
             this.isCancelled = true;
             this.reason = reason;
         }
+        listeners.forEach(CancellationListener::onCancelled);
         onCancelled();
     }
 
@@ -67,6 +70,20 @@ public final String getReasonCancelled() {
         return reason;
     }
 
+    /**
+     * This method adds a listener that needs to be notified if this task is cancelled.
+ */ + public final void addListener(CancellationListener listener) { + synchronized (this) { + if (this.isCancelled == false) { + listeners.add(listener); + } + } + if (isCancelled) { + listener.onCancelled(); + } + } + /** * Called after the task is cancelled so that it can take any actions that it has to take. */ @@ -103,4 +120,11 @@ private TaskCancelledException getTaskCancelledException() { assert reason != null; return new TaskCancelledException("task cancelled [" + reason + ']'); } + + /** + * This interface is implemented by any class that needs to react to the cancellation of this task. + */ + public interface CancellationListener { + void onCancelled(); + } } diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/CancellableTasksTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/CancellableTasksTests.java index ef04ad960e607..82677663b01c0 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/CancellableTasksTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/CancellableTasksTests.java @@ -187,6 +187,19 @@ protected NodeResponse nodeOperation(CancellableNodeRequest request, Task task) } } + /** + * Simulates a cancellation listener and sets a flag to true if the task was cancelled + */ + static class CancellableTestCancellationListener implements CancellableTask.CancellationListener { + + final AtomicBoolean calledUponCancellation = new AtomicBoolean(false); + + @Override + public void onCancelled() { + calledUponCancellation.set(true); + } + } + private Task startCancellableTestNodesAction( boolean waitForActionToStart, int runNodesCount, @@ -252,6 +265,7 @@ public void testBasicTaskCancellation() throws Exception { setupTestNodes(Settings.EMPTY); connectNodes(testNodes); CountDownLatch responseLatch = new CountDownLatch(1); + AtomicBoolean listenerCalledUponCancellation = new AtomicBoolean(false); boolean waitForActionToStart = randomBoolean(); logger.info("waitForActionToStart is set to {}", waitForActionToStart); final AtomicReference responseReference = new AtomicReference<>(); @@ -260,24 +274,23 @@ public void testBasicTaskCancellation() throws Exception { // Block at least 1 node, otherwise it's quite easy to end up in a race condition where the node tasks // have finished before the cancel request has arrived int blockedNodesCount = randomIntBetween(1, runNodesCount); - Task mainTask = startCancellableTestNodesAction( - waitForActionToStart, - runNodesCount, - blockedNodesCount, - new ActionListener() { - @Override - public void onResponse(NodesResponse listTasksResponse) { - responseReference.set(listTasksResponse); - responseLatch.countDown(); - } + Task mainTask = startCancellableTestNodesAction(waitForActionToStart, runNodesCount, blockedNodesCount, new ActionListener<>() { + @Override + public void onResponse(NodesResponse listTasksResponse) { + responseReference.set(listTasksResponse); + responseLatch.countDown(); + } - @Override - public void onFailure(Exception e) { - throwableReference.set(e); - responseLatch.countDown(); - } + @Override + public void onFailure(Exception e) { + throwableReference.set(e); + responseLatch.countDown(); } - ); + }); + + assert mainTask instanceof CancellableTask; + CancellableTestCancellationListener listenerAddedBeforeCancellation = new CancellableTestCancellationListener(); + ((CancellableTask) mainTask).addListener(listenerAddedBeforeCancellation); // Cancel main task CancelTasksRequest request = new 
CancelTasksRequest(); @@ -311,6 +324,13 @@ public void onFailure(Exception e) { for (TaskInfo taskInfo : response.getTasks()) { assertTrue(taskInfo.cancellable()); } + + CancellableTestCancellationListener listenerAddedAfterCancellation = new CancellableTestCancellationListener(); + ((CancellableTask) mainTask).addListener(listenerAddedAfterCancellation); + + // Verify both cancellation listeners have been notified + assertTrue(listenerAddedBeforeCancellation.calledUponCancellation.get()); + assertTrue(listenerAddedAfterCancellation.calledUponCancellation.get()); } // Make sure that tasks are no longer running @@ -337,7 +357,7 @@ public void testChildTasksCancellation() throws Exception { final AtomicReference throwableReference = new AtomicReference<>(); int runNodesCount = randomIntBetween(1, nodesCount); int blockedNodesCount = randomIntBetween(0, runNodesCount); - Task mainTask = startCancellableTestNodesAction(true, runNodesCount, blockedNodesCount, new ActionListener() { + Task mainTask = startCancellableTestNodesAction(true, runNodesCount, blockedNodesCount, new ActionListener<>() { @Override public void onResponse(NodesResponse listTasksResponse) { responseReference.set(listTasksResponse); diff --git a/server/src/test/java/org/elasticsearch/action/support/NodeResponseTrackerTests.java b/server/src/test/java/org/elasticsearch/action/support/NodeResponseTrackerTests.java new file mode 100644 index 0000000000000..11d2ee1f12a04 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/action/support/NodeResponseTrackerTests.java @@ -0,0 +1,61 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.action.support; + +import org.elasticsearch.test.ESTestCase; + +public class NodeResponseTrackerTests extends ESTestCase { + + public void testAllResponsesReceived() throws Exception { + int nodes = randomIntBetween(1, 10); + NodeResponseTracker intermediateNodeResponses = new NodeResponseTracker(nodes); + for (int i = 0; i < nodes; i++) { + boolean isLast = i == nodes - 1; + assertEquals( + isLast, + intermediateNodeResponses.trackResponseAndCheckIfLast(i, randomBoolean() ? i : new Exception("from node " + i)) + ); + } + + assertFalse(intermediateNodeResponses.responsesDiscarded()); + assertEquals(nodes, intermediateNodeResponses.getExpectedResponseCount()); + for (int i = 0; i < nodes; i++) { + assertNotNull(intermediateNodeResponses.getResponse(i)); + if (intermediateNodeResponses.getResponse(i)instanceof Integer nodeResponse) { + assertEquals(i, nodeResponse.intValue()); + } + } + } + + public void testDiscardingResults() { + int nodes = randomIntBetween(1, 10); + int cancelAt = randomIntBetween(0, Math.max(0, nodes - 2)); + NodeResponseTracker intermediateNodeResponses = new NodeResponseTracker(nodes); + for (int i = 0; i < nodes; i++) { + if (i == cancelAt) { + intermediateNodeResponses.discardIntermediateResponses(new Exception("simulated")); + } + boolean isLast = i == nodes - 1; + assertEquals( + isLast, + intermediateNodeResponses.trackResponseAndCheckIfLast(i, randomBoolean() ? 
i : new Exception("from node " + i)) + ); + } + + assertTrue(intermediateNodeResponses.responsesDiscarded()); + assertEquals(nodes, intermediateNodeResponses.getExpectedResponseCount()); + expectThrows(NodeResponseTracker.DiscardedResponsesException.class, () -> intermediateNodeResponses.getResponse(0)); + } + + public void testResponseIsRegisteredOnlyOnce() { + NodeResponseTracker intermediateNodeResponses = new NodeResponseTracker(1); + assertTrue(intermediateNodeResponses.trackResponseAndCheckIfLast(0, "response1")); + expectThrows(AssertionError.class, () -> intermediateNodeResponses.trackResponseAndCheckIfLast(0, "response2")); + } +} diff --git a/server/src/test/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeActionTests.java b/server/src/test/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeActionTests.java index 948288fe06281..93defb70ec466 100644 --- a/server/src/test/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeActionTests.java @@ -537,14 +537,23 @@ public void testResultAggregation() throws ExecutionException, InterruptedExcept public void testNoResultAggregationIfTaskCancelled() { Request request = new Request(new String[] { TEST_INDEX }); PlainActionFuture listener = new PlainActionFuture<>(); - action.new AsyncAction(cancelledTask(), request, listener).start(); + final CancellableTask task = new CancellableTask(randomLong(), "transport", "action", "", null, emptyMap()); + TransportBroadcastByNodeAction.AsyncAction asyncAction = + action.new AsyncAction(task, request, listener); + asyncAction.start(); Map> capturedRequests = transport.getCapturedRequestsByTargetNodeAndClear(); - + int cancelAt = randomIntBetween(0, Math.max(0, capturedRequests.size() - 2)); + int i = 0; for (Map.Entry> entry : capturedRequests.entrySet()) { + if (cancelAt == i) { + TaskCancelHelper.cancel(task, "simulated"); + } transport.handleRemoteError(entry.getValue().get(0).requestId(), new ElasticsearchException("simulated")); + i++; } assertTrue(listener.isDone()); + assertTrue(asyncAction.getNodeResponseTracker().responsesDiscarded()); expectThrows(ExecutionException.class, TaskCancelledException.class, listener::get); } diff --git a/server/src/test/java/org/elasticsearch/action/support/nodes/TransportNodesActionTests.java b/server/src/test/java/org/elasticsearch/action/support/nodes/TransportNodesActionTests.java index ee43aaa5b5e90..def2e4558bd23 100644 --- a/server/src/test/java/org/elasticsearch/action/support/nodes/TransportNodesActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/support/nodes/TransportNodesActionTests.java @@ -11,6 +11,7 @@ import org.elasticsearch.Version; import org.elasticsearch.action.FailedNodeException; import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.NodeResponseTracker; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.action.support.broadcast.node.TransportBroadcastByNodeActionTests; import org.elasticsearch.cluster.ClusterName; @@ -47,7 +48,6 @@ import java.util.Set; import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; -import java.util.concurrent.atomic.AtomicReferenceArray; import java.util.function.Supplier; import static java.util.Collections.emptyMap; @@ -94,14 +94,14 @@ public void testNodesSelectors() { 
assertEquals(clusterService.state().nodes().resolveNodes(finalNodesIds).length, capturedRequests.size()); } - public void testNewResponseNullArray() { + public void testNewResponseNullArray() throws Exception { TransportNodesAction action = getTestTransportNodesAction(); final PlainActionFuture future = new PlainActionFuture<>(); action.newResponse(new Task(1, "test", "test", "", null, emptyMap()), new TestNodesRequest(), null, future); expectThrows(NullPointerException.class, future::actionGet); } - public void testNewResponse() { + public void testNewResponse() throws Exception { TestTransportNodesAction action = getTestTransportNodesAction(); TestNodesRequest request = new TestNodesRequest(); List expectedNodeResponses = mockList(TestNodeResponse::new, randomIntBetween(0, 2)); @@ -120,10 +120,10 @@ public void testNewResponse() { Collections.shuffle(allResponses, random()); - AtomicReferenceArray atomicArray = new AtomicReferenceArray<>(allResponses.toArray()); + NodeResponseTracker nodeResponseCollector = new NodeResponseTracker(allResponses); final PlainActionFuture future = new PlainActionFuture<>(); - action.newResponse(new Task(1, "test", "test", "", null, emptyMap()), request, atomicArray, future); + action.newResponse(new Task(1, "test", "test", "", null, emptyMap()), request, nodeResponseCollector, future); TestNodesResponse response = future.actionGet(); assertSame(request, response.request); @@ -146,7 +146,7 @@ public void testCustomResolving() throws Exception { assertEquals(clusterService.state().nodes().getDataNodes().size(), capturedRequests.size()); } - public void testTaskCancellationThrowsException() { + public void testTaskCancellation() { TransportNodesAction action = getTestTransportNodesAction(); List nodeIds = new ArrayList<>(); for (DiscoveryNode node : clusterService.state().nodes()) { @@ -156,10 +156,16 @@ public void testTaskCancellationThrowsException() { TestNodesRequest request = new TestNodesRequest(nodeIds.toArray(new String[0])); PlainActionFuture listener = new PlainActionFuture<>(); CancellableTask cancellableTask = new CancellableTask(randomLong(), "transport", "action", "", null, emptyMap()); - TaskCancelHelper.cancel(cancellableTask, "simulated"); - action.doExecute(cancellableTask, request, listener); + TransportNodesAction.AsyncAction asyncAction = + action.new AsyncAction(cancellableTask, request, listener); + asyncAction.start(); Map> capturedRequests = transport.getCapturedRequestsByTargetNodeAndClear(); + int cancelAt = randomIntBetween(0, Math.max(0, capturedRequests.values().size() - 2)); + int requestCount = 0; for (List requests : capturedRequests.values()) { + if (requestCount == cancelAt) { + TaskCancelHelper.cancel(cancellableTask, "simulated"); + } for (CapturingTransport.CapturedRequest capturedRequest : requests) { if (randomBoolean()) { transport.handleResponse(capturedRequest.requestId(), new TestNodeResponse(capturedRequest.node())); @@ -167,9 +173,11 @@ public void testTaskCancellationThrowsException() { transport.handleRemoteError(capturedRequest.requestId(), new TaskCancelledException("simulated")); } } + requestCount++; } assertTrue(listener.isDone()); + assertTrue(asyncAction.getNodeResponseTracker().responsesDiscarded()); expectThrows(ExecutionException.class, TaskCancelledException.class, listener::get); } From 3991961de359d50dac725ee4f5bfcde2f68c7847 Mon Sep 17 00:00:00 2001 From: David Turner Date: Thu, 10 Feb 2022 12:40:41 +0000 Subject: [PATCH 035/167] Return result from cluster state task execution (#83562) The 
`MasterService` executes batches of tasks which compute changes to the `ClusterState`. After executing each batch the `MasterService` publishes the updated cluster state and notifies every task in the batch when the publication completes. Many tasks compute some kind of result during their execution which needs to be made available to the publication completion handler for subsequent activities. Today there's no good general way to pass anything to the completion handler other than the fact that the publication succeeded. Some tasks work around this by storing their result in the `ClusterState` itself. Others use the executor to capture the result and pass it through. Neither solution works well with batching: later tasks in a batch may overwrite the part of the `ClusterState` containing the results of earlier tasks, and batching executors are re-used across batches. This commit adjusts the `ClusterStateTaskExecutor` interface so that now implementations must supply a listener for each task they successfully execute. The `MasterService` collects the listeners for the batch and notifies them all when publication completes. This gives the executor control over the completion handler of each task which lets it pass in any extra data needed. --- .../DesiredNodesClusterStateTaskExecutor.java | 2 +- .../indices/create/AutoCreateAction.java | 2 +- .../rollover/TransportRolloverAction.java | 2 +- .../cluster/ClusterStateTaskExecutor.java | 75 +++-- .../cluster/ClusterStateTaskListener.java | 10 +- .../cluster/LocalMasterServiceTask.java | 9 +- .../action/shard/ShardStateAction.java | 18 +- .../coordination/JoinTaskExecutor.java | 5 +- .../NodeRemovalClusterStateTaskExecutor.java | 18 +- .../metadata/MetadataMappingService.java | 3 +- .../MetadataUpdateSettingsService.java | 2 +- .../cluster/service/MasterService.java | 101 ++++--- .../snapshots/SnapshotsService.java | 12 +- ...rdFailedClusterStateTaskExecutorTests.java | 6 +- .../cluster/service/MasterServiceTests.java | 261 +++++++++++++++++- .../xpack/ilm/IndexLifecycleRunner.java | 2 +- 16 files changed, 437 insertions(+), 91 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/desirednodes/DesiredNodesClusterStateTaskExecutor.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/desirednodes/DesiredNodesClusterStateTaskExecutor.java index 1f09386f535ac..d3be7bad6eb82 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/desirednodes/DesiredNodesClusterStateTaskExecutor.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/desirednodes/DesiredNodesClusterStateTaskExecutor.java @@ -24,7 +24,7 @@ public ClusterTasksResult execute(ClusterState currentSt for (ClusterStateUpdateTask task : tasks) { try { clusterState = task.execute(clusterState); - builder.success(task); + builder.success(task, new LegacyClusterTaskResultActionListener(task, currentState)); } catch (Exception e) { builder.failure(task, e); } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/create/AutoCreateAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/create/AutoCreateAction.java index adabbd19f6fc1..5cf491d42a6de 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/create/AutoCreateAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/create/AutoCreateAction.java @@ -120,7 +120,7 @@ public TransportAction( // each duplicate task task.indexNameRef.set(successfulBefore.indexNameRef.get()); } - 
builder.success(task); + builder.success(task, new ClusterStateTaskExecutor.LegacyClusterTaskResultActionListener(task, currentState)); } catch (Exception e) { builder.failure(task, e); } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/TransportRolloverAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/TransportRolloverAction.java index 8a57f56f6777d..f46ae3bd62d45 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/TransportRolloverAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/TransportRolloverAction.java @@ -380,7 +380,7 @@ public ClusterTasksResult execute(ClusterState currentState, List< for (RolloverTask task : tasks) { try { state = task.performRollover(state); - builder.success(task); + builder.success(task, new LegacyClusterTaskResultActionListener(task, currentState)); } catch (Exception e) { builder.failure(task, e); } diff --git a/server/src/main/java/org/elasticsearch/cluster/ClusterStateTaskExecutor.java b/server/src/main/java/org/elasticsearch/cluster/ClusterStateTaskExecutor.java index b9ceef4c8e98a..d5d1743706bd1 100644 --- a/server/src/main/java/org/elasticsearch/cluster/ClusterStateTaskExecutor.java +++ b/server/src/main/java/org/elasticsearch/cluster/ClusterStateTaskExecutor.java @@ -7,12 +7,14 @@ */ package org.elasticsearch.cluster; +import org.elasticsearch.action.ActionListener; import org.elasticsearch.common.Strings; import org.elasticsearch.core.Nullable; import java.util.IdentityHashMap; import java.util.List; import java.util.Map; +import java.util.Objects; public interface ClusterStateTaskExecutor { /** @@ -75,17 +77,28 @@ public static Builder builder() { public static class Builder { private final Map executionResults = new IdentityHashMap<>(); - public Builder success(T task) { - return result(task, TaskResult.success()); - } - - public Builder successes(Iterable tasks) { - for (T task : tasks) { - success(task); - } - return this; + /** + * Record that the cluster state update task succeeded. + * + * @param taskListener A listener for the completion of the resulting cluster state publication. This listener is completed with + * the cluster state that was published (or the publication exception that occurred) in the thread context + * in which the task was submitted. The task's {@link ClusterStateTaskListener#clusterStateProcessed} method + * is not called directly by the master service, nor is {@link ClusterStateTaskListener#onFailure} once the + * task execution has succeeded, but legacy implementations may use this listener to call those methods. + *
<p>
    + * The listener should prefer not to use the published state for things like determining the result of a + * task. The task may have been executed as part of a batch, and later tasks in the batch may overwrite + * the results from earlier tasks. Instead the listener should independently capture the information it + * needs to properly process the completion of a cluster state update. + */ + // TODO remove all remaining usages of the published state and then make this an ActionListener + public Builder success(T task, ActionListener taskListener) { + return result(task, TaskResult.success(taskListener)); } + /** + * Record that the cluster state update task failed. + */ public Builder failure(T task, Exception e) { return result(task, TaskResult.failure(e)); } @@ -109,19 +122,22 @@ public ClusterTasksResult build(ClusterState resultingState) { } } - record TaskResult(Exception failure) { - private static final TaskResult SUCCESS = new TaskResult(null); + record TaskResult(@Nullable ActionListener taskListener, @Nullable Exception failure) { + + public TaskResult { + assert failure == null ^ taskListener == null; + } - public static TaskResult success() { - return SUCCESS; + public static TaskResult success(ActionListener taskListener) { + return new TaskResult(Objects.requireNonNull(taskListener), null); } public static TaskResult failure(Exception failure) { - return new TaskResult(failure); + return new TaskResult(null, Objects.requireNonNull(failure)); } public boolean isSuccess() { - return this == SUCCESS; + return failure == null; } public Exception getFailure() { @@ -139,8 +155,11 @@ static ClusterStateTaskExecutor unbatched( @Override public ClusterTasksResult execute(ClusterState currentState, List tasks) throws Exception { assert tasks.size() == 1 : "this only supports a single task but received " + tasks; - ClusterState result = tasks.get(0).execute(currentState); - return ClusterTasksResult.builder().successes(tasks).build(result); + final T task = tasks.get(0); + final ClusterState newState = task.execute(currentState); + return ClusterTasksResult.builder() + .success(task, new LegacyClusterTaskResultActionListener(task, currentState)) + .build(newState); } @Override @@ -150,4 +169,26 @@ public String describeTasks(List tasks) { }; } + /** + * An {@link ActionListener} for passing to {@link ClusterStateTaskExecutor.ClusterTasksResult.Builder#success} which preserves the + * legacy behaviour of calling {@link ClusterStateTaskListener#clusterStateProcessed} or {@link ClusterStateTaskListener#onFailure}. + *
<p>
+     * New implementations should use a dedicated listener rather than relying on this legacy behaviour.
+     */
+    // TODO remove all remaining usages of this listener
+    record LegacyClusterTaskResultActionListener(ClusterStateTaskListener task, ClusterState originalState)
+        implements
+            ActionListener<ClusterState> {
+
+        @Override
+        public void onResponse(ClusterState publishedState) {
+            task.clusterStateProcessed(originalState, publishedState);
+        }
+
+        @Override
+        public void onFailure(Exception e) {
+            task.onFailure(e);
+        }
+    }
+
 }
diff --git a/server/src/main/java/org/elasticsearch/cluster/ClusterStateTaskListener.java b/server/src/main/java/org/elasticsearch/cluster/ClusterStateTaskListener.java
index da014ddb780ac..8ef25b18a4b06 100644
--- a/server/src/main/java/org/elasticsearch/cluster/ClusterStateTaskListener.java
+++ b/server/src/main/java/org/elasticsearch/cluster/ClusterStateTaskListener.java
@@ -32,12 +32,18 @@ default void onNoLongerMaster() {
     }
 
     /**
-     * Called when the result of the {@link ClusterStateTaskExecutor#execute(ClusterState, List)} have been processed
-     * properly by all listeners.
+     * Called when the result of the {@link ClusterStateTaskExecutor#execute(ClusterState, List)} method has been processed properly by all
+     * listeners.
+     *
+     * The {@code newState} parameter is the state that was ultimately published. This can lead to surprising behaviour if tasks are
+     * batched together: a later task in the batch may undo or overwrite the changes made by an earlier task. In general you should prefer
+     * to ignore the published state and instead handle the success of a publication via the listener that the executor passes to
+     * {@link ClusterStateTaskExecutor.ClusterTasksResult.Builder#success}.
      *
      * Implementations of this callback must not throw exceptions: an exception thrown here is logged by the master service at {@code ERROR}
      * level and otherwise ignored, except in tests where it raises an {@link AssertionError}. If log-and-ignore is the right behaviour then
      * implementations must do so themselves, typically using a more specific logger and at a less dramatic log level.
      */
+    // TODO: replace all remaining usages of this method with dedicated listeners and then remove it.
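    // An illustrative sketch of the dedicated-listener alternative (hypothetical resultListener/capturedResult fields;
    // only the Builder#success(task, listener) hook is from this commit): instead of overriding clusterStateProcessed,
    // an executor can complete the task's own listener with whatever result it captured during execution, e.g.
    //
    //     builder.success(task, ActionListener.wrap(
    //         publishedState -> task.resultListener.onResponse(task.capturedResult),
    //         task.resultListener::onFailure
    //     ));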
default void clusterStateProcessed(ClusterState oldState, ClusterState newState) {} } diff --git a/server/src/main/java/org/elasticsearch/cluster/LocalMasterServiceTask.java b/server/src/main/java/org/elasticsearch/cluster/LocalMasterServiceTask.java index fff3894c16d4a..87462cffcadce 100644 --- a/server/src/main/java/org/elasticsearch/cluster/LocalMasterServiceTask.java +++ b/server/src/main/java/org/elasticsearch/cluster/LocalMasterServiceTask.java @@ -47,10 +47,13 @@ public String describeTasks(List tasks) { @Override public ClusterTasksResult execute(ClusterState currentState, List tasks) throws Exception { - assert tasks.size() == 1 && tasks.get(0) == LocalMasterServiceTask.this + final LocalMasterServiceTask thisTask = LocalMasterServiceTask.this; + assert tasks.size() == 1 && tasks.get(0) == thisTask : "expected one-element task list containing current object but was " + tasks; - LocalMasterServiceTask.this.execute(currentState); - return ClusterTasksResult.builder().successes(tasks).build(currentState); + thisTask.execute(currentState); + return ClusterTasksResult.builder() + .success(thisTask, new LegacyClusterTaskResultActionListener(thisTask, currentState)) + .build(currentState); } } ); diff --git a/server/src/main/java/org/elasticsearch/cluster/action/shard/ShardStateAction.java b/server/src/main/java/org/elasticsearch/cluster/action/shard/ShardStateAction.java index ac92b489ebb48..720fc0ce9efa7 100644 --- a/server/src/main/java/org/elasticsearch/cluster/action/shard/ShardStateAction.java +++ b/server/src/main/java/org/elasticsearch/cluster/action/shard/ShardStateAction.java @@ -333,7 +333,7 @@ public ClusterTasksResult execute(ClusterState currentSta entry, entry.getShardId().getIndex() ); - batchResultBuilder.success(task); + batchResultBuilder.success(task, new LegacyClusterTaskResultActionListener(task, currentState)); } else { // The primary term is 0 if the shard failed itself. It is > 0 if a write was done on a primary but was failed to be // replicated to the shard copy with the provided allocation id. In case where the shard failed itself, it's ok to just @@ -393,7 +393,7 @@ public ClusterTasksResult execute(ClusterState currentSta } else { // tasks that correspond to non-existent shards are marked as successful logger.debug("{} ignoring shard failed task [{}] (shard does not exist anymore)", entry.getShardId(), entry); - batchResultBuilder.success(task); + batchResultBuilder.success(task, new LegacyClusterTaskResultActionListener(task, currentState)); } } else { // failing a shard also possibly marks it as stale (see IndexMetadataUpdater) @@ -408,7 +408,9 @@ public ClusterTasksResult execute(ClusterState currentSta ClusterState maybeUpdatedState = currentState; try { maybeUpdatedState = applyFailedShards(currentState, failedShardsToBeApplied, staleShardsToBeApplied); - batchResultBuilder.successes(tasksToBeApplied); + for (var task : tasksToBeApplied) { + batchResultBuilder.success(task, new LegacyClusterTaskResultActionListener(task, currentState)); + } } catch (Exception e) { logger.warn(() -> new ParameterizedMessage("failed to apply failed shards {}", failedShardsToBeApplied), e); // failures are communicated back to the requester @@ -637,7 +639,7 @@ public ClusterTasksResult execute(ClusterState currentSt // requests might still be in flight even after the shard has already been started or failed on the master. We just // ignore these requests for now. 
logger.debug("{} ignoring shard started task [{}] (shard does not exist anymore)", entry.shardId, entry); - builder.success(task); + builder.success(task, new LegacyClusterTaskResultActionListener(task, currentState)); } else { if (matched.primary() && entry.primaryTerm > 0) { final IndexMetadata indexMetadata = currentState.metadata().index(entry.shardId.getIndex()); @@ -658,7 +660,7 @@ public ClusterTasksResult execute(ClusterState currentSt entry.primaryTerm, currentPrimaryTerm ); - builder.success(task); + builder.success(task, new LegacyClusterTaskResultActionListener(task, currentState)); continue; } } @@ -671,7 +673,7 @@ public ClusterTasksResult execute(ClusterState currentSt entry, matched ); - builder.success(task); + builder.success(task, new LegacyClusterTaskResultActionListener(task, currentState)); } else { // remove duplicate actions as allocation service expects a clean list without duplicates if (seenShardRoutings.contains(matched)) { @@ -727,7 +729,9 @@ public ClusterTasksResult execute(ClusterState currentSt assert assertStartedIndicesHaveCompleteTimestampRanges(maybeUpdatedState); - builder.successes(tasksToBeApplied); + for (var task : tasksToBeApplied) { + builder.success(task, new LegacyClusterTaskResultActionListener(task, currentState)); + } } catch (Exception e) { logger.warn(() -> new ParameterizedMessage("failed to apply started shards {}", shardRoutingsToBeApplied), e); builder.failures(tasksToBeApplied, e); diff --git a/server/src/main/java/org/elasticsearch/cluster/coordination/JoinTaskExecutor.java b/server/src/main/java/org/elasticsearch/cluster/coordination/JoinTaskExecutor.java index 03d58a9760ca7..740dda3035666 100644 --- a/server/src/main/java/org/elasticsearch/cluster/coordination/JoinTaskExecutor.java +++ b/server/src/main/java/org/elasticsearch/cluster/coordination/JoinTaskExecutor.java @@ -95,7 +95,8 @@ public ClusterTasksResult execute(ClusterState currentState, List jo ClusterState.Builder newState; if (joiningNodes.size() == 1 && joiningNodes.get(0).isFinishElectionTask()) { - return results.successes(joiningNodes).build(currentState); + final Task task = joiningNodes.get(0); + return results.success(task, new LegacyClusterTaskResultActionListener(task, currentState)).build(currentState); } else if (currentNodes.getMasterNode() == null && joiningNodes.stream().anyMatch(Task::isBecomeMasterTask)) { assert joiningNodes.stream().anyMatch(Task::isFinishElectionTask) : "becoming a master but election is not finished " + joiningNodes; @@ -148,7 +149,7 @@ public ClusterTasksResult execute(ClusterState currentState, List jo continue; } } - results.success(joinTask); + results.success(joinTask, new LegacyClusterTaskResultActionListener(joinTask, currentState)); } if (nodesChanged) { diff --git a/server/src/main/java/org/elasticsearch/cluster/coordination/NodeRemovalClusterStateTaskExecutor.java b/server/src/main/java/org/elasticsearch/cluster/coordination/NodeRemovalClusterStateTaskExecutor.java index 5038471d0c2cd..e6c86ad3fe569 100644 --- a/server/src/main/java/org/elasticsearch/cluster/coordination/NodeRemovalClusterStateTaskExecutor.java +++ b/server/src/main/java/org/elasticsearch/cluster/coordination/NodeRemovalClusterStateTaskExecutor.java @@ -59,6 +59,7 @@ public NodeRemovalClusterStateTaskExecutor(AllocationService allocationService) public ClusterTasksResult execute(final ClusterState currentState, final List tasks) throws Exception { final DiscoveryNodes.Builder remainingNodesBuilder = DiscoveryNodes.builder(currentState.nodes()); boolean 
removed = false; + final var resultBuilder = ClusterTasksResult.builder(); for (final Task task : tasks) { if (currentState.nodes().nodeExists(task.node())) { remainingNodesBuilder.remove(task.node()); @@ -66,18 +67,21 @@ public ClusterTasksResult execute(final ClusterState currentState, final L } else { logger.debug("node [{}] does not exist in cluster state, ignoring", task); } + resultBuilder.success(task, new LegacyClusterTaskResultActionListener(task, currentState)); } - if (removed == false) { + final ClusterState finalState; + + if (removed) { + final ClusterState remainingNodesClusterState = remainingNodesClusterState(currentState, remainingNodesBuilder); + final ClusterState ptasksDisassociatedState = PersistentTasksCustomMetadata.disassociateDeadNodes(remainingNodesClusterState); + finalState = allocationService.disassociateDeadNodes(ptasksDisassociatedState, true, describeTasks(tasks)); + } else { // no nodes to remove, keep the current cluster state - return ClusterTasksResult.builder().successes(tasks).build(currentState); + finalState = currentState; } - final ClusterState remainingNodesClusterState = remainingNodesClusterState(currentState, remainingNodesBuilder); - final ClusterState ptasksDisassociatedState = PersistentTasksCustomMetadata.disassociateDeadNodes(remainingNodesClusterState); - final ClusterState finalState = allocationService.disassociateDeadNodes(ptasksDisassociatedState, true, describeTasks(tasks)); - - return ClusterTasksResult.builder().successes(tasks).build(finalState); + return resultBuilder.build(finalState); } // visible for testing diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataMappingService.java b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataMappingService.java index 6924c20fed814..3e89e11ae0ec0 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataMappingService.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataMappingService.java @@ -99,6 +99,7 @@ public ClusterTasksResult execute( ClusterState currentState, List tasks ) throws Exception { + final ClusterState originalState = currentState; Map indexMapperServices = new HashMap<>(); ClusterTasksResult.Builder builder = ClusterTasksResult.builder(); try { @@ -115,7 +116,7 @@ public ClusterTasksResult execute( } } currentState = applyRequest(currentState, request, indexMapperServices); - builder.success(task); + builder.success(task, new LegacyClusterTaskResultActionListener(task, originalState)); } catch (Exception e) { builder.failure(task, e); } diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataUpdateSettingsService.java b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataUpdateSettingsService.java index ae06e6f6f9636..06f01a2129805 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataUpdateSettingsService.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataUpdateSettingsService.java @@ -78,7 +78,7 @@ public MetadataUpdateSettingsService( for (AckedClusterStateUpdateTask task : tasks) { try { state = task.execute(state); - builder.success(task); + builder.success(task, new ClusterStateTaskExecutor.LegacyClusterTaskResultActionListener(task, currentState)); } catch (Exception e) { builder.failure(task, e); } diff --git a/server/src/main/java/org/elasticsearch/cluster/service/MasterService.java b/server/src/main/java/org/elasticsearch/cluster/service/MasterService.java index 4474b24ddc241..7b559f9cce1bc 100644 --- 
a/server/src/main/java/org/elasticsearch/cluster/service/MasterService.java +++ b/server/src/main/java/org/elasticsearch/cluster/service/MasterService.java @@ -12,6 +12,7 @@ import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.message.ParameterizedMessage; import org.elasticsearch.Assertions; +import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterState.Builder; @@ -54,6 +55,7 @@ import java.util.function.LongSupplier; import java.util.function.Supplier; import java.util.stream.Collectors; +import java.util.stream.Stream; import static org.elasticsearch.common.util.concurrent.EsExecutors.daemonThreadFactory; @@ -212,18 +214,6 @@ public void onNoLongerMaster() { } } - public void clusterStateProcessed(ClusterState oldState, ClusterState newState) { - try (ThreadContext.StoredContext ignore = threadContextSupplier.get()) { - listener.clusterStateProcessed(oldState, newState); - } catch (Exception e) { - logger.error(() -> new ParameterizedMessage(""" - exception thrown by listener while notifying of cluster state, old cluster state: - {} - new cluster state: - {}""", oldState, newState), e); - } - } - @Nullable public TaskAckListener createTaskAckListener(long clusterStateVersion, DiscoveryNodes nodes) { return contextPreservingAckListener == null @@ -231,14 +221,6 @@ public TaskAckListener createTaskAckListener(long clusterStateVersion, Discovery : new TaskAckListener(contextPreservingAckListener, clusterStateVersion, nodes, threadPool); } - public void clusterStateUnchanged(ClusterState clusterState) { - if (contextPreservingAckListener != null) { - // no need to wait for ack if nothing changed, the update can be counted as acknowledged - contextPreservingAckListener.onAllNodesAcked(null); - } - clusterStateProcessed(clusterState, clusterState); - } - @Override public ClusterStateTaskListener getTask() { return (ClusterStateTaskListener) task; @@ -367,7 +349,7 @@ protected boolean blockingAllowed() { void onPublicationSuccess(ClusterStatePublicationEvent clusterStatePublicationEvent, TaskOutputs taskOutputs) { final long notificationStartTime = threadPool.rawRelativeTimeInMillis(); - taskOutputs.processedDifferentClusterState(clusterStatePublicationEvent.getOldState(), clusterStatePublicationEvent.getNewState()); + taskOutputs.processedDifferentClusterState(clusterStatePublicationEvent.getNewState()); try { taskOutputs.clusterStatePublished(clusterStatePublicationEvent); @@ -534,14 +516,14 @@ class TaskOutputs { final TaskInputs taskInputs; final ClusterState previousClusterState; final ClusterState newClusterState; - final List nonFailedTasks; + final List nonFailedTasks; final Map executionResults; TaskOutputs( TaskInputs taskInputs, ClusterState previousClusterState, ClusterState newClusterState, - List nonFailedTasks, + List nonFailedTasks, Map executionResults ) { this.taskInputs = taskInputs; @@ -551,12 +533,12 @@ class TaskOutputs { this.executionResults = executionResults; } - void publishingFailed(FailedToCommitClusterStateException t) { - nonFailedTasks.forEach(task -> task.onFailure(t)); + void publishingFailed(FailedToCommitClusterStateException e) { + nonFailedTasks.forEach(task -> task.onPublishFailure(e)); } - void processedDifferentClusterState(ClusterState previousClusterState, ClusterState newClusterState) { - nonFailedTasks.forEach(task -> task.clusterStateProcessed(previousClusterState, newClusterState)); + void 
processedDifferentClusterState(ClusterState newClusterState) { + nonFailedTasks.forEach(task -> task.onPublishSuccess(newClusterState)); } void clusterStatePublished(ClusterStatePublicationEvent clusterStatePublicationEvent) { @@ -566,7 +548,7 @@ void clusterStatePublished(ClusterStatePublicationEvent clusterStatePublicationE ClusterStatePublisher.AckListener createAckListener(ClusterState newClusterState) { return new CompositeTaskAckListener( nonFailedTasks.stream() - .map(task -> task.createTaskAckListener(newClusterState.version(), newClusterState.nodes())) + .map(task -> task.task().createTaskAckListener(newClusterState.version(), newClusterState.nodes())) .filter(Objects::nonNull) .collect(Collectors.toList()) ); @@ -588,7 +570,14 @@ void notifyFailedTasks() { } void notifySuccessfulTasksOnUnchangedClusterState() { - nonFailedTasks.forEach(task -> task.clusterStateUnchanged(newClusterState)); + nonFailedTasks.forEach(task -> { + Batcher.UpdateTask updateTask = task.task(); + if (updateTask.contextPreservingAckListener != null) { + // no need to wait for ack if nothing changed, the update can be counted as acknowledged + updateTask.contextPreservingAckListener.onAllNodesAcked(null); + } + task.onClusterStateUnchanged(newClusterState); + }); } } @@ -839,13 +828,55 @@ private ClusterTasksResult executeTasks(TaskInputs tas return clusterTasksResult; } - private List getNonFailedTasks( - TaskInputs taskInputs, - ClusterTasksResult clusterTasksResult - ) { - return taskInputs.updateTasks.stream().filter(updateTask -> { + private record NonFailedTask(Batcher.UpdateTask task, ActionListener publishListener) { + + public void onPublishSuccess(ClusterState newClusterState) { + try (ThreadContext.StoredContext ignored = task.threadContextSupplier.get()) { + publishListener.onResponse(newClusterState); + } catch (Exception e) { + logger.error( + () -> new ParameterizedMessage( + "exception thrown by listener while notifying of new cluster state:\n{}", + newClusterState + ), + e + ); + } + } + + public void onClusterStateUnchanged(ClusterState clusterState) { + try (ThreadContext.StoredContext ignored = task.threadContextSupplier.get()) { + publishListener.onResponse(clusterState); + } catch (Exception e) { + logger.error( + () -> new ParameterizedMessage( + "exception thrown by listener while notifying of unchanged cluster state:\n{}", + clusterState + ), + e + ); + } + } + + public void onPublishFailure(FailedToCommitClusterStateException e) { + try (ThreadContext.StoredContext ignored = task.threadContextSupplier.get()) { + publishListener.onFailure(e); + } catch (Exception inner) { + inner.addSuppressed(e); + logger.error("exception thrown by listener notifying of failure", inner); + } + } + } + + private List getNonFailedTasks(TaskInputs taskInputs, ClusterTasksResult clusterTasksResult) { + return taskInputs.updateTasks.stream().flatMap(updateTask -> { assert clusterTasksResult.executionResults().containsKey(updateTask.getTask()) : "missing " + updateTask; - return clusterTasksResult.executionResults().get(updateTask.getTask()).isSuccess(); + final ClusterStateTaskExecutor.TaskResult taskResult = clusterTasksResult.executionResults().get(updateTask.getTask()); + if (taskResult.isSuccess()) { + return Stream.of(new NonFailedTask(updateTask, taskResult.taskListener())); + } else { + return Stream.of(); + } }).collect(Collectors.toList()); } diff --git a/server/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java 
b/server/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java index 77885fb624e4c..35f88ddef3ea9 100644 --- a/server/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java +++ b/server/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java @@ -3010,11 +3010,13 @@ public boolean assertAllListenersResolved() { * * Package private to allow for tests. */ - static final ClusterStateTaskExecutor SHARD_STATE_EXECUTOR = ( - currentState, - tasks) -> ClusterStateTaskExecutor.ClusterTasksResult.builder() - .successes(tasks) - .build(new SnapshotShardsUpdateContext(currentState, tasks).computeUpdatedState()); + static final ClusterStateTaskExecutor SHARD_STATE_EXECUTOR = (currentState, tasks) -> { + final var builder = ClusterStateTaskExecutor.ClusterTasksResult.builder(); + for (var task : tasks) { + builder.success(task, new ClusterStateTaskExecutor.LegacyClusterTaskResultActionListener(task, currentState)); + } + return builder.build(new SnapshotShardsUpdateContext(currentState, tasks).computeUpdatedState()); + }; private static boolean isQueued(@Nullable ShardSnapshotStatus status) { return status != null && status.state() == ShardState.QUEUED; diff --git a/server/src/test/java/org/elasticsearch/cluster/action/shard/ShardFailedClusterStateTaskExecutorTests.java b/server/src/test/java/org/elasticsearch/cluster/action/shard/ShardFailedClusterStateTaskExecutorTests.java index 767b10a2ade73..544be2c438706 100644 --- a/server/src/test/java/org/elasticsearch/cluster/action/shard/ShardFailedClusterStateTaskExecutorTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/action/shard/ShardFailedClusterStateTaskExecutorTests.java @@ -51,6 +51,8 @@ public class ShardFailedClusterStateTaskExecutorTests extends ESAllocationTestCase { + private static final ActionListener NO_OP_TASK_LISTENER = ActionListener.wrap(() -> {}); + private static final String INDEX = "INDEX"; private AllocationService allocationService; private int numberOfReplicas; @@ -136,7 +138,7 @@ ClusterState applyFailedShards(ClusterState currentState, List fail ); } for (FailedShardUpdateTask nonExistentTask : nonExistentTasks) { - taskResultList.add(Tuple.tuple(nonExistentTask, ClusterStateTaskExecutor.TaskResult.success())); + taskResultList.add(Tuple.tuple(nonExistentTask, ClusterStateTaskExecutor.TaskResult.success(NO_OP_TASK_LISTENER))); } assertTaskResults(taskResultList, result, currentState, false); } @@ -303,7 +305,7 @@ private static void assertTasksSuccessful( boolean clusterStateChanged ) { List> taskResultList = tasks.stream() - .map(t -> Tuple.tuple(t, ClusterStateTaskExecutor.TaskResult.success())) + .map(t -> Tuple.tuple(t, ClusterStateTaskExecutor.TaskResult.success(NO_OP_TASK_LISTENER))) .collect(Collectors.toList()); assertTaskResults(taskResultList, result, clusterState, clusterStateChanged); } diff --git a/server/src/test/java/org/elasticsearch/cluster/service/MasterServiceTests.java b/server/src/test/java/org/elasticsearch/cluster/service/MasterServiceTests.java index a0117fb5692b6..4da46f329026b 100644 --- a/server/src/test/java/org/elasticsearch/cluster/service/MasterServiceTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/service/MasterServiceTests.java @@ -14,6 +14,7 @@ import org.apache.lucene.util.SetOnce; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.Version; +import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.cluster.AckedClusterStateUpdateTask; 
import org.elasticsearch.cluster.ClusterName; @@ -66,6 +67,7 @@ import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicReference; import java.util.stream.Collectors; +import java.util.stream.IntStream; import static java.util.Collections.emptyMap; import static java.util.Collections.emptySet; @@ -73,6 +75,7 @@ import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.Matchers.instanceOf; public class MasterServiceTests extends ESTestCase { @@ -282,7 +285,7 @@ public ClusterTasksResult execute( List tasks ) { ClusterState newClusterState = ClusterState.builder(currentState).build(); - return ClusterTasksResult.builder().successes(tasks).build(newClusterState); + return successes(currentState, tasks).build(newClusterState); } @Override @@ -298,6 +301,14 @@ public void clusterStatePublished(ClusterStatePublicationEvent clusterStatePubli } } + private static ClusterTasksResult.Builder successes(ClusterState originalState, List tasks) { + ClusterTasksResult.Builder builder = ClusterTasksResult.builder(); + for (T task : tasks) { + builder = builder.success(task, new ClusterStateTaskExecutor.LegacyClusterTaskResultActionListener(task, originalState)); + } + return builder; + } + @TestLogging(value = "org.elasticsearch.cluster.service:TRACE", reason = "to ensure that we log cluster state events on TRACE level") public void testClusterStateUpdateLogging() throws Exception { MockLogAppender mockAppender = new MockLogAppender(); @@ -492,7 +503,7 @@ public ClusterTasksResult execute(ClusterState currentState, List ta assertTrue("Should execute all tasks at once", executed.compareAndSet(false, true)); assertThat("Should execute all tasks at once", tasks.size(), equalTo(expectedTaskCount)); executionCountDown.countDown(); - return ClusterTasksResult.builder().successes(tasks).build(currentState); + return successes(currentState, tasks).build(currentState); } } @@ -512,7 +523,7 @@ public ClusterTasksResult execute(ClusterState currentState, List ta (currentState, tasks) -> { executionBarrier.await(10, TimeUnit.SECONDS); // notify test thread that the master service is blocked executionBarrier.await(10, TimeUnit.SECONDS); // wait for test thread to release us - return ClusterTasksResult.builder().successes(tasks).build(currentState); + return successes(currentState, tasks).build(currentState); } ); @@ -672,7 +683,7 @@ public ClusterTasksResult execute(ClusterState currentState, List ta equalTo(true) ); } - return ClusterTasksResult.builder().successes(tasks).build(maybeUpdatedClusterState); + return successes(currentState, tasks).build(maybeUpdatedClusterState); } @Override @@ -762,6 +773,246 @@ public void clusterStatePublished(ClusterStatePublicationEvent clusterPublicatio } } + public void testTaskFailureNotification() throws Exception { + + final String testContextHeaderName = "test-context-header"; + final ThreadContext threadContext = threadPool.getThreadContext(); + final int taskCount = between(1, 10); + final CountDownLatch taskCountDown = new CountDownLatch(taskCount); + + class Task implements ClusterStateTaskListener { + + private final String expectedHeaderValue; + + Task(String expectedHeaderValue) { + this.expectedHeaderValue = expectedHeaderValue; + } + + @Override + public void clusterStateProcessed(ClusterState oldState, ClusterState newState) { + throw new AssertionError("should not complete task"); + } + + @Override + public void 
onFailure(Exception e) { + assertThat(e, instanceOf(RuntimeException.class)); + assertThat(e.getMessage(), equalTo("simulated")); + assertThat(threadContext.getHeader(testContextHeaderName), equalTo(expectedHeaderValue)); + taskCountDown.countDown(); + } + } + + final ClusterStateTaskExecutor executor = (currentState, tasks) -> { + if (randomBoolean()) { + throw new RuntimeException("simulated"); + } else { + return ClusterTasksResult.builder().failures(tasks, new RuntimeException("simulated")).build(currentState); + } + }; + + final var executionBarrier = new CyclicBarrier(2); + final ClusterStateUpdateTask blockMasterTask = new ClusterStateUpdateTask() { + @Override + public ClusterState execute(ClusterState currentState) throws Exception { + executionBarrier.await(10, TimeUnit.SECONDS); // notify test thread that the master service is blocked + executionBarrier.await(10, TimeUnit.SECONDS); // wait for test thread to release us + return currentState; + } + + @Override + public void onFailure(Exception e) { + throw new AssertionError(e); + } + }; + + try (var masterService = createMasterService(true)) { + + masterService.submitStateUpdateTask("block", blockMasterTask, ClusterStateTaskExecutor.unbatched()); + executionBarrier.await(10, TimeUnit.SECONDS); // wait for the master service to be blocked + + masterService.setClusterStatePublisher( + (clusterStatePublicationEvent, publishListener, ackListener) -> { + throw new AssertionError("should not publish any states"); + } + ); + + int toSubmit = taskCount; + + while (toSubmit > 0) { + final int batchSize = between(1, toSubmit); + toSubmit -= batchSize; + try (ThreadContext.StoredContext ignored = threadContext.newStoredContext(false)) { + final String testContextHeaderValue = randomAlphaOfLength(10); + threadContext.putHeader(testContextHeaderName, testContextHeaderValue); + + final List tasks = IntStream.range(0, batchSize) + .mapToObj(i -> new Task(testContextHeaderValue)) + .collect(Collectors.toList()); + + final ClusterStateTaskConfig clusterStateTaskConfig = ClusterStateTaskConfig.build(Priority.NORMAL); + + if (batchSize == 1 && randomBoolean()) { + masterService.submitStateUpdateTask("test", tasks.get(0), clusterStateTaskConfig, executor); + } else { + masterService.submitStateUpdateTasks("test", tasks, clusterStateTaskConfig, executor); + } + } + } + + executionBarrier.await(10, TimeUnit.SECONDS); // release block on master service + assertTrue(taskCountDown.await(10, TimeUnit.SECONDS)); + } + } + + public void testTaskNotificationAfterPublication() throws Exception { + + class Task implements ClusterStateTaskListener { + + final ActionListener publishListener; + + Task(ActionListener publishListener) { + this.publishListener = publishListener; + } + + @Override + public void clusterStateProcessed(ClusterState oldState, ClusterState newState) { + throw new AssertionError("should not complete task"); + } + + @Override + public void onFailure(Exception e) { + throw new AssertionError(e); + } + } + + final String testContextHeaderName = "test-context-header"; + final ThreadContext threadContext = threadPool.getThreadContext(); + + final ClusterStateTaskExecutor executor = (currentState, tasks) -> { + final ClusterTasksResult.Builder builder = ClusterTasksResult.builder(); + for (Task task : tasks) { + builder.success(task, task.publishListener); + } + return builder.build(ClusterState.builder(currentState).build()); + }; + + final var executionBarrier = new CyclicBarrier(2); + final ClusterStateUpdateTask blockMasterTask = new 
ClusterStateUpdateTask() { + @Override + public ClusterState execute(ClusterState currentState) throws Exception { + executionBarrier.await(10, TimeUnit.SECONDS); // notify test thread that the master service is blocked + executionBarrier.await(10, TimeUnit.SECONDS); // wait for test thread to release us + return currentState; + } + + @Override + public void onFailure(Exception e) { + throw new AssertionError(e); + } + }; + + try (var masterService = createMasterService(true)) { + + // success case: submit some tasks, possibly in different contexts, and verify that the expected listener is completed + + masterService.submitStateUpdateTask("block", blockMasterTask, ClusterStateTaskExecutor.unbatched()); + executionBarrier.await(10, TimeUnit.SECONDS); // wait for the master service to be blocked + + final AtomicReference publishedState = new AtomicReference<>(); + masterService.setClusterStatePublisher((clusterStatePublicationEvent, publishListener, ackListener) -> { + assertTrue(publishedState.compareAndSet(null, clusterStatePublicationEvent.getNewState())); + ClusterServiceUtils.setAllElapsedMillis(clusterStatePublicationEvent); + publishListener.onResponse(null); + }); + + int toSubmit = between(1, 10); + final CountDownLatch publishSuccessCountdown = new CountDownLatch(toSubmit); + + while (toSubmit > 0) { + final int batchSize = between(1, toSubmit); + toSubmit -= batchSize; + try (ThreadContext.StoredContext ignored = threadContext.newStoredContext(false)) { + final String testContextHeaderValue = randomAlphaOfLength(10); + threadContext.putHeader(testContextHeaderName, testContextHeaderValue); + + final List tasks = IntStream.range(0, batchSize).mapToObj(i -> new Task(new ActionListener<>() { + @Override + public void onResponse(ClusterState clusterState) { + assertEquals(testContextHeaderValue, threadContext.getHeader(testContextHeaderName)); + assertSame(publishedState.get(), clusterState); + publishSuccessCountdown.countDown(); + } + + @Override + public void onFailure(Exception e) { + throw new AssertionError(e); + } + })).collect(Collectors.toList()); + + final ClusterStateTaskConfig clusterStateTaskConfig = ClusterStateTaskConfig.build(Priority.NORMAL); + + if (batchSize == 1 && randomBoolean()) { + masterService.submitStateUpdateTask("test", tasks.get(0), clusterStateTaskConfig, executor); + } else { + masterService.submitStateUpdateTasks("test", tasks, clusterStateTaskConfig, executor); + } + } + } + + executionBarrier.await(10, TimeUnit.SECONDS); // release block on master service + assertTrue(publishSuccessCountdown.await(10, TimeUnit.SECONDS)); + + // failure case: submit some tasks, possibly in different contexts, and verify that the expected listener is completed + + masterService.submitStateUpdateTask("block", blockMasterTask, ClusterStateTaskExecutor.unbatched()); + executionBarrier.await(10, TimeUnit.SECONDS); // wait for the master service to be blocked + + final String exceptionMessage = "simulated"; + masterService.setClusterStatePublisher((clusterStatePublicationEvent, publishListener, ackListener) -> { + ClusterServiceUtils.setAllElapsedMillis(clusterStatePublicationEvent); + publishListener.onFailure(new FailedToCommitClusterStateException(exceptionMessage)); + }); + + toSubmit = between(1, 10); + final CountDownLatch publishFailureCountdown = new CountDownLatch(toSubmit); + + while (toSubmit > 0) { + final int batchSize = between(1, toSubmit); + toSubmit -= batchSize; + try (ThreadContext.StoredContext ignored = threadContext.newStoredContext(false)) { + final 
String testContextHeaderValue = randomAlphaOfLength(10); + threadContext.putHeader(testContextHeaderName, testContextHeaderValue); + + final List tasks = IntStream.range(0, batchSize).mapToObj(i -> new Task(new ActionListener<>() { + @Override + public void onResponse(ClusterState clusterState) { + throw new AssertionError("should not succeed"); + } + + @Override + public void onFailure(Exception e) { + assertEquals(testContextHeaderValue, threadContext.getHeader(testContextHeaderName)); + assertThat(e, instanceOf(FailedToCommitClusterStateException.class)); + assertThat(e.getMessage(), equalTo(exceptionMessage)); + publishFailureCountdown.countDown(); + } + })).collect(Collectors.toList()); + + final ClusterStateTaskConfig clusterStateTaskConfig = ClusterStateTaskConfig.build(Priority.NORMAL); + + if (batchSize == 1 && randomBoolean()) { + masterService.submitStateUpdateTask("test", tasks.get(0), clusterStateTaskConfig, executor); + } else { + masterService.submitStateUpdateTasks("test", tasks, clusterStateTaskConfig, executor); + } + } + } + + executionBarrier.await(10, TimeUnit.SECONDS); // release block on master service + assertTrue(publishFailureCountdown.await(10, TimeUnit.SECONDS)); + } + } + public void testBlockingCallInClusterStateTaskListenerFails() throws InterruptedException { assumeTrue("assertions must be enabled for this test to work", BaseFuture.class.desiredAssertionStatus()); final CountDownLatch latch = new CountDownLatch(1); @@ -796,7 +1047,7 @@ public void onFailure(Exception e) {} ClusterStateTaskConfig.build(Priority.NORMAL), (currentState, tasks) -> { ClusterState newClusterState = ClusterState.builder(currentState).build(); - return ClusterTasksResult.builder().successes(tasks).build(newClusterState); + return successes(currentState, tasks).build(newClusterState); } ); diff --git a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/IndexLifecycleRunner.java b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/IndexLifecycleRunner.java index a0b3391c0958e..ed82a16fe237e 100644 --- a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/IndexLifecycleRunner.java +++ b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/IndexLifecycleRunner.java @@ -61,7 +61,7 @@ class IndexLifecycleRunner { for (IndexLifecycleClusterStateUpdateTask task : tasks) { try { state = task.execute(state); - builder.success(task); + builder.success(task, new ClusterStateTaskExecutor.LegacyClusterTaskResultActionListener(task, currentState)); } catch (Exception e) { builder.failure(task, e); } From b3ee1e64643110fa95acd8661c5d5b6e99e709b8 Mon Sep 17 00:00:00 2001 From: James Rodewig Date: Thu, 10 Feb 2022 07:48:45 -0500 Subject: [PATCH 036/167] [DOCS] Remove 8.0.0 coming tag (#83757) (#83781) --- docs/reference/migration/migrate_8_0.asciidoc | 2 -- docs/reference/release-notes/8.0.0.asciidoc | 2 -- 2 files changed, 4 deletions(-) diff --git a/docs/reference/migration/migrate_8_0.asciidoc b/docs/reference/migration/migrate_8_0.asciidoc index 14dc02b060d1d..9c7ba87a4759f 100644 --- a/docs/reference/migration/migrate_8_0.asciidoc +++ b/docs/reference/migration/migrate_8_0.asciidoc @@ -9,8 +9,6 @@ your application to {es} 8.0. See also <> and <>. 
-coming::[8.0.0] - [discrete] [[breaking-changes-8.0]] === Breaking changes diff --git a/docs/reference/release-notes/8.0.0.asciidoc b/docs/reference/release-notes/8.0.0.asciidoc index 3eef72aa0e44e..2e333974a38e5 100644 --- a/docs/reference/release-notes/8.0.0.asciidoc +++ b/docs/reference/release-notes/8.0.0.asciidoc @@ -1,8 +1,6 @@ [[release-notes-8.0.0]] == {es} version 8.0.0 -coming::[8.0.0] - The following list are changes in 8.0.0 as compared to 7.17.0, and combines release notes from the 8.0.0-alpha1, -alpha2, -beta1, -rc1 and -rc2 releases. From 7ce47613c3aae57b55ea824cc9a49b18747b6b3b Mon Sep 17 00:00:00 2001 From: Artem Prigoda Date: Thu, 10 Feb 2022 14:23:17 +0100 Subject: [PATCH 037/167] Add "Has eligible master" Indicator for Health API (#82874) Based on #83119 and #83205 --- docs/changelog/82874.yaml | 5 ++ .../HasEligibleMasterNodeIndicator.java | 54 +++++++++++ .../health/HealthIndicatorDetails.java | 7 +- .../health/HealthIndicatorResult.java | 4 + .../java/org/elasticsearch/node/Node.java | 4 +- .../HasEligibleMasterNodeIndicatorTests.java | 89 +++++++++++++++++++ 6 files changed, 161 insertions(+), 2 deletions(-) create mode 100644 docs/changelog/82874.yaml create mode 100644 server/src/main/java/org/elasticsearch/cluster/coordination/indicators/HasEligibleMasterNodeIndicator.java create mode 100644 server/src/test/java/org/elasticsearch/cluster/coordination/indicators/HasEligibleMasterNodeIndicatorTests.java diff --git a/docs/changelog/82874.yaml b/docs/changelog/82874.yaml new file mode 100644 index 0000000000000..ad41633e99ce7 --- /dev/null +++ b/docs/changelog/82874.yaml @@ -0,0 +1,5 @@ +pr: 82874 +summary: 'Add "Has eligible master" Indicator for Health API' +area: Health +type: feature +issues: [] diff --git a/server/src/main/java/org/elasticsearch/cluster/coordination/indicators/HasEligibleMasterNodeIndicator.java b/server/src/main/java/org/elasticsearch/cluster/coordination/indicators/HasEligibleMasterNodeIndicator.java new file mode 100644 index 0000000000000..466798cfc314d --- /dev/null +++ b/server/src/main/java/org/elasticsearch/cluster/coordination/indicators/HasEligibleMasterNodeIndicator.java @@ -0,0 +1,54 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.cluster.coordination.indicators; + +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.cluster.node.DiscoveryNodeRole; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.health.HealthIndicatorResult; +import org.elasticsearch.health.HealthIndicatorService; +import org.elasticsearch.health.HealthStatus; + +import static org.elasticsearch.health.ServerHealthComponents.CLUSTER_COORDINATION; + +public class HasEligibleMasterNodeIndicator implements HealthIndicatorService { + + public static final String HAS_ELIGIBLE_MASTER = "has_eligible_master"; + + private final ClusterService clusterService; + + public HasEligibleMasterNodeIndicator(ClusterService clusterService) { + this.clusterService = clusterService; + } + + @Override + public String name() { + return HAS_ELIGIBLE_MASTER; + } + + @Override + public String component() { + return CLUSTER_COORDINATION; + } + + @Override + public HealthIndicatorResult calculate() { + for (DiscoveryNode node : clusterService.state().nodes()) { + if (node.getRoles().contains(DiscoveryNodeRole.MASTER_ROLE)) { + return HealthIndicatorResult.of( + HAS_ELIGIBLE_MASTER, + CLUSTER_COORDINATION, + HealthStatus.GREEN, + "There is a master-eligible node." + ); + } + } + return HealthIndicatorResult.of(HAS_ELIGIBLE_MASTER, CLUSTER_COORDINATION, HealthStatus.RED, "No master-eligible nodes."); + } +} diff --git a/server/src/main/java/org/elasticsearch/health/HealthIndicatorDetails.java b/server/src/main/java/org/elasticsearch/health/HealthIndicatorDetails.java index e085b9981e496..b0fd50a1024f7 100644 --- a/server/src/main/java/org/elasticsearch/health/HealthIndicatorDetails.java +++ b/server/src/main/java/org/elasticsearch/health/HealthIndicatorDetails.java @@ -18,7 +18,12 @@ public interface HealthIndicatorDetails extends ToXContentObject { HealthIndicatorDetails EMPTY = new HealthIndicatorDetails() { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - return builder; + return builder.startObject().endObject(); + } + + @Override + public String toString() { + return "HealthIndicatorDetails{}"; } }; } diff --git a/server/src/main/java/org/elasticsearch/health/HealthIndicatorResult.java b/server/src/main/java/org/elasticsearch/health/HealthIndicatorResult.java index cec405e16a59d..ad24761e1766e 100644 --- a/server/src/main/java/org/elasticsearch/health/HealthIndicatorResult.java +++ b/server/src/main/java/org/elasticsearch/health/HealthIndicatorResult.java @@ -17,6 +17,10 @@ public record HealthIndicatorResult(String name, String component, HealthStatus implements ToXContentObject { + public static HealthIndicatorResult of(String name, String component, HealthStatus status, String summary) { + return new HealthIndicatorResult(name, component, status, summary, HealthIndicatorDetails.EMPTY); + } + @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); diff --git a/server/src/main/java/org/elasticsearch/node/Node.java b/server/src/main/java/org/elasticsearch/node/Node.java index 3c4d699160799..35e5970f4ca6a 100644 --- a/server/src/main/java/org/elasticsearch/node/Node.java +++ b/server/src/main/java/org/elasticsearch/node/Node.java @@ -41,6 +41,7 @@ import org.elasticsearch.cluster.action.index.MappingUpdatedAction; import org.elasticsearch.cluster.coordination.Coordinator; import 
org.elasticsearch.cluster.coordination.InstanceHasMasterHealthIndicatorService; +import org.elasticsearch.cluster.coordination.indicators.HasEligibleMasterNodeIndicator; import org.elasticsearch.cluster.desirednodes.DesiredNodesSettingsValidator; import org.elasticsearch.cluster.metadata.IndexMetadataVerifier; import org.elasticsearch.cluster.metadata.IndexTemplateMetadata; @@ -901,7 +902,8 @@ protected Node( ); List serverHealthIndicatorServices = List.of( - new InstanceHasMasterHealthIndicatorService(clusterService) + new InstanceHasMasterHealthIndicatorService(clusterService), + new HasEligibleMasterNodeIndicator(clusterService) ); List pluginHealthIndicatorServices = pluginsService.filterPlugins(HealthPlugin.class) .stream() diff --git a/server/src/test/java/org/elasticsearch/cluster/coordination/indicators/HasEligibleMasterNodeIndicatorTests.java b/server/src/test/java/org/elasticsearch/cluster/coordination/indicators/HasEligibleMasterNodeIndicatorTests.java new file mode 100644 index 0000000000000..db342678bfdb6 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/cluster/coordination/indicators/HasEligibleMasterNodeIndicatorTests.java @@ -0,0 +1,89 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.cluster.coordination.indicators; + +import org.elasticsearch.Version; +import org.elasticsearch.cluster.ClusterName; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.transport.TransportAddress; +import org.elasticsearch.health.HealthIndicatorResult; +import org.elasticsearch.health.HealthStatus; +import org.elasticsearch.test.ESTestCase; +import org.mockito.Mockito; + +import java.util.Set; + +import static java.util.Collections.emptyMap; +import static org.elasticsearch.cluster.node.DiscoveryNodeRole.DATA_ROLE; +import static org.elasticsearch.cluster.node.DiscoveryNodeRole.INGEST_ROLE; +import static org.elasticsearch.cluster.node.DiscoveryNodeRole.MASTER_ROLE; +import static org.elasticsearch.cluster.node.DiscoveryNodeRole.TRANSFORM_ROLE; + +public class HasEligibleMasterNodeIndicatorTests extends ESTestCase { + + public void testIsGreenIfThereIsMasterNode() { + ClusterState clusterState = ClusterState.builder(new ClusterName("test-cluster")) + .nodes( + DiscoveryNodes.builder() + .add(new DiscoveryNode("node1", transportAddress(), emptyMap(), Set.of(DATA_ROLE), Version.CURRENT)) + .add(new DiscoveryNode("node2", transportAddress(), emptyMap(), Set.of(DATA_ROLE), Version.CURRENT)) + .add(new DiscoveryNode("node3", transportAddress(), emptyMap(), Set.of(MASTER_ROLE), Version.CURRENT)) + .build() + ) + .build(); + + ClusterService clusterService = clusterService(clusterState); + HealthIndicatorResult noEligibleMasterNodes = new HasEligibleMasterNodeIndicator(clusterService).calculate(); + + assertEquals("has_eligible_master", noEligibleMasterNodes.name()); + assertEquals(HealthStatus.GREEN, noEligibleMasterNodes.status()); + assertEquals("There is a master-eligible node.", noEligibleMasterNodes.summary()); + } + + public void 
testIsRedIfThereNoMasterNodes() { + ClusterState clusterState = ClusterState.builder(new ClusterName("test-cluster")) + .nodes( + DiscoveryNodes.builder() + .add(new DiscoveryNode("node_1", transportAddress(), emptyMap(), Set.of(DATA_ROLE), Version.CURRENT)) + .add(new DiscoveryNode("node_2", transportAddress(), emptyMap(), Set.of(TRANSFORM_ROLE), Version.CURRENT)) + .add(new DiscoveryNode("node_3", transportAddress(), emptyMap(), Set.of(INGEST_ROLE), Version.CURRENT)) + .build() + ) + .build(); + + HealthIndicatorResult noEligibleMasterNodes = new HasEligibleMasterNodeIndicator(clusterService(clusterState)).calculate(); + + assertEquals("has_eligible_master", noEligibleMasterNodes.name()); + assertEquals(HealthStatus.RED, noEligibleMasterNodes.status()); + assertEquals("No master-eligible nodes.", noEligibleMasterNodes.summary()); + } + + public void testRedIfThereNoNodes() { + ClusterState clusterState = ClusterState.builder(new ClusterName("test-cluster")).nodes(DiscoveryNodes.builder().build()).build(); + + HealthIndicatorResult noEligibleMasterNodes = new HasEligibleMasterNodeIndicator(clusterService(clusterState)).calculate(); + + assertEquals("has_eligible_master", noEligibleMasterNodes.name()); + assertEquals(HealthStatus.RED, noEligibleMasterNodes.status()); + assertEquals("No master-eligible nodes.", noEligibleMasterNodes.summary()); + } + + private static TransportAddress transportAddress() { + return buildNewFakeTransportAddress(); + } + + private static ClusterService clusterService(ClusterState clusterState) { + ClusterService clusterService = Mockito.mock(ClusterService.class); + Mockito.when(clusterService.state()).thenReturn(clusterState); + return clusterService; + } +} From d4f7a980f9d9ee8f6988bf05f3abc230b1ab3dff Mon Sep 17 00:00:00 2001 From: Artem Prigoda Date: Thu, 10 Feb 2022 14:30:51 +0100 Subject: [PATCH 038/167] AbstractByteReference: use primitive integer to cache the hash code (#83248) * AbstractByteReference: use primitive integer to cache the hash code Instead of using a null reference to `Integer` to check whether the hash code has been calculated, we can use an `int` primitive set to zero. It's very rare that a calculated hash is equal to zero, so that's an acceptable trade-off. `java.lang.String` and Lombok use the same approach for caching hash codes. 
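As an illustration, here is a minimal standalone sketch of the same zero-as-sentinel caching pattern (the `CachedHash` class and its byte-array field are hypothetical stand-ins, not the actual `AbstractBytesReference` code):

```java
// Hypothetical example class: shows only the zero-as-sentinel hash cache.
class CachedHash {
    private final byte[] bytes;
    private int hash;           // 0 means "not computed yet", unless hashIsZero is set
    private boolean hashIsZero; // distinguishes a genuinely zero hash from "not computed"

    CachedHash(byte[] bytes) {
        this.bytes = bytes;
    }

    @Override
    public int hashCode() {
        if (hash == 0 && hashIsZero == false) {
            int result = 1;
            for (byte b : bytes) {
                result = 31 * result + b; // same polynomial shape as the patched code
            }
            if (result == 0) {
                hashIsZero = true; // remember the rare zero result without reusing the sentinel
            } else {
                hash = result;
            }
        }
        return hash;
    }
}
```

The recomputation is idempotent, so the unsynchronized write is benign in the same way as `java.lang.String`'s hash caching: concurrent callers may compute the hash more than once, but they always return the same value.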
Using an int primitive saves 16 bytes of the memory footprint for every byte reference with a calculated hash code. With Integer ``` org.elasticsearch.common.bytes.ZeroBytesReference@2a33fae0d footprint: COUNT AVG SUM DESCRIPTION 1 16 16 java.lang.Integer 1 24 24 org.elasticsearch.common.bytes.ZeroBytesReference 2 40 (total) ``` With int ``` org.elasticsearch.common.bytes.ZeroBytesReference@2a33fae0d footprint: COUNT AVG SUM DESCRIPTION 1 24 24 org.elasticsearch.common.bytes.ZeroBytesReference 1 24 (total) ``` * Add hashIsZero --- .../common/bytes/AbstractBytesReference.java | 14 +++++++++----- 1 file changed, 9 insertions(+), 5 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/common/bytes/AbstractBytesReference.java b/server/src/main/java/org/elasticsearch/common/bytes/AbstractBytesReference.java index 2ca8c7b9be20d..459328c6df5aa 100644 --- a/server/src/main/java/org/elasticsearch/common/bytes/AbstractBytesReference.java +++ b/server/src/main/java/org/elasticsearch/common/bytes/AbstractBytesReference.java @@ -18,7 +18,8 @@ public abstract class AbstractBytesReference implements BytesReference { - private Integer hash = null; // we cache the hash of this reference since it can be quite costly to re-calculated it + private int hash; // we cache the hash of this reference since it can be quite costly to re-calculate it + private boolean hashIsZero; // if the calculated hash is actually zero @Override public int getInt(int index) { @@ -89,7 +90,7 @@ public boolean equals(Object other) { @Override public int hashCode() { - if (hash == null) { + if (hash == 0 && hashIsZero == false) { final BytesRefIterator iterator = iterator(); BytesRef ref; int result = 1; @@ -102,10 +103,13 @@ public int hashCode() { } catch (IOException ex) { throw new AssertionError("wont happen", ex); } - return hash = result; - } else { - return hash; + if (result == 0) { + hashIsZero = true; + } else { + hash = result; + } } + return hash; } @Override From 8bc46ad9596bb75504295cf86ff39afcf493bfd9 Mon Sep 17 00:00:00 2001 From: Alan Woodward Date: Thu, 10 Feb 2022 14:06:26 +0000 Subject: [PATCH 039/167] Add filtering to fieldcaps endpoint (#83636) Many consumers of the field caps API need to do some post-processing of the results before they can use them; for instance, Kibana would like to exclude multifields from certain field selections, or would like to display only geo_point fields in Maps. ML and QL consumers exclude nested fields in certain circumstances. This post-processing is possible at the moment, but can be hacky; and in all cases it involves sending the whole (possibly very large) field caps response over the wire and then whittling it down in the client. It is also not guaranteed to be accurate - runtime fields may be incorrectly classified as multifields, for example. This commit pushes filtering into Elasticsearch itself, reducing the amount of data that needs to be transported and ensuring better accuracy. The field caps API gets two new parameters: * filters - a comma-delimited list that may contain any combination of: `+metadata`, `-metadata`, `-nested`, `-parent`, `-multifield` * types - a comma-delimited list of field types; only fields that have a type in this set will be returned The API will make best-effort attempts to apply the filters post-hoc to responses from older nodes, so this should still work in a mixed-cluster or cross-cluster situation.
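As a usage sketch, the new parameters can be set on a `FieldCapabilitiesRequest` via the builder methods added in this patch (the index name and the particular filter/type choices below are made up for illustration):

```java
// Illustrative only: "my-index" is a hypothetical index name.
FieldCapabilitiesRequest request = new FieldCapabilitiesRequest().indices("my-index")
    .fields("*")
    .filters("-metadata", "-multifield") // exclude metadata fields and multifields
    .allowedTypes("text", "keyword");    // keep only the text and keyword families
```

Over REST this corresponds to something like `GET /my-index/_field_caps?fields=*&filters=-metadata,-multifield&types=text,keyword`.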
Fixes #82966, #72174 --- docs/changelog/83636.yaml | 6 + docs/reference/search/field-caps.asciidoc | 9 + .../rest-api-spec/api/field_caps.json | 8 + .../{30_filter.yml => 30_index_filter.yml} | 0 .../test/field_caps/50_fieldtype_filter.yml | 213 +++++++++++++++++ .../fieldcaps/FieldCapabilitiesFetcher.java | 168 ++++++++----- .../FieldCapabilitiesNodeRequest.java | 31 +++ .../fieldcaps/FieldCapabilitiesRequest.java | 37 +++ .../fieldcaps/IndexFieldCapabilities.java | 13 ++ .../action/fieldcaps/RequestDispatcher.java | 2 + .../action/fieldcaps/ResponseRewriter.java | 129 ++++++++++ .../TransportFieldCapabilitiesAction.java | 34 +-- .../index/mapper/FieldTypeLookup.java | 6 + .../index/mapper/MapperService.java | 4 + .../index/mapper/MappingLookup.java | 3 + .../index/query/SearchExecutionContext.java | 4 + .../action/RestFieldCapabilitiesAction.java | 2 + .../FieldCapabilitiesFilterTests.java | 221 ++++++++++++++++++ .../FieldCapabilitiesNodeRequestTests.java | 56 ++++- .../FieldCapabilitiesRequestTests.java | 20 +- .../fieldcaps/ResponseRewriterTests.java | 158 +++++++++++++ x-pack/qa/runtime-fields/build.gradle | 2 +- 22 files changed, 1045 insertions(+), 81 deletions(-) create mode 100644 docs/changelog/83636.yaml rename rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/field_caps/{30_filter.yml => 30_index_filter.yml} (100%) create mode 100644 rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/field_caps/50_fieldtype_filter.yml create mode 100644 server/src/main/java/org/elasticsearch/action/fieldcaps/ResponseRewriter.java create mode 100644 server/src/test/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesFilterTests.java create mode 100644 server/src/test/java/org/elasticsearch/action/fieldcaps/ResponseRewriterTests.java diff --git a/docs/changelog/83636.yaml b/docs/changelog/83636.yaml new file mode 100644 index 0000000000000..5ca5352e12422 --- /dev/null +++ b/docs/changelog/83636.yaml @@ -0,0 +1,6 @@ +pr: 83636 +summary: Add filtering to fieldcaps endpoint +area: Search +type: enhancement +issues: + - 82966 diff --git a/docs/reference/search/field-caps.asciidoc b/docs/reference/search/field-caps.asciidoc index 7cbb583bab800..580553b027fa2 100644 --- a/docs/reference/search/field-caps.asciidoc +++ b/docs/reference/search/field-caps.asciidoc @@ -77,6 +77,15 @@ include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=index-ignore-unavailab (Optional, Boolean) If `true`, unmapped fields are included in the response. Defaults to `false`. +`filters`:: +(Optional, string) Comma-separated list of filters to apply to the response. The +following filters are supported: +metadata,-metadata,-parent,-nested,-multifield + +`types`:: +(Optional, string) Comma-separated list of field types to include. Any fields that +do not match one of these types will be excluded from the results. Defaults to empty, +meaning that all field types are returned. + [[search-field-caps-api-request-body]] ==== {api-request-body-title} diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/field_caps.json b/rest-api-spec/src/main/resources/rest-api-spec/api/field_caps.json index f1f5e3992ab4a..934ef3daa44aa 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/field_caps.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/field_caps.json @@ -63,6 +63,14 @@ "type":"boolean", "default":false, "description":"Indicates whether unmapped fields should be included in the response." 
+ }, + "filters": { + "type":"list", + "description":"An optional set of filters: can include +metadata,-metadata,-nested,-multifield,-parent" + }, + "types": { + "type": "list", + "description":"Only return results for fields that have one of the types in the list" } }, "body":{ diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/field_caps/30_filter.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/field_caps/30_index_filter.yml similarity index 100% rename from rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/field_caps/30_filter.yml rename to rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/field_caps/30_index_filter.yml diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/field_caps/50_fieldtype_filter.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/field_caps/50_fieldtype_filter.yml new file mode 100644 index 0000000000000..cfed4f68ea5e7 --- /dev/null +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/field_caps/50_fieldtype_filter.yml @@ -0,0 +1,213 @@ +--- +setup: + - skip: + version: "- 8.1.99" + reason: Field type filters were added in 8.2 + - do: + indices.create: + index: test1 + body: + mappings: + properties: + text: + type: text + fields: + keyword: + type: keyword + keyword: + type: keyword + number: + type: double + geo: + type: geo_point + misc: + type: text + object: + type: object + properties: + nested1 : + type : text + index: false + nested2: + type: float + doc_values: false + level1: + type: nested + properties: + level2: + type: object + properties: + leaf1: + type: text + index: false + runtime: + misc.keyword: + type: keyword + + - do: + indices.create: + index: test2 + body: + mappings: + properties: + text: + type: text + keyword: + type: keyword + number: + type: double + date: + type: date + geo: + type: geo_point + object: + type: object + properties: + nested1 : + type : text + index: true + nested2: + type: float + doc_values: true + level1: + type: nested + properties: + level2: + type: object + properties: + leaf1: + type: text + index: false + - do: + indices.create: + index: test3 + body: + mappings: + properties: + text: + type: text + keyword: + type: keyword + number: + type: long + date: + type: date + non_indexed_date: + type: date + index: false + non_indexed_keyword: + type: keyword + index: false + non_indexed_boolean: + type: boolean + index: false + non_indexed_ip: + type: ip + index: false + non_indexed_geo_point: + type: geo_point + index: false + geo: + type: keyword + object: + type: nested + properties: + nested1 : + type : long + index: false + nested2: + type: keyword + doc_values: false +--- +"No filters includes all the following fields": + - do: + field_caps: + index: 'test1,test2,test3' + fields: '*' + + - is_true: fields.object + - is_true: fields.text + - is_true: fields.text\\.keyword + - is_true: fields._seq_no + - is_true: fields.level1\\.level2\\.leaf1 + - is_true: fields.level1 + +--- +"Exclude parent objects": + - do: + field_caps: + index: 'test1,test2,test3' + fields: '*' + filters: '-parent' + + - is_true: fields.object\\.nested1 + - is_false: fields.object + +--- +"Exclude metadata fields": + - do: + field_caps: + index: 'test1,test2,test3' + fields: '*' + filters: '-metadata' + + - is_false: fields._seq_no + +--- +"Exclude non-metadata fields": + - do: + field_caps: + index: 'test1,test2,test3' + fields: '*' + filters: '+metadata' + + - is_true: fields._seq_no + - is_false: fields.text + +--- +"Exclude 
nested fields": + - do: + field_caps: + index: 'test1,test2,test3' + fields: '*' + filters: '-nested' + + - is_false: fields.level1 + - is_false: fields.level1\\.level2\\.leaf1 + +--- +"Exclude multifields": + - do: + field_caps: + index: 'test1,test2,test3' + fields: '*' + filters: '-multifield' + + - is_false: fields.text\\.keyword + - is_true: fields.misc\\.keyword + +--- +"Field type filters": + - do: + field_caps: + index: 'test1,test2,test3' + fields: '*' + types: 'text,keyword,long' + + - is_false: fields.date + - is_false: fields.non_indexed_boolean + - is_true: fields.non_indexed_keyword + - is_true: fields.misc + +--- +"Field type filters with field name restrictions": + - do: + field_caps: + index: 'test1,test2,test3' + fields: 'non_*,text' + types: 'text,keyword,long' + + - is_false: fields.non_indexed_boolean + - is_true: fields.non_indexed_keyword + - is_false: fields.misc + - is_true: fields.text diff --git a/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesFetcher.java b/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesFetcher.java index 7eac2f54c1137..b2815dbb05a09 100644 --- a/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesFetcher.java +++ b/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesFetcher.java @@ -45,6 +45,8 @@ class FieldCapabilitiesFetcher { FieldCapabilitiesIndexResponse fetch( ShardId shardId, String[] fieldPatterns, + String[] filters, + String[] fieldTypes, QueryBuilder indexFilter, long nowInMillis, Map runtimeFields @@ -66,67 +68,92 @@ FieldCapabilitiesIndexResponse fetch( return new FieldCapabilitiesIndexResponse(shardId.getIndexName(), Collections.emptyMap(), false); } - Set fieldNames = new HashSet<>(); - for (String pattern : fieldPatterns) { - fieldNames.addAll(searchExecutionContext.getMatchingFieldNames(pattern)); - } - Predicate fieldPredicate = indicesService.getFieldFilter().apply(shardId.getIndexName()); - Map responseMap = new HashMap<>(); - for (String field : fieldNames) { - MappedFieldType ft = searchExecutionContext.getFieldType(field); - boolean isMetadataField = searchExecutionContext.isMetadataField(field); - if (isMetadataField || fieldPredicate.test(ft.name())) { - IndexFieldCapabilities fieldCap = new IndexFieldCapabilities( - field, - ft.familyTypeName(), - isMetadataField, - ft.isSearchable(), - ft.isAggregatable(), - ft.isDimension(), - ft.getMetricType(), - ft.meta() - ); - responseMap.put(field, fieldCap); - } else { - continue; - } - // Check the ancestor of the field to find nested and object fields. - // Runtime fields are excluded since they can override any path. - // TODO find a way to do this that does not require an instanceof check - if (ft instanceof RuntimeField == false) { - int dotIndex = ft.name().lastIndexOf('.'); - while (dotIndex > -1) { - String parentField = ft.name().substring(0, dotIndex); - if (responseMap.containsKey(parentField)) { - // we added this path on another field already - break; - } - // checks if the parent field contains sub-fields - if (searchExecutionContext.getFieldType(parentField) == null) { - // no field type, it must be an object field - String type = searchExecutionContext.nestedLookup().getNestedMappers().get(parentField) != null - ? 
"nested" - : "object"; - IndexFieldCapabilities fieldCap = new IndexFieldCapabilities( - parentField, - type, - false, - false, - false, - false, - null, - Collections.emptyMap() - ); - responseMap.put(parentField, fieldCap); - } - dotIndex = parentField.lastIndexOf('.'); + return retrieveFieldCaps(shardId.getIndexName(), searchExecutionContext, fieldPatterns, filters, fieldTypes, fieldPredicate); + } + } + + public static FieldCapabilitiesIndexResponse retrieveFieldCaps( + String indexName, + SearchExecutionContext context, + String[] fieldPatterns, + String[] filters, + String[] types, + Predicate indexFieldfilter + ) { + + Set fieldNames = new HashSet<>(); + for (String pattern : fieldPatterns) { + fieldNames.addAll(context.getMatchingFieldNames(pattern)); + } + + boolean includeParentObjects = checkIncludeParents(filters); + + FieldCapsFilter filter = buildFilter(indexFieldfilter, filters, types); + Map responseMap = new HashMap<>(); + for (String field : fieldNames) { + MappedFieldType ft = context.getFieldType(field); + if (filter.matches(ft, context)) { + IndexFieldCapabilities fieldCap = new IndexFieldCapabilities( + field, + ft.familyTypeName(), + context.isMetadataField(field), + ft.isSearchable(), + ft.isAggregatable(), + ft.isDimension(), + ft.getMetricType(), + ft.meta() + ); + responseMap.put(field, fieldCap); + } else { + continue; + } + + // Check the ancestor of the field to find nested and object fields. + // Runtime fields are excluded since they can override any path. + // TODO find a way to do this that does not require an instanceof check + if (ft instanceof RuntimeField == false && includeParentObjects) { + int dotIndex = ft.name().lastIndexOf('.'); + while (dotIndex > -1) { + String parentField = ft.name().substring(0, dotIndex); + if (responseMap.containsKey(parentField)) { + // we added this path on another field already + break; + } + // checks if the parent field contains sub-fields + if (context.getFieldType(parentField) == null) { + // no field type, it must be an object field + String type = context.nestedLookup().getNestedMappers().get(parentField) != null ? 
"nested" : "object"; + IndexFieldCapabilities fieldCap = new IndexFieldCapabilities( + parentField, + type, + false, + false, + false, + false, + null, + Collections.emptyMap() + ); + responseMap.put(parentField, fieldCap); } + dotIndex = parentField.lastIndexOf('.'); } } - return new FieldCapabilitiesIndexResponse(shardId.getIndexName(), responseMap, true); } + return new FieldCapabilitiesIndexResponse(indexName, responseMap, true); + } + + private static boolean checkIncludeParents(String[] filters) { + for (String filter : filters) { + if ("-parent".equals(filter)) { + return false; + } + if ("parent".equals(filter)) { + return true; + } + } + return true; } private boolean canMatchShard( @@ -144,4 +171,35 @@ private boolean canMatchShard( return SearchService.queryStillMatchesAfterRewrite(searchRequest, searchExecutionContext); } + private interface FieldCapsFilter { + boolean matches(MappedFieldType fieldType, SearchExecutionContext context); + + default FieldCapsFilter and(FieldCapsFilter other) { + return (ft, context) -> matches(ft, context) && other.matches(ft, context); + } + } + + private static FieldCapsFilter buildFilter(Predicate fieldFilter, String[] filters, String[] fieldTypes) { + // security filters don't exclude metadata fields + FieldCapsFilter fcf = (ft, c) -> fieldFilter.test(ft.name()) || c.isMetadataField(ft.name()); + if (fieldTypes.length > 0) { + Set acceptedTypes = Set.of(fieldTypes); + fcf = fcf.and((ft, c) -> acceptedTypes.contains(ft.familyTypeName())); + } + for (String filter : filters) { + if ("parent".equals(filter) || "-parent".equals(filter)) { + continue; + } + FieldCapsFilter next = switch (filter) { + case "+metadata" -> (ft, c) -> c.isMetadataField(ft.name()); + case "-metadata" -> (ft, c) -> c.isMetadataField(ft.name()) == false; + case "-nested" -> (ft, c) -> c.nestedLookup().getNestedParent(ft.name()) == null; + case "-multifield" -> (ft, c) -> c.isMultiField(ft.name()) == false; + default -> throw new IllegalArgumentException("Unknown field caps filter [" + filter + "]"); + }; + fcf = fcf.and(next); + } + return fcf; + } + } diff --git a/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesNodeRequest.java b/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesNodeRequest.java index a35ed069c63b9..6280b0266ba4c 100644 --- a/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesNodeRequest.java +++ b/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesNodeRequest.java @@ -8,11 +8,13 @@ package org.elasticsearch.action.fieldcaps; +import org.elasticsearch.Version; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.IndicesRequest; import org.elasticsearch.action.OriginalIndices; import org.elasticsearch.action.support.IndicesOptions; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.index.query.QueryBuilder; @@ -28,6 +30,8 @@ class FieldCapabilitiesNodeRequest extends ActionRequest implements IndicesReque private final List shardIds; private final String[] fields; + private final String[] filters; + private final String[] allowedTypes; private final OriginalIndices originalIndices; private final QueryBuilder indexFilter; private final long nowInMillis; @@ -37,6 +41,13 @@ class FieldCapabilitiesNodeRequest extends ActionRequest implements IndicesReque 
super(in); shardIds = in.readList(ShardId::new); fields = in.readStringArray(); + if (in.getVersion().onOrAfter(Version.V_8_2_0)) { + filters = in.readStringArray(); + allowedTypes = in.readStringArray(); + } else { + filters = Strings.EMPTY_ARRAY; + allowedTypes = Strings.EMPTY_ARRAY; + } originalIndices = OriginalIndices.readOriginalIndices(in); indexFilter = in.readOptionalNamedWriteable(QueryBuilder.class); nowInMillis = in.readLong(); @@ -46,6 +57,8 @@ class FieldCapabilitiesNodeRequest extends ActionRequest implements IndicesReque FieldCapabilitiesNodeRequest( List shardIds, String[] fields, + String[] filters, + String[] allowedTypes, OriginalIndices originalIndices, QueryBuilder indexFilter, long nowInMillis, @@ -53,6 +66,8 @@ class FieldCapabilitiesNodeRequest extends ActionRequest implements IndicesReque ) { this.shardIds = Objects.requireNonNull(shardIds); this.fields = fields; + this.filters = filters; + this.allowedTypes = allowedTypes; this.originalIndices = originalIndices; this.indexFilter = indexFilter; this.nowInMillis = nowInMillis; @@ -63,6 +78,14 @@ public String[] fields() { return fields; } + public String[] filters() { + return filters; + } + + public String[] allowedTypes() { + return allowedTypes; + } + public OriginalIndices originalIndices() { return originalIndices; } @@ -98,6 +121,10 @@ public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); out.writeList(shardIds); out.writeStringArray(fields); + if (out.getVersion().onOrAfter(Version.V_8_2_0)) { + out.writeStringArray(filters); + out.writeStringArray(allowedTypes); + } OriginalIndices.writeOriginalIndices(originalIndices, out); out.writeOptionalNamedWriteable(indexFilter); out.writeLong(nowInMillis); @@ -117,6 +144,8 @@ public boolean equals(Object o) { return nowInMillis == that.nowInMillis && shardIds.equals(that.shardIds) && Arrays.equals(fields, that.fields) + && Arrays.equals(filters, that.filters) + && Arrays.equals(allowedTypes, that.allowedTypes) && Objects.equals(originalIndices, that.originalIndices) && Objects.equals(indexFilter, that.indexFilter) && Objects.equals(runtimeFields, that.runtimeFields); @@ -127,6 +156,8 @@ public int hashCode() { int result = Objects.hash(originalIndices, indexFilter, nowInMillis, runtimeFields); result = 31 * result + shardIds.hashCode(); result = 31 * result + Arrays.hashCode(fields); + result = 31 * result + Arrays.hashCode(filters); + result = 31 * result + Arrays.hashCode(allowedTypes); return result; } } diff --git a/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesRequest.java b/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesRequest.java index f66ef34972385..0397f61515fd7 100644 --- a/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesRequest.java +++ b/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesRequest.java @@ -8,6 +8,7 @@ package org.elasticsearch.action.fieldcaps; +import org.elasticsearch.Version; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.IndicesRequest; @@ -35,6 +36,8 @@ public final class FieldCapabilitiesRequest extends ActionRequest implements Ind private String[] indices = Strings.EMPTY_ARRAY; private IndicesOptions indicesOptions = DEFAULT_INDICES_OPTIONS; private String[] fields = Strings.EMPTY_ARRAY; + private String[] filters = Strings.EMPTY_ARRAY; + private String[] allowedTypes = Strings.EMPTY_ARRAY; private boolean 
includeUnmapped = false; // pkg private API mainly for cross cluster search to signal that we do multiple reductions ie. the results should not be merged private boolean mergeResults = true; @@ -52,6 +55,10 @@ public FieldCapabilitiesRequest(StreamInput in) throws IOException { indexFilter = in.readOptionalNamedWriteable(QueryBuilder.class); nowInMillis = in.readOptionalLong(); runtimeFields = in.readMap(); + if (in.getVersion().onOrAfter(Version.V_8_2_0)) { + filters = in.readStringArray(); + allowedTypes = in.readStringArray(); + } } public FieldCapabilitiesRequest() {} @@ -86,6 +93,10 @@ public void writeTo(StreamOutput out) throws IOException { out.writeOptionalNamedWriteable(indexFilter); out.writeOptionalLong(nowInMillis); out.writeMap(runtimeFields); + if (out.getVersion().onOrAfter(Version.V_8_2_0)) { + out.writeStringArray(filters); + out.writeStringArray(allowedTypes); + } } @Override @@ -117,6 +128,24 @@ public String[] fields() { return fields; } + public FieldCapabilitiesRequest filters(String... filters) { + this.filters = filters; + return this; + } + + public String[] filters() { + return filters; + } + + public FieldCapabilitiesRequest allowedTypes(String... types) { + this.allowedTypes = types; + return this; + } + + public String[] allowedTypes() { + return allowedTypes; + } + /** * The list of indices to lookup */ @@ -213,6 +242,8 @@ public boolean equals(Object o) { && Arrays.equals(fields, that.fields) && Objects.equals(indexFilter, that.indexFilter) && Objects.equals(nowInMillis, that.nowInMillis) + && Arrays.equals(filters, that.filters) + && Arrays.equals(allowedTypes, that.allowedTypes) && Objects.equals(runtimeFields, that.runtimeFields); } @@ -221,6 +252,8 @@ public int hashCode() { int result = Objects.hash(indicesOptions, includeUnmapped, mergeResults, indexFilter, nowInMillis, runtimeFields); result = 31 * result + Arrays.hashCode(indices); result = 31 * result + Arrays.hashCode(fields); + result = 31 * result + Arrays.hashCode(filters); + result = 31 * result + Arrays.hashCode(allowedTypes); return result; } @@ -230,6 +263,10 @@ public String getDescription() { Strings.collectionToDelimitedStringWithLimit(Arrays.asList(indices), ",", "", "", 1024, stringBuilder); stringBuilder.append("], fields["); Strings.collectionToDelimitedStringWithLimit(Arrays.asList(fields), ",", "", "", 1024, stringBuilder); + stringBuilder.append("], filters["); + stringBuilder.append(Strings.collectionToDelimitedString(Arrays.asList(filters), ",")); + stringBuilder.append("], types["); + stringBuilder.append(Strings.collectionToDelimitedString(Arrays.asList(allowedTypes), ",")); stringBuilder.append("]"); return stringBuilder.toString(); } diff --git a/server/src/main/java/org/elasticsearch/action/fieldcaps/IndexFieldCapabilities.java b/server/src/main/java/org/elasticsearch/action/fieldcaps/IndexFieldCapabilities.java index 674a8e7da8456..6cefa39f7f6ab 100644 --- a/server/src/main/java/org/elasticsearch/action/fieldcaps/IndexFieldCapabilities.java +++ b/server/src/main/java/org/elasticsearch/action/fieldcaps/IndexFieldCapabilities.java @@ -35,6 +35,19 @@ public class IndexFieldCapabilities implements Writeable { private final TimeSeriesParams.MetricType metricType; private final Map meta; + public static IndexFieldCapabilities withMetadata(IndexFieldCapabilities input, boolean isMetadata) { + return new IndexFieldCapabilities( + input.getName(), + input.getType(), + isMetadata, + input.isSearchable, + input.isAggregatable, + input.isDimension, + input.metricType, + input.meta + ); 
+ } + /** * @param name The name of the field. * @param type The type associated with the field. diff --git a/server/src/main/java/org/elasticsearch/action/fieldcaps/RequestDispatcher.java b/server/src/main/java/org/elasticsearch/action/fieldcaps/RequestDispatcher.java index dc2b99afa46cd..f9d5cff2471b4 100644 --- a/server/src/main/java/org/elasticsearch/action/fieldcaps/RequestDispatcher.java +++ b/server/src/main/java/org/elasticsearch/action/fieldcaps/RequestDispatcher.java @@ -174,6 +174,8 @@ private void sendRequestToNode(String nodeId, List shardIds) { final FieldCapabilitiesNodeRequest nodeRequest = new FieldCapabilitiesNodeRequest( shardIds, fieldCapsRequest.fields(), + fieldCapsRequest.filters(), + fieldCapsRequest.allowedTypes(), originalIndices, fieldCapsRequest.indexFilter(), nowInMillis, diff --git a/server/src/main/java/org/elasticsearch/action/fieldcaps/ResponseRewriter.java b/server/src/main/java/org/elasticsearch/action/fieldcaps/ResponseRewriter.java new file mode 100644 index 0000000000000..59c1caa80c2e6 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/action/fieldcaps/ResponseRewriter.java @@ -0,0 +1,129 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.action.fieldcaps; + +import org.elasticsearch.Version; + +import java.util.HashMap; +import java.util.Map; +import java.util.Set; +import java.util.function.Function; +import java.util.function.Predicate; +import java.util.stream.Collectors; + +final class ResponseRewriter { + + public static Map rewriteOldResponses( + Version version, + Map input, + String[] filters, + String[] allowedTypes, + Predicate isMetadata + ) { + if (version.onOrAfter(Version.V_8_1_0)) { + return input; // nothing needs to be done + } + Function transformer = buildTransformer( + version, + input, + filters, + allowedTypes, + isMetadata + ); + Map rewritten = new HashMap<>(); + for (var entry : input.entrySet()) { + IndexFieldCapabilities fc = transformer.apply(entry.getValue()); + if (fc != null) { + rewritten.put(entry.getKey(), fc); + } + } + return rewritten; + } + + private static Function buildTransformer( + Version version, + Map input, + String[] filters, + String[] allowedTypes, + Predicate isMetadata + ) { + boolean checkMetadata = version.before(Version.V_7_13_0); + Predicate test = ifc -> true; + Set objects = null; + Set nestedObjects = null; + if (allowedTypes.length > 0) { + Set at = Set.of(allowedTypes); + test = test.and(ifc -> at.contains(ifc.getType())); + } + for (String filter : filters) { + if ("-parent".equals(filter)) { + test = test.and(fc -> fc.getType().equals("nested") == false && fc.getType().equals("object") == false); + } + if ("-metadata".equals(filter)) { + test = test.and(fc -> fc.isMetadatafield() == false); + } + if ("+metadata".equals(filter)) { + test = test.and(IndexFieldCapabilities::isMetadatafield); + } + if ("-nested".equals(filter)) { + if (nestedObjects == null) { + nestedObjects = findTypes("nested", input); + } + Set no = nestedObjects; + test = test.and(fc -> isNestedField(fc.getName(), no) == false); + } + if ("-multifield".equals(filter)) { + // immediate parent is not an object field + if (objects == null) { + objects = findTypes("object", input); + 
} + Set o = objects; + test = test.and(fc -> isNotMultifield(fc.getName(), o)); + } + } + Predicate finalTest = test; + return fc -> { + IndexFieldCapabilities rewritten = fc; + if (checkMetadata) { + rewritten = IndexFieldCapabilities.withMetadata(fc, isMetadata.test(fc.getName())); + } + if (finalTest.test(rewritten) == false) { + return null; + } + return rewritten; + }; + } + + private static Set findTypes(String type, Map fieldCaps) { + return fieldCaps.entrySet() + .stream() + .filter(entry -> type.equals(entry.getValue().getType())) + .map(Map.Entry::getKey) + .collect(Collectors.toSet()); + } + + private static boolean isNestedField(String field, Set nestedParents) { + for (String parent : nestedParents) { + if (field.startsWith(parent + ".") || field.equals(parent)) { + return true; + } + } + return false; + } + + private static boolean isNotMultifield(String field, Set objectFields) { + int lastDotPos = field.lastIndexOf("."); + if (lastDotPos == -1) { + return true; + } + String parent = field.substring(0, lastDotPos); + return objectFields.contains(parent); + } + +} diff --git a/server/src/main/java/org/elasticsearch/action/fieldcaps/TransportFieldCapabilitiesAction.java b/server/src/main/java/org/elasticsearch/action/fieldcaps/TransportFieldCapabilitiesAction.java index 16413339d719d..2b17b37a2d893 100644 --- a/server/src/main/java/org/elasticsearch/action/fieldcaps/TransportFieldCapabilitiesAction.java +++ b/server/src/main/java/org/elasticsearch/action/fieldcaps/TransportFieldCapabilitiesAction.java @@ -9,7 +9,6 @@ package org.elasticsearch.action.fieldcaps; import org.elasticsearch.ExceptionsHelper; -import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRunnable; import org.elasticsearch.action.OriginalIndices; @@ -178,12 +177,7 @@ private Runnable createResponseMerger( // fork off to the management pool for merging the responses as the operation can run for longer than is acceptable // on a transport thread in case of large numbers of indices and/or fields threadPool.executor(ThreadPool.Names.SEARCH_COORDINATION) - .submit( - ActionRunnable.supply( - listener, - () -> merge(indexResponses, request.includeUnmapped(), new ArrayList<>(failures)) - ) - ); + .submit(ActionRunnable.supply(listener, () -> merge(indexResponses, request, new ArrayList<>(failures)))); } else { listener.onResponse( new FieldCapabilitiesResponse(new ArrayList<>(indexResponses.values()), new ArrayList<>(failures)) @@ -212,6 +206,8 @@ private static FieldCapabilitiesRequest prepareRemoteRequest( remoteRequest.indicesOptions(originalIndices.indicesOptions()); remoteRequest.indices(originalIndices.indices()); remoteRequest.fields(request.fields()); + remoteRequest.filters(request.filters()); + remoteRequest.allowedTypes(request.allowedTypes()); remoteRequest.runtimeFields(request.runtimeFields()); remoteRequest.indexFilter(request.indexFilter()); remoteRequest.nowInMillis(nowInMillis); @@ -220,7 +216,7 @@ private static FieldCapabilitiesRequest prepareRemoteRequest( private FieldCapabilitiesResponse merge( Map indexResponsesMap, - boolean includeUnmapped, + FieldCapabilitiesRequest request, List failures ) { final List indexResponses = indexResponsesMap.values() @@ -230,12 +226,12 @@ private FieldCapabilitiesResponse merge( final String[] indices = indexResponses.stream().map(FieldCapabilitiesIndexResponse::getIndexName).toArray(String[]::new); final Map> responseMapBuilder = new HashMap<>(); for (FieldCapabilitiesIndexResponse response : 
indexResponses) { - innerMerge(responseMapBuilder, response); + innerMerge(responseMapBuilder, request, response); } final Map> responseMap = new HashMap<>(); for (Map.Entry> entry : responseMapBuilder.entrySet()) { final Map typeMapBuilder = entry.getValue(); - if (includeUnmapped) { + if (request.includeUnmapped()) { addUnmappedFields(indices, entry.getKey(), typeMapBuilder); } boolean multiTypes = typeMapBuilder.size() > 1; @@ -264,14 +260,18 @@ private void addUnmappedFields(String[] indices, String field, Map> responseMapBuilder, + FieldCapabilitiesRequest request, FieldCapabilitiesIndexResponse response ) { - for (Map.Entry entry : response.get().entrySet()) { + Map fields = ResponseRewriter.rewriteOldResponses( + response.getOriginVersion(), + response.get(), + request.filters(), + request.allowedTypes(), + metadataFieldPred + ); + for (Map.Entry entry : fields.entrySet()) { final String field = entry.getKey(); - // best effort to detect metadata field coming from older nodes - final boolean isMetadataField = response.getOriginVersion().onOrAfter(Version.V_7_13_0) - ? entry.getValue().isMetadatafield() - : metadataFieldPred.test(field); final IndexFieldCapabilities fieldCap = entry.getValue(); Map typeMap = responseMapBuilder.computeIfAbsent(field, f -> new HashMap<>()); FieldCapabilities.Builder builder = typeMap.computeIfAbsent( @@ -280,7 +280,7 @@ private void innerMerge( ); builder.add( response.getIndexName(), - isMetadataField, + fieldCap.isMetadatafield(), fieldCap.isSearchable(), fieldCap.isAggregatable(), fieldCap.isDimension(), @@ -355,6 +355,8 @@ public void messageReceived(FieldCapabilitiesNodeRequest request, TransportChann final FieldCapabilitiesIndexResponse response = fieldCapabilitiesFetcher.fetch( shardId, request.fields(), + request.filters(), + request.allowedTypes(), request.indexFilter(), request.nowInMillis(), request.runtimeFields() diff --git a/server/src/main/java/org/elasticsearch/index/mapper/FieldTypeLookup.java b/server/src/main/java/org/elasticsearch/index/mapper/FieldTypeLookup.java index af51b42deda80..c67d64bd2048c 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/FieldTypeLookup.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/FieldTypeLookup.java @@ -25,6 +25,7 @@ final class FieldTypeLookup { private final Map fullNameToFieldType = new HashMap<>(); private final Map dynamicFieldTypes = new HashMap<>(); + private final Set runtimeFieldNames = new HashSet<>(); /** * A map from field name to all fields whose content has been copied into it @@ -80,6 +81,7 @@ final class FieldTypeLookup { for (MappedFieldType fieldType : RuntimeField.collectFieldTypes(runtimeFields).values()) { // this will override concrete fields with runtime fields that have the same name fullNameToFieldType.put(fieldType.name(), fieldType); + runtimeFieldNames.add(fieldType.name()); } } @@ -104,6 +106,10 @@ MappedFieldType get(String field) { return getDynamicField(field); } + boolean isRuntimeField(String field) { + return runtimeFieldNames.contains(field); + } + // for testing int getMaxParentPathDots() { return maxParentPathDots; diff --git a/server/src/main/java/org/elasticsearch/index/mapper/MapperService.java b/server/src/main/java/org/elasticsearch/index/mapper/MapperService.java index 8d5ff6df45b00..d2615641d468f 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/MapperService.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/MapperService.java @@ -510,6 +510,10 @@ public boolean isMetadataField(String field) { return 
mapperRegistry.getMetadataMapperParsers(indexVersionCreated).containsKey(field); } + public boolean isMultiField(String field) { + return mappingLookup().isMultiField(field); + } + public synchronized List reloadSearchAnalyzers(AnalysisRegistry registry) throws IOException { logger.info("reloading search analyzers"); // refresh indexAnalyzers and search analyzers diff --git a/server/src/main/java/org/elasticsearch/index/mapper/MappingLookup.java b/server/src/main/java/org/elasticsearch/index/mapper/MappingLookup.java index 5ce6b05556459..e35b283af0197 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/MappingLookup.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/MappingLookup.java @@ -336,6 +336,9 @@ public NestedLookup nestedLookup() { } public boolean isMultiField(String field) { + if (fieldTypeLookup.isRuntimeField(field)) { + return false; + } String sourceParent = parentObject(field); return sourceParent != null && fieldMappers.containsKey(sourceParent); } diff --git a/server/src/main/java/org/elasticsearch/index/query/SearchExecutionContext.java b/server/src/main/java/org/elasticsearch/index/query/SearchExecutionContext.java index 457f7160306eb..ee584dd9acb38 100644 --- a/server/src/main/java/org/elasticsearch/index/query/SearchExecutionContext.java +++ b/server/src/main/java/org/elasticsearch/index/query/SearchExecutionContext.java @@ -374,6 +374,10 @@ public boolean isMetadataField(String field) { return mapperService.isMetadataField(field); } + public boolean isMultiField(String field) { + return mapperService.isMultiField(field); + } + public Set sourcePath(String fullName) { return mappingLookup.sourcePaths(fullName); } diff --git a/server/src/main/java/org/elasticsearch/rest/action/RestFieldCapabilitiesAction.java b/server/src/main/java/org/elasticsearch/rest/action/RestFieldCapabilitiesAction.java index eb98197793017..f39b3d3a3479c 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/RestFieldCapabilitiesAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/RestFieldCapabilitiesAction.java @@ -50,6 +50,8 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeC fieldRequest.indicesOptions(IndicesOptions.fromRequest(request, fieldRequest.indicesOptions())); fieldRequest.includeUnmapped(request.paramAsBoolean("include_unmapped", false)); + fieldRequest.filters(request.paramAsStringArray("filters", Strings.EMPTY_ARRAY)); + fieldRequest.allowedTypes(request.paramAsStringArray("types", Strings.EMPTY_ARRAY)); request.withContentOrSourceParamParserOrNull(parser -> { if (parser != null) { PARSER.parse(parser, fieldRequest, null); diff --git a/server/src/test/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesFilterTests.java b/server/src/test/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesFilterTests.java new file mode 100644 index 0000000000000..25f64107e5b23 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesFilterTests.java @@ -0,0 +1,221 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.action.fieldcaps; + +import org.elasticsearch.common.Strings; +import org.elasticsearch.index.mapper.MapperService; +import org.elasticsearch.index.mapper.MapperServiceTestCase; +import org.elasticsearch.index.query.SearchExecutionContext; + +import java.io.IOException; +import java.util.function.Predicate; + +public class FieldCapabilitiesFilterTests extends MapperServiceTestCase { + + public void testExcludeNestedFields() throws IOException { + MapperService mapperService = createMapperService(""" + { "_doc" : { + "properties" : { + "field1" : { "type" : "keyword" }, + "field2" : { + "type" : "nested", + "properties" : { + "field3" : { "type" : "keyword" } + } + }, + "field4" : { "type" : "keyword" } + } + } } + """); + SearchExecutionContext sec = createSearchExecutionContext(mapperService); + + FieldCapabilitiesIndexResponse response = FieldCapabilitiesFetcher.retrieveFieldCaps( + "index", + sec, + new String[] { "*" }, + new String[] { "-nested" }, + Strings.EMPTY_ARRAY, + f -> true + ); + + assertNotNull(response.getField("field1")); + assertNotNull(response.getField("field4")); + assertNull(response.getField("field2")); + assertNull(response.getField("field2.field3")); + } + + public void testMetadataFilters() throws IOException { + MapperService mapperService = createMapperService(""" + { "_doc" : { + "properties" : { + "field1" : { "type" : "keyword" }, + "field2" : { "type" : "keyword" } + } + } } + """); + SearchExecutionContext sec = createSearchExecutionContext(mapperService); + + { + FieldCapabilitiesIndexResponse response = FieldCapabilitiesFetcher.retrieveFieldCaps( + "index", + sec, + new String[] { "*" }, + new String[] { "+metadata" }, + Strings.EMPTY_ARRAY, + f -> true + ); + assertNotNull(response.getField("_index")); + assertNull(response.getField("field1")); + } + { + FieldCapabilitiesIndexResponse response = FieldCapabilitiesFetcher.retrieveFieldCaps( + "index", + sec, + new String[] { "*" }, + new String[] { "-metadata" }, + Strings.EMPTY_ARRAY, + f -> true + ); + assertNull(response.getField("_index")); + assertNotNull(response.getField("field1")); + } + } + + public void testExcludeMultifields() throws IOException { + MapperService mapperService = createMapperService(""" + { "_doc" : { + "properties" : { + "field1" : { + "type" : "text", + "fields" : { + "keyword" : { "type" : "keyword" } + } + }, + "field2" : { "type" : "keyword" } + }, + "runtime" : { + "field2.keyword" : { "type" : "keyword" } + } + } } + """); + SearchExecutionContext sec = createSearchExecutionContext(mapperService); + + FieldCapabilitiesIndexResponse response = FieldCapabilitiesFetcher.retrieveFieldCaps( + "index", + sec, + new String[] { "*" }, + new String[] { "-multifield" }, + Strings.EMPTY_ARRAY, + f -> true + ); + assertNotNull(response.getField("field1")); + assertNull(response.getField("field1.keyword")); + assertNotNull(response.getField("field2")); + assertNotNull(response.getField("field2.keyword")); + assertNotNull(response.getField("_index")); + } + + public void testDontIncludeParentInfo() throws IOException { + MapperService mapperService = createMapperService(""" + { "_doc" : { + "properties" : { + "parent" : { + "properties" : { + "field1" : { "type" : "keyword" }, + "field2" : { "type" : "keyword" } + } + } + } + } } + """); + SearchExecutionContext sec = createSearchExecutionContext(mapperService); + + FieldCapabilitiesIndexResponse response = FieldCapabilitiesFetcher.retrieveFieldCaps( + "index", + sec, + new String[] { "*" }, + new 
String[] { "-parent" }, + Strings.EMPTY_ARRAY, + f -> true + ); + assertNotNull(response.getField("parent.field1")); + assertNotNull(response.getField("parent.field2")); + assertNull(response.getField("parent")); + } + + public void testSecurityFilter() throws IOException { + MapperService mapperService = createMapperService(""" + { "_doc" : { + "properties" : { + "permitted1" : { "type" : "keyword" }, + "permitted2" : { "type" : "keyword" }, + "forbidden" : { "type" : "keyword" } + } + } } + """); + SearchExecutionContext sec = createSearchExecutionContext(mapperService); + Predicate securityFilter = f -> f.startsWith("permitted"); + + { + FieldCapabilitiesIndexResponse response = FieldCapabilitiesFetcher.retrieveFieldCaps( + "index", + sec, + new String[] { "*" }, + Strings.EMPTY_ARRAY, + Strings.EMPTY_ARRAY, + securityFilter + ); + + assertNotNull(response.getField("permitted1")); + assertNull(response.getField("forbidden")); + assertNotNull(response.getField("_index")); // security filter doesn't apply to metadata + } + + { + FieldCapabilitiesIndexResponse response = FieldCapabilitiesFetcher.retrieveFieldCaps( + "index", + sec, + new String[] { "*" }, + new String[] { "-metadata" }, + Strings.EMPTY_ARRAY, + securityFilter + ); + + assertNotNull(response.getField("permitted1")); + assertNull(response.getField("forbidden")); + assertNull(response.getField("_index")); // -metadata filter applies on top + } + } + + public void testFieldTypeFiltering() throws IOException { + MapperService mapperService = createMapperService(""" + { "_doc" : { + "properties" : { + "field1" : { "type" : "keyword" }, + "field2" : { "type" : "long" }, + "field3" : { "type" : "text" } + } + } } + """); + SearchExecutionContext sec = createSearchExecutionContext(mapperService); + + FieldCapabilitiesIndexResponse response = FieldCapabilitiesFetcher.retrieveFieldCaps( + "index", + sec, + new String[] { "*" }, + Strings.EMPTY_ARRAY, + new String[] { "text", "keyword" }, + f -> true + ); + assertNotNull(response.getField("field1")); + assertNull(response.getField("field2")); + assertNotNull(response.getField("field3")); + assertNull(response.getField("_index")); + } +} diff --git a/server/src/test/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesNodeRequestTests.java b/server/src/test/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesNodeRequestTests.java index 8810cdb3a7a25..549bd4e299737 100644 --- a/server/src/test/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesNodeRequestTests.java +++ b/server/src/test/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesNodeRequestTests.java @@ -10,6 +10,7 @@ import org.elasticsearch.action.OriginalIndices; import org.elasticsearch.action.support.IndicesOptions; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.settings.Settings; @@ -31,6 +32,8 @@ public class FieldCapabilitiesNodeRequestTests extends AbstractWireSerializingTe protected FieldCapabilitiesNodeRequest createTestInstance() { List randomShards = randomShardIds(randomIntBetween(1, 5)); String[] randomFields = randomFields(randomIntBetween(1, 20)); + String[] randomFilter = randomBoolean() ? Strings.EMPTY_ARRAY : new String[] { "-nested" }; + String[] randomTypeFilter = randomBoolean() ? 
Strings.EMPTY_ARRAY : new String[] { "keyword" }; OriginalIndices originalIndices = randomOriginalIndices(randomIntBetween(0, 20)); QueryBuilder indexFilter = randomBoolean() ? QueryBuilders.termQuery("field", randomAlphaOfLength(5)) : null; @@ -40,7 +43,16 @@ protected FieldCapabilitiesNodeRequest createTestInstance() { ? Collections.singletonMap(randomAlphaOfLength(5), randomAlphaOfLength(5)) : null; - return new FieldCapabilitiesNodeRequest(randomShards, randomFields, originalIndices, indexFilter, nowInMillis, runtimeFields); + return new FieldCapabilitiesNodeRequest( + randomShards, + randomFields, + randomFilter, + randomTypeFilter, + originalIndices, + indexFilter, + nowInMillis, + runtimeFields + ); } private List randomShardIds(int numShards) { @@ -81,12 +93,14 @@ protected Writeable.Reader instanceReader() { @Override protected FieldCapabilitiesNodeRequest mutateInstance(FieldCapabilitiesNodeRequest instance) throws IOException { - switch (random().nextInt(5)) { + switch (random().nextInt(7)) { case 0 -> { List shardIds = randomShardIds(instance.shardIds().size() + 1); return new FieldCapabilitiesNodeRequest( shardIds, instance.fields(), + instance.filters(), + instance.allowedTypes(), instance.originalIndices(), instance.indexFilter(), instance.nowInMillis(), @@ -98,6 +112,8 @@ protected FieldCapabilitiesNodeRequest mutateInstance(FieldCapabilitiesNodeReque return new FieldCapabilitiesNodeRequest( instance.shardIds(), fields, + instance.filters(), + instance.allowedTypes(), instance.originalIndices(), instance.indexFilter(), instance.nowInMillis(), @@ -109,6 +125,8 @@ protected FieldCapabilitiesNodeRequest mutateInstance(FieldCapabilitiesNodeReque return new FieldCapabilitiesNodeRequest( instance.shardIds(), instance.fields(), + instance.filters(), + instance.allowedTypes(), originalIndices, instance.indexFilter(), instance.nowInMillis(), @@ -120,6 +138,8 @@ protected FieldCapabilitiesNodeRequest mutateInstance(FieldCapabilitiesNodeReque return new FieldCapabilitiesNodeRequest( instance.shardIds(), instance.fields(), + instance.filters(), + instance.allowedTypes(), instance.originalIndices(), indexFilter, instance.nowInMillis(), @@ -131,6 +151,8 @@ protected FieldCapabilitiesNodeRequest mutateInstance(FieldCapabilitiesNodeReque return new FieldCapabilitiesNodeRequest( instance.shardIds(), instance.fields(), + instance.filters(), + instance.allowedTypes(), instance.originalIndices(), instance.indexFilter(), nowInMillis, @@ -144,13 +166,41 @@ protected FieldCapabilitiesNodeRequest mutateInstance(FieldCapabilitiesNodeReque return new FieldCapabilitiesNodeRequest( instance.shardIds(), instance.fields(), + instance.filters(), + instance.allowedTypes(), instance.originalIndices(), instance.indexFilter(), instance.nowInMillis(), runtimeFields ); } - default -> throw new IllegalStateException("The test should only allow 5 parameters mutated"); + case 6 -> { + String[] randomFilter = instance.filters().length > 0 ? Strings.EMPTY_ARRAY : new String[] { "-nested" }; + return new FieldCapabilitiesNodeRequest( + instance.shardIds(), + instance.fields(), + randomFilter, + instance.allowedTypes(), + instance.originalIndices(), + instance.indexFilter(), + instance.nowInMillis(), + instance.runtimeFields() + ); + } + case 7 -> { + String[] randomType = instance.allowedTypes().length > 0 ? 
Strings.EMPTY_ARRAY : new String[] { "text" }; + return new FieldCapabilitiesNodeRequest( + instance.shardIds(), + instance.fields(), + instance.filters(), + randomType, + instance.originalIndices(), + instance.indexFilter(), + instance.nowInMillis(), + instance.runtimeFields() + ); + } + default -> throw new IllegalStateException("The test should only allow 7 parameters mutated"); } } } diff --git a/server/src/test/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesRequestTests.java b/server/src/test/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesRequestTests.java index 566fa43a8da62..dc72d180ef0c2 100644 --- a/server/src/test/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesRequestTests.java +++ b/server/src/test/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesRequestTests.java @@ -10,6 +10,7 @@ import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.support.IndicesOptions; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.Writeable; @@ -146,14 +147,20 @@ public void testValidation() { public void testGetDescription() { final FieldCapabilitiesRequest request = new FieldCapabilitiesRequest(); - assertThat(request.getDescription(), equalTo("indices[], fields[]")); + assertThat(request.getDescription(), equalTo("indices[], fields[], filters[], types[]")); request.fields("a", "b"); - assertThat(request.getDescription(), anyOf(equalTo("indices[], fields[a,b]"), equalTo("indices[], fields[b,a]"))); + assertThat( + request.getDescription(), + anyOf(equalTo("indices[], fields[a,b], filters[], types[]"), equalTo("indices[], fields[b,a], filters[], types[]")) + ); request.indices("x", "y", "z"); request.fields("a"); - assertThat(request.getDescription(), equalTo("indices[x,y,z], fields[a]")); + assertThat(request.getDescription(), equalTo("indices[x,y,z], fields[a], filters[], types[]")); + + request.filters("-metadata", "-multifields"); + assertThat(request.getDescription(), endsWith("filters[-metadata,-multifields], types[]")); final String[] lots = new String[between(1024, 2048)]; for (int i = 0; i < lots.length; i++) { @@ -162,6 +169,7 @@ public void testGetDescription() { request.indices("x", "y", "z"); request.fields(lots); + request.filters(Strings.EMPTY_ARRAY); assertThat( request.getDescription(), allOf( @@ -173,7 +181,7 @@ public void testGetDescription() { ); assertThat( request.getDescription().length(), - lessThanOrEqualTo(1024 + ("indices[x,y,z], fields[" + "s9999,... (9999 in total, 9999 omitted)]").length()) + lessThanOrEqualTo(1024 + ("indices[x,y,z], fields[" + "s9999,... (9999 in total, 9999 omitted)], filters[], types[]").length()) ); request.fields("a"); @@ -185,12 +193,12 @@ public void testGetDescription() { containsString("..."), containsString(lots.length + " in total"), containsString("omitted"), - endsWith("], fields[a]") + endsWith("], fields[a], filters[], types[]") ) ); assertThat( request.getDescription().length(), - lessThanOrEqualTo(1024 + ("indices[" + "s9999,... (9999 in total, 9999 omitted)], fields[a]").length()) + lessThanOrEqualTo(1024 + ("indices[" + "s9999,... 
(9999 in total, 9999 omitted)], fields[a], filters[], types[]").length()) ); final FieldCapabilitiesRequest randomRequest = createTestInstance(); diff --git a/server/src/test/java/org/elasticsearch/action/fieldcaps/ResponseRewriterTests.java b/server/src/test/java/org/elasticsearch/action/fieldcaps/ResponseRewriterTests.java new file mode 100644 index 0000000000000..830f9bb42912b --- /dev/null +++ b/server/src/test/java/org/elasticsearch/action/fieldcaps/ResponseRewriterTests.java @@ -0,0 +1,158 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.action.fieldcaps; + +import org.elasticsearch.Version; +import org.elasticsearch.common.Strings; +import org.elasticsearch.test.ESTestCase; + +import java.util.Collections; +import java.util.Map; + +public class ResponseRewriterTests extends ESTestCase { + + public void testExcludeMetadata() { + Map oldResponse = Map.of( + "field", + fieldCaps("field", "keyword", false), + "_index", + fieldCaps("_index", "_index", true) + ); + + Map rewritten = ResponseRewriter.rewriteOldResponses( + Version.V_8_0_0, + oldResponse, + new String[] { "-metadata" }, + Strings.EMPTY_ARRAY, + f -> f.startsWith("_") + ); + + assertTrue(rewritten.containsKey("field")); + assertFalse(rewritten.containsKey("_index")); + } + + public void testIncludeOnlyMetadata() { + Map oldResponse = Map.of( + "field", + fieldCaps("field", "keyword", false), + "_index", + fieldCaps("_index", "_index", true) + ); + + Map rewritten = ResponseRewriter.rewriteOldResponses( + Version.V_8_0_0, + oldResponse, + new String[] { "+metadata" }, + Strings.EMPTY_ARRAY, + f -> f.startsWith("_") + ); + + assertFalse(rewritten.containsKey("field")); + assertTrue(rewritten.containsKey("_index")); + } + + public void testExcludeNested() { + Map oldResponse = Map.of( + "field", + fieldCaps("field", "keyword", false), + "parent", + fieldCaps("parent", "nested", false), + "parent.child", + fieldCaps("parent.child", "keyword", false) + ); + + Map rewritten = ResponseRewriter.rewriteOldResponses( + Version.V_8_0_0, + oldResponse, + new String[] { "-nested" }, + Strings.EMPTY_ARRAY, + f -> f.startsWith("_") + ); + + assertTrue(rewritten.containsKey("field")); + assertFalse(rewritten.containsKey("parent.child")); + assertFalse(rewritten.containsKey("parent")); + } + + public void testExcludeMultifield() { + Map oldResponse = Map.of( + "field", + fieldCaps("field", "text", false), + "field.keyword", + fieldCaps("field.keyword", "keyword", false), + "parent", + fieldCaps("parent", "object", false), + "parent.child", + fieldCaps("parent.child", "keyword", false) + ); + + Map rewritten = ResponseRewriter.rewriteOldResponses( + Version.V_8_0_0, + oldResponse, + new String[] { "-multifield" }, + Strings.EMPTY_ARRAY, + f -> f.startsWith("_") + ); + + assertTrue(rewritten.containsKey("field")); + assertFalse(rewritten.containsKey("field.keyword")); + assertTrue(rewritten.containsKey("parent.child")); + } + + public void testExcludeParents() { + Map oldResponse = Map.of( + "field", + fieldCaps("field", "text", false), + "parent", + fieldCaps("parent", "object", false), + "parent.child", + fieldCaps("parent.child", "keyword", false) + ); + + Map rewritten = 
ResponseRewriter.rewriteOldResponses( + Version.V_8_0_0, + oldResponse, + new String[] { "-parent" }, + Strings.EMPTY_ARRAY, + f -> f.startsWith("_") + ); + + assertTrue(rewritten.containsKey("field")); + assertFalse(rewritten.containsKey("parent")); + assertTrue(rewritten.containsKey("parent.child")); + } + + public void testAllowedTypes() { + Map oldResponse = Map.of( + "text", + fieldCaps("text", "text", false), + "long", + fieldCaps("long", "long", false), + "keyword", + fieldCaps("keyword", "keyword", false) + ); + + Map rewritten = ResponseRewriter.rewriteOldResponses( + Version.V_8_0_0, + oldResponse, + Strings.EMPTY_ARRAY, + new String[] { "text", "keyword" }, + f -> f.startsWith("_") + ); + + assertTrue(rewritten.containsKey("text")); + assertTrue(rewritten.containsKey("keyword")); + assertFalse(rewritten.containsKey("long")); + } + + private static IndexFieldCapabilities fieldCaps(String name, String type, boolean isMetadata) { + return new IndexFieldCapabilities(name, type, isMetadata, true, true, false, null, Collections.emptyMap()); + } + +} diff --git a/x-pack/qa/runtime-fields/build.gradle b/x-pack/qa/runtime-fields/build.gradle index a115b74a0bc90..52a3f4c1c864f 100644 --- a/x-pack/qa/runtime-fields/build.gradle +++ b/x-pack/qa/runtime-fields/build.gradle @@ -71,7 +71,7 @@ subprojects { /////// TO FIX /////// 'search.aggregation/40_range/Date range', //source only date field should also emit values for numbers, it expects strings only 'search/115_multiple_field_collapsing/two levels fields collapsing', // Field collapsing on a runtime field does not work - 'field_caps/30_filter/Field caps with index filter', // We don't support filtering field caps on runtime fields. What should we do? + 'field_caps/30_index_filter/Field caps with index filter', // We don't support filtering field caps on runtime fields. What should we do? 'search.aggregation/220_filters_bucket/cache busting', // runtime keyword does not support split_queries_on_whitespace 'search/140_pre_filter_search_shards/pre_filter_shard_size with shards that have no hit', //completion suggester does not return options when the context field is a geo_point runtime field From 7bc5b21eb280d864d6bdd8d271937a9deb017f3f Mon Sep 17 00:00:00 2001 From: Yannick Welsch Date: Thu, 10 Feb 2022 16:12:31 +0100 Subject: [PATCH 040/167] Don't set index.version.compatibility when not needed (#83790) When cloning, shrinking or splitting an index, we shouldn't set `index.version.compatibility` unless it appears in the source index. Otherwise we will run into trouble on mixed-version clusters where this setting isn't known. Marked as non-issue as it's an unreleased bug. 
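A minimal standalone sketch of the idea (plain Java, with a Map standing in
for the real Settings/IndexMetadata classes, so the names below are
illustrative rather than the actual Elasticsearch API): version settings that
older nodes may not understand are copied to the resize target only when the
source index explicitly defines them.

    import java.util.HashMap;
    import java.util.Map;

    public class ResizeSettingsSketch {
        // Stand-in for IndexMetadata.SETTING_VERSION_COMPATIBILITY.
        static final String VERSION_COMPATIBILITY = "index.version.compatibility";

        // Copies version-related settings from the source index to the resize
        // target. The compatibility version is propagated only when the source
        // defines it, so nodes that do not know the setting never see it.
        static Map<String, String> prepareResizeSettings(Map<String, String> source) {
            Map<String, String> target = new HashMap<>();
            target.put("index.version.created", source.get("index.version.created"));
            if (source.containsKey(VERSION_COMPATIBILITY)) {
                target.put(VERSION_COMPATIBILITY, source.get(VERSION_COMPATIBILITY));
            }
            return target;
        }

        public static void main(String[] args) {
            Map<String, String> withoutCompat = Map.of("index.version.created", "8.1.0");
            Map<String, String> withCompat = new HashMap<>(withoutCompat);
            withCompat.put(VERSION_COMPATIBILITY, "7.17.0");

            // First result omits the compatibility setting, second includes it:
            System.out.println(prepareResizeSettings(withoutCompat));
            System.out.println(prepareResizeSettings(withCompat));
        }
    }

The real change applies the same guard via Settings#hasValue inside
prepareResizeIndexSettings, as shown in the diff below.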
--- .../cluster/metadata/MetadataCreateIndexService.java | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexService.java b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexService.java index 3c639a5d15012..993d7cba8a3f1 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexService.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexService.java @@ -1463,11 +1463,13 @@ static void prepareResizeIndexSettings( } indexSettingsBuilder.put(IndexMetadata.SETTING_VERSION_CREATED, sourceMetadata.getCreationVersion()) - .put(IndexMetadata.SETTING_VERSION_COMPATIBILITY, sourceMetadata.getCompatibilityVersion()) .put(builder.build()) .put(IndexMetadata.SETTING_ROUTING_PARTITION_SIZE, sourceMetadata.getRoutingPartitionSize()) .put(IndexMetadata.INDEX_RESIZE_SOURCE_NAME.getKey(), resizeSourceIndex.getName()) .put(IndexMetadata.INDEX_RESIZE_SOURCE_UUID.getKey(), resizeSourceIndex.getUUID()); + if (sourceMetadata.getSettings().hasValue(IndexMetadata.SETTING_VERSION_COMPATIBILITY)) { + indexSettingsBuilder.put(IndexMetadata.SETTING_VERSION_COMPATIBILITY, sourceMetadata.getCompatibilityVersion()); + } } /** From 50bba6b19a0db75158936334f50307530adc9b5c Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Thu, 10 Feb 2022 16:35:13 +0100 Subject: [PATCH 041/167] Place index_mode field behind feature flag. (#83650) Ensure that `index_mode` field in data stream template is behind `es.index_mode_feature_flag_registered` feature flag. This was not the case. --- .../cluster/metadata/ComposableIndexTemplate.java | 12 ++++++++++-- 1 file changed, 10 insertions(+), 2 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/ComposableIndexTemplate.java b/server/src/main/java/org/elasticsearch/cluster/metadata/ComposableIndexTemplate.java index 0c34449fdea20..55063ce658d46 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/ComposableIndexTemplate.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/ComposableIndexTemplate.java @@ -18,6 +18,7 @@ import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.core.Nullable; import org.elasticsearch.index.IndexMode; +import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.mapper.DataStreamTimestampFieldMapper; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.xcontent.ConstructingObjectParser; @@ -312,7 +313,12 @@ public static class DataStreamTemplate implements Writeable, ToXContentObject { "data_stream_template", false, args -> { - IndexMode indexMode = args[2] != null ? IndexMode.fromString((String) args[2]) : null; + IndexMode indexMode; + if (IndexSettings.isTimeSeriesModeEnabled()) { + indexMode = args[2] != null ? 
IndexMode.fromString((String) args[2]) : null; + } else { + indexMode = null; + } return new DataStreamTemplate(args[0] != null && (boolean) args[0], args[1] != null && (boolean) args[1], indexMode); } ); @@ -320,7 +326,9 @@ public static class DataStreamTemplate implements Writeable, ToXContentObject { static { PARSER.declareBoolean(ConstructingObjectParser.optionalConstructorArg(), HIDDEN); PARSER.declareBoolean(ConstructingObjectParser.optionalConstructorArg(), ALLOW_CUSTOM_ROUTING); - PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), INDEX_MODE); + if (IndexSettings.isTimeSeriesModeEnabled()) { + PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), INDEX_MODE); + } } private final boolean hidden; From c3b88394347ffb00d6ccdbc0637204d072117f81 Mon Sep 17 00:00:00 2001 From: Tim Brooks Date: Thu, 10 Feb 2022 09:07:58 -0700 Subject: [PATCH 042/167] Lowercase health status in xcontent (#83765) Currently, the health status is serialized as an all uppercase enum. This is inconsistent with how we serialize it in the cluster health API. This PR changes it to be lowercased. --- .../resources/rest-api-spec/test/health/10_basic.yml | 6 +++--- .../main/java/org/elasticsearch/health/GetHealthAction.java | 2 +- .../org/elasticsearch/health/HealthComponentResult.java | 2 +- .../org/elasticsearch/health/HealthIndicatorResult.java | 2 +- .../main/java/org/elasticsearch/health/HealthStatus.java | 5 +++++ 5 files changed, 11 insertions(+), 6 deletions(-) diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/health/10_basic.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/health/10_basic.yml index 7cad7c2471710..53499ffd8b201 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/health/10_basic.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/health/10_basic.yml @@ -8,10 +8,10 @@ _internal.health: {} - is_true: cluster_name - - match: { status: "GREEN" } + - match: { status: "green" } - match: { impacts: [] } - - match: { components.cluster_coordination.status: "GREEN" } - - match: { components.cluster_coordination.indicators.instance_has_master.status: "GREEN" } + - match: { components.cluster_coordination.status: "green" } + - match: { components.cluster_coordination.indicators.instance_has_master.status: "green" } - match: { components.cluster_coordination.indicators.instance_has_master.summary: "Health coordinating instance has a master node." 
} - is_true: components.cluster_coordination.indicators.instance_has_master.details.coordinating_node.node_id - is_true: components.cluster_coordination.indicators.instance_has_master.details.coordinating_node.name diff --git a/server/src/main/java/org/elasticsearch/health/GetHealthAction.java b/server/src/main/java/org/elasticsearch/health/GetHealthAction.java index 309643656802b..926f58646bf58 100644 --- a/server/src/main/java/org/elasticsearch/health/GetHealthAction.java +++ b/server/src/main/java/org/elasticsearch/health/GetHealthAction.java @@ -82,7 +82,7 @@ public void writeTo(StreamOutput out) throws IOException { @Override public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { builder.startObject(); - builder.field("status", status); + builder.field("status", status.xContentValue()); builder.field("cluster_name", clusterName.value()); builder.array("impacts"); builder.startObject("components"); diff --git a/server/src/main/java/org/elasticsearch/health/HealthComponentResult.java b/server/src/main/java/org/elasticsearch/health/HealthComponentResult.java index 625ae6fb0a926..f2c0db461ddbb 100644 --- a/server/src/main/java/org/elasticsearch/health/HealthComponentResult.java +++ b/server/src/main/java/org/elasticsearch/health/HealthComponentResult.java @@ -28,7 +28,7 @@ public HealthIndicatorResult findIndicator(String name) { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); - builder.field("status", status); + builder.field("status", status.xContentValue()); builder.startObject("indicators"); for (HealthIndicatorResult indicator : indicators) { builder.field(indicator.name(), indicator, params); diff --git a/server/src/main/java/org/elasticsearch/health/HealthIndicatorResult.java b/server/src/main/java/org/elasticsearch/health/HealthIndicatorResult.java index ad24761e1766e..bec67b47f585c 100644 --- a/server/src/main/java/org/elasticsearch/health/HealthIndicatorResult.java +++ b/server/src/main/java/org/elasticsearch/health/HealthIndicatorResult.java @@ -24,7 +24,7 @@ public static HealthIndicatorResult of(String name, String component, HealthStat @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); - builder.field("status", status); + builder.field("status", status.xContentValue()); builder.field("summary", summary); builder.field("details", details, params); // TODO 83303: Add detail / documentation diff --git a/server/src/main/java/org/elasticsearch/health/HealthStatus.java b/server/src/main/java/org/elasticsearch/health/HealthStatus.java index a9e6a7dba9d44..1094c35b5aac3 100644 --- a/server/src/main/java/org/elasticsearch/health/HealthStatus.java +++ b/server/src/main/java/org/elasticsearch/health/HealthStatus.java @@ -13,6 +13,7 @@ import java.io.IOException; import java.util.Comparator; +import java.util.Locale; import java.util.stream.Stream; public enum HealthStatus implements Writeable { @@ -38,4 +39,8 @@ public byte value() { public static HealthStatus merge(Stream statuses) { return statuses.max(Comparator.comparing(HealthStatus::value)).orElse(GREEN); } + + public String xContentValue() { + return name().toLowerCase(Locale.ROOT); + } } From 87de9f8904bc826a9163c5c801efcfd81ab45a8b Mon Sep 17 00:00:00 2001 From: Artem Prigoda Date: Thu, 10 Feb 2022 17:34:13 +0100 Subject: [PATCH 043/167] Revert "Add "Has eligible master" Indicator for Health API (#82874)" (#83794) This 
reverts commit 7ce47613c3aae57b55ea824cc9a49b18747b6b3b. --- docs/changelog/82874.yaml | 5 -- .../HasEligibleMasterNodeIndicator.java | 54 ----------- .../health/HealthIndicatorDetails.java | 7 +- .../health/HealthIndicatorResult.java | 4 - .../java/org/elasticsearch/node/Node.java | 4 +- .../HasEligibleMasterNodeIndicatorTests.java | 89 ------------------- 6 files changed, 2 insertions(+), 161 deletions(-) delete mode 100644 docs/changelog/82874.yaml delete mode 100644 server/src/main/java/org/elasticsearch/cluster/coordination/indicators/HasEligibleMasterNodeIndicator.java delete mode 100644 server/src/test/java/org/elasticsearch/cluster/coordination/indicators/HasEligibleMasterNodeIndicatorTests.java diff --git a/docs/changelog/82874.yaml b/docs/changelog/82874.yaml deleted file mode 100644 index ad41633e99ce7..0000000000000 --- a/docs/changelog/82874.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 82874 -summary: 'Add "Has eligible master" Indicator for Health API' -area: Health -type: feature -issues: [] diff --git a/server/src/main/java/org/elasticsearch/cluster/coordination/indicators/HasEligibleMasterNodeIndicator.java b/server/src/main/java/org/elasticsearch/cluster/coordination/indicators/HasEligibleMasterNodeIndicator.java deleted file mode 100644 index 466798cfc314d..0000000000000 --- a/server/src/main/java/org/elasticsearch/cluster/coordination/indicators/HasEligibleMasterNodeIndicator.java +++ /dev/null @@ -1,54 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ - -package org.elasticsearch.cluster.coordination.indicators; - -import org.elasticsearch.cluster.node.DiscoveryNode; -import org.elasticsearch.cluster.node.DiscoveryNodeRole; -import org.elasticsearch.cluster.service.ClusterService; -import org.elasticsearch.health.HealthIndicatorResult; -import org.elasticsearch.health.HealthIndicatorService; -import org.elasticsearch.health.HealthStatus; - -import static org.elasticsearch.health.ServerHealthComponents.CLUSTER_COORDINATION; - -public class HasEligibleMasterNodeIndicator implements HealthIndicatorService { - - public static final String HAS_ELIGIBLE_MASTER = "has_eligible_master"; - - private final ClusterService clusterService; - - public HasEligibleMasterNodeIndicator(ClusterService clusterService) { - this.clusterService = clusterService; - } - - @Override - public String name() { - return HAS_ELIGIBLE_MASTER; - } - - @Override - public String component() { - return CLUSTER_COORDINATION; - } - - @Override - public HealthIndicatorResult calculate() { - for (DiscoveryNode node : clusterService.state().nodes()) { - if (node.getRoles().contains(DiscoveryNodeRole.MASTER_ROLE)) { - return HealthIndicatorResult.of( - HAS_ELIGIBLE_MASTER, - CLUSTER_COORDINATION, - HealthStatus.GREEN, - "There is a master-eligible node." 
- ); - } - } - return HealthIndicatorResult.of(HAS_ELIGIBLE_MASTER, CLUSTER_COORDINATION, HealthStatus.RED, "No master-eligible nodes."); - } -} diff --git a/server/src/main/java/org/elasticsearch/health/HealthIndicatorDetails.java b/server/src/main/java/org/elasticsearch/health/HealthIndicatorDetails.java index b0fd50a1024f7..e085b9981e496 100644 --- a/server/src/main/java/org/elasticsearch/health/HealthIndicatorDetails.java +++ b/server/src/main/java/org/elasticsearch/health/HealthIndicatorDetails.java @@ -18,12 +18,7 @@ public interface HealthIndicatorDetails extends ToXContentObject { HealthIndicatorDetails EMPTY = new HealthIndicatorDetails() { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - return builder.startObject().endObject(); - } - - @Override - public String toString() { - return "HealthIndicatorDetails{}"; + return builder; } }; } diff --git a/server/src/main/java/org/elasticsearch/health/HealthIndicatorResult.java b/server/src/main/java/org/elasticsearch/health/HealthIndicatorResult.java index bec67b47f585c..5d54ecb709d56 100644 --- a/server/src/main/java/org/elasticsearch/health/HealthIndicatorResult.java +++ b/server/src/main/java/org/elasticsearch/health/HealthIndicatorResult.java @@ -17,10 +17,6 @@ public record HealthIndicatorResult(String name, String component, HealthStatus implements ToXContentObject { - public static HealthIndicatorResult of(String name, String component, HealthStatus status, String summary) { - return new HealthIndicatorResult(name, component, status, summary, HealthIndicatorDetails.EMPTY); - } - @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); diff --git a/server/src/main/java/org/elasticsearch/node/Node.java b/server/src/main/java/org/elasticsearch/node/Node.java index 35e5970f4ca6a..3c4d699160799 100644 --- a/server/src/main/java/org/elasticsearch/node/Node.java +++ b/server/src/main/java/org/elasticsearch/node/Node.java @@ -41,7 +41,6 @@ import org.elasticsearch.cluster.action.index.MappingUpdatedAction; import org.elasticsearch.cluster.coordination.Coordinator; import org.elasticsearch.cluster.coordination.InstanceHasMasterHealthIndicatorService; -import org.elasticsearch.cluster.coordination.indicators.HasEligibleMasterNodeIndicator; import org.elasticsearch.cluster.desirednodes.DesiredNodesSettingsValidator; import org.elasticsearch.cluster.metadata.IndexMetadataVerifier; import org.elasticsearch.cluster.metadata.IndexTemplateMetadata; @@ -902,8 +901,7 @@ protected Node( ); List serverHealthIndicatorServices = List.of( - new InstanceHasMasterHealthIndicatorService(clusterService), - new HasEligibleMasterNodeIndicator(clusterService) + new InstanceHasMasterHealthIndicatorService(clusterService) ); List pluginHealthIndicatorServices = pluginsService.filterPlugins(HealthPlugin.class) .stream() diff --git a/server/src/test/java/org/elasticsearch/cluster/coordination/indicators/HasEligibleMasterNodeIndicatorTests.java b/server/src/test/java/org/elasticsearch/cluster/coordination/indicators/HasEligibleMasterNodeIndicatorTests.java deleted file mode 100644 index db342678bfdb6..0000000000000 --- a/server/src/test/java/org/elasticsearch/cluster/coordination/indicators/HasEligibleMasterNodeIndicatorTests.java +++ /dev/null @@ -1,89 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. 
Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ - -package org.elasticsearch.cluster.coordination.indicators; - -import org.elasticsearch.Version; -import org.elasticsearch.cluster.ClusterName; -import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.node.DiscoveryNode; -import org.elasticsearch.cluster.node.DiscoveryNodes; -import org.elasticsearch.cluster.service.ClusterService; -import org.elasticsearch.common.transport.TransportAddress; -import org.elasticsearch.health.HealthIndicatorResult; -import org.elasticsearch.health.HealthStatus; -import org.elasticsearch.test.ESTestCase; -import org.mockito.Mockito; - -import java.util.Set; - -import static java.util.Collections.emptyMap; -import static org.elasticsearch.cluster.node.DiscoveryNodeRole.DATA_ROLE; -import static org.elasticsearch.cluster.node.DiscoveryNodeRole.INGEST_ROLE; -import static org.elasticsearch.cluster.node.DiscoveryNodeRole.MASTER_ROLE; -import static org.elasticsearch.cluster.node.DiscoveryNodeRole.TRANSFORM_ROLE; - -public class HasEligibleMasterNodeIndicatorTests extends ESTestCase { - - public void testIsGreenIfThereIsMasterNode() { - ClusterState clusterState = ClusterState.builder(new ClusterName("test-cluster")) - .nodes( - DiscoveryNodes.builder() - .add(new DiscoveryNode("node1", transportAddress(), emptyMap(), Set.of(DATA_ROLE), Version.CURRENT)) - .add(new DiscoveryNode("node2", transportAddress(), emptyMap(), Set.of(DATA_ROLE), Version.CURRENT)) - .add(new DiscoveryNode("node3", transportAddress(), emptyMap(), Set.of(MASTER_ROLE), Version.CURRENT)) - .build() - ) - .build(); - - ClusterService clusterService = clusterService(clusterState); - HealthIndicatorResult noEligibleMasterNodes = new HasEligibleMasterNodeIndicator(clusterService).calculate(); - - assertEquals("has_eligible_master", noEligibleMasterNodes.name()); - assertEquals(HealthStatus.GREEN, noEligibleMasterNodes.status()); - assertEquals("There is a master-eligible node.", noEligibleMasterNodes.summary()); - } - - public void testIsRedIfThereNoMasterNodes() { - ClusterState clusterState = ClusterState.builder(new ClusterName("test-cluster")) - .nodes( - DiscoveryNodes.builder() - .add(new DiscoveryNode("node_1", transportAddress(), emptyMap(), Set.of(DATA_ROLE), Version.CURRENT)) - .add(new DiscoveryNode("node_2", transportAddress(), emptyMap(), Set.of(TRANSFORM_ROLE), Version.CURRENT)) - .add(new DiscoveryNode("node_3", transportAddress(), emptyMap(), Set.of(INGEST_ROLE), Version.CURRENT)) - .build() - ) - .build(); - - HealthIndicatorResult noEligibleMasterNodes = new HasEligibleMasterNodeIndicator(clusterService(clusterState)).calculate(); - - assertEquals("has_eligible_master", noEligibleMasterNodes.name()); - assertEquals(HealthStatus.RED, noEligibleMasterNodes.status()); - assertEquals("No master-eligible nodes.", noEligibleMasterNodes.summary()); - } - - public void testRedIfThereNoNodes() { - ClusterState clusterState = ClusterState.builder(new ClusterName("test-cluster")).nodes(DiscoveryNodes.builder().build()).build(); - - HealthIndicatorResult noEligibleMasterNodes = new HasEligibleMasterNodeIndicator(clusterService(clusterState)).calculate(); - - assertEquals("has_eligible_master", noEligibleMasterNodes.name()); - assertEquals(HealthStatus.RED, noEligibleMasterNodes.status()); - assertEquals("No master-eligible nodes.", 
noEligibleMasterNodes.summary()); - } - - private static TransportAddress transportAddress() { - return buildNewFakeTransportAddress(); - } - - private static ClusterService clusterService(ClusterState clusterState) { - ClusterService clusterService = Mockito.mock(ClusterService.class); - Mockito.when(clusterService.state()).thenReturn(clusterState); - return clusterService; - } -} From 2f9ceaa449ac49e392cdc7833a60eafe337c75a5 Mon Sep 17 00:00:00 2001 From: Joe Gallo Date: Thu, 10 Feb 2022 12:30:32 -0500 Subject: [PATCH 044/167] Batch open-indices cluster state updates (#83760) --- docs/changelog/83760.yaml | 5 + .../open/TransportOpenIndexAction.java | 2 +- .../metadata/MetadataIndexStateService.java | 270 ++++++++++++------ .../indices/cluster/ClusterStateChanges.java | 14 +- .../action/TransportFreezeIndexAction.java | 2 +- 5 files changed, 193 insertions(+), 100 deletions(-) create mode 100644 docs/changelog/83760.yaml diff --git a/docs/changelog/83760.yaml b/docs/changelog/83760.yaml new file mode 100644 index 0000000000000..05e7775d4401d --- /dev/null +++ b/docs/changelog/83760.yaml @@ -0,0 +1,5 @@ +pr: 83760 +summary: Batch open-indices +area: Indices APIs +type: enhancement +issues: [] diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/open/TransportOpenIndexAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/open/TransportOpenIndexAction.java index 4ed98350e3070..95ab9d9e12f41 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/open/TransportOpenIndexAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/open/TransportOpenIndexAction.java @@ -92,7 +92,7 @@ protected void masterOperation( .indices(concreteIndices) .waitForActiveShards(request.waitForActiveShards()); - indexStateService.openIndex(updateRequest, new ActionListener<>() { + indexStateService.openIndices(updateRequest, new ActionListener<>() { @Override public void onResponse(ShardsAcknowledgedResponse response) { diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataIndexStateService.java b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataIndexStateService.java index 66c48106fb0c6..408fadd3bf04b 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataIndexStateService.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataIndexStateService.java @@ -32,14 +32,17 @@ import org.elasticsearch.action.support.master.ShardsAcknowledgedResponse; import org.elasticsearch.action.support.replication.ReplicationResponse; import org.elasticsearch.client.internal.node.NodeClient; -import org.elasticsearch.cluster.AckedClusterStateUpdateTask; import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.ClusterStateAckListener; +import org.elasticsearch.cluster.ClusterStateTaskConfig; import org.elasticsearch.cluster.ClusterStateTaskExecutor; +import org.elasticsearch.cluster.ClusterStateTaskListener; import org.elasticsearch.cluster.ClusterStateUpdateTask; import org.elasticsearch.cluster.block.ClusterBlock; import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.block.ClusterBlocks; import org.elasticsearch.cluster.metadata.IndexMetadata.APIBlock; +import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.routing.IndexRoutingTable; import org.elasticsearch.cluster.routing.IndexShardRoutingTable; import org.elasticsearch.cluster.routing.RoutingTable; @@ -55,6 +58,8 @@ import 
org.elasticsearch.common.util.concurrent.AtomicArray; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; import org.elasticsearch.common.util.concurrent.CountDown; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.core.TimeValue; import org.elasticsearch.core.Tuple; import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexNotFoundException; @@ -76,6 +81,7 @@ import java.util.EnumSet; import java.util.HashMap; import java.util.HashSet; +import java.util.LinkedHashSet; import java.util.List; import java.util.Map; import java.util.Set; @@ -111,12 +117,10 @@ public class MetadataIndexStateService { private final ClusterService clusterService; private final AllocationService allocationService; - private final IndexMetadataVerifier indexMetadataVerifier; - private final IndicesService indicesService; - private final ShardLimitValidator shardLimitValidator; - private final ThreadPool threadPool; private final NodeClient client; + private final ThreadPool threadPool; private final ActiveShardsObserver activeShardsObserver; + private final ClusterStateTaskExecutor opensExecutor; @Inject public MetadataIndexStateService( @@ -128,14 +132,12 @@ public MetadataIndexStateService( NodeClient client, ThreadPool threadPool ) { - this.indicesService = indicesService; this.clusterService = clusterService; this.allocationService = allocationService; - this.threadPool = threadPool; this.client = client; - this.indexMetadataVerifier = indexMetadataVerifier; - this.shardLimitValidator = shardLimitValidator; + this.threadPool = threadPool; this.activeShardsObserver = new ActiveShardsObserver(clusterService, threadPool); + this.opensExecutor = new OpenIndicesExecutor(allocationService, indexMetadataVerifier, indicesService, shardLimitValidator); } /** @@ -343,7 +345,7 @@ static ClusterState addIndexClosedBlocks( * @param block The type of block to add * @return a tuple of the updated cluster state, as well as the blocks that got added */ - static Tuple> addIndexBlock( + private static Tuple> addIndexBlock( final Index[] indices, final ClusterState currentState, final APIBlock block @@ -538,7 +540,7 @@ public void onFailure(final Exception e) { * this action succeed then the shard is considered to be ready for closing. When all shards of a given index are ready for closing, * the index is considered ready to be closed. */ - class WaitForClosedBlocksApplied extends ActionRunnable> { + private class WaitForClosedBlocksApplied extends ActionRunnable> { private final Map blockedIndices; private final CloseIndexClusterStateUpdateRequest request; @@ -670,7 +672,7 @@ private void sendVerifyShardBeforeCloseRequest( * Helper class that coordinates with shards to ensure that blocks have been properly applied to all shards using * {@link TransportVerifyShardIndexBlockAction}. 
*/ - class WaitForBlocksApplied extends ActionRunnable> { + private class WaitForBlocksApplied extends ActionRunnable> { private final Map blockedIndices; private final AddIndexBlockClusterStateUpdateRequest request; @@ -884,8 +886,8 @@ static Tuple> closeRoutingTable( ); } - public void openIndex(final OpenIndexClusterStateUpdateRequest request, final ActionListener listener) { - onlyOpenIndex(request, ActionListener.wrap(response -> { + public void openIndices(final OpenIndexClusterStateUpdateRequest request, final ActionListener listener) { + onlyOpenIndices(request, ActionListener.wrap(response -> { if (response.isAcknowledged()) { String[] indexNames = Arrays.stream(request.indices()).map(Index::getName).toArray(String[]::new); activeShardsObserver.waitForActiveShards( @@ -895,7 +897,7 @@ public void openIndex(final OpenIndexClusterStateUpdateRequest request, final Ac shardsAcknowledged -> { if (shardsAcknowledged == false) { logger.debug( - "[{}] indices opened, but the operation timed out while waiting for " + "enough shards to be started.", + "[{}] indices opened, but the operation timed out while waiting for enough shards to be started.", Arrays.toString(indexNames) ); } @@ -909,7 +911,7 @@ public void openIndex(final OpenIndexClusterStateUpdateRequest request, final Ac }, listener::onFailure)); } - private void onlyOpenIndex(final OpenIndexClusterStateUpdateRequest request, final ActionListener listener) { + private void onlyOpenIndices(final OpenIndexClusterStateUpdateRequest request, final ActionListener listener) { if (request.indices() == null || request.indices().length == 0) { throw new IllegalArgumentException("Index name is required"); } @@ -917,84 +919,12 @@ private void onlyOpenIndex(final OpenIndexClusterStateUpdateRequest request, fin final String indicesAsString = Arrays.toString(request.indices()); clusterService.submitStateUpdateTask( "open-indices " + indicesAsString, - new AckedClusterStateUpdateTask(Priority.URGENT, request, listener) { - @Override - public ClusterState execute(final ClusterState currentState) { - final ClusterState updatedState = openIndices(request.indices(), currentState); - // no explicit wait for other nodes needed as we use AckedClusterStateUpdateTask - return allocationService.reroute(updatedState, "indices opened [" + indicesAsString + "]"); - } - }, - ClusterStateTaskExecutor.unbatched() + new OpenIndicesTask(request, listener), + ClusterStateTaskConfig.build(Priority.URGENT, request.masterNodeTimeout()), + this.opensExecutor ); } - ClusterState openIndices(final Index[] indices, final ClusterState currentState) { - final List indicesToOpen = new ArrayList<>(); - for (Index index : indices) { - final IndexMetadata indexMetadata = currentState.metadata().getIndexSafe(index); - if (indexMetadata.getState() != IndexMetadata.State.OPEN) { - indicesToOpen.add(indexMetadata); - } else if (currentState.blocks().hasIndexBlockWithId(index.getName(), INDEX_CLOSED_BLOCK_ID)) { - indicesToOpen.add(indexMetadata); - } - } - - shardLimitValidator.validateShardLimit(currentState, indices); - if (indicesToOpen.isEmpty()) { - return currentState; - } - - logger.info( - () -> new ParameterizedMessage( - "opening indices [{}]", - String.join(",", indicesToOpen.stream().map(i -> (CharSequence) i.getIndex().toString())::iterator) - ) - ); - - final Metadata.Builder metadata = Metadata.builder(currentState.metadata()); - final ClusterBlocks.Builder blocks = ClusterBlocks.builder().blocks(currentState.blocks()); - final Version minIndexCompatibilityVersion = 
currentState.getNodes().getMaxNodeVersion().minimumIndexCompatibilityVersion(); - - for (IndexMetadata indexMetadata : indicesToOpen) { - final Index index = indexMetadata.getIndex(); - if (indexMetadata.getState() != IndexMetadata.State.OPEN) { - final Settings.Builder updatedSettings = Settings.builder().put(indexMetadata.getSettings()); - updatedSettings.remove(VERIFIED_BEFORE_CLOSE_SETTING.getKey()); - - IndexMetadata newIndexMetadata = IndexMetadata.builder(indexMetadata) - .state(IndexMetadata.State.OPEN) - .settingsVersion(indexMetadata.getSettingsVersion() + 1) - .settings(updatedSettings) - .timestampRange(IndexLongFieldRange.NO_SHARDS) - .build(); - - // The index might be closed because we couldn't import it due to an old incompatible - // version, so we need to verify its compatibility. - newIndexMetadata = indexMetadataVerifier.verifyIndexMetadata(newIndexMetadata, minIndexCompatibilityVersion); - try { - indicesService.verifyIndexMetadata(newIndexMetadata, newIndexMetadata); - } catch (Exception e) { - throw new ElasticsearchException("Failed to verify index " + index, e); - } - metadata.put(newIndexMetadata, true); - } - - // Always removes index closed blocks (note: this can fail on-going close index actions) - blocks.removeIndexBlockWithId(index.getName(), INDEX_CLOSED_BLOCK_ID); - } - - ClusterState updatedState = ClusterState.builder(currentState).metadata(metadata).blocks(blocks).build(); - - final RoutingTable.Builder routingTable = RoutingTable.builder(updatedState.routingTable()); - for (IndexMetadata previousIndexMetadata : indicesToOpen) { - if (previousIndexMetadata.getState() != IndexMetadata.State.OPEN) { - routingTable.addAsFromCloseToOpen(updatedState.metadata().getIndexSafe(previousIndexMetadata.getIndex())); - } - } - return ClusterState.builder(updatedState).routingTable(routingTable.build()).build(); - } - /** * Finalizes the addition of blocks by turning the temporary UUID-based blocks into full blocks. * @param currentState the cluster state to update @@ -1003,7 +933,7 @@ ClusterState openIndices(final Index[] indices, final ClusterState currentState) * @param block the full block to convert to * @return the updated cluster state, as well as the (failed and successful) index-level results for adding the block */ - static Tuple> finalizeBlock( + private static Tuple> finalizeBlock( final ClusterState currentState, final Map blockedIndices, final Map verifyResult, @@ -1079,7 +1009,7 @@ public static ClusterBlock createIndexClosingBlock() { return new ClusterBlock( INDEX_CLOSED_BLOCK_ID, UUIDs.randomBase64UUID(), - "index preparing to close. Reopen the index to allow " + "writes again or retry closing the index to fully close the index.", + "index preparing to close. 
Reopen the index to allow writes again or retry closing the index to fully close the index.", false, false, false, @@ -1108,4 +1038,158 @@ public static ClusterBlock createUUIDBasedBlock(ClusterBlock clusterBlock) { clusterBlock.levels() ); } + + private static class OpenIndicesExecutor implements ClusterStateTaskExecutor { + + private final AllocationService allocationService; + private final IndexMetadataVerifier indexMetadataVerifier; + private final IndicesService indicesService; + private final ShardLimitValidator shardLimitValidator; + + OpenIndicesExecutor( + AllocationService allocationService, + IndexMetadataVerifier indexMetadataVerifier, + IndicesService indicesService, + ShardLimitValidator shardLimitValidator + ) { + this.allocationService = allocationService; + this.indexMetadataVerifier = indexMetadataVerifier; + this.indicesService = indicesService; + this.shardLimitValidator = shardLimitValidator; + } + + @Override + public ClusterTasksResult execute(ClusterState currentState, List tasks) throws Exception { + ClusterTasksResult.Builder builder = ClusterTasksResult.builder(); + ClusterState state = currentState; + + try { + // build an in-order de-duplicated array of all the indices to open + final Set indicesToOpen = new LinkedHashSet<>(tasks.size()); + for (OpenIndicesTask task : tasks) { + Collections.addAll(indicesToOpen, task.request.indices()); + } + Index[] indices = indicesToOpen.toArray(Index.EMPTY_ARRAY); + + // open them + state = openIndices(indices, state); + + // do a final reroute + state = allocationService.reroute(state, "indices opened"); + + for (OpenIndicesTask task : tasks) { + builder.success(task, new LegacyClusterTaskResultActionListener(task, currentState)); + } + } catch (Exception e) { + for (OpenIndicesTask task : tasks) { + builder.failure(task, e); + } + } + + return builder.build(state); + } + + private ClusterState openIndices(final Index[] indices, final ClusterState currentState) { + final List indicesToOpen = new ArrayList<>(indices.length); + for (Index index : indices) { + final IndexMetadata indexMetadata = currentState.metadata().getIndexSafe(index); + if (indexMetadata.getState() != IndexMetadata.State.OPEN) { + indicesToOpen.add(indexMetadata); + } else if (currentState.blocks().hasIndexBlockWithId(index.getName(), INDEX_CLOSED_BLOCK_ID)) { + indicesToOpen.add(indexMetadata); + } + } + + shardLimitValidator.validateShardLimit(currentState, indices); + if (indicesToOpen.isEmpty()) { + return currentState; + } + + logger.info(() -> { + final StringBuilder indexNames = new StringBuilder(); + Strings.collectionToDelimitedStringWithLimit( + indicesToOpen.stream().map(i -> (CharSequence) i.getIndex().toString()).toList(), + ",", + "", + "", + 512, + indexNames + ); + return new ParameterizedMessage("opening indices [{}]", indexNames); + }); + + final Metadata.Builder metadata = Metadata.builder(currentState.metadata()); + final ClusterBlocks.Builder blocks = ClusterBlocks.builder().blocks(currentState.blocks()); + final Version minIndexCompatibilityVersion = currentState.getNodes().getMaxNodeVersion().minimumIndexCompatibilityVersion(); + + for (IndexMetadata indexMetadata : indicesToOpen) { + final Index index = indexMetadata.getIndex(); + if (indexMetadata.getState() != IndexMetadata.State.OPEN) { + final Settings.Builder updatedSettings = Settings.builder().put(indexMetadata.getSettings()); + updatedSettings.remove(VERIFIED_BEFORE_CLOSE_SETTING.getKey()); + + IndexMetadata newIndexMetadata = IndexMetadata.builder(indexMetadata) + 
.state(IndexMetadata.State.OPEN) + .settingsVersion(indexMetadata.getSettingsVersion() + 1) + .settings(updatedSettings) + .timestampRange(IndexLongFieldRange.NO_SHARDS) + .build(); + + // The index might be closed because we couldn't import it due to an old incompatible + // version, so we need to verify its compatibility. + newIndexMetadata = indexMetadataVerifier.verifyIndexMetadata(newIndexMetadata, minIndexCompatibilityVersion); + try { + indicesService.verifyIndexMetadata(newIndexMetadata, newIndexMetadata); + } catch (Exception e) { + throw new ElasticsearchException("Failed to verify index " + index, e); + } + metadata.put(newIndexMetadata, true); + } + + // Always removes index closed blocks (note: this can fail on-going close index actions) + blocks.removeIndexBlockWithId(index.getName(), INDEX_CLOSED_BLOCK_ID); + } + + ClusterState updatedState = ClusterState.builder(currentState).metadata(metadata).blocks(blocks).build(); + + final RoutingTable.Builder routingTable = RoutingTable.builder(updatedState.routingTable()); + for (IndexMetadata previousIndexMetadata : indicesToOpen) { + if (previousIndexMetadata.getState() != IndexMetadata.State.OPEN) { + routingTable.addAsFromCloseToOpen(updatedState.metadata().getIndexSafe(previousIndexMetadata.getIndex())); + } + } + return ClusterState.builder(updatedState).routingTable(routingTable.build()).build(); + } + } + + private record OpenIndicesTask(OpenIndexClusterStateUpdateRequest request, ActionListener listener) + implements + ClusterStateTaskListener, + ClusterStateAckListener { + + @Override + public void onFailure(Exception e) { + listener.onFailure(e); + } + + @Override + public boolean mustAck(DiscoveryNode discoveryNode) { + return true; + } + + @Override + public void onAllNodesAcked(@Nullable Exception e) { + listener.onResponse(AcknowledgedResponse.of(e == null)); + } + + @Override + public void onAckTimeout() { + listener.onResponse(AcknowledgedResponse.FALSE); + } + + @Override + public TimeValue ackTimeout() { + return request.ackTimeout(); + } + } } diff --git a/server/src/test/java/org/elasticsearch/indices/cluster/ClusterStateChanges.java b/server/src/test/java/org/elasticsearch/indices/cluster/ClusterStateChanges.java index d4deb426e9b0c..a45b5e33af3c1 100644 --- a/server/src/test/java/org/elasticsearch/indices/cluster/ClusterStateChanges.java +++ b/server/src/test/java/org/elasticsearch/indices/cluster/ClusterStateChanges.java @@ -36,6 +36,7 @@ import org.elasticsearch.action.support.master.TransportMasterNodeActionUtils; import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.ClusterStateTaskConfig; import org.elasticsearch.cluster.ClusterStateTaskExecutor; import org.elasticsearch.cluster.ClusterStateTaskExecutor.ClusterTasksResult; import org.elasticsearch.cluster.ClusterStateTaskExecutor.TaskResult; @@ -112,6 +113,7 @@ import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.anyString; import static org.mockito.Mockito.doAnswer; +import static org.mockito.Mockito.doCallRealMethod; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; @@ -474,11 +476,12 @@ private , Response extends ActionResp @SuppressWarnings("unchecked") private ClusterState executeClusterStateUpdateTask(ClusterState state, Runnable runnable) { ClusterState[] resultingState = new ClusterState[1]; + doCallRealMethod().when(clusterService).submitStateUpdateTask(anyString(), 
any(ClusterStateUpdateTask.class), any()); doAnswer(invocationOnMock -> { - ClusterStateUpdateTask task = (ClusterStateUpdateTask) invocationOnMock.getArguments()[1]; - ClusterStateTaskExecutor executor = (ClusterStateTaskExecutor) invocationOnMock - .getArguments()[2]; - ClusterTasksResult result = executor.execute(state, List.of(task)); + ClusterStateTaskListener task = (ClusterStateTaskListener) invocationOnMock.getArguments()[1]; + ClusterStateTaskExecutor executor = (ClusterStateTaskExecutor< + ClusterStateTaskListener>) invocationOnMock.getArguments()[3]; + ClusterTasksResult result = executor.execute(state, List.of(task)); for (TaskResult taskResult : result.executionResults().values()) { if (taskResult.isSuccess() == false) { throw taskResult.getFailure(); @@ -486,7 +489,8 @@ private ClusterState executeClusterStateUpdateTask(ClusterState state, Runnable } resultingState[0] = result.resultingState(); return null; - }).when(clusterService).submitStateUpdateTask(anyString(), any(ClusterStateUpdateTask.class), any()); + }).when(clusterService) + .submitStateUpdateTask(anyString(), any(ClusterStateTaskListener.class), any(ClusterStateTaskConfig.class), any()); runnable.run(); assertThat(resultingState[0], notNullValue()); return resultingState[0]; diff --git a/x-pack/plugin/frozen-indices/src/main/java/org/elasticsearch/xpack/frozen/action/TransportFreezeIndexAction.java b/x-pack/plugin/frozen-indices/src/main/java/org/elasticsearch/xpack/frozen/action/TransportFreezeIndexAction.java index 2b702044ce92a..690625887d445 100644 --- a/x-pack/plugin/frozen-indices/src/main/java/org/elasticsearch/xpack/frozen/action/TransportFreezeIndexAction.java +++ b/x-pack/plugin/frozen-indices/src/main/java/org/elasticsearch/xpack/frozen/action/TransportFreezeIndexAction.java @@ -147,7 +147,7 @@ private void toggleFrozenSettings( .masterNodeTimeout(request.masterNodeTimeout()) .indices(concreteIndices) .waitForActiveShards(request.waitForActiveShards()); - indexStateService.openIndex( + indexStateService.openIndices( updateRequest, delegate.delegateFailure( (l, openIndexClusterStateUpdateResponse) -> l.onResponse( From 084f7ce845edb1e6f7939010d179f54159616ece Mon Sep 17 00:00:00 2001 From: Mark Vieira Date: Thu, 10 Feb 2022 11:48:04 -0800 Subject: [PATCH 045/167] Add version constant for 8.0.1 --- .ci/bwcVersions | 1 + server/src/main/java/org/elasticsearch/Version.java | 1 + 2 files changed, 2 insertions(+) diff --git a/.ci/bwcVersions b/.ci/bwcVersions index a8be643665224..3f3200264bad8 100644 --- a/.ci/bwcVersions +++ b/.ci/bwcVersions @@ -51,5 +51,6 @@ BWC_VERSION: - "7.17.0" - "7.17.1" - "8.0.0" + - "8.0.1" - "8.1.0" - "8.2.0" diff --git a/server/src/main/java/org/elasticsearch/Version.java b/server/src/main/java/org/elasticsearch/Version.java index 078c10edba060..b8032448cb0e0 100644 --- a/server/src/main/java/org/elasticsearch/Version.java +++ b/server/src/main/java/org/elasticsearch/Version.java @@ -100,6 +100,7 @@ public class Version implements Comparable, ToXContentFragment { public static final Version V_7_17_0 = new Version(7_17_00_99, LUCENE_8_11_1); public static final Version V_7_17_1 = new Version(7_17_01_99, LUCENE_8_11_1); public static final Version V_8_0_0 = new Version(8_00_00_99, org.apache.lucene.util.Version.LUCENE_9_0_0); + public static final Version V_8_0_1 = new Version(8_00_01_99, org.apache.lucene.util.Version.LUCENE_9_0_0); public static final Version V_8_1_0 = new Version(8_01_00_99, org.apache.lucene.util.Version.LUCENE_9_0_0); public static final Version V_8_2_0 = new 
Version(8_02_00_99, org.apache.lucene.util.Version.LUCENE_9_0_0); public static final Version CURRENT = V_8_2_0; From 487b4ac3e95f0321718901281864ce3340634859 Mon Sep 17 00:00:00 2001 From: Mark Vieira Date: Thu, 10 Feb 2022 11:57:26 -0800 Subject: [PATCH 046/167] Fix YAML REST compatibility tests --- .../rest/YamlRestCompatTestPluginFuncTest.groovy | 16 ++++++++-------- .../rest/compat/YamlRestCompatTestPlugin.java | 2 +- 2 files changed, 9 insertions(+), 9 deletions(-) diff --git a/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/test/rest/YamlRestCompatTestPluginFuncTest.groovy b/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/test/rest/YamlRestCompatTestPluginFuncTest.groovy index ee5ff76aa9c41..0c02b6baefe25 100644 --- a/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/test/rest/YamlRestCompatTestPluginFuncTest.groovy +++ b/build-tools-internal/src/integTest/groovy/org/elasticsearch/gradle/internal/test/rest/YamlRestCompatTestPluginFuncTest.groovy @@ -30,7 +30,7 @@ class YamlRestCompatTestPluginFuncTest extends AbstractRestResourcesFuncTest { def "yamlRestTestVxCompatTest does nothing when there are no tests"() { given: - addSubProject(":distribution:bwc:bugfix") << """ + addSubProject(":distribution:bwc:maintenance") << """ configurations { checkout } artifacts { checkout(new File(projectDir, "checkoutDir")) @@ -53,11 +53,11 @@ class YamlRestCompatTestPluginFuncTest extends AbstractRestResourcesFuncTest { result.task(transformTask).outcome == TaskOutcome.NO_SOURCE } - def "yamlRestTestVxCompatTest executes and copies api and transforms tests from :bwc:bugfix"() { + def "yamlRestTestVxCompatTest executes and copies api and transforms tests from :bwc:maintenance"() { given: internalBuild() - addSubProject(":distribution:bwc:bugfix") << """ + addSubProject(":distribution:bwc:maintenance") << """ configurations { checkout } artifacts { checkout(new File(projectDir, "checkoutDir")) @@ -90,8 +90,8 @@ class YamlRestCompatTestPluginFuncTest extends AbstractRestResourcesFuncTest { String api = "foo.json" String test = "10_basic.yml" //add the compatible test and api files, these are the prior version's normal yaml rest tests - file("distribution/bwc/bugfix/checkoutDir/rest-api-spec/src/main/resources/rest-api-spec/api/" + api) << "" - file("distribution/bwc/bugfix/checkoutDir/src/yamlRestTest/resources/rest-api-spec/test/" + test) << "" + file("distribution/bwc/maintenance/checkoutDir/rest-api-spec/src/main/resources/rest-api-spec/api/" + api) << "" + file("distribution/bwc/maintenance/checkoutDir/src/yamlRestTest/resources/rest-api-spec/test/" + test) << "" when: def result = gradleRunner("yamlRestTestV${compatibleVersion}CompatTest").build() @@ -136,7 +136,7 @@ class YamlRestCompatTestPluginFuncTest extends AbstractRestResourcesFuncTest { def "yamlRestTestVxCompatTest is wired into check and checkRestCompat"() { given: - addSubProject(":distribution:bwc:bugfix") << """ + addSubProject(":distribution:bwc:maintenance") << """ configurations { checkout } artifacts { checkout(new File(projectDir, "checkoutDir")) @@ -180,7 +180,7 @@ class YamlRestCompatTestPluginFuncTest extends AbstractRestResourcesFuncTest { given: internalBuild() - addSubProject(":distribution:bwc:bugfix") << """ + addSubProject(":distribution:bwc:maintenance") << """ configurations { checkout } artifacts { checkout(new File(projectDir, "checkoutDir")) @@ -224,7 +224,7 @@ class YamlRestCompatTestPluginFuncTest extends 
AbstractRestResourcesFuncTest { setupRestResources([], []) - file("distribution/bwc/bugfix/checkoutDir/src/yamlRestTest/resources/rest-api-spec/test/test.yml" ) << """ + file("distribution/bwc/maintenance/checkoutDir/src/yamlRestTest/resources/rest-api-spec/test/test.yml" ) << """ "one": - do: do_.some.key_to_replace: diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/rest/compat/YamlRestCompatTestPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/rest/compat/YamlRestCompatTestPlugin.java index e12ec5836c1d7..48d61b5b2d928 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/rest/compat/YamlRestCompatTestPlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/rest/compat/YamlRestCompatTestPlugin.java @@ -81,7 +81,7 @@ public void apply(Project project) { // copy compatible rest specs Configuration bwcMinorConfig = project.getConfigurations().create(BWC_MINOR_CONFIG_NAME); - Dependency bwcMinor = project.getDependencies().project(Map.of("path", ":distribution:bwc:bugfix", "configuration", "checkout")); + Dependency bwcMinor = project.getDependencies().project(Map.of("path", ":distribution:bwc:maintenance", "configuration", "checkout")); project.getDependencies().add(bwcMinorConfig.getName(), bwcMinor); Provider copyCompatYamlSpecTask = project.getTasks() From d31bdd6bf49706f612e7caf79095cfa3ca7a8023 Mon Sep 17 00:00:00 2001 From: James Rodewig Date: Thu, 10 Feb 2022 15:04:46 -0500 Subject: [PATCH 047/167] [DOCS] Remove unneeded callouts from snippets (#83798) These callouts aren't referenced anywhere. Leaving them in can be confusing. --- .../aggregations/metrics/max-aggregation.asciidoc | 8 ++++---- .../aggregations/metrics/min-aggregation.asciidoc | 8 ++++---- 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/docs/reference/aggregations/metrics/max-aggregation.asciidoc b/docs/reference/aggregations/metrics/max-aggregation.asciidoc index 2435f800bb6da..13855a2285249 100644 --- a/docs/reference/aggregations/metrics/max-aggregation.asciidoc +++ b/docs/reference/aggregations/metrics/max-aggregation.asciidoc @@ -137,8 +137,8 @@ PUT metrics_index/_doc/1?refresh { "network.name" : "net-1", "latency_histo" : { - "values" : [0.1, 0.2, 0.3, 0.4, 0.5], <1> - "counts" : [3, 7, 23, 12, 6] <2> + "values" : [0.1, 0.2, 0.3, 0.4, 0.5], + "counts" : [3, 7, 23, 12, 6] } } @@ -146,8 +146,8 @@ PUT metrics_index/_doc/2?refresh { "network.name" : "net-2", "latency_histo" : { - "values" : [0.1, 0.2, 0.3, 0.4, 0.5], <1> - "counts" : [8, 17, 8, 7, 6] <2> + "values" : [0.1, 0.2, 0.3, 0.4, 0.5], + "counts" : [8, 17, 8, 7, 6] } } diff --git a/docs/reference/aggregations/metrics/min-aggregation.asciidoc b/docs/reference/aggregations/metrics/min-aggregation.asciidoc index d4c3135cc576e..d7bf8b478f563 100644 --- a/docs/reference/aggregations/metrics/min-aggregation.asciidoc +++ b/docs/reference/aggregations/metrics/min-aggregation.asciidoc @@ -137,8 +137,8 @@ PUT metrics_index/_doc/1?refresh { "network.name" : "net-1", "latency_histo" : { - "values" : [0.1, 0.2, 0.3, 0.4, 0.5], <1> - "counts" : [3, 7, 23, 12, 6] <2> + "values" : [0.1, 0.2, 0.3, 0.4, 0.5], + "counts" : [3, 7, 23, 12, 6] } } @@ -146,8 +146,8 @@ PUT metrics_index/_doc/2?refresh { "network.name" : "net-2", "latency_histo" : { - "values" : [0.1, 0.2, 0.3, 0.4, 0.5], <1> - "counts" : [8, 17, 8, 7, 6] <2> + "values" : [0.1, 0.2, 0.3, 0.4, 0.5], + "counts" : [8, 17, 8, 7, 6] } } From 39b4db96e84eb20995a43ed4f3b6281cc51049f8 Mon 
Sep 17 00:00:00 2001 From: Mark Vieira Date: Thu, 10 Feb 2022 12:05:31 -0800 Subject: [PATCH 048/167] Fix formatting --- .../gradle/internal/rest/compat/YamlRestCompatTestPlugin.java | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/rest/compat/YamlRestCompatTestPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/rest/compat/YamlRestCompatTestPlugin.java index 48d61b5b2d928..f20abf6018777 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/rest/compat/YamlRestCompatTestPlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/rest/compat/YamlRestCompatTestPlugin.java @@ -81,7 +81,8 @@ public void apply(Project project) { // copy compatible rest specs Configuration bwcMinorConfig = project.getConfigurations().create(BWC_MINOR_CONFIG_NAME); - Dependency bwcMinor = project.getDependencies().project(Map.of("path", ":distribution:bwc:maintenance", "configuration", "checkout")); + Dependency bwcMinor = project.getDependencies() + .project(Map.of("path", ":distribution:bwc:maintenance", "configuration", "checkout")); project.getDependencies().add(bwcMinorConfig.getName(), bwcMinor); Provider copyCompatYamlSpecTask = project.getTasks() From c47548ad2b62311f5cbb943853d3781ff30b5610 Mon Sep 17 00:00:00 2001 From: Simon Date: Fri, 11 Feb 2022 04:23:35 +0800 Subject: [PATCH 049/167] GET _index_template and GET _component_template request support query parameter flat_settings (#83297) The `flat_settings=true` query parameter has no effect on `GET _index_template` and `GET _component_template` requests. For example, when running these requests, `settings` are not returned in a flat format. - request ```bash GET _index_template/ilm-history?pretty&flat_settings=true ``` - response ```json { "index_templates" : [ { "name" : "ilm-history", "index_template" : { "index_patterns" : [ "ilm-history-5*" ], "template" : { "settings" : { "index" : { "lifecycle" : { "name" : "ilm-history-ilm-policy" }, "number_of_shards" : "1", "auto_expand_replicas" : "0-1", "number_of_replicas" : "0" } }, "mappings" : { "dynamic" : false, "properties" : { "index_age" : { "type" : "long" }, "@timestamp" : { "format" : "epoch_millis", "type" : "date" }, "error_details" : { "type" : "text" }, "success" : { "type" : "boolean" }, "index" : { "type" : "keyword" }, "state" : { "dynamic" : true, "type" : "object", "properties" : { "phase" : { "type" : "keyword" }, "failed_step" : { "type" : "keyword" }, "phase_definition" : { "type" : "text" }, "action_time" : { "format" : "epoch_millis", "type" : "date" }, "phase_time" : { "format" : "epoch_millis", "type" : "date" }, "step_info" : { "type" : "text" }, "action" : { "type" : "keyword" }, "step" : { "type" : "keyword" }, "is_auto-retryable_error" : { "type" : "keyword" }, "creation_date" : { "format" : "epoch_millis", "type" : "date" }, "step_time" : { "format" : "epoch_millis", "type" : "date" } } }, "policy" : { "type" : "keyword" } } } }, "composed_of" : [ ], "priority" : 2147483647, "version" : 5, "_meta" : { "managed" : true, "description" : "index template for ILM history indices" }, "data_stream" : { "hidden" : true }, "allow_auto_create" : true } } ] } ``` - request ``` GET _component_template/logs-settings?pretty&flat_settings=true ``` - response ```json { "component_templates" : [ { "name" : "logs-settings", "component_template" : { "template" : { "settings" : { "index" : { "lifecycle" : { "name" : "logs" }, "codec" : 
"best_compression", "query" : { "default_field" : [ "message" ] } } } }, "version" : 1, "_meta" : { "managed" : true, "description" : "default settings for the logs index template installed by x-pack" } } } ] } ``` --- docs/changelog/83297.yaml | 5 +++++ .../indices/template/get/GetComponentTemplateAction.java | 2 +- .../template/get/GetComposableIndexTemplateAction.java | 2 +- 3 files changed, 7 insertions(+), 2 deletions(-) create mode 100644 docs/changelog/83297.yaml diff --git a/docs/changelog/83297.yaml b/docs/changelog/83297.yaml new file mode 100644 index 0000000000000..222163679595a --- /dev/null +++ b/docs/changelog/83297.yaml @@ -0,0 +1,5 @@ +pr: 83297 +summary: GET _index_template and GET _component_template request support query parameter flat_settings +area: ILM+SLM +type: enhancement +issues: [] diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/template/get/GetComponentTemplateAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/template/get/GetComponentTemplateAction.java index 907c1c4b1c9a3..64b1d9aa01ecb 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/template/get/GetComponentTemplateAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/template/get/GetComponentTemplateAction.java @@ -127,7 +127,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws for (Map.Entry componentTemplate : this.componentTemplates.entrySet()) { builder.startObject(); builder.field(NAME.getPreferredName(), componentTemplate.getKey()); - builder.field(COMPONENT_TEMPLATE.getPreferredName(), componentTemplate.getValue()); + builder.field(COMPONENT_TEMPLATE.getPreferredName(), componentTemplate.getValue(), params); builder.endObject(); } builder.endArray(); diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/template/get/GetComposableIndexTemplateAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/template/get/GetComposableIndexTemplateAction.java index 2376c77084311..e6574541de6a5 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/template/get/GetComposableIndexTemplateAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/template/get/GetComposableIndexTemplateAction.java @@ -137,7 +137,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws for (Map.Entry indexTemplate : this.indexTemplates.entrySet()) { builder.startObject(); builder.field(NAME.getPreferredName(), indexTemplate.getKey()); - builder.field(INDEX_TEMPLATE.getPreferredName(), indexTemplate.getValue()); + builder.field(INDEX_TEMPLATE.getPreferredName(), indexTemplate.getValue(), params); builder.endObject(); } builder.endArray(); From b3c942020a2cee909b9e66627ed720942644c897 Mon Sep 17 00:00:00 2001 From: Ioannis Kakavas Date: Thu, 10 Feb 2022 22:35:29 +0200 Subject: [PATCH 050/167] Add docs for elasticsearch-reconfigure-node (#83605) Co-authored-by: Adam Locke --- docs/reference/commands/index.asciidoc | 2 + .../commands/reconfigure-node.asciidoc | 64 +++++++++++++++++++ 2 files changed, 66 insertions(+) create mode 100644 docs/reference/commands/reconfigure-node.asciidoc diff --git a/docs/reference/commands/index.asciidoc b/docs/reference/commands/index.asciidoc index 3aa564d1a4017..e55cd89bd6af4 100644 --- a/docs/reference/commands/index.asciidoc +++ b/docs/reference/commands/index.asciidoc @@ -13,6 +13,7 @@ tasks from the command line: * <> * <> * <> +* <> * <> * <> * <> @@ -28,6 +29,7 @@ 
include::create-enrollment-token.asciidoc[] include::croneval.asciidoc[] include::keystore.asciidoc[] include::node-tool.asciidoc[] +include::reconfigure-node.asciidoc[] include::reset-password.asciidoc[] include::saml-metadata.asciidoc[] include::service-tokens-command.asciidoc[] diff --git a/docs/reference/commands/reconfigure-node.asciidoc b/docs/reference/commands/reconfigure-node.asciidoc new file mode 100644 index 0000000000000..f06aee9f94db8 --- /dev/null +++ b/docs/reference/commands/reconfigure-node.asciidoc @@ -0,0 +1,64 @@ +[[reconfigure-node]] +== elasticsearch-reconfigure-node + +The `elasticsearch-reconfigure-node` tool reconfigures an {es} node that was installed +through an RPM or DEB package to join an existing cluster with security features enabled. + +[discrete] +=== Synopsis + +[source,shell] +----------------------------------------------------- +bin/elasticsearch-reconfigure-node +[--enrollment-token] [-h, --help] [-E ] +[-s, --silent] [-v, --verbose] +----------------------------------------------------- + + +[discrete] +=== Description + +When installing {es} with a DEB or RPM package, the current node is assumed to +be the first node in the cluster. {es} enables and configures security +features on the node, generates a password for the `elastic` superuser, and +configures TLS for the HTTP and transport layers. + +Rather than form a single-node cluster, you can add a node to an existing +cluster where security features are already enabled and configured. Before +starting your new node, run the +<> tool +with the `-s node` option to generate an enrollment token on any node in your +existing cluster. On your new node, run the +`elasticsearch-reconfigure-node` tool and pass the enrollment token as a +parameter. + +NOTE: This tool is intended only for use on DEB or RPM distributions of {es}. + +You must run this tool with `sudo` so that it can edit the necessary +files in your {es} installation configuration directory that are owned by +`root:elasticsearch`. + + +[discrete] +[[reconfigure-node-parameters]] +=== Parameters + +`--enrollment-token`:: The enrollment token, which can be generated on any of the +nodes in an existing, secured cluster. + +`-E `:: Configures a standard {es} or {xpack} setting. + +`-h, --help`:: Shows help information. + +`-s, --silent`:: Shows minimal output. + +`-v, --verbose`:: Shows verbose output. + +[discrete] +=== Examples + +The following example reconfigures an installed {es} node so that it can join an existing cluster when it starts for the first time. 
+[source,shell] +---- +sudo /usr/share/elasticsearch/elasticsearch-reconfigure-node --enrollment-token eyJ2ZXIiOiI4LjAuMCIsImFkciI6WyIxOTIuMTY4LjEuMTY6OTIwMCJdLCJmZ3IiOiI4NGVhYzkyMzAyMWQ1MjcyMmQxNTFhMTQwZmM2ODI5NmE5OWNiNmU0OGVhZjYwYWMxYzljM2I3ZDJjOTg2YTk3Iiwia2V5IjoiUy0yUjFINEJrNlFTMkNEY1dVV1g6QS0wSmJxM3hTRy1haWxoQTdPWVduZyJ9 +---- From 104e3540d1b490a9e960e4929d2689968ce224b2 Mon Sep 17 00:00:00 2001 From: Joe Gallo Date: Thu, 10 Feb 2022 16:57:34 -0500 Subject: [PATCH 051/167] Remove LegacyCTRAListener from open-indices batching (#83807) --- docs/changelog/83760.yaml | 2 +- .../metadata/MetadataIndexStateService.java | 17 ++++++++++++++++- 2 files changed, 17 insertions(+), 2 deletions(-) diff --git a/docs/changelog/83760.yaml b/docs/changelog/83760.yaml index 05e7775d4401d..f4249f9c04771 100644 --- a/docs/changelog/83760.yaml +++ b/docs/changelog/83760.yaml @@ -1,5 +1,5 @@ pr: 83760 -summary: Batch open-indices +summary: Batch open-indices cluster state updates area: Indices APIs type: enhancement issues: [] diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataIndexStateService.java b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataIndexStateService.java index 408fadd3bf04b..ddd449f9d265d 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataIndexStateService.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataIndexStateService.java @@ -1078,7 +1078,17 @@ public ClusterTasksResult execute(ClusterState currentState, Li state = allocationService.reroute(state, "indices opened"); for (OpenIndicesTask task : tasks) { - builder.success(task, new LegacyClusterTaskResultActionListener(task, currentState)); + builder.success(task, new ActionListener<>() { + @Override + public void onResponse(ClusterState clusterState) { + // listener is notified at the end of acking + } + + @Override + public void onFailure(Exception e) { + task.onFailure(e); + } + }); } } catch (Exception e) { for (OpenIndicesTask task : tasks) { @@ -1191,5 +1201,10 @@ public void onAckTimeout() { public TimeValue ackTimeout() { return request.ackTimeout(); } + + @Override + public void clusterStateProcessed(ClusterState oldState, ClusterState newState) { + assert false : "not called"; + } } } From 77ace234ef4da42245f34051ac4f88aa87032160 Mon Sep 17 00:00:00 2001 From: Yang Wang Date: Fri, 11 Feb 2022 17:23:48 +1100 Subject: [PATCH 052/167] User Profile - Update APIs to work with domain (#83570) This PR updates the three existing profile APIs to work with the new security domain concept: * Domain info gets recorded when activating a profile * Domain info is returned appropriately based on the context * "Same" profile is returned based on checking realm name and domain definition --- docs/changelog/83570.yaml | 5 + .../core/security/action/profile/Profile.java | 10 +- .../core/security/authc/Authentication.java | 35 ++- .../core/security/authc/RealmConfig.java | 36 ++- .../core/security/authc/RealmDomain.java | 39 ++- .../xpack/security/profile/ProfileIT.java | 2 + .../AbstractProfileSingleNodeTestCase.java | 120 ++++++++++ .../profile/ProfileDomainSingleNodeTests.java | 225 ++++++++++++++++++ .../profile/ProfileSingleNodeTests.java | 169 +++---------- .../security/profile/ProfileDocument.java | 43 +--- .../security/profile/ProfileService.java | 84 ++++--- .../security/profile/ProfileServiceTests.java | 4 +- .../test/user_profile/10_basic.yml | 2 + 13 files changed, 561 insertions(+), 213 deletions(-) create mode 100644 
docs/changelog/83570.yaml create mode 100644 x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/profile/AbstractProfileSingleNodeTestCase.java create mode 100644 x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/profile/ProfileDomainSingleNodeTests.java diff --git a/docs/changelog/83570.yaml b/docs/changelog/83570.yaml new file mode 100644 index 0000000000000..80cfa41adf495 --- /dev/null +++ b/docs/changelog/83570.yaml @@ -0,0 +1,5 @@ +pr: 83570 +summary: User Profile - Update APIs to work with domain +area: Security +type: enhancement +issues: [] diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/profile/Profile.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/profile/Profile.java index 3e199e05e2d3e..24e2f61f2908e 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/profile/Profile.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/profile/Profile.java @@ -35,7 +35,7 @@ public record ProfileUser( String username, List roles, String realmName, - @Nullable String realmDomain, + @Nullable String domainName, String email, String fullName, String displayName, @@ -56,7 +56,7 @@ public ProfileUser(StreamInput in) throws IOException { } public QualifiedName qualifiedName() { - return new QualifiedName(username, realmDomain); + return new QualifiedName(username, domainName); } @Override @@ -65,8 +65,8 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.field("username", username); builder.field("roles", roles); builder.field("realm_name", realmName); - if (realmDomain != null) { - builder.field("realm_domain", realmDomain); + if (domainName != null) { + builder.field("realm_domain", domainName); } if (email != null) { builder.field("email", email); @@ -87,7 +87,7 @@ public void writeTo(StreamOutput out) throws IOException { out.writeString(username); out.writeStringCollection(roles); out.writeString(realmName); - out.writeOptionalString(realmDomain); + out.writeOptionalString(domainName); out.writeOptionalString(email); out.writeOptionalString(fullName); out.writeOptionalString(displayName); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/Authentication.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/Authentication.java index edc01bfe27211..c21233436bd4a 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/Authentication.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/Authentication.java @@ -18,6 +18,8 @@ import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.core.Nullable; +import org.elasticsearch.xcontent.ConstructingObjectParser; +import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentType; @@ -39,6 +41,8 @@ import java.util.Map; import java.util.Objects; +import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; +import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; import static org.elasticsearch.xpack.core.security.authc.Authentication.RealmRef.newAnonymousRealmRef; import static 
org.elasticsearch.xpack.core.security.authc.Authentication.RealmRef.newApiKeyRealmRef; import static org.elasticsearch.xpack.core.security.authc.Authentication.RealmRef.newInternalAttachRealmRef; @@ -50,6 +54,7 @@ import static org.elasticsearch.xpack.core.security.authc.AuthenticationField.ATTACH_REALM_TYPE; import static org.elasticsearch.xpack.core.security.authc.AuthenticationField.FALLBACK_REALM_NAME; import static org.elasticsearch.xpack.core.security.authc.AuthenticationField.FALLBACK_REALM_TYPE; +import static org.elasticsearch.xpack.core.security.authc.RealmDomain.REALM_DOMAIN_PARSER; // TODO(hub-cap) Clean this up after moving User over - This class can re-inherit its field AUTHENTICATION_KEY in AuthenticationField. // That interface can be removed @@ -442,7 +447,7 @@ public String toString() { return builder.toString(); } - public static class RealmRef implements Writeable { + public static class RealmRef implements Writeable, ToXContentObject { private final String nodeName; private final String name; @@ -481,6 +486,21 @@ public void writeTo(StreamOutput out) throws IOException { } } + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + { + builder.field("name", name); + builder.field("type", type); + builder.field("node_name", nodeName); + if (domain != null) { + builder.field("domain", domain); + } + } + builder.endObject(); + return builder; + } + public String getNodeName() { return nodeName; } @@ -560,6 +580,19 @@ static RealmRef newApiKeyRealmRef(String nodeName) { } } + public static ConstructingObjectParser REALM_REF_PARSER = new ConstructingObjectParser<>( + "realm_ref", + false, + (args, v) -> new RealmRef((String) args[0], (String) args[1], (String) args[2], (RealmDomain) args[3]) + ); + + static { + REALM_REF_PARSER.declareString(constructorArg(), new ParseField("name")); + REALM_REF_PARSER.declareString(constructorArg(), new ParseField("type")); + REALM_REF_PARSER.declareString(constructorArg(), new ParseField("node_name")); + REALM_REF_PARSER.declareObject(optionalConstructorArg(), (p, c) -> REALM_DOMAIN_PARSER.parse(p, c), new ParseField("domain")); + } + // TODO is a newer version than the node's a valid value? 
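// (note, presumably: the Version recorded here becomes the Authentication's serialization version, used for wire-format compatibility; callers normally pass the creating node's version)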
public static Authentication newInternalAuthentication(User internalUser, Version version, String nodeName) { // TODO create a system user class, so that the type system guarantees that this is only invoked for internal users diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/RealmConfig.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/RealmConfig.java index 017c87ae983eb..e827fea69b527 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/RealmConfig.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/RealmConfig.java @@ -13,12 +13,18 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.env.Environment; +import org.elasticsearch.xcontent.ConstructingObjectParser; +import org.elasticsearch.xcontent.ParseField; +import org.elasticsearch.xcontent.ToXContentObject; +import org.elasticsearch.xcontent.XContentBuilder; import java.io.IOException; import java.util.Objects; import java.util.function.Function; import java.util.function.Supplier; +import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; + public class RealmConfig { final RealmIdentifier identifier; @@ -183,7 +189,7 @@ public boolean hasSetting(Setting.AffixSetting setting) { * (e.g. {@code xpack.security.authc.realms.native.native_realm.order}), it is often necessary to be able to * pass this pair of variables as a single type (e.g. in method parameters, or return values). */ - public static class RealmIdentifier implements Writeable { + public static class RealmIdentifier implements Writeable, ToXContentObject, Comparable { private final String type; private final String name; @@ -235,5 +241,33 @@ public void writeTo(StreamOutput out) throws IOException { out.writeString(type); out.writeString(name); } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + { + builder.field("name", name); + builder.field("type", type); + } + builder.endObject(); + return builder; + } + + @Override + public int compareTo(RealmIdentifier other) { + int result = name.compareTo(other.name); + return (result == 0) ? 
type.compareTo(other.type) : result; + } + } + + public static ConstructingObjectParser REALM_IDENTIFIER_PARSER = new ConstructingObjectParser<>( + "realm_identifier", + false, + (args, v) -> new RealmIdentifier((String) args[0], (String) args[1]) + ); + + static { + REALM_IDENTIFIER_PARSER.declareString(constructorArg(), new ParseField("name")); + REALM_IDENTIFIER_PARSER.declareString(constructorArg(), new ParseField("type")); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/RealmDomain.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/RealmDomain.java index 14d044de9c343..8863953dc844d 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/RealmDomain.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/RealmDomain.java @@ -10,11 +10,19 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.xcontent.ConstructingObjectParser; +import org.elasticsearch.xcontent.ParseField; +import org.elasticsearch.xcontent.ToXContentObject; +import org.elasticsearch.xcontent.XContentBuilder; import java.io.IOException; +import java.util.List; import java.util.Set; -public record RealmDomain(String name, Set realms) implements Writeable { +import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; +import static org.elasticsearch.xpack.core.security.authc.RealmConfig.REALM_IDENTIFIER_PARSER; + +public record RealmDomain(String name, Set realms) implements Writeable, ToXContentObject { @Override public void writeTo(StreamOutput out) throws IOException { @@ -27,4 +35,33 @@ static RealmDomain readFrom(StreamInput in) throws IOException { Set realms = in.readSet(RealmConfig.RealmIdentifier::new); return new RealmDomain(domainName, realms); } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + { + builder.field("name", name); + // Sort to have stable order in display + builder.xContentList("realms", realms.stream().sorted().toList()); + } + builder.endObject(); + return builder; + } + + @Override + public String toString() { + return "RealmDomain{" + "name='" + name + '\'' + ", realms=" + realms + '}'; + } + + @SuppressWarnings("unchecked") + public static ConstructingObjectParser REALM_DOMAIN_PARSER = new ConstructingObjectParser<>( + "realm_domain", + false, + (args, v) -> new RealmDomain((String) args[0], Set.copyOf((List) args[1])) + ); + + static { + REALM_DOMAIN_PARSER.declareString(constructorArg(), new ParseField("name")); + REALM_DOMAIN_PARSER.declareObjectArray(constructorArg(), (p, c) -> REALM_IDENTIFIER_PARSER.parse(p, c), new ParseField("realms")); + } } diff --git a/x-pack/plugin/security/qa/profile/src/javaRestTest/java/org/elasticsearch/xpack/security/profile/ProfileIT.java b/x-pack/plugin/security/qa/profile/src/javaRestTest/java/org/elasticsearch/xpack/security/profile/ProfileIT.java index 9cb692ceecafc..f6effae62e65c 100644 --- a/x-pack/plugin/security/qa/profile/src/javaRestTest/java/org/elasticsearch/xpack/security/profile/ProfileIT.java +++ b/x-pack/plugin/security/qa/profile/src/javaRestTest/java/org/elasticsearch/xpack/security/profile/ProfileIT.java @@ -99,6 +99,8 @@ public void testGetProfile() throws IOException { assertOK(adminClient().performRequest(indexRequest)); final Map profileMap1 = 
doGetProfile(uid); + assertThat(castToMap(profileMap1.get("user")).get("realm_name"), equalTo("realm_name_1")); + assertThat(castToMap(profileMap1.get("user")).get("realm_domain"), equalTo("domainA")); assertThat(castToMap(profileMap1.get("data")), anEmptyMap()); // Retrieve application data along the profile diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/profile/AbstractProfileSingleNodeTestCase.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/profile/AbstractProfileSingleNodeTestCase.java new file mode 100644 index 0000000000000..bb5a873cf6a0e --- /dev/null +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/profile/AbstractProfileSingleNodeTestCase.java @@ -0,0 +1,120 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.security.profile; + +import org.elasticsearch.common.settings.SecureString; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.core.SuppressForbidden; +import org.elasticsearch.test.SecuritySingleNodeTestCase; +import org.elasticsearch.xpack.core.security.action.profile.ActivateProfileAction; +import org.elasticsearch.xpack.core.security.action.profile.ActivateProfileRequest; +import org.elasticsearch.xpack.core.security.action.profile.ActivateProfileResponse; +import org.elasticsearch.xpack.core.security.action.profile.GetProfileAction; +import org.elasticsearch.xpack.core.security.action.profile.GetProfileRequest; +import org.elasticsearch.xpack.core.security.action.profile.GetProfilesResponse; +import org.elasticsearch.xpack.core.security.action.profile.Profile; +import org.elasticsearch.xpack.core.security.action.token.CreateTokenAction; +import org.elasticsearch.xpack.core.security.action.token.CreateTokenRequest; +import org.elasticsearch.xpack.core.security.action.token.CreateTokenResponse; +import org.elasticsearch.xpack.core.security.action.user.PutUserAction; +import org.elasticsearch.xpack.core.security.action.user.PutUserRequest; +import org.junit.Before; +import org.junit.BeforeClass; + +import java.security.AccessController; +import java.security.PrivilegedAction; +import java.util.Set; + +import static org.elasticsearch.test.SecuritySettingsSource.TEST_PASSWORD_HASHED; +import static org.hamcrest.Matchers.anEmptyMap; +import static org.hamcrest.Matchers.arrayWithSize; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.notNullValue; + +public abstract class AbstractProfileSingleNodeTestCase extends SecuritySingleNodeTestCase { + + protected static final String RAC_USER_NAME = "rac_user"; + protected static final String RAC_ROLE = "rac_role"; + protected static final SecureString NATIVE_RAC_USER_PASSWORD = new SecureString("native_rac_user_password".toCharArray()); + + // Needed for testing in IDE + @SuppressForbidden(reason = "sets the feature flag") + @BeforeClass + public static void enableFeature() { + AccessController.doPrivileged((PrivilegedAction) () -> System.setProperty("es.user_profile_feature_flag_enabled", "true")); + } + + @Override + protected Settings nodeSettings() { + final Settings.Builder builder = Settings.builder().put(super.nodeSettings()); + 
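+        // the token service is enabled so that doActivateProfile below can also exercise the "access_token" grant type via CreateTokenAction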
builder.put("xpack.security.authc.token.enabled", "true"); + return builder.build(); + } + + @Before + public void createNativeUsers() { + final PutUserRequest putUserRequest1 = new PutUserRequest(); + putUserRequest1.username(RAC_USER_NAME); + putUserRequest1.roles(RAC_ROLE); + final String nativeRacUserPasswordHash = new String(getFastStoredHashAlgoForTests().hash(NATIVE_RAC_USER_PASSWORD)); + putUserRequest1.passwordHash(nativeRacUserPasswordHash.toCharArray()); + putUserRequest1.email(RAC_USER_NAME + "@example.com"); + assertThat(client().execute(PutUserAction.INSTANCE, putUserRequest1).actionGet().created(), is(true)); + } + + @Override + protected String configUsers() { + return super.configUsers() + RAC_USER_NAME + ":" + TEST_PASSWORD_HASHED + "\n"; + } + + @Override + protected String configRoles() { + return super.configRoles() + "\n" + RAC_ROLE + ":\n" + " cluster:\n" + " - 'manage_own_api_key'\n" + " - 'monitor'\n"; + } + + @Override + protected String configUsersRoles() { + return super.configUsersRoles() + RAC_ROLE + ":" + RAC_USER_NAME + "\n"; + } + + protected Profile doActivateProfile(String username, SecureString password) { + // User and its access token should be associated to the same profile + return doActivateProfile(username, password, randomBoolean()); + } + + protected Profile doActivateProfile(String username, SecureString password, boolean useToken) { + final ActivateProfileRequest activateProfileRequest = new ActivateProfileRequest(); + if (useToken) { + final CreateTokenRequest createTokenRequest = new CreateTokenRequest("password", username, password.clone(), null, null, null); + final CreateTokenResponse createTokenResponse = client().execute(CreateTokenAction.INSTANCE, createTokenRequest).actionGet(); + activateProfileRequest.getGrant().setType("access_token"); + activateProfileRequest.getGrant().setAccessToken(new SecureString(createTokenResponse.getTokenString().toCharArray())); + } else { + activateProfileRequest.getGrant().setType("password"); + activateProfileRequest.getGrant().setUsername(username); + // clone the secureString because activate action closes it afterwards + activateProfileRequest.getGrant().setPassword(password.clone()); + } + + final ActivateProfileResponse activateProfileResponse = client().execute(ActivateProfileAction.INSTANCE, activateProfileRequest) + .actionGet(); + final Profile profile = activateProfileResponse.getProfile(); + assertThat(profile, notNullValue()); + assertThat(profile.user().username(), equalTo(username)); + assertThat(profile.applicationData(), anEmptyMap()); + return profile; + } + + protected Profile getProfile(String uid, Set dataKeys) { + final GetProfilesResponse getProfilesResponse = client().execute(GetProfileAction.INSTANCE, new GetProfileRequest(uid, dataKeys)) + .actionGet(); + assertThat(getProfilesResponse.getProfiles(), arrayWithSize(1)); + return getProfilesResponse.getProfiles()[0]; + } +} diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/profile/ProfileDomainSingleNodeTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/profile/ProfileDomainSingleNodeTests.java new file mode 100644 index 0000000000000..2f939487f662e --- /dev/null +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/profile/ProfileDomainSingleNodeTests.java @@ -0,0 +1,225 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.security.profile; + +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.action.support.PlainActionFuture; +import org.elasticsearch.action.support.WriteRequest; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.core.security.action.profile.Profile; +import org.elasticsearch.xpack.core.security.action.user.PutUserAction; +import org.elasticsearch.xpack.core.security.action.user.PutUserRequest; +import org.elasticsearch.xpack.core.security.authc.Authentication; +import org.elasticsearch.xpack.core.security.authc.AuthenticationContext; +import org.elasticsearch.xpack.core.security.authc.RealmConfig; +import org.elasticsearch.xpack.core.security.authc.RealmDomain; +import org.elasticsearch.xpack.core.security.authc.Subject; +import org.elasticsearch.xpack.core.security.user.User; + +import java.time.Instant; +import java.util.Set; +import java.util.stream.Collectors; +import java.util.stream.Stream; + +import static org.elasticsearch.test.SecuritySettingsSourceField.TEST_PASSWORD_SECURE_STRING; +import static org.elasticsearch.xpack.security.support.SecuritySystemIndices.INTERNAL_SECURITY_PROFILE_INDEX_8; +import static org.elasticsearch.xpack.security.support.SecuritySystemIndices.SECURITY_PROFILE_ALIAS; +import static org.hamcrest.Matchers.containsInAnyOrder; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.notNullValue; +import static org.hamcrest.Matchers.nullValue; + +public class ProfileDomainSingleNodeTests extends AbstractProfileSingleNodeTestCase { + + @Override + protected Settings nodeSettings() { + final Settings.Builder builder = Settings.builder().put(super.nodeSettings()); + // Register both file and native realms under the same domain + builder.put("xpack.security.authc.domains.my_domain.realms", "file,index"); + return builder.build(); + } + + public void testActivateProfileUnderDomain() { + // Activate 1st time with the file realm user + final Profile profile1 = doActivateProfile(RAC_USER_NAME, TEST_PASSWORD_SECURE_STRING); + assertThat(profile1.user().username(), equalTo(RAC_USER_NAME)); + assertThat(profile1.user().realmName(), equalTo("file")); + assertThat(profile1.user().domainName(), equalTo("my_domain")); + assertThat(profile1.user().email(), nullValue()); + assertThat(profile1.user().fullName(), nullValue()); + + // Get the profile back by ID + assertThat(getProfile(profile1.uid(), Set.of()), equalTo(profile1)); + + // Activate 2nd time with the native realm user and it should get the same profile + // because they are under the same domain. 
User fields are updated to the native user's info + final Profile profile2 = doActivateProfile(RAC_USER_NAME, NATIVE_RAC_USER_PASSWORD); + assertThat(profile2.uid(), equalTo(profile1.uid())); + assertThat(profile2.user().username(), equalTo(RAC_USER_NAME)); + assertThat(profile2.user().realmName(), equalTo("index")); + assertThat(profile2.user().domainName(), equalTo("my_domain")); + assertThat(profile2.user().email(), equalTo(RAC_USER_NAME + "@example.com")); + assertThat(profile2.user().fullName(), nullValue()); + assertThat(profile2.user().roles(), containsInAnyOrder(RAC_ROLE)); + + // Activate 3rd time with the file realm user again and it should get the same profile + // User fields are updated to the file user's info again + final Profile profile3 = doActivateProfile(RAC_USER_NAME, TEST_PASSWORD_SECURE_STRING); + assertThat(profile3.uid(), equalTo(profile1.uid())); + assertThat(profile3.user().realmName(), equalTo("file")); + assertThat(profile3.user().domainName(), equalTo("my_domain")); + assertThat(profile3.user().email(), nullValue()); + assertThat(profile3.user().fullName(), nullValue()); + assertThat(profile3.user().roles(), containsInAnyOrder(RAC_ROLE)); + + // Update native rac user + final PutUserRequest putUserRequest1 = new PutUserRequest(); + putUserRequest1.username(RAC_USER_NAME); + putUserRequest1.roles(RAC_ROLE, "superuser"); + putUserRequest1.email(null); + putUserRequest1.fullName("Native RAC User"); + assertThat(client().execute(PutUserAction.INSTANCE, putUserRequest1).actionGet().created(), is(false)); + + // Activate again with the native RAC user to the same profile + final Profile profile4 = doActivateProfile(RAC_USER_NAME, NATIVE_RAC_USER_PASSWORD); + assertThat(profile4.uid(), equalTo(profile1.uid())); + assertThat(profile4.user().username(), equalTo(RAC_USER_NAME)); + assertThat(profile4.user().realmName(), equalTo("index")); + assertThat(profile4.user().domainName(), equalTo("my_domain")); + assertThat(profile4.user().email(), nullValue()); + assertThat(profile4.user().fullName(), equalTo("Native RAC User")); + assertThat(profile4.user().roles(), containsInAnyOrder(RAC_ROLE, "superuser")); + + // Get by ID immediately should get the same document and content as the response to activate + assertThat(getProfile(profile1.uid(), Set.of()), equalTo(profile4)); + } + + public void testGetProfileByAuthenticationUnderDomain() { + final ProfileService profileService = node().injector().getInstance(ProfileService.class); + + final String nodeName = randomAlphaOfLengthBetween(3, 8); + final RealmConfig.RealmIdentifier realmIdentifier1 = new RealmConfig.RealmIdentifier("realm_type_1", "realm_name_1"); + final RealmConfig.RealmIdentifier realmIdentifier2 = new RealmConfig.RealmIdentifier("realm_type_2", "realm_name_2"); + + // Domain name does not matter + final String domainName = randomFrom("domainA", randomAlphaOfLengthBetween(5, 12)); + // The recorded realm is realm_name_1, domain realms must contain the recorded realm + final Set domainRealms = randomBoolean() + ? 
Set.of(realmIdentifier1, realmIdentifier2) + : Set.of(realmIdentifier1); + final RealmDomain realmDomain = new RealmDomain(domainName, domainRealms); + + final RealmConfig.RealmIdentifier authenticationRealmIdentifier = randomFrom(domainRealms); + + final Authentication authentication = new Authentication( + new User("foo"), + new Authentication.RealmRef( + authenticationRealmIdentifier.getName(), + authenticationRealmIdentifier.getType(), + nodeName, + realmDomain + ), + null + ); + final Subject subject = AuthenticationContext.fromAuthentication(authentication).getEffectiveSubject(); + + // Profile does not exist yet + final PlainActionFuture future1 = new PlainActionFuture<>(); + profileService.getVersionedDocument(subject, future1); + assertThat(future1.actionGet(), nullValue()); + + // Index the document so it can be found + // The document is created with realm_name_1 under domainA (member realms are realm_name_1 and realm_name_2) + final String uid2 = indexDocument(); + final PlainActionFuture future2 = new PlainActionFuture<>(); + profileService.getVersionedDocument(subject, future2); + final ProfileService.VersionedDocument versionedDocument = future2.actionGet(); + assertThat(versionedDocument, notNullValue()); + assertThat(versionedDocument.doc().uid(), equalTo(uid2)); + + // Index it again to trigger duplicate exception + final String uid3 = indexDocument(); + final PlainActionFuture future3 = new PlainActionFuture<>(); + profileService.getVersionedDocument(subject, future3); + final ElasticsearchException e3 = expectThrows(ElasticsearchException.class, future3::actionGet); + + assertThat( + e3.getMessage(), + containsString( + "multiple [2] profiles [" + Stream.of(uid2, uid3).sorted().collect(Collectors.joining(",")) + "] found for user [foo]" + ) + ); + } + + public void testGetProfileByAuthenticationDomainless() { + final ProfileService profileService = node().injector().getInstance(ProfileService.class); + // The document is created with realm_name_1 under domainA (member realms are realm_name_1 and realm_name_2) + final String uid1 = indexDocument(); + final String nodeName = randomAlphaOfLengthBetween(3, 8); + final RealmConfig.RealmIdentifier realmIdentifier1 = new RealmConfig.RealmIdentifier("realm_type_1", "realm_name_1"); + final RealmConfig.RealmIdentifier realmIdentifier2 = new RealmConfig.RealmIdentifier("realm_type_2", "realm_name_2"); + + // Scenario 1 + // The recorded realm_name_1 is no longer part of a domain. + // Authentication for this realm still works for retrieving the same profile document + final Authentication authentication1 = new Authentication( + new User("foo"), + new Authentication.RealmRef(realmIdentifier1.getName(), realmIdentifier1.getType(), nodeName), + null + ); + final Subject subject1 = AuthenticationContext.fromAuthentication(authentication1).getEffectiveSubject(); + + final PlainActionFuture future1 = new PlainActionFuture<>(); + profileService.getVersionedDocument(subject1, future1); + final ProfileService.VersionedDocument versionedDocument1 = future1.actionGet(); + assertThat(versionedDocument1, notNullValue()); + assertThat(versionedDocument1.doc().uid(), equalTo(uid1)); + + // Scenario 2 + // The recorded realm_name_1 is no longer part of a domain. 
+ // Authentication for realm_name_2 (which is still part of domainA) does not work for retrieving the profile document + final RealmDomain realmDomain1 = new RealmDomain("domainA", Set.of(realmIdentifier2)); + final Authentication authentication2 = new Authentication( + new User("foo"), + new Authentication.RealmRef(realmIdentifier2.getName(), realmIdentifier2.getType(), nodeName, realmDomain1), + null + ); + final Subject subject2 = AuthenticationContext.fromAuthentication(authentication2).getEffectiveSubject(); + + final PlainActionFuture future2 = new PlainActionFuture<>(); + profileService.getVersionedDocument(subject2, future2); + assertThat(future2.actionGet(), nullValue()); + + // Scenario 3 + // Both recorded realm_name_1 and the authentication realm_name_2 are no longer part of a domain. + final Authentication authentication3 = new Authentication( + new User("foo"), + new Authentication.RealmRef(realmIdentifier2.getName(), realmIdentifier2.getType(), nodeName), + null + ); + final Subject subject3 = AuthenticationContext.fromAuthentication(authentication3).getEffectiveSubject(); + + final PlainActionFuture future3 = new PlainActionFuture<>(); + profileService.getVersionedDocument(subject3, future3); + assertThat(future3.actionGet(), nullValue()); + } + + private String indexDocument() { + final String uid = randomAlphaOfLength(20); + final String source = ProfileServiceTests.SAMPLE_PROFILE_DOCUMENT_TEMPLATE.formatted(uid, Instant.now().toEpochMilli()); + client().prepareIndex(randomFrom(INTERNAL_SECURITY_PROFILE_INDEX_8, SECURITY_PROFILE_ALIAS)) + .setId("profile_" + uid) + .setRefreshPolicy(WriteRequest.RefreshPolicy.WAIT_UNTIL) + .setSource(source, XContentType.JSON) + .get(); + return uid; + } +} diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/profile/ProfileSingleNodeTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/profile/ProfileSingleNodeTests.java index d0b441fb11501..be85aa1eef66d 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/profile/ProfileSingleNodeTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/profile/ProfileSingleNodeTests.java @@ -7,84 +7,38 @@ package org.elasticsearch.xpack.security.profile; -import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.admin.indices.get.GetIndexAction; import org.elasticsearch.action.admin.indices.get.GetIndexRequest; -import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.action.support.WriteRequest; -import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.core.SuppressForbidden; import org.elasticsearch.index.engine.DocumentMissingException; -import org.elasticsearch.test.SecuritySingleNodeTestCase; import org.elasticsearch.xcontent.XContentType; -import org.elasticsearch.xpack.core.security.action.profile.ActivateProfileAction; -import org.elasticsearch.xpack.core.security.action.profile.ActivateProfileRequest; -import org.elasticsearch.xpack.core.security.action.profile.ActivateProfileResponse; -import org.elasticsearch.xpack.core.security.action.profile.GetProfileAction; -import org.elasticsearch.xpack.core.security.action.profile.GetProfileRequest; -import org.elasticsearch.xpack.core.security.action.profile.GetProfilesResponse; import org.elasticsearch.xpack.core.security.action.profile.Profile; 
import org.elasticsearch.xpack.core.security.action.profile.UpdateProfileDataAction; import org.elasticsearch.xpack.core.security.action.profile.UpdateProfileDataRequest; import org.elasticsearch.xpack.core.security.action.user.PutUserAction; import org.elasticsearch.xpack.core.security.action.user.PutUserRequest; -import org.elasticsearch.xpack.core.security.authc.Authentication; -import org.elasticsearch.xpack.core.security.user.User; -import org.junit.BeforeClass; -import java.security.AccessController; -import java.security.PrivilegedAction; -import java.time.Instant; import java.util.List; import java.util.Map; import java.util.Set; -import java.util.stream.Collectors; -import java.util.stream.Stream; -import static org.elasticsearch.test.SecuritySettingsSource.TEST_PASSWORD_HASHED; import static org.elasticsearch.test.SecuritySettingsSourceField.TEST_PASSWORD_SECURE_STRING; import static org.elasticsearch.xpack.security.support.SecuritySystemIndices.INTERNAL_SECURITY_PROFILE_INDEX_8; import static org.elasticsearch.xpack.security.support.SecuritySystemIndices.SECURITY_PROFILE_ALIAS; import static org.hamcrest.Matchers.anEmptyMap; import static org.hamcrest.Matchers.arrayContaining; -import static org.hamcrest.Matchers.arrayWithSize; +import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.containsInAnyOrder; -import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasItems; import static org.hamcrest.Matchers.hasKey; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.not; -import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.nullValue; -public class ProfileSingleNodeTests extends SecuritySingleNodeTestCase { - - private static final String RAC_USER_NAME = "rac_user"; - - // Needed for testing in IDE - @SuppressForbidden(reason = "sets the feature flag") - @BeforeClass - public static void enableFeature() { - AccessController.doPrivileged((PrivilegedAction) () -> System.setProperty("es.user_profile_feature_flag_enabled", "true")); - } - - @Override - protected String configUsers() { - return super.configUsers() + RAC_USER_NAME + ":" + TEST_PASSWORD_HASHED + "\n"; - } - - @Override - protected String configRoles() { - return super.configRoles() + "rac_role:\n" + " cluster:\n" + " - 'manage_own_api_key'\n" + " - 'monitor'\n"; - } - - @Override - protected String configUsersRoles() { - return super.configUsersRoles() + "rac_role:" + RAC_USER_NAME + "\n"; - } +public class ProfileSingleNodeTests extends AbstractProfileSingleNodeTestCase { @Override protected Settings nodeSettings() { @@ -131,70 +85,35 @@ public void testProfileIndexAutoCreation() { assertThat(userProfileProperties.keySet(), hasItems("uid", "enabled", "last_synchronized", "user", "access", "application_data")); } - public void testGetProfileByAuthentication() { - final ProfileService profileService = node().injector().getInstance(ProfileService.class); - final Authentication authentication = new Authentication( - new User("foo"), - new Authentication.RealmRef("realm_name_1", "realm_type_1", randomAlphaOfLengthBetween(3, 8)), - null - ); - - // Profile does not exist yet - final PlainActionFuture future1 = new PlainActionFuture<>(); - profileService.getVersionedDocument(authentication, future1); - assertThat(future1.actionGet(), nullValue()); - - // Index the document so it can be found - final String uid2 = 
indexDocument(); - final PlainActionFuture future2 = new PlainActionFuture<>(); - profileService.getVersionedDocument(authentication, future2); - final ProfileService.VersionedDocument versionedDocument = future2.actionGet(); - assertThat(versionedDocument, notNullValue()); - assertThat(versionedDocument.doc().uid(), equalTo(uid2)); - - // Index it again to trigger duplicate exception - final String uid3 = indexDocument(); - final PlainActionFuture future3 = new PlainActionFuture<>(); - profileService.getVersionedDocument(authentication, future3); - final ElasticsearchException e3 = expectThrows(ElasticsearchException.class, future3::actionGet); - - assertThat( - e3.getMessage(), - containsString( - "multiple [2] profiles [" + Stream.of(uid2, uid3).sorted().collect(Collectors.joining(",")) + "] found for user [foo]" - ) - ); - } - public void testActivateProfile() { final Profile profile1 = doActivateProfile(RAC_USER_NAME, TEST_PASSWORD_SECURE_STRING); assertThat(profile1.user().username(), equalTo(RAC_USER_NAME)); + assertThat(profile1.user().roles(), contains(RAC_ROLE)); + assertThat(profile1.user().realmName(), equalTo("file")); + assertThat(profile1.user().domainName(), equalTo("my_domain")); assertThat(profile1.user().email(), nullValue()); assertThat(profile1.user().fullName(), nullValue()); - + // Get by ID immediately should get the same document and content as the response to activate assertThat(getProfile(profile1.uid(), Set.of()), equalTo(profile1)); // activate again should be getting the same profile final Profile profile2 = doActivateProfile(RAC_USER_NAME, TEST_PASSWORD_SECURE_STRING); assertThat(profile2.uid(), equalTo(profile1.uid())); - - // Create another rac user in the native realm - final PutUserRequest putUserRequest1 = new PutUserRequest(); - putUserRequest1.username(RAC_USER_NAME); - putUserRequest1.roles("rac_role"); - final SecureString nativeRacUserPassword = new SecureString("native_rac_user_password".toCharArray()); - final String nativeRacUserPasswordHash = new String(getFastStoredHashAlgoForTests().hash(nativeRacUserPassword)); - putUserRequest1.passwordHash(nativeRacUserPasswordHash.toCharArray()); - putUserRequest1.email(RAC_USER_NAME + "@example.com"); - assertThat(client().execute(PutUserAction.INSTANCE, putUserRequest1).actionGet().created(), is(true)); - - // Since file and native realms are not in the same domain yet, the new profile should be a different one - final Profile profile3 = doActivateProfile(RAC_USER_NAME, nativeRacUserPassword); - assertThat(profile3.uid(), not(equalTo(profile1.uid()))); + // Get by ID immediately should get the same document and content as the response to activate + assertThat(getProfile(profile2.uid(), Set.of()), equalTo(profile2)); + + // Since file and native realms are not in the same domain, the new profile must be a different one + final Profile profile3 = doActivateProfile(RAC_USER_NAME, NATIVE_RAC_USER_PASSWORD); + assertThat(profile3.uid(), not(equalTo(profile1.uid()))); // NOT the same profile as the file user + assertThat(profile3.user().username(), equalTo(RAC_USER_NAME)); + assertThat(profile3.user().realmName(), equalTo("index")); + assertThat(profile3.user().domainName(), nullValue()); assertThat(profile3.user().email(), equalTo(RAC_USER_NAME + "@example.com")); assertThat(profile3.user().fullName(), nullValue()); - assertThat(profile3.user().roles(), containsInAnyOrder("rac_role")); + assertThat(profile3.user().roles(), contains(RAC_ROLE)); assertThat(profile3.access(), anEmptyMap()); + // Get by ID 
immediately should get the same document and content as the response to activate + assertThat(getProfile(profile3.uid(), Set.of()), equalTo(profile3)); // Manually inserting some application data client().prepareUpdate(randomFrom(INTERNAL_SECURITY_PROFILE_INDEX_8, SECURITY_PROFILE_ALIAS), "profile_" + profile3.uid()) @@ -224,21 +143,23 @@ public void testActivateProfile() { assertThat(profile4.applicationData(), equalTo(Map.of("my_app", Map.of("theme", "default")))); // Update native rac user - final PutUserRequest putUserRequest2 = new PutUserRequest(); - putUserRequest2.username(RAC_USER_NAME); - putUserRequest2.roles("rac_role", "superuser"); - putUserRequest2.email(null); - putUserRequest2.fullName("Native RAC User"); - assertThat(client().execute(PutUserAction.INSTANCE, putUserRequest2).actionGet().created(), is(false)); + final PutUserRequest putUserRequest1 = new PutUserRequest(); + putUserRequest1.username(RAC_USER_NAME); + putUserRequest1.roles(RAC_ROLE, "superuser"); + putUserRequest1.email(null); + putUserRequest1.fullName("Native RAC User"); + assertThat(client().execute(PutUserAction.INSTANCE, putUserRequest1).actionGet().created(), is(false)); // Activate again should see the updated user info - final Profile profile5 = doActivateProfile(RAC_USER_NAME, nativeRacUserPassword); + final Profile profile5 = doActivateProfile(RAC_USER_NAME, NATIVE_RAC_USER_PASSWORD); assertThat(profile5.uid(), equalTo(profile3.uid())); assertThat(profile5.user().email(), nullValue()); assertThat(profile5.user().fullName(), equalTo("Native RAC User")); - assertThat(profile5.user().roles(), containsInAnyOrder("rac_role", "superuser")); + assertThat(profile5.user().roles(), containsInAnyOrder(RAC_ROLE, "superuser")); // Re-activate should not change access assertThat(profile5.access(), equalTo(Map.of("my_app", Map.of("tag", "prod")))); + // Get by ID immediately should get the same document and content as the response to activate + assertThat(getProfile(profile5.uid(), Set.of()), equalTo(profile5)); // Re-activate should not change application data assertThat(getProfile(profile5.uid(), Set.of("my_app")).applicationData(), equalTo(Map.of("my_app", Map.of("theme", "default")))); } @@ -301,38 +222,4 @@ public void testUpdateProfileData() { () -> client().execute(UpdateProfileDataAction.INSTANCE, updateProfileDataRequest3).actionGet() ); } - - private Profile doActivateProfile(String username, SecureString password) { - final ActivateProfileRequest activateProfileRequest = new ActivateProfileRequest(); - activateProfileRequest.getGrant().setType("password"); - activateProfileRequest.getGrant().setUsername(username); - // clone the secureString because activate action closes it afterwards - activateProfileRequest.getGrant().setPassword(password.clone()); - - final ActivateProfileResponse activateProfileResponse = client().execute(ActivateProfileAction.INSTANCE, activateProfileRequest) - .actionGet(); - final Profile profile = activateProfileResponse.getProfile(); - assertThat(profile, notNullValue()); - assertThat(profile.user().username(), equalTo(username)); - assertThat(profile.applicationData(), anEmptyMap()); - return profile; - } - - private Profile getProfile(String uid, Set dataKeys) { - final GetProfilesResponse getProfilesResponse = client().execute(GetProfileAction.INSTANCE, new GetProfileRequest(uid, dataKeys)) - .actionGet(); - assertThat(getProfilesResponse.getProfiles(), arrayWithSize(1)); - return getProfilesResponse.getProfiles()[0]; - } - - private String indexDocument() { - final String 
uid = randomAlphaOfLength(20); - final String source = ProfileServiceTests.SAMPLE_PROFILE_DOCUMENT_TEMPLATE.formatted(uid, Instant.now().toEpochMilli()); - client().prepareIndex(randomFrom(INTERNAL_SECURITY_PROFILE_INDEX_8, SECURITY_PROFILE_ALIAS)) - .setId("profile_" + uid) - .setRefreshPolicy(WriteRequest.RefreshPolicy.WAIT_UNTIL) - .setSource(source, XContentType.JSON) - .get(); - return uid; - } } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/profile/ProfileDocument.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/profile/ProfileDocument.java index f2ad6c6fce35f..80062d4f77344 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/profile/ProfileDocument.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/profile/ProfileDocument.java @@ -10,7 +10,6 @@ import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.xcontent.ObjectParserHelper; -import org.elasticsearch.core.Nullable; import org.elasticsearch.xcontent.ConstructingObjectParser; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.ToXContent; @@ -30,6 +29,7 @@ import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; +import static org.elasticsearch.xpack.core.security.authc.Authentication.REALM_REF_PARSER; public record ProfileDocument( String uid, @@ -55,17 +55,9 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.startObject("user"); builder.field("username", username); builder.field("roles", roles); - builder.startObject("realm"); - builder.field("name", realm.getName()); - builder.field("type", realm.getType()); - builder.field("node_name", realm.getNodeName()); - builder.endObject(); - if (email != null) { - builder.field("email", email); - } - if (fullName != null) { - builder.field("full_name", fullName); - } + builder.field("realm", realm); + builder.field("email", email); + builder.field("full_name", fullName); if (displayName != null) { builder.field("display_name", displayName); } @@ -74,8 +66,9 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws return builder; } - public Profile.ProfileUser toProfileUser(@Nullable String realmDomain) { - return new Profile.ProfileUser(username, roles, realm.getName(), realmDomain, email, fullName, displayName, active); + public Profile.ProfileUser toProfileUser() { + final String domainName = realm.getDomain() != null ? realm.getDomain().name() : null; + return new Profile.ProfileUser(username, roles, realm.getName(), domainName, email, fullName, displayName, active); } } @@ -161,27 +154,13 @@ public static ProfileDocument fromXContent(XContentParser parser) { (args, v) -> (ProfileDocument) args[0] ); - // TODO:This is a copy from Authentication class. This version ignores unknown fields so that it currently ignores the domain field - // The support will be added later when authentication update is finalised. 
- public static ConstructingObjectParser REALM_REF_PARSER = new ConstructingObjectParser<>( - "realm_ref", - true, - (args, v) -> new Authentication.RealmRef((String) args[0], (String) args[1], (String) args[2]) - ); - - static { - REALM_REF_PARSER.declareString(constructorArg(), new ParseField("name")); - REALM_REF_PARSER.declareString(constructorArg(), new ParseField("type")); - REALM_REF_PARSER.declareString(constructorArg(), new ParseField("node_name")); - } - static { PROFILE_DOC_USER_PARSER.declareString(constructorArg(), new ParseField("username")); PROFILE_DOC_USER_PARSER.declareStringArray(constructorArg(), new ParseField("roles")); - PROFILE_DOC_USER_PARSER.declareObject(constructorArg(), (p, c) -> REALM_REF_PARSER.parse(p, null), new ParseField("realm")); - PROFILE_DOC_USER_PARSER.declareString(optionalConstructorArg(), new ParseField("email")); - PROFILE_DOC_USER_PARSER.declareString(optionalConstructorArg(), new ParseField("full_name")); - PROFILE_DOC_USER_PARSER.declareString(optionalConstructorArg(), new ParseField("display_name")); + PROFILE_DOC_USER_PARSER.declareObject(constructorArg(), (p, c) -> REALM_REF_PARSER.parse(p, c), new ParseField("realm")); + PROFILE_DOC_USER_PARSER.declareStringOrNull(optionalConstructorArg(), new ParseField("email")); + PROFILE_DOC_USER_PARSER.declareStringOrNull(optionalConstructorArg(), new ParseField("full_name")); + PROFILE_DOC_USER_PARSER.declareStringOrNull(optionalConstructorArg(), new ParseField("display_name")); PROFILE_DOC_USER_PARSER.declareBoolean(constructorArg(), new ParseField("active")); PROFILE_DOC_PARSER.declareString(constructorArg(), new ParseField("uid")); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/profile/ProfileService.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/profile/ProfileService.java index 0267d14402fee..52abce6f90ac5 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/profile/ProfileService.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/profile/ProfileService.java @@ -28,10 +28,12 @@ import org.elasticsearch.action.update.UpdateRequestBuilder; import org.elasticsearch.action.update.UpdateResponse; import org.elasticsearch.client.internal.Client; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.core.Nullable; +import org.elasticsearch.index.query.BoolQueryBuilder; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchHits; @@ -84,10 +86,10 @@ public ProfileService(Settings settings, Clock clock, Client client, SecurityInd } public void getProfile(String uid, @Nullable Set dataKeys, ActionListener listener) { - getVersionedDocument(uid, listener.map(versionedDocument -> { - // TODO: replace null with actual domain lookup - return versionedDocument != null ? versionedDocument.toProfile(null, dataKeys) : null; - })); + getVersionedDocument( + uid, + listener.map(versionedDocument -> versionedDocument != null ? 
versionedDocument.toProfile(dataKeys) : null) + ); } // TODO: with request when we take request body for profile activation @@ -119,7 +121,7 @@ public void activateProfile(Authentication authentication, ActionListener { + getVersionedDocument(subject, ActionListener.wrap(versionedDocument -> { if (versionedDocument == null) { createNewProfile(subject, listener); } else { @@ -183,16 +185,33 @@ private void getVersionedDocument(String uid, ActionListener } // Package private for testing - void getVersionedDocument(Authentication authentication, ActionListener listener) { + void getVersionedDocument(Subject subject, ActionListener listener) { tryFreezeAndCheckIndex(listener).ifPresent(frozenProfileIndex -> { - final SearchRequest searchRequest = client.prepareSearch(SECURITY_PROFILE_ALIAS) - .setQuery( - QueryBuilders.boolQuery() - .must(QueryBuilders.termQuery("user_profile.user.username", authentication.getUser().principal())) - // TODO: this will be replaced by domain lookup and reverse lookup - .must(QueryBuilders.termQuery("user_profile.user.realm.name", authentication.getSourceRealm().getName())) - ) - .request(); + final BoolQueryBuilder boolQuery = QueryBuilders.boolQuery() + .filter(QueryBuilders.termQuery("user_profile.user.username", subject.getUser().principal())); + if (subject.getRealm().getDomain() == null) { + boolQuery.filter(QueryBuilders.termQuery("user_profile.user.realm.name", subject.getRealm().getName())) + .filter(QueryBuilders.termQuery("user_profile.user.realm.type", subject.getRealm().getType())); + } else { + logger.debug( + () -> new ParameterizedMessage( + "searching existing profile document for user [{}] from any of the realms [{}] under domain [{}]", + subject.getUser().principal(), + Strings.collectionToCommaDelimitedString(subject.getRealm().getDomain().realms()), + subject.getRealm().getDomain().name() + ) + ); + subject.getRealm().getDomain().realms().forEach(realmIdentifier -> { + boolQuery.should( + QueryBuilders.boolQuery() + .filter(QueryBuilders.termQuery("user_profile.user.realm.name", realmIdentifier.getName())) + .filter(QueryBuilders.termQuery("user_profile.user.realm.type", realmIdentifier.getType())) + ); + }); + boolQuery.minimumShouldMatch(1); + } + + final SearchRequest searchRequest = client.prepareSearch(SECURITY_PROFILE_ALIAS).setQuery(boolQuery).request(); frozenProfileIndex.checkIndexVersionThenExecute( listener::onFailure, () -> executeAsyncWithOrigin( @@ -206,8 +225,8 @@ void getVersionedDocument(Authentication authentication, ActionListener listener) TransportSingleItemBulkWriteAction.wrapBulkResponse(ActionListener.wrap(indexResponse -> { assert docId.equals(indexResponse.getId()); // TODO: replace with actual domain information - listener.onResponse( - new VersionedDocument(profileDocument, indexResponse.getPrimaryTerm(), indexResponse.getSeqNo()).toProfile(null) + final VersionedDocument versionedDocument = new VersionedDocument( + profileDocument, + indexResponse.getPrimaryTerm(), + indexResponse.getSeqNo() ); + listener.onResponse(versionedDocument.toProfile(Set.of())); }, listener::onFailure)) ) ); @@ -274,7 +299,7 @@ private void updateProfileForActivate(Subject subject, VersionedDocument version ), listener.map( updateResponse -> new VersionedDocument(profileDocument, updateResponse.getPrimaryTerm(), updateResponse.getSeqNo()) - .toProfile(null) + .toProfile(Set.of()) ) ); } @@ -400,14 +425,13 @@ private ProfileDocument updateWithSubject(ProfileDocument doc, Subject subject) // Package private for testing record 
VersionedDocument(ProfileDocument doc, long primaryTerm, long seqNo) { - Profile toProfile(@Nullable String realmDomain) { - return toProfile(realmDomain, Set.of()); - } - - Profile toProfile(@Nullable String realmDomain, @Nullable Set dataKeys) { + /** + * Convert the index document to the user-facing Profile by filtering through the application data + */ + Profile toProfile(Set dataKeys) { + assert dataKeys != null : "data keys must not be null"; final Map applicationData; - // NOTE null is the same as empty which means not retrieving any application data - if (dataKeys == null || dataKeys.isEmpty()) { + if (dataKeys.isEmpty()) { applicationData = Map.of(); } else { applicationData = XContentHelper.convertToMap(doc.applicationData(), false, XContentType.JSON, dataKeys, null).v2(); @@ -417,7 +441,7 @@ Profile toProfile(@Nullable String realmDomain, @Nullable Set dataKeys) doc.uid(), doc.enabled(), doc.lastSynchronized(), - doc.user().toProfileUser(realmDomain), + doc.user().toProfileUser(), doc.access(), applicationData, new Profile.VersionControl(primaryTerm, seqNo) diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/profile/ProfileServiceTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/profile/ProfileServiceTests.java index a4e9c2dde5ad1..b21b9dd7a7bef 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/profile/ProfileServiceTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/profile/ProfileServiceTests.java @@ -137,7 +137,7 @@ public void testGetProfileByUid() { final PlainActionFuture future = new PlainActionFuture<>(); - final Set dataKeys = randomFrom(Set.of("app1"), Set.of("app2"), Set.of("app1", "app2"), Set.of(), null); + final Set dataKeys = randomFrom(Set.of("app1"), Set.of("app2"), Set.of("app1", "app2"), Set.of()); profileService.getProfile(uid, dataKeys, future); final Profile profile = future.actionGet(); @@ -163,7 +163,7 @@ public void testGetProfileByUid() { "foo", List.of("role1", "role2"), "realm_name_1", - null, + "domainA", "foo@example.com", "User Foo", "Curious Foo", diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/user_profile/10_basic.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/user_profile/10_basic.yml index f6d727b69099d..c9c6346d3fd38 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/user_profile/10_basic.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/user_profile/10_basic.yml @@ -39,6 +39,8 @@ teardown: - match: { "user.username" : "joe" } - match: { "user.roles" : [ "superuser" ] } - match: { "user.full_name" : "Bazooka Joe" } + - match: { "user.realm_name" : "default_native" } + - is_false: "user.realm_domain" - is_true: _doc - set: { uid: profile_uid } From 2d79dcc5ba81f113d1fe1914c0a2343f756b9ae8 Mon Sep 17 00:00:00 2001 From: Tim Vernum Date: Fri, 11 Feb 2022 18:06:34 +1100 Subject: [PATCH 053/167] Remove enable/disableUser methods from HLRC (#83626) Removes the `enableUser` and `disableUser` methods from the High Level Rest Client's `SecurityClient` and replaces existing usage in tests with a helper method. 
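Conceptually, the replacement collapses the two HLRC methods into a single
call against the user enable/disable endpoint via the low-level `RestClient`.
A minimal sketch of the pattern (the actual helper added by this patch is
`SecurityClientTestHelper.setUserEnabled`, shown in the diff below; the
wrapper class name here is illustrative):

    import java.io.IOException;

    import org.elasticsearch.client.Request;
    import org.elasticsearch.client.RestClient;

    public final class UserEnabledHelper {
        // Toggle a user's enabled flag through the low-level REST client,
        // replacing SecurityClient#enableUser / #disableUser from the HLRC.
        public static void setUserEnabled(RestClient client, String username, boolean enabled) throws IOException {
            final String endpoint = "/_security/user/" + username + "/" + (enabled ? "_enable" : "_disable");
            client.performRequest(new Request("PUT", endpoint));
        }
    }

Tests then call e.g. `setUserEnabled(getRestClient(), ElasticUser.NAME, false)`
instead of `security().disableUser(...)`.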
Relates: #83423 --- .../elasticsearch/client/SecurityClient.java | 64 ------------------- .../client/SecurityRequestConverters.java | 23 ------- .../client/security/DisableUserRequest.java | 19 ------ .../client/security/EnableUserRequest.java | 19 ------ .../security/SetUserEnabledRequest.java | 41 ------------ .../esnative/ReservedRealmIntegTests.java | 14 ++-- .../test/SecurityClientTestHelper.java | 24 +++++++ 7 files changed, 29 insertions(+), 175 deletions(-) delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/security/DisableUserRequest.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/security/EnableUserRequest.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/security/SetUserEnabledRequest.java create mode 100644 x-pack/plugin/security/src/test/java/org/elasticsearch/test/SecurityClientTestHelper.java diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/SecurityClient.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/SecurityClient.java index 83790585ee119..3ae8cd027f92a 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/SecurityClient.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/SecurityClient.java @@ -8,7 +8,6 @@ package org.elasticsearch.client; -import org.elasticsearch.action.ActionListener; import org.elasticsearch.client.security.AuthenticateRequest; import org.elasticsearch.client.security.AuthenticateResponse; import org.elasticsearch.client.security.ChangePasswordRequest; @@ -24,8 +23,6 @@ import org.elasticsearch.client.security.DeleteRoleResponse; import org.elasticsearch.client.security.DeleteUserRequest; import org.elasticsearch.client.security.DeleteUserResponse; -import org.elasticsearch.client.security.DisableUserRequest; -import org.elasticsearch.client.security.EnableUserRequest; import org.elasticsearch.client.security.GetApiKeyRequest; import org.elasticsearch.client.security.GetApiKeyResponse; import org.elasticsearch.client.security.GetRolesRequest; @@ -125,67 +122,6 @@ public PutRoleMappingResponse putRoleMapping(final PutRoleMappingRequest request ); } - /** - * Enable a native realm or built-in user synchronously. - * See - * the docs for more. - * - * @param request the request with the user to enable - * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return {@code true} if the request succeeded (the user is enabled) - * @throws IOException in case there is a problem sending the request or parsing back the response - */ - public boolean enableUser(EnableUserRequest request, RequestOptions options) throws IOException { - return restHighLevelClient.performRequest( - request, - SecurityRequestConverters::enableUser, - options, - RestHighLevelClient::convertExistsResponse, - emptySet() - ); - } - - /** - * Disable a native realm or built-in user synchronously. - * See - * the docs for more. - * - * @param request the request with the user to disable - * @param options the request options (e.g. 
headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return {@code true} if the request succeeded (the user is disabled) - * @throws IOException in case there is a problem sending the request or parsing back the response - */ - public boolean disableUser(DisableUserRequest request, RequestOptions options) throws IOException { - return restHighLevelClient.performRequest( - request, - SecurityRequestConverters::disableUser, - options, - RestHighLevelClient::convertExistsResponse, - emptySet() - ); - } - - /** - * Disable a native realm or built-in user asynchronously. - * See - * the docs for more. - * - * @param request the request with the user to disable - * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener the listener to be notified upon request completion - * @return cancellable that may be used to cancel the request - */ - public Cancellable disableUserAsync(DisableUserRequest request, RequestOptions options, ActionListener listener) { - return restHighLevelClient.performRequestAsync( - request, - SecurityRequestConverters::disableUser, - options, - RestHighLevelClient::convertExistsResponse, - listener, - emptySet() - ); - } - /** * Authenticate the current user and return all the information about the authenticated user. * See diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/SecurityRequestConverters.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/SecurityRequestConverters.java index 8adcbe71e288a..d1d0e5fea692d 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/SecurityRequestConverters.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/SecurityRequestConverters.java @@ -19,8 +19,6 @@ import org.elasticsearch.client.security.DeleteRoleMappingRequest; import org.elasticsearch.client.security.DeleteRoleRequest; import org.elasticsearch.client.security.DeleteUserRequest; -import org.elasticsearch.client.security.DisableUserRequest; -import org.elasticsearch.client.security.EnableUserRequest; import org.elasticsearch.client.security.GetApiKeyRequest; import org.elasticsearch.client.security.GetRolesRequest; import org.elasticsearch.client.security.InvalidateApiKeyRequest; @@ -29,7 +27,6 @@ import org.elasticsearch.client.security.PutRoleMappingRequest; import org.elasticsearch.client.security.PutRoleRequest; import org.elasticsearch.client.security.PutUserRequest; -import org.elasticsearch.client.security.SetUserEnabledRequest; import org.elasticsearch.common.Strings; import java.io.IOException; @@ -89,26 +86,6 @@ static Request putRoleMapping(final PutRoleMappingRequest putRoleMappingRequest) return request; } - static Request enableUser(EnableUserRequest enableUserRequest) { - return setUserEnabled(enableUserRequest); - } - - static Request disableUser(DisableUserRequest disableUserRequest) { - return setUserEnabled(disableUserRequest); - } - - private static Request setUserEnabled(SetUserEnabledRequest setUserEnabledRequest) { - String endpoint = new RequestConverters.EndpointBuilder().addPathPartAsIs("_security/user") - .addPathPart(setUserEnabledRequest.getUsername()) - .addPathPart(setUserEnabledRequest.isEnabled() ? 
"_enable" : "_disable") - .build(); - Request request = new Request(HttpPut.METHOD_NAME, endpoint); - RequestConverters.Params params = new RequestConverters.Params(); - params.withRefreshPolicy(setUserEnabledRequest.getRefreshPolicy()); - request.addParameters(params.asMap()); - return request; - } - static Request clearRealmCache(ClearRealmCacheRequest clearRealmCacheRequest) { RequestConverters.EndpointBuilder builder = new RequestConverters.EndpointBuilder().addPathPartAsIs("_security/realm"); if (clearRealmCacheRequest.getRealms().isEmpty() == false) { diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/security/DisableUserRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/security/DisableUserRequest.java deleted file mode 100644 index 2b9df8d14ab73..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/security/DisableUserRequest.java +++ /dev/null @@ -1,19 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ - -package org.elasticsearch.client.security; - -/** - * Request object to disable a native realm or built-in user. - */ -public final class DisableUserRequest extends SetUserEnabledRequest { - - public DisableUserRequest(String username, RefreshPolicy refreshPolicy) { - super(false, username, refreshPolicy); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/security/EnableUserRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/security/EnableUserRequest.java deleted file mode 100644 index 96324909dda2b..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/security/EnableUserRequest.java +++ /dev/null @@ -1,19 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ - -package org.elasticsearch.client.security; - -/** - * Request object to enable a native realm or built-in user. - */ -public final class EnableUserRequest extends SetUserEnabledRequest { - - public EnableUserRequest(String username, RefreshPolicy refreshPolicy) { - super(true, username, refreshPolicy); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/security/SetUserEnabledRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/security/SetUserEnabledRequest.java deleted file mode 100644 index 927a4dc1bb437..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/security/SetUserEnabledRequest.java +++ /dev/null @@ -1,41 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ - -package org.elasticsearch.client.security; - -import org.elasticsearch.client.Validatable; - -import java.util.Objects; - -/** - * Abstract request object to enable or disable a built-in or native user. - */ -public abstract class SetUserEnabledRequest implements Validatable { - - private final boolean enabled; - private final String username; - private final RefreshPolicy refreshPolicy; - - SetUserEnabledRequest(boolean enabled, String username, RefreshPolicy refreshPolicy) { - this.enabled = enabled; - this.username = Objects.requireNonNull(username, "username is required"); - this.refreshPolicy = refreshPolicy == null ? RefreshPolicy.getDefault() : refreshPolicy; - } - - public boolean isEnabled() { - return enabled; - } - - public String getUsername() { - return username; - } - - public RefreshPolicy getRefreshPolicy() { - return refreshPolicy; - } -} diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/esnative/ReservedRealmIntegTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/esnative/ReservedRealmIntegTests.java index 7b503538b24f4..63cca22082a37 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/esnative/ReservedRealmIntegTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/esnative/ReservedRealmIntegTests.java @@ -10,12 +10,11 @@ import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; import org.elasticsearch.client.RestHighLevelClient; import org.elasticsearch.client.security.ChangePasswordRequest; -import org.elasticsearch.client.security.DisableUserRequest; -import org.elasticsearch.client.security.EnableUserRequest; import org.elasticsearch.client.security.RefreshPolicy; import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.test.NativeRealmIntegTestCase; +import org.elasticsearch.test.SecurityClientTestHelper; import org.elasticsearch.xpack.core.security.authc.support.Hasher; import org.elasticsearch.xpack.core.security.user.APMSystemUser; import org.elasticsearch.xpack.core.security.user.BeatsSystemUser; @@ -98,7 +97,8 @@ public void testAuthenticateAfterEnablingUser() throws IOException { RemoteMonitoringUser.NAME ); for (String username : usernames) { - restClient.security().enableUser(new EnableUserRequest(username, RefreshPolicy.getDefault()), SECURITY_REQUEST_OPTIONS); + SecurityClientTestHelper.setUserEnabled(getRestClient(), username, true); + ClusterHealthResponse response = client().filterWithHeader( singletonMap("Authorization", basicAuthHeaderValue(username, getReservedPassword())) ).admin().cluster().prepareHealth().get(); @@ -159,9 +159,7 @@ public void testDisablingUser() throws Exception { assertThat(response.getClusterName(), is(cluster().getClusterName())); // disable user - final boolean disabled = restClient.security() - .disableUser(new DisableUserRequest(ElasticUser.NAME, RefreshPolicy.getDefault()), SECURITY_REQUEST_OPTIONS); - assertTrue(disabled); + SecurityClientTestHelper.setUserEnabled(getRestClient(), ElasticUser.NAME, false); ElasticsearchSecurityException elasticsearchSecurityException = expectThrows( ElasticsearchSecurityException.class, () -> client().filterWithHeader(singletonMap("Authorization", basicAuthHeaderValue(ElasticUser.NAME, getReservedPassword()))) @@ -173,9 +171,7 @@ public void testDisablingUser() throws Exception { 
assertThat(elasticsearchSecurityException.getMessage(), containsString("authenticate")); // enable - final boolean enabled = restClient.security() - .enableUser(new EnableUserRequest(ElasticUser.NAME, RefreshPolicy.getDefault()), SECURITY_REQUEST_OPTIONS); - assertTrue(enabled); + SecurityClientTestHelper.setUserEnabled(getRestClient(), ElasticUser.NAME, true); response = client().filterWithHeader(singletonMap("Authorization", basicAuthHeaderValue(ElasticUser.NAME, getReservedPassword()))) .admin() .cluster() diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/test/SecurityClientTestHelper.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/test/SecurityClientTestHelper.java new file mode 100644 index 0000000000000..5aa0df18e80d1 --- /dev/null +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/test/SecurityClientTestHelper.java @@ -0,0 +1,24 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.test; + +import org.apache.http.client.methods.HttpPut; +import org.elasticsearch.client.Request; +import org.elasticsearch.client.RestClient; + +import java.io.IOException; + +public class SecurityClientTestHelper { + + public static void setUserEnabled(RestClient client, String username, boolean enabled) throws IOException { + final String endpoint = "/_security/user/" + username + "/" + (enabled ? "_enable" : "_disable"); + final Request request = new Request(HttpPut.METHOD_NAME, endpoint); + request.setOptions(SecuritySettingsSource.SECURITY_REQUEST_OPTIONS); + client.performRequest(request); + } +} From 57be4569c3b9bc16b7173e3bc8d56ee866d1013d Mon Sep 17 00:00:00 2001 From: David Turner Date: Fri, 11 Feb 2022 08:19:48 +0000 Subject: [PATCH 054/167] Remove LegacyCTRAListener usage in NodeRemovalCSTE (#83797) Relates #83784 --- .../NodeRemovalClusterStateTaskExecutor.java | 15 +++++++++++++-- 1 file changed, 13 insertions(+), 2 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/cluster/coordination/NodeRemovalClusterStateTaskExecutor.java b/server/src/main/java/org/elasticsearch/cluster/coordination/NodeRemovalClusterStateTaskExecutor.java index e6c86ad3fe569..999b0ba8f7134 100644 --- a/server/src/main/java/org/elasticsearch/cluster/coordination/NodeRemovalClusterStateTaskExecutor.java +++ b/server/src/main/java/org/elasticsearch/cluster/coordination/NodeRemovalClusterStateTaskExecutor.java @@ -9,6 +9,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; +import org.elasticsearch.action.ActionListener; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterStateTaskExecutor; import org.elasticsearch.cluster.ClusterStateTaskListener; @@ -39,7 +40,7 @@ public void onNoLongerMaster() { @Override public void clusterStateProcessed(ClusterState oldState, ClusterState newState) { - onClusterStateProcessed.run(); + assert false : "not called"; } @Override @@ -67,7 +68,17 @@ public ClusterTasksResult execute(final ClusterState currentState, final L } else { logger.debug("node [{}] does not exist in cluster state, ignoring", task); } - resultBuilder.success(task, new LegacyClusterTaskResultActionListener(task, currentState)); + resultBuilder.success(task, new ActionListener<>() { + @Override + public void onResponse(ClusterState clusterState) { + 
task.onClusterStateProcessed.run(); + } + + @Override + public void onFailure(Exception e) { + task.onFailure(e); + } + }); } final ClusterState finalState; From ca78d928aa8693e9f3f83d8dd37cbd2fcf6ee622 Mon Sep 17 00:00:00 2001 From: David Turner Date: Fri, 11 Feb 2022 08:22:51 +0000 Subject: [PATCH 055/167] Remove LegacyCTRAListener usage in MasterService (#83796) Relates #83784 --- .../health/TransportClusterHealthAction.java | 2 +- .../cluster/ClusterStateTaskExecutor.java | 14 ++++++++--- .../cluster/LocalMasterServiceTask.java | 24 +++++++++++++++---- 3 files changed, 32 insertions(+), 8 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/health/TransportClusterHealthAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/health/TransportClusterHealthAction.java index 5e2034b6905c9..24cde795cf1ec 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/health/TransportClusterHealthAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/health/TransportClusterHealthAction.java @@ -112,7 +112,7 @@ private void waitForEventsAndExecuteHealth( if (request.local()) { new LocalMasterServiceTask(request.waitForEvents()) { @Override - public void clusterStateProcessed(ClusterState oldState, ClusterState newState) { + protected void onPublicationComplete() { final long timeoutInMillis = Math.max(0, endTimeRelativeMillis - threadPool.relativeTimeInMillis()); final TimeValue newTimeout = TimeValue.timeValueMillis(timeoutInMillis); request.timeout(newTimeout); diff --git a/server/src/main/java/org/elasticsearch/cluster/ClusterStateTaskExecutor.java b/server/src/main/java/org/elasticsearch/cluster/ClusterStateTaskExecutor.java index d5d1743706bd1..25384e38b612e 100644 --- a/server/src/main/java/org/elasticsearch/cluster/ClusterStateTaskExecutor.java +++ b/server/src/main/java/org/elasticsearch/cluster/ClusterStateTaskExecutor.java @@ -157,9 +157,17 @@ public ClusterTasksResult execute(ClusterState currentState, List tasks) t assert tasks.size() == 1 : "this only supports a single task but received " + tasks; final T task = tasks.get(0); final ClusterState newState = task.execute(currentState); - return ClusterTasksResult.builder() - .success(task, new LegacyClusterTaskResultActionListener(task, currentState)) - .build(newState); + return ClusterTasksResult.builder().success(task, new ActionListener<>() { + @Override + public void onResponse(ClusterState publishedState) { + task.clusterStateProcessed(currentState, publishedState); + } + + @Override + public void onFailure(Exception e) { + task.onFailure(e); + } + }).build(newState); } @Override diff --git a/server/src/main/java/org/elasticsearch/cluster/LocalMasterServiceTask.java b/server/src/main/java/org/elasticsearch/cluster/LocalMasterServiceTask.java index 87462cffcadce..ba137b4440a83 100644 --- a/server/src/main/java/org/elasticsearch/cluster/LocalMasterServiceTask.java +++ b/server/src/main/java/org/elasticsearch/cluster/LocalMasterServiceTask.java @@ -7,6 +7,7 @@ */ package org.elasticsearch.cluster; +import org.elasticsearch.action.ActionListener; import org.elasticsearch.cluster.service.MasterService; import org.elasticsearch.common.Priority; @@ -23,7 +24,14 @@ public LocalMasterServiceTask(Priority priority) { this.priority = priority; } - public void execute(ClusterState currentState) throws Exception {} + protected void execute(ClusterState currentState) throws Exception {} + + @Override + public final void clusterStateProcessed(ClusterState 
oldState, ClusterState newState) {
+        assert false : "not called";
+    }
+
+    protected void onPublicationComplete() {}
 
     public void submit(MasterService masterService, String source) {
         masterService.submitStateUpdateTask(
@@ -51,9 +59,17 @@ public ClusterTasksResult execute(ClusterState currentSt
                     assert tasks.size() == 1 && tasks.get(0) == thisTask
                         : "expected one-element task list containing current object but was " + tasks;
                     thisTask.execute(currentState);
-                    return ClusterTasksResult.builder()
-                        .success(thisTask, new LegacyClusterTaskResultActionListener(thisTask, currentState))
-                        .build(currentState);
+                    return ClusterTasksResult.builder().success(thisTask, new ActionListener<>() {
+                        @Override
+                        public void onResponse(ClusterState clusterState) {
+                            onPublicationComplete();
+                        }
+
+                        @Override
+                        public void onFailure(Exception e) {
+                            LocalMasterServiceTask.this.onFailure(e);
+                        }
+                    }).build(currentState);
                 }
             }
         );

From b09cd19fde800871ecc1a0c6e9806f82c989c2c3 Mon Sep 17 00:00:00 2001
From: Salvatore Campagna <93581129+salvatore-campagna@users.noreply.github.com>
Date: Fri, 11 Feb 2022 10:14:00 +0100
Subject: [PATCH 056/167] fix: backward compatibility with version 7.17.0
 (#83715)

PR #83339 was meant to be included in version 7.17.0 but didn't happen.
It landed in version 7.17.1. As a result, the version checks in the code
are incorrect. Instead of checking against version 7.17.0 we need to
adjust the code in such a way that it operates correctly with version
7.17.0. Version 7.17.0 is released and, as a result, it represents the
'reference' version we would like all following versions to
interoperate with.

Unfortunately this means that interoperability between version 7.17.0
and version 8.0.0 is broken.

In summary this fix needs to be backported to:

* 7.17.1: to fix interoperability of version 7.17.1 with 7.17.0
* 8.0.1: to fix interoperability of version 8.0.1 with 7.17.0
* 8.1.0 (currently BC): to fix interoperability of version 8.1.0 with 7.17.0
* master (8.2): to fix interoperability of version 8.2.0 with 7.17.0

Also, when reading and writing the range key, version 7.17.0 checks
against version 6.4.0 which is not possible right now. As a result, we
just check against the earliest possible version.
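Before the diff below, it is worth spelling out the read-side pattern this
fix applies. A minimal sketch, assuming the usual `org.elasticsearch.Version`
and `StreamInput` types; the class and method names are illustrative and do
not exist in `InternalRange`:

    import java.io.IOException;

    import org.elasticsearch.Version;
    import org.elasticsearch.common.io.stream.StreamInput;

    // Sketch (illustrative names, not part of InternalRange).
    final class RangeBoundReader {
        // Nodes before 7.17.0 send only the plain double; 7.17.0+ nodes
        // additionally send the original bound as an optional double, where
        // null means the bound was never set (an open-ended range).
        static double readBound(StreamInput in, double fallbackForNull) throws IOException {
            double bound = in.readDouble();
            if (in.getVersion().onOrAfter(Version.V_7_17_0)) {
                final Double original = in.readOptionalDouble();
                bound = original != null ? original : fallbackForNull;
            }
            return bound;
        }
    }

A caller would pass `Double.NEGATIVE_INFINITY` for `from` and
`Double.POSITIVE_INFINITY` for `to`, which is exactly the fallback the
`InternalRange` change below applies inline.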
---
 docs/changelog/83715.yaml                          |  5 +++++
 .../test/search.aggregation/40_range.yml           |  4 ++--
 .../aggregations/bucket/range/InternalRange.java   | 14 ++++++++++++--
 3 files changed, 19 insertions(+), 4 deletions(-)
 create mode 100644 docs/changelog/83715.yaml

diff --git a/docs/changelog/83715.yaml b/docs/changelog/83715.yaml
new file mode 100644
index 0000000000000..5fab17dbe2be8
--- /dev/null
+++ b/docs/changelog/83715.yaml
@@ -0,0 +1,5 @@
+pr: 83715
+summary: "Fix: backward compatibility with version 7.17.0"
+area: Aggregations
+type: bug
+issues: []
diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/40_range.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/40_range.yml
index 8039ebc68f0fa..88760b99714a6 100644
--- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/40_range.yml
+++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/search.aggregation/40_range.yml
@@ -142,8 +142,8 @@ setup:
 ---
 "Float range":
   - skip:
-      version: " - 7.16.99"
-      reason: Bug fixed in 8.1.0 and backported to 7.17.0
+      version: " - 7.17.0"
+      reason: Bug fixed in 8.1.0 and backported to 7.17.1
   - do:
       search:
         index: test
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/InternalRange.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/InternalRange.java
index 9d93e0bfdafb5..fa5595f049cee 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/InternalRange.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/InternalRange.java
@@ -266,11 +266,21 @@ public InternalRange(StreamInput in) throws IOException {
             String key = in.getVersion().onOrAfter(Version.V_7_17_1) ? in.readOptionalString() : in.readString();
             double from = in.readDouble();
             if (in.getVersion().onOrAfter(Version.V_7_17_0)) {
-                in.readOptionalDouble();
+                final Double originalFrom = in.readOptionalDouble();
+                if (originalFrom != null) {
+                    from = originalFrom;
+                } else {
+                    from = Double.NEGATIVE_INFINITY;
+                }
             }
             double to = in.readDouble();
             if (in.getVersion().onOrAfter(Version.V_7_17_0)) {
-                in.readOptionalDouble();
+                final Double originalTo = in.readOptionalDouble();
+                if (originalTo != null) {
+                    to = originalTo;
+                } else {
+                    to = Double.POSITIVE_INFINITY;
+                }
             }
             long docCount = in.readVLong();
             InternalAggregations aggregations = InternalAggregations.readFrom(in);

From e851efbaa1d0048909a96bdb942d6a2ebb462504 Mon Sep 17 00:00:00 2001
From: Martijn van Groningen
Date: Fri, 11 Feb 2022 10:26:25 +0100
Subject: [PATCH 057/167] Keep track of aliased indices (#80873)

The PR adds a top-level `aliasedIndices` map to `Metadata` that keeps
track of all the aliases that are defined in each `IndexMetadata`. This
`aliasedIndices` map isn't rebuilt on each `Metadata#build(...)`
invocation, but rather updated in place each time alias definitions are
added or removed from `IndexMetadata` instances. New `Metadata.Builder`
instances reuse the previous `aliasedIndices` map. This change is
beneficial because it allows the `indicesLookup` sorted map to always be
built lazily (which is what this PR is also doing). Prior to this change,
building the `indicesLookup` sorted map was required for certain alias
validation, but this is no longer the case, since that validation can
now use the `aliasedIndices` map, as sketched below.
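A minimal sketch of that incremental bookkeeping, simplified to plain
`String` index names instead of the real `IndexMetadata` and
`ImmutableOpenMap` types (the class and method shapes here are illustrative,
not the actual `Metadata.Builder` API):

    import java.util.HashMap;
    import java.util.HashSet;
    import java.util.Map;
    import java.util.Set;

    class AliasedIndicesBuilder {

        private final Map<String, Set<String>> aliasedIndices = new HashMap<>();

        // Invoked whenever an index's alias definitions change. Only aliases
        // actually added to or removed from this index are touched; nothing
        // is rebuilt from scratch.
        void update(String index, Set<String> previousAliases, Set<String> currentAliases) {
            for (String alias : previousAliases) {
                if (currentAliases.contains(alias) == false) {
                    final Set<String> indices = aliasedIndices.get(alias);
                    if (indices != null) {
                        indices.remove(index);
                        if (indices.isEmpty()) {
                            aliasedIndices.remove(alias); // keep hasAlias(...) accurate
                        }
                    }
                }
            }
            for (String alias : currentAliases) {
                if (previousAliases.contains(alias) == false) {
                    aliasedIndices.computeIfAbsent(alias, k -> new HashSet<>()).add(index);
                }
            }
        }

        Set<String> aliasedIndices(String aliasName) {
            return aliasedIndices.getOrDefault(aliasName, Set.of());
        }

        boolean hasAlias(String aliasName) {
            return aliasedIndices.containsKey(aliasName);
        }
    }

The design point is that only the aliases that changed are touched, so
answering "does this alias exist" or "which indices does this alias point
to" no longer requires building the full `indicesLookup`.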
Building the `indicesLookup` sorted map can get expensive in clusters
with many indices, and always being able to delay this computation to
after a new cluster state has been built and published can reduce load
on the cluster state update thread on the elected master node.

Also, two helper methods have been added to the `Metadata` class to
easily and efficiently get access to aliases and the indices these
aliases refer to. Logically aliases are often seen as independent
resources, but this isn't the case, and getting access to an entire
alias requires checking all `IndexMetadata` instances. This change will
make this easier, without changing how we store aliases in the cluster
state. Changing that would be much more difficult.
---
 .../cluster/ClusterStateDiffIT.java           |   4 +-
 .../cluster/metadata/IndexAbstraction.java    |  74 +----
 .../cluster/metadata/Metadata.java            | 263 ++++++++++++++----
 .../allocation/IndexMetadataUpdater.java      |   2 +-
 .../cluster/service/MasterService.java        |   4 +-
 .../MetadataCreateIndexServiceTests.java      |   9 +
 .../MetadataDeleteIndexServiceTests.java      |   2 +
 .../MetadataIndexAliasesServiceTests.java     |   6 +
 .../cluster/metadata/MetadataTests.java       | 167 +++++++----
 .../core/ilm/CopyExecutionStateStep.java      |   2 +-
 .../xpack/core/ilm/CopySettingsStep.java      |   2 +-
 .../core/ilm/GenerateSnapshotNameStep.java    |   1 -
 .../core/ilm/GenerateUniqueIndexNameStep.java |   2 +-
 .../core/ilm/InitializePolicyContextStep.java |   2 +-
 .../xpack/core/ilm/PhaseCacheManagement.java  |   4 +-
 .../ReplaceDataStreamBackingIndexStep.java    |   2 +-
 .../ilm/UpdateRolloverLifecycleDateStep.java  |   4 +-
 .../xpack/ilm/IndexLifecycleRunner.java       |   2 -
 .../xpack/ilm/IndexLifecycleTransition.java   |   1 -
 19 files changed, 358 insertions(+), 195 deletions(-)

diff --git a/server/src/internalClusterTest/java/org/elasticsearch/cluster/ClusterStateDiffIT.java b/server/src/internalClusterTest/java/org/elasticsearch/cluster/ClusterStateDiffIT.java
index 6603a6ee5aee6..ed71f7845bb61 100644
--- a/server/src/internalClusterTest/java/org/elasticsearch/cluster/ClusterStateDiffIT.java
+++ b/server/src/internalClusterTest/java/org/elasticsearch/cluster/ClusterStateDiffIT.java
@@ -570,9 +570,7 @@ public IndexMetadata randomChange(IndexMetadata part) {
                 }
                 break;
             case 2:
-                builder.settings(
-                    Settings.builder().put(part.getSettings()).put(IndexMetadata.SETTING_INDEX_UUID, UUIDs.randomBase64UUID())
-                );
+                builder.settings(Settings.builder().put(part.getSettings()).put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0));
                 break;
             default:
                 throw new IllegalArgumentException("Shouldn't be here");
diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexAbstraction.java b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexAbstraction.java
index a372649a6ce2f..b974b774b22e7 100644
--- a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexAbstraction.java
+++ b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexAbstraction.java
@@ -10,7 +10,6 @@
 import org.elasticsearch.action.DocWriteRequest;
 import org.elasticsearch.action.index.IndexRequest;
 import org.elasticsearch.common.ParsingException;
-import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.time.DateFormatters;
 import org.elasticsearch.core.Nullable;
 import org.elasticsearch.index.Index;
@@ -26,7 +25,6 @@
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Locale;
-import java.util.Map;
 import java.util.Objects;
 import java.util.Set;
 import java.util.stream.Collectors;
@@ -251,20 +249,12 @@ public Alias(AliasMetadata aliasMetadata, 
List indices) { } else if (writeIndices.size() == 1) { this.writeIndex = writeIndices.get(0).getIndex(); } else { - List writeIndicesStrings = writeIndices.stream().map(i -> i.getIndex().getName()).collect(Collectors.toList()); - throw new IllegalStateException( - "alias [" - + aliasName - + "] has more than one write index [" - + Strings.collectionToCommaDelimitedString(writeIndicesStrings) - + "]" - ); + throw new IllegalStateException("write indices size can only be 0 or 1, but is [" + writeIndices.size() + "]"); } this.isHidden = aliasMetadata.isHidden() == null ? false : aliasMetadata.isHidden(); this.isSystem = indices.stream().allMatch(IndexMetadata::isSystem); dataStreamAlias = false; - validateAliasProperties(indices); } public Alias(DataStreamAlias dataStreamAlias, List indicesOfAllDataStreams, Index writeIndexOfWriteDataStream) { @@ -321,68 +311,6 @@ public List getAliases() { return null; } - private void validateAliasProperties(List referenceIndexMetadatas) { - // Validate hidden status - final Map> groupedByHiddenStatus = referenceIndexMetadatas.stream() - .collect(Collectors.groupingBy(idxMeta -> Boolean.TRUE.equals(idxMeta.getAliases().get(aliasName).isHidden()))); - if (isNonEmpty(groupedByHiddenStatus.get(true)) && isNonEmpty(groupedByHiddenStatus.get(false))) { - List hiddenOn = groupedByHiddenStatus.get(true) - .stream() - .map(idx -> idx.getIndex().getName()) - .collect(Collectors.toList()); - List nonHiddenOn = groupedByHiddenStatus.get(false) - .stream() - .map(idx -> idx.getIndex().getName()) - .collect(Collectors.toList()); - throw new IllegalStateException( - "alias [" - + aliasName - + "] has is_hidden set to true on indices [" - + Strings.collectionToCommaDelimitedString(hiddenOn) - + "] but does not have is_hidden set to true on indices [" - + Strings.collectionToCommaDelimitedString(nonHiddenOn) - + "]; alias must have the same is_hidden setting " - + "on all indices" - ); - } - - // Validate system status - - final Map> groupedBySystemStatus = referenceIndexMetadatas.stream() - .collect(Collectors.groupingBy(IndexMetadata::isSystem)); - // If the alias has either all system or all non-system, then no more validation is required - if (isNonEmpty(groupedBySystemStatus.get(false)) && isNonEmpty(groupedBySystemStatus.get(true))) { - final List newVersionSystemIndices = groupedBySystemStatus.get(true) - .stream() - .filter(i -> i.getCreationVersion().onOrAfter(IndexNameExpressionResolver.SYSTEM_INDEX_ENFORCEMENT_VERSION)) - .map(i -> i.getIndex().getName()) - .sorted() // reliable error message for testing - .collect(Collectors.toList()); - - if (newVersionSystemIndices.isEmpty() == false) { - final List nonSystemIndices = groupedBySystemStatus.get(false) - .stream() - .map(i -> i.getIndex().getName()) - .sorted() // reliable error message for testing - .collect(Collectors.toList()); - throw new IllegalStateException( - "alias [" - + aliasName - + "] refers to both system indices " - + newVersionSystemIndices - + " and non-system indices: " - + nonSystemIndices - + ", but aliases must refer to either system or" - + " non-system indices, not both" - ); - } - } - } - - private boolean isNonEmpty(List idxMetas) { - return (Objects.isNull(idxMetas) || idxMetas.isEmpty()) == false; - } - @Override public boolean equals(Object o) { if (this == o) return true; diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/Metadata.java b/server/src/main/java/org/elasticsearch/cluster/metadata/Metadata.java index 41f13470fbec2..975d68bdea9c8 100644 --- 
a/server/src/main/java/org/elasticsearch/cluster/metadata/Metadata.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/Metadata.java @@ -203,6 +203,7 @@ default boolean isRestorable() { private final Settings settings; private final DiffableStringMap hashesOfConsistentSettings; private final ImmutableOpenMap indices; + private final ImmutableOpenMap> aliasedIndices; private final ImmutableOpenMap templates; private final ImmutableOpenMap customs; @@ -233,6 +234,7 @@ private Metadata( int totalNumberOfShards, int totalOpenIndexShards, ImmutableOpenMap indices, + ImmutableOpenMap> aliasedIndices, ImmutableOpenMap templates, ImmutableOpenMap customs, String[] allIndices, @@ -254,6 +256,7 @@ private Metadata( this.settings = settings; this.hashesOfConsistentSettings = hashesOfConsistentSettings; this.indices = indices; + this.aliasedIndices = aliasedIndices; this.customs = customs; this.templates = templates; this.totalNumberOfShards = totalNumberOfShards; @@ -282,6 +285,7 @@ public Metadata withIncrementedVersion() { totalNumberOfShards, totalOpenIndexShards, indices, + aliasedIndices, templates, customs, allIndices, @@ -339,15 +343,6 @@ public Version oldestIndexVersion() { return this.oldestIndexVersion; } - public boolean hasAlias(String alias) { - IndexAbstraction indexAbstraction = getIndicesLookup().get(alias); - if (indexAbstraction != null) { - return indexAbstraction.getType() == IndexAbstraction.Type.ALIAS; - } else { - return false; - } - } - public boolean equalsAliases(Metadata other) { for (IndexMetadata otherIndex : other.indices().values()) { IndexMetadata thisIndex = index(otherIndex.getIndex()); @@ -383,6 +378,10 @@ public SortedMap getIndicesLookup() { return indicesLookup; } + public boolean sameIndicesLookup(Metadata other) { + return this.indicesLookup == other.indicesLookup; + } + /** * Finds the specific index aliases that point to the requested concrete indices directly * or that match with the indices via wildcards. @@ -787,6 +786,36 @@ public ImmutableOpenMap getIndices() { return indices(); } + /** + * Returns whether an alias exists with the provided alias name. + * + * @param aliasName The provided alias name + * @return whether an alias exists with the provided alias name + */ + public boolean hasAlias(String aliasName) { + return aliasedIndices.containsKey(aliasName) || dataStreamAliases().containsKey(aliasName); + } + + /** + * Returns all the indices that the alias with the provided alias name refers to. + * Note that this only returns indices that have been aliased directly, + * and not indices that are behind a data stream or data stream alias. + * + * @param aliasName The provided alias name + * @return all the indices aliased by the alias with the provided alias name + */ + public Set aliasedIndices(String aliasName) { + Objects.requireNonNull(aliasName); + return aliasedIndices.getOrDefault(aliasName, Set.of()); + } + + /** + * @return the names of all index aliases.
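+ * <p>Illustrative usage sketch for the two helpers above (the alias name {@code logs}
+ * is hypothetical, not taken from this change):
+ * <pre>
+ * if (metadata.hasAlias("logs")) {
+ *     Set indices = metadata.aliasedIndices("logs"); // every directly aliased index
+ * }
+ * </pre>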
+ */ + public Set aliasedIndices() { + return aliasedIndices.keySet(); + } + public ImmutableOpenMap templates() { return this.templates; } @@ -1136,6 +1165,7 @@ public static class Builder { private DiffableStringMap hashesOfConsistentSettings = DiffableStringMap.EMPTY; private final ImmutableOpenMap.Builder indices; + private final ImmutableOpenMap.Builder> aliasedIndices; private final ImmutableOpenMap.Builder templates; private final ImmutableOpenMap.Builder customs; @@ -1155,15 +1185,17 @@ public Builder() { this.hashesOfConsistentSettings = metadata.hashesOfConsistentSettings; this.version = metadata.version; this.indices = ImmutableOpenMap.builder(metadata.indices); + this.aliasedIndices = ImmutableOpenMap.builder(metadata.aliasedIndices); this.templates = ImmutableOpenMap.builder(metadata.templates); this.customs = ImmutableOpenMap.builder(metadata.customs); - previousIndicesLookup = metadata.getIndicesLookup(); + this.previousIndicesLookup = metadata.indicesLookup; this.mappingsByHash = new HashMap<>(metadata.mappingsByHash); } private Builder(Map mappingsByHash) { clusterUUID = UNKNOWN_CLUSTER_UUID; indices = ImmutableOpenMap.builder(); + aliasedIndices = ImmutableOpenMap.builder(); templates = ImmutableOpenMap.builder(); customs = ImmutableOpenMap.builder(); indexGraveyard(IndexGraveyard.builder().build()); // create new empty index graveyard to initialize @@ -1177,6 +1209,7 @@ public Builder put(IndexMetadata.Builder indexMetadataBuilder) { dedupeMapping(indexMetadataBuilder); IndexMetadata indexMetadata = indexMetadataBuilder.build(); IndexMetadata previous = indices.put(indexMetadata.getIndex().getName(), indexMetadata); + updateAliases(previous, indexMetadata); if (unsetPreviousIndicesLookup(previous, indexMetadata)) { previousIndicesLookup = null; } @@ -1193,6 +1226,7 @@ public Builder put(IndexMetadata indexMetadata, boolean incrementVersion) { indexMetadata = IndexMetadata.builder(indexMetadata).version(indexMetadata.getVersion() + 1).build(); } IndexMetadata previous = indices.put(indexMetadata.getIndex().getName(), indexMetadata); + updateAliases(previous, indexMetadata); if (unsetPreviousIndicesLookup(previous, indexMetadata)) { previousIndicesLookup = null; } @@ -1246,7 +1280,8 @@ public IndexMetadata getSafe(Index index) { public Builder remove(String index) { previousIndicesLookup = null; - indices.remove(index); + IndexMetadata previous = indices.remove(index); + updateAliases(previous, null); return this; } @@ -1255,6 +1290,7 @@ public Builder removeAllIndices() { indices.clear(); mappingsByHash.clear(); + aliasedIndices.clear(); return this; } @@ -1267,6 +1303,67 @@ public Builder indices(ImmutableOpenMap indices) { return this; } + void updateAliases(IndexMetadata previous, IndexMetadata current) { + if (previous == null && current != null) { + for (var cursor : current.getAliases()) { + putAlias(cursor.key, current.getIndex()); + } + } else if (previous != null && current == null) { + for (var cursor : previous.getAliases()) { + removeAlias(cursor.key, previous.getIndex()); + } + } else if (previous != null && current != null) { + if (Objects.equals(previous.getAliases(), current.getAliases())) { + return; + } + + for (var currentCursor : current.getAliases()) { + if (previous.getAliases().containsKey(currentCursor.key) == false) { + putAlias(currentCursor.key, current.getIndex()); + } + } + for (var previousCursor : previous.getAliases()) { + if (current.getAliases().containsKey(previousCursor.key) == false) { + removeAlias(previousCursor.key, 
current.getIndex()); + } + } + } + } + + private Builder putAlias(String alias, Index index) { + Objects.requireNonNull(alias); + Objects.requireNonNull(index); + + Set indices = new HashSet<>(aliasedIndices.getOrDefault(alias, Set.of())); + if (indices.add(index) == false) { + return this; // indices already contained this index + } + aliasedIndices.put(alias, Collections.unmodifiableSet(indices)); + return this; + } + + private Builder removeAlias(String alias, Index index) { + Objects.requireNonNull(alias); + Objects.requireNonNull(index); + + Set indices = aliasedIndices.get(alias); + if (indices == null || indices.isEmpty()) { + throw new IllegalStateException("Cannot remove non-existent alias [" + alias + "] for index [" + index.getName() + "]"); + } + + indices = new HashSet<>(indices); + if (indices.remove(index) == false) { + throw new IllegalStateException("Cannot remove non-existent alias [" + alias + "] for index [" + index.getName() + "]"); + } + + if (indices.isEmpty()) { + aliasedIndices.remove(alias); // for consistency, we don't store empty sets, so null it out + } else { + aliasedIndices.put(alias, Collections.unmodifiableSet(indices)); + } + return this; + } + public Builder put(IndexTemplateMetadata.Builder template) { return put(template.build()); } @@ -1618,18 +1715,6 @@ public Builder generateClusterUuidIfNeeded() { * @return a new Metadata instance */ public Metadata build() { - return build(true); - } - - /** - * @param builtIndicesLookupEagerly Controls whether indices lookup should be build as part of the execution of this method - * or after when needed. Almost all of the time indices lookup should be built eagerly, however - * in certain cases when Metdata instances are build that are not published and - * many indices have been defined then it makes sense to skip building indices lookup. - * - * @return a new Metadata instance - */ - public Metadata build(boolean builtIndicesLookupEagerly) { // TODO: We should move these datastructures to IndexNameExpressionResolver, this will give the following benefits: // 1) The datastructures will be rebuilt only when needed. Now during serializing we rebuild these datastructures // while these datastructures aren't even used. 
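The TODO above is about deferred computation: `indicesLookup` is only built when a caller first asks for it, and is carried forward unchanged when possible. A minimal, self-contained sketch of that lazy-build pattern follows; the names are illustrative, not the actual `Metadata` internals:

import java.util.function.Supplier;

// Minimal sketch of the lazy-build pattern this commit relies on: compute the
// expensive value at most once per instance, and only when a caller asks for it.
final class LazyLookup<T> {
    private final Supplier<T> builder;
    private volatile T value; // stays null until the first getIndicesLookup-style access

    LazyLookup(Supplier<T> builder) {
        this.builder = builder;
    }

    T get() {
        T result = value;
        if (result == null) {   // benign race: at worst the value is built twice
            result = builder.get();
            value = result;
        }
        return result;
    }
}

Reusing the previously built map when no relevant index changed, as `previousIndicesLookup` does in the builder above, is what makes the `sameIndicesLookup` assertions elsewhere in this patch cheap.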
@@ -1639,7 +1724,6 @@ public Metadata build(boolean builtIndicesLookupEagerly) { final List visibleOpenIndices = new ArrayList<>(); final List allClosedIndices = new ArrayList<>(); final List visibleClosedIndices = new ArrayList<>(); - final Set indicesAliases = new HashSet<>(); final ImmutableOpenMap indicesMap = indices.build(); final Set allIndices = indicesMap.keySet(); @@ -1662,23 +1746,24 @@ public Metadata build(boolean builtIndicesLookupEagerly) { visibleClosedIndices.add(name); } } - indexMetadata.getAliases().keysIt().forEachRemaining(indicesAliases::add); oldestIndexVersionId = Math.min(oldestIndexVersionId, indexMetadata.getCompatibilityVersion().id); } + var aliasedIndices = this.aliasedIndices.build(); + for (var cursor : aliasedIndices) { + List aliasIndices = cursor.value.stream() + .map(idx -> indicesMap.get(idx.getName())) + .collect(Collectors.toList()); + validateAlias(cursor.key, aliasIndices); + } final DataStreamMetadata dataStreamMetadata = (DataStreamMetadata) this.customs.get(DataStreamMetadata.TYPE); - ensureNoNameCollisions(indicesAliases, indicesMap, allIndices, dataStreamMetadata); + ensureNoNameCollisions(aliasedIndices.keySet(), indicesMap, allIndices, dataStreamMetadata); + assert assertDataStreams(indicesMap, dataStreamMetadata); - SortedMap indicesLookup; + SortedMap indicesLookup = null; if (previousIndicesLookup != null) { assert previousIndicesLookup.equals(buildIndicesLookup(dataStreamMetadata, indicesMap)); indicesLookup = previousIndicesLookup; - } else { - if (builtIndicesLookupEagerly) { - indicesLookup = buildIndicesLookup(dataStreamMetadata, indicesMap); - } else { - indicesLookup = null; - } } purgeUnusedEntries(indicesMap); @@ -1715,6 +1800,7 @@ public Metadata build(boolean builtIndicesLookupEagerly) { totalNumberOfShards, totalOpenIndexShards, indicesMap, + aliasedIndices, templates.build(), customs.build(), allIndicesArray, @@ -1904,32 +1990,108 @@ static SortedMap buildIndicesLookup( assert existing == null : "duplicate for " + entry.getKey(); } - validateDataStreams(indicesLookup, dataStreamMetadata); return Collections.unmodifiableSortedMap(indicesLookup); } - static void validateDataStreams(SortedMap indicesLookup, @Nullable DataStreamMetadata dsMetadata) { + private static boolean isNonEmpty(List idxMetas) { + return (Objects.isNull(idxMetas) || idxMetas.isEmpty()) == false; + } + + private static void validateAlias(String aliasName, List indexMetadatas) { + // Validate write indices + List writeIndices = indexMetadatas.stream() + .filter(idxMeta -> Boolean.TRUE.equals(idxMeta.getAliases().get(aliasName).writeIndex())) + .map(im -> im.getIndex().getName()) + .collect(Collectors.toList()); + if (writeIndices.size() > 1) { + throw new IllegalStateException( + "alias [" + + aliasName + + "] has more than one write index [" + + Strings.collectionToCommaDelimitedString(writeIndices) + + "]" + ); + } + + // Validate hidden status + final Map> groupedByHiddenStatus = indexMetadatas.stream() + .collect(Collectors.groupingBy(idxMeta -> Boolean.TRUE.equals(idxMeta.getAliases().get(aliasName).isHidden()))); + if (isNonEmpty(groupedByHiddenStatus.get(true)) && isNonEmpty(groupedByHiddenStatus.get(false))) { + List hiddenOn = groupedByHiddenStatus.get(true) + .stream() + .map(idx -> idx.getIndex().getName()) + .collect(Collectors.toList()); + List nonHiddenOn = groupedByHiddenStatus.get(false) + .stream() + .map(idx -> idx.getIndex().getName()) + .collect(Collectors.toList()); + throw new IllegalStateException( + "alias [" + + aliasName + + "] has 
is_hidden set to true on indices [" + + Strings.collectionToCommaDelimitedString(hiddenOn) + + "] but does not have is_hidden set to true on indices [" + + Strings.collectionToCommaDelimitedString(nonHiddenOn) + + "]; alias must have the same is_hidden setting " + + "on all indices" + ); + } + + // Validate system status + final Map> groupedBySystemStatus = indexMetadatas.stream() + .collect(Collectors.groupingBy(IndexMetadata::isSystem)); + // If the alias has either all system or all non-system, then no more validation is required + if (isNonEmpty(groupedBySystemStatus.get(false)) && isNonEmpty(groupedBySystemStatus.get(true))) { + final List newVersionSystemIndices = groupedBySystemStatus.get(true) + .stream() + .filter(i -> i.getCreationVersion().onOrAfter(IndexNameExpressionResolver.SYSTEM_INDEX_ENFORCEMENT_VERSION)) + .map(i -> i.getIndex().getName()) + .sorted() // reliable error message for testing + .collect(Collectors.toList()); + + if (newVersionSystemIndices.isEmpty() == false) { + final List nonSystemIndices = groupedBySystemStatus.get(false) + .stream() + .map(i -> i.getIndex().getName()) + .sorted() // reliable error message for testing + .collect(Collectors.toList()); + throw new IllegalStateException( + "alias [" + + aliasName + + "] refers to both system indices " + + newVersionSystemIndices + + " and non-system indices: " + + nonSystemIndices + + ", but aliases must refer to either system or" + + " non-system indices, not both" + ); + } + } + } + + static boolean assertDataStreams(ImmutableOpenMap indices, @Nullable DataStreamMetadata dsMetadata) { if (dsMetadata != null) { // Sanity check, because elsewhere a more user friendly error should have occurred: - List conflictingAliases = indicesLookup.values() - .stream() - .filter(ia -> ia.getType() == IndexAbstraction.Type.ALIAS) - .filter(ia -> ia.isDataStreamRelated() == false) - .filter(ia -> { - for (Index index : ia.getIndices()) { - if (indicesLookup.get(index.getName()).getParentDataStream() != null) { - return true; + List conflictingAliases = null; + + for (var dataStream : dsMetadata.dataStreams().values()) { + for (var index : dataStream.getIndices()) { + IndexMetadata im = indices.get(index.getName()); + if (im != null && im.getAliases().isEmpty() == false) { + for (var alias : im.getAliases().values()) { + if (conflictingAliases == null) { + conflictingAliases = new LinkedList<>(); + } + conflictingAliases.add(alias.alias()); } } - - return false; - }) - .map(IndexAbstraction::getName) - .collect(Collectors.toList()); - if (conflictingAliases.isEmpty() == false) { - throw new IllegalStateException("aliases " + conflictingAliases + " cannot refer to backing indices of data streams"); + } + } + if (conflictingAliases != null) { + throw new AssertionError("aliases " + conflictingAliases + " cannot refer to backing indices of data streams"); } } + return true; } public static void toXContent(Metadata metadata, XContentBuilder builder, ToXContent.Params params) throws IOException { @@ -2128,5 +2290,4 @@ public Metadata fromXContent(XContentParser parser) throws IOException { return Builder.fromXContent(parser); } }; - } diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/IndexMetadataUpdater.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/IndexMetadataUpdater.java index 05a0286c26f38..7f90ffc46712c 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/IndexMetadataUpdater.java +++ 
b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/IndexMetadataUpdater.java @@ -131,7 +131,7 @@ public Metadata applyChanges(Metadata oldMetadata, RoutingTable newRoutingTable) if (metadataBuilder != null) { Metadata newMetadata = metadataBuilder.build(); - assert oldMetadata.getIndicesLookup() == newMetadata.getIndicesLookup(); + assert oldMetadata.sameIndicesLookup(newMetadata); return newMetadata; } else { return oldMetadata; diff --git a/server/src/main/java/org/elasticsearch/cluster/service/MasterService.java b/server/src/main/java/org/elasticsearch/cluster/service/MasterService.java index 7b559f9cce1bc..676b5eb937225 100644 --- a/server/src/main/java/org/elasticsearch/cluster/service/MasterService.java +++ b/server/src/main/java/org/elasticsearch/cluster/service/MasterService.java @@ -446,7 +446,6 @@ private ClusterState patchVersions(ClusterState previousClusterState, ClusterTas if (previousClusterState != newClusterState) { // only the master controls the version numbers - final var previousIndicesLookup = newClusterState.metadata().getIndicesLookup(); Builder builder = incrementVersion(newClusterState); if (previousClusterState.routingTable() != newClusterState.routingTable()) { builder.routingTable(newClusterState.routingTable().withIncrementedVersion()); @@ -455,8 +454,9 @@ private ClusterState patchVersions(ClusterState previousClusterState, ClusterTas builder.metadata(newClusterState.metadata().withIncrementedVersion()); } + final var previousMetadata = newClusterState.metadata(); newClusterState = builder.build(); - assert previousIndicesLookup == newClusterState.metadata().getIndicesLookup(); + assert previousMetadata.sameIndicesLookup(newClusterState.metadata()); } return newClusterState; diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexServiceTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexServiceTests.java index 939fda916d40b..4db0885f37c0f 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexServiceTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataCreateIndexServiceTests.java @@ -95,6 +95,7 @@ import static org.elasticsearch.cluster.metadata.MetadataCreateIndexService.resolveAndValidateAliases; import static org.elasticsearch.index.IndexSettings.INDEX_SOFT_DELETES_SETTING; import static org.elasticsearch.indices.ShardLimitValidatorTests.createTestShardLimitService; +import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.endsWith; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasKey; @@ -1019,6 +1020,14 @@ public void testClusterStateCreateIndex() { assertThat(updatedClusterState.blocks().getIndexBlockWithId("test", INDEX_READ_ONLY_BLOCK.id()), is(INDEX_READ_ONLY_BLOCK)); assertThat(updatedClusterState.routingTable().index("test"), is(notNullValue())); assertThat(allocationRerouted.get(), is(true)); + + Metadata metadata = updatedClusterState.metadata(); + IndexAbstraction alias = metadata.getIndicesLookup().get("alias1"); + assertNotNull(alias); + assertThat(alias.getType(), equalTo(IndexAbstraction.Type.ALIAS)); + Index index = metadata.index("test").getIndex(); + assertThat(alias.getIndices(), contains(index)); + assertThat(metadata.aliasedIndices("alias1"), contains(index)); } public void testClusterStateCreateIndexWithMetadataTransaction() { diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataDeleteIndexServiceTests.java 
b/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataDeleteIndexServiceTests.java index d9fd497cb27e1..d704ef141cc60 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataDeleteIndexServiceTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataDeleteIndexServiceTests.java @@ -37,6 +37,7 @@ import java.util.stream.IntStream; import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.nullValue; @@ -143,6 +144,7 @@ public void testDeleteIndexWithAnAlias() { assertNull(after.routingTable().index(index)); assertNull(after.blocks().indices().get(index)); assertNull(after.metadata().getIndicesLookup().get(alias)); + assertThat(after.metadata().aliasedIndices(alias), empty()); } public void testDeleteBackingIndexForDataStream() { diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataIndexAliasesServiceTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataIndexAliasesServiceTests.java index d88f0a8b3530c..da49f58f8a3ae 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataIndexAliasesServiceTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataIndexAliasesServiceTests.java @@ -33,6 +33,7 @@ import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.notNullValue; @@ -77,6 +78,7 @@ public void testAddAndRemove() { assertThat(alias.getType(), equalTo(IndexAbstraction.Type.ALIAS)); assertThat(alias.getIndices(), contains(after.metadata().index(index).getIndex())); assertAliasesVersionIncreased(index, before, after); + assertThat(after.metadata().aliasedIndices("test"), contains(after.metadata().index(index).getIndex())); // Remove the alias from it while adding another one before = after; @@ -85,17 +87,21 @@ public void testAddAndRemove() { Arrays.asList(new AliasAction.Remove(index, "test", null), new AliasAction.Add(index, "test_2", null, null, null, null, null)) ); assertNull(after.metadata().getIndicesLookup().get("test")); + assertThat(after.metadata().aliasedIndices("test"), empty()); alias = after.metadata().getIndicesLookup().get("test_2"); assertNotNull(alias); assertThat(alias.getType(), equalTo(IndexAbstraction.Type.ALIAS)); assertThat(alias.getIndices(), contains(after.metadata().index(index).getIndex())); assertAliasesVersionIncreased(index, before, after); + assertThat(after.metadata().aliasedIndices("test_2"), contains(after.metadata().index(index).getIndex())); // Now just remove on its own before = after; after = service.applyAliasActions(before, singletonList(new AliasAction.Remove(index, "test_2", randomBoolean()))); assertNull(after.metadata().getIndicesLookup().get("test")); + assertThat(after.metadata().aliasedIndices("test"), empty()); assertNull(after.metadata().getIndicesLookup().get("test_2")); + assertThat(after.metadata().aliasedIndices("test_2"), empty()); assertAliasesVersionIncreased(index, before, after); } diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataTests.java index 
aa1b4c4f4ba6b..f12b325c535bd 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataTests.java @@ -12,6 +12,7 @@ import org.elasticsearch.Version; import org.elasticsearch.action.admin.indices.alias.get.GetAliasesRequest; import org.elasticsearch.cluster.ClusterModule; +import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.Diff; import org.elasticsearch.cluster.coordination.CoordinationMetadata; import org.elasticsearch.cluster.coordination.CoordinationMetadata.VotingConfigExclusion; @@ -52,15 +53,15 @@ import java.util.Objects; import java.util.Set; import java.util.SortedMap; -import java.util.TreeMap; import java.util.concurrent.atomic.AtomicInteger; import java.util.stream.Collectors; +import java.util.stream.IntStream; import static org.elasticsearch.cluster.metadata.DataStreamTestHelper.createBackingIndex; import static org.elasticsearch.cluster.metadata.DataStreamTestHelper.createFirstBackingIndex; import static org.elasticsearch.cluster.metadata.DataStreamTestHelper.createTimestampField; import static org.elasticsearch.cluster.metadata.DataStreamTestHelper.newInstance; -import static org.elasticsearch.cluster.metadata.Metadata.Builder.validateDataStreams; +import static org.elasticsearch.cluster.metadata.Metadata.Builder.assertDataStreams; import static org.hamcrest.Matchers.allOf; import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.containsInAnyOrder; @@ -1178,7 +1179,7 @@ public void testBuilderRejectsAliasThatRefersToDataStreamBackingIndex() { .put(idx, false) .put(newInstance(dataStreamName, createTimestampField("@timestamp"), List.of(idx.getIndex()))); - IllegalStateException e = expectThrows(IllegalStateException.class, b::build); + AssertionError e = expectThrows(AssertionError.class, b::build); assertThat(e.getMessage(), containsString("aliases [" + conflictingName + "] cannot refer to backing indices of data streams")); } @@ -1399,7 +1400,7 @@ public void testSerialization() throws IOException { public void testValidateDataStreamsNoConflicts() { Metadata metadata = createIndices(5, 10, "foo-datastream").metadata; // don't expect any exception when validating a system without indices that would conflict with future backing indices - validateDataStreams(metadata.getIndicesLookup(), (DataStreamMetadata) metadata.customs().get(DataStreamMetadata.TYPE)); + assertDataStreams(metadata.getIndices(), (DataStreamMetadata) metadata.customs().get(DataStreamMetadata.TYPE)); } public void testValidateDataStreamsIgnoresIndicesWithoutCounter() { @@ -1423,7 +1424,7 @@ public void testValidateDataStreamsIgnoresIndicesWithoutCounter() { .build(); // don't expect any exception when validating against non-backing indices that don't conform to the backing indices naming // convention - validateDataStreams(metadata.getIndicesLookup(), (DataStreamMetadata) metadata.customs().get(DataStreamMetadata.TYPE)); + assertDataStreams(metadata.getIndices(), (DataStreamMetadata) metadata.customs().get(DataStreamMetadata.TYPE)); } public void testValidateDataStreamsAllowsNamesThatStartsWithPrefix() { @@ -1437,51 +1438,7 @@ public void testValidateDataStreamsAllowsNamesThatStartsWithPrefix() { .build(); // don't expect any exception when validating against (potentially backing) indices that can't create conflict because of // additional text before number - validateDataStreams(metadata.getIndicesLookup(), (DataStreamMetadata) 
metadata.customs().get(DataStreamMetadata.TYPE)); - } - - public void testValidateDataStreamsAllowsPrefixedBackingIndices() { - String dataStreamName = "foo-datastream"; - int generations = 10; - List backingIndices = new ArrayList<>(generations); - for (int i = 1; i <= generations; i++) { - IndexMetadata idx; - if (i % 2 == 0 && i < generations) { - idx = IndexMetadata.builder("shrink-" + DataStream.getDefaultBackingIndexName(dataStreamName, i)) - .settings(ESTestCase.settings(Version.CURRENT).put("index.hidden", true)) - .numberOfShards(1) - .numberOfReplicas(1) - .build(); - } else { - idx = createBackingIndex(dataStreamName, i).build(); - } - backingIndices.add(idx); - } - DataStream dataStream = newInstance( - dataStreamName, - createTimestampField("@timestamp"), - backingIndices.stream().map(IndexMetadata::getIndex).collect(Collectors.toList()) - ); - - IndexAbstraction.DataStream dataStreamAbstraction = new IndexAbstraction.DataStream(dataStream, List.of()); - // manually building the indices lookup as going through Metadata.Builder#build would trigger the validate method already - SortedMap indicesLookup = new TreeMap<>(); - for (IndexMetadata indexMeta : backingIndices) { - indicesLookup.put(indexMeta.getIndex().getName(), new IndexAbstraction.ConcreteIndex(indexMeta, dataStreamAbstraction)); - } - - for (int i = 1; i <= generations; i++) { - // for the indices that we added in the data stream with a "shrink-" prefix, add the non-prefixed indices to the lookup - if (i % 2 == 0 && i < generations) { - IndexMetadata indexMeta = createBackingIndex(dataStreamName, i).build(); - indicesLookup.put(indexMeta.getIndex().getName(), new IndexAbstraction.ConcreteIndex(indexMeta, dataStreamAbstraction)); - } - } - DataStreamMetadata dataStreamMetadata = new DataStreamMetadata(Map.of(dataStreamName, dataStream), Map.of()); - - // prefixed indices with a lower generation than the data stream's generation are allowed even if the non-prefixed, matching the - // data stream backing indices naming pattern, indices are already in the system - validateDataStreams(indicesLookup, dataStreamMetadata); + assertDataStreams(metadata.getIndices(), (DataStreamMetadata) metadata.customs().get(DataStreamMetadata.TYPE)); } public void testValidateDataStreamsForNullDataStreamMetadata() { @@ -1490,7 +1447,7 @@ public void testValidateDataStreamsForNullDataStreamMetadata() { .build(); try { - validateDataStreams(metadata.getIndicesLookup(), null); + assertDataStreams(metadata.getIndices(), null); } catch (Exception e) { fail("did not expect exception when validating a system without any data streams but got " + e.getMessage()); } @@ -1895,6 +1852,114 @@ public void testReuseIndicesLookup() { } } + public void testAliasedIndices() { + int numAliases = randomIntBetween(32, 64); + int numIndicesPerAlias = randomIntBetween(8, 16); + + Metadata.Builder builder = Metadata.builder(); + for (int i = 0; i < numAliases; i++) { + String aliasName = "alias-" + i; + for (int j = 0; j < numIndicesPerAlias; j++) { + AliasMetadata.Builder alias = new AliasMetadata.Builder(aliasName); + if (j == 0) { + alias.writeIndex(true); + } + + String indexName = aliasName + "-" + j; + builder.put( + IndexMetadata.builder(indexName) + .settings(settings(Version.CURRENT)) + .creationDate(randomNonNegativeLong()) + .numberOfShards(1) + .numberOfReplicas(0) + .putAlias(alias) + ); + } + } + + Metadata metadata = builder.build(); + for (int i = 0; i < numAliases; i++) { + String aliasName = "alias-" + i; + Set result = 
metadata.aliasedIndices(aliasName); + Index[] expected = IntStream.range(0, numIndicesPerAlias) + .mapToObj(j -> aliasName + "-" + j) + .map(name -> new Index(name, ClusterState.UNKNOWN_UUID)) + .toArray(Index[]::new); + assertThat(result, containsInAnyOrder(expected)); + } + + // Add a new alias and index + builder = Metadata.builder(metadata); + String newAliasName = "alias-new"; + { + builder.put( + IndexMetadata.builder(newAliasName + "-1") + .settings(settings(Version.CURRENT)) + .creationDate(randomNonNegativeLong()) + .numberOfShards(1) + .numberOfReplicas(0) + .putAlias(new AliasMetadata.Builder(newAliasName).writeIndex(true)) + ); + } + metadata = builder.build(); + assertThat(metadata.aliasedIndices(), hasSize(numAliases + 1)); + assertThat(metadata.aliasedIndices(newAliasName), contains(new Index(newAliasName + "-1", ClusterState.UNKNOWN_UUID))); + + // Remove the new alias/index + builder = Metadata.builder(metadata); + { + builder.remove(newAliasName + "-1"); + } + metadata = builder.build(); + assertThat(metadata.aliasedIndices(), hasSize(numAliases)); + assertThat(metadata.aliasedIndices(newAliasName), empty()); + + // Add a new alias that points to existing indices + builder = Metadata.builder(metadata); + { + IndexMetadata.Builder imBuilder = new IndexMetadata.Builder(metadata.index("alias-1-0")); + imBuilder.putAlias(new AliasMetadata.Builder(newAliasName)); + builder.put(imBuilder); + + imBuilder = new IndexMetadata.Builder(metadata.index("alias-2-1")); + imBuilder.putAlias(new AliasMetadata.Builder(newAliasName)); + builder.put(imBuilder); + + imBuilder = new IndexMetadata.Builder(metadata.index("alias-3-2")); + imBuilder.putAlias(new AliasMetadata.Builder(newAliasName)); + builder.put(imBuilder); + } + metadata = builder.build(); + assertThat(metadata.aliasedIndices(), hasSize(numAliases + 1)); + assertThat( + metadata.aliasedIndices(newAliasName), + containsInAnyOrder( + new Index("alias-1-0", ClusterState.UNKNOWN_UUID), + new Index("alias-2-1", ClusterState.UNKNOWN_UUID), + new Index("alias-3-2", ClusterState.UNKNOWN_UUID) + ) + ); + + // Remove the new alias that points to existing indices + builder = Metadata.builder(metadata); + { + IndexMetadata.Builder imBuilder = new IndexMetadata.Builder(metadata.index("alias-1-0")); + imBuilder.removeAlias(newAliasName); + builder.put(imBuilder); + + imBuilder = new IndexMetadata.Builder(metadata.index("alias-2-1")); + imBuilder.removeAlias(newAliasName); + builder.put(imBuilder); + + imBuilder = new IndexMetadata.Builder(metadata.index("alias-3-2")); + imBuilder.removeAlias(newAliasName); + builder.put(imBuilder); + } + metadata = builder.build(); + assertThat(metadata.aliasedIndices(), hasSize(numAliases)); + assertThat(metadata.aliasedIndices(newAliasName), empty()); + } + public static final String SYSTEM_ALIAS_NAME = "system_alias"; public void testHiddenAliasValidation() { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/CopyExecutionStateStep.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/CopyExecutionStateStep.java index bd95c0c64a9e1..b12b4c033e5a9 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/CopyExecutionStateStep.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/CopyExecutionStateStep.java @@ -97,7 +97,7 @@ public ClusterState performAction(Index index, ClusterState clusterState) { Metadata.Builder newMetadata = Metadata.builder(clusterState.getMetadata()) 
.put(IndexMetadata.builder(targetIndexMetadata).putCustom(ILM_CUSTOM_METADATA_KEY, relevantTargetCustomData.build().asMap())); - return ClusterState.builder(clusterState).metadata(newMetadata.build(false)).build(); + return ClusterState.builder(clusterState).metadata(newMetadata).build(); } @Override diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/CopySettingsStep.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/CopySettingsStep.java index 0be46f8c0bf4b..e9cbf4c29408b 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/CopySettingsStep.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/CopySettingsStep.java @@ -92,7 +92,7 @@ public ClusterState performAction(Index index, ClusterState clusterState) { .put( IndexMetadata.builder(targetIndexMetadata).settingsVersion(targetIndexMetadata.getSettingsVersion() + 1).settings(settings) ); - return ClusterState.builder(clusterState).metadata(newMetaData.build(false)).build(); + return ClusterState.builder(clusterState).metadata(newMetaData).build(); } @Override diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/GenerateSnapshotNameStep.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/GenerateSnapshotNameStep.java index d71ba55806ee7..cd75de3a0fcdf 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/GenerateSnapshotNameStep.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/GenerateSnapshotNameStep.java @@ -103,7 +103,6 @@ public ClusterState performAction(Index index, ClusterState clusterState) { .metadata( Metadata.builder(clusterState.getMetadata()) .put(IndexMetadata.builder(indexMetadata).putCustom(ILM_CUSTOM_METADATA_KEY, newCustomData.build().asMap())) - .build(false) ) .build(); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/GenerateUniqueIndexNameStep.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/GenerateUniqueIndexNameStep.java index 96a8c6f8471a3..b01d55982605b 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/GenerateUniqueIndexNameStep.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/GenerateUniqueIndexNameStep.java @@ -98,7 +98,7 @@ public ClusterState performAction(Index index, ClusterState clusterState) { IndexMetadata.Builder indexMetadataBuilder = IndexMetadata.builder(indexMetadata); indexMetadataBuilder.putCustom(ILM_CUSTOM_METADATA_KEY, newCustomData.build().asMap()); - newClusterStateBuilder.metadata(Metadata.builder(clusterState.getMetadata()).put(indexMetadataBuilder).build(false)); + newClusterStateBuilder.metadata(Metadata.builder(clusterState.getMetadata()).put(indexMetadataBuilder)); return newClusterStateBuilder.build(); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/InitializePolicyContextStep.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/InitializePolicyContextStep.java index 7f9869d93c0d0..07e699fbe26a8 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/InitializePolicyContextStep.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/InitializePolicyContextStep.java @@ -69,7 +69,7 @@ public ClusterState performAction(Index index, ClusterState clusterState) { newCustomData.setIndexCreationDate(indexMetadata.getCreationDate()); indexMetadataBuilder.putCustom(ILM_CUSTOM_METADATA_KEY, 
newCustomData.build().asMap()); - newClusterStateBuilder.metadata(Metadata.builder(clusterState.getMetadata()).put(indexMetadataBuilder).build(false)); + newClusterStateBuilder.metadata(Metadata.builder(clusterState.getMetadata()).put(indexMetadataBuilder)); return newClusterStateBuilder.build(); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/PhaseCacheManagement.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/PhaseCacheManagement.java index 8d6479182d5fc..2944f8516f0db 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/PhaseCacheManagement.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/PhaseCacheManagement.java @@ -54,7 +54,7 @@ public static ClusterState refreshPhaseDefinition( final IndexMetadata idxMeta = state.metadata().index(index); Metadata.Builder metadataBuilder = Metadata.builder(state.metadata()); refreshPhaseDefinition(metadataBuilder, idxMeta, updatedPolicy); - return ClusterState.builder(state).metadata(metadataBuilder.build(false)).build(); + return ClusterState.builder(state).metadata(metadataBuilder.build()).build(); } /** @@ -122,7 +122,7 @@ public static ClusterState updateIndicesForPolicy( ) { Metadata.Builder mb = Metadata.builder(state.metadata()); if (updateIndicesForPolicy(mb, state, xContentRegistry, client, oldPolicy, newPolicy, licenseState)) { - return ClusterState.builder(state).metadata(mb.build(false)).build(); + return ClusterState.builder(state).metadata(mb).build(); } return state; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/ReplaceDataStreamBackingIndexStep.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/ReplaceDataStreamBackingIndexStep.java index 84c2d074b82fc..1a4c58a91250b 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/ReplaceDataStreamBackingIndexStep.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/ReplaceDataStreamBackingIndexStep.java @@ -115,7 +115,7 @@ public ClusterState performAction(Index index, ClusterState clusterState) { Metadata.Builder newMetaData = Metadata.builder(clusterState.getMetadata()) .put(dataStream.getDataStream().replaceBackingIndex(index, targetIndexMetadata.getIndex())); - return ClusterState.builder(clusterState).metadata(newMetaData.build(false)).build(); + return ClusterState.builder(clusterState).metadata(newMetaData).build(); } @Override diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/UpdateRolloverLifecycleDateStep.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/UpdateRolloverLifecycleDateStep.java index 29ae815625437..793a3e9b6fe46 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/UpdateRolloverLifecycleDateStep.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/UpdateRolloverLifecycleDateStep.java @@ -74,9 +74,7 @@ public ClusterState performAction(Index index, ClusterState currentState) { IndexMetadata.Builder newIndexMetadata = IndexMetadata.builder(indexMetadata); newIndexMetadata.putCustom(ILM_CUSTOM_METADATA_KEY, newLifecycleState.build().asMap()); - return ClusterState.builder(currentState) - .metadata(Metadata.builder(currentState.metadata()).put(newIndexMetadata).build(false)) - .build(); + return ClusterState.builder(currentState).metadata(Metadata.builder(currentState.metadata()).put(newIndexMetadata)).build(); } private static String getRolloverTarget(Index 
index, ClusterState currentState) { diff --git a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/IndexLifecycleRunner.java b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/IndexLifecycleRunner.java index ed82a16fe237e..c66618185245f 100644 --- a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/IndexLifecycleRunner.java +++ b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/IndexLifecycleRunner.java @@ -66,8 +66,6 @@ class IndexLifecycleRunner { builder.failure(task, e); } } - // Trigger indices lookup creation and related validation - state.metadata().getIndicesLookup(); return builder.build(state); }; diff --git a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/IndexLifecycleTransition.java b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/IndexLifecycleTransition.java index bf81d3af7c02c..6c91c78d3644b 100644 --- a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/IndexLifecycleTransition.java +++ b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/IndexLifecycleTransition.java @@ -430,7 +430,6 @@ public static ClusterState.Builder newClusterStateWithLifecycleState( IndexMetadata.builder(clusterState.getMetadata().index(index)) .putCustom(ILM_CUSTOM_METADATA_KEY, lifecycleState.asMap()) ) - .build(false) ); return newClusterStateBuilder; } From 6b1f74821409fd4a86d91c2c8bd56bd768d1401d Mon Sep 17 00:00:00 2001 From: Ievgen Degtiarenko Date: Fri, 11 Feb 2022 10:43:06 +0100 Subject: [PATCH 058/167] repository integrity health indicator services (#83445) Adding implementations that will check snapshot repository corruption flag and report health based on it. Co-authored-by: Tanguy Leroux --- docs/changelog/83445.yaml | 5 + ...IntegrityHealthIndicatorServiceTestIT.java | 81 +++++++++++++ .../common/util/CollectionUtils.java | 3 + .../health/HealthIndicatorDetails.java | 2 +- .../java/org/elasticsearch/node/Node.java | 4 +- ...sitoryIntegrityHealthIndicatorService.java | 103 +++++++++++++++++ .../common/util/CollectionUtilsTests.java | 11 ++ ...yIntegrityHealthIndicatorServiceTests.java | 109 ++++++++++++++++++ .../AbstractSnapshotIntegTestCase.java | 4 + 9 files changed, 320 insertions(+), 2 deletions(-) create mode 100644 docs/changelog/83445.yaml create mode 100644 server/src/internalClusterTest/java/org/elasticsearch/snapshots/RepositoryIntegrityHealthIndicatorServiceTestIT.java create mode 100644 server/src/main/java/org/elasticsearch/snapshots/RepositoryIntegrityHealthIndicatorService.java create mode 100644 server/src/test/java/org/elasticsearch/snapshots/RepositoryIntegrityHealthIndicatorServiceTests.java diff --git a/docs/changelog/83445.yaml b/docs/changelog/83445.yaml new file mode 100644 index 0000000000000..8e6fcbfe6e2cf --- /dev/null +++ b/docs/changelog/83445.yaml @@ -0,0 +1,5 @@ +pr: 83445 +summary: Repository integrity health indicator services +area: Health +type: enhancement +issues: [] diff --git a/server/src/internalClusterTest/java/org/elasticsearch/snapshots/RepositoryIntegrityHealthIndicatorServiceTestIT.java b/server/src/internalClusterTest/java/org/elasticsearch/snapshots/RepositoryIntegrityHealthIndicatorServiceTestIT.java new file mode 100644 index 0000000000000..ae2fc82c75273 --- /dev/null +++ b/server/src/internalClusterTest/java/org/elasticsearch/snapshots/RepositoryIntegrityHealthIndicatorServiceTestIT.java @@ -0,0 +1,81 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.snapshots; + +import org.elasticsearch.client.internal.Client; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.health.GetHealthAction; +import org.elasticsearch.health.HealthStatus; +import org.elasticsearch.repositories.RepositoryData; +import org.elasticsearch.repositories.RepositoryException; +import org.elasticsearch.repositories.blobstore.BlobStoreRepository; + +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Path; + +import static org.elasticsearch.health.HealthStatus.GREEN; +import static org.elasticsearch.health.HealthStatus.RED; +import static org.elasticsearch.health.ServerHealthComponents.SNAPSHOT; +import static org.elasticsearch.snapshots.RepositoryIntegrityHealthIndicatorService.NAME; +import static org.elasticsearch.test.hamcrest.ThrowableAssertions.assertThatThrows; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; + +public class RepositoryIntegrityHealthIndicatorServiceTestIT extends AbstractSnapshotIntegTestCase { + + public void testRepositoryIntegrityHealthIndicator() throws IOException, InterruptedException { + + var client = client(); + + var repository = "test-repo"; + var location = randomRepoPath(); + + createRepository( + repository, + "fs", + Settings.builder() + .put("location", location) + .put("compress", false) + // Don't cache repository data because the test manually modifies the repository data + .put(BlobStoreRepository.CACHE_REPOSITORY_DATA.getKey(), false) + ); + + assertSnapshotRepositoryHealth("Indicator should be green after empty repository is created", client, GREEN); + + createIndex("test-index-1"); + indexRandomDocs("test-index-1", randomIntBetween(1, 10)); + createFullSnapshot(repository, "snapshot-1"); + + assertSnapshotRepositoryHealth("Indicator should be green after successful snapshot is taken", client, GREEN); + + corruptRepository(repository, location); + // Currently, the health indicator is not proactively checking the repository and + // instead relies on other operations to detect and flag repository corruption + assertThatThrows( + () -> createFullSnapshot(repository, "snapshot-2"), + RepositoryException.class, + containsString("[" + repository + "] Could not read repository data") + ); + + assertSnapshotRepositoryHealth("Indicator should be red after file is deleted from the repository", client, RED); + + deleteRepository(repository); + } + + private void assertSnapshotRepositoryHealth(String message, Client client, HealthStatus status) { + var response = client.execute(GetHealthAction.INSTANCE, new GetHealthAction.Request()).actionGet(); + assertThat(message, response.findComponent(SNAPSHOT).findIndicator(NAME).status(), equalTo(status)); + } + + private void corruptRepository(String name, Path location) throws IOException { + final RepositoryData repositoryData = getRepositoryData(name); + Files.delete(location.resolve("index-" + repositoryData.getGenId())); + } +} diff --git a/server/src/main/java/org/elasticsearch/common/util/CollectionUtils.java b/server/src/main/java/org/elasticsearch/common/util/CollectionUtils.java index 54954c80665e2..1dfc2398b5270 100644 --- a/server/src/main/java/org/elasticsearch/common/util/CollectionUtils.java +++ 
b/server/src/main/java/org/elasticsearch/common/util/CollectionUtils.java @@ -346,4 +346,7 @@ public static List wrapUnmodifiableOrEmptySingleton(List list) { return list.isEmpty() ? List.of() : Collections.unmodifiableList(list); } + public static List limitSize(List list, int size) { + return list.size() <= size ? list : list.subList(0, size); + } } diff --git a/server/src/main/java/org/elasticsearch/health/HealthIndicatorDetails.java b/server/src/main/java/org/elasticsearch/health/HealthIndicatorDetails.java index e085b9981e496..1817012cd6640 100644 --- a/server/src/main/java/org/elasticsearch/health/HealthIndicatorDetails.java +++ b/server/src/main/java/org/elasticsearch/health/HealthIndicatorDetails.java @@ -18,7 +18,7 @@ public interface HealthIndicatorDetails extends ToXContentObject { HealthIndicatorDetails EMPTY = new HealthIndicatorDetails() { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - return builder; + return builder.startObject().endObject(); } }; } diff --git a/server/src/main/java/org/elasticsearch/node/Node.java b/server/src/main/java/org/elasticsearch/node/Node.java index 3c4d699160799..e86ebc4494b11 100644 --- a/server/src/main/java/org/elasticsearch/node/Node.java +++ b/server/src/main/java/org/elasticsearch/node/Node.java @@ -167,6 +167,7 @@ import org.elasticsearch.search.fetch.FetchPhase; import org.elasticsearch.shutdown.PluginShutdownService; import org.elasticsearch.snapshots.InternalSnapshotsInfoService; +import org.elasticsearch.snapshots.RepositoryIntegrityHealthIndicatorService; import org.elasticsearch.snapshots.RestoreService; import org.elasticsearch.snapshots.SnapshotShardsService; import org.elasticsearch.snapshots.SnapshotsInfoService; @@ -901,7 +902,8 @@ protected Node( ); List serverHealthIndicatorServices = List.of( - new InstanceHasMasterHealthIndicatorService(clusterService) + new InstanceHasMasterHealthIndicatorService(clusterService), + new RepositoryIntegrityHealthIndicatorService(clusterService) ); List pluginHealthIndicatorServices = pluginsService.filterPlugins(HealthPlugin.class) .stream() diff --git a/server/src/main/java/org/elasticsearch/snapshots/RepositoryIntegrityHealthIndicatorService.java b/server/src/main/java/org/elasticsearch/snapshots/RepositoryIntegrityHealthIndicatorService.java new file mode 100644 index 0000000000000..d744dc215b3ac --- /dev/null +++ b/server/src/main/java/org/elasticsearch/snapshots/RepositoryIntegrityHealthIndicatorService.java @@ -0,0 +1,103 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.snapshots; + +import org.elasticsearch.cluster.metadata.RepositoriesMetadata; +import org.elasticsearch.cluster.metadata.RepositoryMetadata; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.health.HealthIndicatorDetails; +import org.elasticsearch.health.HealthIndicatorResult; +import org.elasticsearch.health.HealthIndicatorService; +import org.elasticsearch.health.SimpleHealthIndicatorDetails; +import org.elasticsearch.repositories.RepositoryData; + +import java.util.List; +import java.util.Map; + +import static org.elasticsearch.common.Strings.collectionToDelimitedStringWithLimit; +import static org.elasticsearch.common.util.CollectionUtils.limitSize; +import static org.elasticsearch.health.HealthStatus.GREEN; +import static org.elasticsearch.health.HealthStatus.RED; +import static org.elasticsearch.health.ServerHealthComponents.SNAPSHOT; + +/** + * This indicator reports health for snapshot repositories. + * + * The indicator reports a RED status when any of the snapshot repositories is marked as corrupted. + * Data might not be backed up in such cases. + * + * A corrupted repository most likely needs to be manually cleaned, and a new snapshot needs to be created from scratch. + */ +public class RepositoryIntegrityHealthIndicatorService implements HealthIndicatorService { + + public static final String NAME = "repository_integrity"; + + private final ClusterService clusterService; + + public RepositoryIntegrityHealthIndicatorService(ClusterService clusterService) { + this.clusterService = clusterService; + } + + @Override + public String name() { + return NAME; + } + + @Override + public String component() { + return SNAPSHOT; + } + + @Override + public HealthIndicatorResult calculate() { + var snapshotMetadata = clusterService.state().metadata().custom(RepositoriesMetadata.TYPE, RepositoriesMetadata.EMPTY); + + if (snapshotMetadata.repositories().isEmpty()) { + return createIndicator(GREEN, "No repositories configured.", HealthIndicatorDetails.EMPTY); + } + + var corrupted = snapshotMetadata.repositories() + .stream() + .filter(repository -> repository.generation() == RepositoryData.CORRUPTED_REPO_GEN) + .map(RepositoryMetadata::name) + .toList(); + + var totalRepositories = snapshotMetadata.repositories().size(); + var corruptedRepositories = corrupted.size(); + + if (corrupted.isEmpty()) { + return createIndicator( + GREEN, + "No corrupted repositories.", + new SimpleHealthIndicatorDetails(Map.of("total_repositories", totalRepositories)) + ); + } + + return createIndicator( + RED, + createCorruptedRepositorySummary(corrupted), + new SimpleHealthIndicatorDetails( + Map.of( + "total_repositories", + totalRepositories, + "corrupted_repositories", + corruptedRepositories, + "corrupted", + limitSize(corrupted, 10) + ) + ) + ); + } + + private static String createCorruptedRepositorySummary(List corrupted) { + var message = new StringBuilder().append("Detected [").append(corrupted.size()).append("] corrupted repositories: "); + collectionToDelimitedStringWithLimit(corrupted, ",", "[", "].", 1024, message); + return message.toString(); + } +} diff --git a/server/src/test/java/org/elasticsearch/common/util/CollectionUtilsTests.java b/server/src/test/java/org/elasticsearch/common/util/CollectionUtilsTests.java index 18af051fa70f1..8ab7d03fec62f 100644 --- a/server/src/test/java/org/elasticsearch/common/util/CollectionUtilsTests.java +++ b/server/src/test/java/org/elasticsearch/common/util/CollectionUtilsTests.java @@ -27,6 +27,7 @@
import static java.util.Collections.emptyMap; import static org.elasticsearch.common.util.CollectionUtils.eagerPartition; +import static org.elasticsearch.common.util.CollectionUtils.limitSize; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; @@ -177,4 +178,14 @@ public void testEnsureNoSelfReferences() { } } + + public void testLimitSizeOfShortList() { + var shortList = randomList(0, 10, () -> "item"); + assertThat(limitSize(shortList, 10), equalTo(shortList)); + } + + public void testLimitSizeOfLongList() { + var longList = randomList(10, 100, () -> "item"); + assertThat(limitSize(longList, 10), equalTo(longList.subList(0, 10))); + } } diff --git a/server/src/test/java/org/elasticsearch/snapshots/RepositoryIntegrityHealthIndicatorServiceTests.java b/server/src/test/java/org/elasticsearch/snapshots/RepositoryIntegrityHealthIndicatorServiceTests.java new file mode 100644 index 0000000000000..7c7d5f6395f43 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/snapshots/RepositoryIntegrityHealthIndicatorServiceTests.java @@ -0,0 +1,109 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.snapshots; + +import org.elasticsearch.cluster.ClusterName; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.metadata.Metadata; +import org.elasticsearch.cluster.metadata.RepositoriesMetadata; +import org.elasticsearch.cluster.metadata.RepositoryMetadata; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.health.HealthIndicatorDetails; +import org.elasticsearch.health.HealthIndicatorResult; +import org.elasticsearch.health.SimpleHealthIndicatorDetails; +import org.elasticsearch.test.ESTestCase; + +import java.util.List; +import java.util.Map; + +import static org.elasticsearch.common.util.CollectionUtils.appendToCopy; +import static org.elasticsearch.health.HealthStatus.GREEN; +import static org.elasticsearch.health.HealthStatus.RED; +import static org.elasticsearch.health.ServerHealthComponents.SNAPSHOT; +import static org.elasticsearch.repositories.RepositoryData.CORRUPTED_REPO_GEN; +import static org.elasticsearch.repositories.RepositoryData.EMPTY_REPO_GEN; +import static org.elasticsearch.snapshots.RepositoryIntegrityHealthIndicatorService.NAME; +import static org.hamcrest.Matchers.equalTo; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +public class RepositoryIntegrityHealthIndicatorServiceTests extends ESTestCase { + + public void testIsGreenWhenAllRepositoriesAreNotCorrupted() { + var repos = randomList(1, 10, () -> createRepositoryMetadata("healthy-repo", false)); + var clusterState = createClusterStateWith(new RepositoriesMetadata(repos)); + var service = createRepositoryCorruptionHealthIndicatorService(clusterState); + + assertThat( + service.calculate(), + equalTo( + new HealthIndicatorResult( + NAME, + SNAPSHOT, + GREEN, + "No corrupted repositories.", + new SimpleHealthIndicatorDetails(Map.of("total_repositories", repos.size())) + ) + ) + ); + } + + public void testIsRedWhenAtLeastOneRepoIsCorrupted() { + var repos = 
appendToCopy( + randomList(1, 10, () -> createRepositoryMetadata("healthy-repo", false)), + createRepositoryMetadata("corrupted-repo", true) + ); + var clusterState = createClusterStateWith(new RepositoriesMetadata(repos)); + var service = createRepositoryCorruptionHealthIndicatorService(clusterState); + + assertThat( + service.calculate(), + equalTo( + new HealthIndicatorResult( + NAME, + SNAPSHOT, + RED, + "Detected [1] corrupted repositories: [corrupted-repo].", + new SimpleHealthIndicatorDetails( + Map.of("total_repositories", repos.size(), "corrupted_repositories", 1, "corrupted", List.of("corrupted-repo")) + ) + ) + ) + ); + } + + public void testIsGreenWhenNoMetadata() { + var clusterState = createClusterStateWith(null); + var service = createRepositoryCorruptionHealthIndicatorService(clusterState); + + assertThat( + service.calculate(), + equalTo(new HealthIndicatorResult(NAME, SNAPSHOT, GREEN, "No repositories configured.", HealthIndicatorDetails.EMPTY)) + ); + } + + private static ClusterState createClusterStateWith(RepositoriesMetadata metadata) { + var builder = ClusterState.builder(new ClusterName("test-cluster")); + if (metadata != null) { + builder.metadata(Metadata.builder().putCustom(RepositoriesMetadata.TYPE, metadata)); + } + return builder.build(); + } + + private static RepositoryMetadata createRepositoryMetadata(String name, boolean corrupted) { + return new RepositoryMetadata(name, "uuid", "s3", Settings.EMPTY, corrupted ? CORRUPTED_REPO_GEN : EMPTY_REPO_GEN, EMPTY_REPO_GEN); + } + + private static RepositoryIntegrityHealthIndicatorService createRepositoryCorruptionHealthIndicatorService(ClusterState clusterState) { + var clusterService = mock(ClusterService.class); + when(clusterService.state()).thenReturn(clusterState); + return new RepositoryIntegrityHealthIndicatorService(clusterService); + } +} diff --git a/test/framework/src/main/java/org/elasticsearch/snapshots/AbstractSnapshotIntegTestCase.java b/test/framework/src/main/java/org/elasticsearch/snapshots/AbstractSnapshotIntegTestCase.java index faca8ead9f5e6..4c9e43900b2f0 100644 --- a/test/framework/src/main/java/org/elasticsearch/snapshots/AbstractSnapshotIntegTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/snapshots/AbstractSnapshotIntegTestCase.java @@ -334,6 +334,10 @@ public static void createRepository(Logger logger, String repoName, String type) createRepository(logger, repoName, type, randomRepositorySettings(), true); } + protected void deleteRepository(String repoName) { + assertAcked(client().admin().cluster().prepareDeleteRepository(repoName)); + } + public static Settings.Builder randomRepositorySettings() { final Settings.Builder settings = Settings.builder(); settings.put("location", randomRepoPath()).put("compress", randomBoolean()); From 586378b97d991acd649c9f6c9be881230e14656a Mon Sep 17 00:00:00 2001 From: David Turner Date: Fri, 11 Feb 2022 10:10:14 +0000 Subject: [PATCH 059/167] Submit batches of joins as single tasks (#83803) Today the `MasterService` permits clients to submit a batch of tasks which it guarantees to execute together, but the only place that this functionality is used in production code is for completing an election. It was done this way so that each join could succeed or fail independently, but since #83562 we can track the status of joins through to completion without needing them all to be separate tasks. This commit introduces a `JoinTask` which represents the whole batch of joins as a single task. 
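In outline, the two ways a `JoinTask` is now created look like this — a distilled sketch based on
the diff below, where `sender`, `reason`, `listener` and `pendingJoins` are illustrative names and
error handling is omitted:

```java
// A node joining an established leader becomes a single-node task:
JoinTask task = JoinTask.singleNode(sender, reason, listener);

// Completing an election wraps all accumulated joins into one task,
// with isBecomingMaster=true replacing the old sentinel tasks:
JoinTask election = JoinTask.completingElection(
    pendingJoins.entrySet()
        .stream()
        .map(e -> new JoinTask.NodeJoinTask(e.getKey(), reason, e.getValue()))
);
```

Either way, the resulting `JoinTask` is submitted to the `MasterService` as one unit.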
It also gives us a place to hang the strange `_FINISH_ELECTION_` task that was used to flag whether a batch was an election-completing batch or not. Relates #83784 --- .../cluster/coordination/JoinHelper.java | 66 +++------ .../cluster/coordination/JoinTask.java | 94 ++++++++++++ .../coordination/JoinTaskExecutor.java | 140 ++++++------------ .../coordination/JoinTaskExecutorTests.java | 4 +- .../metadata/AutoExpandReplicasTests.java | 2 +- .../indices/cluster/ClusterStateChanges.java | 46 +++--- ...ClusterStateServiceRandomUpdatesTests.java | 4 +- 7 files changed, 187 insertions(+), 169 deletions(-) create mode 100644 server/src/main/java/org/elasticsearch/cluster/coordination/JoinTask.java diff --git a/server/src/main/java/org/elasticsearch/cluster/coordination/JoinHelper.java b/server/src/main/java/org/elasticsearch/cluster/coordination/JoinHelper.java index 7ee8a10fd7c37..0de30b9cd7c40 100644 --- a/server/src/main/java/org/elasticsearch/cluster/coordination/JoinHelper.java +++ b/server/src/main/java/org/elasticsearch/cluster/coordination/JoinHelper.java @@ -15,7 +15,6 @@ import org.elasticsearch.action.support.ChannelActionListener; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterStateTaskConfig; -import org.elasticsearch.cluster.ClusterStateTaskListener; import org.elasticsearch.cluster.NotMasterException; import org.elasticsearch.cluster.coordination.Coordinator.Mode; import org.elasticsearch.cluster.metadata.Metadata; @@ -104,8 +103,7 @@ public class JoinHelper { private final long term = currentTermSupplier.getAsLong(); @Override - public ClusterTasksResult execute(ClusterState currentState, List joiningTasks) - throws Exception { + public ClusterTasksResult execute(ClusterState currentState, List joinTasks) { // The current state that MasterService uses might have been updated by a (different) master in a higher term already // Stop processing the current cluster state update, as there's no point in continuing to compute it as // it will later be rejected by Coordinator.publish(...) anyhow @@ -114,7 +112,7 @@ public ClusterTasksResult execute(ClusterState currentSta throw new NotMasterException( "Higher term encountered (current: " + currentState.term() + " > used: " + term + "), there is a newer master" ); - } else if (currentState.nodes().getMasterNodeId() == null && joiningTasks.stream().anyMatch(Task::isBecomeMasterTask)) { + } else if (currentState.nodes().getMasterNodeId() == null && joinTasks.stream().anyMatch(JoinTask::isBecomingMaster)) { assert currentState.term() < term : "there should be at most one become master task per election (= by term)"; final CoordinationMetadata coordinationMetadata = CoordinationMetadata.builder(currentState.coordinationMetadata()) .term(term) @@ -124,7 +122,7 @@ public ClusterTasksResult execute(ClusterState currentSta } else if (currentState.nodes().isLocalNodeElectedMaster()) { assert currentState.term() == term : "term should be stable for the same master"; } - return super.execute(currentState, joiningTasks); + return super.execute(currentState, joinTasks); } }; @@ -293,7 +291,7 @@ public void sendJoinRequest(DiscoveryNode destination, long term, Optional // Typically we're already connected to the destination at this point, the PeerFinder holds a reference to this connection to // keep it open, but we need to acquire our own reference to keep the connection alive through the joining process. 
- transportService.connectToNode(destination, new ActionListener() { + transportService.connectToNode(destination, new ActionListener<>() { @Override public void onResponse(Releasable connectionReference) { logger.trace("acquired connection for joining join {} with {}", destination, joinRequest); @@ -361,31 +359,6 @@ public void handleException(TransportException exp) { }); } - static class JoinTaskListener implements ClusterStateTaskListener { - private final JoinTaskExecutor.Task task; - private final ActionListener joinListener; - - JoinTaskListener(JoinTaskExecutor.Task task, ActionListener joinListener) { - this.task = task; - this.joinListener = joinListener; - } - - @Override - public void onFailure(Exception e) { - joinListener.onFailure(e); - } - - @Override - public void clusterStateProcessed(ClusterState oldState, ClusterState newState) { - joinListener.onResponse(null); - } - - @Override - public String toString() { - return "JoinTaskListener{task=" + task + "}"; - } - } - interface JoinAccumulator { void handleJoinRequest(DiscoveryNode sender, ActionListener joinListener); @@ -395,11 +368,7 @@ default void close(Mode newMode) {} class LeaderJoinAccumulator implements JoinAccumulator { @Override public void handleJoinRequest(DiscoveryNode sender, ActionListener joinListener) { - final JoinTaskExecutor.Task task = new JoinTaskExecutor.Task( - sender, - joinReasonService.getJoinReason(sender, Mode.LEADER), - joinListener - ); + final JoinTask task = JoinTask.singleNode(sender, joinReasonService.getJoinReason(sender, Mode.LEADER), joinListener); assert joinTaskExecutor != null; masterService.submitStateUpdateTask("node-join", task, ClusterStateTaskConfig.build(Priority.URGENT), joinTaskExecutor); } @@ -454,21 +423,20 @@ public void close(Mode newMode) { assert closed == false : "CandidateJoinAccumulator closed"; closed = true; if (newMode == Mode.LEADER) { - final List pendingAsTasks = new ArrayList<>(); - joinRequestAccumulator.forEach( - (node, listener) -> pendingAsTasks.add( - new JoinTaskExecutor.Task(node, joinReasonService.getJoinReason(node, Mode.CANDIDATE), listener) - ) - ); - - final String stateUpdateSource = "elected-as-master ([" + pendingAsTasks.size() + "] nodes joined)"; + final JoinTask joinTask = JoinTask.completingElection(joinRequestAccumulator.entrySet().stream().map(entry -> { + final DiscoveryNode discoveryNode = entry.getKey(); + final ActionListener listener = entry.getValue(); + return new JoinTask.NodeJoinTask( + discoveryNode, + joinReasonService.getJoinReason(discoveryNode, Mode.CANDIDATE), + listener + ); + })); - pendingAsTasks.add(JoinTaskExecutor.newBecomeMasterTask()); - pendingAsTasks.add(JoinTaskExecutor.newFinishElectionTask()); joinTaskExecutor = joinTaskExecutorGenerator.get(); - masterService.submitStateUpdateTasks( - stateUpdateSource, - pendingAsTasks, + masterService.submitStateUpdateTask( + "elected-as-master ([" + joinTask.nodeCount() + "] nodes joined)", + joinTask, ClusterStateTaskConfig.build(Priority.URGENT), joinTaskExecutor ); diff --git a/server/src/main/java/org/elasticsearch/cluster/coordination/JoinTask.java b/server/src/main/java/org/elasticsearch/cluster/coordination/JoinTask.java new file mode 100644 index 0000000000000..142823d878446 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/cluster/coordination/JoinTask.java @@ -0,0 +1,94 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.cluster.coordination; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.ClusterStateTaskListener; +import org.elasticsearch.cluster.node.DiscoveryNode; + +import java.util.Collections; +import java.util.List; +import java.util.Objects; +import java.util.stream.Stream; + +public record JoinTask(List nodeJoinTasks, boolean isBecomingMaster) implements ClusterStateTaskListener { + + public static JoinTask singleNode(DiscoveryNode node, String reason, ActionListener listener) { + return new JoinTask(List.of(new NodeJoinTask(node, reason, listener)), false); + } + + public static JoinTask completingElection(Stream nodeJoinTaskStream) { + return new JoinTask(nodeJoinTaskStream.toList(), true); + } + + public JoinTask(List nodeJoinTasks, boolean isBecomingMaster) { + this.nodeJoinTasks = Collections.unmodifiableList(nodeJoinTasks); + this.isBecomingMaster = isBecomingMaster; + } + + public int nodeCount() { + return nodeJoinTasks.size(); + } + + @Override + public void onFailure(Exception e) { + for (NodeJoinTask nodeJoinTask : nodeJoinTasks) { + nodeJoinTask.listener.onFailure(e); + } + } + + @Override + public void clusterStateProcessed(ClusterState oldState, ClusterState newState) { + assert false : "not called"; + } + + @Override + public String toString() { + final StringBuilder stringBuilder = new StringBuilder(); + + if (isBecomingMaster) { + stringBuilder.append("_FINISH_ELECTION_"); + } + + for (NodeJoinTask nodeJoinTask : nodeJoinTasks) { + if (stringBuilder.isEmpty() == false) { + stringBuilder.append(", "); + } + nodeJoinTask.appendDescription(stringBuilder); + } + + return stringBuilder.toString(); + } + + public Iterable nodes() { + return () -> nodeJoinTasks.stream().map(j -> j.node).iterator(); + } + + public record NodeJoinTask(DiscoveryNode node, String reason, ActionListener listener) { + + public NodeJoinTask(DiscoveryNode node, String reason, ActionListener listener) { + this.node = Objects.requireNonNull(node); + this.reason = reason; + this.listener = listener; + } + + @Override + public String toString() { + final StringBuilder stringBuilder = new StringBuilder(); + appendDescription(stringBuilder); + return stringBuilder.toString(); + } + + public void appendDescription(StringBuilder stringBuilder) { + node.appendDescriptionWithoutAttributes(stringBuilder); + stringBuilder.append(' ').append(reason); + } + } +} diff --git a/server/src/main/java/org/elasticsearch/cluster/coordination/JoinTaskExecutor.java b/server/src/main/java/org/elasticsearch/cluster/coordination/JoinTaskExecutor.java index 740dda3035666..2e54505cca556 100644 --- a/server/src/main/java/org/elasticsearch/cluster/coordination/JoinTaskExecutor.java +++ b/server/src/main/java/org/elasticsearch/cluster/coordination/JoinTaskExecutor.java @@ -13,7 +13,6 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterStateTaskExecutor; -import org.elasticsearch.cluster.ClusterStateTaskListener; import org.elasticsearch.cluster.NotMasterException; import org.elasticsearch.cluster.block.ClusterBlocks; import org.elasticsearch.cluster.metadata.IndexMetadata; @@ -37,73 +36,33 @@ import static 
org.elasticsearch.gateway.GatewayService.STATE_NOT_RECOVERED_BLOCK; -public class JoinTaskExecutor implements ClusterStateTaskExecutor { +public class JoinTaskExecutor implements ClusterStateTaskExecutor { private static final Logger logger = LogManager.getLogger(JoinTaskExecutor.class); private final AllocationService allocationService; private final RerouteService rerouteService; - public record Task(DiscoveryNode node, String reason, ActionListener listener) implements ClusterStateTaskListener { - - @Override - public String toString() { - if (node == null) { - return reason; - } - - final StringBuilder stringBuilder = new StringBuilder(); - node.appendDescriptionWithoutAttributes(stringBuilder); - stringBuilder.append(' ').append(reason); - return stringBuilder.toString(); - } - - public boolean isBecomeMasterTask() { - return reason.equals(BECOME_MASTER_TASK_REASON); - } - - public boolean isFinishElectionTask() { - return reason.equals(FINISH_ELECTION_TASK_REASON); - } - - private static final String BECOME_MASTER_TASK_REASON = "_BECOME_MASTER_TASK_"; - private static final String FINISH_ELECTION_TASK_REASON = "_FINISH_ELECTION_"; - - @Override - public void onFailure(Exception e) { - listener.onFailure(e); - } - - @Override - public void clusterStateProcessed(ClusterState oldState, ClusterState newState) { - listener.onResponse(null); - } - - } - public JoinTaskExecutor(AllocationService allocationService, RerouteService rerouteService) { this.allocationService = allocationService; this.rerouteService = rerouteService; } @Override - public ClusterTasksResult execute(ClusterState currentState, List joiningNodes) throws Exception { - final ClusterTasksResult.Builder results = ClusterTasksResult.builder(); + public ClusterTasksResult execute(ClusterState currentState, List joinTasks) { + final ClusterTasksResult.Builder results = ClusterTasksResult.builder(); + + final boolean isBecomingMaster = joinTasks.stream().anyMatch(JoinTask::isBecomingMaster); final DiscoveryNodes currentNodes = currentState.nodes(); boolean nodesChanged = false; ClusterState.Builder newState; - if (joiningNodes.size() == 1 && joiningNodes.get(0).isFinishElectionTask()) { - final Task task = joiningNodes.get(0); - return results.success(task, new LegacyClusterTaskResultActionListener(task, currentState)).build(currentState); - } else if (currentNodes.getMasterNode() == null && joiningNodes.stream().anyMatch(Task::isBecomeMasterTask)) { - assert joiningNodes.stream().anyMatch(Task::isFinishElectionTask) - : "becoming a master but election is not finished " + joiningNodes; + if (currentNodes.getMasterNode() == null && isBecomingMaster) { // use these joins to try and become the master. // Note that we don't have to do any validation of the amount of joining nodes - the commit // during the cluster state publishing guarantees that we have enough - newState = becomeMasterAndTrimConflictingNodes(currentState, joiningNodes); + newState = becomeMasterAndTrimConflictingNodes(currentState, joinTasks); nodesChanged = true; } else if (currentNodes.isLocalNodeElectedMaster() == false) { logger.trace("processing node joins, but we are not the master. 
current master: {}", currentNodes.getMasterNode()); @@ -122,34 +81,48 @@ public ClusterTasksResult execute(ClusterState currentState, List jo final boolean enforceVersionBarrier = currentState.getBlocks().hasGlobalBlock(STATE_NOT_RECOVERED_BLOCK) == false; // processing any joins Map joiniedNodeNameIds = new HashMap<>(); - for (final Task joinTask : joiningNodes) { - if (joinTask.isBecomeMasterTask() || joinTask.isFinishElectionTask()) { - // noop - } else if (currentNodes.nodeExistsWithSameRoles(joinTask.node())) { - logger.debug("received a join request for an existing node [{}]", joinTask.node()); - } else { - final DiscoveryNode node = joinTask.node(); - try { - if (enforceVersionBarrier) { - ensureVersionBarrier(node.getVersion(), minClusterNodeVersion); - } - ensureNodesCompatibility(node.getVersion(), minClusterNodeVersion, maxClusterNodeVersion); - // we do this validation quite late to prevent race conditions between nodes joining and importing dangling indices - // we have to reject nodes that don't support all indices we have in this cluster - ensureIndexCompatibility(node.getVersion(), currentState.getMetadata()); - nodesBuilder.add(node); - nodesChanged = true; - minClusterNodeVersion = Version.min(minClusterNodeVersion, node.getVersion()); - maxClusterNodeVersion = Version.max(maxClusterNodeVersion, node.getVersion()); - if (node.isMasterNode()) { - joiniedNodeNameIds.put(node.getName(), node.getId()); + for (final JoinTask joinTask : joinTasks) { + final List onTaskSuccess = new ArrayList<>(joinTask.nodeCount()); + for (final JoinTask.NodeJoinTask nodeJoinTask : joinTask.nodeJoinTasks()) { + final DiscoveryNode node = nodeJoinTask.node(); + if (currentNodes.nodeExistsWithSameRoles(node)) { + logger.debug("received a join request for an existing node [{}]", node); + } else { + try { + if (enforceVersionBarrier) { + ensureVersionBarrier(node.getVersion(), minClusterNodeVersion); + } + ensureNodesCompatibility(node.getVersion(), minClusterNodeVersion, maxClusterNodeVersion); + // we do this validation quite late to prevent race conditions between nodes joining and importing dangling indices + // we have to reject nodes that don't support all indices we have in this cluster + ensureIndexCompatibility(node.getVersion(), currentState.getMetadata()); + nodesBuilder.add(node); + nodesChanged = true; + minClusterNodeVersion = Version.min(minClusterNodeVersion, node.getVersion()); + maxClusterNodeVersion = Version.max(maxClusterNodeVersion, node.getVersion()); + if (node.isMasterNode()) { + joiniedNodeNameIds.put(node.getName(), node.getId()); + } + } catch (IllegalArgumentException | IllegalStateException e) { + onTaskSuccess.add(() -> nodeJoinTask.listener().onFailure(e)); + continue; } - } catch (IllegalArgumentException | IllegalStateException e) { - results.failure(joinTask, e); - continue; } + onTaskSuccess.add(() -> nodeJoinTask.listener().onResponse(null)); } - results.success(joinTask, new LegacyClusterTaskResultActionListener(joinTask, currentState)); + results.success(joinTask, new ActionListener<>() { + @Override + public void onResponse(ClusterState clusterState) { + for (Runnable joinCompleter : onTaskSuccess) { + joinCompleter.run(); + } + } + + @Override + public void onFailure(Exception e) { + joinTask.onFailure(e); + } + }); } if (nodesChanged) { @@ -203,17 +176,14 @@ public ClusterTasksResult execute(ClusterState currentState, List jo } } - protected ClusterState.Builder becomeMasterAndTrimConflictingNodes(ClusterState currentState, List joiningNodes) { + protected 
ClusterState.Builder becomeMasterAndTrimConflictingNodes(ClusterState currentState, List joinTasks) { assert currentState.nodes().getMasterNodeId() == null : currentState; DiscoveryNodes currentNodes = currentState.nodes(); DiscoveryNodes.Builder nodesBuilder = DiscoveryNodes.builder(currentNodes); nodesBuilder.masterNodeId(currentState.nodes().getLocalNodeId()); - for (final Task joinTask : joiningNodes) { - if (joinTask.isBecomeMasterTask() || joinTask.isFinishElectionTask()) { - // noop - } else { - final DiscoveryNode joiningNode = joinTask.node(); + for (final JoinTask joinTask : joinTasks) { + for (final DiscoveryNode joiningNode : joinTask.nodes()) { final DiscoveryNode nodeWithSameId = nodesBuilder.get(joiningNode.getId()); if (nodeWithSameId != null && nodeWithSameId.equals(joiningNode) == false) { logger.debug("removing existing node [{}], which conflicts with incoming join from [{}]", nodeWithSameId, joiningNode); @@ -249,18 +219,6 @@ public boolean runOnlyOnMaster() { return false; } - public static Task newBecomeMasterTask() { - return new Task(null, Task.BECOME_MASTER_TASK_REASON, ActionListener.wrap(() -> {})); - } - - /** - * a task that is used to signal the election is stopped and we should process pending joins. - * it may be used in combination with {@link JoinTaskExecutor#newBecomeMasterTask()} - */ - public static Task newFinishElectionTask() { - return new Task(null, Task.FINISH_ELECTION_TASK_REASON, ActionListener.wrap(() -> {})); - } - /** * Ensures that all indices are compatible with the given node version. This will ensure that all indices in the given metadata * will not be created with a newer version of elasticsearch as well as that all indices are newer or equal to the minimum index diff --git a/server/src/test/java/org/elasticsearch/cluster/coordination/JoinTaskExecutorTests.java b/server/src/test/java/org/elasticsearch/cluster/coordination/JoinTaskExecutorTests.java index 0fe9752c9979a..b5a0b429b2c9a 100644 --- a/server/src/test/java/org/elasticsearch/cluster/coordination/JoinTaskExecutorTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/coordination/JoinTaskExecutorTests.java @@ -172,10 +172,10 @@ public void testUpdatesNodeWithNewRoles() throws Exception { .nodes(DiscoveryNodes.builder().add(masterNode).localNodeId(masterNode.getId()).masterNodeId(masterNode.getId()).add(bwcNode)) .build(); - final ClusterStateTaskExecutor.ClusterTasksResult result = joinTaskExecutor.execute( + final ClusterStateTaskExecutor.ClusterTasksResult result = joinTaskExecutor.execute( clusterState, List.of( - new JoinTaskExecutor.Task( + JoinTask.singleNode( actualNode, "test", ActionListener.wrap(() -> { throw new AssertionError("should not complete publication"); }) diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/AutoExpandReplicasTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/AutoExpandReplicasTests.java index 128035c187089..db1410d2c3c48 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/AutoExpandReplicasTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/AutoExpandReplicasTests.java @@ -248,7 +248,7 @@ public void testOnlyAutoExpandAllocationFilteringAfterAllNodesUpgraded() { // is the // master - state = cluster.addNodes(state, Collections.singletonList(newNode)); + state = cluster.addNode(state, newNode); // use allocation filtering state = cluster.updateSettings( diff --git a/server/src/test/java/org/elasticsearch/indices/cluster/ClusterStateChanges.java 
b/server/src/test/java/org/elasticsearch/indices/cluster/ClusterStateChanges.java index a45b5e33af3c1..e5a891e1d1d95 100644 --- a/server/src/test/java/org/elasticsearch/indices/cluster/ClusterStateChanges.java +++ b/server/src/test/java/org/elasticsearch/indices/cluster/ClusterStateChanges.java @@ -48,6 +48,7 @@ import org.elasticsearch.cluster.action.shard.ShardStateAction.StartedShardEntry; import org.elasticsearch.cluster.action.shard.ShardStateAction.StartedShardUpdateTask; import org.elasticsearch.cluster.block.ClusterBlock; +import org.elasticsearch.cluster.coordination.JoinTask; import org.elasticsearch.cluster.coordination.JoinTaskExecutor; import org.elasticsearch.cluster.coordination.NodeRemovalClusterStateTaskExecutor; import org.elasticsearch.cluster.metadata.IndexMetadata; @@ -94,7 +95,6 @@ import org.elasticsearch.transport.TransportService; import org.elasticsearch.xcontent.NamedXContentRegistry; -import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.HashMap; @@ -349,39 +349,37 @@ public ClusterState reroute(ClusterState state, ClusterRerouteRequest request) { return execute(transportClusterRerouteAction, request, state); } - public ClusterState addNodes(ClusterState clusterState, List nodes) { + public ClusterState addNode(ClusterState clusterState, DiscoveryNode discoveryNode) { return runTasks( joinTaskExecutor, clusterState, - nodes.stream() - .map( - node -> new JoinTaskExecutor.Task( - node, - "dummy reason", - ActionListener.wrap(() -> { throw new AssertionError("should not complete publication"); }) - ) + List.of( + JoinTask.singleNode( + discoveryNode, + "dummy reason", + ActionListener.wrap(() -> { throw new AssertionError("should not complete publication"); }) ) - .toList() + ) ); } public ClusterState joinNodesAndBecomeMaster(ClusterState clusterState, List nodes) { - List joinNodes = new ArrayList<>(); - joinNodes.add(JoinTaskExecutor.newBecomeMasterTask()); - joinNodes.add(JoinTaskExecutor.newFinishElectionTask()); - joinNodes.addAll( - nodes.stream() - .map( - node -> new JoinTaskExecutor.Task( - node, - "dummy reason", - ActionListener.wrap(() -> { throw new AssertionError("should not complete publication"); }) - ) + return runTasks( + joinTaskExecutor, + clusterState, + List.of( + JoinTask.completingElection( + nodes.stream() + .map( + node -> new JoinTask.NodeJoinTask( + node, + "dummy reason", + ActionListener.wrap(() -> { throw new AssertionError("should not complete publication"); }) + ) + ) ) - .toList() + ) ); - - return runTasks(joinTaskExecutor, clusterState, joinNodes); } public ClusterState removeNodes(ClusterState clusterState, List nodes) { diff --git a/server/src/test/java/org/elasticsearch/indices/cluster/IndicesClusterStateServiceRandomUpdatesTests.java b/server/src/test/java/org/elasticsearch/indices/cluster/IndicesClusterStateServiceRandomUpdatesTests.java index d9bb3aa220317..57d82cb834b7f 100644 --- a/server/src/test/java/org/elasticsearch/indices/cluster/IndicesClusterStateServiceRandomUpdatesTests.java +++ b/server/src/test/java/org/elasticsearch/indices/cluster/IndicesClusterStateServiceRandomUpdatesTests.java @@ -463,7 +463,7 @@ public ClusterState randomlyUpdateClusterState( if (randomBoolean()) { // add node if (state.nodes().getSize() < 10) { - state = cluster.addNodes(state, Collections.singletonList(createNode())); + state = cluster.addNode(state, createNode()); updateNodes(state, clusterStateServiceMap, indicesServiceSupplier); } } else { @@ -476,7 +476,7 @@ public ClusterState 
randomlyUpdateClusterState( } if (randomBoolean()) { // and add it back - state = cluster.addNodes(state, Collections.singletonList(discoveryNode)); + state = cluster.addNode(state, discoveryNode); updateNodes(state, clusterStateServiceMap, indicesServiceSupplier); } } From dd4d442b05367bc0a019d25f9f37bd837907fcb7 Mon Sep 17 00:00:00 2001 From: David Turner Date: Fri, 11 Feb 2022 13:13:35 +0000 Subject: [PATCH 060/167] Accept only single tasks at master service (#83829) Today `MasterService` (and `TaskBatcher`) allow callers to submit a collection of tasks that will be executed all at once. Support for batches of tasks makes things more complicated than they need to be, noting that (since #83803) in production code we only ever submit single tasks. This commit specializes things to accept only single tasks. --- .../cluster/service/ClusterService.java | 4 +- .../cluster/service/MasterService.java | 66 +++------- .../cluster/service/TaskBatcher.java | 86 ++++--------- .../cluster/service/MasterServiceTests.java | 116 +++++------------- .../cluster/service/TaskBatcherTests.java | 98 +++++---------- .../cluster/service/TaskExecutorTests.java | 22 ++-- 6 files changed, 115 insertions(+), 277 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/cluster/service/ClusterService.java b/server/src/main/java/org/elasticsearch/cluster/service/ClusterService.java index 9f97e63c14a3b..24aa87f3bec36 100644 --- a/server/src/main/java/org/elasticsearch/cluster/service/ClusterService.java +++ b/server/src/main/java/org/elasticsearch/cluster/service/ClusterService.java @@ -28,8 +28,6 @@ import org.elasticsearch.node.Node; import org.elasticsearch.threadpool.ThreadPool; -import java.util.List; - public class ClusterService extends AbstractLifecycleComponent { private final MasterService masterService; @@ -259,7 +257,7 @@ public void submitStateUpdateTask( ClusterStateTaskConfig config, ClusterStateTaskExecutor executor ) { - masterService.submitStateUpdateTasks(source, List.of(task), config, executor); + masterService.submitStateUpdateTask(source, task, config, executor); } } diff --git a/server/src/main/java/org/elasticsearch/cluster/service/MasterService.java b/server/src/main/java/org/elasticsearch/cluster/service/MasterService.java index 676b5eb937225..85d7435b7fd5b 100644 --- a/server/src/main/java/org/elasticsearch/cluster/service/MasterService.java +++ b/server/src/main/java/org/elasticsearch/cluster/service/MasterService.java @@ -46,7 +46,6 @@ import org.elasticsearch.threadpool.ThreadPool; import java.util.Arrays; -import java.util.Collection; import java.util.List; import java.util.Locale; import java.util.Map; @@ -150,13 +149,9 @@ class Batcher extends TaskBatcher { } @Override - protected void onTimeout(List tasks, TimeValue timeout) { + protected void onTimeout(BatchedTask task, TimeValue timeout) { threadPool.generic() - .execute( - () -> tasks.forEach( - task -> ((UpdateTask) task).onFailure(new ProcessClusterEventTimeoutException(timeout, task.source)) - ) - ); + .execute(() -> ((UpdateTask) task).onFailure(new ProcessClusterEventTimeoutException(timeout, task.source))); } @Override @@ -506,7 +501,21 @@ public void submitStateUpdateTask( ClusterStateTaskConfig config, ClusterStateTaskExecutor executor ) { - submitStateUpdateTasks(source, List.of(task), config, executor); + if (lifecycle.started() == false) { + return; + } + final ThreadContext threadContext = threadPool.getThreadContext(); + final Supplier supplier = threadContext.newRestorableContext(true); + try 
(ThreadContext.StoredContext ignore = threadContext.stashContext()) { + threadContext.markAsSystemContext(); + taskBatcher.submitTask(taskBatcher.new UpdateTask(config.priority(), source, task, supplier, executor), config.timeout()); + } catch (EsRejectedExecutionException e) { + // ignore cases where we are shutting down..., there is really nothing interesting + // to be done here... + if (lifecycle.stoppedOrClosed() == false) { + throw e; + } + } } /** @@ -903,47 +912,6 @@ void onNoLongerMaster() { } } - /** - * Submits a batch of cluster state update tasks; submitted updates are guaranteed to be processed together, - * potentially with more tasks of the same executor. - * - * @param source the source of the cluster state update task - * @param tasks a collection of update tasks, which implement {@link ClusterStateTaskListener} so that they are notified when they - * are executed; tasks that also implement {@link ClusterStateAckListener} are notified on acks too. - * @param config the cluster state update task configuration - * @param executor the cluster state update task executor; tasks - * that share the same executor will be executed - * batches on this executor - * @param the type of the cluster state update task state - * - */ - public void submitStateUpdateTasks( - final String source, - final Collection tasks, - final ClusterStateTaskConfig config, - final ClusterStateTaskExecutor executor - ) { - if (lifecycle.started() == false) { - return; - } - final ThreadContext threadContext = threadPool.getThreadContext(); - final Supplier supplier = threadContext.newRestorableContext(true); - try (ThreadContext.StoredContext ignore = threadContext.stashContext()) { - threadContext.markAsSystemContext(); - - List safeTasks = tasks.stream() - .map(task -> taskBatcher.new UpdateTask(config.priority(), source, task, supplier, executor)) - .toList(); - taskBatcher.submitTasks(safeTasks, config.timeout()); - } catch (EsRejectedExecutionException e) { - // ignore cases where we are shutting down..., there is really nothing interesting - // to be done here... 
- if (lifecycle.stoppedOrClosed() == false) { - throw e; - } - } - } - private static class MasterServiceStarvationWatcher implements PrioritizedEsThreadPoolExecutor.StarvationWatcher { private final long warnThreshold; diff --git a/server/src/main/java/org/elasticsearch/cluster/service/TaskBatcher.java b/server/src/main/java/org/elasticsearch/cluster/service/TaskBatcher.java index 60ebffc9cbc0e..7c8aca8fb1469 100644 --- a/server/src/main/java/org/elasticsearch/cluster/service/TaskBatcher.java +++ b/server/src/main/java/org/elasticsearch/cluster/service/TaskBatcher.java @@ -19,15 +19,12 @@ import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; -import java.util.IdentityHashMap; import java.util.LinkedHashSet; import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.atomic.AtomicBoolean; -import java.util.function.Function; -import java.util.stream.Collectors; /** * Batching support for {@link PrioritizedEsThreadPoolExecutor} @@ -45,86 +42,50 @@ public TaskBatcher(Logger logger, PrioritizedEsThreadPoolExecutor threadExecutor this.threadExecutor = threadExecutor; } - public void submitTasks(List tasks, @Nullable TimeValue timeout) throws EsRejectedExecutionException { - if (tasks.isEmpty()) { - return; - } - final BatchedTask firstTask = tasks.get(0); - assert tasks.stream().allMatch(t -> t.batchingKey == firstTask.batchingKey) - : "tasks submitted in a batch should share the same batching key: " + tasks; - // convert to an identity map to check for dups based on task identity - - tasksPerBatchingKey.compute(firstTask.batchingKey, (k, existingTasks) -> { - assert assertNoDuplicateTasks(tasks, existingTasks); + public void submitTask(BatchedTask task, @Nullable TimeValue timeout) throws EsRejectedExecutionException { + tasksPerBatchingKey.compute(task.batchingKey, (k, existingTasks) -> { if (existingTasks == null) { - return Collections.synchronizedSet(new LinkedHashSet<>(tasks)); + existingTasks = Collections.synchronizedSet(new LinkedHashSet<>()); + } else { + assert assertNoDuplicateTasks(task, existingTasks); } - existingTasks.addAll(tasks); + existingTasks.add(task); return existingTasks; }); if (timeout != null) { - threadExecutor.execute(firstTask, timeout, () -> onTimeoutInternal(tasks, timeout)); + threadExecutor.execute(task, timeout, () -> onTimeoutInternal(task, timeout)); } else { - threadExecutor.execute(firstTask); + threadExecutor.execute(task); } } - private static boolean assertNoDuplicateTasks(List tasks, Set existingTasks) { - final Map tasksIdentity = tasks.stream() - .collect( - Collectors.toMap( - BatchedTask::getTask, - Function.identity(), - (a, b) -> { throw new AssertionError("cannot add duplicate task: " + a); }, - IdentityHashMap::new - ) - ); - if (existingTasks == null) { - return true; - } - for (BatchedTask existing : existingTasks) { - // check that there won't be two tasks with the same identity for the same batching key - BatchedTask duplicateTask = tasksIdentity.get(existing.getTask()); - assert duplicateTask == null - : "task [" - + duplicateTask.describeTasks(Collections.singletonList(existing)) - + "] with source [" - + duplicateTask.source - + "] is already queued"; + private static boolean assertNoDuplicateTasks(BatchedTask task, Set existingTasks) { + for (final var existingTask : existingTasks) { + assert existingTask.getTask() != task.getTask() + : "task [" + task.describeTasks(List.of(task)) + "] with source [" + task.source + "] is 
already queued"; } return true; } - private void onTimeoutInternal(List tasks, TimeValue timeout) { - final ArrayList toRemove = new ArrayList<>(); - for (BatchedTask task : tasks) { - if (task.processed.getAndSet(true) == false) { - logger.debug("task [{}] timed out after [{}]", task.source, timeout); - toRemove.add(task); - } - } - if (toRemove.isEmpty() == false) { - BatchedTask firstTask = toRemove.get(0); - Object batchingKey = firstTask.batchingKey; - assert tasks.stream().allMatch(t -> t.batchingKey == batchingKey) - : "tasks submitted in a batch should share the same batching key: " + tasks; - tasksPerBatchingKey.computeIfPresent(batchingKey, (key, existingTasks) -> { - toRemove.forEach(existingTasks::remove); - if (existingTasks.isEmpty()) { - return null; - } - return existingTasks; - }); - onTimeout(toRemove, timeout); + private void onTimeoutInternal(BatchedTask task, TimeValue timeout) { + if (task.processed.getAndSet(true)) { + return; } + + logger.debug("task [{}] timed out after [{}]", task.source, timeout); + tasksPerBatchingKey.computeIfPresent(task.batchingKey, (key, existingTasks) -> { + existingTasks.remove(task); + return existingTasks.isEmpty() ? null : existingTasks; + }); + onTimeout(task, timeout); } /** * Action to be implemented by the specific batching implementation. * All tasks have the same batching key. */ - protected abstract void onTimeout(List tasks, TimeValue timeout); + protected abstract void onTimeout(BatchedTask task, TimeValue timeout); void runIfNotProcessed(BatchedTask updateTask) { // if this task is already processed, it shouldn't execute other tasks with same batching key that arrived later, @@ -135,6 +96,7 @@ void runIfNotProcessed(BatchedTask updateTask) { final Set pending = tasksPerBatchingKey.remove(updateTask.batchingKey); if (pending != null) { // pending is a java.util.Collections.SynchronizedSet so we can safely iterate holding its mutex + // noinspection SynchronizationOnLocalVariableOrMethodParameter synchronized (pending) { for (BatchedTask task : pending) { if (task.processed.getAndSet(true) == false) { diff --git a/server/src/test/java/org/elasticsearch/cluster/service/MasterServiceTests.java b/server/src/test/java/org/elasticsearch/cluster/service/MasterServiceTests.java index 4da46f329026b..334cda2b05dc2 100644 --- a/server/src/test/java/org/elasticsearch/cluster/service/MasterServiceTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/service/MasterServiceTests.java @@ -56,7 +56,6 @@ import java.util.Collections; import java.util.List; import java.util.Map; -import java.util.Set; import java.util.concurrent.BrokenBarrierException; import java.util.concurrent.CountDownLatch; import java.util.concurrent.CyclicBarrier; @@ -67,13 +66,12 @@ import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicReference; import java.util.stream.Collectors; -import java.util.stream.IntStream; import static java.util.Collections.emptyMap; import static java.util.Collections.emptySet; -import static org.hamcrest.Matchers.anyOf; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasItem; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.instanceOf; @@ -534,14 +532,14 @@ public ClusterTasksResult execute(ClusterState currentState, List ta final var submitThreads = new Thread[between(1, 10)]; for (int i = 0; i < submitThreads.length; i++) { final var executor = randomFrom(executors); - final var 
tasks = randomList(1, 10, Task::new); - executor.addExpectedTaskCount(tasks.size()); + final var task = new Task(); + executor.addExpectedTaskCount(1); submitThreads[i] = new Thread(() -> { try { assertTrue(submissionLatch.await(10, TimeUnit.SECONDS)); - masterService.submitStateUpdateTasks( + masterService.submitStateUpdateTask( Thread.currentThread().getName(), - tasks, + task, ClusterStateTaskConfig.build(randomFrom(Priority.values())), executor ); @@ -656,21 +654,13 @@ class TaskExecutor implements ClusterStateTaskExecutor { private final AtomicInteger assigned = new AtomicInteger(); private final AtomicInteger batches = new AtomicInteger(); private final AtomicInteger published = new AtomicInteger(); - private final List> assignments = new ArrayList<>(); + private final List assignments = new ArrayList<>(); @Override public ClusterTasksResult execute(ClusterState currentState, List tasks) throws Exception { - int totalCount = 0; - for (Set group : assignments) { - long count = tasks.stream().filter(group::contains).count(); - assertThat( - "batched set should be executed together or not at all. Expected " + group + "s. Executing " + tasks, - count, - anyOf(equalTo(0L), equalTo((long) group.size())) - ); - totalCount += count; + for (Task task : tasks) { + assertThat("All tasks should belong to this executor", assignments, hasItem(task)); } - assertThat("All tasks should belong to this executor", totalCount, equalTo(tasks.size())); tasks.forEach(Task::execute); executed.addAndGet(tasks.size()); ClusterState maybeUpdatedClusterState = currentState; @@ -699,16 +689,16 @@ public void clusterStatePublished(ClusterStatePublicationEvent clusterPublicatio } // randomly assign tasks to executors - List>> assignments = new ArrayList<>(); + List> assignments = new ArrayList<>(); AtomicInteger totalTasks = new AtomicInteger(); for (int i = 0; i < numberOfThreads; i++) { for (int j = 0; j < taskSubmissionsPerThread; j++) { var executor = randomFrom(executors); - var tasks = Set.copyOf(randomList(1, 3, () -> new Task(totalTasks.getAndIncrement()))); + var task = new Task(totalTasks.getAndIncrement()); - assignments.add(Tuple.tuple(executor, tasks)); - executor.assigned.addAndGet(tasks.size()); - executor.assignments.add(tasks); + assignments.add(Tuple.tuple(executor, task)); + executor.assigned.incrementAndGet(); + executor.assignments.add(task); } } processedStatesLatch.set(new CountDownLatch(totalTasks.get())); @@ -723,24 +713,15 @@ public void clusterStatePublished(ClusterStatePublicationEvent clusterPublicatio barrier.await(); for (int j = 0; j < taskSubmissionsPerThread; j++) { var assignment = assignments.get(index * taskSubmissionsPerThread + j); - var tasks = assignment.v2(); + var task = assignment.v2(); var executor = assignment.v1(); - submittedTasks.addAndGet(tasks.size()); - if (tasks.size() == 1) { - masterService.submitStateUpdateTask( - threadName, - tasks.iterator().next(), - ClusterStateTaskConfig.build(randomFrom(Priority.values())), - executor - ); - } else { - masterService.submitStateUpdateTasks( - threadName, - tasks, - ClusterStateTaskConfig.build(randomFrom(Priority.values())), - executor - ); - } + submittedTasks.incrementAndGet(); + masterService.submitStateUpdateTask( + threadName, + task, + ClusterStateTaskConfig.build(randomFrom(Priority.values())), + executor + ); } barrier.await(); } catch (BrokenBarrierException | InterruptedException e) { @@ -836,26 +817,13 @@ public void onFailure(Exception e) { } ); - int toSubmit = taskCount; - - while (toSubmit > 0) { - final 
int batchSize = between(1, toSubmit); - toSubmit -= batchSize; + for (int i = 0; i < taskCount; i++) { try (ThreadContext.StoredContext ignored = threadContext.newStoredContext(false)) { final String testContextHeaderValue = randomAlphaOfLength(10); threadContext.putHeader(testContextHeaderName, testContextHeaderValue); - - final List tasks = IntStream.range(0, batchSize) - .mapToObj(i -> new Task(testContextHeaderValue)) - .collect(Collectors.toList()); - - final ClusterStateTaskConfig clusterStateTaskConfig = ClusterStateTaskConfig.build(Priority.NORMAL); - - if (batchSize == 1 && randomBoolean()) { - masterService.submitStateUpdateTask("test", tasks.get(0), clusterStateTaskConfig, executor); - } else { - masterService.submitStateUpdateTasks("test", tasks, clusterStateTaskConfig, executor); - } + final var task = new Task(testContextHeaderValue); + final var clusterStateTaskConfig = ClusterStateTaskConfig.build(Priority.NORMAL); + masterService.submitStateUpdateTask("test", task, clusterStateTaskConfig, executor); } } @@ -928,14 +896,11 @@ public void onFailure(Exception e) { int toSubmit = between(1, 10); final CountDownLatch publishSuccessCountdown = new CountDownLatch(toSubmit); - while (toSubmit > 0) { - final int batchSize = between(1, toSubmit); - toSubmit -= batchSize; + for (int i = 0; i < toSubmit; i++) { try (ThreadContext.StoredContext ignored = threadContext.newStoredContext(false)) { - final String testContextHeaderValue = randomAlphaOfLength(10); + final var testContextHeaderValue = randomAlphaOfLength(10); threadContext.putHeader(testContextHeaderName, testContextHeaderValue); - - final List tasks = IntStream.range(0, batchSize).mapToObj(i -> new Task(new ActionListener<>() { + final var task = new Task(new ActionListener<>() { @Override public void onResponse(ClusterState clusterState) { assertEquals(testContextHeaderValue, threadContext.getHeader(testContextHeaderName)); @@ -947,15 +912,10 @@ public void onResponse(ClusterState clusterState) { public void onFailure(Exception e) { throw new AssertionError(e); } - })).collect(Collectors.toList()); + }); final ClusterStateTaskConfig clusterStateTaskConfig = ClusterStateTaskConfig.build(Priority.NORMAL); - - if (batchSize == 1 && randomBoolean()) { - masterService.submitStateUpdateTask("test", tasks.get(0), clusterStateTaskConfig, executor); - } else { - masterService.submitStateUpdateTasks("test", tasks, clusterStateTaskConfig, executor); - } + masterService.submitStateUpdateTask("test", task, clusterStateTaskConfig, executor); } } @@ -976,14 +936,11 @@ public void onFailure(Exception e) { toSubmit = between(1, 10); final CountDownLatch publishFailureCountdown = new CountDownLatch(toSubmit); - while (toSubmit > 0) { - final int batchSize = between(1, toSubmit); - toSubmit -= batchSize; + for (int i = 0; i < toSubmit; i++) { try (ThreadContext.StoredContext ignored = threadContext.newStoredContext(false)) { final String testContextHeaderValue = randomAlphaOfLength(10); threadContext.putHeader(testContextHeaderName, testContextHeaderValue); - - final List tasks = IntStream.range(0, batchSize).mapToObj(i -> new Task(new ActionListener<>() { + final var task = new Task(new ActionListener<>() { @Override public void onResponse(ClusterState clusterState) { throw new AssertionError("should not succeed"); @@ -996,15 +953,10 @@ public void onFailure(Exception e) { assertThat(e.getMessage(), equalTo(exceptionMessage)); publishFailureCountdown.countDown(); } - })).collect(Collectors.toList()); + }); final ClusterStateTaskConfig 
clusterStateTaskConfig = ClusterStateTaskConfig.build(Priority.NORMAL); - - if (batchSize == 1 && randomBoolean()) { - masterService.submitStateUpdateTask("test", tasks.get(0), clusterStateTaskConfig, executor); - } else { - masterService.submitStateUpdateTasks("test", tasks, clusterStateTaskConfig, executor); - } + masterService.submitStateUpdateTask("test", task, clusterStateTaskConfig, executor); } } diff --git a/server/src/test/java/org/elasticsearch/cluster/service/TaskBatcherTests.java b/server/src/test/java/org/elasticsearch/cluster/service/TaskBatcherTests.java index 54894fcd65980..7eeae04ceba9e 100644 --- a/server/src/test/java/org/elasticsearch/cluster/service/TaskBatcherTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/service/TaskBatcherTests.java @@ -9,32 +9,25 @@ package org.elasticsearch.cluster.service; import org.apache.logging.log4j.Logger; -import org.apache.logging.log4j.message.ParameterizedMessage; import org.elasticsearch.cluster.ClusterStateTaskConfig; import org.elasticsearch.cluster.metadata.ProcessClusterEventTimeoutException; import org.elasticsearch.common.Priority; import org.elasticsearch.common.util.concurrent.PrioritizedEsThreadPoolExecutor; import org.elasticsearch.core.TimeValue; -import org.elasticsearch.core.Tuple; import org.junit.Before; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; -import java.util.HashMap; -import java.util.HashSet; import java.util.List; -import java.util.Map; -import java.util.Set; import java.util.concurrent.BrokenBarrierException; -import java.util.concurrent.CopyOnWriteArrayList; import java.util.concurrent.CountDownLatch; import java.util.concurrent.CyclicBarrier; import java.util.concurrent.Semaphore; +import java.util.concurrent.TimeUnit; import java.util.stream.Collectors; import static org.hamcrest.Matchers.containsString; -import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasToString; @@ -47,7 +40,7 @@ public void setUpBatchingTaskExecutor() throws Exception { taskBatcher = new TestTaskBatcher(logger, threadExecutor); } - class TestTaskBatcher extends TaskBatcher { + static class TestTaskBatcher extends TaskBatcher { TestTaskBatcher(Logger logger, PrioritizedEsThreadPoolExecutor threadExecutor) { super(logger, threadExecutor); @@ -58,20 +51,13 @@ class TestTaskBatcher extends TaskBatcher { protected void run(Object batchingKey, List tasks, String tasksSummary) { List updateTasks = (List) tasks; ((TestExecutor) batchingKey).execute(updateTasks.stream().map(t -> t.task).collect(Collectors.toList())); - updateTasks.forEach(updateTask -> updateTask.listener.processed(updateTask.source)); + updateTasks.forEach(updateTask -> updateTask.listener.processed()); } @Override - protected void onTimeout(List tasks, TimeValue timeout) { + protected void onTimeout(BatchedTask task, TimeValue timeout) { threadPool.generic() - .execute( - () -> tasks.forEach( - task -> ((UpdateTask) task).listener.onFailure( - task.source, - new ProcessClusterEventTimeoutException(timeout, task.source) - ) - ) - ); + .execute(() -> ((UpdateTask) task).listener.onFailure(new ProcessClusterEventTimeoutException(timeout, task.source))); } class UpdateTask extends BatchedTask { @@ -99,20 +85,7 @@ protected void submitTask(String source, TestTask testTask) { } private void submitTask(String source, T task, ClusterStateTaskConfig config, TestExecutor executor, TestListener listener) { - submitTasks(source, Collections.singletonMap(task, 
listener), config, executor); - } - - private void submitTasks( - final String source, - final Map tasks, - final ClusterStateTaskConfig config, - final TestExecutor executor - ) { - List safeTasks = tasks.entrySet() - .stream() - .map(e -> taskBatcher.new UpdateTask(config.priority(), source, e.getKey(), e.getValue(), executor)) - .collect(Collectors.toList()); - taskBatcher.submitTasks(safeTasks, config.timeout()); + taskBatcher.submitTask(taskBatcher.new UpdateTask(config.priority(), source, task, listener, executor), config.timeout()); } @Override @@ -144,7 +117,7 @@ public void execute(List tasks) { TaskExecutor executorB = new TaskExecutor(); final ClusterStateTaskConfig config = ClusterStateTaskConfig.build(Priority.NORMAL); - final TestListener noopListener = (source, e) -> { throw new AssertionError(e); }; + final TestListener noopListener = e -> { throw new AssertionError(e); }; // this blocks the cluster state queue, so we can set it up right submitTask("0", "A0", config, executorA, noopListener); // wait to be processed @@ -196,19 +169,16 @@ public void testTasksAreExecutedInOrder() throws BrokenBarrierException, Interru int tasksSubmittedPerThread = randomIntBetween(2, 1024); - CopyOnWriteArrayList> failures = new CopyOnWriteArrayList<>(); CountDownLatch updateLatch = new CountDownLatch(numberOfThreads * tasksSubmittedPerThread); final TestListener listener = new TestListener() { @Override - public void onFailure(String source, Exception e) { - logger.error(() -> new ParameterizedMessage("unexpected failure: [{}]", source), e); - failures.add(new Tuple<>(source, e)); - updateLatch.countDown(); + public void onFailure(Exception e) { + throw new AssertionError(e); } @Override - public void processed(String source) { + public void processed() { updateLatch.countDown(); } }; @@ -242,9 +212,7 @@ public void processed(String source) { // wait for all threads to finish barrier.await(); - updateLatch.await(); - - assertThat(failures, empty()); + assertTrue(updateLatch.await(10, TimeUnit.SECONDS)); for (int i = 0; i < numberOfThreads; i++) { assertEquals(tasksSubmittedPerThread, executors[i].tasks.size()); @@ -255,34 +223,24 @@ public void processed(String source) { } } - public void testSingleBatchSubmission() throws InterruptedException { - Map tasks = new HashMap<>(); - final int numOfTasks = randomInt(10); - final CountDownLatch latch = new CountDownLatch(numOfTasks); - Set usedKeys = new HashSet<>(numOfTasks); - for (int i = 0; i < numOfTasks; i++) { - int key = randomValueOtherThanMany(k -> usedKeys.contains(k), () -> randomInt(1024)); - tasks.put(key, new TestListener() { - @Override - public void processed(String source) { - latch.countDown(); - } - - @Override - public void onFailure(String source, Exception e) { - throw new AssertionError(e); - } - }); - usedKeys.add(key); - } - assert usedKeys.size() == numOfTasks; - + public void testSingleTaskSubmission() throws InterruptedException { + final CountDownLatch latch = new CountDownLatch(1); + final Integer task = randomInt(1024); TestExecutor executor = taskList -> { - assertThat(taskList.size(), equalTo(tasks.size())); - assertThat(taskList.stream().collect(Collectors.toSet()), equalTo(tasks.keySet())); + assertThat(taskList.size(), equalTo(1)); + assertThat(taskList.get(0), equalTo(task)); }; - submitTasks("test", tasks, ClusterStateTaskConfig.build(Priority.LANGUID), executor); + submitTask("test", task, ClusterStateTaskConfig.build(randomFrom(Priority.values())), executor, new TestListener() { + @Override + public void 
processed() { + latch.countDown(); + } + @Override + public void onFailure(Exception e) { + throw new AssertionError(e); + } + }); latch.await(); } @@ -295,12 +253,12 @@ public void testDuplicateSubmission() throws InterruptedException { SimpleTask task = new SimpleTask(1); TestListener listener = new TestListener() { @Override - public void processed(String source) { + public void processed() { latch.countDown(); } @Override - public void onFailure(String source, Exception e) { + public void onFailure(Exception e) { throw new AssertionError(e); } }; diff --git a/server/src/test/java/org/elasticsearch/cluster/service/TaskExecutorTests.java b/server/src/test/java/org/elasticsearch/cluster/service/TaskExecutorTests.java index 9fd8de3679b9c..8872bab724020 100644 --- a/server/src/test/java/org/elasticsearch/cluster/service/TaskExecutorTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/service/TaskExecutorTests.java @@ -70,9 +70,9 @@ public void shutDownThreadExecutor() { } protected interface TestListener { - void onFailure(String source, Exception e); + void onFailure(Exception e); - default void processed(String source) { + default void processed() { // do nothing by default } } @@ -129,7 +129,7 @@ class UpdateTask extends SourcePrioritizedRunnable { public void run() { logger.trace("will process {}", source); testTask.execute(Collections.singletonList(testTask)); - testTask.processed(source); + testTask.processed(); } } @@ -140,7 +140,7 @@ protected void submitTask(String source, TestTask testTask) { if (timeout != null) { threadExecutor.execute(task, timeout, () -> threadPool.generic().execute(() -> { logger.debug("task [{}] timed out after [{}]", task, timeout); - testTask.onFailure(source, new ProcessClusterEventTimeoutException(timeout, source)); + testTask.onFailure(new ProcessClusterEventTimeoutException(timeout, source)); })); } else { threadExecutor.execute(task); @@ -163,7 +163,7 @@ public void run() { } @Override - public void onFailure(String source, Exception e) { + public void onFailure(Exception e) { throw new RuntimeException(e); } }; @@ -178,7 +178,7 @@ public void run() { } @Override - public void onFailure(String source, Exception e) { + public void onFailure(Exception e) { block2.countDown(); } @@ -207,7 +207,7 @@ public void run() { } @Override - public void onFailure(String source, Exception e) { + public void onFailure(Exception e) { throw new RuntimeException(e); } }; @@ -228,7 +228,7 @@ public void run() { } @Override - public void onFailure(String source, Exception e) { + public void onFailure(Exception e) { timedOut.countDown(); } }; @@ -245,7 +245,7 @@ public void run() { } @Override - public void onFailure(String source, Exception e) { + public void onFailure(Exception e) { throw new RuntimeException(e); } }; @@ -312,7 +312,7 @@ public void run() { } @Override - public void onFailure(String source, Exception e) {} + public void onFailure(Exception e) {} @Override public Priority priority() { @@ -349,7 +349,7 @@ public Priority priority() { } @Override - public void onFailure(String source, Exception e) { + public void onFailure(Exception e) { latch.countDown(); } } From 62943fe85c04973825b098ac67d89e2f54bdb42c Mon Sep 17 00:00:00 2001 From: Rory Hunter Date: Fri, 11 Feb 2022 13:29:22 +0000 Subject: [PATCH 061/167] Add changelog (#83830) Having a `CHANGELOG.md` file in the root of a code repository is a very common practice. Elasticsearch doesn't follow this for good reasons, and publishes extensive release notes on the website. 
However we can be more helpful to users browsing the repository by pointing them to the release notes. --- CHANGELOG.md | 3 +++ 1 file changed, 3 insertions(+) create mode 100644 CHANGELOG.md diff --git a/CHANGELOG.md b/CHANGELOG.md new file mode 100644 index 0000000000000..1a965ee4b6eb0 --- /dev/null +++ b/CHANGELOG.md @@ -0,0 +1,3 @@ +# Elasticsearch Changelog + +Please see the [release notes](https://www.elastic.co/guide/en/elasticsearch/reference/current/es-release-notes.html) in the reference manual. From a91e692779de0782871ecec36902ad05739a8672 Mon Sep 17 00:00:00 2001 From: Luca Cavanna Date: Fri, 11 Feb 2022 15:15:17 +0100 Subject: [PATCH 062/167] Completion field to support multiple completion multi-fields (#83595) The completion field supports multi-fields. In case the sub-field is another completion field, the whole object structure from the incoming document needs to be provided as input to the sub-field while parsing it. We have a special XContentParser for this scenario, but it does not handle returning the same object structure multiple times. That is why you can have only one completion sub-field within a completion field, and the error returned when this mechanism breaks is a general parsing error (mentioning a field called dummy_field) that makes users think they have done something wrong in their document. This commit expands testing for this scenario and extends the parser to support it. As part of this change, a new parser is created for each sub-field, which makes it possible to expose the same object structure multiple times, for instance when a completion field has more than one completion sub-field. Additionally, the wrapping of both the geo_point multi field parser and the completion multi field parser into a dummy_field object is removed in favour of returning the correct currentName of the main field we are parsing. Finally, getTokenLocation is tweaked to return the location of the field we are parsing in the document, so that error messages become clearer when things go wrong.
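For a concrete picture of the newly supported shape, a minimal sketch follows, written in the same XContentBuilder style the tests use. It is illustrative only: the `field`, `sub1` and `sub2` names are not taken from this patch, and the statements assume a context (such as a test body) where IOException may propagate.

import org.elasticsearch.xcontent.XContentBuilder;
import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder;

// A completion field declaring two completion sub-fields: with this change,
// indexing { "field": "new york" } feeds the same input to "field",
// "field.sub1" and "field.sub2" instead of failing with the parsing error
// that mentioned dummy_field.
XContentBuilder mapping = jsonBuilder().startObject()
    .startObject("field").field("type", "completion")
    .startObject("fields")
    .startObject("sub1").field("type", "completion").endObject()
    .startObject("sub2").field("type", "completion").endObject()
    .endObject()
    .endObject()
    .endObject();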
Closes #83534 --- docs/changelog/83595.yaml | 6 + .../xcontent/support/MapXContentParser.java | 17 +- .../mapper/AbstractGeometryFieldMapper.java | 17 +- .../index/mapper/CompletionFieldMapper.java | 100 +++++++- .../index/mapper/FieldMapper.java | 10 +- .../index/mapper/GeoPointFieldMapper.java | 35 ++- .../mapper/CompletionFieldMapperTests.java | 238 +++++++++++++----- .../mapper/GeoPointFieldMapperTests.java | 51 ++++ 8 files changed, 381 insertions(+), 93 deletions(-) create mode 100644 docs/changelog/83595.yaml diff --git a/docs/changelog/83595.yaml b/docs/changelog/83595.yaml new file mode 100644 index 0000000000000..978583755249b --- /dev/null +++ b/docs/changelog/83595.yaml @@ -0,0 +1,6 @@ +pr: 83595 +summary: Completion field to support multiple completion multi-fields +area: Mapping +type: bug +issues: + - 83534 diff --git a/libs/x-content/src/main/java/org/elasticsearch/xcontent/support/MapXContentParser.java b/libs/x-content/src/main/java/org/elasticsearch/xcontent/support/MapXContentParser.java index 1250c4cae6bc2..bcfd214506ba8 100644 --- a/libs/x-content/src/main/java/org/elasticsearch/xcontent/support/MapXContentParser.java +++ b/libs/x-content/src/main/java/org/elasticsearch/xcontent/support/MapXContentParser.java @@ -11,14 +11,12 @@ import org.elasticsearch.xcontent.DeprecationHandler; import org.elasticsearch.xcontent.NamedXContentRegistry; import org.elasticsearch.xcontent.XContentLocation; -import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentType; import java.io.IOException; import java.math.BigDecimal; import java.math.BigInteger; import java.nio.CharBuffer; -import java.util.Collections; import java.util.Iterator; import java.util.List; import java.util.Map; @@ -28,23 +26,10 @@ */ public class MapXContentParser extends AbstractXContentParser { - private XContentType xContentType; + private final XContentType xContentType; private TokenIterator iterator; private boolean closed; - public static XContentParser wrapObject(Object sourceMap) throws IOException { - XContentParser parser = new MapXContentParser( - NamedXContentRegistry.EMPTY, - DeprecationHandler.IGNORE_DEPRECATIONS, - Collections.singletonMap("dummy_field", sourceMap), - XContentType.JSON - ); - parser.nextToken(); // start object - parser.nextToken(); // field name - parser.nextToken(); // field value - return parser; - } - public MapXContentParser( NamedXContentRegistry xContentRegistry, DeprecationHandler deprecationHandler, diff --git a/server/src/main/java/org/elasticsearch/index/mapper/AbstractGeometryFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/AbstractGeometryFieldMapper.java index ea066d3543735..bf7b0baf4a818 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/AbstractGeometryFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/AbstractGeometryFieldMapper.java @@ -13,7 +13,10 @@ import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.index.analysis.NamedAnalyzer; import org.elasticsearch.index.query.SearchExecutionContext; +import org.elasticsearch.xcontent.DeprecationHandler; +import org.elasticsearch.xcontent.NamedXContentRegistry; import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xcontent.support.MapXContentParser; import java.io.IOException; @@ -53,7 +56,7 @@ public abstract void parse(XContentParser parser, CheckedConsumer consumer) { - try (XContentParser parser = MapXContentParser.wrapObject(sourceMap)) 
{ + try (XContentParser parser = wrapObject(sourceMap)) { parse(parser, v -> consumer.accept(normalizeFromSource(v)), e -> {}); /* ignore malformed */ } catch (IOException e) { throw new UncheckedIOException(e); @@ -67,6 +70,18 @@ private void fetchFromSource(Object sourceMap, Consumer consumer) { // TODO: move geometry normalization to the geometry parser. public abstract T normalizeFromSource(T geometry); + private static XContentParser wrapObject(Object sourceMap) throws IOException { + XContentParser parser = new MapXContentParser( + NamedXContentRegistry.EMPTY, + DeprecationHandler.IGNORE_DEPRECATIONS, + Collections.singletonMap("dummy_field", sourceMap), + XContentType.JSON + ); + parser.nextToken(); // start object + parser.nextToken(); // field name + parser.nextToken(); // field value + return parser; + } } public abstract static class AbstractGeometryFieldType extends MappedFieldType { diff --git a/server/src/main/java/org/elasticsearch/index/mapper/CompletionFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/CompletionFieldMapper.java index 9dc019ba9ccbd..c685d438dfd5e 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/CompletionFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/CompletionFieldMapper.java @@ -30,11 +30,15 @@ import org.elasticsearch.search.suggest.completion.CompletionSuggester; import org.elasticsearch.search.suggest.completion.context.ContextMapping; import org.elasticsearch.search.suggest.completion.context.ContextMappings; -import org.elasticsearch.xcontent.FilterXContentParser; +import org.elasticsearch.xcontent.DelegatingXContentParser; +import org.elasticsearch.xcontent.DeprecationHandler; +import org.elasticsearch.xcontent.NamedXContentRegistry; import org.elasticsearch.xcontent.ToXContent; +import org.elasticsearch.xcontent.XContentLocation; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentParser.NumberType; import org.elasticsearch.xcontent.XContentParser.Token; +import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xcontent.support.MapXContentParser; import java.io.IOException; @@ -425,8 +429,11 @@ public void parse(DocumentParserContext context) throws IOException { context.addToFieldNames(fieldType().name()); for (CompletionInputMetadata metadata : inputMap.values()) { - DocumentParserContext externalValueContext = context.switchParser(new CompletionParser(metadata)); - multiFields.parse(this, externalValueContext); + multiFields.parse( + this, + context, + () -> context.switchParser(new MultiFieldParser(metadata, fieldType().name(), context.parser().getTokenLocation())) + ); } } @@ -586,19 +593,66 @@ public void doValidate(MappingLookup mappers) { } } - private static class CompletionParser extends FilterXContentParser { + /** + * Parser that exposes the expected format depending on the type of multi-field that is consuming content. + * Completion fields can hold multi-fields, which can either parse a simple string value or an object in case of another completion + * field. This parser detects which of the two is parsing content and exposes the full object when needed (including input, weight + * and context if available), otherwise the input value only. + * + * A few assumptions are made that make this work: + * 1) only string values are supported for a completion field, hence only sub-fields that parse strings are supported + * 2) sub-fields that parse simple values only ever call {@link #textOrNull()} to do so. 
They may call {@link #currentToken()} only to + * check if there's a null value, which is irrelevant in the multi-fields scenario as null values are ignored in the parent field and + * don't lead to any field creation. + * 3) completion is the only sub-field type that may be parsing the object structure. + * + * The parser is set to expose by default simple value, unless {@link #nextToken()} is called which is what signals that the + * consumer supports the object structure. + */ + // This parser changes behaviour depending on which methods are called by consumers, which is extremely delicate. This kind of works for + // our internal mappers, but what about mappers from plugins + static class MultiFieldParser extends DelegatingXContentParser { + private final String textValue; + private final String fieldName; + private final XContentLocation locationOffset; + private final XContentParser fullObjectParser; + // we assume that the consumer is parsing values, we will switch to exposing the object format if nextToken is called + private boolean parsingObject = false; + + MultiFieldParser(CompletionInputMetadata metadata, String fieldName, XContentLocation locationOffset) { + this.fullObjectParser = new MapXContentParser( + NamedXContentRegistry.EMPTY, + DeprecationHandler.IGNORE_DEPRECATIONS, + metadata.toMap(), + XContentType.JSON + ); + this.fieldName = fieldName; + this.locationOffset = locationOffset; + this.textValue = metadata.input; + } - boolean advanced = false; - final String textValue; + @Override + protected XContentParser delegate() { + // if consumers are only reading values, they should never go through delegate and rather call the + // overridden currentToken and textOrNull below that don't call super + assert parsingObject; + return fullObjectParser; + } - private CompletionParser(CompletionInputMetadata metadata) throws IOException { - super(MapXContentParser.wrapObject(metadata.toMap())); - this.textValue = metadata.input; + @Override + public Token currentToken() { + if (parsingObject == false) { + // nextToken has not been called, it may or may not be called at a later time. + // What we return does not really matter for mappers that support simple values, as they only check for VALUE_NULL. + // For mappers that do support objects, START_OBJECT is a good choice. 
+ return Token.START_OBJECT; + } + return super.currentToken(); } @Override public String textOrNull() throws IOException { - if (advanced == false) { + if (parsingObject == false) { return textValue; } return super.textOrNull(); @@ -606,8 +660,32 @@ public String textOrNull() throws IOException { @Override public Token nextToken() throws IOException { - advanced = true; + if (parsingObject == false) { + // a completion sub-field is parsing + parsingObject = true; + // move to START_OBJECT, currentToken has already returned START_OBJECT and we will advance one token further just below + this.fullObjectParser.nextToken(); + } return super.nextToken(); } + + @Override + public String currentName() throws IOException { + if (parsingObject == false) { + return fieldName; + } + String currentName = super.currentName(); + if (currentName == null && currentToken() == Token.END_OBJECT) { + return fieldName; + } + return currentName; + } + + @Override + public XContentLocation getTokenLocation() { + // return fixed token location: it's not possible to match the token location while parsing through the object structure, + // because completion metadata have been rewritten hence they won't match the incoming document + return locationOffset; + } } } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java index 3ce33fb26c4b4..3a001e99236a2 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java @@ -255,7 +255,7 @@ public void parse(DocumentParserContext context) throws IOException { valuePreview ); } - multiFields.parse(this, context); + multiFields.parse(this, context, () -> context); } /** @@ -449,7 +449,7 @@ public final Map indexAnalyzers() { return indexAnalyzers; } - public static class MultiFields implements Iterable, ToXContent { + public static final class MultiFields implements Iterable, ToXContent { private static final MultiFields EMPTY = new MultiFields(Collections.emptyMap()); @@ -507,16 +507,16 @@ private MultiFields(Map mappers) { this.mappers = mappers; } - public void parse(FieldMapper mainField, DocumentParserContext context) throws IOException { + public void parse(FieldMapper mainField, DocumentParserContext context, Supplier multiFieldContextSupplier) + throws IOException { // TODO: multi fields are really just copy fields, we just need to expose "sub fields" or something that can be part // of the mappings if (mappers.isEmpty()) { return; } - context.path().add(mainField.simpleName()); for (FieldMapper mapper : mappers.values()) { - mapper.parse(context); + mapper.parse(multiFieldContextSupplier.get()); } context.path().remove(); } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/GeoPointFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/GeoPointFieldMapper.java index ee2e33c3ed3a8..cff2cbdc25359 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/GeoPointFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/GeoPointFieldMapper.java @@ -42,9 +42,9 @@ import org.elasticsearch.search.lookup.FieldValues; import org.elasticsearch.search.lookup.SearchLookup; import org.elasticsearch.search.runtime.GeoPointScriptFieldDistanceFeatureQuery; +import org.elasticsearch.xcontent.FilterXContentParser; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentParser; -import 
org.elasticsearch.xcontent.support.MapXContentParser; import java.io.IOException; import java.io.UncheckedIOException; @@ -215,7 +215,38 @@ protected void index(DocumentParserContext context, GeoPoint geometry) throws IO context.doc().add(new StoredField(fieldType().name(), geometry.toString())); } // TODO phase out geohash (which is currently used in the CompletionSuggester) - multiFields.parse(this, context.switchParser(MapXContentParser.wrapObject(geometry.geohash()))); + // we only expose the geohash value and disallow advancing tokens, hence we can reuse the same parser throughout multiple sub-fields + DocumentParserContext parserContext = context.switchParser(new GeoHashMultiFieldParser(context.parser(), geometry.geohash())); + multiFields.parse(this, context, () -> parserContext); + } + + /** + * Parser that pretends to be the main document parser, but exposes the provided geohash regardless of how the geopoint was provided + * in the incoming document. We rely on the fact that consumers only ever call {@link XContentParser#textOrNull()} and never + * advance tokens, which is explicitly disallowed by this parser. + */ + static class GeoHashMultiFieldParser extends FilterXContentParser { + private final String value; + + GeoHashMultiFieldParser(XContentParser innerParser, String value) { + super(innerParser); + this.value = value; + } + + @Override + public String textOrNull() throws IOException { + return value; + } + + @Override + public Token currentToken() { + return Token.VALUE_STRING; + } + + @Override + public Token nextToken() throws IOException { + throw new UnsupportedOperationException(); + } } @Override diff --git a/server/src/test/java/org/elasticsearch/index/mapper/CompletionFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/CompletionFieldMapperTests.java index 46426c45aeab7..6e4d30a3cb8a8 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/CompletionFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/CompletionFieldMapperTests.java @@ -29,6 +29,7 @@ import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.unit.Fuzziness; +import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.analysis.AnalyzerScope; import org.elasticsearch.index.analysis.IndexAnalyzers; @@ -38,6 +39,9 @@ import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentFactory; +import org.elasticsearch.xcontent.XContentLocation; +import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xcontent.XContentParserConfiguration; import org.elasticsearch.xcontent.json.JsonXContent; import org.hamcrest.FeatureMatcher; import org.hamcrest.Matcher; @@ -45,7 +49,11 @@ import org.hamcrest.core.CombinableMatcher; import java.io.IOException; +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; import java.util.Map; +import java.util.Set; import java.util.function.Function; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; @@ -135,7 +143,6 @@ public void testPostingsFormat() throws IOException { } public void testDefaultConfiguration() throws IOException { - DocumentMapper defaultMapper = createDocumentMapper(fieldMapping(this::minimalMapping)); Mapper fieldMapper = defaultMapper.mappers().getMapper("field"); @@ -158,7 +165,6 @@ public void
testDefaultConfiguration() throws IOException { } public void testCompletionAnalyzerSettings() throws Exception { - DocumentMapper defaultMapper = createDocumentMapper(fieldMapping(b -> { b.field("type", "completion"); b.field("analyzer", "simple"); @@ -192,7 +198,6 @@ public void testCompletionAnalyzerSettings() throws Exception { @SuppressWarnings("unchecked") public void testTypeParsing() throws Exception { - DocumentMapper defaultMapper = createDocumentMapper(fieldMapping(b -> { b.field("type", "completion"); b.field("analyzer", "simple"); @@ -218,7 +223,6 @@ public void testTypeParsing() throws Exception { } public void testParsingMinimal() throws Exception { - DocumentMapper defaultMapper = createDocumentMapper(fieldMapping(this::minimalMapping)); Mapper fieldMapper = defaultMapper.mappers().getMapper("field"); @@ -228,7 +232,6 @@ public void testParsingMinimal() throws Exception { } public void testParsingFailure() throws Exception { - DocumentMapper defaultMapper = createDocumentMapper(fieldMapping(this::minimalMapping)); MapperParsingException e = expectThrows( @@ -239,7 +242,6 @@ public void testParsingFailure() throws Exception { } public void testKeywordWithSubCompletionAndContext() throws Exception { - DocumentMapper defaultMapper = createDocumentMapper(fieldMapping(b -> { b.field("type", "keyword"); b.startObject("fields"); @@ -284,7 +286,6 @@ public void testKeywordWithSubCompletionAndContext() throws Exception { } public void testCompletionWithContextAndSubCompletion() throws Exception { - DocumentMapper defaultMapper = createDocumentMapper(fieldMapping(b -> { b.field("type", "completion"); b.startArray("contexts"); @@ -366,8 +367,7 @@ public void testCompletionWithContextAndSubCompletion() throws Exception { } } - public void testKeywordWithSubCompletionAndStringInsert() throws Exception { - + public void testGeoHashWithSubCompletionAndStringInsert() throws Exception { DocumentMapper defaultMapper = createDocumentMapper(fieldMapping(b -> { b.field("type", "geo_point"); b.startObject("fields"); @@ -386,13 +386,19 @@ public void testKeywordWithSubCompletionAndStringInsert() throws Exception { // unable to assert about geofield content, covered in a REST test } - public void testCompletionTypeWithSubCompletionFieldAndStringInsert() throws Exception { + public void testCompletionTypeWithSubfieldsAndStringInsert() throws Exception { + List> builders = new ArrayList<>(); + builders.add(b -> b.startObject("analyzed1").field("type", "keyword").endObject()); + builders.add(b -> b.startObject("analyzed2").field("type", "keyword").endObject()); + builders.add(b -> b.startObject("subsuggest1").field("type", "completion").endObject()); + builders.add(b -> b.startObject("subsuggest2").field("type", "completion").endObject()); + Collections.shuffle(builders, random()); DocumentMapper defaultMapper = createDocumentMapper(fieldMapping(b -> { b.field("type", "completion"); b.startObject("fields"); - { - b.startObject("subsuggest").field("type", "completion").endObject(); + for (CheckedConsumer builder : builders) { + builder.accept(b); } b.endObject(); })); @@ -401,42 +407,80 @@ public void testCompletionTypeWithSubCompletionFieldAndStringInsert() throws Exc LuceneDocument indexableFields = parsedDocument.rootDoc(); assertThat(indexableFields.getFields("field"), arrayContainingInAnyOrder(suggestField("suggestion"))); - assertThat(indexableFields.getFields("field.subsuggest"), arrayContainingInAnyOrder(suggestField("suggestion"))); + assertThat(indexableFields.getFields("field.subsuggest1"), 
arrayContainingInAnyOrder(suggestField("suggestion"))); + assertThat(indexableFields.getFields("field.subsuggest2"), arrayContainingInAnyOrder(suggestField("suggestion"))); + assertThat( + indexableFields.getFields("field.analyzed1"), + arrayContainingInAnyOrder(keywordField("suggestion"), sortedSetDocValuesField("suggestion")) + ); + assertThat( + indexableFields.getFields("field.analyzed2"), + arrayContainingInAnyOrder(keywordField("suggestion"), sortedSetDocValuesField("suggestion")) + ); } - public void testCompletionTypeWithSubCompletionFieldAndObjectInsert() throws Exception { + public void testCompletionTypeWithSubfieldsAndArrayInsert() throws Exception { + List> builders = new ArrayList<>(); + builders.add(b -> b.startObject("analyzed1").field("type", "keyword").endObject()); + builders.add(b -> b.startObject("analyzed2").field("type", "keyword").endObject()); + builders.add(b -> b.startObject("subcompletion1").field("type", "completion").endObject()); + builders.add(b -> b.startObject("subcompletion2").field("type", "completion").endObject()); + Collections.shuffle(builders, random()); DocumentMapper defaultMapper = createDocumentMapper(fieldMapping(b -> { b.field("type", "completion"); b.startObject("fields"); - { - b.startObject("analyzed").field("type", "completion").endObject(); + for (CheckedConsumer builder : builders) { + builder.accept(b); } b.endObject(); })); - ParsedDocument parsedDocument = defaultMapper.parse(source(b -> { - b.startObject("field"); - { - b.array("input", "New York", "NY"); - b.field("weight", 34); - } - b.endObject(); - })); + ParsedDocument parsedDocument = defaultMapper.parse(source(b -> b.array("field", "New York", "NY"))); LuceneDocument indexableFields = parsedDocument.rootDoc(); assertThat(indexableFields.getFields("field"), arrayContainingInAnyOrder(suggestField("New York"), suggestField("NY"))); - assertThat(indexableFields.getFields("field.analyzed"), arrayContainingInAnyOrder(suggestField("New York"), suggestField("NY"))); - // unable to assert about weight, covered in a REST test + assertThat( + indexableFields.getFields("field.subcompletion1"), + arrayContainingInAnyOrder(suggestField("New York"), suggestField("NY")) + ); + assertThat( + indexableFields.getFields("field.subcompletion2"), + arrayContainingInAnyOrder(suggestField("New York"), suggestField("NY")) + ); + assertThat( + indexableFields.getFields("field.analyzed1"), + arrayContainingInAnyOrder( + keywordField("New York"), + sortedSetDocValuesField("New York"), + keywordField("NY"), + sortedSetDocValuesField("NY") + ) + ); + assertThat( + indexableFields.getFields("field.analyzed2"), + arrayContainingInAnyOrder( + keywordField("New York"), + sortedSetDocValuesField("New York"), + keywordField("NY"), + sortedSetDocValuesField("NY") + ) + ); } - public void testCompletionTypeWithSubKeywordFieldAndObjectInsert() throws Exception { + public void testCompletionTypeWithSubfieldsAndObjectInsert() throws Exception { + List> builders = new ArrayList<>(); + builders.add(b -> b.startObject("analyzed1").field("type", "keyword").endObject()); + builders.add(b -> b.startObject("analyzed2").field("type", "keyword").endObject()); + builders.add(b -> b.startObject("subcompletion1").field("type", "completion").endObject()); + builders.add(b -> b.startObject("subcompletion2").field("type", "completion").endObject()); + Collections.shuffle(builders, random()); DocumentMapper defaultMapper = createDocumentMapper(fieldMapping(b -> { b.field("type", "completion"); b.startObject("fields"); - { - 
b.startObject("analyzed").field("type", "keyword").endObject(); + for (CheckedConsumer builder : builders) { + builder.accept(b); } b.endObject(); })); @@ -453,7 +497,15 @@ public void testCompletionTypeWithSubKeywordFieldAndObjectInsert() throws Except LuceneDocument indexableFields = parsedDocument.rootDoc(); assertThat(indexableFields.getFields("field"), arrayContainingInAnyOrder(suggestField("New York"), suggestField("NY"))); assertThat( - indexableFields.getFields("field.analyzed"), + indexableFields.getFields("field.subcompletion1"), + arrayContainingInAnyOrder(suggestField("New York"), suggestField("NY")) + ); + assertThat( + indexableFields.getFields("field.subcompletion2"), + arrayContainingInAnyOrder(suggestField("New York"), suggestField("NY")) + ); + assertThat( + indexableFields.getFields("field.analyzed1"), arrayContainingInAnyOrder( keywordField("New York"), sortedSetDocValuesField("New York"), @@ -461,32 +513,19 @@ public void testCompletionTypeWithSubKeywordFieldAndObjectInsert() throws Except sortedSetDocValuesField("NY") ) ); - // unable to assert about weight, covered in a REST test - } - - public void testCompletionTypeWithSubKeywordFieldAndStringInsert() throws Exception { - - DocumentMapper defaultMapper = createDocumentMapper(fieldMapping(b -> { - b.field("type", "completion"); - b.startObject("fields"); - { - b.startObject("analyzed").field("type", "keyword").endObject(); - } - b.endObject(); - })); - - ParsedDocument parsedDocument = defaultMapper.parse(source(b -> b.field("field", "suggestion"))); - - LuceneDocument indexableFields = parsedDocument.rootDoc(); - assertThat(indexableFields.getFields("field"), arrayContainingInAnyOrder(suggestField("suggestion"))); assertThat( - indexableFields.getFields("field.analyzed"), - arrayContainingInAnyOrder(keywordField("suggestion"), sortedSetDocValuesField("suggestion")) + indexableFields.getFields("field.analyzed2"), + arrayContainingInAnyOrder( + keywordField("New York"), + sortedSetDocValuesField("New York"), + keywordField("NY"), + sortedSetDocValuesField("NY") + ) ); + // unable to assert about weight, covered in a REST test } public void testParsingMultiValued() throws Exception { - DocumentMapper defaultMapper = createDocumentMapper(fieldMapping(this::minimalMapping)); Mapper fieldMapper = defaultMapper.mappers().getMapper("field"); @@ -497,7 +536,6 @@ public void testParsingMultiValued() throws Exception { } public void testParsingWithWeight() throws Exception { - DocumentMapper defaultMapper = createDocumentMapper(fieldMapping(this::minimalMapping)); Mapper fieldMapper = defaultMapper.mappers().getMapper("field"); @@ -515,7 +553,6 @@ public void testParsingWithWeight() throws Exception { } public void testParsingMultiValueWithWeight() throws Exception { - DocumentMapper defaultMapper = createDocumentMapper(fieldMapping(this::minimalMapping)); Mapper fieldMapper = defaultMapper.mappers().getMapper("field"); @@ -536,7 +573,6 @@ public void testParsingMultiValueWithWeight() throws Exception { } public void testParsingWithGeoFieldAlias() throws Exception { - MapperService mapperService = createMapperService(mapping(b -> { b.startObject("completion"); { @@ -574,7 +610,6 @@ public void testParsingWithGeoFieldAlias() throws Exception { } public void testParsingFull() throws Exception { - DocumentMapper defaultMapper = createDocumentMapper(fieldMapping(this::minimalMapping)); Mapper fieldMapper = defaultMapper.mappers().getMapper("field"); @@ -596,7 +631,6 @@ public void testParsingFull() throws Exception { } public 
void testParsingMixed() throws Exception { - DocumentMapper defaultMapper = createDocumentMapper(fieldMapping(this::minimalMapping)); Mapper fieldMapper = defaultMapper.mappers().getMapper("field"); @@ -640,7 +674,6 @@ public void testParsingMixed() throws Exception { } public void testNonContextEnabledParsingWithContexts() throws Exception { - DocumentMapper defaultMapper = createDocumentMapper(fieldMapping(this::minimalMapping)); MapperParsingException e = expectThrows(MapperParsingException.class, () -> defaultMapper.parse(source(b -> { b.startObject("field"); @@ -656,7 +689,6 @@ public void testNonContextEnabledParsingWithContexts() throws Exception { } public void testFieldValueValidation() throws Exception { - DocumentMapper defaultMapper = createDocumentMapper(fieldMapping(this::minimalMapping)); CharsRefBuilder charsRefBuilder = new CharsRefBuilder(); charsRefBuilder.append("sugg"); @@ -790,6 +822,96 @@ public void testLimitOfContextMappings() throws Throwable { ); } + private static CompletionFieldMapper.CompletionInputMetadata randomCompletionMetadata() { + Map> contexts = randomBoolean() + ? Collections.emptyMap() + : Collections.singletonMap("filter", Collections.singleton("value")); + return new CompletionFieldMapper.CompletionInputMetadata("text", contexts, 10); + } + + private static XContentParser documentParser(CompletionFieldMapper.CompletionInputMetadata metadata) throws IOException { + XContentBuilder docBuilder = JsonXContent.contentBuilder(); + if (randomBoolean()) { + docBuilder.prettyPrint(); + } + docBuilder.startObject(); + docBuilder.field("field"); + docBuilder.map(metadata.toMap()); + docBuilder.endObject(); + String document = Strings.toString(docBuilder); + XContentParser docParser = JsonXContent.jsonXContent.createParser(XContentParserConfiguration.EMPTY, document); + docParser.nextToken(); + docParser.nextToken(); + assertEquals(XContentParser.Token.START_OBJECT, docParser.nextToken()); + return docParser; + } + + public void testMultiFieldParserSimpleValue() throws IOException { + CompletionFieldMapper.CompletionInputMetadata metadata = randomCompletionMetadata(); + XContentParser documentParser = documentParser(metadata); + XContentParser multiFieldParser = new CompletionFieldMapper.MultiFieldParser( + metadata, + documentParser.currentName(), + documentParser.getTokenLocation() + ); + // we don't check currentToken here because it returns START_OBJECT that is inconsistent with returning a value + assertEquals("text", multiFieldParser.textOrNull()); + assertEquals(documentParser.getTokenLocation(), multiFieldParser.getTokenLocation()); + assertEquals(documentParser.currentName(), multiFieldParser.currentName()); + } + + public void testMultiFieldParserCompletionSubfield() throws IOException { + CompletionFieldMapper.CompletionInputMetadata metadata = randomCompletionMetadata(); + XContentParser documentParser = documentParser(metadata); + // compare the object structure with the original metadata, this implicitly verifies that the xcontent read is valid + XContentBuilder multiFieldBuilder = JsonXContent.contentBuilder() + .copyCurrentStructure( + new CompletionFieldMapper.MultiFieldParser(metadata, documentParser.currentName(), documentParser.getTokenLocation()) + ); + XContentBuilder metadataBuilder = JsonXContent.contentBuilder().map(metadata.toMap()); + String jsonMetadata = Strings.toString(metadataBuilder); + assertEquals(jsonMetadata, Strings.toString(multiFieldBuilder)); + // advance token by token and verify currentName as well as getTokenLocation 
+ XContentParser multiFieldParser = new CompletionFieldMapper.MultiFieldParser( + metadata, + documentParser.currentName(), + documentParser.getTokenLocation() + ); + XContentParser expectedParser = JsonXContent.jsonXContent.createParser(XContentParserConfiguration.EMPTY, jsonMetadata); + assertEquals(expectedParser.nextToken(), multiFieldParser.currentToken()); + XContentLocation expectedTokenLocation = documentParser.getTokenLocation(); + while (expectedParser.nextToken() != null) { + XContentParser.Token token = multiFieldParser.nextToken(); + assertEquals(expectedParser.currentToken(), token); + assertEquals(expectedParser.currentToken(), multiFieldParser.currentToken()); + assertEquals(expectedTokenLocation, multiFieldParser.getTokenLocation()); + assertEquals(documentParser.nextToken(), multiFieldParser.currentToken()); + assertEquals(documentParser.currentName(), multiFieldParser.currentName()); + } + assertNull(multiFieldParser.nextToken()); + } + + public void testMultiFieldParserMixedSubfields() throws IOException { + CompletionFieldMapper.CompletionInputMetadata metadata = randomCompletionMetadata(); + XContentParser documentParser = documentParser(metadata); + // simulate 10 sub-fields which may either read simple values or the full object structure + for (int i = 0; i < 10; i++) { + XContentParser multiFieldParser = new CompletionFieldMapper.MultiFieldParser( + metadata, + documentParser.currentName(), + documentParser.getTokenLocation() + ); + if (randomBoolean()) { + assertEquals("text", multiFieldParser.textOrNull()); + } else { + XContentBuilder multiFieldBuilder = JsonXContent.contentBuilder().copyCurrentStructure(multiFieldParser); + XContentBuilder metadataBuilder = JsonXContent.contentBuilder().map(metadata.toMap()); + String jsonMetadata = Strings.toString(metadataBuilder); + assertEquals(jsonMetadata, Strings.toString(multiFieldBuilder)); + } + } + } + private Matcher suggestField(String value) { return Matchers.allOf(hasProperty(IndexableField::stringValue, equalTo(value)), Matchers.instanceOf(SuggestField.class)); } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/GeoPointFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/GeoPointFieldMapperTests.java index 922e20b410873..420ef2a8fab44 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/GeoPointFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/GeoPointFieldMapperTests.java @@ -8,9 +8,13 @@ package org.elasticsearch.index.mapper; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentFactory; +import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xcontent.XContentParserConfiguration; +import org.elasticsearch.xcontent.json.JsonXContent; import org.hamcrest.CoreMatchers; import java.io.IOException; @@ -235,6 +239,53 @@ public void testMultiFieldWithMultipleValues() throws Exception { assertThat(doc.getFields("field.geohash")[1].binaryValue().utf8ToString(), equalTo("s0fu7n0xng81")); } + public void testKeywordWithGeopointSubfield() throws Exception { + DocumentMapper mapper = createDocumentMapper(fieldMapping(b -> { + b.field("type", "keyword").field("doc_values", false); + ; + b.startObject("fields"); + { + b.startObject("geopoint").field("type", "geo_point").field("doc_values", false).endObject(); + } + b.endObject(); + })); + LuceneDocument doc = 
mapper.parse(source(b -> b.array("field", "s093jd0k72s1"))).rootDoc(); + assertThat(doc.getFields("field"), arrayWithSize(1)); + assertEquals("s093jd0k72s1", doc.getFields("field")[0].binaryValue().utf8ToString()); + assertThat(doc.getFields("field.geopoint"), arrayWithSize(1)); + assertThat(doc.getField("field.geopoint"), hasToString(both(containsString("field.geopoint:2.999")).and(containsString("1.999")))); + } + + private static XContentParser documentParser(String value, boolean prettyPrint) throws IOException { + XContentBuilder docBuilder = JsonXContent.contentBuilder(); + if (prettyPrint) { + docBuilder.prettyPrint(); + } + docBuilder.startObject(); + docBuilder.field("field", value); + docBuilder.endObject(); + String document = Strings.toString(docBuilder); + XContentParser docParser = JsonXContent.jsonXContent.createParser(XContentParserConfiguration.EMPTY, document); + docParser.nextToken(); + docParser.nextToken(); + assertEquals(XContentParser.Token.VALUE_STRING, docParser.nextToken()); + return docParser; + } + + public void testGeoHashMultiFieldParser() throws IOException { + boolean prettyPrint = randomBoolean(); + XContentParser docParser = documentParser("POINT (2 3)", prettyPrint); + XContentParser expectedParser = documentParser("s093jd0k72s1", prettyPrint); + XContentParser parser = new GeoPointFieldMapper.GeoHashMultiFieldParser(docParser, "s093jd0k72s1"); + for (int i = 0; i < 10; i++) { + assertEquals(expectedParser.currentToken(), parser.currentToken()); + assertEquals(expectedParser.currentName(), parser.currentName()); + assertEquals(expectedParser.getTokenLocation(), parser.getTokenLocation()); + assertEquals(expectedParser.textOrNull(), parser.textOrNull()); + expectThrows(UnsupportedOperationException.class, parser::nextToken); + } + } + public void testNullValue() throws Exception { DocumentMapper mapper = createDocumentMapper(fieldMapping(b -> b.field("type", "geo_point"))); Mapper fieldMapper = mapper.mappers().getMapper("field"); From 8487b0344a385e4326a0bf5d81d1f37d71d0cc6c Mon Sep 17 00:00:00 2001 From: Ioana Tagirta Date: Fri, 11 Feb 2022 15:29:46 +0100 Subject: [PATCH 063/167] Add elastic/enterprise-search-server service account (#83325) * Add elastic/enterprise-search-server service account * Remove overlapping index privileges * Linting * Remove cluster privilege already covered by manage * Skip test * Reorder assertions in test * Update docs/changelog/83325.yaml --- docs/changelog/83325.yaml | 5 ++ .../authentication/service-accounts.asciidoc | 3 + x-pack/plugin/build.gradle | 1 + .../authc/service/ServiceAccountIT.java | 49 ++++++++++++ .../authc/service/ElasticServiceAccounts.java | 31 +++++++- ...TransportGetServiceAccountActionTests.java | 4 +- .../service/ElasticServiceAccountsTests.java | 79 +++++++++++++++++++ .../service/ServiceAccountServiceTests.java | 5 +- .../test/service_accounts/10_basic.yml | 23 +++++- 9 files changed, 194 insertions(+), 6 deletions(-) create mode 100644 docs/changelog/83325.yaml diff --git a/docs/changelog/83325.yaml b/docs/changelog/83325.yaml new file mode 100644 index 0000000000000..2e18d8378490d --- /dev/null +++ b/docs/changelog/83325.yaml @@ -0,0 +1,5 @@ +pr: 83325 +summary: Add elastic/enterprise-search-server service account +area: Authorization +type: enhancement +issues: [] diff --git a/x-pack/docs/en/security/authentication/service-accounts.asciidoc b/x-pack/docs/en/security/authentication/service-accounts.asciidoc index 141ad57f8067f..ce64b539ab9b6 100644 --- 
a/x-pack/docs/en/security/authentication/service-accounts.asciidoc +++ b/x-pack/docs/en/security/authentication/service-accounts.asciidoc @@ -51,6 +51,9 @@ communicate with {es}. `elastic/kibana`:: The service account used by {kib} to communicate with {es}. +`elastic/enterprise-search-server`:: The service account used by Enterprise Search +to communicate with {es}. + // tag::service-accounts-usage[] IMPORTANT: Do not attempt to use service accounts for authenticating individual users. Service accounts can only be authenticated with service tokens, which are diff --git a/x-pack/plugin/build.gradle b/x-pack/plugin/build.gradle index b43ac4bd9a6c0..5bf5f256d2169 100644 --- a/x-pack/plugin/build.gradle +++ b/x-pack/plugin/build.gradle @@ -113,6 +113,7 @@ tasks.named("yamlRestTestV7CompatTransform").configure{ task -> task.skipTest("indices.freeze/20_stats/Translog stats on frozen indices", "#70192 -- the freeze index API is removed from 8.0") task.skipTest("indices.freeze/10_basic/Basic", "#70192 -- the freeze index API is removed from 8.0") task.skipTest("indices.freeze/10_basic/Test index options", "#70192 -- the freeze index API is removed from 8.0") + task.skipTest("service_accounts/10_basic/Test get service accounts", "new service accounts are added") task.replaceValueInMatch("_type", "_doc") task.addAllowedWarningRegex("\\[types removal\\].*") diff --git a/x-pack/plugin/security/qa/service-account/src/javaRestTest/java/org/elasticsearch/xpack/security/authc/service/ServiceAccountIT.java b/x-pack/plugin/security/qa/service-account/src/javaRestTest/java/org/elasticsearch/xpack/security/authc/service/ServiceAccountIT.java index b08952b78e40f..36bf585bdb404 100644 --- a/x-pack/plugin/security/qa/service-account/src/javaRestTest/java/org/elasticsearch/xpack/security/authc/service/ServiceAccountIT.java +++ b/x-pack/plugin/security/qa/service-account/src/javaRestTest/java/org/elasticsearch/xpack/security/authc/service/ServiceAccountIT.java @@ -144,6 +144,42 @@ public class ServiceAccountIT extends ESRestTestCase { } }"""; + private static final String ELASTIC_ENTERPRISE_SEARCH_SERVER_ROLE_DESCRIPTOR = """ + { + "cluster": [ + "manage", + "manage_security" + ], + "indices": [ + { + "names": [ + ".ent-search-*", + ".monitoring-ent-search-*", + "metricbeat-ent-search-*", + "enterprise-search-*", + "logs-app_search.analytics-default", + "logs-enterprise_search.api-default", + "logs-app_search.search_relevance_suggestions-default", + "logs-crawler-default", + "logs-workplace_search.analytics-default", + "logs-workplace_search.content_events-default" + ], + "privileges": [ + "manage", + "read", + "write" + ], + "allow_restricted_indices": false + } + ], + "applications": [], + "run_as": [], + "metadata": {}, + "transient_metadata": { + "enabled": true + } + }"""; + @BeforeClass public static void init() throws URISyntaxException, FileNotFoundException { URL resource = ServiceAccountIT.class.getResource("/ssl/ca.crt"); @@ -199,6 +235,19 @@ public void testGetServiceAccount() throws IOException { ) ); + final Request getServiceAccountRequestEnterpriseSearchService = new Request( + "GET", + "_security/service/elastic/enterprise-search-server" + ); + final Response getServiceAccountResponseEnterpriseSearchService = client().performRequest( + getServiceAccountRequestEnterpriseSearchService + ); + assertServiceAccountRoleDescriptor( + getServiceAccountResponseEnterpriseSearchService, + "elastic/enterprise-search-server", + ELASTIC_ENTERPRISE_SEARCH_SERVER_ROLE_DESCRIPTOR + ); + final String requestPath = 
"_security/service/" + randomFrom("foo", "elastic/foo", "foo/bar"); final Request getServiceAccountRequest4 = new Request("GET", requestPath); final Response getServiceAccountResponse4 = client().performRequest(getServiceAccountRequest4); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/service/ElasticServiceAccounts.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/service/ElasticServiceAccounts.java index 43cee77e76c26..d8679a74f361b 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/service/ElasticServiceAccounts.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/service/ElasticServiceAccounts.java @@ -22,6 +22,35 @@ final class ElasticServiceAccounts { static final String NAMESPACE = "elastic"; + private static final ServiceAccount ENTERPRISE_SEARCH_ACCOUNT = new ElasticServiceAccount( + "enterprise-search-server", + new RoleDescriptor( + NAMESPACE + "/enterprise-search-server", + new String[] { "manage", "manage_security" }, + new RoleDescriptor.IndicesPrivileges[] { + RoleDescriptor.IndicesPrivileges.builder() + .indices( + ".ent-search-*", + ".monitoring-ent-search-*", + "metricbeat-ent-search-*", + "enterprise-search-*", + "logs-app_search.analytics-default", + "logs-enterprise_search.api-default", + "logs-app_search.search_relevance_suggestions-default", + "logs-crawler-default", + "logs-workplace_search.analytics-default", + "logs-workplace_search.content_events-default" + ) + .privileges("manage", "read", "write") + .build() }, + null, + null, + null, + null, + null + ) + ); + private static final ServiceAccount FLEET_ACCOUNT = new ElasticServiceAccount( "fleet-server", new RoleDescriptor( @@ -71,7 +100,7 @@ final class ElasticServiceAccounts { ReservedRolesStore.kibanaSystemRoleDescriptor(NAMESPACE + "/kibana") ); - static final Map ACCOUNTS = List.of(FLEET_ACCOUNT, KIBANA_SYSTEM_ACCOUNT) + static final Map ACCOUNTS = List.of(ENTERPRISE_SEARCH_ACCOUNT, FLEET_ACCOUNT, KIBANA_SYSTEM_ACCOUNT) .stream() .collect(Collectors.toMap(a -> a.id().asPrincipal(), Function.identity())); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/service/TransportGetServiceAccountActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/service/TransportGetServiceAccountActionTests.java index 04432131fc9ff..87ef1ea24559e 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/service/TransportGetServiceAccountActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/service/TransportGetServiceAccountActionTests.java @@ -45,12 +45,12 @@ public void testDoExecute() { final PlainActionFuture future1 = new PlainActionFuture<>(); transportGetServiceAccountAction.doExecute(mock(Task.class), request1, future1); final GetServiceAccountResponse getServiceAccountResponse1 = future1.actionGet(); - assertThat(getServiceAccountResponse1.getServiceAccountInfos().length, equalTo(2)); + assertThat(getServiceAccountResponse1.getServiceAccountInfos().length, equalTo(3)); assertThat( Arrays.stream(getServiceAccountResponse1.getServiceAccountInfos()) .map(ServiceAccountInfo::getPrincipal) .collect(Collectors.toList()), - containsInAnyOrder("elastic/fleet-server", "elastic/kibana") + containsInAnyOrder("elastic/enterprise-search-server", "elastic/fleet-server", "elastic/kibana") ); final GetServiceAccountRequest request2 = new 
GetServiceAccountRequest("elastic", "fleet-server"); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/service/ElasticServiceAccountsTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/service/ElasticServiceAccountsTests.java index e96925281c8c9..a673f114bbf91 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/service/ElasticServiceAccountsTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/service/ElasticServiceAccountsTests.java @@ -7,12 +7,18 @@ package org.elasticsearch.xpack.security.authc.service; +import org.elasticsearch.action.admin.cluster.health.ClusterHealthAction; +import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsAction; import org.elasticsearch.action.admin.indices.create.AutoCreateAction; import org.elasticsearch.action.admin.indices.create.CreateIndexAction; import org.elasticsearch.action.admin.indices.delete.DeleteIndexAction; import org.elasticsearch.action.admin.indices.mapping.put.AutoPutMappingAction; +import org.elasticsearch.action.admin.indices.refresh.RefreshAction; import org.elasticsearch.action.admin.indices.settings.put.UpdateSettingsAction; import org.elasticsearch.action.admin.indices.stats.IndicesStatsAction; +import org.elasticsearch.action.admin.indices.template.delete.DeleteIndexTemplateAction; +import org.elasticsearch.action.admin.indices.template.get.GetIndexTemplatesAction; +import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateAction; import org.elasticsearch.action.bulk.BulkAction; import org.elasticsearch.action.delete.DeleteAction; import org.elasticsearch.action.get.GetAction; @@ -25,6 +31,8 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.transport.TransportRequest; +import org.elasticsearch.xpack.core.ilm.action.GetLifecycleAction; +import org.elasticsearch.xpack.core.ilm.action.PutLifecycleAction; import org.elasticsearch.xpack.core.ml.action.CloseJobAction; import org.elasticsearch.xpack.core.ml.action.DeleteCalendarAction; import org.elasticsearch.xpack.core.ml.action.DeleteCalendarEventAction; @@ -87,12 +95,15 @@ import org.elasticsearch.xpack.core.ml.action.UpdateProcessAction; import org.elasticsearch.xpack.core.ml.action.ValidateDetectorAction; import org.elasticsearch.xpack.core.ml.action.ValidateJobConfigAction; +import org.elasticsearch.xpack.core.monitoring.action.MonitoringBulkAction; import org.elasticsearch.xpack.core.security.action.apikey.CreateApiKeyAction; import org.elasticsearch.xpack.core.security.action.apikey.CreateApiKeyRequest; import org.elasticsearch.xpack.core.security.action.apikey.GetApiKeyAction; import org.elasticsearch.xpack.core.security.action.apikey.GetApiKeyRequest; import org.elasticsearch.xpack.core.security.action.apikey.InvalidateApiKeyAction; import org.elasticsearch.xpack.core.security.action.apikey.InvalidateApiKeyRequest; +import org.elasticsearch.xpack.core.security.action.role.PutRoleAction; +import org.elasticsearch.xpack.core.security.action.user.PutUserAction; import org.elasticsearch.xpack.core.security.authc.Authentication; import org.elasticsearch.xpack.core.security.authz.RoleDescriptor; import org.elasticsearch.xpack.core.security.authz.permission.Role; @@ -289,6 +300,74 @@ public void testElasticServiceAccount() { ); } + public void testElasticEnterpriseSearchServerAccount() { + final Role role = Role.builder( + 
ElasticServiceAccounts.ACCOUNTS.get("elastic/enterprise-search-server").roleDescriptor(), + null, + RESTRICTED_INDICES_AUTOMATON + ).build(); + + final Authentication authentication = mock(Authentication.class); + final TransportRequest request = mock(TransportRequest.class); + + // manage + assertThat(role.cluster().check(ClusterUpdateSettingsAction.NAME, request, authentication), is(true)); + + // manage_security + assertThat( + role.cluster() + .check(CreateApiKeyAction.NAME, new CreateApiKeyRequest(randomAlphaOfLengthBetween(3, 8), null, null), authentication), + is(true) + ); + assertThat(role.cluster().check(GetApiKeyAction.NAME, GetApiKeyRequest.forOwnedApiKeys(), authentication), is(true)); + assertThat(role.cluster().check(InvalidateApiKeyAction.NAME, InvalidateApiKeyRequest.forOwnedApiKeys(), authentication), is(true)); + + assertThat(role.cluster().check(PutUserAction.NAME, request, authentication), is(true)); + assertThat(role.cluster().check(PutRoleAction.NAME, request, authentication), is(true)); + + // manage_index_templates + assertThat(role.cluster().check(PutIndexTemplateAction.NAME, request, authentication), is(true)); + assertThat(role.cluster().check(GetIndexTemplatesAction.NAME, request, authentication), is(true)); + assertThat(role.cluster().check(DeleteIndexTemplateAction.NAME, request, authentication), is(true)); + + // monitoring + assertThat(role.cluster().check(MonitoringBulkAction.NAME, request, authentication), is(true)); + assertThat(role.cluster().check(ClusterHealthAction.NAME, request, authentication), is(true)); + + // manage_ilm + assertThat(role.cluster().check(GetLifecycleAction.NAME, request, authentication), is(true)); + assertThat(role.cluster().check(PutLifecycleAction.NAME, request, authentication), is(true)); + + List.of( + ".ent-search-" + randomAlphaOfLengthBetween(1, 20), + ".monitoring-ent-search-" + randomAlphaOfLengthBetween(1, 20), + "metricbeat-ent-search-" + randomAlphaOfLengthBetween(1, 20), + "enterprise-search-" + randomAlphaOfLengthBetween(1, 20), + "logs-app_search.analytics-default", + "logs-enterprise_search.api-default", + "logs-app_search.search_relevance_suggestions-default", + "logs-crawler-default", + "logs-workplace_search.analytics-default", + "logs-workplace_search.content_events-default" + ).forEach(index -> { + final IndexAbstraction enterpriseSearchIndex = mockIndexAbstraction(index); + assertThat(role.indices().allowedIndicesMatcher(AutoCreateAction.NAME).test(enterpriseSearchIndex), is(true)); + assertThat(role.indices().allowedIndicesMatcher(CreateIndexAction.NAME).test(enterpriseSearchIndex), is(true)); + assertThat(role.indices().allowedIndicesMatcher(DeleteAction.NAME).test(enterpriseSearchIndex), is(true)); + assertThat(role.indices().allowedIndicesMatcher(DeleteIndexAction.NAME).test(enterpriseSearchIndex), is(true)); + assertThat(role.indices().allowedIndicesMatcher(IndexAction.NAME).test(enterpriseSearchIndex), is(true)); + assertThat(role.indices().allowedIndicesMatcher(BulkAction.NAME).test(enterpriseSearchIndex), is(true)); + assertThat(role.indices().allowedIndicesMatcher(GetAction.NAME).test(enterpriseSearchIndex), is(true)); + assertThat(role.indices().allowedIndicesMatcher(MultiGetAction.NAME).test(enterpriseSearchIndex), is(true)); + assertThat(role.indices().allowedIndicesMatcher(SearchAction.NAME).test(enterpriseSearchIndex), is(true)); + assertThat(role.indices().allowedIndicesMatcher(MultiSearchAction.NAME).test(enterpriseSearchIndex), is(true)); + 
assertThat(role.indices().allowedIndicesMatcher(IndicesStatsAction.NAME).test(enterpriseSearchIndex), is(true)); + assertThat(role.indices().allowedIndicesMatcher(UpdateSettingsAction.NAME).test(enterpriseSearchIndex), is(true)); + assertThat(role.indices().allowedIndicesMatcher(RefreshAction.NAME).test(enterpriseSearchIndex), is(true)); + assertThat(role.indices().allowedIndicesMatcher("indices:foo").test(enterpriseSearchIndex), is(false)); + }); + } + private IndexAbstraction mockIndexAbstraction(String name) { IndexAbstraction mock = mock(IndexAbstraction.class); when(mock.getName()).thenReturn(name); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/service/ServiceAccountServiceTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/service/ServiceAccountServiceTests.java index f105c63958304..8d54f58273fde 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/service/ServiceAccountServiceTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/service/ServiceAccountServiceTests.java @@ -96,7 +96,10 @@ public void stopThreadPool() { } public void testGetServiceAccountPrincipals() { - assertThat(ServiceAccountService.getServiceAccountPrincipals(), containsInAnyOrder("elastic/fleet-server", "elastic/kibana")); + assertThat( + ServiceAccountService.getServiceAccountPrincipals(), + containsInAnyOrder("elastic/enterprise-search-server", "elastic/fleet-server", "elastic/kibana") + ); } public void testTryParseToken() throws IOException, IllegalAccessException { diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/service_accounts/10_basic.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/service_accounts/10_basic.yml index 7524379427f91..5c6d2d0c78275 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/service_accounts/10_basic.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/service_accounts/10_basic.yml @@ -20,18 +20,27 @@ teardown: name: api-token-kibana ignore: 404 + - do: + security.delete_service_token: + namespace: elastic + service: enterprise-search-server + name: api-token-enterprise-search-server + ignore: 404 + --- "Test get service accounts": - do: security.get_service_accounts: {} - - length: { '': 2 } + - length: { '': 3 } + - is_true: "elastic/enterprise-search-server" - is_true: "elastic/fleet-server" - is_true: "elastic/kibana" - do: security.get_service_accounts: namespace: elastic - - length: { '': 2 } + - length: { '': 3 } + - is_true: "elastic/enterprise-search-server" - is_true: "elastic/fleet-server" - is_true: "elastic/kibana" @@ -66,6 +75,16 @@ teardown: - match: { "token.name": "api-token-kibana" } - set: { "token.value": service_token_kibana } + - do: + security.create_service_token: + namespace: elastic + service: enterprise-search-server + name: api-token-enterprise-search-server + + - is_true: created + - match: { "token.name": "api-token-enterprise-search-server" } + - set: { "token.value": service_token_enterprise_search_server } + - do: headers: Authorization: Bearer ${service_token_fleet} From e4cc73cae1e7f79479413fc2bef758d818823b25 Mon Sep 17 00:00:00 2001 From: Joe Gallo Date: Fri, 11 Feb 2022 09:46:55 -0500 Subject: [PATCH 064/167] Remove LegacyCTRAListener from MetadataMappingService (#83811) --- .../metadata/MetadataMappingService.java | 25 +++++++++++-------- 1 file changed, 15 insertions(+), 10 deletions(-) diff --git 
a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataMappingService.java b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataMappingService.java index 3e89e11ae0ec0..d82a617dd9d6b 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataMappingService.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataMappingService.java @@ -57,15 +57,10 @@ public MetadataMappingService(ClusterService clusterService, IndicesService indi this.indicesService = indicesService; } - static class PutMappingClusterStateUpdateTask implements ClusterStateTaskListener, ClusterStateAckListener { - - private final PutMappingClusterStateUpdateRequest request; - private final ActionListener listener; - - PutMappingClusterStateUpdateTask(PutMappingClusterStateUpdateRequest request, ActionListener listener) { - this.request = request; - this.listener = listener; - } + record PutMappingClusterStateUpdateTask(PutMappingClusterStateUpdateRequest request, ActionListener listener) + implements + ClusterStateTaskListener, + ClusterStateAckListener { @Override public void onFailure(Exception e) { @@ -116,7 +111,17 @@ public ClusterTasksResult execute( } } currentState = applyRequest(currentState, request, indexMapperServices); - builder.success(task, new LegacyClusterTaskResultActionListener(task, originalState)); + builder.success(task, new ActionListener<>() { + @Override + public void onResponse(ClusterState clusterState) { + // listener is notified at the end of acking + } + + @Override + public void onFailure(Exception e) { + task.onFailure(e); + } + }); } catch (Exception e) { builder.failure(task, e); } From ab6de24f7b40f14e0102959fec86d3751fdaad08 Mon Sep 17 00:00:00 2001 From: weizijun Date: Sat, 12 Feb 2022 00:09:52 +0800 Subject: [PATCH 065/167] TSDB: routingPath object type check improvement (#83310) Only reject mappings whose object fields exactly match routing_path. If the fields match some pattern in routing_path, that's OK, so long as the fields inside follow all of the routing_path rules.
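To make the new behavior concrete, here is a minimal, self-contained sketch (not part of the patch) of the before/after validation rule. The simpleMatch helper below is a simplified stand-in for Regex.simpleMatch that only handles a trailing '*', and the sample ROUTING_PATHS/OBJECT_MAPPERS values are hypothetical inputs chosen for illustration:

import java.util.List;
import java.util.Set;

public class RoutingPathObjectCheckSketch {

    // Hypothetical inputs: one wildcard routing path, plus the object mappers of a mapping.
    private static final List<String> ROUTING_PATHS = List.of("dim.o*");
    private static final Set<String> OBJECT_MAPPERS = Set.of("dim", "dim.o");

    // Simplified stand-in for Regex.simpleMatch that only supports a trailing '*'.
    private static boolean simpleMatch(String pattern, String value) {
        return pattern.endsWith("*")
            ? value.startsWith(pattern.substring(0, pattern.length() - 1))
            : pattern.equals(value);
    }

    public static void main(String[] args) {
        for (String path : ROUTING_PATHS) {
            for (String objectName : OBJECT_MAPPERS) {
                // Before this change: any object mapper whose name matched the pattern was
                // rejected, so routing_path [dim.o*] failed on the object [dim.o] even when
                // the keyword fields inside it satisfied all of the routing_path rules.
                boolean rejectedBefore = simpleMatch(path, objectName);
                // After this change: only an exact name match rejects the object; wildcard
                // patterns are validated against the leaf fields they resolve to instead.
                boolean rejectedAfter = path.equals(objectName);
                System.out.printf("path=%s object=%s rejectedBefore=%s rejectedAfter=%s%n",
                    path, objectName, rejectedBefore, rejectedAfter);
            }
        }
    }
}

Running the sketch shows that the wildcard path dim.o* no longer rejects the dim.o object itself, while an exact routing_path entry such as dim.o still would, which is the behavior exercised by the tests changed below.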
--- docs/changelog/83310.yaml | 5 +++ .../rest-api-spec/test/tsdb/20_mapping.yml | 41 +++++++++++++++++-- .../index/mapper/DocumentMapper.java | 4 +- .../index/TimeSeriesModeTests.java | 18 +++++++- 4 files changed, 60 insertions(+), 8 deletions(-) create mode 100644 docs/changelog/83310.yaml diff --git a/docs/changelog/83310.yaml b/docs/changelog/83310.yaml new file mode 100644 index 0000000000000..e1c001927389b --- /dev/null +++ b/docs/changelog/83310.yaml @@ -0,0 +1,5 @@ +pr: 83310 +summary: "TSDB: routingPath object type check improvement" +area: TSDB +type: enhancement +issues: [] diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/20_mapping.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/20_mapping.yml index c01fa5f24de44..fd66eef061b9f 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/20_mapping.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/20_mapping.yml @@ -54,10 +54,10 @@ ecs style: time_series_metric: gauge --- -top level dim object: +top level wildcard dim object: - skip: - version: " - 8.0.99" - reason: introduced in 8.1.0 + version: " - 8.1.99" + reason: routing_path object type check improve in 8.2.0 - do: indices.create: @@ -66,7 +66,7 @@ top level dim object: settings: index: mode: time_series - routing_path: [dim.*] + routing_path: [dim*] time_series: start_time: 2021-04-28T00:00:00Z end_time: 2021-04-29T00:00:00Z @@ -111,6 +111,39 @@ top level dim object: type: double time_series_metric: gauge +--- +exact match object type: + - skip: + version: " - 8.1.99" + reason: routing_path object type check improve in 8.2.0 + + - do: + catch: '/All fields that match routing_path must be keywords with \[time_series_dimension: true\] and without the \[script\] parameter. \[dim\] was \[object\]./' + indices.create: + index: tsdb_index + body: + settings: + index: + mode: time_series + routing_path: [dim] + time_series: + start_time: 2021-04-28T00:00:00Z + end_time: 2021-04-29T00:00:00Z + number_of_replicas: 0 + number_of_shards: 2 + mappings: + properties: + "@timestamp": + type: date + dim: + properties: + metricset: + type: keyword + time_series_dimension: true + uid: + type: keyword + time_series_dimension: true + --- non keyword matches routing_path: - skip: diff --git a/server/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java index be8f9c5834787..cb0390eb37fb1 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java @@ -9,7 +9,6 @@ package org.elasticsearch.index.mapper; import org.elasticsearch.common.compress.CompressedXContent; -import org.elasticsearch.common.regex.Regex; import org.elasticsearch.index.IndexSettings; import java.util.List; @@ -104,7 +103,8 @@ public void validate(IndexSettings settings, boolean checkLimits) { mappingLookup.getFieldType(match).validateMatchedRoutingPath(); } for (String objectName : mappingLookup.objectMappers().keySet()) { - if (Regex.simpleMatch(path, objectName)) { + // object type is not allowed in the routing paths + if (path.equals(objectName)) { throw new IllegalArgumentException( "All fields that match routing_path must be keywords with [time_series_dimension: true] " + "and without the [script] parameter. 
[" diff --git a/server/src/test/java/org/elasticsearch/index/TimeSeriesModeTests.java b/server/src/test/java/org/elasticsearch/index/TimeSeriesModeTests.java index 8184ce2dc97d8..2b6f29d2e79f1 100644 --- a/server/src/test/java/org/elasticsearch/index/TimeSeriesModeTests.java +++ b/server/src/test/java/org/elasticsearch/index/TimeSeriesModeTests.java @@ -144,8 +144,22 @@ public void testValidateAliasWithSearchRouting() { assertThat(e.getMessage(), equalTo("routing is forbidden on CRUD operations that target indices in [index.mode=time_series]")); } - public void testRoutingPathMatchesObject() { - Settings s = getSettings(randomBoolean() ? "dim.o" : "dim.*"); + public void testRoutingPathMatchesObject() throws IOException { + Settings s = getSettings("dim.o*"); + createMapperService(s, mapping(b -> { + b.startObject("dim").startObject("properties"); + { + b.startObject("o").startObject("properties"); + b.startObject("inner_dim").field("type", "keyword").field("time_series_dimension", true).endObject(); + b.endObject().endObject(); + } + b.startObject("dim").field("type", "keyword").field("time_series_dimension", true).endObject(); + b.endObject().endObject(); + })); + } + + public void testRoutingPathEqualsObjectNameError() { + Settings s = getSettings("dim.o"); Exception e = expectThrows(IllegalArgumentException.class, () -> createMapperService(s, mapping(b -> { b.startObject("dim").startObject("properties"); { From 06af71e3819b318645629c2cf2ffe56bb54d733f Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Fri, 11 Feb 2022 08:38:10 -0800 Subject: [PATCH 066/167] Move x-content detection to format specific classes (#83808) x-content has support for automatically detecting the content type provided, through a series of heuristics for each supported x-content type. This change moves those heuristics into the relevant x-content implementation classes so that XContentFactory does not have direct dependencies on those Jackson classes. relates #83705 --- .../org/elasticsearch/xcontent/XContent.java | 6 ++++ .../xcontent/XContentFactory.java | 35 +++++-------------- .../xcontent/cbor/CborXContent.java | 26 ++++++++++++++ .../xcontent/json/JsonXContent.java | 10 ++++++ .../xcontent/smile/SmileXContent.java | 17 +++++++++ .../xcontent/yaml/YamlXContent.java | 10 ++++++ 6 files changed, 77 insertions(+), 27 deletions(-) diff --git a/libs/x-content/src/main/java/org/elasticsearch/xcontent/XContent.java b/libs/x-content/src/main/java/org/elasticsearch/xcontent/XContent.java index a78bdf159144b..18c12bc5b918e 100644 --- a/libs/x-content/src/main/java/org/elasticsearch/xcontent/XContent.java +++ b/libs/x-content/src/main/java/org/elasticsearch/xcontent/XContent.java @@ -26,6 +26,12 @@ public interface XContent { byte streamSeparator(); + @Deprecated + boolean detectContent(byte[] bytes, int offset, int length); + + @Deprecated + boolean detectContent(CharSequence chars); + /** * Creates a new generator using the provided output stream. 
*/ diff --git a/libs/x-content/src/main/java/org/elasticsearch/xcontent/XContentFactory.java b/libs/x-content/src/main/java/org/elasticsearch/xcontent/XContentFactory.java index cca70b2f1a00c..b7c98837678cd 100644 --- a/libs/x-content/src/main/java/org/elasticsearch/xcontent/XContentFactory.java +++ b/libs/x-content/src/main/java/org/elasticsearch/xcontent/XContentFactory.java @@ -8,9 +8,6 @@ package org.elasticsearch.xcontent; -import com.fasterxml.jackson.dataformat.cbor.CBORConstants; -import com.fasterxml.jackson.dataformat.smile.SmileConstants; - import org.elasticsearch.xcontent.cbor.CborXContent; import org.elasticsearch.xcontent.json.JsonXContent; import org.elasticsearch.xcontent.smile.SmileXContent; @@ -142,22 +139,20 @@ public static XContentType xContentType(CharSequence content) { return null; } char first = content.charAt(0); - if (first == '{') { + if (JsonXContent.jsonXContent.detectContent(content)) { return XContentType.JSON; } // Should we throw a failure here? Smile idea is to use it in bytes.... - if (length > 2 - && first == SmileConstants.HEADER_BYTE_1 - && content.charAt(1) == SmileConstants.HEADER_BYTE_2 - && content.charAt(2) == SmileConstants.HEADER_BYTE_3) { + if (SmileXContent.smileXContent.detectContent(content)) { return XContentType.SMILE; } - if (length > 2 && first == '-' && content.charAt(1) == '-' && content.charAt(2) == '-') { + if (YamlXContent.yamlXContent.detectContent(content)) { return XContentType.YAML; } // CBOR is not supported + // fallback for JSON for (int i = 0; i < length; i++) { char c = content.charAt(i); if (c == '{') { @@ -287,34 +282,20 @@ public static XContentType xContentType(byte[] bytes, int offset, int length) { return null; } byte first = bytes[offset]; - if (first == '{') { + if (JsonXContent.jsonXContent.detectContent(bytes, offset, length)) { return XContentType.JSON; } - if (length > 2 - && first == SmileConstants.HEADER_BYTE_1 - && bytes[offset + 1] == SmileConstants.HEADER_BYTE_2 - && bytes[offset + 2] == SmileConstants.HEADER_BYTE_3) { + if (SmileXContent.smileXContent.detectContent(bytes, offset, length)) { return XContentType.SMILE; } if (length > 2 && first == '-' && bytes[offset + 1] == '-' && bytes[offset + 2] == '-') { return XContentType.YAML; } - // CBOR logic similar to CBORFactory#hasCBORFormat - if (first == CBORConstants.BYTE_OBJECT_INDEFINITE && length > 1) { - return XContentType.CBOR; - } - if (CBORConstants.hasMajorType(CBORConstants.MAJOR_TYPE_TAG, first) && length > 2) { - // Actually, specific "self-describe tag" is a very good indicator - if (first == (byte) 0xD9 && bytes[offset + 1] == (byte) 0xD9 && bytes[offset + 2] == (byte) 0xF7) { - return XContentType.CBOR; - } - } - // for small objects, some encoders just encode as major type object, we can safely - // say its CBOR since it doesn't contradict SMILE or JSON, and its a last resort - if (CBORConstants.hasMajorType(CBORConstants.MAJOR_TYPE_OBJECT, first)) { + if (CborXContent.cborXContent.detectContent(bytes, offset, length)) { return XContentType.CBOR; } + // fallback for JSON int jsonStart = 0; // JSON may be preceded by UTF-8 BOM if (length > 3 && first == (byte) 0xEF && bytes[offset + 1] == (byte) 0xBB && bytes[offset + 2] == (byte) 0xBF) { diff --git a/libs/x-content/src/main/java/org/elasticsearch/xcontent/cbor/CborXContent.java b/libs/x-content/src/main/java/org/elasticsearch/xcontent/cbor/CborXContent.java index 90c88b979514e..fbea0aab10019 100644 --- a/libs/x-content/src/main/java/org/elasticsearch/xcontent/cbor/CborXContent.java +++ 
b/libs/x-content/src/main/java/org/elasticsearch/xcontent/cbor/CborXContent.java @@ -11,6 +11,7 @@ import com.fasterxml.jackson.core.JsonEncoding; import com.fasterxml.jackson.core.JsonGenerator; import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.dataformat.cbor.CBORConstants; import com.fasterxml.jackson.dataformat.cbor.CBORFactory; import org.elasticsearch.xcontent.XContent; @@ -60,6 +61,31 @@ public byte streamSeparator() { throw new XContentParseException("cbor does not support stream parsing..."); } + @Override + public boolean detectContent(byte[] bytes, int offset, int length) { + // CBOR logic similar to CBORFactory#hasCBORFormat + if (bytes[offset] == CBORConstants.BYTE_OBJECT_INDEFINITE && length > 1) { + return true; + } + if (CBORConstants.hasMajorType(CBORConstants.MAJOR_TYPE_TAG, bytes[offset]) && length > 2) { + // Actually, specific "self-describe tag" is a very good indicator + if (bytes[offset] == (byte) 0xD9 && bytes[offset + 1] == (byte) 0xD9 && bytes[offset + 2] == (byte) 0xF7) { + return true; + } + } + // for small objects, some encoders just encode as major type object, we can safely + // say its CBOR since it doesn't contradict SMILE or JSON, and its a last resort + if (CBORConstants.hasMajorType(CBORConstants.MAJOR_TYPE_OBJECT, bytes[offset])) { + return true; + } + return false; + } + + @Override + public boolean detectContent(CharSequence chars) { + return false; + } + @Override public XContentGenerator createGenerator(OutputStream os, Set includes, Set excludes) throws IOException { return new CborXContentGenerator(cborFactory.createGenerator(os, JsonEncoding.UTF8), os, includes, excludes); diff --git a/libs/x-content/src/main/java/org/elasticsearch/xcontent/json/JsonXContent.java b/libs/x-content/src/main/java/org/elasticsearch/xcontent/json/JsonXContent.java index b5542a3e1890e..d543c39ae7fcf 100644 --- a/libs/x-content/src/main/java/org/elasticsearch/xcontent/json/JsonXContent.java +++ b/libs/x-content/src/main/java/org/elasticsearch/xcontent/json/JsonXContent.java @@ -62,6 +62,16 @@ public byte streamSeparator() { return '\n'; } + @Override + public boolean detectContent(byte[] bytes, int offset, int length) { + return bytes[offset] == '{'; + } + + @Override + public boolean detectContent(CharSequence chars) { + return chars.charAt(0) == '{'; + } + @Override public XContentGenerator createGenerator(OutputStream os, Set includes, Set excludes) throws IOException { return new JsonXContentGenerator(jsonFactory.createGenerator(os, JsonEncoding.UTF8), os, includes, excludes); diff --git a/libs/x-content/src/main/java/org/elasticsearch/xcontent/smile/SmileXContent.java b/libs/x-content/src/main/java/org/elasticsearch/xcontent/smile/SmileXContent.java index b789e691030cb..9d1a41b264ed7 100644 --- a/libs/x-content/src/main/java/org/elasticsearch/xcontent/smile/SmileXContent.java +++ b/libs/x-content/src/main/java/org/elasticsearch/xcontent/smile/SmileXContent.java @@ -11,6 +11,7 @@ import com.fasterxml.jackson.core.JsonEncoding; import com.fasterxml.jackson.core.JsonGenerator; import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.dataformat.smile.SmileConstants; import com.fasterxml.jackson.dataformat.smile.SmileFactory; import com.fasterxml.jackson.dataformat.smile.SmileGenerator; @@ -62,6 +63,22 @@ public byte streamSeparator() { return (byte) 0xFF; } + @Override + public boolean detectContent(byte[] bytes, int offset, int length) { + return length > 2 + && bytes[offset] == SmileConstants.HEADER_BYTE_1 + && 
bytes[offset + 1] == SmileConstants.HEADER_BYTE_2 + && bytes[offset + 2] == SmileConstants.HEADER_BYTE_3; + } + + @Override + public boolean detectContent(CharSequence chars) { + return chars.length() > 2 + && chars.charAt(0) == SmileConstants.HEADER_BYTE_1 + && chars.charAt(1) == SmileConstants.HEADER_BYTE_2 + && chars.charAt(2) == SmileConstants.HEADER_BYTE_3; + } + @Override public XContentGenerator createGenerator(OutputStream os, Set includes, Set excludes) throws IOException { return new SmileXContentGenerator(smileFactory.createGenerator(os, JsonEncoding.UTF8), os, includes, excludes); diff --git a/libs/x-content/src/main/java/org/elasticsearch/xcontent/yaml/YamlXContent.java b/libs/x-content/src/main/java/org/elasticsearch/xcontent/yaml/YamlXContent.java index 971bb6a28ebb7..8dd552e49af0f 100644 --- a/libs/x-content/src/main/java/org/elasticsearch/xcontent/yaml/YamlXContent.java +++ b/libs/x-content/src/main/java/org/elasticsearch/xcontent/yaml/YamlXContent.java @@ -55,6 +55,16 @@ public byte streamSeparator() { throw new UnsupportedOperationException("yaml does not support stream parsing..."); } + @Override + public boolean detectContent(byte[] bytes, int offset, int length) { + return length > 2 && bytes[offset] == '-' && bytes[offset + 1] == '-' && bytes[offset + 2] == '-'; + } + + @Override + public boolean detectContent(CharSequence chars) { + return chars.length() > 2 && chars.charAt(0) == '-' && chars.charAt(1) == '-' && chars.charAt(2) == '-'; + } + @Override public XContentGenerator createGenerator(OutputStream os, Set includes, Set excludes) throws IOException { return new YamlXContentGenerator(yamlFactory.createGenerator(os, JsonEncoding.UTF8), os, includes, excludes); From 20409f566a5dca80a42274f78c08e7b341c61192 Mon Sep 17 00:00:00 2001 From: David Turner Date: Fri, 11 Feb 2022 17:13:52 +0000 Subject: [PATCH 067/167] Remove LegacyCTRAListener from MasterServiceTests (#83840) Relates #83784 --- .../cluster/service/MasterServiceTests.java | 166 ++++++++++-------- 1 file changed, 96 insertions(+), 70 deletions(-) diff --git a/server/src/test/java/org/elasticsearch/cluster/service/MasterServiceTests.java b/server/src/test/java/org/elasticsearch/cluster/service/MasterServiceTests.java index 334cda2b05dc2..ec65219a79b31 100644 --- a/server/src/test/java/org/elasticsearch/cluster/service/MasterServiceTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/service/MasterServiceTests.java @@ -260,53 +260,38 @@ public void onAckTimeout() { */ public void testClusterStateTaskListenerThrowingExceptionIsOkay() throws InterruptedException { final CountDownLatch latch = new CountDownLatch(1); - AtomicBoolean published = new AtomicBoolean(); try (MasterService masterService = createMasterService(true)) { - ClusterStateTaskListener update = new ClusterStateTaskListener() { - @Override - public void clusterStateProcessed(ClusterState oldState, ClusterState newState) { - throw new RuntimeException("testing exception handling"); - } - - @Override - public void onFailure(Exception e) {} - }; masterService.submitStateUpdateTask( "testClusterStateTaskListenerThrowingExceptionIsOkay", - update, + new ExpectSuccessTask(), ClusterStateTaskConfig.build(Priority.NORMAL), new ClusterStateTaskExecutor<>() { @Override - public ClusterTasksResult execute( - ClusterState currentState, - List tasks - ) { - ClusterState newClusterState = ClusterState.builder(currentState).build(); - return successes(currentState, tasks).build(newClusterState); + public ClusterTasksResult execute(ClusterState 
currentState, List tasks) { + var builder = ClusterTasksResult.builder(); + for (final var task : tasks) { + builder.success( + task, + EXPECT_SUCCESS_LISTENER.delegateFailure( + (delegate, cs) -> { throw new RuntimeException("testing exception handling"); } + ) + ); + } + return builder.build(ClusterState.builder(currentState).build()); } @Override public void clusterStatePublished(ClusterStatePublicationEvent clusterStatePublicationEvent) { - published.set(true); latch.countDown(); } } ); - latch.await(); - assertTrue(published.get()); + assertTrue(latch.await(10, TimeUnit.SECONDS)); } } - private static ClusterTasksResult.Builder successes(ClusterState originalState, List tasks) { - ClusterTasksResult.Builder builder = ClusterTasksResult.builder(); - for (T task : tasks) { - builder = builder.success(task, new ClusterStateTaskExecutor.LegacyClusterTaskResultActionListener(task, originalState)); - } - return builder; - } - @TestLogging(value = "org.elasticsearch.cluster.service:TRACE", reason = "to ensure that we log cluster state events on TRACE level") public void testClusterStateUpdateLogging() throws Exception { MockLogAppender mockAppender = new MockLogAppender(); @@ -476,17 +461,10 @@ public void onFailure(Exception e) { public void testMultipleSubmissionBatching() throws Exception { - class Task implements ClusterStateTaskListener { - @Override - public void onFailure(Exception e) { - throw new AssertionError(e); - } - } - final int executorCount = between(1, 5); final var executionCountDown = new CountDownLatch(executorCount); - class Executor implements ClusterStateTaskExecutor { + class Executor implements ClusterStateTaskExecutor { final AtomicBoolean executed = new AtomicBoolean(); @@ -497,11 +475,16 @@ public void addExpectedTaskCount(int taskCount) { } @Override - public ClusterTasksResult execute(ClusterState currentState, List tasks) throws Exception { + public ClusterTasksResult execute(ClusterState currentState, List tasks) + throws Exception { assertTrue("Should execute all tasks at once", executed.compareAndSet(false, true)); assertThat("Should execute all tasks at once", tasks.size(), equalTo(expectedTaskCount)); executionCountDown.countDown(); - return successes(currentState, tasks).build(currentState); + var builder = ClusterTasksResult.builder(); + for (final var task : tasks) { + builder.success(task, EXPECT_SUCCESS_LISTENER); + } + return builder.build(currentState); } } @@ -516,12 +499,16 @@ public ClusterTasksResult execute(ClusterState currentState, List ta masterService.submitStateUpdateTask( "block", - new Task(), + new ExpectSuccessTask(), ClusterStateTaskConfig.build(Priority.NORMAL), (currentState, tasks) -> { executionBarrier.await(10, TimeUnit.SECONDS); // notify test thread that the master service is blocked executionBarrier.await(10, TimeUnit.SECONDS); // wait for test thread to release us - return successes(currentState, tasks).build(currentState); + var builder = ClusterTasksResult.builder(); + for (final var task : tasks) { + builder.success(task, EXPECT_SUCCESS_LISTENER); + } + return builder.build(currentState); } ); @@ -532,7 +519,7 @@ public ClusterTasksResult execute(ClusterState currentState, List ta final var submitThreads = new Thread[between(1, 10)]; for (int i = 0; i < submitThreads.length; i++) { final var executor = randomFrom(executors); - final var task = new Task(); + final var task = new ExpectSuccessTask(); executor.addExpectedTaskCount(1); submitThreads[i] = new Thread(() -> { try { @@ -611,13 +598,12 @@ public void execute() { 
@Override public void onFailure(Exception e) { - throw new AssertionError(e); + throw new AssertionError("should not be called", e); } @Override public void clusterStateProcessed(ClusterState oldState, ClusterState newState) { - processedStates.incrementAndGet(); - processedStatesLatch.get().countDown(); + throw new AssertionError("should not be called"); } @Override @@ -673,7 +659,23 @@ public ClusterTasksResult execute(ClusterState currentState, List ta equalTo(true) ); } - return successes(currentState, tasks).build(maybeUpdatedClusterState); + var builder = ClusterTasksResult.builder(); + for (final var task : tasks) { + builder = builder.success(task, new ActionListener<>() { + @Override + public void onResponse(ClusterState clusterState) { + processedStates.incrementAndGet(); + processedStatesLatch.get().countDown(); + } + + @Override + public void onFailure(Exception e) { + throw new AssertionError("should not be called", e); + } + }); + } + + return builder.build(maybeUpdatedClusterState); } @Override @@ -971,35 +973,31 @@ public void testBlockingCallInClusterStateTaskListenerFails() throws Interrupted final AtomicReference assertionRef = new AtomicReference<>(); try (MasterService masterService = createMasterService(true)) { - ClusterStateTaskListener update = new ClusterStateTaskListener() { - @Override - public void clusterStateProcessed(ClusterState oldState, ClusterState newState) { - BaseFuture future = new BaseFuture() { - }; - try { - if (randomBoolean()) { - future.get(1L, TimeUnit.SECONDS); - } else { - future.get(); - } - } catch (Exception e) { - throw new RuntimeException(e); - } catch (AssertionError e) { - assertionRef.set(e); - latch.countDown(); - } - } - - @Override - public void onFailure(Exception e) {} - }; masterService.submitStateUpdateTask( "testBlockingCallInClusterStateTaskListenerFails", - update, + new ExpectSuccessTask(), ClusterStateTaskConfig.build(Priority.NORMAL), (currentState, tasks) -> { - ClusterState newClusterState = ClusterState.builder(currentState).build(); - return successes(currentState, tasks).build(newClusterState); + var builder = ClusterTasksResult.builder(); + for (final var task : tasks) { + builder = builder.success(task, EXPECT_SUCCESS_LISTENER.delegateFailure((delegate, cs) -> { + BaseFuture future = new BaseFuture() { + }; + try { + if (randomBoolean()) { + future.get(1L, TimeUnit.SECONDS); + } else { + future.get(); + } + } catch (Exception e) { + throw new RuntimeException(e); + } catch (AssertionError e) { + assertionRef.set(e); + latch.countDown(); + } + })); + } + return builder.build(ClusterState.builder(currentState).build()); } ); @@ -1501,4 +1499,32 @@ public TimeValue masterNodeTimeout() { }; } + /** + * Listener that asserts it does not fail. + */ + private static final ActionListener EXPECT_SUCCESS_LISTENER = new ActionListener<>() { + @Override + public void onResponse(ClusterState clusterState) {} + + @Override + public void onFailure(Exception e) { + throw new AssertionError("should not be called", e); + } + }; + + /** + * Task that asserts it does not fail. 
+ */ + private static class ExpectSuccessTask implements ClusterStateTaskListener { + @Override + public void onFailure(Exception e) { + throw new AssertionError("should not be called", e); + } + + @Override + public void clusterStateProcessed(ClusterState oldState, ClusterState newState) { + // see parent method javadoc, we use dedicated listeners rather than calling this method + throw new AssertionError("should not be called"); + } + } } From 4cf37b7cc96f8c0268fbff36c8955a91b9c4e42d Mon Sep 17 00:00:00 2001 From: Dan Hermann Date: Fri, 11 Feb 2022 11:56:47 -0600 Subject: [PATCH 068/167] Short circuit date patterns after first match (#83764) --- docs/changelog/83764.yaml | 5 +++++ .../ingest/common/DateProcessor.java | 1 + .../ingest/common/DateProcessorTests.java | 22 +++++++++++++++++++ 3 files changed, 28 insertions(+) create mode 100644 docs/changelog/83764.yaml diff --git a/docs/changelog/83764.yaml b/docs/changelog/83764.yaml new file mode 100644 index 0000000000000..83d8aa024bba3 --- /dev/null +++ b/docs/changelog/83764.yaml @@ -0,0 +1,5 @@ +pr: 83764 +summary: Short circuit date patterns after first match +area: Ingest +type: bug +issues: [] diff --git a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/DateProcessor.java b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/DateProcessor.java index e6baafa3a9750..031ed9cf86bf7 100644 --- a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/DateProcessor.java +++ b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/DateProcessor.java @@ -102,6 +102,7 @@ public IngestDocument execute(IngestDocument ingestDocument) { for (Function, Function> dateParser : dateParsers) { try { dateTime = dateParser.apply(ingestDocument.getSourceAndMetadata()).apply(value); + break; } catch (Exception e) { // try the next parser and keep track of the exceptions lastException = ExceptionsHelper.useOrSuppress(lastException, e); diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/DateProcessorTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/DateProcessorTests.java index 2beba89adfd1a..9cc376fc379b8 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/DateProcessorTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/DateProcessorTests.java @@ -100,6 +100,28 @@ public void testJavaPatternMultipleFormats() { } } + public void testShortCircuitAdditionalPatternsAfterFirstMatchingPattern() { + List matchFormats = new ArrayList<>(); + matchFormats.add("invalid"); + matchFormats.add("uuuu-dd-MM"); + matchFormats.add("uuuu-MM-dd"); + DateProcessor dateProcessor = new DateProcessor( + randomAlphaOfLength(10), + null, + templatize(ZoneId.of("Europe/Amsterdam")), + templatize(Locale.ENGLISH), + "date_as_string", + matchFormats, + "date_as_date" + ); + + Map document = new HashMap<>(); + document.put("date_as_string", "2010-03-04"); + IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document); + dateProcessor.execute(ingestDocument); + assertThat(ingestDocument.getFieldValue("date_as_date", String.class), equalTo("2010-04-03T00:00:00.000+02:00")); + } + public void testJavaPatternNoTimezone() { DateProcessor dateProcessor = new DateProcessor( randomAlphaOfLength(10), From 26be52b066eeed87537ccb1b3b566b4f5eb6258a Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Fri, 11 Feb 2022 13:53:22 -0500 Subject: [PATCH 069/167] Shrink slow log for `match` 
query (#83338) This removes defaults from the output of the `match` query's `toXContent`. That'll make it take up less room in the slow log. And anywhere else it shows up. Relates to #76515 --- .../index/query/AbstractQueryBuilder.java | 19 +++++++++- .../index/query/MatchQueryBuilder.java | 30 +++++++++++----- .../index/query/MatchQueryBuilderTests.java | 29 +++++++++++++-- .../index/query/NestedQueryBuilderTests.java | 35 +++++++++++++++++-- .../utils/XContentObjectTransformerTests.java | 18 +--------- 5 files changed, 101 insertions(+), 30 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/index/query/AbstractQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/AbstractQueryBuilder.java index fb45597fb4771..f1a31db6125d2 100644 --- a/server/src/main/java/org/elasticsearch/index/query/AbstractQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/AbstractQueryBuilder.java @@ -78,13 +78,30 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws protected abstract void doXContent(XContentBuilder builder, Params params) throws IOException; - protected void printBoostAndQueryName(XContentBuilder builder) throws IOException { + /** + * Add {@code boost} and {@code query_name} to the builder. + * @deprecated use {@link #boostAndQueryNameToXContent} + */ + @Deprecated + protected final void printBoostAndQueryName(XContentBuilder builder) throws IOException { builder.field(BOOST_FIELD.getPreferredName(), boost); if (queryName != null) { builder.field(NAME_FIELD.getPreferredName(), queryName); } } + /** + * Add {@code boost} and {@code query_name} to the builder. + */ + protected final void boostAndQueryNameToXContent(XContentBuilder builder) throws IOException { + if (boost != DEFAULT_BOOST) { + builder.field(BOOST_FIELD.getPreferredName(), boost); + } + if (queryName != null) { + builder.field(NAME_FIELD.getPreferredName(), queryName); + } + } + @Override public final Query toQuery(SearchExecutionContext context) throws IOException { Query query = doToQuery(context); diff --git a/server/src/main/java/org/elasticsearch/index/query/MatchQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/MatchQueryBuilder.java index ccf33a2818d6a..0f8630ccbb030 100644 --- a/server/src/main/java/org/elasticsearch/index/query/MatchQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/MatchQueryBuilder.java @@ -328,15 +328,21 @@ public void doXContent(XContentBuilder builder, Params params) throws IOExceptio builder.startObject(fieldName); builder.field(QUERY_FIELD.getPreferredName(), value); - builder.field(OPERATOR_FIELD.getPreferredName(), operator.toString()); + if (operator != DEFAULT_OPERATOR) { + builder.field(OPERATOR_FIELD.getPreferredName(), operator.toString()); + } if (analyzer != null) { builder.field(ANALYZER_FIELD.getPreferredName(), analyzer); } if (fuzziness != null) { fuzziness.toXContent(builder, params); } - builder.field(PREFIX_LENGTH_FIELD.getPreferredName(), prefixLength); - builder.field(MAX_EXPANSIONS_FIELD.getPreferredName(), maxExpansions); + if (prefixLength != FuzzyQuery.defaultPrefixLength) { + builder.field(PREFIX_LENGTH_FIELD.getPreferredName(), prefixLength); + } + if (maxExpansions != FuzzyQuery.defaultMaxExpansions) { + builder.field(MAX_EXPANSIONS_FIELD.getPreferredName(), maxExpansions); + } if (minimumShouldMatch != null) { builder.field(MINIMUM_SHOULD_MATCH_FIELD.getPreferredName(), minimumShouldMatch); } @@ -344,11 +350,19 @@ public void
doXContent(XContentBuilder builder, Params params) throws IOExceptio builder.field(FUZZY_REWRITE_FIELD.getPreferredName(), fuzzyRewrite); } // LUCENE 4 UPGRADE we need to document this & test this - builder.field(FUZZY_TRANSPOSITIONS_FIELD.getPreferredName(), fuzzyTranspositions); - builder.field(LENIENT_FIELD.getPreferredName(), lenient); - builder.field(ZERO_TERMS_QUERY_FIELD.getPreferredName(), zeroTermsQuery.toString()); - builder.field(GENERATE_SYNONYMS_PHRASE_QUERY.getPreferredName(), autoGenerateSynonymsPhraseQuery); - printBoostAndQueryName(builder); + if (fuzzyTranspositions != FuzzyQuery.defaultTranspositions) { + builder.field(FUZZY_TRANSPOSITIONS_FIELD.getPreferredName(), fuzzyTranspositions); + } + if (lenient != MatchQueryParser.DEFAULT_LENIENCY) { + builder.field(LENIENT_FIELD.getPreferredName(), lenient); + } + if (false == zeroTermsQuery.equals(MatchQueryParser.DEFAULT_ZERO_TERMS_QUERY)) { + builder.field(ZERO_TERMS_QUERY_FIELD.getPreferredName(), zeroTermsQuery.toString()); + } + if (autoGenerateSynonymsPhraseQuery == false) { + builder.field(GENERATE_SYNONYMS_PHRASE_QUERY.getPreferredName(), autoGenerateSynonymsPhraseQuery); + } + boostAndQueryNameToXContent(builder); builder.endObject(); builder.endObject(); } diff --git a/server/src/test/java/org/elasticsearch/index/query/MatchQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/MatchQueryBuilderTests.java index b1819af573a4f..be96503a382b6 100644 --- a/server/src/test/java/org/elasticsearch/index/query/MatchQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/MatchQueryBuilderTests.java @@ -240,7 +240,11 @@ public void testIllegalValues() { } } - public void testSimpleMatchQuery() throws IOException { + public void testParseDefaultsRemoved() throws IOException { + /* + * This json includes many defaults. When we parse the query and then + * call toString on it all of the defaults are removed. + */ String json = """ { "match" : { @@ -258,12 +262,33 @@ public void testSimpleMatchQuery() throws IOException { } }"""; MatchQueryBuilder qb = (MatchQueryBuilder) parseQuery(json); - checkGeneratedJson(json, qb); + checkGeneratedJson(""" + { + "match": { + "message": { + "query": "to be or not to be", + "operator": "AND", + "zero_terms_query": "ALL" + } + } + }""", qb); assertEquals(json, "to be or not to be", qb.value()); assertEquals(json, Operator.AND, qb.operator()); } + public void testToXConentWithDefaults() throws IOException { + QueryBuilder query = new MatchQueryBuilder("foo", "bar"); + checkGeneratedJson(""" + { + "match": { + "foo": { + "query": "bar" + } + } + }""", query); + } + public void testFuzzinessOnNonStringField() throws Exception { MatchQueryBuilder query = new MatchQueryBuilder(INT_FIELD_NAME, 42); query.fuzziness(randomFuzziness(INT_FIELD_NAME)); diff --git a/server/src/test/java/org/elasticsearch/index/query/NestedQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/NestedQueryBuilderTests.java index a9f121e9dc349..56a0845a6d7a3 100644 --- a/server/src/test/java/org/elasticsearch/index/query/NestedQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/NestedQueryBuilderTests.java @@ -139,7 +139,11 @@ public void testValidate() { assertThat(e.getMessage(), equalTo("[nested] requires 'score_mode' field")); } - public void testFromJson() throws IOException { + public void testParseDefaultsRemoved() throws IOException { + /* + * This json includes many defaults. 
When we parse the query and then + * call toString on it all of the defaults are removed. + */ String json = """ { "nested" : { @@ -178,7 +182,34 @@ public void testFromJson() throws IOException { }"""; NestedQueryBuilder parsed = (NestedQueryBuilder) parseQuery(json); - checkGeneratedJson(json, parsed); + checkGeneratedJson(""" + { + "nested" : { + "query" : { + "bool" : { + "must" : [ { + "match" : { + "obj1.name" : { + "query" : "blue" + } + } + }, { + "range" : { + "obj1.count" : { + "gt" : 5, + "boost" : 1.0 + } + } + } ], + "boost" : 1.0 + } + }, + "path" : "obj1", + "ignore_unmapped" : false, + "score_mode" : "avg", + "boost" : 1.0 + } + }""", parsed); assertEquals(json, ScoreMode.Avg, parsed.scoreMode()); } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/utils/XContentObjectTransformerTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/utils/XContentObjectTransformerTests.java index 38a1ab4f45c79..ae043a6e73de4 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/utils/XContentObjectTransformerTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/utils/XContentObjectTransformerTests.java @@ -59,23 +59,7 @@ public void testFromMap() throws IOException { assertXContentAreEqual(aggTransformer.fromMap(aggMap), aggMap); assertXContentAreEqual(aggTransformer.fromMap(aggMap), aggTransformer.toMap(aggTransformer.fromMap(aggMap))); - Map queryMap = Collections.singletonMap( - "match", - Collections.singletonMap("fieldName", new HashMap() { - { - // Add all the default fields so they are not added dynamically when the object is parsed - put("query", "fieldValue"); - put("operator", "OR"); - put("prefix_length", 0); - put("max_expansions", 50); - put("fuzzy_transpositions", true); - put("lenient", false); - put("zero_terms_query", "NONE"); - put("auto_generate_synonyms_phrase_query", true); - put("boost", 1.0); - } - }) - ); + Map queryMap = Map.of("match", Map.of("fieldName", Map.of("query", "fieldValue"))); XContentObjectTransformer queryBuilderTransformer = XContentObjectTransformer.queryBuilderTransformer( xContentRegistry() From 4fbbcda4945624590d46b57dda29099167f6ad7d Mon Sep 17 00:00:00 2001 From: Lisa Cawley Date: Fri, 11 Feb 2022 11:14:11 -0800 Subject: [PATCH 070/167] [DOCS] Fix nesting in bucket correlation aggregation (#83816) --- .../bucket-correlation-aggregation.asciidoc | 27 +++++++++++-------- 1 file changed, 16 insertions(+), 11 deletions(-) diff --git a/docs/reference/aggregations/pipeline/bucket-correlation-aggregation.asciidoc b/docs/reference/aggregations/pipeline/bucket-correlation-aggregation.asciidoc index 55b14b5d9adc2..841632124805f 100644 --- a/docs/reference/aggregations/pipeline/bucket-correlation-aggregation.asciidoc +++ b/docs/reference/aggregations/pipeline/bucket-correlation-aggregation.asciidoc @@ -31,21 +31,29 @@ The correlation function to execute. The configuration to calculate a count correlation. This function is designed for determining the correlation of a term value and a given metric. Consequently, it needs to meet the following requirements. ++ +-- * The `buckets_path` must point to a `_count` metric. * The total count of all the `bucket_path` count values must be less than or equal to `indicator.doc_count`. * When utilizing this function, an initial calculation to gather the required `indicator` values is required. 
- +-- ++ .Properties of `count_correlation` [%collapsible%open] ===== `indicator`::: (Required, object) The indicator with which to correlate the configured `bucket_path` values. - ++ .Properties of `indicator` [%collapsible%open] -===== +====== +`doc_count`::: +(Required, integer) +The total number of documents that initially created the `expectations`. It's required to be greater than or equal to the sum +of all values in the `buckets_path` as this is the originating superset of data to which the term values are correlated. + `expectations`::: (Required, array) An array of numbers with which to correlate the configured `bucket_path` values. The length of this value must always equal @@ -55,12 +63,7 @@ the number of buckets returned by the `bucket_path`. (Optional, array) An array of fractions to use when averaging and calculating variance. This should be used if the pre-calculated data and the `buckets_path` have known gaps. The length of `fractions`, if provided, must equal `expectations`. - -`doc_count`::: -(Required, integer) -The total number of documents that initially created the `expectations`. It's required to be greater than or equal to the sum -of all values in the `buckets_path` as this is the originating superset of data to which the term values are correlated. -===== +====== ===== ==== @@ -75,8 +78,10 @@ A `bucket_correlation` aggregation looks like this in isolation: "buckets_path": "range_values>_count", <1> "function": { "count_correlation": { <2> - "expectations": [...], - "doc_count": 10000 + "indicator": { + "expectations": [...], + "doc_count": 10000 + } } } } From 5ef77ef370f4743d7f8779f6fe1dbf2178a9f60b Mon Sep 17 00:00:00 2001 From: edh-oss <42759970+edh-oss@users.noreply.github.com> Date: Fri, 11 Feb 2022 14:19:30 -0500 Subject: [PATCH 071/167] Add/update source block delimeters (#83624) Asciidoc source blocks are to be delimited with four dashes. This adds missing delimiters, and updates some that contained only three dashes. It matters for parsing purposes. --- docs/reference/how-to/fix-common-cluster-issues.asciidoc | 8 ++++---- docs/reference/sql/functions/conditional.asciidoc | 7 ++++++- 2 files changed, 10 insertions(+), 5 deletions(-) diff --git a/docs/reference/how-to/fix-common-cluster-issues.asciidoc b/docs/reference/how-to/fix-common-cluster-issues.asciidoc index cc778ea0d79e4..28d79f63761eb 100644 --- a/docs/reference/how-to/fix-common-cluster-issues.asciidoc +++ b/docs/reference/how-to/fix-common-cluster-issues.asciidoc @@ -692,9 +692,9 @@ see the number of active threads in each thread pool and how many tasks are queued, how many have been rejected, and how many have completed. [source,console] ---- +---- GET /_cat/thread_pool?v&s=t,n&h=type,name,node_name,active,queue,rejected,completed ---- +---- **Inspect the hot threads on each node** @@ -704,9 +704,9 @@ to determine if the thread has sufficient resources to progress and gauge how quickly it is progressing. 
[source,console] ---- +---- GET /_nodes/hot_threads ---- +---- **Look for long running tasks** diff --git a/docs/reference/sql/functions/conditional.asciidoc b/docs/reference/sql/functions/conditional.asciidoc index d7c53e94c4bbf..f70e6de9e6925 100644 --- a/docs/reference/sql/functions/conditional.asciidoc +++ b/docs/reference/sql/functions/conditional.asciidoc @@ -86,10 +86,12 @@ E.g.: for the following query: [source, sql] +---- CASE WHEN a = 1 THEN null WHEN a > 2 THEN 10 WHEN a > 5 THEN 'foo' END +---- an error message would be returned, mentioning that *'foo'* is of data type *keyword*, which does not match the expected data type *integer* (based on result *10*). @@ -105,6 +107,7 @@ interesting than every single value, CASE can create custom buckets as in the following example: [source, sql] +---- SELECT count(*) AS count, CASE WHEN NVL(languages, 0) = 0 THEN 'zero' WHEN languages = 1 THEN 'one' @@ -115,7 +118,7 @@ SELECT count(*) AS count, FROM employees GROUP BY lang_skills ORDER BY lang_skills; - +---- With this query, one can create normal grouping buckets for values _0, 1, 2, 3_ with descriptive names, and every value _>= 4_ falls into the _multilingual_ bucket. @@ -282,7 +285,9 @@ include-tagged::{sql-specs}/docs/docs.csv-spec[iifWithoutDefaultValue] expression. E.g.: [source, sql] +---- IIF(a = 1, 'one', IIF(a = 2, 'two', IIF(a = 3, 'three', 'many'))) +---- ================= From f45621fb3ea46223d3dd9d28fb4044ae61da421e Mon Sep 17 00:00:00 2001 From: David Turner Date: Fri, 11 Feb 2022 19:38:05 +0000 Subject: [PATCH 072/167] Remove LegacyCTRAListener from ShardStateAction (#83842) Relates #83784 --- .../action/shard/ShardStateAction.java | 60 +++++++++++++------ 1 file changed, 42 insertions(+), 18 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/cluster/action/shard/ShardStateAction.java b/server/src/main/java/org/elasticsearch/cluster/action/shard/ShardStateAction.java index 720fc0ce9efa7..3449423c15f1f 100644 --- a/server/src/main/java/org/elasticsearch/cluster/action/shard/ShardStateAction.java +++ b/server/src/main/java/org/elasticsearch/cluster/action/shard/ShardStateAction.java @@ -333,7 +333,7 @@ public ClusterTasksResult execute(ClusterState currentSta entry, entry.getShardId().getIndex() ); - batchResultBuilder.success(task, new LegacyClusterTaskResultActionListener(task, currentState)); + batchResultBuilder.success(task, task.newPublicationListener()); } else { // The primary term is 0 if the shard failed itself. It is > 0 if a write was done on a primary but was failed to be // replicated to the shard copy with the provided allocation id. 
In case where the shard failed itself, it's ok to just @@ -393,7 +393,7 @@ public ClusterTasksResult execute(ClusterState currentSta } else { // tasks that correspond to non-existent shards are marked as successful logger.debug("{} ignoring shard failed task [{}] (shard does not exist anymore)", entry.getShardId(), entry); - batchResultBuilder.success(task, new LegacyClusterTaskResultActionListener(task, currentState)); + batchResultBuilder.success(task, task.newPublicationListener()); } } else { // failing a shard also possibly marks it as stale (see IndexMetadataUpdater) @@ -409,7 +409,7 @@ public ClusterTasksResult execute(ClusterState currentSta try { maybeUpdatedState = applyFailedShards(currentState, failedShardsToBeApplied, staleShardsToBeApplied); for (var task : tasksToBeApplied) { - batchResultBuilder.success(task, new LegacyClusterTaskResultActionListener(task, currentState)); + batchResultBuilder.success(task, task.newPublicationListener()); } } catch (Exception e) { logger.warn(() -> new ParameterizedMessage("failed to apply failed shards {}", failedShardsToBeApplied), e); @@ -537,6 +537,21 @@ public record FailedShardUpdateTask(FailedShardEntry entry, ActionListener newPublicationListener() { + return new ActionListener<>() { + @Override + public void onResponse(ClusterState clusterState) { + listener.onResponse(TransportResponse.Empty.INSTANCE); + } + + @Override + public void onFailure(Exception e) { + // delegate to task's onFailure for logging + FailedShardUpdateTask.this.onFailure(e); + } + }; + } + @Override public void onFailure(Exception e) { if (e instanceof NotMasterException) { @@ -551,7 +566,7 @@ public void onFailure(Exception e) { @Override public void clusterStateProcessed(ClusterState oldState, ClusterState newState) { - listener.onResponse(TransportResponse.Empty.INSTANCE); + assert false : "should not be called"; } } @@ -639,7 +654,7 @@ public ClusterTasksResult execute(ClusterState currentSt // requests might still be in flight even after the shard has already been started or failed on the master. We just // ignore these requests for now. 
logger.debug("{} ignoring shard started task [{}] (shard does not exist anymore)", entry.shardId, entry); - builder.success(task, new LegacyClusterTaskResultActionListener(task, currentState)); + builder.success(task, task.newPublicationListener()); } else { if (matched.primary() && entry.primaryTerm > 0) { final IndexMetadata indexMetadata = currentState.metadata().index(entry.shardId.getIndex()); @@ -660,7 +675,7 @@ public ClusterTasksResult execute(ClusterState currentSt entry.primaryTerm, currentPrimaryTerm ); - builder.success(task, new LegacyClusterTaskResultActionListener(task, currentState)); + builder.success(task, task.newPublicationListener()); continue; } } @@ -673,7 +688,7 @@ public ClusterTasksResult execute(ClusterState currentSt entry, matched ); - builder.success(task, new LegacyClusterTaskResultActionListener(task, currentState)); + builder.success(task, task.newPublicationListener()); } else { // remove duplicate actions as allocation service expects a clean list without duplicates if (seenShardRoutings.contains(matched)) { @@ -730,7 +745,7 @@ public ClusterTasksResult execute(ClusterState currentSt assert assertStartedIndicesHaveCompleteTimestampRanges(maybeUpdatedState); for (var task : tasksToBeApplied) { - builder.success(task, new LegacyClusterTaskResultActionListener(task, currentState)); + builder.success(task, task.newPublicationListener()); } } catch (Exception e) { logger.warn(() -> new ParameterizedMessage("failed to apply started shards {}", shardRoutingsToBeApplied), e); @@ -837,20 +852,29 @@ public int hashCode() { } } - public static class StartedShardUpdateTask implements ClusterStateTaskListener { - - private final StartedShardEntry entry; - private final ActionListener listener; - - public StartedShardUpdateTask(StartedShardEntry entry, ActionListener listener) { - this.entry = entry; - this.listener = listener; - } + public record StartedShardUpdateTask(StartedShardEntry entry, ActionListener listener) + implements + ClusterStateTaskListener { public StartedShardEntry getEntry() { return entry; } + public ActionListener newPublicationListener() { + return new ActionListener<>() { + @Override + public void onResponse(ClusterState clusterState) { + listener.onResponse(TransportResponse.Empty.INSTANCE); + } + + @Override + public void onFailure(Exception e) { + // delegate to task's onFailure for logging + StartedShardUpdateTask.this.onFailure(e); + } + }; + } + @Override public void onFailure(Exception e) { if (e instanceof NotMasterException) { @@ -865,7 +889,7 @@ public void onFailure(Exception e) { @Override public void clusterStateProcessed(ClusterState oldState, ClusterState newState) { - listener.onResponse(TransportResponse.Empty.INSTANCE); + assert false : "should not be called"; } @Override From ba95c87b509cc83f96c3c5d975db3a7c6a59057f Mon Sep 17 00:00:00 2001 From: Fabio Busatto <52658645+bytebilly@users.noreply.github.com> Date: Fri, 11 Feb 2022 21:02:45 +0100 Subject: [PATCH 073/167] Fix version variable in docker-compose.yml (#83823) --- docs/reference/setup/install/docker-compose.yml | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/docs/reference/setup/install/docker-compose.yml b/docs/reference/setup/install/docker-compose.yml index 3d8b92ffcce58..bb99575d7eefd 100644 --- a/docs/reference/setup/install/docker-compose.yml +++ b/docs/reference/setup/install/docker-compose.yml @@ -2,7 +2,7 @@ version: "2.2" services: setup: - image: docker.elastic.co/elasticsearch/elasticsearch:{version} + image: 
docker.elastic.co/elasticsearch/elasticsearch:${STACK_VERSION} volumes: - certs:/usr/share/elasticsearch/config/certs user: "0" @@ -66,7 +66,7 @@ services: depends_on: setup: condition: service_healthy - image: {docker-repo}:{version} + image: docker.elastic.co/elasticsearch/elasticsearch:${STACK_VERSION} volumes: - certs:/usr/share/elasticsearch/config/certs - esdata01:/usr/share/elasticsearch/data @@ -109,7 +109,7 @@ services: es02: depends_on: - es01 - image: {docker-repo}:{version} + image: docker.elastic.co/elasticsearch/elasticsearch:${STACK_VERSION} volumes: - certs:/usr/share/elasticsearch/config/certs - esdata02:/usr/share/elasticsearch/data @@ -149,7 +149,7 @@ services: es03: depends_on: - es02 - image: {docker-repo}:{version} + image: docker.elastic.co/elasticsearch/elasticsearch:${STACK_VERSION} volumes: - certs:/usr/share/elasticsearch/config/certs - esdata03:/usr/share/elasticsearch/data @@ -194,7 +194,7 @@ services: condition: service_healthy es03: condition: service_healthy - image: docker.elastic.co/kibana/kibana:{version} + image: docker.elastic.co/kibana/kibana:${STACK_VERSION} volumes: - certs:/usr/share/kibana/config/certs - kibanadata:/usr/share/kibana/data From b2e196fe9188b71a5c9a216c03f9808e0f77515d Mon Sep 17 00:00:00 2001 From: Adam Locke Date: Fri, 11 Feb 2022 15:16:58 -0500 Subject: [PATCH 074/167] [DOCS] Replace mentions of tls_auto_config directory (#83856) Updates directory mentions of `config/tls_auto_config_` to `config/certs`. --- docs/reference/setup/install/connect-clients.asciidoc | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/reference/setup/install/connect-clients.asciidoc b/docs/reference/setup/install/connect-clients.asciidoc index 19c3d556b1d5e..dcc40ac385a99 100644 --- a/docs/reference/setup/install/connect-clients.asciidoc +++ b/docs/reference/setup/install/connect-clients.asciidoc @@ -26,7 +26,7 @@ path is to the auto-generated CA certificate for the HTTP layer. [source,sh] ---- -openssl x509 -fingerprint -sha256 -in config/tls_auto_config_/http_ca.crt +openssl x509 -fingerprint -sha256 -in config/certs/http_ca.crt ---- ``:: The timestamp of when the auto-configuration process created the security files directory. @@ -45,6 +45,6 @@ SHA256 Fingerprint= If your library doesn't support a method of validating the fingerprint, the auto-generated CA certificate is created in the -`config/tls_auto_config_` directory on each {es} node. Copy the +`config/certs` directory on each {es} node. Copy the `http_ca.crt` file to your machine and configure your client to use this -certificate to establish trust when it connects to {es}. \ No newline at end of file +certificate to establish trust when it connects to {es}. From 950c231e04d81fb0c9d45edaa3df48851f319292 Mon Sep 17 00:00:00 2001 From: Adam Locke Date: Fri, 11 Feb 2022 16:07:38 -0500 Subject: [PATCH 075/167] [DOCS] Clarify when changes are replicated in CCR (#83863) Removes a table, which some users found confusing (refer to https://github.com/elastic/sdh-elasticsearch/issues/4009). Clarifies language around when changes are replicated from the leader index to the follower index. 
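As a rough illustration of the behavior the reworded docs describe, the following sketch shows the idea of filtering leader-local settings out of replication. This is not the actual CCR code: the contents of LOCAL_TO_LEADER are assumptions for the example, and only index.number_of_replicas is named in the docs as a setting that is not replicated.

import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;

public class LeaderSettingsReplicationSketch {

    // Assumed set of leader-local settings for this example; the real list lives in the
    // CCR implementation and is longer.
    private static final Set<String> LOCAL_TO_LEADER = Set.of("index.number_of_replicas");

    // Replicate every leader setting except the ones that are local to the leader index.
    static Map<String, String> replicatedSettings(Map<String, String> leaderSettings) {
        return leaderSettings.entrySet()
            .stream()
            .filter(e -> LOCAL_TO_LEADER.contains(e.getKey()) == false)
            .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));
    }

    public static void main(String[] args) {
        Map<String, String> leader = Map.of(
            "index.number_of_replicas", "2", // stays local to the leader
            "index.refresh_interval", "5s"   // replicated to the follower
        );
        // Prints {index.refresh_interval=5s}: the replica count change is not replicated.
        System.out.println(replicatedSettings(leader));
    }
}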
--- docs/reference/ccr/index.asciidoc | 18 +++++------------- 1 file changed, 5 insertions(+), 13 deletions(-) diff --git a/docs/reference/ccr/index.asciidoc b/docs/reference/ccr/index.asciidoc index c39b875446598..f42f0d0f51bf0 100644 --- a/docs/reference/ccr/index.asciidoc +++ b/docs/reference/ccr/index.asciidoc @@ -216,19 +216,11 @@ To manage how operations are replicated from the leader index, you can configure settings when <>. -The follower index automatically retrieves some updates applied to the leader -index, while other updates are retrieved as needed: - -[cols="3"] -|=== -h| Update type h| Automatic h| As needed -| Alias | {yes-icon} | {no-icon} -| Mapping | {no-icon} | {yes-icon} -| Settings | {no-icon} | {yes-icon} -|=== - -For example, changing the number of replicas on the leader index is not -replicated by the follower index, so that setting might not be retrieved. +Changes in the index mapping on the leader index are replicated to the +follower index as soon as possible. This behavior is true for index +settings as well, except for some settings that are local to the leader +index. For example, changing the number of replicas on the leader index is +not replicated by the follower index, so that setting might not be retrieved. If you apply a non-dynamic settings change to the leader index that is needed by the follower index, the follower index closes itself, applies the From 6f8db84885973d5ea3210a8f717d07b1ea6d7dee Mon Sep 17 00:00:00 2001 From: Adam Locke Date: Fri, 11 Feb 2022 16:23:17 -0500 Subject: [PATCH 076/167] [DOCS] Moving Docker YAML and .env to a separate directory (#83864) --- docs/reference/setup/install/docker.asciidoc | 6 +++--- docs/reference/setup/install/{ => docker}/.env | 0 .../reference/setup/install/{ => docker}/docker-compose.yml | 0 3 files changed, 3 insertions(+), 3 deletions(-) rename docs/reference/setup/install/{ => docker}/.env (100%) rename docs/reference/setup/install/{ => docker}/docker-compose.yml (100%) diff --git a/docs/reference/setup/install/docker.asciidoc b/docs/reference/setup/install/docker.asciidoc index 43375b63c6619..99bfeeb223391 100644 --- a/docs/reference/setup/install/docker.asciidoc +++ b/docs/reference/setup/install/docker.asciidoc @@ -249,7 +249,7 @@ Settings (Windows). Create the following configuration files in a new, empty directory. These files are also available from the -https://github.com/elastic/elasticsearch/tree/master/docs/reference/setup/install[elasticsearch] +https://github.com/elastic/elasticsearch/tree/master/docs/reference/setup/install/docker[elasticsearch] repository on GitHub. -- @@ -276,7 +276,7 @@ referenced by the `docker-compose.yml` file. ["source","txt",subs="attributes"] ---- -include::.env[] +include::docker/.env[] ---- [discrete] @@ -297,7 +297,7 @@ then only be accessible from the host machine itself. 
[source,yaml,subs="attributes"] ---- -include::docker-compose.yml[] +include::docker/docker-compose.yml[] ---- endif::[] diff --git a/docs/reference/setup/install/.env b/docs/reference/setup/install/docker/.env similarity index 100% rename from docs/reference/setup/install/.env rename to docs/reference/setup/install/docker/.env diff --git a/docs/reference/setup/install/docker-compose.yml b/docs/reference/setup/install/docker/docker-compose.yml similarity index 100% rename from docs/reference/setup/install/docker-compose.yml rename to docs/reference/setup/install/docker/docker-compose.yml From 4fddf98ac3e383ea5ad428dc8d8105b639c03d6a Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Fri, 11 Feb 2022 16:58:08 -0500 Subject: [PATCH 077/167] Shrink slow log for rank_feature query (#83847) This removes the `boost` from the `toXContent` of `rank_feature` if it is the default. It also removes the score function if it is the default. Relates to #76515 --- .../mapper/extras/RankFeatureQueryBuilder.java | 9 ++++++--- .../extras/RankFeatureQueryBuilderTests.java | 17 +++++++++++++++++ 2 files changed, 23 insertions(+), 3 deletions(-) diff --git a/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/RankFeatureQueryBuilder.java b/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/RankFeatureQueryBuilder.java index 2b3ec09abdfd7..25f0786534110 100644 --- a/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/RankFeatureQueryBuilder.java +++ b/modules/mapper-extras/src/main/java/org/elasticsearch/index/mapper/extras/RankFeatureQueryBuilder.java @@ -32,6 +32,7 @@ * Query to run on a [rank_feature] field. */ public final class RankFeatureQueryBuilder extends AbstractQueryBuilder { + private static final ScoreFunction DEFAULT_SCORE_FUNCTION = new ScoreFunction.Saturation(); /** * Scoring function for a [rank_feature] field. 
@@ -309,7 +310,7 @@ private static ScoreFunction readScoreFunction(StreamInput in) throws IOExceptio if (numNonNulls > 1) { throw new IllegalArgumentException("Can only specify one of [log], [saturation], [sigmoid] and [linear]"); } else if (numNonNulls == 0) { - query = new RankFeatureQueryBuilder(field, new ScoreFunction.Saturation()); + query = new RankFeatureQueryBuilder(field, DEFAULT_SCORE_FUNCTION); } else { ScoreFunction scoreFunction = (ScoreFunction) Arrays.stream(args, 3, args.length).filter(Objects::nonNull).findAny().get(); query = new RankFeatureQueryBuilder(field, scoreFunction); @@ -368,8 +369,10 @@ protected void doWriteTo(StreamOutput out) throws IOException { protected void doXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(getName()); builder.field("field", field); - scoreFunction.doXContent(builder); - printBoostAndQueryName(builder); + if (false == scoreFunction.equals(DEFAULT_SCORE_FUNCTION)) { + scoreFunction.doXContent(builder); + } + boostAndQueryNameToXContent(builder); builder.endObject(); } diff --git a/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/RankFeatureQueryBuilderTests.java b/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/RankFeatureQueryBuilderTests.java index c4e7e35c646a1..4bd2d0714e93d 100644 --- a/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/RankFeatureQueryBuilderTests.java +++ b/modules/mapper-extras/src/test/java/org/elasticsearch/index/mapper/extras/RankFeatureQueryBuilderTests.java @@ -147,4 +147,21 @@ public void testIllegalCombination() { e.getMessage() ); } + + public void testParseDefaultsRemoved() throws IOException { + String json = """ + { + "rank_feature" : { + "field": "foo", + "boost": 1, + "saturation": {} + } + }"""; + checkGeneratedJson(""" + { + "rank_feature": { + "field": "foo" + } + }""", parseQuery(json)); + } } From a9cdbf42c6140e7c0e2ac49d8ccd25683741084d Mon Sep 17 00:00:00 2001 From: Yang Wang Date: Mon, 14 Feb 2022 14:34:43 +1100 Subject: [PATCH 078/167] Clean up for superuser role name references (#83627) The _xpack_security user no longer has the superuser role since #81400 --- .../elasticsearch/xpack/core/security/user/UsernamesField.java | 2 +- .../security/enrollment/InternalEnrollmentTokenGenerator.java | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/user/UsernamesField.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/user/UsernamesField.java index c5e17656658e6..9ba7c01eb69e2 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/user/UsernamesField.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/user/UsernamesField.java @@ -15,7 +15,7 @@ public final class UsernamesField { public static final String SYSTEM_NAME = "_system"; public static final String SYSTEM_ROLE = "_system"; public static final String XPACK_SECURITY_NAME = "_xpack_security"; - public static final String XPACK_SECURITY_ROLE = "superuser"; + public static final String XPACK_SECURITY_ROLE = "_xpack_security"; public static final String XPACK_NAME = "_xpack"; public static final String XPACK_ROLE = "_xpack"; public static final String LOGSTASH_NAME = "logstash_system"; diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/enrollment/InternalEnrollmentTokenGenerator.java 
b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/enrollment/InternalEnrollmentTokenGenerator.java
index 53f0eacb88b06..455fd0c83ea79 100644
--- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/enrollment/InternalEnrollmentTokenGenerator.java
+++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/enrollment/InternalEnrollmentTokenGenerator.java
@@ -51,7 +51,7 @@ public class InternalEnrollmentTokenGenerator extends BaseEnrollmentTokenGenerat
     public InternalEnrollmentTokenGenerator(Environment environment, SSLService sslService, Client client) {
         this.environment = environment;
         this.sslService = sslService;
-        // enrollment tokens API keys will be owned by the "_xpack_security" system user ("superuser" role)
+        // enrollment tokens API keys will be owned by the "_xpack_security" system user
         this.client = new OriginSettingClient(client, SECURITY_ORIGIN);
     }

From 446fdcd027fbc9e4dc1cf9bed3dd0e6d4086ae55 Mon Sep 17 00:00:00 2001
From: Yang Wang
Date: Mon, 14 Feb 2022 14:57:01 +1100
Subject: [PATCH 079/167] User Profile: Add initial search profile API
 (#83191)

This PR adds an initial API for search user profiles. As discussed, it is
intentionally kept minimal and, in its basic form, implements:

* It's a dedicated API (no query DSL)
* It searches against name fields (username, full name, etc.) with the given string
* It takes a size parameter to control the response size

It also optionally returns the data section of the profile document since
we agreed that read is not subject to namespacing.
---
 docs/changelog/83191.yaml | 5 +
 .../action/profile/GetProfileRequest.java | 14 +-
 .../core/security/action/profile/Profile.java | 10 +-
 .../action/profile/SearchProfilesAction.java | 20 +++
 .../action/profile/SearchProfilesRequest.java | 71 +++++++++
 .../profile/SearchProfilesResponse.java | 109 ++++++++++++++
 .../xpack/security/operator/Constants.java | 1 +
 .../xpack/security/profile/ProfileIT.java | 23 +++
 .../profile/ProfileSingleNodeTests.java | 135 ++++++++++++++++--
 .../xpack/security/Security.java | 9 +-
 .../profile/TransportGetProfileAction.java | 2 +-
 .../TransportSearchProfilesAction.java | 35 +++++
 .../security/profile/ProfileService.java | 73 +++++++++-
 .../profile/RestSearchProfilesAction.java | 75 ++++++++++
 14 files changed, 559 insertions(+), 23 deletions(-)
 create mode 100644 docs/changelog/83191.yaml
 create mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/profile/SearchProfilesAction.java
 create mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/profile/SearchProfilesRequest.java
 create mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/profile/SearchProfilesResponse.java
 create mode 100644 x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/profile/TransportSearchProfilesAction.java
 create mode 100644 x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/profile/RestSearchProfilesAction.java

diff --git a/docs/changelog/83191.yaml b/docs/changelog/83191.yaml
new file mode 100644
index 0000000000000..f8d11586ae712
--- /dev/null
+++ b/docs/changelog/83191.yaml
@@ -0,0 +1,5 @@
+pr: 83191
+summary: "User Profile: Add initial search profile API"
+area: Security
+type: enhancement
+issues: []
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/profile/GetProfileRequest.java
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/profile/GetProfileRequest.java index bc9453fd37f91..17e91b4b8c984 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/profile/GetProfileRequest.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/profile/GetProfileRequest.java @@ -18,32 +18,32 @@ public class GetProfileRequest extends ActionRequest { private final String uid; - private final Set datKeys; + private final Set dataKeys; - public GetProfileRequest(String uid, Set datKeys) { + public GetProfileRequest(String uid, Set dataKeys) { this.uid = uid; - this.datKeys = datKeys; + this.dataKeys = dataKeys; } public GetProfileRequest(StreamInput in) throws IOException { super(in); this.uid = in.readString(); - this.datKeys = in.readSet(StreamInput::readString); + this.dataKeys = in.readSet(StreamInput::readString); } public String getUid() { return uid; } - public Set getDatKeys() { - return datKeys; + public Set getDataKeys() { + return dataKeys; } @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); out.writeString(uid); - out.writeStringCollection(datKeys); + out.writeStringCollection(dataKeys); } @Override diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/profile/Profile.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/profile/Profile.java index 24e2f61f2908e..5b04b6f054248 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/profile/Profile.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/profile/Profile.java @@ -124,15 +124,19 @@ public Profile(StreamInput in) throws IOException { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); + innerToXContent(builder, params); + versionControl.toXContent(builder, params); + builder.endObject(); + return builder; + } + + public void innerToXContent(XContentBuilder builder, Params params) throws IOException { builder.field("uid", uid); builder.field("enabled", enabled); builder.field("last_synchronized", lastSynchronized); user.toXContent(builder, params); builder.field("access", access); builder.field("data", applicationData); - versionControl.toXContent(builder, params); - builder.endObject(); - return builder; } @Override diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/profile/SearchProfilesAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/profile/SearchProfilesAction.java new file mode 100644 index 0000000000000..35c6d7f9dd148 --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/profile/SearchProfilesAction.java @@ -0,0 +1,20 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.core.security.action.profile; + +import org.elasticsearch.action.ActionType; + +public class SearchProfilesAction extends ActionType { + + public static final String NAME = "cluster:admin/xpack/security/profile/search"; + public static final SearchProfilesAction INSTANCE = new SearchProfilesAction(); + + public SearchProfilesAction() { + super(NAME, SearchProfilesResponse::new); + } +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/profile/SearchProfilesRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/profile/SearchProfilesRequest.java new file mode 100644 index 0000000000000..20e48247943d6 --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/profile/SearchProfilesRequest.java @@ -0,0 +1,71 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.core.security.action.profile; + +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; + +import java.io.IOException; +import java.util.Objects; +import java.util.Set; + +import static org.elasticsearch.action.ValidateActions.addValidationError; + +public class SearchProfilesRequest extends ActionRequest { + + private final Set dataKeys; + /** + * String to search name related fields of a profile document + */ + private final String name; + private final int size; + + public SearchProfilesRequest(Set dataKeys, String name, int size) { + this.dataKeys = Objects.requireNonNull(dataKeys, "data parameter must not be null"); + this.name = Objects.requireNonNull(name, "name must not be null"); + this.size = size; + } + + public SearchProfilesRequest(StreamInput in) throws IOException { + super(in); + this.dataKeys = in.readSet(StreamInput::readString); + this.name = in.readOptionalString(); + this.size = in.readVInt(); + } + + public Set getDataKeys() { + return dataKeys; + } + + public String getName() { + return name; + } + + public int getSize() { + return size; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeStringCollection(dataKeys); + out.writeOptionalString(name); + out.writeVInt(size); + } + + @Override + public ActionRequestValidationException validate() { + ActionRequestValidationException validationException = null; + if (size < 0) { + validationException = addValidationError("[size] parameter cannot be negative but was [" + size + "]", validationException); + } + return validationException; + } +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/profile/SearchProfilesResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/profile/SearchProfilesResponse.java new file mode 100644 index 0000000000000..955e60d2c289f --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/profile/SearchProfilesResponse.java @@ -0,0 +1,109 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.core.security.action.profile; + +import org.apache.lucene.search.TotalHits; +import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.lucene.Lucene; +import org.elasticsearch.xcontent.ToXContentObject; +import org.elasticsearch.xcontent.XContentBuilder; + +import java.io.IOException; + +public class SearchProfilesResponse extends ActionResponse implements ToXContentObject { + + private final ProfileHit[] profileHits; + private final long tookInMillis; + private final TotalHits totalHits; + + public SearchProfilesResponse(ProfileHit[] profileHits, long tookInMillis, TotalHits totalHits) { + this.profileHits = profileHits; + this.tookInMillis = tookInMillis; + this.totalHits = totalHits; + } + + public SearchProfilesResponse(StreamInput in) throws IOException { + super(in); + this.profileHits = in.readArray(ProfileHit::new, ProfileHit[]::new); + this.tookInMillis = in.readVLong(); + this.totalHits = Lucene.readTotalHits(in); + } + + public ProfileHit[] getProfileHits() { + return profileHits; + } + + public long getTookInMillis() { + return tookInMillis; + } + + public TotalHits getTotalHits() { + return totalHits; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeArray(profileHits); + out.writeVLong(tookInMillis); + Lucene.writeTotalHits(out, totalHits); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + { + builder.field("took", tookInMillis); + builder.startObject("total"); + { + builder.field("value", totalHits.value); + builder.field("relation", totalHits.relation == TotalHits.Relation.EQUAL_TO ? "eq" : "gte"); + } + builder.endObject(); + builder.startArray("users"); + { + for (ProfileHit profileHit : profileHits) { + profileHit.toXContent(builder, params); + } + } + builder.endArray(); + } + builder.endObject(); + return builder; + } + + public record ProfileHit(Profile profile, float score) implements Writeable, ToXContentObject { + + public ProfileHit(StreamInput in) throws IOException { + this(new Profile(in), in.readFloat()); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + profile.writeTo(out); + out.writeFloat(score); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + { + builder.field("_score", score); + builder.field("uid", profile.uid()); + profile.user().toXContent(builder, params); + builder.field("access", profile.access()); + builder.field("data", profile.applicationData()); + // TODO: output a field of sort which is just score plus uid? 
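+            // Illustrative shape of a rendered hit (values hypothetical), mirroring the fields written above:
+            // {"_score": 1.0, "uid": "u_...", <fields from profile.user()>, "access": {...}, "data": {...}}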
+ } + builder.endObject(); + return builder; + } + } +} diff --git a/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java b/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java index 1bcfdc99fea29..1cff2e41c26fb 100644 --- a/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java +++ b/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java @@ -197,6 +197,7 @@ public class Constants { "cluster:admin/xpack/security/profile/activate", "cluster:admin/xpack/security/profile/get", "cluster:admin/xpack/security/profile/put/data", + "cluster:admin/xpack/security/profile/search", "cluster:admin/xpack/security/realm/cache/clear", "cluster:admin/xpack/security/role/delete", "cluster:admin/xpack/security/role/get", diff --git a/x-pack/plugin/security/qa/profile/src/javaRestTest/java/org/elasticsearch/xpack/security/profile/ProfileIT.java b/x-pack/plugin/security/qa/profile/src/javaRestTest/java/org/elasticsearch/xpack/security/profile/ProfileIT.java index f6effae62e65c..9fd40792a2190 100644 --- a/x-pack/plugin/security/qa/profile/src/javaRestTest/java/org/elasticsearch/xpack/security/profile/ProfileIT.java +++ b/x-pack/plugin/security/qa/profile/src/javaRestTest/java/org/elasticsearch/xpack/security/profile/ProfileIT.java @@ -24,6 +24,8 @@ import static org.hamcrest.Matchers.anEmptyMap; import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasKey; +import static org.hamcrest.Matchers.hasSize; public class ProfileIT extends ESRestTestCase { @@ -137,6 +139,27 @@ public void testUpdateProfileData() throws IOException { assertThat(castToMap(profileMap1.get("data")), equalTo(Map.of("app1", Map.of("theme", "default")))); } + public void testSearchProfile() throws IOException { + final Map activateProfileMap = doActivateProfile(); + final String uid = (String) activateProfileMap.get("uid"); + final Request searchProfilesRequest1 = new Request(randomFrom("GET", "POST"), "_security/profile/_search"); + searchProfilesRequest1.setJsonEntity(""" + { + "name": "rac", + "size": 10 + }"""); + final Response searchProfilesResponse1 = adminClient().performRequest(searchProfilesRequest1); + assertOK(searchProfilesResponse1); + final Map searchProfileResponseMap1 = responseAsMap(searchProfilesResponse1); + assertThat(searchProfileResponseMap1, hasKey("took")); + assertThat(searchProfileResponseMap1.get("total"), equalTo(Map.of("value", 1, "relation", "eq"))); + @SuppressWarnings("unchecked") + final List> users = (List>) searchProfileResponseMap1.get("users"); + assertThat(users, hasSize(1)); + assertThat(users.get(0), hasKey("_score")); + assertThat(users.get(0).get("uid"), equalTo(uid)); + } + private Map doActivateProfile() throws IOException { final Request activateProfileRequest = new Request("POST", "_security/profile/_activate"); activateProfileRequest.setJsonEntity(""" diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/profile/ProfileSingleNodeTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/profile/ProfileSingleNodeTests.java index be85aa1eef66d..c17cb13bec458 100644 --- 
a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/profile/ProfileSingleNodeTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/profile/ProfileSingleNodeTests.java @@ -7,30 +7,42 @@ package org.elasticsearch.xpack.security.profile; +import org.apache.lucene.search.TotalHits; import org.elasticsearch.action.admin.indices.get.GetIndexAction; import org.elasticsearch.action.admin.indices.get.GetIndexRequest; +import org.elasticsearch.action.admin.indices.get.GetIndexResponse; import org.elasticsearch.action.support.WriteRequest; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.engine.DocumentMissingException; import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.core.security.action.profile.GetProfileAction; +import org.elasticsearch.xpack.core.security.action.profile.GetProfileRequest; +import org.elasticsearch.xpack.core.security.action.profile.GetProfilesResponse; import org.elasticsearch.xpack.core.security.action.profile.Profile; +import org.elasticsearch.xpack.core.security.action.profile.SearchProfilesAction; +import org.elasticsearch.xpack.core.security.action.profile.SearchProfilesRequest; +import org.elasticsearch.xpack.core.security.action.profile.SearchProfilesResponse; import org.elasticsearch.xpack.core.security.action.profile.UpdateProfileDataAction; import org.elasticsearch.xpack.core.security.action.profile.UpdateProfileDataRequest; import org.elasticsearch.xpack.core.security.action.user.PutUserAction; import org.elasticsearch.xpack.core.security.action.user.PutUserRequest; +import java.util.Arrays; import java.util.List; import java.util.Map; import java.util.Set; +import java.util.stream.Collectors; import static org.elasticsearch.test.SecuritySettingsSourceField.TEST_PASSWORD_SECURE_STRING; import static org.elasticsearch.xpack.security.support.SecuritySystemIndices.INTERNAL_SECURITY_PROFILE_INDEX_8; import static org.elasticsearch.xpack.security.support.SecuritySystemIndices.SECURITY_PROFILE_ALIAS; import static org.hamcrest.Matchers.anEmptyMap; -import static org.hamcrest.Matchers.arrayContaining; +import static org.hamcrest.Matchers.arrayWithSize; import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.containsInAnyOrder; +import static org.hamcrest.Matchers.emptyArray; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasItemInArray; import static org.hamcrest.Matchers.hasItems; import static org.hamcrest.Matchers.hasKey; import static org.hamcrest.Matchers.hasSize; @@ -49,19 +61,17 @@ protected Settings nodeSettings() { } public void testProfileIndexAutoCreation() { + // Index does not exist yet + assertThat(getProfileIndexResponse().getIndices(), not(hasItemInArray(INTERNAL_SECURITY_PROFILE_INDEX_8))); + + // Trigger index creation by indexing var indexResponse = client().prepareIndex(randomFrom(INTERNAL_SECURITY_PROFILE_INDEX_8, SECURITY_PROFILE_ALIAS)) .setSource(Map.of("user_profile", Map.of("uid", randomAlphaOfLength(22)))) .get(); - assertThat(indexResponse.status().getStatus(), equalTo(201)); - var getIndexRequest = new GetIndexRequest(); - getIndexRequest.indices(INTERNAL_SECURITY_PROFILE_INDEX_8); - - var getIndexResponse = client().execute(GetIndexAction.INSTANCE, getIndexRequest).actionGet(); - - assertThat(getIndexResponse.getIndices(), arrayContaining(INTERNAL_SECURITY_PROFILE_INDEX_8)); - + final GetIndexResponse getIndexResponse = 
getProfileIndexResponse(); + assertThat(getIndexResponse.getIndices(), hasItemInArray(INTERNAL_SECURITY_PROFILE_INDEX_8)); var aliases = getIndexResponse.getAliases().get(INTERNAL_SECURITY_PROFILE_INDEX_8); assertThat(aliases, hasSize(1)); assertThat(aliases.get(0).alias(), equalTo(SECURITY_PROFILE_ALIAS)); @@ -222,4 +232,111 @@ public void testUpdateProfileData() { () -> client().execute(UpdateProfileDataAction.INSTANCE, updateProfileDataRequest3).actionGet() ); } + + public void testSearchProfiles() { + final String nativeRacUserPasswordHash = new String(getFastStoredHashAlgoForTests().hash(NATIVE_RAC_USER_PASSWORD)); + final Map users = Map.of( + "user_foo", + "Very Curious User Foo", + "user_bar", + "Super Curious Admin Bar", + "user_baz", + "Very Anxious User Baz", + "user_qux", + "Super Anxious Admin Qux" + ); + users.forEach((key, value) -> { + final PutUserRequest putUserRequest1 = new PutUserRequest(); + putUserRequest1.username(key); + putUserRequest1.fullName(value); + putUserRequest1.roles("rac_role"); + putUserRequest1.passwordHash(nativeRacUserPasswordHash.toCharArray()); + assertThat(client().execute(PutUserAction.INSTANCE, putUserRequest1).actionGet().created(), is(true)); + doActivateProfile(key, NATIVE_RAC_USER_PASSWORD); + }); + + final SearchProfilesResponse.ProfileHit[] profiles1 = doSearch(""); + assertThat(extractUsernames(profiles1), equalTo(users.keySet())); + + final SearchProfilesResponse.ProfileHit[] profiles2 = doSearch(randomFrom("super admin", "admin super")); + assertThat(extractUsernames(profiles2), equalTo(Set.of("user_bar", "user_qux"))); + + // Prefix match on full name + final SearchProfilesResponse.ProfileHit[] profiles3 = doSearch("ver"); + assertThat(extractUsernames(profiles3), equalTo(Set.of("user_foo", "user_baz"))); + + // Prefix match on the username + final SearchProfilesResponse.ProfileHit[] profiles4 = doSearch("user"); + assertThat(extractUsernames(profiles4), equalTo(users.keySet())); + // Documents scored higher are those with matches in more fields + assertThat(extractUsernames(Arrays.copyOfRange(profiles4, 0, 2)), equalTo(Set.of("user_foo", "user_baz"))); + + // Match of different terms on different fields + final SearchProfilesResponse.ProfileHit[] profiles5 = doSearch(randomFrom("admin very", "very admin")); + assertThat(extractUsernames(profiles5), equalTo(users.keySet())); + } + + public void testProfileAPIsWhenIndexNotCreated() { + // Ensure index does not exist + assertThat(getProfileIndexResponse().getIndices(), not(hasItemInArray(INTERNAL_SECURITY_PROFILE_INDEX_8))); + + // Get Profile by ID returns empty result + final GetProfilesResponse getProfilesResponse = client().execute( + GetProfileAction.INSTANCE, + new GetProfileRequest(randomAlphaOfLength(20), Set.of()) + ).actionGet(); + assertThat(getProfilesResponse.getProfiles(), arrayWithSize(0)); + + // Ensure index does not exist + assertThat(getProfileIndexResponse().getIndices(), not(hasItemInArray(INTERNAL_SECURITY_PROFILE_INDEX_8))); + + // Search returns empty result + final SearchProfilesResponse.ProfileHit[] profiles1 = doSearch(""); + assertThat(profiles1, emptyArray()); + + // Ensure index does not exist + assertThat(getProfileIndexResponse().getIndices(), not(hasItemInArray(INTERNAL_SECURITY_PROFILE_INDEX_8))); + + // Updating profile data results into doc missing exception + // But the index is created in the process + final DocumentMissingException e1 = expectThrows( + DocumentMissingException.class, + () -> client().execute( + UpdateProfileDataAction.INSTANCE, + 
new UpdateProfileDataRequest( + randomAlphaOfLength(20), + null, + Map.of(randomAlphaOfLengthBetween(3, 8), randomAlphaOfLengthBetween(3, 8)), + -1, + -1, + WriteRequest.RefreshPolicy.WAIT_UNTIL + ) + ).actionGet() + ); + + // TODO: The index is created after the update call regardless. Should it not do that? + assertThat(getProfileIndexResponse().getIndices(), hasItemInArray(INTERNAL_SECURITY_PROFILE_INDEX_8)); + } + + private SearchProfilesResponse.ProfileHit[] doSearch(String query) { + final SearchProfilesRequest searchProfilesRequest = new SearchProfilesRequest(Set.of(), query, 10); + final SearchProfilesResponse searchProfilesResponse = client().execute(SearchProfilesAction.INSTANCE, searchProfilesRequest) + .actionGet(); + assertThat(searchProfilesResponse.getTotalHits().relation, is(TotalHits.Relation.EQUAL_TO)); + return searchProfilesResponse.getProfileHits(); + } + + private Set extractUsernames(SearchProfilesResponse.ProfileHit[] profileHits) { + return Arrays.stream(profileHits) + .map(SearchProfilesResponse.ProfileHit::profile) + .map(Profile::user) + .map(Profile.ProfileUser::username) + .collect(Collectors.toUnmodifiableSet()); + } + + private GetIndexResponse getProfileIndexResponse() { + final GetIndexRequest getIndexRequest = new GetIndexRequest(); + getIndexRequest.indices(".*"); + return client().execute(GetIndexAction.INSTANCE, getIndexRequest).actionGet(); + } } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java index 06af48c70471f..c249283511975 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java @@ -110,6 +110,7 @@ import org.elasticsearch.xpack.core.security.action.privilege.PutPrivilegesAction; import org.elasticsearch.xpack.core.security.action.profile.ActivateProfileAction; import org.elasticsearch.xpack.core.security.action.profile.GetProfileAction; +import org.elasticsearch.xpack.core.security.action.profile.SearchProfilesAction; import org.elasticsearch.xpack.core.security.action.profile.UpdateProfileDataAction; import org.elasticsearch.xpack.core.security.action.realm.ClearRealmCacheAction; import org.elasticsearch.xpack.core.security.action.role.ClearRolesCacheAction; @@ -186,6 +187,7 @@ import org.elasticsearch.xpack.security.action.privilege.TransportPutPrivilegesAction; import org.elasticsearch.xpack.security.action.profile.TransportActivateProfileAction; import org.elasticsearch.xpack.security.action.profile.TransportGetProfileAction; +import org.elasticsearch.xpack.security.action.profile.TransportSearchProfilesAction; import org.elasticsearch.xpack.security.action.profile.TransportUpdateProfileDataAction; import org.elasticsearch.xpack.security.action.realm.TransportClearRealmCacheAction; import org.elasticsearch.xpack.security.action.role.TransportClearRolesCacheAction; @@ -280,6 +282,7 @@ import org.elasticsearch.xpack.security.rest.action.privilege.RestPutPrivilegesAction; import org.elasticsearch.xpack.security.rest.action.profile.RestActivateProfileAction; import org.elasticsearch.xpack.security.rest.action.profile.RestGetProfileAction; +import org.elasticsearch.xpack.security.rest.action.profile.RestSearchProfilesAction; import org.elasticsearch.xpack.security.rest.action.profile.RestUpdateProfileDataAction; import 
org.elasticsearch.xpack.security.rest.action.realm.RestClearRealmCacheAction; import org.elasticsearch.xpack.security.rest.action.role.RestClearRolesCacheAction; @@ -1216,7 +1219,8 @@ public void onIndexModule(IndexModule module) { Stream.of( new ActionHandler<>(GetProfileAction.INSTANCE, TransportGetProfileAction.class), new ActionHandler<>(ActivateProfileAction.INSTANCE, TransportActivateProfileAction.class), - new ActionHandler<>(UpdateProfileDataAction.INSTANCE, TransportUpdateProfileDataAction.class) + new ActionHandler<>(UpdateProfileDataAction.INSTANCE, TransportUpdateProfileDataAction.class), + new ActionHandler<>(SearchProfilesAction.INSTANCE, TransportSearchProfilesAction.class) ) ).toList(); } else { @@ -1301,7 +1305,8 @@ public List getRestHandlers( Stream.of( new RestGetProfileAction(settings, getLicenseState()), new RestActivateProfileAction(settings, getLicenseState()), - new RestUpdateProfileDataAction(settings, getLicenseState()) + new RestUpdateProfileDataAction(settings, getLicenseState()), + new RestSearchProfilesAction(settings, getLicenseState()) ) ).toList(); } else { diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/profile/TransportGetProfileAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/profile/TransportGetProfileAction.java index 33404ba39b86f..2dd7de7d9aba2 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/profile/TransportGetProfileAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/profile/TransportGetProfileAction.java @@ -30,6 +30,6 @@ public TransportGetProfileAction(TransportService transportService, ActionFilter @Override protected void doExecute(Task task, GetProfileRequest request, ActionListener listener) { - profileService.getProfile(request.getUid(), request.getDatKeys(), listener.map(GetProfilesResponse::new)); + profileService.getProfile(request.getUid(), request.getDataKeys(), listener.map(GetProfilesResponse::new)); } } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/profile/TransportSearchProfilesAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/profile/TransportSearchProfilesAction.java new file mode 100644 index 0000000000000..9a0d022d804e0 --- /dev/null +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/profile/TransportSearchProfilesAction.java @@ -0,0 +1,35 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.security.action.profile; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.HandledTransportAction; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.core.security.action.profile.SearchProfilesAction; +import org.elasticsearch.xpack.core.security.action.profile.SearchProfilesRequest; +import org.elasticsearch.xpack.core.security.action.profile.SearchProfilesResponse; +import org.elasticsearch.xpack.security.profile.ProfileService; + +public class TransportSearchProfilesAction extends HandledTransportAction { + + private final ProfileService profileService; + + @Inject + public TransportSearchProfilesAction(TransportService transportService, ActionFilters actionFilters, ProfileService profileService) { + super(SearchProfilesAction.NAME, transportService, actionFilters, SearchProfilesRequest::new); + this.profileService = profileService; + } + + @Override + protected void doExecute(Task task, SearchProfilesRequest request, ActionListener listener) { + profileService.searchProfile(request, listener); + } +} diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/profile/ProfileService.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/profile/ProfileService.java index 52abce6f90ac5..a3a13aadd25d4 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/profile/ProfileService.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/profile/ProfileService.java @@ -10,6 +10,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.message.ParameterizedMessage; +import org.apache.lucene.search.TotalHits; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.DocWriteResponse; @@ -34,9 +35,11 @@ import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.core.Nullable; import org.elasticsearch.index.query.BoolQueryBuilder; +import org.elasticsearch.index.query.MultiMatchQueryBuilder; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchHits; +import org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.XContentBuilder; @@ -45,6 +48,8 @@ import org.elasticsearch.xcontent.XContentParserConfiguration; import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.core.security.action.profile.Profile; +import org.elasticsearch.xpack.core.security.action.profile.SearchProfilesRequest; +import org.elasticsearch.xpack.core.security.action.profile.SearchProfilesResponse; import org.elasticsearch.xpack.core.security.action.profile.UpdateProfileDataRequest; import org.elasticsearch.xpack.core.security.authc.Authentication; import org.elasticsearch.xpack.core.security.authc.AuthenticationContext; @@ -161,6 +166,72 @@ public void updateProfileData(UpdateProfileDataRequest request, ActionListener listener) { + tryFreezeAndCheckIndex(listener.map(response -> { + assert response == null : "only null response can reach here"; + return new SearchProfilesResponse(new 
SearchProfilesResponse.ProfileHit[] {}, 0, new TotalHits(0, TotalHits.Relation.EQUAL_TO)); + })).ifPresent(frozenProfileIndex -> { + final BoolQueryBuilder query = QueryBuilders.boolQuery().filter(QueryBuilders.termQuery("user_profile.enabled", true)); + if (Strings.hasText(request.getName())) { + query.must( + QueryBuilders.multiMatchQuery( + request.getName(), + "user_profile.user.username", + "user_profile.user.username._2gram", + "user_profile.user.username._3gram", + "user_profile.user.full_name", + "user_profile.user.full_name._2gram", + "user_profile.user.full_name._3gram", + "user_profile.user.display_name", + "user_profile.user.display_name._2gram", + "user_profile.user.display_name._3gram" + ).type(MultiMatchQueryBuilder.Type.BOOL_PREFIX) + ); + } + final SearchRequest searchRequest = client.prepareSearch(SECURITY_PROFILE_ALIAS) + .setQuery(query) + .setSize(request.getSize()) + .addSort("_score", SortOrder.DESC) + .addSort("user_profile.last_synchronized", SortOrder.DESC) + .request(); + + frozenProfileIndex.checkIndexVersionThenExecute( + listener::onFailure, + () -> executeAsyncWithOrigin( + client, + SECURITY_ORIGIN, + SearchAction.INSTANCE, + searchRequest, + ActionListener.wrap(searchResponse -> { + final SearchHits searchHits = searchResponse.getHits(); + final SearchHit[] hits = searchHits.getHits(); + final SearchProfilesResponse.ProfileHit[] profileHits; + if (hits.length == 0) { + profileHits = new SearchProfilesResponse.ProfileHit[0]; + } else { + profileHits = new SearchProfilesResponse.ProfileHit[hits.length]; + for (int i = 0; i < hits.length; i++) { + final SearchHit hit = hits[i]; + final VersionedDocument versionedDocument = new VersionedDocument( + buildProfileDocument(hit.getSourceRef()), + hit.getPrimaryTerm(), + hit.getSeqNo() + ); + profileHits[i] = new SearchProfilesResponse.ProfileHit( + versionedDocument.toProfile(request.getDataKeys()), + hit.getScore() + ); + } + } + listener.onResponse( + new SearchProfilesResponse(profileHits, searchResponse.getTook().millis(), searchHits.getTotalHits()) + ); + }, listener::onFailure) + ) + ); + }); + } + private void getVersionedDocument(String uid, ActionListener listener) { tryFreezeAndCheckIndex(listener).ifPresent(frozenProfileIndex -> { final GetRequest getRequest = new GetRequest(SECURITY_PROFILE_ALIAS, uidToDocId(uid)); @@ -385,7 +456,7 @@ private XContentBuilder wrapProfileDocumentWithoutApplicationData(ProfileDocumen /** * Freeze the profile index check its availability and return it if everything is ok. - * Otherwise it returns null. + * Otherwise it calls the listener with null and returns an empty Optional. */ private Optional tryFreezeAndCheckIndex(ActionListener listener) { final SecurityIndexManager frozenProfileIndex = profileIndex.freeze(); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/profile/RestSearchProfilesAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/profile/RestSearchProfilesAction.java new file mode 100644 index 0000000000000..51f4f2a6709ad --- /dev/null +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/profile/RestSearchProfilesAction.java @@ -0,0 +1,75 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.security.rest.action.profile; + +import org.elasticsearch.client.internal.node.NodeClient; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.license.XPackLicenseState; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.action.RestToXContentListener; +import org.elasticsearch.xcontent.ConstructingObjectParser; +import org.elasticsearch.xcontent.ParseField; +import org.elasticsearch.xpack.core.security.action.profile.SearchProfilesAction; +import org.elasticsearch.xpack.core.security.action.profile.SearchProfilesRequest; +import org.elasticsearch.xpack.security.rest.action.SecurityBaseRestHandler; + +import java.io.IOException; +import java.util.List; +import java.util.Set; + +import static org.elasticsearch.rest.RestRequest.Method.GET; +import static org.elasticsearch.rest.RestRequest.Method.POST; +import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; + +public class RestSearchProfilesAction extends SecurityBaseRestHandler { + + static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "search_profile_request_payload", + a -> new Payload((String) a[0], (Integer) a[1]) + ); + + static { + PARSER.declareString(optionalConstructorArg(), new ParseField("name")); + PARSER.declareInt(optionalConstructorArg(), new ParseField("size")); + } + + public RestSearchProfilesAction(Settings settings, XPackLicenseState licenseState) { + super(settings, licenseState); + } + + @Override + public List routes() { + return List.of(new Route(GET, "/_security/profile/_search"), new Route(POST, "/_security/profile/_search")); + } + + @Override + public String getName() { + return "xpack_security_search_profile"; + } + + @Override + protected RestChannelConsumer innerPrepareRequest(RestRequest request, NodeClient client) throws IOException { + final Set dataKeys = Strings.tokenizeByCommaToSet(request.param("data", null)); + final Payload payload = request.hasContent() ? PARSER.parse(request.contentParser(), null) : new Payload(null, null); + + final SearchProfilesRequest searchProfilesRequest = new SearchProfilesRequest(dataKeys, payload.name(), payload.size()); + return channel -> client.execute(SearchProfilesAction.INSTANCE, searchProfilesRequest, new RestToXContentListener<>(channel)); + } + + record Payload(String name, Integer size) { + + public String name() { + return name != null ? name : ""; + } + + public Integer size() { + return size != null ? 
size : 10; + } + } +} From ccc18d7f73a13a68f645255da26081b2af51e389 Mon Sep 17 00:00:00 2001 From: Rene Groeschke Date: Mon, 14 Feb 2022 09:48:54 +0100 Subject: [PATCH 080/167] Update gradle wrapper to 7.4 (#81963) * Make ForbiddenApisPrecommitPlugin plugin Gradle 8.0 compatible * Fix deprecations on ignoring empty folders for task inputs * Update Gradle wrapper to 7.4 GA --- .../precommit/LicenseHeadersTask.java | 2 + .../gradle/wrapper/gradle-wrapper.properties | 4 +- .../elasticsearch-build-tool-update.scenarios | 10 +- .../precommit/CheckForbiddenApisTask.java | 32 ++++ .../precommit/FilePermissionsTask.java | 2 + .../ForbiddenApisPrecommitPlugin.java | 143 +++++++++--------- .../precommit/ForbiddenPatternsTask.java | 2 + .../compat/RestCompatTestTransformTask.java | 2 + .../internal/test/rest/CopyRestApiTask.java | 2 + .../internal/test/rest/CopyRestTestsTask.java | 2 + .../src/main/resources/minimumGradleVersion | 2 +- gradle/wrapper/gradle-wrapper.jar | Bin 59536 -> 59821 bytes gradle/wrapper/gradle-wrapper.properties | 4 +- .../gradle/wrapper/gradle-wrapper.properties | 4 +- 14 files changed, 130 insertions(+), 81 deletions(-) create mode 100644 build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/CheckForbiddenApisTask.java diff --git a/build-conventions/src/main/java/org/elasticsearch/gradle/internal/conventions/precommit/LicenseHeadersTask.java b/build-conventions/src/main/java/org/elasticsearch/gradle/internal/conventions/precommit/LicenseHeadersTask.java index 45b6b1d142963..414d6c4762e44 100644 --- a/build-conventions/src/main/java/org/elasticsearch/gradle/internal/conventions/precommit/LicenseHeadersTask.java +++ b/build-conventions/src/main/java/org/elasticsearch/gradle/internal/conventions/precommit/LicenseHeadersTask.java @@ -25,6 +25,7 @@ import org.gradle.api.file.FileCollection; import org.gradle.api.provider.ListProperty; import org.gradle.api.tasks.CacheableTask; +import org.gradle.api.tasks.IgnoreEmptyDirectories; import org.gradle.api.tasks.Input; import org.gradle.api.tasks.InputFiles; import org.gradle.api.tasks.Internal; @@ -68,6 +69,7 @@ public LicenseHeadersTask() { * constructor can write to it. 
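+     * (Since Gradle 7.4, an input annotated with {@code @SkipWhenEmpty} must state explicitly whether
+     * empty directories are significant; the {@code @IgnoreEmptyDirectories} annotation added below
+     * makes that explicit and avoids the deprecation warning.)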
*/ @InputFiles + @IgnoreEmptyDirectories @SkipWhenEmpty @PathSensitive(PathSensitivity.RELATIVE) public List getJavaFiles() { diff --git a/build-tools-internal/gradle/wrapper/gradle-wrapper.properties b/build-tools-internal/gradle/wrapper/gradle-wrapper.properties index 7cec6af44e192..d5190930b2f32 100644 --- a/build-tools-internal/gradle/wrapper/gradle-wrapper.properties +++ b/build-tools-internal/gradle/wrapper/gradle-wrapper.properties @@ -1,6 +1,6 @@ distributionBase=GRADLE_USER_HOME distributionPath=wrapper/dists -distributionUrl=https\://services.gradle.org/distributions/gradle-7.3.3-all.zip +distributionUrl=https\://services.gradle.org/distributions/gradle-7.4-all.zip zipStoreBase=GRADLE_USER_HOME zipStorePath=wrapper/dists -distributionSha256Sum=c9490e938b221daf0094982288e4038deed954a3f12fb54cbf270ddf4e37d879 +distributionSha256Sum=cd5c2958a107ee7f0722004a12d0f8559b4564c34daad7df06cffd4d12a426d0 diff --git a/build-tools-internal/performance/elasticsearch-build-tool-update.scenarios b/build-tools-internal/performance/elasticsearch-build-tool-update.scenarios index 27ff2443e7371..9c7c7abdb8394 100644 --- a/build-tools-internal/performance/elasticsearch-build-tool-update.scenarios +++ b/build-tools-internal/performance/elasticsearch-build-tool-update.scenarios @@ -69,10 +69,9 @@ precommit_master { single_project_branch { title = "single project (@testGitCommit@)" - cleanup-tasks = [":server:clean"] - tasks = [":server:spotlessApply", ":server:precommit"] + tasks = [":server:precommit"] gradle-args = ["--no-scan"] - apply-abi-change-to = "server/src/main/java/org/elasticsearch/Build.java" + apply-abi-change-to = "server/src/main/java/org/elasticsearch/bootstrap/BootstrapInfo.java" run-using = cli // value can be "cli" or "tooling-api" daemon = warm // value can be "warm", "cold", or "none" warm-ups = 5 @@ -87,10 +86,9 @@ single_project_branch { single_project_master { title = "single project (master)" - cleanup-tasks = [":server:clean"] - tasks = [":server:spotlessApply", ":server:precommit"] + tasks = [":server:precommit"] gradle-args = ["--no-scan"] - apply-abi-change-to = "server/src/main/java/org/elasticsearch/Build.java" + apply-abi-change-to = "server/src/main/java/org/elasticsearch/bootstrap/BootstrapInfo.java" run-using = cli // value can be "cli" or "tooling-api" daemon = warm // value can be "warm", "cold", or "none" warm-ups = 5 diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/CheckForbiddenApisTask.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/CheckForbiddenApisTask.java new file mode 100644 index 0000000000000..e158dd7c755c9 --- /dev/null +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/CheckForbiddenApisTask.java @@ -0,0 +1,32 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.gradle.internal.precommit; + +import de.thetaphi.forbiddenapis.gradle.CheckForbiddenApis; + +import org.gradle.api.file.FileTree; +import org.gradle.api.tasks.IgnoreEmptyDirectories; + +/** + * This implementation is used to fix gradle 8 compatibility of + * the CheckForbiddenApis task which is built with gradle 4 support + * in mind. 
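+ * Gradle resolves task property annotations from the most specific (overriding) method, so
+ * redeclaring {@code getClassFiles()} with {@code @IgnoreEmptyDirectories} is sufficient;
+ * the override itself just delegates to the superclass.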
+ * */ +public class CheckForbiddenApisTask extends CheckForbiddenApis { + + /** + * Add additional annotation to make this input gradle 8 compliant. + * Otherwise we see a deprecation warning here starting with gradle 7.4 + * */ + @Override + @IgnoreEmptyDirectories + public FileTree getClassFiles() { + return super.getClassFiles(); + } +} diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/FilePermissionsTask.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/FilePermissionsTask.java index ee3e58fd0552e..bc53358d0a507 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/FilePermissionsTask.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/FilePermissionsTask.java @@ -14,6 +14,7 @@ import org.gradle.api.file.FileTree; import org.gradle.api.file.ProjectLayout; import org.gradle.api.provider.ListProperty; +import org.gradle.api.tasks.IgnoreEmptyDirectories; import org.gradle.api.tasks.InputFiles; import org.gradle.api.tasks.Internal; import org.gradle.api.tasks.OutputFile; @@ -75,6 +76,7 @@ private static boolean isExecutableFile(File file) { * Returns the files this task will check */ @InputFiles + @IgnoreEmptyDirectories @SkipWhenEmpty public FileCollection getFiles() { return getSources().get() diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/ForbiddenApisPrecommitPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/ForbiddenApisPrecommitPlugin.java index 10efa35695cd4..99c36ebabcd1c 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/ForbiddenApisPrecommitPlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/ForbiddenApisPrecommitPlugin.java @@ -8,19 +8,19 @@ package org.elasticsearch.gradle.internal.precommit; -import de.thetaphi.forbiddenapis.gradle.CheckForbiddenApis; -import de.thetaphi.forbiddenapis.gradle.ForbiddenApisPlugin; +import de.thetaphi.forbiddenapis.gradle.CheckForbiddenApisExtension; import groovy.lang.Closure; import org.elasticsearch.gradle.internal.ExportElasticsearchBuildResourcesTask; import org.elasticsearch.gradle.internal.InternalPlugin; import org.elasticsearch.gradle.internal.conventions.precommit.PrecommitPlugin; import org.elasticsearch.gradle.internal.info.BuildParams; -import org.elasticsearch.gradle.util.GradleUtils; import org.gradle.api.Project; import org.gradle.api.Task; import org.gradle.api.plugins.ExtraPropertiesExtension; -import org.gradle.api.tasks.SourceSet; +import org.gradle.api.plugins.JavaBasePlugin; +import org.gradle.api.plugins.JavaPluginExtension; +import org.gradle.api.specs.Specs; import org.gradle.api.tasks.SourceSetContainer; import org.gradle.api.tasks.TaskProvider; @@ -29,10 +29,24 @@ import java.util.List; import java.util.Set; +import static de.thetaphi.forbiddenapis.gradle.ForbiddenApisPlugin.FORBIDDEN_APIS_EXTENSION_NAME; +import static de.thetaphi.forbiddenapis.gradle.ForbiddenApisPlugin.FORBIDDEN_APIS_TASK_NAME; + public class ForbiddenApisPrecommitPlugin extends PrecommitPlugin implements InternalPlugin { @Override public TaskProvider createTask(Project project) { - project.getPluginManager().apply(ForbiddenApisPlugin.class); + project.getPluginManager().apply(JavaBasePlugin.class); + + // create Extension for defaults: + var checkForbiddenApisExtension = project.getExtensions() + 
.create(FORBIDDEN_APIS_EXTENSION_NAME, CheckForbiddenApisExtension.class, project); + + // Create a convenience task for all checks (this does not conflict with extension, as it has higher priority in DSL): + var forbiddenTask = project.getTasks() + .register(FORBIDDEN_APIS_TASK_NAME, task -> { task.setDescription("Runs forbidden-apis checks."); }); + + JavaPluginExtension javaPluginExtension = project.getExtensions().getByType(JavaPluginExtension.class); + // Define our tasks (one for each SourceSet): TaskProvider resourcesTask = project.getTasks() .register("forbiddenApisResources", ExportElasticsearchBuildResourcesTask.class); @@ -47,76 +61,69 @@ public TaskProvider createTask(Project project) { t.copy("forbidden/es-server-signatures.txt"); t.copy("forbidden/snakeyaml-signatures.txt"); }); - project.getTasks().withType(CheckForbiddenApis.class).configureEach(t -> { - t.dependsOn(resourcesTask); - assert t.getName().startsWith(ForbiddenApisPlugin.FORBIDDEN_APIS_TASK_NAME); - String sourceSetName; - if (ForbiddenApisPlugin.FORBIDDEN_APIS_TASK_NAME.equals(t.getName())) { - sourceSetName = "main"; - } else { - // parse out the sourceSetName - char[] chars = t.getName().substring(ForbiddenApisPlugin.FORBIDDEN_APIS_TASK_NAME.length()).toCharArray(); - chars[0] = Character.toLowerCase(chars[0]); - sourceSetName = new String(chars); - } - - SourceSetContainer sourceSets = GradleUtils.getJavaSourceSets(project); - SourceSet sourceSet = sourceSets.getByName(sourceSetName); - t.setClasspath(project.files(sourceSet.getRuntimeClasspath()).plus(sourceSet.getCompileClasspath())); - - t.setTargetCompatibility(BuildParams.getMinimumRuntimeVersion().getMajorVersion()); - t.setBundledSignatures(Set.of("jdk-unsafe", "jdk-non-portable", "jdk-system-out")); - t.setSignaturesFiles( - project.files( - resourcesDir.resolve("forbidden/jdk-signatures.txt"), - resourcesDir.resolve("forbidden/es-all-signatures.txt"), - resourcesDir.resolve("forbidden/jdk-deprecated.txt") - ) - ); - t.setSuppressAnnotations(Set.of("**.SuppressForbidden")); - if (t.getName().endsWith("Test")) { + project.getExtensions().getByType(SourceSetContainer.class).configureEach(sourceSet -> { + String sourceSetTaskName = sourceSet.getTaskName(FORBIDDEN_APIS_TASK_NAME, null); + var sourceSetTask = project.getTasks().register(sourceSetTaskName, CheckForbiddenApisTask.class, t -> { + t.setDescription("Runs forbidden-apis checks on '" + sourceSet.getName() + "' classes."); + t.dependsOn(sourceSet.getOutput()); + t.getOutputs().upToDateWhen(Specs.SATISFIES_ALL); + t.setClassesDirs(sourceSet.getOutput().getClassesDirs()); + t.dependsOn(resourcesTask); + t.setClasspath(project.files(sourceSet.getRuntimeClasspath()).plus(sourceSet.getCompileClasspath())); + t.setTargetCompatibility(BuildParams.getMinimumRuntimeVersion().getMajorVersion()); + t.setBundledSignatures(Set.of("jdk-unsafe", "jdk-non-portable", "jdk-system-out")); t.setSignaturesFiles( - t.getSignaturesFiles() - .plus( - project.files( - resourcesDir.resolve("forbidden/es-test-signatures.txt"), - resourcesDir.resolve("forbidden/http-signatures.txt") - ) - ) + project.files( + resourcesDir.resolve("forbidden/jdk-signatures.txt"), + resourcesDir.resolve("forbidden/es-all-signatures.txt"), + resourcesDir.resolve("forbidden/jdk-deprecated.txt") + ) ); - } else { - t.setSignaturesFiles( - t.getSignaturesFiles().plus(project.files(resourcesDir.resolve("forbidden/es-server-signatures.txt"))) - ); - } - ExtraPropertiesExtension ext = t.getExtensions().getExtraProperties(); - ext.set("replaceSignatureFiles", 
new Closure(t) { - @Override - public Void call(Object... names) { - List resources = new ArrayList<>(names.length); - for (Object name : names) { - resources.add(resourcesDir.resolve("forbidden/" + name + ".txt")); - } - t.setSignaturesFiles(project.files(resources)); - return null; + t.setSuppressAnnotations(Set.of("**.SuppressForbidden")); + if (t.getName().endsWith("Test")) { + t.setSignaturesFiles( + t.getSignaturesFiles() + .plus( + project.files( + resourcesDir.resolve("forbidden/es-test-signatures.txt"), + resourcesDir.resolve("forbidden/http-signatures.txt") + ) + ) + ); + } else { + t.setSignaturesFiles( + t.getSignaturesFiles().plus(project.files(resourcesDir.resolve("forbidden/es-server-signatures.txt"))) + ); } + ExtraPropertiesExtension ext = t.getExtensions().getExtraProperties(); + ext.set("replaceSignatureFiles", new Closure(t) { + @Override + public Void call(Object... names) { + List resources = new ArrayList<>(names.length); + for (Object name : names) { + resources.add(resourcesDir.resolve("forbidden/" + name + ".txt")); + } + t.setSignaturesFiles(project.files(resources)); + return null; + } - }); - ext.set("addSignatureFiles", new Closure(t) { - @Override - public Void call(Object... names) { - List resources = new ArrayList<>(names.length); - for (Object name : names) { - resources.add(resourcesDir.resolve("forbidden/" + name + ".txt")); + }); + ext.set("addSignatureFiles", new Closure(t) { + @Override + public Void call(Object... names) { + List resources = new ArrayList<>(names.length); + for (Object name : names) { + resources.add(resourcesDir.resolve("forbidden/" + name + ".txt")); + } + t.setSignaturesFiles(t.getSignaturesFiles().plus(project.files(resources))); + return null; } - t.setSignaturesFiles(t.getSignaturesFiles().plus(project.files(resources))); - return null; - } + }); + }); + forbiddenTask.configure(t -> t.dependsOn(sourceSetTask)); }); - TaskProvider forbiddenApis = project.getTasks().named("forbiddenApis"); - forbiddenApis.configure(t -> t.setGroup("")); - return forbiddenApis; + return forbiddenTask; } } diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/ForbiddenPatternsTask.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/ForbiddenPatternsTask.java index e4d732149e0eb..32059eefb8683 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/ForbiddenPatternsTask.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/ForbiddenPatternsTask.java @@ -16,6 +16,7 @@ import org.gradle.api.provider.ListProperty; import org.gradle.api.provider.Property; import org.gradle.api.provider.Provider; +import org.gradle.api.tasks.IgnoreEmptyDirectories; import org.gradle.api.tasks.Input; import org.gradle.api.tasks.InputFiles; import org.gradle.api.tasks.Internal; @@ -90,6 +91,7 @@ public ForbiddenPatternsTask(ProjectLayout projectLayout) { } @InputFiles + @IgnoreEmptyDirectories @PathSensitive(PathSensitivity.RELATIVE) @SkipWhenEmpty public FileCollection getFiles() { diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/rest/compat/RestCompatTestTransformTask.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/rest/compat/RestCompatTestTransformTask.java index 8d03ff609d685..bfb53c23b5f1c 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/rest/compat/RestCompatTestTransformTask.java +++ 
b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/rest/compat/RestCompatTestTransformTask.java @@ -44,6 +44,7 @@ import org.gradle.api.file.FileSystemOperations; import org.gradle.api.file.FileTree; import org.gradle.api.model.ObjectFactory; +import org.gradle.api.tasks.IgnoreEmptyDirectories; import org.gradle.api.tasks.Input; import org.gradle.api.tasks.InputFiles; import org.gradle.api.tasks.Internal; @@ -417,6 +418,7 @@ public DirectoryProperty getOutputDirectory() { } @SkipWhenEmpty + @IgnoreEmptyDirectories @InputFiles public FileTree getTestFiles() { return sourceDirectory.getAsFileTree().matching(testPatternSet); diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/CopyRestApiTask.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/CopyRestApiTask.java index e6a4a0a7a1397..5c00e0428c9b7 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/CopyRestApiTask.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/CopyRestApiTask.java @@ -15,6 +15,7 @@ import org.gradle.api.file.ProjectLayout; import org.gradle.api.model.ObjectFactory; import org.gradle.api.provider.ListProperty; +import org.gradle.api.tasks.IgnoreEmptyDirectories; import org.gradle.api.tasks.Input; import org.gradle.api.tasks.InputFiles; import org.gradle.api.tasks.Internal; @@ -86,6 +87,7 @@ public boolean isSkipHasRestTestCheck() { } @SkipWhenEmpty + @IgnoreEmptyDirectories @InputFiles public FileTree getInputDir() { FileTree coreFileTree = null; diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/CopyRestTestsTask.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/CopyRestTestsTask.java index 4513c64d91183..5cc68f8e73d45 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/CopyRestTestsTask.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/rest/CopyRestTestsTask.java @@ -16,6 +16,7 @@ import org.gradle.api.file.ProjectLayout; import org.gradle.api.model.ObjectFactory; import org.gradle.api.provider.ListProperty; +import org.gradle.api.tasks.IgnoreEmptyDirectories; import org.gradle.api.tasks.Input; import org.gradle.api.tasks.InputFiles; import org.gradle.api.tasks.Optional; @@ -98,6 +99,7 @@ public Map getSubstitutions() { } @SkipWhenEmpty + @IgnoreEmptyDirectories @InputFiles public FileTree getInputDir() { FileTree coreFileTree = null; diff --git a/build-tools-internal/src/main/resources/minimumGradleVersion b/build-tools-internal/src/main/resources/minimumGradleVersion index c6db724bfd030..9904c66141eab 100644 --- a/build-tools-internal/src/main/resources/minimumGradleVersion +++ b/build-tools-internal/src/main/resources/minimumGradleVersion @@ -1 +1 @@ -7.3.3 \ No newline at end of file +7.4 \ No newline at end of file diff --git a/gradle/wrapper/gradle-wrapper.jar b/gradle/wrapper/gradle-wrapper.jar index 7454180f2ae8848c63b8b4dea2cb829da983f2fa..41d9927a4d4fb3f96a785543079b8df6723c946b 100644 GIT binary patch delta 8958 zcmY+KWl$VIlZIh&f(Hri?gR<$?iyT!TL`X;1^2~W7YVSq1qtqM!JWlDxLm%}UESUM zndj}Uny%^UnjhVhFb!8V3s(a#fIy>`VW15{5nuy;_V&a5O#0S&!a4dSkUMz_VHu3S zGA@p9Q$T|Sj}tYGWdjH;Mpp8m&yu&YURcrt{K;R|kM~(*{v%QwrBJIUF+K1kX5ZmF zty3i{d`y0;DgE+de>vN@yYqFPe1Ud{!&G*Q?iUc^V=|H%4~2|N zW+DM)W!`b&V2mQ0Y4u_)uB=P@-2`v|Wm{>CxER1P^ z>c}ZPZ)xxdOCDu59{X^~2id7+6l6x)U}C4Em?H~F`uOxS1?}xMxTV|5@}PlN%Cg$( zwY6c}r60=z5ZA1L 
zTMe;84rLtYvcm?M(H~ZqU;6F7Evo{P7!LGcdwO|qf1w+)MsnvK5^c@Uzj<{ zUoej1>95tuSvDJ|5K6k%&UF*uE6kBn47QJw^yE&#G;u^Z9oYWrK(+oL97hBsUMc_^ z;-lmxebwlB`Er_kXp2$`&o+rPJAN<`WX3ws2K{q@qUp}XTfV{t%KrsZ5vM!Q#4{V& zq>iO$MCiLq#%wXj%`W$_%FRg_WR*quv65TdHhdpV&jlq<=K^K`&!Kl5mA6p4n~p3u zWE{20^hYpn1M}}VmSHBXl1*-)2MP=0_k)EPr#>EoZukiXFDz?Di1I>2@Z^P$pvaF+ zN+qUy63jek2m59;YG)`r^F3-O)0RDIXPhf)XOOdkmu`3SMMSW(g+`Ajt{=h1dt~ks ztrhhP|L4G%5x79N#kwAHh5N){@{fzE7n&%dnisCm65Za<8r_hKvfx4Bg*`%-*-Mvn zFvn~)VP@}1sAyD+B{{8l{EjD10Av&Mz9^Xff*t`lU=q=S#(|>ls520;n3<}X#pyh& z*{CJf7$*&~!9jMnw_D~ikUKJ2+UnXmN6qak{xx%W;BKuXt7@ky!LPI1qk?gDwG@@o zkY+BkIie>{{q==5)kXw(*t#I?__Kwi>`=+s?Gq6X+vtSsaAO&Tf+Bl$vKnzc&%BHM z=loWOQq~n}>l=EL(5&6((ESsQC3^@4jlO5Od{qN#sWV)vqXw}aA>*uvwZopNN(|-T zRTF%5Y_k1R$;(d-)n;hWex{;7b6KgdAVE@&0pd(*qDzBO#YZV%kh%pYt1`hnQ(Fa& zYiDrOTDqk5M7hzp9kI2h!PxNnuJ&xl*zF8sx6!67bA49R1bmUF5bpK&&{eI0U~cH}PM z3aW1$lRb|ItkG5~_eBNu$|I|vYIdAA9a!pVq<+UTx*M}fG`23zxXp&E=FfnY- zEzKj;Cu_s4v>leO7M2-mE(UzKHL4c$c`3dS*19OpLV^4NI*hWWnJQ9lvzP4c;c?do zqrcsKT*i~eIHl0D3r4N{)+RsB6XhrC^;sp2cf_Eq#6*CV;t8v=V!ISe>>9kPgh}NI z=1UZutslxcT$Ad;_P^;Oouoa(cs!Ctpvi>%aQ+Zp=1d|h{W9Wmf7JWxa(~<#tSZ?C%wu4_5F!fc!<@PIBeJ)Nr^$bB6!_Gic_7}c3J{QI~Gg5g5jTp9}V6KYgrgaX>pJt}7$!wOht&KO|+z{Iw@YL|@~D zMww}+lG}rm2^peNx>58ME||ZQxFQeVSX8iogHLq_vXb`>RnoEKaTWBF-$JD#Q4BMv zt2(2Qb*x-?ur1Y(NsW8AdtX0#rDB?O(Vs4_xA(u-o!-tBG03OI!pQD+2UytbL5>lG z*(F)KacHqMa4?dxa(Vcrw>IIAeB$3cx#;;5r2X;HE8|}eYdAgCw#tpXNy7C3w1q`9 zGxZ6;@1G%8shz9e+!K2MO*{_RjO}Jo6eL3{TSZ>nY7)Qs`Dhi5><@oh0r)gT7H-?3 zLDsd^@m%JvrS8sta5`QiZNs^*GT}Hiy^zjK2^Ni%`Z|ma)D2 zuyumbvw$M8$haCTI~6M%d4+P)uX%u{Sfg4Al+F7c6;O-*)DKI7E8izSOKB#FcV{M+ zEvY0FBkq!$J0EW$Cxl}3{JwV^ki-T?q6C30Y5e&p@8Rd?$ST-Ghn*-`tB{k54W<>F z5I)TFpUC!E9298=sk>m#FI4sUDy_!8?51FqqW!9LN1(zuDnB3$!pEUjL>N>RNgAG~-9Xm|1lqHseW(%v&6K(DZ3Pano(1-Qe?3%J&>0`~w^Q-p&@ zg@HjvhJk?*hpF7$9P|gkzz`zBz_5Z!C4_-%fCcAgiSilzFQef!@amHDrW!YZS@?7C zs2Y9~>yqO+rkih?kXztzvnB^6W=f52*iyuZPv$c42$WK7>PHb z6%MYIr5D32KPdwL1hJf{_#jn?`k(taW?mwmZVvrr=y~fNcV$`}v(8};o9AjOJumS4 z`889O91^pkF+|@$d9wVoZ3;^j;^sUs&Ubo_qD&MTL%O z&*SE0ujG~zm;?x)8TLC&ft))nyI zcg44@*Q{cYT+qGrA=In_X{NNCD+B0w#;@g)jvBU;_8od6U>;7HIo@F*=g8CQUo(u^ z3r4FJ7#<@)MXO&5+DgKE&^>^`r!loe7CWE*1k0*0wLFzSOV8jvlX~WOQ?$1v zk$Or}!;ix0g78^6W;+<=J>z@CBs!<<)HvF(Ls-&`matpesJ5kkjC)6nGB@b{ii6-Uoho$BT%iJgugTOeZ$5Xo4D7Pd< zC*LJh5V@2#5%aBZCgzlQi3@<_!VfiL07ywc)ZbwKPfcR|ElQoS(8x|a7#IR}7#Io= zwg4$8S{egr-NffD)Fg&X9bJSoM25pF&%hf>(T&9bI}=#dPQyNYz;ZZ7EZ=u1n701SWKkZ9n(-qU ztN`sdWL1uxQ1mKS@x11;O|@^AD9!NeoPx}?EKIr!2>1Qq4gjfGU)tr6?Z5l7JAS3j zZeq{vG{rb%DFE4%$szK}d2UzB{4>L?Tv+NAlE*&Nq6g+XauaSI+N2Y8PJLw+aNg1p zbxr|hI8wcMP&&+(Cu|%+Jq|r>+BHk@{AvfBXKiVldN)@}TBS0LdIpnANCVE26WL-} zV}HJ^?m&$Rkq;Zf*i-hoasnpJVyTH__dbGWrB_R55d*>pTyl6(?$EO@>RCmTX1Hzr zT2)rOng?D4FfZ_C49hjMV*UonG2DlG$^+k=Y%|?Dqae4}JOU=8=fgY4Uh!pa9eEqf zFX&WLPu!jArN*^(>|H>dj~g`ONZhaaD%h_HHrHkk%d~TR_RrX{&eM#P@3x=S^%_6h zh=A)A{id16$zEFq@-D7La;kTuE!oopx^9{uA3y<}9 z^bQ@U<&pJV6kq7LRF47&!UAvgkBx=)KS_X!NY28^gQr27P=gKh0+E>$aCx&^vj2uc}ycsfSEP zedhTgUwPx%?;+dESs!g1z}5q9EC+fol}tAH9#fhZQ?q1GjyIaR@}lGCSpM-014T~l zEwriqt~ftwz=@2tn$xP&-rJt?nn5sy8sJ5Roy;pavj@O+tm}d_qmAlvhG(&k>(arz z;e|SiTr+0<&6(-An0*4{7akwUk~Yf4M!!YKj^swp9WOa%al`%R>V7mi z+5+UodFAaPdi4(8_FO&O!Ymb#@yxkuVMrog(7gkj$G@FLA#ENMxG)4f<}S%Fn?Up$+C%{02AgMKa^ z4SFGWp6U>{Q6VRJV}yjxXT*e`1XaX}(dW1F&RNhpTzvCtzuu;LMhMfJ2LBEy?{^GHG!OF!! 
zDvs64TG)?MX&9NCE#H3(M0K>O>`ca0WT2YR>PTe&tn?~0FV!MRtdb@v?MAUG&Ef7v zW%7>H(;Mm)RJkt18GXv!&np z?RUxOrCfs;m{fBz5MVlq59idhov21di5>WXWD-594L-X5;|@kyWi@N+(jLuh=o+5l zGGTi~)nflP_G}Yg5Pi%pl88U4+^*ihDoMP&zA*^xJE_X*Ah!jODrijCqQ^{=&hD7& z^)qv3;cu?olaT3pc{)Kcy9jA2E8I)#Kn8qO>70SQ5P8YSCN=_+_&)qg)OYBg|-k^d3*@jRAeB?;yd-O1A0wJ z?K*RDm|wE<(PBz~+C%2CTtzCTUohxP2*1kE8Of~{KRAvMrO_}NN&@P7SUO{;zx0iK z@or9R8ydYOFZf(cHASCAatL%;62IL27~SmASr(7F&NMr+#gNw@z1VM z_ALFwo3)SoANEwRerBdRV`>y`t72#aF2ConmWQp(Xy|msN9$yxhZ1jAQ67lq{vbC5 zujj|MlGo`6Bfn0TfKgi(k=gq0`K~W+X(@GzYlPI4g0M;owH3yG14rhK>lG8lS{`!K z+Nc@glT-DGz?Ym?v#Hq|_mEdPAlHH5jZuh*6glq!+>Lk$S%ED2@+ea6CE@&1-9a?s znglt|fmIK}fg<9@XgHe4*q!aO<-;Xj$T?IzB-{&2`#eA6rdtCi80mpP&vw(Uytxu$#YzNI_cB>LS zmim>ys;ir;*Dzbr22ZDxO2s;671&J0U<9(n1yj)J zHFNz=ufPcQVEG+ePjB<5C;=H0{>Mi*xD>hQq8`Vi7TjJ$V04$`h3EZGL|}a07oQdR z?{cR(z+d>arn^AUug&voOzzi$ZqaS)blz-z3zr;10x;oP2)|Cyb^WtN2*wNn`YX!Y z+$Pji<7|!XyMCEw4so}xXLU)p)BA~2fl>y2Tt}o9*BPm?AXA8UE8a;>rOgyCwZBFa zyl42y`bc3}+hiZL_|L_LY29vVerM+BVE@YxK>TGm@dHi@Uw*7AIq?QA9?THL603J% zIBJ4y3n8OFzsOI;NH%DZ!MDwMl<#$)d9eVVeqVl(5ZX$PPbt*p_(_9VSXhaUPa9Qu z7)q4vqYKX7ieVSjOmVEbLj4VYtnDpe*0Y&+>0dS^bJ<8s*eHq3tjRAw^+Mu4W^-E= z4;&namG4G;3pVDyPkUw#0kWEO1;HI6M51(1<0|*pa(I!sj}F^)avrE`ShVMKBz}nE zzKgOPMSEp6M>hJzyTHHcjV%W*;Tdb}1xJjCP#=iQuBk_Eho6yCRVp&e!}4IBJ&?ksVc&u#g3+G$oNlJ?mWfADjeBS-Ph3`DKk-~Z70XugH8sq2eba@4 zIC1H_J$`9b$K`J)sGX3d!&>OmC@@rx1TL~NinQOYy72Q_+^&Mg>Ku(fTgaXdr$p_V z#gav1o{k~c>#)u3r@~6v^o)Lf=C{rAlL@!s457pq)pO;Cojx7U{urO4cvXP|E>+dV zmr2?!-5)tk-&*ap^D^2x7NG6nOop2zNFQ9v8-EZ{WCz-h36C)<^|f{V#R_WE^@(T0+d-at5hXX{U?zak*ac-XnyINo+yBD~~3O1I=a z99|CI>502&s-Qi5bv>^2#cQ%ut<4d7KgQ^kE|=%6#VlGiY8$rdJUH{sra;P~cyb_i zeX(kS%w0C?mjhJl9TZp8RS;N~y3(EXEz13oPhOSE4WaTljGkVXWd~|#)vsG6_76I)Kb z8ro?;{j^lxNsaxE-cfP;g(e;mhh3)&ba}li?woV2#7ByioiD>s%L_D;?#;C#z;a(N z-_WY<=SH42m9bFQ>Nb z@4K$@4l8pD7AKxCR>t0%`Qoy9=hA?<<^Vcj8;-E+oBe3ReW1`el8np8E$k{LgFQ}2 z2t8a`wOXFdJ9!5$&mEfD1CnJ)TB+RJih88-Zos9@HZ# zL#{qfbF0ARTXkR@G{lwlOH~nnL)1jcyu!qv2`57S&%oKz0}r{~l9U_UHaJ5!8#nrs z?2FrL`mxnzu&{bweD&62)ilz*?pYIvt`T!XFVVA78})p1YEy7 z8fK#s?b~Yo$n7&_a?EBdXH-_W)Z44?!;DFx6pZ?~RArtBI*Qm4~6nX6Z_T*i$bQPE;Qz?DAPstpGSqr-AJ zo%m9cA`oDDm?&dTaoh_>@F>a?!y4qt_;NGN9Z<%SS;fX-cSu|>+Pba22`CRb#|HZa z;{)yHE>M-pc1C0mrnT~80!u&dvVTYFV8xTQ#g;6{c<9d!FDqU%TK5T6h*w*p980D~ zUyCb`y3{-?(mJFP)0*-Nt;mI$-gc4VQumh|rs&j_^R{sgTPF`1Xja2YWstsKFuQ(d zmZMxV$p$|qQUXchu&8%J(9|)B?`~rIx&)LqDS>ob5%gTeTP#Sbny#y*rnJ&?(l=!( zoV~}LJ1DPLnF8oyM(2ScrQ0{Q4m4-BWnS4wilgCW-~~;}pw=&<+HggRD_3c@3RQIr z9+-%!%}u_{`YS=&>h%kPO3ce}>y!d-zqiniNR-b5r97u;+K6HA2tS>Z#cV{+eFI`* zd8RMGAUtX1KWfPV;q<-5JAykS+2sY$2~UX+4461a(%{P#{rwFPu0xpIuYlbgD{C7C z=U{FUarVTYX6ZUq3wE@G^QT4H2Re;n$Fz9cJ>hABl)9T8pozqbA1)H-%1=WKm^QMu zjnUZ&Pu>q+X&6Co*y#@pxc-4waKMInEPGmE_>3@Ym3S*dedSradmc5mlJn`i0vMW6 zhBnGQD^Z;&S0lnS0curqDO@({J7kTtRE+Ra?nl^HP9<)W&C>~`!258f$XDbyQOQXG zP8hhySnarOpgu8xv8@WlXnm(Uk~)_3$Sg0vTbU3 z{W!5B(L3{Yy3K5PN<@jEarAtja`}@KYva&zFRF*s+_%jIXh$T(S=an8?=Ry3H*NRqWgsM`&!#|@kf1>=4q%bFw7^Rhz!z5I zyI^zU8_R1WN9`88Z=n>pIZQ`Ixr~_9G%Q}@A7rd#*%y7G zXl^Id=^ZL?Rx}}gWXCqzj9C6;x(~mAH|$JteXa1MH<6UQig@!Hf~t}B%tP0I|H&;y zO6N0}svOa1a^PyP9N5?4W6VF%=Bj{qHUgc8@siw4bafT=UPFSoQqKgyUX>sXTBZ=x zOh^Ad!{kOM9v{%5y}`-8u*T&C7Vq6mD%GR}UeU(*epO&qgC-CkD;%=l)ZuinSzHM` z{@`j&_vC6dDe{Yb9k@1zeV_K6!l(@=6ucoI=R^cH=6{i71%4W3$J-?<8Qn#$-DMtA z6Qqi)t?4ifrt%3jSA#6ji#{f(($KBL-iQh-xrC||3U3lq`9>r)>X%oLvtimuHW-)} zy}>9~|M>w4eES`g7;iBM%Se5-OP%1U6gNWp3AZqT8C6OlFFfQ$|7LL;tBV)(qlp4K zruar^K8FnJN3@_}B;G`a~H`t|3+6d>q3#`ctTkE-D^1#d9NalQ04lH*qUW2!V zhk7#z8OwHhSl8w14;KctfO8ubZJ4$dEdpXE78wABz=n5*=q9ex3S}`e7x~~V-jmHOhtX2*n+pBslo3uosdE7xABK=V#-t{1Hd~?i 
z{i~%Bw6NYF+F$aK$M`r#xe=NxhA5=p%i7!$);sd>Q}#`G?Q~fygrMXmZw?0#5#17W}6Tj+&kFexG{!mYl5FoA99}3G9l;3lVQ^ z48^~gsVppE*x91WheqI(A%F0Z#$#1UJP1R12Mj9r)y(A?a+iquX+d8WD4WAQJ_!oq z9rTISr7bPd(GTP57xm$}C}&kjMivi;zi^Y9g3&X0A;ovdJ?{%_wHgt%%9P&N4H z^XzV(uNA4 zAP`hgP6BEN5`YXh|DF~6Pud?~gWfhUKoPX4>z|}0aocC&K+AoV%|SX*N!wGq3|y< zg4lP(04XIPmt6}$N!dTk+pZv>u;MTB{L4hp9uXk7>aS!6jqM2lVr%{)H3$O127TSZ z0x9hi0k-P?nWFdQ0K`pykqUIT&jD~B0tHP{ffS(}fZ(aW$oBWTSfHO!A^><6vA?qar%tzN-5NQO zL&|F{nGiQyzNJ+bM$Y`n=Lx^3wTG^o2bGB@cwr1eb+6c-1tN=U+Db;bc~eJ!hwM{SbI=#g?$!PjDB+) zPgU_2EIxocr*EOJG52-~!gml&|D|C2OQ3Y(zAhL}iae4-Ut0F*!z!VEdfw8#`LAi# zhJ_EM*~;S|FMV6y%-SduHjPOI3cFM(GpH|HES<}*=vqY+64%dJYc|k?n6Br7)D#~# zEqO(xepfaf2F{>{E2`xb=AO%A<7RtUq6kU_Iu0m?@0K(+<}u3gVw5fy=Y4CC*{IE3 zLP3YBJ7x+U(os5=&NT%gKi23bbaZ`@;%ln)wp4GpDUT$J8NtFDHJzIe_-t}{!HAsh zJ4<^WovY};)9IKAskSebdQiXv$y5}THuJZ}ouoElIZRui=6lrupV|_Jz=9^&;@HwL;J#@23k?A;k`0Bgf;ioO>W`IQ+4? z7A)eKoY4%+g%=w;=Vm8}H>@U*=*AWNtPqgWRqib#5RTGA@Q=43FrQn3J`GkTUV5yp0U`EOTqjfp+-9;0F8!dMEwwcK%(6`8sDD^aR04 zd6O5vh|Xk?&3dy4f|1QK&Ulf{h6Iq;d-&*ti#Ck>wZFG;GHwc?b;X~eBITx49>2d8 z4HcK&1&DvEGT6kXdzAm4oO8%c}8OBt~8H956_;YP-ss*uMf==a+%w~F>Qkm7r)IAuxuoX}h92$gHqbFUun#8m zWHdy`Zrm#=Pa98x8cO0vd@Tgkr*lm0{dky+Gocr0P8y%HGEI#c3qLqIRc`Oq_C%*; zG+QTr(#Q|yHKv6R@!DmLlwJQ3FAB)Yor-I4zyDyqM4yp5n2TrQH>gRt*Zw0+WI-Sj`EgmYHh=t9! zF6lz^xpqGGpo6!5`sc0a^FVhy_Uxq|@~(1@IIzV)nTpY9sY`CV!?8e&bB8=M&sYEb z2i}fvKdhp9Hs68Y-!QJ<=wE(iQ5+49tqt;Rh|jhYrI5VW-mIz|UY{h8E=rC5sh#DU z?wGgk-Tn!I?+Zer7pHlF_Z^!Kd1qkS3&lv#%s6-<5Y%jQL${cge5=G5Ab?D&|9$Y~ zf%rJC2+=2vg;y0-SJb3<@3%}BO$T$C66q$L_H33a`VUbgW~N(4B=v5(<=My|#|J7q z*Ox4wL4kbJd_~EjLTABSu4U7Jk#`y(6O*U6(k6XxM}CtGZB(H@3~kh*zaGRXM}Iwp zQ%xFk2>@wiZrVCV_G4G~v;NebCQ%T7{SDyPpSv&dT@Cn)Mx@IK*IdNrj{*4pkV4wv z)y0J538h>cpB7iPSzA~x24T`{dzNkpvGIqvt1Dvdq@o-`B=$hkczX8$yFMhsWNK-X zxr$kR$tMD0@W)Vxe1^t9qVmsg&K^F@u84)(n2dttIEAZFN6VD$&tskpG%SI7whGL3 z)DeRiwe&?8m7U{G`oW8!SCi*dM>oYL%UKQnKxV_0RXAEBQg1kStExGEUVwLJ0orGGwb7uv+kPDl7_E2*iD|J*=8A@;XCvwq0aw5oJYN*Yh&o=l} z2z8YKb-fIAH5spql4eXqp*)o2*b>#1@DSt?zZi{GPj0gH&Nm+EI<3^z0w%YTEV4xw zI6$+=Faa|Y4o5i0zm5lOg|&tmnJ806DBovU@Ll6XsA;NRrTK~t*AAJIAS=v-UZ%Pr z$oddI@NRir&erzCwq|)ciJemr-E061j{0Vc@Ys7K(mW|JYj*$+i1Q8XlIK8T?TYS(AXu$`2U zQ@fHxc=AVHl_}cRZQ)w0anMEoqRKKIvS^`<-aMf*FM`NsG&Uowneo+Ji$7DUDYc7*Hjg;-&aHM%3 zXO6cz$$G};Uqh+iY7Wpme>PHG4cu(q;xyskNLs$^uRRMfEg?8Cj~aE-ajM%CXkx0F z>C?g3tIA#9sBQOpe`J+04{q7^TqhFk^F1jFtk4JDRO*`d-fx`GYHb=&(JiaM1b?Y^ zO3Kj3sj76ieol|N$;>j@t#tKj=@*gP+mv}KwlTcPYgR$+)2(gk)2JNE=jSauPq!$< z<|?Sb%W)wS)b>b6i{8!x!^!xIdU3{CJFVnTcw0j{M%DUCF=_>eYYEUWnA-|B(+KYL z_W_`JI&&u^@t0})@DH^1LDuT0s3dMpCHIbYBgOT4Zh_4yHbSqRbtIKndeT4Q*Jg91 z@>rO!^t-G~*AIW;FQ$3J=b;oGg8?CTa~qNCb>&cgp@e;?0AqA&paz~(%PYO+QBo4( zp?}ZdSMWx0iJm7HVNk9A#^9Osa#GPJ!_pYEW}($8>&2}fbr@&ygZ?${A7_9?X$(&5 z#~-hxdPQwCNEpf=^+WH-3`2LxrrBMTa}~qJC9S;VzhG!On^JLyW6WkF{8aAE$sM+( zxr8xLW(KIjI`Rm(24r3OJBk<3GF=G!uSP0-G&AY32mLm8q=#Xom&Pqv=1C{d3>1^ zAjsmV@XZ%BKq^eUfBpa8KvO8ob|F3hAjJv*yo2Bhl0)KUus{qA9m8jf)KnOGGTa6~4>3@J_VzkL|vYPl*uL+Ot*Q7W!f5rJw5+AsjP_IfL+-S*2p| zB7!FhjvkUTxQkGWGSg{X;h~dK>gAJivW?88Nu!3o>ySDaABn$rAYt086#27fbjPQS zhq>55ASvm*60qRdVOY9=bU^+{Pi#!OaZwENN;zy5?EztOHK-Q5;rCuiFl}BSc1YaQ zC-S{=KsGDz@Ji9O5W;XxE0xI|@3o6(2~i4b8Ii9VT;^G$*dRw(V?=br)D&q^XkeBX z+gl~+R@rVD-Hwv@7RHV?Bip5KMI)aV^&snt?H<$Nt=OPx#VxF&BGi?2A2+lNOYywNUGMeGL;|(=UjGDtLG0sN&LpGx;|U;xa13s z;W_|SPk^G}!M9_^pO zA3bt3-tca%^42sHeDtfcC0S3w3H1ny!Bxpa=*k?XRPpx9Bb-gx1J9Yvx)4J(8cG+q z(iCPZ9dsf3#QVyZgD_MW#G#qgV)olu$59&3(PzQfw@%4uZ~<5J=ABvdY43(Qnp{;G zHg3>@T#>DbTuhFl3)fb3TFqdh)V2aq7!;&JOHseTWukvA7}(iGUq;v-{2J0iHSNHq z;+)h!p6Ok^+Sp8-jgL($n6Qu47xyE`cFO5SdZR6;R!FET`tm#0D37z339Suxjpv+s 
z*=%2-N$N?X&0?x_uut3erF@aBGj;9$k9?3FlbDO{RQa1_qtxrh4!4#fjp4x~akvdTp@ zos?^Q&XE;3N93s4rHQGPrV7+au1$$aB6$hLy*Yz_kN$~dweb9PcB!eYVQTGjFuJP> zZCEwBtb>TIgIO^qAzq@Bv-qud_ZD-2W<_at&ml-gv`tPt$@DF5`HlA zM>DmmMkpv&Zm-8)Y#0bLQf4MpD4_-7M8eu6rh(tL8dq8onHs#R9J~dGd2IaXXMC~h z91pKhnQa%Fsn29nAA1;x(%oC zhca~qQDJaMf?wFrl-Pj;e$bZMYmMF!Y3Lv&Sb?Sjn#!NVx&NDyc^$b4uYyo2OmERa zRz;yDGd@JTykzFLe|Wk-y7#3x`6$wt$zR8r48mdUvfbeL+4D|Z``~7$PrE@qc7rZe zVsIoIbCwzjLZ@_M1*bD{HaYn();Z1-q*-I{tEnTZ(}Zmk&%MXSNBX>o| z-u*RNkAyKC-Srp7c-=@5f)xMWg>o2WWl}j6j9=8+D8;T z>0*0q#;qw8%U8i;6s0fu#I*%(g*@@a2Er@@nyI}{=@W{Z-;`=wN4N~>6Xrh&z#g}l zN1g5}0-#(nHUTv_rl2{yUZ;h#t&Fd?tY!7L%ClY)>uH-Ny2ET$lW$S)IQiN79H)D^ zb&0AXYkupy0~w8)*>Sj_p9}4L?lGTq%VG|2p`nWGhnM^!g|j-|O{%9Q%swOq63|*W zw$(N_laI}`ilB+o!a-wl?er~;;3+)$_akSQ!8YO_&-e*SI7n^(QQ;X0ZE`{4f!gAl z5$d+9CKVNonM!NO_frREICIAxOv)wm>}-k?iRisM`R7;=lyo|E_YR~FpS&PS`Lg0f zl-ON<0S%Uix8J%#yZdkCz4YNhcec<|7*P(JsM#>-L>+tYg_71q9~70FAc^6KW5jql zw!crdgVLH1G_eET=|SEc977;)ezVC|{PJZfra|}@rD;0s&@61mTEBJtILllg{%{vN zfhb&lq0yChaLhnJ-Qb62MB7`>M;|_ceHKZAeeh@#8tbrK!ArP6oXIhMK;dhEJTY`@ z0Tq>MIe0`7tGv)N*F0IGYSJv0vN?Az8g+4K9S!pW2~9F4W(_U_T=jCZrzuZ3*|__T zONp_UWmyePv8C~rckc?Xji;Z5OEqg zC*Um)i;Wh4TEwqReQdVVbUKT^2>Tpi6z_^-uF*adUFug4i@JhzpWT^Sk&E>CyP2?H zWf6x}ehuTs6wvzCnTU&gYzT029Nz19(In1WC z`(1IGmi!O%2AR|BjQa4Q0~u)kM%}?xQyjWuQ16^Gp++;`vr7!k--UZWM*~7Zl|ceO@I3`OpaRhD;YoCuo5IC0uHx>9 z478hu@H|e0Zlo)Zj@01#;8BDs@991xe~^9uG2}UXLM(m7fa}AMwX*tjioBeV&Q8Gx zSq$6wZFkRBK`cMI>R(@W@+lo2t)L+4q-negWRLWZBz*|%=W4v62JrmzNuOtA*x)QE z5L%=OH#@KMdB%Jp^r?0tE}5-*6oP`-lO7Sf)0)n*e<{HA=&qhLR)oD8-+V}Z4=md) z+k9lKf64DB2hAT)UaCP~di?-V3~JBH7itYyk~L6hrnxM%?RKntqd`=!b|e7eFnAcu z3*V;g{xr7TSTm$}DY%~SMpl>m{Sj!We+WfxSEor?YeiAxYUy25pn(?T()E>ByP^c@ zipwvWrhIK((R((VU+;@LmOnDu)ZXB3YArzzin!Z^0;PyJWnlfflo|q8(QY;o1*5CO z##hnkO{uynTMdk`~DOC#1 zdiYxQoy}=@7(ke#A8$YZZVtk4wo$8x28&I;cY3Ro-|kW=*yiiHgCLZeAr)UtVx>Tu z|LvL0hq|1-jC0I4x#>&QZCfrVB=zT!nR|~Uz`9%~2 znl{uZ{VEszW`Fad^q_HB!K9*|U-stK%?~;g?&&+12A}Rq$z($Bzuk^2X(Y=hF?-dQ ztc3DsQKI;qhWIV`99Q#R3xnU0AvY!i*BECj-z9l74|%O=V@nlv|qqC^r^-~C?E zGW%c|uYgnfJ(gjsTm_cIqcv*mYM{+i+&@F@+69ZQOK&u#v4oxUSQJ=tvqQ3W=*m;| z>SkBi8LYb-qRY7Sthh*0%3XAC%$z1rhOJzuX=PkTOa=DlocZUpE#KxVNH5)_4n=T( zGi3YrH7e~sPNYVBd~Grcq#CF~rN{p9Zza-Ntnwfma@TB)=3g36*0lSZg#ixEjFe%+ zX=&LDZ5zqculZ`=RYc^ln(~;nN|Qh6gN=!6f9-N2h+3NWbIxYud&;4SX*tWf5slk4 z{q@@l71UAZgj~*6edXb57fBUxvAS7s(RI=X868JM0+^DCn2yC>;v%S;qPOjB>YVsz(Zx9a>>BK&M zIQK>7_n)4ud0X5YM}^i*keH{ehLsiy9@NvOpsFeQjdI6anLGvVbBw_*fU1TzdVS$i z*4j7z!I5RF#rSz|8ibi$;qE{4`aqWYik7QB5U&F5C*;TO_x+gtzPGpzNt!7~nsBT7)Ckc(K~%uv&{{6A`mmBJVAk-{s~52Vu|HbCH7_W1~ZCX^RflOakGg=jo2Z z<*s;5-J+2@^LRDZ-7EV&Pq+FTErw@pfFqvx^i%E7Fx#^n(E`m2(c>K-O5`M`Yek9el zzTGs5qD6*G;y#~xu3>qWuO?-amKYtvRA}I9z#UspEeM;wOERYeot_n_EUMJf$4_u?E!6X~?q)tPoZb^_;8Y_Ox2h1m<+Le-fsRd|T8db<8#$bqez zua^Z|>h%zdnuU^ww$#-dZ9NTM`FN+!IlLkz*FqWb!x^Z|C{KyGjZ+>G;;7Mb@LY|H zc+Gp`L((Dw7pnDlHNm&;SfHedhx*kad$I^uGz{`0BYelq0yEUHpNKSkvj$|dpvY3{7*YGyhXA^LP0&wOw9oNoC=QoVx1<2Dne8qqZL zm>nFh5DX(-RnQwvHCZQwn^#Z=E!SPVlaRJ78Bo@}!!9dRt^qZy?-*`Pt4WSmgucJv zV1yFkcjlEM^uz-;b#Q7ZCP@Lk)m}uPX={R4B=56k7WNh11BN~0T*vr@!!ow^B0hOR zQ)4)&(e%>bNNL%bm<&8H{*l_L7s0$2GUgX2Vd;=4d9Dm2v3TaL+;L>{K7h7 zV#k?xDPm(NDE31$ z<}|X)pEY6myjK+^gaIMk&Yj2~F0rSKemNqlsVm4c|N7mp_C*L01s;GNx#D-*&gk!qQr}^?_r@q!8fuXw!)fA7xkd} zb>vHvdx~H$5qqAWrow7}+8zBM65-JOt5z za=T6f7MK`XJuQog8kIEboPdhcaVJeHy)5z7EBLK5NRr()E|#K0L0N^JD@pUA^Czb` zbUZ_558y+vqAGeyHCbrvOvLD67Ph}06959VzQ_|>RrXQAqE+AQ(-AaKdxoWaF8hdt z{O3W@b^*o#-f1VuU>YMV03ELF7zkCN4Q&b#prz%3Nne0lSbRo@@ z^ihv%oIl~Qyl6Q;a#$*jOC%x0_;eis*)J7=f@Ct*)xF5 zo}u~@-I}2|$b%5L7>@+Z?4o+1r&v6ceIy+vroK&jCQ<4q&45HP2wCol4hVm3pZtjf 
zHz1D7oyaSKJ~T{Gx}7ONLA)D5k(%%`WswrDyzX*rn}i}}TB4^y#@mAwPzoC)`?rYv zHgx|trUN#mu*VzUV~8TnJM2Qh*ZM5B{x&y>5An`(M7=Z*Q>TdiH@j*2=moNuOtvpz z+G`@~-`%~+AgPKgke@XiRPgndh@bp*-HRsh;HTtz@-y_uhb%7ylVOTqG0#u?Vn5c5 zEp*XRo|8hcgG^$#{$O9CJ&NE;TrfRpSnLmes&MO{m=N%zc`}gb!eQ7odl$oy1%PI} z#AIxx%oRVy&{O~9xnK4$EY>(eQj}!HKIV$Fz*H=-=Kn)N0D6u`(;iO|VraI4fu_W` z;b5{7;Lyx4za}DU#+U7}=H0dAS#YJJ&g2!P@Htu-AL&w=-)*%P9h2{wR|@?Ff9~)b z^+e_3Hetq7W%ls{!?<6&Y$Z;NNB41pvrv)|MET6AZXFXJeFqbFW5@i5WGzl?bP+~? z*&_puH;wKv2)9T_d+P`bLvJFqX#j&xa*-;0nGBbQf0DC>o~=J_Wmtf*2SZQr?{i~X z9-IbRH8{iy?<0v9Ir1?$66+igy|yDQ5J~A9sFX@Pe<*kCY8+MwH?I z`P}zfQ6l^AO8ehZ=l^ZR;R%uu4;BK*=?W9t|0{+-at(MQZ(CtG=EJFNaFMlKCMXu30(gJUqj5+ z`GM|!keqcj;FKTa_qq;{*dHRXAq157hlB@kL#8%yAm2AgfU|*rDKX@FLlp=HL8ddv zAWLCHe@DcDeB2}fl7#=0+#<05c3=VqM*O3bkr@9X4nO|)q0hU;Gye{L8ZN*NH8Id@mP-u;Fmb8YuorjLrW&ndip8CN%_qp982r w1WEnz9^$&s1hkp_3#lPJQ~!HI7WYYjA7>z!`?f%npAh2%rB@vD|Lau$2O)#1n*aa+ diff --git a/gradle/wrapper/gradle-wrapper.properties b/gradle/wrapper/gradle-wrapper.properties index 7cec6af44e192..d5190930b2f32 100644 --- a/gradle/wrapper/gradle-wrapper.properties +++ b/gradle/wrapper/gradle-wrapper.properties @@ -1,6 +1,6 @@ distributionBase=GRADLE_USER_HOME distributionPath=wrapper/dists -distributionUrl=https\://services.gradle.org/distributions/gradle-7.3.3-all.zip +distributionUrl=https\://services.gradle.org/distributions/gradle-7.4-all.zip zipStoreBase=GRADLE_USER_HOME zipStorePath=wrapper/dists -distributionSha256Sum=c9490e938b221daf0094982288e4038deed954a3f12fb54cbf270ddf4e37d879 +distributionSha256Sum=cd5c2958a107ee7f0722004a12d0f8559b4564c34daad7df06cffd4d12a426d0 diff --git a/plugins/examples/gradle/wrapper/gradle-wrapper.properties b/plugins/examples/gradle/wrapper/gradle-wrapper.properties index 7cec6af44e192..d5190930b2f32 100644 --- a/plugins/examples/gradle/wrapper/gradle-wrapper.properties +++ b/plugins/examples/gradle/wrapper/gradle-wrapper.properties @@ -1,6 +1,6 @@ distributionBase=GRADLE_USER_HOME distributionPath=wrapper/dists -distributionUrl=https\://services.gradle.org/distributions/gradle-7.3.3-all.zip +distributionUrl=https\://services.gradle.org/distributions/gradle-7.4-all.zip zipStoreBase=GRADLE_USER_HOME zipStorePath=wrapper/dists -distributionSha256Sum=c9490e938b221daf0094982288e4038deed954a3f12fb54cbf270ddf4e37d879 +distributionSha256Sum=cd5c2958a107ee7f0722004a12d0f8559b4564c34daad7df06cffd4d12a426d0 From 5a3bd0212b691c264f0afe8009dabe695f5b3ad1 Mon Sep 17 00:00:00 2001 From: Howard Date: Mon, 14 Feb 2022 19:10:15 +0800 Subject: [PATCH 081/167] [DOCS] Fix typo in PersistentCache (#83836) --- .../xpack/searchablesnapshots/cache/full/PersistentCache.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/cache/full/PersistentCache.java b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/cache/full/PersistentCache.java index ec3948706c1be..5f735e4c63e05 100644 --- a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/cache/full/PersistentCache.java +++ b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/cache/full/PersistentCache.java @@ -353,7 +353,7 @@ private static List createWriters(NodeEnvironment nodeEnvironm } /** - * Creates a new {@link CacheIndexWriter} for the specified data path. The is a single instance per data path. + * Creates a new {@link CacheIndexWriter} for the specified data path. 
There is a single instance per data path. * * @param nodePath the data path * @return a new {@link CacheIndexWriter} instance From f033f5879e76c00f2d8a30d42f33a931e3ff6f66 Mon Sep 17 00:00:00 2001 From: Quentin Pradet Date: Mon, 14 Feb 2022 16:15:31 +0400 Subject: [PATCH 082/167] Fix path to cacert in check-running.asciidoc (#83888) Co-authored-by: Arbkz --- docs/reference/setup/install/check-running.asciidoc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/reference/setup/install/check-running.asciidoc b/docs/reference/setup/install/check-running.asciidoc index 7b4c176a27fd6..8dae6f2927b54 100644 --- a/docs/reference/setup/install/check-running.asciidoc +++ b/docs/reference/setup/install/check-running.asciidoc @@ -5,7 +5,7 @@ You can test that your {es} node is running by sending an HTTPS request to port ["source","sh",subs="attributes"] ---- -curl --cacert {os-dir}{slash}certs{slash}http_ca.crt -u elastic https://localhost:9200 <1> +curl --cacert {os-dir}{slash}config{slash}certs{slash}http_ca.crt -u elastic https://localhost:9200 <1> ---- // NOTCONSOLE <1> Ensure that you use `https` in your call, or the request will fail. From c18022b48e412ede96e29a218aaa446e716de48e Mon Sep 17 00:00:00 2001 From: weizijun Date: Mon, 14 Feb 2022 20:21:06 +0800 Subject: [PATCH 083/167] Cleanup redundant if check (#83774) Since getWriteIndex is implemented in the DataStream class, the type is always data_stream, so I think the check can be removed. --- .../org/elasticsearch/cluster/metadata/IndexAbstraction.java | 4 ---- 1 file changed, 4 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexAbstraction.java b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexAbstraction.java index b974b774b22e7..4f8083f1506b4 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexAbstraction.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexAbstraction.java @@ -371,10 +371,6 @@ public Index getWriteIndex(IndexRequest request, Metadata metadata) { return getWriteIndex(); } - if (getType() != IndexAbstraction.Type.DATA_STREAM) { - return getWriteIndex(); - } - if (dataStream.getIndexMode() != IndexMode.TIME_SERIES) { return getWriteIndex(); } From aa8da2f35accf446fb96b71b3906683a16beccce Mon Sep 17 00:00:00 2001 From: David Kyle Date: Mon, 14 Feb 2022 14:26:42 +0000 Subject: [PATCH 084/167] [ML] Remove uses of ML HLRC classes (#83885) Removes all references to the ML HLRC classes in preparation for removal of those classes. Mostly this means converting tests that use the HLRC to the low-level client. 
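As an illustration of the conversion pattern (a minimal sketch, not copied verbatim from any single test; jobId stands in for whatever identifier the test uses), a typed HLRC call such as

    GetJobResponse jobs = hlrc.machineLearning().getJob(new GetJobRequest(jobId), RequestOptions.DEFAULT);

becomes a raw endpoint call whose JSON response body is inspected as a map:

    Request request = new Request("GET", "/_ml/anomaly_detectors/" + jobId);
    Map<String, Object> jobs = entityAsMap(client().performRequest(request));

The exact endpoints and assertions vary from test to test; see the diffs below.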
--- .../ml/job/results/AnomalyCauseTests.java | 2 +- .../ml/job/results/AnomalyRecordTests.java | 2 +- .../utils/NamedXContentObjectHelperTests.java | 2 - .../xpack/deprecation/MlDeprecationIT.java | 82 +++-- .../ml/integration/InferenceIngestIT.java | 54 ++- .../xpack/ml/integration/TrainedModelIT.java | 159 +++++---- .../upgrades/MlJobSnapshotUpgradeIT.java | 322 +++++++++--------- .../upgrades/MlMappingsUpgradeIT.java | 29 +- 8 files changed, 336 insertions(+), 316 deletions(-) diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/results/AnomalyCauseTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/results/AnomalyCauseTests.java index 41b4be1276783..a99945d2361d6 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/results/AnomalyCauseTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/results/AnomalyCauseTests.java @@ -6,11 +6,11 @@ */ package org.elasticsearch.xpack.core.ml.job.results; -import org.elasticsearch.client.ml.job.config.DetectorFunction; import org.elasticsearch.common.io.stream.Writeable.Reader; import org.elasticsearch.test.AbstractSerializingTestCase; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.json.JsonXContent; +import org.elasticsearch.xpack.core.ml.job.config.DetectorFunction; import java.io.IOException; import java.util.ArrayList; diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/results/AnomalyRecordTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/results/AnomalyRecordTests.java index 44d8898c2cd34..8ee11741325e8 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/results/AnomalyRecordTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/results/AnomalyRecordTests.java @@ -6,7 +6,6 @@ */ package org.elasticsearch.xpack.core.ml.job.results; -import org.elasticsearch.client.ml.job.config.DetectorFunction; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.xcontent.XContentHelper; @@ -16,6 +15,7 @@ import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xcontent.json.JsonXContent; import org.elasticsearch.xpack.core.ml.MachineLearningField; +import org.elasticsearch.xpack.core.ml.job.config.DetectorFunction; import org.elasticsearch.xpack.core.ml.utils.MlStrings; import java.io.IOException; diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/utils/NamedXContentObjectHelperTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/utils/NamedXContentObjectHelperTests.java index 33e692bd3c723..d40f39fb5efe3 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/utils/NamedXContentObjectHelperTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/utils/NamedXContentObjectHelperTests.java @@ -6,8 +6,6 @@ */ package org.elasticsearch.xpack.core.ml.utils; -import org.elasticsearch.client.ml.inference.NamedXContentObject; -import org.elasticsearch.client.ml.inference.NamedXContentObjectHelper; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.search.SearchModule; diff --git a/x-pack/plugin/deprecation/qa/rest/src/javaRestTest/java/org/elasticsearch/xpack/deprecation/MlDeprecationIT.java 
b/x-pack/plugin/deprecation/qa/rest/src/javaRestTest/java/org/elasticsearch/xpack/deprecation/MlDeprecationIT.java index 3aada49d33ac5..2e58bb8f673b5 100644 --- a/x-pack/plugin/deprecation/qa/rest/src/javaRestTest/java/org/elasticsearch/xpack/deprecation/MlDeprecationIT.java +++ b/x-pack/plugin/deprecation/qa/rest/src/javaRestTest/java/org/elasticsearch/xpack/deprecation/MlDeprecationIT.java @@ -7,28 +7,17 @@ package org.elasticsearch.xpack.deprecation; -import org.elasticsearch.action.admin.indices.refresh.RefreshRequest; -import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.client.Request; import org.elasticsearch.client.RequestOptions; import org.elasticsearch.client.Response; -import org.elasticsearch.client.RestClient; -import org.elasticsearch.client.RestHighLevelClient; import org.elasticsearch.client.WarningsHandler; -import org.elasticsearch.client.ml.PutJobRequest; -import org.elasticsearch.client.ml.job.config.AnalysisConfig; -import org.elasticsearch.client.ml.job.config.DataDescription; -import org.elasticsearch.client.ml.job.config.Detector; -import org.elasticsearch.client.ml.job.config.Job; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.search.SearchModule; import org.elasticsearch.test.rest.ESRestTestCase; import org.elasticsearch.xcontent.NamedXContentRegistry; -import org.elasticsearch.xcontent.XContentType; import org.junit.After; import java.io.IOException; -import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.Map; @@ -44,12 +33,6 @@ public class MlDeprecationIT extends ESRestTestCase { .setWarningsHandler(WarningsHandler.PERMISSIVE) .build(); - private static class HLRC extends RestHighLevelClient { - HLRC(RestClient restClient) { - super(restClient, RestClient::close, new ArrayList<>()); - } - } - @After public void resetFeatures() throws IOException { Response response = adminClient().performRequest(new Request("POST", "/_features/_reset")); @@ -69,32 +52,21 @@ protected boolean enableWarningsCheck() { @SuppressWarnings("unchecked") public void testMlDeprecationChecks() throws Exception { - HLRC hlrc = new HLRC(client()); String jobId = "deprecation_check_job"; - hlrc.machineLearning() - .putJob( - new PutJobRequest( - Job.builder(jobId) - .setAnalysisConfig( - AnalysisConfig.builder(Collections.singletonList(Detector.builder().setFunction("count").build())) - ) - .setDataDescription(new DataDescription.Builder().setTimeField("time")) - .build() - ), - REQUEST_OPTIONS - ); - - IndexRequest indexRequest = new IndexRequest(".ml-anomalies-.write-" + jobId).id(jobId + "_model_snapshot_1") - .source("{\"job_id\":\"deprecation_check_job\",\"snapshot_id\":\"1\", \"snapshot_doc_count\":1}", XContentType.JSON); - hlrc.index(indexRequest, REQUEST_OPTIONS); - - indexRequest = new IndexRequest(".ml-anomalies-.write-" + jobId).id(jobId + "_model_snapshot_2") - .source( - "{\"job_id\":\"deprecation_check_job\",\"snapshot_id\":\"2\",\"snapshot_doc_count\":1,\"min_version\":\"8.0.0\"}", - XContentType.JSON - ); - hlrc.index(indexRequest, REQUEST_OPTIONS); - hlrc.indices().refresh(new RefreshRequest(".ml-anomalies-*"), REQUEST_OPTIONS); + buildAndPutJob(jobId); + + indexDoc( + ".ml-anomalies-.write-" + jobId, + jobId + "_model_snapshot_1", + "{\"job_id\":\"deprecation_check_job\",\"snapshot_id\":\"1\", \"snapshot_doc_count\":1}" + ); + + indexDoc( + ".ml-anomalies-.write-" + jobId, + jobId + "_model_snapshot_2", + 
"{\"job_id\":\"deprecation_check_job\",\"snapshot_id\":\"2\",\"snapshot_doc_count\":1,\"min_version\":\"8.0.0\"}" + ); + client().performRequest(new Request("POST", "/.ml-anomalies-*/_refresh")); // specify an index so that deprecation checks don't run against any accidentally existing indices Request getDeprecations = new Request("GET", "/does-not-exist-*/_migration/deprecations"); @@ -108,4 +80,30 @@ public void testMlDeprecationChecks() throws Exception { assertThat(mlSettingsDeprecations.get(0).get("_meta"), equalTo(Map.of("job_id", jobId, "snapshot_id", "1"))); } + private Response buildAndPutJob(String jobId) throws Exception { + String jobConfig = """ + { + "analysis_config" : { + "bucket_span": "3600s", + "detectors" :[{"function":"count"}] + }, + "data_description" : { + "time_field":"time", + "time_format":"yyyy-MM-dd HH:mm:ssX" + } + }"""; + + Request request = new Request("PUT", "/_ml/anomaly_detectors/" + jobId); + request.setOptions(REQUEST_OPTIONS); + request.setJsonEntity(jobConfig); + return client().performRequest(request); + } + + private Response indexDoc(String index, String docId, String source) throws IOException { + Request request = new Request("PUT", "/" + index + "/_doc/" + docId); + request.setOptions(REQUEST_OPTIONS); + request.setJsonEntity(source); + return client().performRequest(request); + } + } diff --git a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/InferenceIngestIT.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/InferenceIngestIT.java index a7ac49d564130..f3d46d2ea1f61 100644 --- a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/InferenceIngestIT.java +++ b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/InferenceIngestIT.java @@ -10,12 +10,11 @@ import org.elasticsearch.client.Request; import org.elasticsearch.client.Response; import org.elasticsearch.client.ResponseException; -import org.elasticsearch.client.ml.GetTrainedModelsStatsResponse; -import org.elasticsearch.client.ml.inference.TrainedModelStats; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.common.xcontent.support.XContentMapValues; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.test.ExternalTestCluster; @@ -24,9 +23,7 @@ import org.elasticsearch.xcontent.NamedXContentRegistry; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentFactory; -import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentType; -import org.elasticsearch.xcontent.json.JsonXContent; import org.elasticsearch.xpack.core.ml.inference.MlInferenceNamedXContentProvider; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.inference.InferenceDefinitionTests; import org.elasticsearch.xpack.core.ml.integration.MlRestTestStateCleaner; @@ -36,6 +33,7 @@ import java.io.IOException; import java.util.HashMap; +import java.util.List; import java.util.Map; import java.util.concurrent.TimeUnit; @@ -124,8 +122,8 @@ public void testPathologicalPipelineCreationAndDeletion() throws Exception { 
assertThat(EntityUtils.toString(searchResponse.getEntity()), containsString("\"value\":10")); assertBusy(() -> { try { - assertStatsWithCacheMisses(classificationModelId, 10L); - assertStatsWithCacheMisses(regressionModelId, 10L); + assertStatsWithCacheMisses(classificationModelId, 10); + assertStatsWithCacheMisses(regressionModelId, 10); } catch (ResponseException ex) { // this could just mean shard failures. fail(ex.getMessage()); @@ -176,8 +174,8 @@ public void testPipelineIngest() throws Exception { assertBusy(() -> { try { - assertStatsWithCacheMisses(classificationModelId, 10L); - assertStatsWithCacheMisses(regressionModelId, 15L); + assertStatsWithCacheMisses(classificationModelId, 10); + assertStatsWithCacheMisses(regressionModelId, 15); } catch (ResponseException ex) { // this could just mean shard failures. fail(ex.getMessage()); @@ -185,6 +183,7 @@ public void testPipelineIngest() throws Exception { }, 30, TimeUnit.SECONDS); } + @SuppressWarnings("unchecked") public void testPipelineIngestWithModelAliases() throws Exception { String regressionModelId = "test_regression_1"; putModel(regressionModelId, REGRESSION_CONFIG); @@ -255,17 +254,13 @@ public void testPipelineIngestWithModelAliases() throws Exception { assertThat(EntityUtils.toString(searchResponse.getEntity()), not(containsString("\"value\":0"))); assertBusy(() -> { - try ( - XContentParser parser = createParser( - JsonXContent.jsonXContent, - client().performRequest(new Request("GET", "_ml/trained_models/" + modelAlias + "/_stats")).getEntity().getContent() - ) - ) { - GetTrainedModelsStatsResponse response = GetTrainedModelsStatsResponse.fromXContent(parser); - assertThat(response.toString(), response.getTrainedModelStats(), hasSize(1)); - TrainedModelStats trainedModelStats = response.getTrainedModelStats().get(0); - assertThat(trainedModelStats.getModelId(), equalTo(regressionModelId2)); - assertThat(trainedModelStats.getInferenceStats(), is(notNullValue())); + try { + Response response = client().performRequest(new Request("GET", "_ml/trained_models/" + modelAlias + "/_stats")); + var responseMap = entityAsMap(response); + assertThat((List) responseMap.get("trained_model_stats"), hasSize(1)); + var stats = ((List>) responseMap.get("trained_model_stats")).get(0); + assertThat(stats.get("model_id"), equalTo(regressionModelId2)); + assertThat(stats.get("inference_stats"), is(notNullValue())); } catch (ResponseException ex) { // this could just mean shard failures. 
fail(ex.getMessage()); @@ -273,16 +268,19 @@ public void testPipelineIngestWithModelAliases() throws Exception { }); } - public void assertStatsWithCacheMisses(String modelId, long inferenceCount) throws IOException { + @SuppressWarnings("unchecked") + public void assertStatsWithCacheMisses(String modelId, int inferenceCount) throws IOException { Response statsResponse = client().performRequest(new Request("GET", "_ml/trained_models/" + modelId + "/_stats")); - try (XContentParser parser = createParser(JsonXContent.jsonXContent, statsResponse.getEntity().getContent())) { - GetTrainedModelsStatsResponse response = GetTrainedModelsStatsResponse.fromXContent(parser); - assertThat(response.getTrainedModelStats(), hasSize(1)); - TrainedModelStats trainedModelStats = response.getTrainedModelStats().get(0); - assertThat(trainedModelStats.getInferenceStats(), is(notNullValue())); - assertThat(trainedModelStats.getInferenceStats().getInferenceCount(), equalTo(inferenceCount)); - assertThat(trainedModelStats.getInferenceStats().getCacheMissCount(), greaterThan(0L)); - } + var responseMap = entityAsMap(statsResponse); + assertThat((List) responseMap.get("trained_model_stats"), hasSize(1)); + var stats = ((List>) responseMap.get("trained_model_stats")).get(0); + assertThat(stats.get("inference_stats"), is(notNullValue())); + assertThat( + stats.toString(), + (Integer) XContentMapValues.extractValue("inference_stats.inference_count", stats), + equalTo(inferenceCount) + ); + assertThat(stats.toString(), (Integer) XContentMapValues.extractValue("inference_stats.cache_miss_count", stats), greaterThan(0)); } public void testSimulate() throws IOException { diff --git a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/TrainedModelIT.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/TrainedModelIT.java index 3072504be5399..37e78c850b2c6 100644 --- a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/TrainedModelIT.java +++ b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/TrainedModelIT.java @@ -10,25 +10,12 @@ import org.elasticsearch.client.Request; import org.elasticsearch.client.Response; import org.elasticsearch.client.ResponseException; -import org.elasticsearch.client.ml.inference.MlInferenceNamedXContentProvider; -import org.elasticsearch.client.ml.inference.TrainedModelConfig; -import org.elasticsearch.client.ml.inference.TrainedModelDefinition; -import org.elasticsearch.client.ml.inference.TrainedModelInput; -import org.elasticsearch.client.ml.inference.trainedmodel.RegressionConfig; -import org.elasticsearch.client.ml.inference.trainedmodel.TargetType; -import org.elasticsearch.client.ml.inference.trainedmodel.TrainedModel; -import org.elasticsearch.client.ml.inference.trainedmodel.ensemble.Ensemble; -import org.elasticsearch.client.ml.inference.trainedmodel.ensemble.WeightedSum; -import org.elasticsearch.client.ml.inference.trainedmodel.tree.Tree; -import org.elasticsearch.client.ml.inference.trainedmodel.tree.TreeNode; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.test.SecuritySettingsSourceField; import org.elasticsearch.test.rest.ESRestTestCase; -import 
org.elasticsearch.xcontent.NamedXContentRegistry; -import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentFactory; import org.elasticsearch.xcontent.XContentType; @@ -41,8 +28,6 @@ import org.junit.After; import java.io.IOException; -import java.util.Arrays; -import java.util.Collections; import java.util.List; import java.util.Map; @@ -70,11 +55,6 @@ protected Settings restClientSettings() { return Settings.builder().put(super.restClientSettings()).put(ThreadContext.PREFIX + ".Authorization", BASIC_AUTH_VALUE).build(); } - @Override - protected NamedXContentRegistry xContentRegistry() { - return new NamedXContentRegistry(new MlInferenceNamedXContentProvider().getNamedXContentParsers()); - } - @Override protected boolean preserveTemplatesUponCompletion() { return true; @@ -268,56 +248,95 @@ public void testExportImportModel() throws IOException { } private void putRegressionModel(String modelId) throws IOException { - try (XContentBuilder builder = XContentFactory.jsonBuilder()) { - TrainedModelDefinition.Builder definition = new TrainedModelDefinition.Builder().setPreProcessors(Collections.emptyList()) - .setTrainedModel(buildRegression()); - TrainedModelConfig.builder() - .setDefinition(definition) - .setInferenceConfig(new RegressionConfig()) - .setModelId(modelId) - .setInput(new TrainedModelInput(Arrays.asList("col1", "col2", "col3"))) - .build() - .toXContent(builder, ToXContent.EMPTY_PARAMS); - Request model = new Request("PUT", "_ml/trained_models/" + modelId); - model.setJsonEntity(XContentHelper.convertToJson(BytesReference.bytes(builder), false, XContentType.JSON)); - assertThat(client().performRequest(model).getStatusLine().getStatusCode(), equalTo(200)); - } - } - - private static TrainedModel buildRegression() { - List featureNames = Arrays.asList("field.foo", "field.bar", "animal_cat", "animal_dog"); - Tree tree1 = Tree.builder() - .setFeatureNames(featureNames) - .setNodes( - TreeNode.builder(0).setLeftChild(1).setRightChild(2).setSplitFeature(0).setThreshold(0.5), - TreeNode.builder(1).setLeafValue(Collections.singletonList(0.3)), - TreeNode.builder(2).setThreshold(0.0).setSplitFeature(3).setLeftChild(3).setRightChild(4), - TreeNode.builder(3).setLeafValue(Collections.singletonList(0.1)), - TreeNode.builder(4).setLeafValue(Collections.singletonList(0.2)) - ) - .build(); - Tree tree2 = Tree.builder() - .setFeatureNames(featureNames) - .setNodes( - TreeNode.builder(0).setLeftChild(1).setRightChild(2).setSplitFeature(2).setThreshold(1.0), - TreeNode.builder(1).setLeafValue(Collections.singletonList(1.5)), - TreeNode.builder(2).setLeafValue(Collections.singletonList(0.9)) - ) - .build(); - Tree tree3 = Tree.builder() - .setFeatureNames(featureNames) - .setNodes( - TreeNode.builder(0).setLeftChild(1).setRightChild(2).setSplitFeature(1).setThreshold(0.2), - TreeNode.builder(1).setLeafValue(Collections.singletonList(1.5)), - TreeNode.builder(2).setLeafValue(Collections.singletonList(0.9)) - ) - .build(); - return Ensemble.builder() - .setTargetType(TargetType.REGRESSION) - .setFeatureNames(featureNames) - .setTrainedModels(Arrays.asList(tree1, tree2, tree3)) - .setOutputAggregator(new WeightedSum(Arrays.asList(0.5, 0.5, 0.5))) - .build(); + String modelConfig = """ + { + "definition": { + "trained_model": { + "ensemble": { + "feature_names": ["field.foo", "field.bar", "animal_cat", "animal_dog"], + "trained_models": [{ + "tree": { + "feature_names": ["field.foo", "field.bar", "animal_cat", 
"animal_dog"], + "tree_structure": [{ + "threshold": 0.5, + "split_feature": 0, + "node_index": 0, + "left_child": 1, + "right_child": 2 + }, { + "node_index": 1, + "leaf_value": [0.3] + }, { + "threshold": 0.0, + "split_feature": 3, + "node_index": 2, + "left_child": 3, + "right_child": 4 + }, { + "node_index": 3, + "leaf_value": [0.1] + }, { + "node_index": 4, + "leaf_value": [0.2] + }] + } + }, { + "tree": { + "feature_names": ["field.foo", "field.bar", "animal_cat", "animal_dog"], + "tree_structure": [{ + "threshold": 1.0, + "split_feature": 2, + "node_index": 0, + "left_child": 1, + "right_child": 2 + }, { + "node_index": 1, + "leaf_value": [1.5] + }, { + "node_index": 2, + "leaf_value": [0.9] + }] + } + }, { + "tree": { + "feature_names": ["field.foo", "field.bar", "animal_cat", "animal_dog"], + "tree_structure": [{ + "threshold": 0.2, + "split_feature": 1, + "node_index": 0, + "left_child": 1, + "right_child": 2 + }, { + "node_index": 1, + "leaf_value": [1.5] + }, { + "node_index": 2, + "leaf_value": [0.9] + }] + } + }], + "aggregate_output": { + "weighted_sum": { + "weights": [0.5, 0.5, 0.5] + } + }, + "target_type": "regression" + } + }, + "preprocessors": [] + }, + "input": { + "field_names": ["col1", "col2", "col3"] + }, + "inference_config": { + "regression": {} + } + } + """; + + Request model = new Request("PUT", "_ml/trained_models/" + modelId); + model.setJsonEntity(modelConfig); + assertThat(client().performRequest(model).getStatusLine().getStatusCode(), equalTo(200)); } public void testStartDeploymentWithInconsistentTotalLengths() throws IOException { diff --git a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/MlJobSnapshotUpgradeIT.java b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/MlJobSnapshotUpgradeIT.java index e1a1620f37771..73aee56bc6162 100644 --- a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/MlJobSnapshotUpgradeIT.java +++ b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/MlJobSnapshotUpgradeIT.java @@ -8,47 +8,20 @@ import org.elasticsearch.ElasticsearchException; import org.elasticsearch.Version; -import org.elasticsearch.client.MachineLearningClient; import org.elasticsearch.client.Request; import org.elasticsearch.client.RequestOptions; import org.elasticsearch.client.Response; import org.elasticsearch.client.ResponseException; -import org.elasticsearch.client.RestClient; -import org.elasticsearch.client.RestHighLevelClient; -import org.elasticsearch.client.ml.CloseJobRequest; -import org.elasticsearch.client.ml.CloseJobResponse; -import org.elasticsearch.client.ml.FlushJobRequest; -import org.elasticsearch.client.ml.FlushJobResponse; -import org.elasticsearch.client.ml.GetJobRequest; -import org.elasticsearch.client.ml.GetJobResponse; -import org.elasticsearch.client.ml.GetJobStatsRequest; -import org.elasticsearch.client.ml.GetModelSnapshotsRequest; -import org.elasticsearch.client.ml.GetModelSnapshotsResponse; -import org.elasticsearch.client.ml.OpenJobRequest; -import org.elasticsearch.client.ml.OpenJobResponse; -import org.elasticsearch.client.ml.PostDataRequest; -import org.elasticsearch.client.ml.PostDataResponse; -import org.elasticsearch.client.ml.PutJobRequest; -import org.elasticsearch.client.ml.PutJobResponse; -import org.elasticsearch.client.ml.RevertModelSnapshotRequest; -import org.elasticsearch.client.ml.UpgradeJobModelSnapshotRequest; -import org.elasticsearch.client.ml.job.config.AnalysisConfig; -import org.elasticsearch.client.ml.job.config.DataDescription; 
-import org.elasticsearch.client.ml.job.config.Detector; -import org.elasticsearch.client.ml.job.config.Job; -import org.elasticsearch.client.ml.job.process.DataCounts; -import org.elasticsearch.client.ml.job.process.ModelSnapshot; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.common.xcontent.support.XContentMapValues; import org.elasticsearch.core.TimeValue; -import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xcontent.json.JsonXContent; import org.elasticsearch.xpack.test.rest.XPackRestTestConstants; import java.io.IOException; import java.util.ArrayList; -import java.util.Arrays; import java.util.Collection; +import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -60,6 +33,7 @@ import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; +import static org.hamcrest.Matchers.hasEntry; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.is; @@ -68,14 +42,6 @@ public class MlJobSnapshotUpgradeIT extends AbstractUpgradeTestCase { private static final String JOB_ID = "ml-snapshots-upgrade-job"; - private static class HLRC extends RestHighLevelClient { - HLRC(RestClient restClient) { - super(restClient, RestClient::close, new ArrayList<>()); - } - } - - private MachineLearningClient hlrc; - @Override protected Collection templatesToWaitFor() { // We shouldn't wait for ML templates during the upgrade - production won't @@ -95,7 +61,6 @@ protected static void waitForPendingUpgraderTasks() throws Exception { * index mappings when it is assigned to an upgraded node even if no other ML endpoint is called after the upgrade */ public void testSnapshotUpgrader() throws Exception { - hlrc = new HLRC(client()).machineLearning(); Request adjustLoggingLevels = new Request("PUT", "/_cluster/settings"); adjustLoggingLevels.setJsonEntity(""" {"persistent": {"logger.org.elasticsearch.xpack.ml": "trace"}}"""); @@ -125,57 +90,51 @@ public void testSnapshotUpgrader() throws Exception { } } + @SuppressWarnings("unchecked") private void testSnapshotUpgradeFailsOnMixedCluster() throws Exception { - Job job = getJob(JOB_ID).jobs().get(0); - String currentSnapshot = job.getModelSnapshotId(); - GetModelSnapshotsResponse modelSnapshots = getModelSnapshots(job.getId()); - assertThat(modelSnapshots.snapshots(), hasSize(2)); - - ModelSnapshot snapshot = modelSnapshots.snapshots() - .stream() - .filter(s -> s.getSnapshotId().equals(currentSnapshot) == false) + Map jobs = entityAsMap(getJob(JOB_ID)); + + String currentSnapshot = ((List) XContentMapValues.extractValue("jobs.model_snapshot_id", jobs)).get(0); + Response getResponse = getModelSnapshots(JOB_ID); + List> snapshots = (List>) entityAsMap(getResponse).get("model_snapshots"); + assertThat(snapshots, hasSize(2)); + + Map snapshot = snapshots.stream() + .filter(s -> s.get("snapshot_id").equals(currentSnapshot) == false) .findFirst() .orElseThrow(() -> new ElasticsearchException("Not found snapshot other than " + currentSnapshot)); - Exception ex = expectThrows( - Exception.class, - () -> hlrc.upgradeJobSnapshot( - new UpgradeJobModelSnapshotRequest(JOB_ID, snapshot.getSnapshotId(), null, true), - RequestOptions.DEFAULT - ) - ); + Exception ex = expectThrows(Exception.class, () -> upgradeJobSnapshot(JOB_ID, (String) snapshot.get("snapshot_id"), true)); assertThat(ex.getMessage(), containsString("All nodes must be the 
same version")); } + @SuppressWarnings("unchecked") private void testSnapshotUpgrade() throws Exception { - Job job = getJob(JOB_ID).jobs().get(0); - String currentSnapshot = job.getModelSnapshotId(); + Map jobs = entityAsMap(getJob(JOB_ID)); + String currentSnapshotId = ((List) XContentMapValues.extractValue("jobs.model_snapshot_id", jobs)).get(0); - GetModelSnapshotsResponse modelSnapshots = getModelSnapshots(job.getId()); - assertThat(modelSnapshots.snapshots(), hasSize(2)); - assertThat(modelSnapshots.snapshots().get(0).getMinVersion().major, equalTo(UPGRADE_FROM_VERSION.major)); - assertThat(modelSnapshots.snapshots().get(1).getMinVersion().major, equalTo(UPGRADE_FROM_VERSION.major)); + Response getSnapshotsResponse = getModelSnapshots(JOB_ID); + List> snapshots = (List>) entityAsMap(getSnapshotsResponse).get("model_snapshots"); + assertThat(snapshots, hasSize(2)); + assertThat(Integer.parseInt(snapshots.get(0).get("min_version").toString(), 0, 1, 10), equalTo((int) UPGRADE_FROM_VERSION.major)); + assertThat(Integer.parseInt(snapshots.get(1).get("min_version").toString(), 0, 1, 10), equalTo((int) UPGRADE_FROM_VERSION.major)); - ModelSnapshot snapshot = modelSnapshots.snapshots() - .stream() - .filter(s -> s.getSnapshotId().equals(currentSnapshot) == false) + Map snapshotToUpgrade = snapshots.stream() + .filter(s -> s.get("snapshot_id").equals(currentSnapshotId) == false) .findFirst() - .orElseThrow(() -> new ElasticsearchException("Not found snapshot other than " + currentSnapshot)); + .orElseThrow(() -> new ElasticsearchException("Not found snapshot other than " + currentSnapshotId)); // Don't wait for completion in the initial upgrade call, but instead poll for status // using the stats endpoint - this mimics what the Kibana upgrade assistant does - String snapshotToUpgrade = snapshot.getSnapshotId(); - assertThat( - hlrc.upgradeJobSnapshot(new UpgradeJobModelSnapshotRequest(JOB_ID, snapshotToUpgrade, null, false), RequestOptions.DEFAULT) - .isCompleted(), - is(false) - ); + String snapshotToUpgradeId = (String) snapshotToUpgrade.get("snapshot_id"); + Map upgradeResponse = entityAsMap(upgradeJobSnapshot(JOB_ID, snapshotToUpgradeId, false)); + assertFalse((boolean) upgradeResponse.get("completed")); // Wait for completion by waiting for the persistent task to disappear assertBusy(() -> { try { Response response = client().performRequest( - new Request("GET", "_ml/anomaly_detectors/" + JOB_ID + "/model_snapshots/" + snapshotToUpgrade + "/_upgrade/_stats") + new Request("GET", "_ml/anomaly_detectors/" + JOB_ID + "/model_snapshots/" + snapshotToUpgradeId + "/_upgrade/_stats") ); // Doing this instead of using expectThrows() on the line above means we get better diagnostics if the test fails fail("Upgrade still in progress: " + entityAsMap(response)); @@ -184,96 +143,122 @@ private void testSnapshotUpgrade() throws Exception { } }, 30, TimeUnit.SECONDS); - List snapshots = getModelSnapshots(job.getId(), snapshotToUpgrade).snapshots(); - assertThat(snapshots, hasSize(1)); - snapshot = snapshots.get(0); - assertThat(snapshot.getLatestRecordTimeStamp(), equalTo(snapshots.get(0).getLatestRecordTimeStamp())); + List> upgradedSnapshot = (List>) entityAsMap(getModelSnapshots(JOB_ID, snapshotToUpgradeId)) + .get("model_snapshots"); + assertThat(upgradedSnapshot, hasSize(1)); + assertThat(upgradedSnapshot.get(0).get("latest_record_time_stamp"), equalTo(snapshotToUpgrade.get("latest_record_time_stamp"))); // Does the snapshot still work? 
+ var stats = entityAsMap(getJobStats(JOB_ID)); + List> jobStats = (List>) XContentMapValues.extractValue("jobs", stats); assertThat( - hlrc.getJobStats(new GetJobStatsRequest(JOB_ID), RequestOptions.DEFAULT) - .jobStats() - .get(0) - .getDataCounts() - .getLatestRecordTimeStamp(), - greaterThan(snapshot.getLatestRecordTimeStamp()) + (long) XContentMapValues.extractValue("data_counts.latest_record_timestamp", jobStats.get(0)), + greaterThan((long) snapshotToUpgrade.get("latest_record_time_stamp")) ); - RevertModelSnapshotRequest revertModelSnapshotRequest = new RevertModelSnapshotRequest(JOB_ID, snapshotToUpgrade); - revertModelSnapshotRequest.setDeleteInterveningResults(true); - assertThat( - hlrc.revertModelSnapshot(revertModelSnapshotRequest, RequestOptions.DEFAULT).getModel().getSnapshotId(), - equalTo(snapshotToUpgrade) - ); - assertThat(openJob(JOB_ID).isOpened(), is(true)); + + var revertResponse = entityAsMap(revertModelSnapshot(JOB_ID, snapshotToUpgradeId, true)); + assertThat((String) XContentMapValues.extractValue("model.snapshot_id", revertResponse), equalTo(snapshotToUpgradeId)); + assertThat(entityAsMap(openJob(JOB_ID)).get("opened"), is(true)); + + stats = entityAsMap(getJobStats(JOB_ID)); + jobStats = (List>) XContentMapValues.extractValue("jobs", stats); assertThat( - hlrc.getJobStats(new GetJobStatsRequest(JOB_ID), RequestOptions.DEFAULT) - .jobStats() - .get(0) - .getDataCounts() - .getLatestRecordTimeStamp(), - equalTo(snapshot.getLatestRecordTimeStamp()) + (long) XContentMapValues.extractValue("data_counts.latest_record_timestamp", jobStats.get(0)), + equalTo((long) upgradedSnapshot.get(0).get("latest_record_time_stamp")) ); closeJob(JOB_ID); } + @SuppressWarnings("unchecked") private void createJobAndSnapshots() throws Exception { TimeValue bucketSpan = TimeValue.timeValueHours(1); long startTime = 1491004800000L; - PutJobResponse jobResponse = buildAndPutJob(JOB_ID, bucketSpan); - Job job = jobResponse.getResponse(); - openJob(job.getId()); - DataCounts dataCounts = postData( - job.getId(), - generateData(startTime, bucketSpan, 10, Arrays.asList("foo"), (bucketIndex, series) -> bucketIndex == 5 ? 100.0 : 10.0).stream() - .collect(Collectors.joining()) - ).getDataCounts(); - assertThat(dataCounts.getInvalidDateCount(), equalTo(0L)); - assertThat(dataCounts.getBucketCount(), greaterThan(0L)); - final long lastCount = dataCounts.getBucketCount(); - flushJob(job.getId()); - closeJob(job.getId()); + buildAndPutJob(JOB_ID, bucketSpan); + openJob(JOB_ID); + var dataCounts = entityAsMap( + postData( + JOB_ID, + String.join( + "", + generateData( + startTime, + bucketSpan, + 10, + Collections.singletonList("foo"), + (bucketIndex, series) -> bucketIndex == 5 ? 
100.0 : 10.0 + ) + ) + ) + ); + + assertThat((Integer) dataCounts.get("invalid_date_count"), equalTo(0)); + assertThat((Integer) dataCounts.get("bucket_count"), greaterThan(0)); + final int lastCount = (Integer) dataCounts.get("bucket_count"); + flushJob(JOB_ID); + closeJob(JOB_ID); // We need to wait a second to ensure the second time around model snapshot will have a different ID (it depends on epoch seconds) waitUntil(() -> false, 2, TimeUnit.SECONDS); - openJob(job.getId()); - dataCounts = postData( - job.getId(), - generateData(startTime + 10 * bucketSpan.getMillis(), bucketSpan, 10, Arrays.asList("foo"), (bucketIndex, series) -> 10.0) - .stream() - .collect(Collectors.joining()) - ).getDataCounts(); - assertThat(dataCounts.getInvalidDateCount(), equalTo(0L)); - assertThat(dataCounts.getBucketCount(), greaterThan(lastCount)); - flushJob(job.getId()); - closeJob(job.getId()); - - GetModelSnapshotsResponse modelSnapshots = getModelSnapshots(job.getId()); - assertThat(modelSnapshots.snapshots(), hasSize(2)); - assertThat(modelSnapshots.snapshots().get(0).getMinVersion().major, equalTo(UPGRADE_FROM_VERSION.major)); - assertThat(modelSnapshots.snapshots().get(1).getMinVersion().major, equalTo(UPGRADE_FROM_VERSION.major)); + openJob(JOB_ID); + dataCounts = entityAsMap( + postData( + JOB_ID, + String.join( + "", + generateData( + startTime + 10 * bucketSpan.getMillis(), + bucketSpan, + 10, + Collections.singletonList("foo"), + (bucketIndex, series) -> 10.0 + ) + ) + ) + ); + assertThat((Integer) dataCounts.get("invalid_date_count"), equalTo(0)); + assertThat((Integer) dataCounts.get("bucket_count"), greaterThan(lastCount)); + flushJob(JOB_ID); + closeJob(JOB_ID); + + var modelSnapshots = entityAsMap(getModelSnapshots(JOB_ID)); + var snapshots = (List>) modelSnapshots.get("model_snapshots"); + assertThat(snapshots, hasSize(2)); + assertThat(Integer.parseInt(snapshots.get(0).get("min_version").toString(), 0, 1, 10), equalTo((int) UPGRADE_FROM_VERSION.major)); + assertThat(Integer.parseInt(snapshots.get(1).get("min_version").toString(), 0, 1, 10), equalTo((int) UPGRADE_FROM_VERSION.major)); } - private PutJobResponse buildAndPutJob(String jobId, TimeValue bucketSpan) throws Exception { - Detector.Builder detector = new Detector.Builder("mean", "value"); - detector.setPartitionFieldName("series"); - List detectors = new ArrayList<>(); - detectors.add(detector.build()); + private Response buildAndPutJob(String jobId, TimeValue bucketSpan) throws Exception { boolean isCategorization = randomBoolean(); + String jobConfig; + if (isCategorization) { - detectors.add(new Detector.Builder("count", null).setByFieldName("mlcategory").build()); - } - AnalysisConfig.Builder analysisConfig = new AnalysisConfig.Builder(detectors); - analysisConfig.setBucketSpan(bucketSpan); - if (isCategorization) { - analysisConfig.setCategorizationFieldName("text"); + jobConfig = """ + { + "analysis_config" : { + "bucket_span":""" + "\"" + bucketSpan + "\"," + """ + "detectors":[{"function":"mean", "field_name":"value", "partition_field_name":"series"}, + {"function":"count", "by_field_name":"mlcategory"}], + "categorization_field_name":"text" + }, + "data_description" : { + } + }"""; + } else { + jobConfig = """ + { + "analysis_config" : { + "bucket_span":""" + "\"" + bucketSpan + "\"," + """ + "detectors":[{"function":"mean", "field_name":"value", "partition_field_name":"series"}] + }, + "data_description" : { + } + }"""; } - Job.Builder job = new Job.Builder(jobId); - job.setAnalysisConfig(analysisConfig); - 
DataDescription.Builder dataDescription = new DataDescription.Builder(); - job.setDataDescription(dataDescription); - return putJob(job.build()); + Request request = new Request("PUT", "/_ml/anomaly_detectors/" + jobId); + request.setJsonEntity(jobConfig); + return client().performRequest(request); } private static List generateData( @@ -305,19 +290,19 @@ record = new HashMap<>(); return data; } - protected GetJobResponse getJob(String jobId) throws IOException { - return hlrc.getJob(new GetJobRequest(jobId), RequestOptions.DEFAULT); + protected Response getJob(String jobId) throws IOException { + return client().performRequest(new Request("GET", "/_ml/anomaly_detectors/" + jobId)); } - protected PutJobResponse putJob(Job job) throws IOException { - return hlrc.putJob(new PutJobRequest(job), RequestOptions.DEFAULT); + protected Response getJobStats(String jobId) throws IOException { + return client().performRequest(new Request("GET", "/_ml/anomaly_detectors/" + jobId + "/_stats")); } - protected OpenJobResponse openJob(String jobId) throws IOException { - return hlrc.openJob(new OpenJobRequest(jobId), RequestOptions.DEFAULT); + protected Response openJob(String jobId) throws IOException { + return client().performRequest(new Request("POST", "/_ml/anomaly_detectors/" + jobId + "/_open")); } - protected PostDataResponse postData(String jobId, String data) throws IOException { + protected Response postData(String jobId, String data) throws IOException { // Post data is deprecated, so a deprecation warning is possible (depending on the old version) RequestOptions postDataOptions = RequestOptions.DEFAULT.toBuilder().setWarningsHandler(warnings -> { if (warnings.isEmpty()) { @@ -332,25 +317,52 @@ protected PostDataResponse postData(String jobId, String data) throws IOExceptio + "in a future major version it will be compulsory to use a datafeed" ) == false; }).build(); - return hlrc.postData(new PostDataRequest(jobId, XContentType.JSON, new BytesArray(data)), postDataOptions); + + Request postDataRequest = new Request("POST", "/_ml/anomaly_detectors/" + jobId + "/_data"); + // Post data is deprecated, so expect a deprecation warning + postDataRequest.setOptions(postDataOptions); + postDataRequest.setJsonEntity(data); + return client().performRequest(postDataRequest); } - protected FlushJobResponse flushJob(String jobId) throws IOException { - return hlrc.flushJob(new FlushJobRequest(jobId), RequestOptions.DEFAULT); + protected void flushJob(String jobId) throws IOException { + client().performRequest(new Request("POST", "/_ml/anomaly_detectors/" + jobId + "/_flush")); } - protected CloseJobResponse closeJob(String jobId) throws IOException { - return hlrc.closeJob(new CloseJobRequest(jobId), RequestOptions.DEFAULT); + private void closeJob(String jobId) throws IOException { + Response closeResponse = client().performRequest(new Request("POST", "/_ml/anomaly_detectors/" + jobId + "/_close")); + assertThat(entityAsMap(closeResponse), hasEntry("closed", true)); } - protected GetModelSnapshotsResponse getModelSnapshots(String jobId) throws IOException { + protected Response getModelSnapshots(String jobId) throws IOException { return getModelSnapshots(jobId, null); } - protected GetModelSnapshotsResponse getModelSnapshots(String jobId, String snapshotId) throws IOException { - GetModelSnapshotsRequest getModelSnapshotsRequest = new GetModelSnapshotsRequest(jobId); - getModelSnapshotsRequest.setSnapshotId(snapshotId); - return hlrc.getModelSnapshots(getModelSnapshotsRequest, RequestOptions.DEFAULT); + 
protected Response getModelSnapshots(String jobId, String snapshotId) throws IOException {
+        String url = "_ml/anomaly_detectors/" + jobId + "/model_snapshots/";
+        if (snapshotId != null) {
+            url = url + snapshotId;
+        }
+        return client().performRequest(new Request("GET", url));
+    }
+
+    private Response revertModelSnapshot(String jobId, String snapshotId, boolean deleteIntervening) throws IOException {
+        String url = "_ml/anomaly_detectors/" + jobId + "/model_snapshots/" + snapshotId + "/_revert";
+
+        if (deleteIntervening) {
+            url = url + "?delete_intervening_results=true";
+        }
+        Request request = new Request("POST", url);
+        return client().performRequest(request);
+    }
+
+    private Response upgradeJobSnapshot(String jobId, String snapshotId, boolean waitForCompletion) throws IOException {
+        String url = "_ml/anomaly_detectors/" + jobId + "/model_snapshots/" + snapshotId + "/_upgrade";
+        if (waitForCompletion) {
+            url = url + "?wait_for_completion=true";
+        }
+        Request request = new Request("POST", url);
+        return client().performRequest(request);
     }

     protected static String createJsonRecord(Map<String, Object> keyValueMap) throws IOException {
diff --git a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/MlMappingsUpgradeIT.java b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/MlMappingsUpgradeIT.java
index c9bbf2d78f2c8..2438de83e0eff 100644
--- a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/MlMappingsUpgradeIT.java
+++ b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/MlMappingsUpgradeIT.java
@@ -10,19 +10,12 @@
 import org.elasticsearch.client.Request;
 import org.elasticsearch.client.Response;
 import org.elasticsearch.client.ResponseException;
-import org.elasticsearch.client.ml.job.config.AnalysisConfig;
-import org.elasticsearch.client.ml.job.config.DataDescription;
-import org.elasticsearch.client.ml.job.config.Detector;
-import org.elasticsearch.client.ml.job.config.Job;
-import org.elasticsearch.common.Strings;
-import org.elasticsearch.core.TimeValue;
 import org.elasticsearch.xpack.test.rest.IndexMappingTemplateAsserter;
 import org.elasticsearch.xpack.test.rest.XPackRestTestConstants;
 import org.elasticsearch.xpack.test.rest.XPackRestTestHelper;

 import java.io.IOException;
 import java.util.Collection;
-import java.util.Collections;
 import java.util.Map;
 import java.util.stream.Collectors;
 import java.util.stream.Stream;
@@ -72,19 +65,21 @@ public void testMappingsUpgrade() throws Exception {
     }

     private void createAndOpenTestJob() throws IOException {
-
-        Detector.Builder d = new Detector.Builder("metric", "responsetime");
-        d.setByFieldName("airline");
-        AnalysisConfig.Builder analysisConfig = new AnalysisConfig.Builder(Collections.singletonList(d.build()));
-        analysisConfig.setBucketSpan(TimeValue.timeValueMinutes(10));
-        Job.Builder job = new Job.Builder(JOB_ID);
-        job.setAnalysisConfig(analysisConfig);
-        job.setDataDescription(new DataDescription.Builder());
         // Use a custom index because other rolling upgrade tests meddle with the shared index
-        job.setResultsIndexName("mappings-upgrade-test");
+        String jobConfig = """
+            {
+            "results_index_name":"mappings-upgrade-test",
+            "analysis_config" : {
+                "bucket_span": "600s",
+                "detectors" :[{"function":"metric","field_name":"responsetime","by_field_name":"airline"}]
+            },
+            "data_description" : {
+            }
+            }
+            """;

         Request putJob = new Request("PUT", "_ml/anomaly_detectors/" + JOB_ID);
-        putJob.setJsonEntity(Strings.toString(job.build()));
+        putJob.setJsonEntity(jobConfig);
         Response
response = client().performRequest(putJob); assertEquals(200, response.getStatusLine().getStatusCode()); From 2e7ca44666b4d3e4dadf07e51e24acca67095e9d Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Mon, 14 Feb 2022 13:21:25 -0500 Subject: [PATCH 085/167] TSDB: improve error message for nested fields (#83837) Nested fields have never been supported in time_series mode but their error message was something along the lines of "nested fields don't work when you've enabled index sorting". Which is *true*, but when you put an index into time_series mode you don't explicitly enable index sorting - time_series mode does that for you. So the message is confusing. This gives you a more explicit "nested fields don't work in time_series mode". I've also added tests for nested dimensions outside of time series mode. We didn't have any such tests and it's nice to assert how they currently work. --- ...dimension_and_metric_in_non_tsdb_index.yml | 36 +++++++++++++++++-- .../rest-api-spec/test/tsdb/20_mapping.yml | 29 +++++++++++++++ .../org/elasticsearch/index/IndexMode.java | 4 +++ .../index/mapper/DocumentMapper.java | 2 +- 4 files changed, 68 insertions(+), 3 deletions(-) diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/05_dimension_and_metric_in_non_tsdb_index.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/05_dimension_and_metric_in_non_tsdb_index.yml index 0e22f086096ff..a570c1c460863 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/05_dimension_and_metric_in_non_tsdb_index.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/05_dimension_and_metric_in_non_tsdb_index.yml @@ -52,7 +52,7 @@ add time series mappings: can't shadow dimensions: - skip: version: " - 8.0.99" - reason: introduced in 8.1.0 to be backported to 8.0.0 + reason: introduced in 8.1.0 - do: indices.create: @@ -118,7 +118,7 @@ can't shadow dimensions: can't shadow metrics: - skip: version: " - 8.0.99" - reason: introduced in 8.1.0 to be backported to 8.0.0 + reason: introduced in 8.1.0 - do: indices.create: @@ -214,3 +214,35 @@ no _tsid in standard indices: - is_false: fields.metricset.keyword.non_searchable_indices - is_false: fields.metricset.keyword.non_aggregatable_indices - is_false: fields._tsid # _tsid metadata field must not exist in non-time-series indices + +--- +nested dimensions: + - skip: + version: " - 8.0.99" + reason: introduced in 8.1.0 + + - do: + indices.create: + index: test + body: + mappings: + properties: + "@timestamp": + type: date + nested: + type: nested + properties: + dim: + type: keyword + time_series_dimension: true + + - do: + index: + index: test + refresh: true + body: + "@timestamp": "2021-04-28T18:35:24.467Z" + nested: + - dim: foo + - dim: bar + - dim: baz diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/20_mapping.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/20_mapping.yml index fd66eef061b9f..3774526653b03 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/20_mapping.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/20_mapping.yml @@ -304,3 +304,32 @@ runtime field matching routing path: - '{"@timestamp": "2021-04-28T18:50:04.467Z", "dim": {"foo": {"bar": "a"}}}' - match: {items.0.index.error.reason: "All fields matching [routing_path] must be mapped but [dim.foo] was declared as [dynamic: false]"} - match: {items.1.index.error.reason: "All fields matching [routing_path] must be mapped 
but [dim.foo] was declared as [dynamic: false]"}
+
+---
+nested dimensions:
+  - skip:
+      version: " - 8.1.99"
+      reason: message changed in 8.2.0
+
+  - do:
+      catch: /cannot have nested fields when index is in \[index.mode=time_series\]/
+      indices.create:
+        index: test
+        body:
+          settings:
+            index:
+              mode: time_series
+              routing_path: [nested.*]
+              time_series:
+                start_time: 2021-04-28T00:00:00Z
+                end_time: 2021-04-29T00:00:00Z
+          mappings:
+            properties:
+              "@timestamp":
+                type: date
+              nested:
+                type: nested
+                properties:
+                  dim:
+                    type: keyword
+                    time_series_dimension: true
diff --git a/server/src/main/java/org/elasticsearch/index/IndexMode.java b/server/src/main/java/org/elasticsearch/index/IndexMode.java
index 66c0f71b328b0..3e49b5c91e677 100644
--- a/server/src/main/java/org/elasticsearch/index/IndexMode.java
+++ b/server/src/main/java/org/elasticsearch/index/IndexMode.java
@@ -20,6 +20,7 @@
 import org.elasticsearch.index.mapper.MapperService;
 import org.elasticsearch.index.mapper.MappingLookup;
 import org.elasticsearch.index.mapper.MetadataFieldMapper;
+import org.elasticsearch.index.mapper.NestedLookup;
 import org.elasticsearch.index.mapper.RoutingFieldMapper;
 import org.elasticsearch.index.mapper.TimeSeriesIdFieldMapper;
@@ -110,6 +111,9 @@ private String error(Setting<?> unsupported) {

         @Override
         public void validateMapping(MappingLookup lookup) {
+            if (lookup.nestedLookup() != NestedLookup.EMPTY) {
+                throw new IllegalArgumentException("cannot have nested fields when index is in " + tsdbMode());
+            }
             if (((RoutingFieldMapper) lookup.getMapper(RoutingFieldMapper.NAME)).required()) {
                 throw new IllegalArgumentException(routingRequiredBad());
             }
diff --git a/server/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java
index cb0390eb37fb1..5ccb43c099f91 100644
--- a/server/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java
+++ b/server/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java
@@ -94,6 +94,7 @@ public void validate(IndexSettings settings, boolean checkLimits) {
                 );
             }
         }
+        settings.getMode().validateMapping(mappingLookup);
         if (settings.getIndexSortConfig().hasIndexSort() && mappers().nestedLookup() != NestedLookup.EMPTY) {
             throw new IllegalArgumentException("cannot have nested fields when index sort is activated");
         }
@@ -117,6 +118,5 @@ public void validate(IndexSettings settings, boolean checkLimits) {
         if (checkLimits) {
             this.mappingLookup.checkLimits(settings);
         }
-        settings.getMode().validateMapping(mappingLookup);
     }
 }

From b2e351e379edcbc05385d32db9db20c4e3aa8b1c Mon Sep 17 00:00:00 2001
From: Rory Hunter
Date: Mon, 14 Feb 2022 18:59:03 +0000
Subject: [PATCH 086/167] Update system index metadata if `_meta` is null
 (#83896)

Closes #83890. Change the logic in `SystemIndexManager` for calculating
the required action on a system index so that the absence of `_meta` in
the index mappings no longer throws an exception, but instead indicates
that the mappings need to be updated. This aligns with the logic for the
case where `_meta` exists, but the version field is absent or holds a
`null` value.
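To make that decision concrete, here is a minimal standalone sketch of
the fallback behaviour (hypothetical illustration only, not the actual
Elasticsearch source; the String sentinel stands in for Version.V_EMPTY):

    import java.util.Map;

    class MappingVersionSketch {
        // Stand-in for Version.V_EMPTY: "no readable version, assume an update is needed".
        static final String VERSION_EMPTY = "0.0.0";

        @SuppressWarnings("unchecked")
        static String readMappingVersion(Map<String, Object> mappingSource, String versionKey) {
            Map<String, Object> meta = (Map<String, Object>) mappingSource.get("_meta");
            if (meta == null) {
                // Previously this threw IllegalStateException; now it falls through
                // to the same "mappings update required" path as a null version.
                return VERSION_EMPTY;
            }
            Object rawVersion = meta.get(versionKey);
            return rawVersion == null ? VERSION_EMPTY : rawVersion.toString();
        }

        public static void main(String[] args) {
            System.out.println(readMappingVersion(Map.of(), "version"));                     // 0.0.0
            System.out.println(readMappingVersion(Map.of("_meta", Map.of()), "version"));    // 0.0.0
            System.out.println(readMappingVersion(
                Map.of("_meta", Map.of("version", "8.2.0")), "version"));                    // 8.2.0
        }
    }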
---
 docs/changelog/83896.yaml                  |  6 +++
 .../indices/SystemIndexManagerIT.java      |  2 +-
 .../indices/SystemIndexManager.java        | 31 +++++++++---
 .../indices/SystemIndexManagerTests.java   | 50 ++++++++-----------
 4 files changed, 53 insertions(+), 36 deletions(-)
 create mode 100644 docs/changelog/83896.yaml

diff --git a/docs/changelog/83896.yaml b/docs/changelog/83896.yaml
new file mode 100644
index 0000000000000..3344003b17a38
--- /dev/null
+++ b/docs/changelog/83896.yaml
@@ -0,0 +1,6 @@
+pr: 83896
+summary: Update system index mappings if `_meta` is null
+area: Infra/Core
+type: bug
+issues:
+ - 83890
diff --git a/server/src/internalClusterTest/java/org/elasticsearch/indices/SystemIndexManagerIT.java b/server/src/internalClusterTest/java/org/elasticsearch/indices/SystemIndexManagerIT.java
index a9b3058770142..3f5b42826e1ba 100644
--- a/server/src/internalClusterTest/java/org/elasticsearch/indices/SystemIndexManagerIT.java
+++ b/server/src/internalClusterTest/java/org/elasticsearch/indices/SystemIndexManagerIT.java
@@ -52,7 +52,7 @@ protected Collection<Class<? extends Plugin>> nodePlugins() {
     }

     /**
-     * Check that if the the SystemIndexManager finds a managed index with out-of-date mappings, then
+     * Check that if the SystemIndexManager finds a managed index with out-of-date mappings, then
      * the manager updates those mappings.
      */
     public void testSystemIndexManagerUpgradesMappings() throws Exception {
diff --git a/server/src/main/java/org/elasticsearch/indices/SystemIndexManager.java b/server/src/main/java/org/elasticsearch/indices/SystemIndexManager.java
index 49b9563730734..78d763ff224ef 100644
--- a/server/src/main/java/org/elasticsearch/indices/SystemIndexManager.java
+++ b/server/src/main/java/org/elasticsearch/indices/SystemIndexManager.java
@@ -32,6 +32,7 @@
 import org.elasticsearch.gateway.GatewayService;
 import org.elasticsearch.xcontent.XContentType;

+import java.util.ArrayList;
 import java.util.Collections;
 import java.util.HashSet;
 import java.util.List;
@@ -97,9 +98,19 @@ public void clusterChanged(ClusterChangedEvent event) {
         }

         if (isUpgradeInProgress.compareAndSet(false, true)) {
-            final List<SystemIndexDescriptor> descriptors = getEligibleDescriptors(state.getMetadata()).stream()
-                .filter(descriptor -> getUpgradeStatus(state, descriptor) == UpgradeStatus.NEEDS_MAPPINGS_UPDATE)
-                .collect(Collectors.toList());
+            final List<SystemIndexDescriptor> descriptors = new ArrayList<>();
+            for (SystemIndexDescriptor systemIndexDescriptor : getEligibleDescriptors(state.getMetadata())) {
+                UpgradeStatus upgradeStatus;
+                try {
+                    upgradeStatus = getUpgradeStatus(state, systemIndexDescriptor);
+                } catch (Exception e) {
+                    logger.warn("Failed to calculate upgrade status: {}" + e.getMessage(), e);
+                    continue;
+                }
+                if (upgradeStatus == UpgradeStatus.NEEDS_MAPPINGS_UPDATE) {
+                    descriptors.add(systemIndexDescriptor);
+                }
+            }

             if (descriptors.isEmpty() == false) {
                 // Use a GroupedActionListener so that we only release the lock once all upgrade attempts have succeeded or failed.
@@ -271,14 +282,20 @@ private boolean checkIndexMappingUpToDate(SystemIndexDescriptor descriptor, Inde
     /**
      * Fetches the mapping version from an index's mapping's `_meta` info.
      */
-    @SuppressWarnings("unchecked")
     private Version readMappingVersion(SystemIndexDescriptor descriptor, MappingMetadata mappingMetadata) {
         final String indexName = descriptor.getPrimaryIndex();
         try {
+            @SuppressWarnings("unchecked")
             Map<String, Object> meta = (Map<String, Object>) mappingMetadata.sourceAsMap().get("_meta");
             if (meta == null) {
-                logger.warn("Missing _meta field in mapping [{}] of index [{}]", mappingMetadata.type(), indexName);
-                throw new IllegalStateException("Cannot read version string in index " + indexName);
+                logger.warn(
+                    "Missing _meta field in mapping [{}] of index [{}], assuming mappings update required",
+                    mappingMetadata.type(),
+                    indexName
+                );
+                // This can happen with old system indices, such as .watches, which were created before we had the convention of
+                // storing a version under `_meta.` We should just replace the template to be sure.
+                return Version.V_EMPTY;
             }

             final Object rawVersion = meta.get(descriptor.getVersionMetaKey());
@@ -289,7 +306,7 @@ private Version readMappingVersion(SystemIndexDescriptor descriptor, MappingMeta
             }
             final String versionString = rawVersion != null ? rawVersion.toString() : null;
             if (versionString == null) {
-                logger.warn("No value found in mappings for [_meta.{}]", descriptor.getVersionMetaKey());
+                logger.warn("No value found in mappings for [_meta.{}], assuming mappings update required", descriptor.getVersionMetaKey());
                 // If we called `Version.fromString(null)`, it would return `Version.CURRENT` and we wouldn't update the mappings
                 return Version.V_EMPTY;
             }
diff --git a/server/src/test/java/org/elasticsearch/indices/SystemIndexManagerTests.java b/server/src/test/java/org/elasticsearch/indices/SystemIndexManagerTests.java
index 679144155c107..85597aea72666 100644
--- a/server/src/test/java/org/elasticsearch/indices/SystemIndexManagerTests.java
+++ b/server/src/test/java/org/elasticsearch/indices/SystemIndexManagerTests.java
@@ -30,6 +30,7 @@
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.util.concurrent.EsExecutors;
 import org.elasticsearch.common.util.concurrent.ThreadContext;
+import org.elasticsearch.core.CheckedConsumer;
 import org.elasticsearch.index.Index;
 import org.elasticsearch.index.shard.ShardId;
 import org.elasticsearch.indices.SystemIndexManager.UpgradeStatus;
@@ -217,6 +218,19 @@ public void testManagerProcessesIndicesWithOutdatedMappings() {
         );
     }

+    /**
+     * Check that the manager will try to upgrade indices where the mappings metadata is null or absent.
+     */
+    public void testManagerProcessesIndicesWithNullMetadata() {
+        SystemIndices systemIndices = new SystemIndices(Map.of("MyIndex", FEATURE));
+        SystemIndexManager manager = new SystemIndexManager(systemIndices, client);
+
+        assertThat(
+            manager.getUpgradeStatus(markShardsAvailable(createClusterState(Strings.toString(getMappings(builder -> {})))), DESCRIPTOR),
+            equalTo(UpgradeStatus.NEEDS_MAPPINGS_UPDATE)
+        );
+    }
+
     /**
      * Check that the manager will try to upgrade indices where the version in the metadata is null or absent.
      */
@@ -225,7 +239,7 @@ public void testManagerProcessesIndicesWithNullVersionMetadata() {
         SystemIndexManager manager = new SystemIndexManager(systemIndices, client);

         assertThat(
-            manager.getUpgradeStatus(markShardsAvailable(createClusterState(Strings.toString(getMappings(null)))), DESCRIPTOR),
+            manager.getUpgradeStatus(markShardsAvailable(createClusterState(Strings.toString(getMappings((String) null)))), DESCRIPTOR),
             equalTo(UpgradeStatus.NEEDS_MAPPINGS_UPDATE)
         );
     }
@@ -385,42 +399,21 @@ private static XContentBuilder getMappings() {
     }

     private static XContentBuilder getMappings(String version) {
-        try {
-            final XContentBuilder builder = jsonBuilder();
-
-            builder.startObject();
-            {
-                builder.startObject("_meta");
-                builder.field("version", version);
-                builder.endObject();
-
-                builder.field("dynamic", "strict");
-                builder.startObject("properties");
-                {
-                    builder.startObject("completed");
-                    builder.field("type", "boolean");
-                    builder.endObject();
-                }
-                builder.endObject();
-            }
-
-            builder.endObject();
-            return builder;
-        } catch (IOException e) {
-            throw new UncheckedIOException("Failed to build " + SYSTEM_INDEX_NAME + " index mappings", e);
-        }
+        return getMappings(builder -> builder.object("_meta", meta -> meta.field("version", version)));
     }

     // Prior to 7.12.0, .tasks had _meta.version: 3 so we need to be sure we can handle that
     private static XContentBuilder getMappings(int version) {
+        return getMappings(builder -> builder.object("_meta", meta -> meta.field("version", version)));
+    }
+
+    private static XContentBuilder getMappings(CheckedConsumer<XContentBuilder, IOException> metaCallback) {
         try {
             final XContentBuilder builder = jsonBuilder();

             builder.startObject();
             {
-                builder.startObject("_meta");
-                builder.field("version", version);
-                builder.endObject();
+                metaCallback.accept(builder);

                 builder.field("dynamic", "strict");
                 builder.startObject("properties");
@@ -438,4 +431,5 @@ private static XContentBuilder getMappings(int version) {
             throw new UncheckedIOException("Failed to build " + SYSTEM_INDEX_NAME + " index mappings", e);
         }
     }
+
 }

From fea85b3efb3cf92952c98fcd8149bf75e652ba85 Mon Sep 17 00:00:00 2001
From: Salvatore Campagna <93581129+salvatore-campagna@users.noreply.github.com>
Date: Mon, 14 Feb 2022 20:28:09 +0100
Subject: [PATCH 087/167] fix: backward compatibility with version 8.0.0
 (#83509)

Commit fb6e1fb127e3c2bb1683402e9ab083115a56b08d did not land in version
8.0.0. As a result, we need to make master (version 8.2.0) interoperate
with version 8.0.0. This means we need to do the following:

* always read and write a non-optional string key for a range bucket
  (not for the range itself but for the bucket). The key is used when
  serialising the response back to clients.
* write originalFrom and originalTo values so that a node running
  version 8.0.0 can read them properly.
* read 'originalFrom' and 'originalTo' from a node running version
  8.0.0 and use them to overwrite 'from' and 'to' when creating a new
  range bucket.

All of this needs to be done keeping compatibility with version 7.17,
which is fixed by PR #83715. Note also that interoperability with
version 8.0.1 is taken care of by PR #83715. As a result, here we
really just need to address backward compatibility with version 8.0.0,
which is already released and cannot change.
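For readers less familiar with version-gated wire formats, the rule
above boils down to the following sketch (illustrative only, using plain
java.io streams in place of StreamOutput and an ad-hoc Version enum;
this is not the actual InternalBinaryRange/InternalRange source):

    import java.io.ByteArrayOutputStream;
    import java.io.DataOutputStream;
    import java.io.IOException;

    class VersionGatedKeySketch {
        // Declared oldest-to-newest so ordinal order matches release order.
        enum Version { V_7_17_0, V_7_17_1, V_8_0_0, V_8_2_0 }

        // A peer on 8.0.0, or older than 7.17.1, expects a mandatory string key,
        // so a null key is replaced with a generated one. Newer peers accept an
        // optional key, modelled here as a presence flag plus the value.
        static void writeBucketKey(DataOutputStream out, Version peer, String key, String generatedKey)
            throws IOException {
            boolean keyIsMandatory = peer == Version.V_8_0_0 || peer.compareTo(Version.V_7_17_1) < 0;
            if (keyIsMandatory) {
                out.writeUTF(key == null ? generatedKey : key);
            } else {
                out.writeBoolean(key != null); // stand-in for writeOptionalString
                if (key != null) {
                    out.writeUTF(key);
                }
            }
        }

        public static void main(String[] args) throws IOException {
            ByteArrayOutputStream bytes = new ByteArrayOutputStream();
            DataOutputStream out = new DataOutputStream(bytes);
            writeBucketKey(out, Version.V_8_0_0, null, "100.0-200.0"); // key forced to generated value
            writeBucketKey(out, Version.V_8_2_0, null, "100.0-200.0"); // null key encoded as "absent"
            System.out.println("encoded " + bytes.size() + " bytes");
        }
    }

The same gate appears twice in the hunks below, once for
InternalBinaryRange buckets and once for InternalRange buckets.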
--- .../bucket/range/InternalBinaryRange.java | 10 ++++++++-- .../aggregations/bucket/range/InternalRange.java | 12 ++++++++++-- 2 files changed, 18 insertions(+), 4 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/InternalBinaryRange.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/InternalBinaryRange.java index 4765f00f41fdb..939e664c080e2 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/InternalBinaryRange.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/InternalBinaryRange.java @@ -70,7 +70,11 @@ private static String generateKey(BytesRef from, BytesRef to, DocValueFormat for } private static Bucket createFromStream(StreamInput in, DocValueFormat format, boolean keyed) throws IOException { - String key = in.getVersion().onOrAfter(Version.V_7_17_1) ? in.readOptionalString() : in.readString(); + // NOTE: the key is required in version == 8.0.0 and version <= 7.17.0, + // while it is optional for all subsequent versions. + String key = in.getVersion().equals(Version.V_8_0_0) ? in.readString() + : in.getVersion().onOrAfter(Version.V_7_17_1) ? in.readOptionalString() + : in.readString(); BytesRef from = in.readBoolean() ? in.readBytesRef() : null; BytesRef to = in.readBoolean() ? in.readBytesRef() : null; long docCount = in.readLong(); @@ -81,7 +85,9 @@ private static Bucket createFromStream(StreamInput in, DocValueFormat format, bo @Override public void writeTo(StreamOutput out) throws IOException { - if (out.getVersion().onOrAfter(Version.V_7_17_1)) { + if (out.getVersion().equals(Version.V_8_0_0)) { + out.writeString(key == null ? generateKey(from, to, format) : key); + } else if (out.getVersion().onOrAfter(Version.V_7_17_1)) { out.writeOptionalString(key); } else { out.writeString(key == null ? generateKey(from, to, format) : key); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/InternalRange.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/InternalRange.java index fa5595f049cee..21a7e8a1fc470 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/InternalRange.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/range/InternalRange.java @@ -157,7 +157,11 @@ private static String generateKey(double from, double to, DocValueFormat format) @Override public void writeTo(StreamOutput out) throws IOException { - if (out.getVersion().onOrAfter(Version.V_7_17_1)) { + // NOTE: the key is required in version == 8.0.0 and version <= 7.17.0, + // while it is optional for all subsequent versions. + if (out.getVersion().equals(Version.V_8_0_0)) { + out.writeString(key == null ? generateKey(from, to, format) : key); + } else if (out.getVersion().onOrAfter(Version.V_7_17_1)) { out.writeOptionalString(key); } else { out.writeString(key == null ? generateKey(from, to, format) : key); @@ -263,7 +267,11 @@ public InternalRange(StreamInput in) throws IOException { int size = in.readVInt(); List ranges = new ArrayList<>(size); for (int i = 0; i < size; i++) { - String key = in.getVersion().onOrAfter(Version.V_7_17_1) ? in.readOptionalString() : in.readString(); + // NOTE: the key is required in version == 8.0.0 and version <= 7.17.0, + // while it is optional for all subsequent versions. + final String key = in.getVersion().equals(Version.V_8_0_0) ? in.readString() + : in.getVersion().onOrAfter(Version.V_7_17_1) ? 
in.readOptionalString() + : in.readString(); double from = in.readDouble(); if (in.getVersion().onOrAfter(Version.V_7_17_0)) { final Double originalFrom = in.readOptionalDouble(); From 8906af291edac636f5476c8a5cfebc8332afdaac Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Mon, 14 Feb 2022 14:41:07 -0500 Subject: [PATCH 088/167] Shrink slow log for has_child query (#83870) This removes the defaults from the `toXContent` of the `has_child` query so slow logs that contain it are marginally easier to read. Relates to #76515 --- .../join/query/HasChildQueryBuilder.java | 23 ++++-- .../join/query/HasChildQueryBuilderTests.java | 79 ++++++++++++++++++- 2 files changed, 93 insertions(+), 9 deletions(-) diff --git a/modules/parent-join/src/main/java/org/elasticsearch/join/query/HasChildQueryBuilder.java b/modules/parent-join/src/main/java/org/elasticsearch/join/query/HasChildQueryBuilder.java index 306931fbf8b78..0ec4c0c79c3e0 100644 --- a/modules/parent-join/src/main/java/org/elasticsearch/join/query/HasChildQueryBuilder.java +++ b/modules/parent-join/src/main/java/org/elasticsearch/join/query/HasChildQueryBuilder.java @@ -60,6 +60,7 @@ public class HasChildQueryBuilder extends AbstractQueryBuilder Date: Mon, 14 Feb 2022 18:00:33 -0500 Subject: [PATCH 089/167] Make a builder for _tsid (#83799) `_tsid` is built by getting a sorted list of encoded dimensions. This creates a `TimeSeriesIdBuilder` that abstracts that behind sensibly named methods. --- .../org/elasticsearch/index/IndexMode.java | 13 ++ .../index/mapper/DocumentDimensions.java | 60 ++++++ .../index/mapper/DocumentParserContext.java | 10 + .../index/mapper/IpFieldMapper.java | 9 +- .../index/mapper/KeywordFieldMapper.java | 8 +- .../index/mapper/LuceneDocument.java | 32 ---- .../index/mapper/NumberFieldMapper.java | 10 +- .../index/mapper/TimeSeriesIdFieldMapper.java | 177 ++++++++++-------- .../elasticsearch/search/DocValueFormat.java | 22 +-- .../mapper/BooleanScriptFieldTypeTests.java | 9 +- .../index/mapper/KeywordFieldMapperTests.java | 7 +- .../timeseries/TimeSeriesAggregatorTests.java | 24 +-- .../index/mapper/MapperServiceTestCase.java | 8 + .../mapper/TestDocumentParserContext.java | 4 +- .../unsignedlong/UnsignedLongFieldMapper.java | 9 +- 15 files changed, 223 insertions(+), 179 deletions(-) create mode 100644 server/src/main/java/org/elasticsearch/index/mapper/DocumentDimensions.java diff --git a/server/src/main/java/org/elasticsearch/index/IndexMode.java b/server/src/main/java/org/elasticsearch/index/IndexMode.java index 3e49b5c91e677..de6633db7de15 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexMode.java +++ b/server/src/main/java/org/elasticsearch/index/IndexMode.java @@ -17,6 +17,7 @@ import org.elasticsearch.core.Nullable; import org.elasticsearch.index.mapper.DataStreamTimestampFieldMapper; import org.elasticsearch.index.mapper.DateFieldMapper; +import org.elasticsearch.index.mapper.DocumentDimensions; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.MappingLookup; import org.elasticsearch.index.mapper.MetadataFieldMapper; @@ -84,6 +85,11 @@ public MetadataFieldMapper buildTimeSeriesIdFieldMapper() { // non time-series indices must not have a TimeSeriesIdFieldMapper return null; } + + @Override + public DocumentDimensions buildDocumentDimensions() { + return new DocumentDimensions.OnlySingleValueAllowed(); + } }, TIME_SERIES("time_series") { @Override @@ -149,6 +155,11 @@ private String routingRequiredBad() { public MetadataFieldMapper 
buildTimeSeriesIdFieldMapper() { return TimeSeriesIdFieldMapper.INSTANCE; } + + @Override + public DocumentDimensions buildDocumentDimensions() { + return new TimeSeriesIdFieldMapper.TimeSeriesIdBuilder(); + } }; protected String tsdbMode() { @@ -241,6 +252,8 @@ public String getName() { */ public abstract MetadataFieldMapper buildTimeSeriesIdFieldMapper(); + public abstract DocumentDimensions buildDocumentDimensions(); + public static IndexMode fromString(String value) { return switch (value) { case "standard" -> IndexMode.STANDARD; diff --git a/server/src/main/java/org/elasticsearch/index/mapper/DocumentDimensions.java b/server/src/main/java/org/elasticsearch/index/mapper/DocumentDimensions.java new file mode 100644 index 0000000000000..6f5f0c336633c --- /dev/null +++ b/server/src/main/java/org/elasticsearch/index/mapper/DocumentDimensions.java @@ -0,0 +1,60 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.index.mapper; + +import java.net.InetAddress; +import java.util.HashSet; +import java.util.Set; + +/** + * Collects dimensions from documents. + */ +public interface DocumentDimensions { + void addString(String fieldName, String value); + + void addIp(String fieldName, InetAddress value); + + void addLong(String fieldName, long value); + + void addUnsignedLong(String fieldName, long value); + + /** + * Makes sure that each dimension only appears on time. + */ + class OnlySingleValueAllowed implements DocumentDimensions { + private final Set names = new HashSet<>(); + + @Override + public void addString(String fieldName, String value) { + add(fieldName); + } + + @Override + public void addIp(String fieldName, InetAddress value) { + add(fieldName); + } + + @Override + public void addLong(String fieldName, long value) { + add(fieldName); + } + + @Override + public void addUnsignedLong(String fieldName, long value) { + add(fieldName); + } + + private void add(String fieldName) { + boolean isNew = names.add(fieldName); + if (false == isNew) { + throw new IllegalArgumentException("Dimension field [" + fieldName + "] cannot be a multi-valued field."); + } + } + }; +} diff --git a/server/src/main/java/org/elasticsearch/index/mapper/DocumentParserContext.java b/server/src/main/java/org/elasticsearch/index/mapper/DocumentParserContext.java index 67247733e9e4e..07aa1b0f23e2c 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/DocumentParserContext.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/DocumentParserContext.java @@ -89,6 +89,7 @@ protected void addDoc(LuceneDocument doc) { private final Set newFieldsSeen; private final Map dynamicObjectMappers; private final List dynamicRuntimeFields; + private final DocumentDimensions dimensions; private Field version; private SeqNoFieldMapper.SequenceIDFields seqID; @@ -105,6 +106,7 @@ private DocumentParserContext(DocumentParserContext in) { this.dynamicRuntimeFields = in.dynamicRuntimeFields; this.version = in.version; this.seqID = in.seqID; + this.dimensions = in.dimensions; } protected DocumentParserContext( @@ -124,6 +126,7 @@ protected DocumentParserContext( this.newFieldsSeen = new HashSet<>(); this.dynamicObjectMappers = new HashMap<>(); this.dynamicRuntimeFields = new 
ArrayList<>(); + this.dimensions = indexSettings.getMode().buildDocumentDimensions(); } public final IndexSettings indexSettings() { @@ -334,6 +337,13 @@ public XContentParser parser() { }; } + /** + * The collection of dimensions for this document. + */ + public DocumentDimensions getDimensions() { + return dimensions; + } + public abstract ContentPath path(); public abstract XContentParser parser(); diff --git a/server/src/main/java/org/elasticsearch/index/mapper/IpFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/IpFieldMapper.java index 9efbbe27ec8dd..901f0bda6801f 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/IpFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/IpFieldMapper.java @@ -18,11 +18,9 @@ import org.apache.lucene.search.Query; import org.apache.lucene.util.BytesRef; import org.elasticsearch.Version; -import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.logging.DeprecationCategory; import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.network.InetAddresses; -import org.elasticsearch.common.network.NetworkAddress; import org.elasticsearch.core.Nullable; import org.elasticsearch.core.Tuple; import org.elasticsearch.index.fielddata.IndexFieldData; @@ -492,12 +490,7 @@ private static InetAddress value(XContentParser parser, InetAddress nullValue) t private void indexValue(DocumentParserContext context, InetAddress address) { if (dimension) { - // Encode the tsid part of the dimension field if the _tsid field is enabled. - // If the _tsid field is not enabled, we can skip the encoding part. - BytesReference bytes = context.getMetadataMapper(TimeSeriesIdFieldMapper.NAME) != null - ? TimeSeriesIdFieldMapper.encodeTsidValue(NetworkAddress.format(address)) - : null; - context.doc().addDimensionBytes(fieldType().name(), bytes); + context.getDimensions().addIp(fieldType().name(), address); } if (indexed) { Field field = new InetAddressPoint(fieldType().name(), address); diff --git a/server/src/main/java/org/elasticsearch/index/mapper/KeywordFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/KeywordFieldMapper.java index e667428c28722..dd934733d4a3c 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/KeywordFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/KeywordFieldMapper.java @@ -32,7 +32,6 @@ import org.apache.lucene.util.automaton.CompiledAutomaton.AUTOMATON_TYPE; import org.apache.lucene.util.automaton.MinimizationOperations; import org.apache.lucene.util.automaton.Operations; -import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.lucene.BytesRefs; import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.lucene.search.AutomatonQueries; @@ -901,12 +900,7 @@ private void indexValue(DocumentParserContext context, String value) { value = normalizeValue(fieldType().normalizer(), name(), value); if (dimension) { - // Encode the tsid part of the dimension field. 
Although, it would seem reasonable - // to skip the encode part if we don't generate a _tsid field (as we do with number - // and ip fields), we keep this test because we must ensure that the value of this - // dimension field is not larger than TimeSeriesIdFieldMapper.DIMENSION_VALUE_LIMIT - BytesReference bytes = TimeSeriesIdFieldMapper.encodeTsidValue(value); - context.doc().addDimensionBytes(fieldType().name(), bytes); + context.getDimensions().addString(fieldType().name(), value); } // convert to utf8 only once before feeding postings/dv/stored fields diff --git a/server/src/main/java/org/elasticsearch/index/mapper/LuceneDocument.java b/server/src/main/java/org/elasticsearch/index/mapper/LuceneDocument.java index de3c08653ff0b..22b5d8bfc8ffa 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/LuceneDocument.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/LuceneDocument.java @@ -10,16 +10,12 @@ import org.apache.lucene.index.IndexableField; import org.apache.lucene.util.BytesRef; -import org.elasticsearch.common.bytes.BytesReference; import java.util.ArrayList; -import java.util.Collections; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; -import java.util.SortedMap; -import java.util.TreeMap; /** * Fork of {@link org.apache.lucene.document.Document} with additional functionality. @@ -31,12 +27,6 @@ public class LuceneDocument implements Iterable { private final String prefix; private final List fields; private Map keyedFields; - /** - * A sorted map of the serialized values of dimension fields that will be used - * for generating the _tsid field. The map will be used by {@link TimeSeriesIdFieldMapper} - * to build the _tsid field for the document. - */ - private SortedMap dimensionBytes; LuceneDocument(String path, LuceneDocument parent) { fields = new ArrayList<>(); @@ -109,28 +99,6 @@ public IndexableField getByKey(Object key) { return keyedFields == null ? null : keyedFields.get(key); } - /** - * Add the serialized byte reference for a dimension field. This will be used by {@link TimeSeriesIdFieldMapper} - * to build the _tsid field for the document. - */ - public void addDimensionBytes(String fieldName, BytesReference tsidBytes) { - BytesRef fieldNameBytes = new BytesRef(fieldName); - if (dimensionBytes == null) { - // It is a {@link TreeMap} so that it is order by field name. 
- dimensionBytes = new TreeMap<>(); - } else if (dimensionBytes.containsKey(fieldNameBytes)) { - throw new IllegalArgumentException("Dimension field [" + fieldName + "] cannot be a multi-valued field."); - } - dimensionBytes.put(fieldNameBytes, tsidBytes); - } - - public SortedMap getDimensionBytes() { - if (dimensionBytes == null) { - return Collections.emptySortedMap(); - } - return dimensionBytes; - } - public IndexableField[] getFields(String name) { List f = new ArrayList<>(); for (IndexableField field : fields) { diff --git a/server/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapper.java index f21b697dba0da..6abfd25f194b0 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapper.java @@ -28,7 +28,6 @@ import org.apache.lucene.util.NumericUtils; import org.elasticsearch.common.Explicit; import org.elasticsearch.common.Numbers; -import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.lucene.search.Queries; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Setting.Property; @@ -1476,14 +1475,7 @@ private static Number value(XContentParser parser, NumberType numberType, Number private void indexValue(DocumentParserContext context, Number numericValue) { if (dimension && numericValue != null) { - // Dimension can only be one of byte, short, int, long. So, we encode the tsid - // part of the dimension field by using the long value. - // Also, there is no point in encoding the tsid value if we do not generate - // the _tsid field. - BytesReference bytes = context.getMetadataMapper(TimeSeriesIdFieldMapper.NAME) != null - ? 
TimeSeriesIdFieldMapper.encodeTsidValue(numericValue.longValue()) - : null; - context.doc().addDimensionBytes(fieldType().name(), bytes); + context.getDimensions().addLong(fieldType().name(), numericValue.longValue()); } List fields = fieldType().type.createFields(fieldType().name(), numericValue, indexed, hasDocValues, stored); context.doc().addAll(fields); diff --git a/server/src/main/java/org/elasticsearch/index/mapper/TimeSeriesIdFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/TimeSeriesIdFieldMapper.java index 4ccf0e28f734d..136a297d8b1f5 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/TimeSeriesIdFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/TimeSeriesIdFieldMapper.java @@ -16,7 +16,7 @@ import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.network.NetworkAddress; import org.elasticsearch.index.IndexMode; import org.elasticsearch.index.fielddata.FieldData; import org.elasticsearch.index.fielddata.IndexFieldData; @@ -29,6 +29,7 @@ import org.elasticsearch.search.lookup.SearchLookup; import java.io.IOException; +import java.net.InetAddress; import java.time.ZoneId; import java.util.Collections; import java.util.LinkedHashMap; @@ -36,6 +37,7 @@ import java.util.Locale; import java.util.Map; import java.util.SortedMap; +import java.util.TreeMap; import java.util.function.Supplier; /** @@ -141,25 +143,8 @@ private TimeSeriesIdFieldMapper() { public void postParse(DocumentParserContext context) throws IOException { assert fieldType().isIndexed() == false; - // SortedMap is expected to be sorted by key (field name) - SortedMap dimensionFields = context.doc().getDimensionBytes(); - BytesReference timeSeriesId = buildTsidField(dimensionFields); - context.doc().add(new SortedDocValuesField(fieldType().name(), timeSeriesId.toBytesRef())); - } - - public static BytesReference buildTsidField(SortedMap dimensionFields) throws IOException { - if (dimensionFields == null || dimensionFields.isEmpty()) { - throw new IllegalArgumentException("Dimension fields are missing."); - } - - try (BytesStreamOutput out = new BytesStreamOutput()) { - encodeTsid(out, dimensionFields); - BytesReference timeSeriesId = out.bytes(); - if (timeSeriesId.length() > LIMIT) { - throw new IllegalArgumentException(NAME + " longer than [" + LIMIT + "] bytes [" + timeSeriesId.length() + "]."); - } - return timeSeriesId; - } + TimeSeriesIdBuilder timeSeriesIdBuilder = (TimeSeriesIdBuilder) context.getDimensions(); + context.doc().add(new SortedDocValuesField(fieldType().name(), timeSeriesIdBuilder.build().toBytesRef())); } @Override @@ -167,27 +152,6 @@ protected String contentType() { return CONTENT_TYPE; } - public static void encodeTsid(StreamOutput out, SortedMap dimensionFields) throws IOException { - out.writeVInt(dimensionFields.size()); - for (Map.Entry entry : dimensionFields.entrySet()) { - BytesRef fieldName = entry.getKey(); - if (fieldName.length > DIMENSION_NAME_LIMIT) { - throw new IllegalArgumentException( - String.format( - Locale.ROOT, - "Dimension name must be less than [%d] bytes but [%s] was [%s].", - DIMENSION_NAME_LIMIT, - fieldName.utf8ToString(), - fieldName.length - ) - ); - } - out.writeBytesRef(fieldName); - entry.getValue().writeTo(out); - } - - } - /** * Decode the {@code _tsid} into a human readable map. 
*/ @@ -218,52 +182,107 @@ public static Map decodeTsid(StreamInput in) { } } - public static Map decodeTsid(BytesRef bytesRef) { - try (StreamInput input = new BytesArray(bytesRef).streamInput()) { - return decodeTsid(input); - } catch (IOException ex) { - throw new IllegalArgumentException("Dimension field cannot be deserialized.", ex); + public static class TimeSeriesIdBuilder implements DocumentDimensions { + /** + * A sorted map of the serialized values of dimension fields that will be used + * for generating the _tsid field. The map will be used by {@link TimeSeriesIdFieldMapper} + * to build the _tsid field for the document. + */ + private final SortedMap dimensions = new TreeMap<>(); + + public BytesReference build() throws IOException { + if (dimensions.isEmpty()) { + throw new IllegalArgumentException("Dimension fields are missing."); + } + + try (BytesStreamOutput out = new BytesStreamOutput()) { + out.writeVInt(dimensions.size()); + for (Map.Entry entry : dimensions.entrySet()) { + BytesRef fieldName = entry.getKey(); + if (fieldName.length > DIMENSION_NAME_LIMIT) { + throw new IllegalArgumentException( + String.format( + Locale.ROOT, + "Dimension name must be less than [%d] bytes but [%s] was [%s].", + DIMENSION_NAME_LIMIT, + fieldName.utf8ToString(), + fieldName.length + ) + ); + } + out.writeBytesRef(fieldName); + entry.getValue().writeTo(out); + } + BytesReference timeSeriesId = out.bytes(); + if (timeSeriesId.length() > LIMIT) { + throw new IllegalArgumentException(NAME + " longer than [" + LIMIT + "] bytes [" + timeSeriesId.length() + "]."); + } + return timeSeriesId; + } } - } - public static BytesReference encodeTsidValue(String value) { - try (BytesStreamOutput out = new BytesStreamOutput()) { - out.write((byte) 's'); - /* - * Write in utf8 instead of StreamOutput#writeString which is utf-16-ish - * so its easier for folks to reason about the space taken up. Mostly - * it'll be smaller too. - */ - BytesRef bytes = new BytesRef(value); - if (bytes.length > DIMENSION_VALUE_LIMIT) { - throw new IllegalArgumentException( - "Dimension fields must be less than [" + DIMENSION_VALUE_LIMIT + "] bytes but was [" + bytes.length + "]." - ); + @Override + public void addString(String fieldName, String value) { + try (BytesStreamOutput out = new BytesStreamOutput()) { + out.write((byte) 's'); + /* + * Write in utf8 instead of StreamOutput#writeString which is utf-16-ish + * so its easier for folks to reason about the space taken up. Mostly + * it'll be smaller too. + */ + BytesRef bytes = new BytesRef(value); + if (bytes.length > DIMENSION_VALUE_LIMIT) { + throw new IllegalArgumentException( + "Dimension fields must be less than [" + DIMENSION_VALUE_LIMIT + "] bytes but was [" + bytes.length + "]." 
+ ); + } + out.writeBytesRef(bytes); + add(fieldName, out.bytes()); + } catch (IOException e) { + throw new IllegalArgumentException("Dimension field cannot be serialized.", e); } - out.writeBytesRef(bytes); - return out.bytes(); - } catch (IOException e) { - throw new IllegalArgumentException("Dimension field cannot be serialized.", e); } - } - public static BytesReference encodeTsidValue(long value) { - try (BytesStreamOutput out = new BytesStreamOutput()) { - out.write((byte) 'l'); - out.writeLong(value); - return out.bytes(); - } catch (IOException e) { - throw new IllegalArgumentException("Dimension field cannot be serialized.", e); + @Override + public void addIp(String fieldName, InetAddress value) { + addString(fieldName, NetworkAddress.format(value)); + } + + @Override + public void addLong(String fieldName, long value) { + try (BytesStreamOutput out = new BytesStreamOutput()) { + out.write((byte) 'l'); + out.writeLong(value); + add(fieldName, out.bytes()); + } catch (IOException e) { + throw new IllegalArgumentException("Dimension field cannot be serialized.", e); + } + } + + @Override + public void addUnsignedLong(String fieldName, long value) { + try (BytesStreamOutput out = new BytesStreamOutput()) { + out.write((byte) 'u'); + out.writeLong(value); + add(fieldName, out.bytes()); + } catch (IOException e) { + throw new IllegalArgumentException("Dimension field cannot be serialized.", e); + } + } + + private void add(String fieldName, BytesReference encoded) { + BytesReference old = dimensions.put(new BytesRef(fieldName), encoded); + if (old != null) { + throw new IllegalArgumentException("Dimension field [" + fieldName + "] cannot be a multi-valued field."); + } } } - public static BytesReference encodeTsidUnsignedLongValue(long value) { - try (BytesStreamOutput out = new BytesStreamOutput()) { - out.write((byte) 'u'); - out.writeLong(value); - return out.bytes(); - } catch (IOException e) { - throw new IllegalArgumentException("Dimension field cannot be serialized.", e); + public static Map decodeTsid(BytesRef bytesRef) { + try (StreamInput input = new BytesArray(bytesRef).streamInput()) { + return decodeTsid(input); + } catch (IOException ex) { + throw new IllegalArgumentException("Dimension field cannot be deserialized.", ex); } } } diff --git a/server/src/main/java/org/elasticsearch/search/DocValueFormat.java b/server/src/main/java/org/elasticsearch/search/DocValueFormat.java index 8cd8cee8aaf19..488910a9f700d 100644 --- a/server/src/main/java/org/elasticsearch/search/DocValueFormat.java +++ b/server/src/main/java/org/elasticsearch/search/DocValueFormat.java @@ -12,7 +12,6 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.Version; import org.elasticsearch.common.bytes.BytesArray; -import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.NamedWriteable; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -23,6 +22,7 @@ import org.elasticsearch.geometry.utils.Geohash; import org.elasticsearch.index.mapper.DateFieldMapper; import org.elasticsearch.index.mapper.TimeSeriesIdFieldMapper; +import org.elasticsearch.index.mapper.TimeSeriesIdFieldMapper.TimeSeriesIdBuilder; import org.elasticsearch.search.aggregations.bucket.geogrid.GeoTileUtils; import java.io.IOException; @@ -38,8 +38,6 @@ import java.util.Locale; import java.util.Map; import java.util.Objects; -import java.util.SortedMap; -import java.util.TreeMap; import java.util.function.LongSupplier; /** A 
formatter for values as returned by the fielddata/doc-values APIs. */ @@ -706,38 +704,34 @@ public BytesRef parseBytesRef(Object value) { } Map m = (Map) value; - SortedMap dimensionFields = new TreeMap<>(); + TimeSeriesIdBuilder builder = new TimeSeriesIdBuilder(); for (Map.Entry entry : m.entrySet()) { - BytesRef k = new BytesRef(entry.getKey().toString()); + String f = entry.getKey().toString(); Object v = entry.getValue(); - BytesReference bytes; if (v instanceof String s) { - bytes = TimeSeriesIdFieldMapper.encodeTsidValue(s); + builder.addString(f, s); } else if (v instanceof Long || v instanceof Integer) { Long l = Long.valueOf(v.toString()); // For a long encoded number, we must check if the number can be the encoded value // of an unsigned_long. Number ul = (Number) UNSIGNED_LONG_SHIFTED.format(l); if (l == ul) { - bytes = TimeSeriesIdFieldMapper.encodeTsidValue(l); + builder.addLong(f, l); } else { long ll = UNSIGNED_LONG_SHIFTED.parseLong(String.valueOf(l), false, () -> 0L); - bytes = TimeSeriesIdFieldMapper.encodeTsidUnsignedLongValue(ll); + builder.addUnsignedLong(f, ll); } } else if (v instanceof BigInteger ul) { long ll = UNSIGNED_LONG_SHIFTED.parseLong(ul.toString(), false, () -> 0L); - bytes = TimeSeriesIdFieldMapper.encodeTsidUnsignedLongValue(ll); + builder.addUnsignedLong(f, ll); } else { throw new IllegalArgumentException("Unexpected value in tsid object [" + v + "]"); } - - assert bytes != null : "Could not parse fields in _tsid field [" + value + "]."; - dimensionFields.put(k, bytes); } try { - return TimeSeriesIdFieldMapper.buildTsidField(dimensionFields).toBytesRef(); + return builder.build().toBytesRef(); } catch (IOException e) { throw new IllegalArgumentException(e); } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/BooleanScriptFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/BooleanScriptFieldTypeTests.java index 6ff78b0900238..ad47dee581a1a 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/BooleanScriptFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/BooleanScriptFieldTypeTests.java @@ -29,6 +29,7 @@ import org.elasticsearch.Version; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.lucene.search.function.ScriptScoreQuery; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.fielddata.BooleanScriptFieldData; import org.elasticsearch.index.fielddata.ScriptDocValues; import org.elasticsearch.index.query.SearchExecutionContext; @@ -319,7 +320,13 @@ public void testDualingQueries() throws IOException { String source = "{\"foo\": " + values + "}"; XContentParser parser = createParser(JsonXContent.jsonXContent, source); SourceToParse sourceToParse = new SourceToParse("test", new BytesArray(source), XContentType.JSON); - DocumentParserContext ctx = new TestDocumentParserContext(MappingLookup.EMPTY, null, null, null, sourceToParse) { + DocumentParserContext ctx = new TestDocumentParserContext( + MappingLookup.EMPTY, + MapperTestCase.createIndexSettings(Version.CURRENT, Settings.EMPTY), + null, + null, + sourceToParse + ) { @Override public XContentParser parser() { return parser; diff --git a/server/src/test/java/org/elasticsearch/index/mapper/KeywordFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/KeywordFieldMapperTests.java index 96f51e44c3657..b061346d0dddb 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/KeywordFieldMapperTests.java +++ 
b/server/src/test/java/org/elasticsearch/index/mapper/KeywordFieldMapperTests.java @@ -368,17 +368,18 @@ public void testDimensionIndexedAndDocvalues() { } public void testDimensionMultiValuedField() throws IOException { - DocumentMapper mapper = createDocumentMapper(fieldMapping(b -> { + XContentBuilder mapping = fieldMapping(b -> { minimalMapping(b); b.field("time_series_dimension", true); - })); + }); + DocumentMapper mapper = randomBoolean() ? createDocumentMapper(mapping) : createTimeSeriesModeDocumentMapper(mapping); Exception e = expectThrows(MapperParsingException.class, () -> mapper.parse(source(b -> b.array("field", "1234", "45678")))); assertThat(e.getCause().getMessage(), containsString("Dimension field [field] cannot be a multi-valued field")); } public void testDimensionExtraLongKeyword() throws IOException { - DocumentMapper mapper = createDocumentMapper(fieldMapping(b -> { + DocumentMapper mapper = createTimeSeriesModeDocumentMapper(fieldMapping(b -> { minimalMapping(b); b.field("time_series_dimension", true); })); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/timeseries/TimeSeriesAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/timeseries/TimeSeriesAggregatorTests.java index 41bd15a4e075a..ccbd596e911d2 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/timeseries/TimeSeriesAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/timeseries/TimeSeriesAggregatorTests.java @@ -17,9 +17,6 @@ import org.apache.lucene.index.RandomIndexWriter; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.Query; -import org.apache.lucene.util.BytesRef; -import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.index.mapper.DataStreamTimestampFieldMapper; import org.elasticsearch.index.mapper.DateFieldMapper; @@ -27,6 +24,7 @@ import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.NumberFieldMapper; import org.elasticsearch.index.mapper.TimeSeriesIdFieldMapper; +import org.elasticsearch.index.mapper.TimeSeriesIdFieldMapper.TimeSeriesIdBuilder; import org.elasticsearch.search.aggregations.AggregatorTestCase; import org.elasticsearch.search.aggregations.metrics.Sum; import org.elasticsearch.search.aggregations.support.ValuesSourceType; @@ -34,8 +32,6 @@ import java.io.IOException; import java.util.ArrayList; import java.util.List; -import java.util.SortedMap; -import java.util.TreeMap; import java.util.function.Consumer; import static org.elasticsearch.search.aggregations.AggregationBuilders.sum; @@ -81,15 +77,13 @@ public void testStandAloneTimeSeriesWithSum() throws IOException { public static void writeTS(RandomIndexWriter iw, long timestamp, Object[] dimensions, Object[] metrics) throws IOException { final List fields = new ArrayList<>(); fields.add(new SortedNumericDocValuesField(DataStreamTimestampFieldMapper.DEFAULT_PATH, timestamp)); - final SortedMap dimensionFields = new TreeMap<>(); + final TimeSeriesIdBuilder builder = new TimeSeriesIdBuilder(); for (int i = 0; i < dimensions.length; i += 2) { - final BytesReference reference; - if (dimensions[i + 1] instanceof Number) { - reference = TimeSeriesIdFieldMapper.encodeTsidValue(((Number) dimensions[i + 1]).longValue()); + if (dimensions[i + 1]instanceof Number n) { + builder.addLong(dimensions[i].toString(), n.longValue()); } else { 
- reference = TimeSeriesIdFieldMapper.encodeTsidValue(dimensions[i + 1].toString()); + builder.addString(dimensions[i].toString(), dimensions[i + 1].toString()); } - dimensionFields.put(new BytesRef(dimensions[i].toString()), reference); } for (int i = 0; i < metrics.length; i += 2) { if (metrics[i + 1] instanceof Integer || metrics[i + 1] instanceof Long) { @@ -100,13 +94,9 @@ public static void writeTS(RandomIndexWriter iw, long timestamp, Object[] dimens fields.add(new DoubleDocValuesField(metrics[i].toString(), (double) metrics[i + 1])); } } - try (BytesStreamOutput out = new BytesStreamOutput()) { - TimeSeriesIdFieldMapper.encodeTsid(out, dimensionFields); - BytesReference timeSeriesId = out.bytes(); - fields.add(new SortedDocValuesField(TimeSeriesIdFieldMapper.NAME, timeSeriesId.toBytesRef())); - } + fields.add(new SortedDocValuesField(TimeSeriesIdFieldMapper.NAME, builder.build().toBytesRef())); // TODO: Handle metrics - iw.addDocument(fields.stream().toList()); + iw.addDocument(fields); } private void timeSeriesTestCase( diff --git a/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperServiceTestCase.java b/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperServiceTestCase.java index 6f9e3649ae679..21aa106ff75c2 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperServiceTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/index/mapper/MapperServiceTestCase.java @@ -126,6 +126,14 @@ protected final DocumentMapper createDocumentMapper(XContentBuilder mappings) th return createMapperService(mappings).documentMapper(); } + protected final DocumentMapper createTimeSeriesModeDocumentMapper(XContentBuilder mappings) throws IOException { + Settings settings = Settings.builder() + .put(IndexSettings.MODE.getKey(), "time_series") + .put(IndexMetadata.INDEX_ROUTING_PATH.getKey(), "uid") + .build(); + return createMapperService(settings, mappings).documentMapper(); + } + protected final DocumentMapper createDocumentMapper(Version version, XContentBuilder mappings) throws IOException { return createMapperService(version, mappings).documentMapper(); } diff --git a/test/framework/src/main/java/org/elasticsearch/index/mapper/TestDocumentParserContext.java b/test/framework/src/main/java/org/elasticsearch/index/mapper/TestDocumentParserContext.java index 4eeb644132a3c..b42d561de4d72 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/mapper/TestDocumentParserContext.java +++ b/test/framework/src/main/java/org/elasticsearch/index/mapper/TestDocumentParserContext.java @@ -8,6 +8,8 @@ package org.elasticsearch.index.mapper; +import org.elasticsearch.Version; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.time.DateFormatter; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.analysis.IndexAnalyzers; @@ -31,7 +33,7 @@ public class TestDocumentParserContext extends DocumentParserContext { * Use with caution as it can cause {@link NullPointerException}s down the line. 
*/ public TestDocumentParserContext() { - super(MappingLookup.EMPTY, null, null, null, null); + super(MappingLookup.EMPTY, MapperTestCase.createIndexSettings(Version.CURRENT, Settings.EMPTY), null, null, null); } /** diff --git a/x-pack/plugin/mapper-unsigned-long/src/main/java/org/elasticsearch/xpack/unsignedlong/UnsignedLongFieldMapper.java b/x-pack/plugin/mapper-unsigned-long/src/main/java/org/elasticsearch/xpack/unsignedlong/UnsignedLongFieldMapper.java index e1fa36abe0451..2300644128733 100644 --- a/x-pack/plugin/mapper-unsigned-long/src/main/java/org/elasticsearch/xpack/unsignedlong/UnsignedLongFieldMapper.java +++ b/x-pack/plugin/mapper-unsigned-long/src/main/java/org/elasticsearch/xpack/unsignedlong/UnsignedLongFieldMapper.java @@ -20,7 +20,6 @@ import org.apache.lucene.search.Query; import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.Explicit; -import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.fielddata.IndexFieldData; import org.elasticsearch.index.fielddata.IndexNumericFieldData; @@ -33,7 +32,6 @@ import org.elasticsearch.index.mapper.SimpleMappedFieldType; import org.elasticsearch.index.mapper.SourceValueFetcher; import org.elasticsearch.index.mapper.TextSearchInfo; -import org.elasticsearch.index.mapper.TimeSeriesIdFieldMapper; import org.elasticsearch.index.mapper.TimeSeriesParams; import org.elasticsearch.index.mapper.TimeSeriesParams.MetricType; import org.elasticsearch.index.mapper.ValueFetcher; @@ -554,12 +552,7 @@ protected void parseCreateField(DocumentParserContext context) throws IOExceptio } if (dimension && numericValue != null) { - // We encode the tsid part of the dimension field. However, there is no point - // in encoding the tsid value if we do not generate the _tsid field. - BytesReference bytes = context.getMetadataMapper(TimeSeriesIdFieldMapper.NAME) != null - ? TimeSeriesIdFieldMapper.encodeTsidUnsignedLongValue(numericValue) - : null; - context.doc().addDimensionBytes(fieldType().name(), bytes); + context.getDimensions().addUnsignedLong(fieldType().name(), numericValue); } List fields = new ArrayList<>(); From 58630383b5b10a62d1de19e5111dbe0623bb8e70 Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Mon, 14 Feb 2022 18:27:43 -0500 Subject: [PATCH 090/167] Skip broken test Like ships passing in the night #83799 and #83837 missed each other and then crashed together in the master branch causing failures. Oops.
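The mute uses the standard YAML REST test convention: setting the skip block's version to all disables the test on every version, and the reason records the tracking issue. After the hunk below, the muted block reads:

    nested dimensions:
      - skip:
          version: all
          reason: Awaits fix https://github.com/elastic/elasticsearch/issues/83915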
--- .../test/tsdb/05_dimension_and_metric_in_non_tsdb_index.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/05_dimension_and_metric_in_non_tsdb_index.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/05_dimension_and_metric_in_non_tsdb_index.yml index a570c1c460863..04b2df3359068 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/05_dimension_and_metric_in_non_tsdb_index.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/05_dimension_and_metric_in_non_tsdb_index.yml @@ -218,8 +218,8 @@ no _tsid in standard indices: --- nested dimensions: - skip: - version: " - 8.0.99" - reason: introduced in 8.1.0 + version: all + reason: Awaits fix https://github.com/elastic/elasticsearch/issues/83915 - do: indices.create: From 64929dc5df696e64ae48432651eed043194c630e Mon Sep 17 00:00:00 2001 From: Mark Vieira Date: Mon, 14 Feb 2022 15:22:33 -0800 Subject: [PATCH 091/167] Introduce explicit API for configure test cluster feature flags (#83876) --- .../internal/InternalTestClustersPlugin.java | 5 + .../org/elasticsearch/gradle/Version.java | 3 +- .../testclusters/ElasticsearchCluster.java | 22 ++++- .../testclusters/ElasticsearchNode.java | 93 ++++++++++++++++--- .../TestClusterConfiguration.java | 5 + .../testclusters/TestClustersPlugin.java | 10 +- modules/data-streams/build.gradle | 9 +- modules/reindex/build.gradle | 5 +- qa/full-cluster-restart/build.gradle | 8 +- qa/mixed-cluster/build.gradle | 12 +-- qa/multi-cluster-search/build.gradle | 6 +- qa/rolling-upgrade/build.gradle | 15 +-- qa/smoke-test-multinode/build.gradle | 8 +- rest-api-spec/build.gradle | 7 +- .../org/elasticsearch/rollup/RollupV2.java | 5 +- x-pack/docs/build.gradle | 5 +- x-pack/plugin/build.gradle | 9 +- .../plugin/ccr/qa/multi-cluster/build.gradle | 5 +- x-pack/plugin/ilm/build.gradle | 6 +- x-pack/plugin/ilm/qa/multi-node/build.gradle | 7 +- .../plugin/mapper-unsigned-long/build.gradle | 5 +- x-pack/plugin/rollup/build.gradle | 4 +- x-pack/plugin/rollup/qa/rest/build.gradle | 4 +- .../qa/operator-privileges-tests/build.gradle | 7 +- .../plugin/security/qa/profile/build.gradle | 4 +- .../qa/full-cluster-restart/build.gradle | 9 +- .../build.gradle | 7 +- x-pack/qa/full-cluster-restart/build.gradle | 9 +- x-pack/qa/runtime-fields/build.gradle | 9 +- .../qa/xpack-prefix-rest-compat/build.gradle | 6 +- 30 files changed, 199 insertions(+), 110 deletions(-) diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalTestClustersPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalTestClustersPlugin.java index 1d0eff0b3aa70..cc8348d424e58 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalTestClustersPlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalTestClustersPlugin.java @@ -8,6 +8,7 @@ package org.elasticsearch.gradle.internal; +import org.elasticsearch.gradle.VersionProperties; import org.elasticsearch.gradle.internal.info.BuildParams; import org.elasticsearch.gradle.testclusters.TestClustersPlugin; import org.gradle.api.Plugin; @@ -31,6 +32,10 @@ public void apply(Project project) { project.getRootProject().getPluginManager().apply(InternalReaperPlugin.class); TestClustersPlugin testClustersPlugin = project.getPlugins().apply(TestClustersPlugin.class); testClustersPlugin.setRuntimeJava(providerFactory.provider(() 
-> BuildParams.getRuntimeJavaHome())); + testClustersPlugin.setIsReleasedVersion( + version -> (version.equals(VersionProperties.getElasticsearchVersion()) && BuildParams.isSnapshotBuild() == false) + || BuildParams.getBwcVersions().unreleasedInfo(version) == null + ); } } diff --git a/build-tools/src/main/java/org/elasticsearch/gradle/Version.java b/build-tools/src/main/java/org/elasticsearch/gradle/Version.java index dfa8be295a8f7..9367b38548b6c 100644 --- a/build-tools/src/main/java/org/elasticsearch/gradle/Version.java +++ b/build-tools/src/main/java/org/elasticsearch/gradle/Version.java @@ -7,6 +7,7 @@ */ package org.elasticsearch.gradle; +import java.io.Serializable; import java.util.Objects; import java.util.regex.Matcher; import java.util.regex.Pattern; @@ -14,7 +15,7 @@ /** * Encapsulates comparison and printing logic for an x.y.z version. */ -public final class Version implements Comparable { +public final class Version implements Comparable, Serializable { private final int major; private final int minor; private final int revision; diff --git a/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/ElasticsearchCluster.java b/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/ElasticsearchCluster.java index 5d76824a81a0c..e956895f34bce 100644 --- a/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/ElasticsearchCluster.java +++ b/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/ElasticsearchCluster.java @@ -10,6 +10,7 @@ import org.elasticsearch.gradle.FileSupplier; import org.elasticsearch.gradle.PropertyNormalization; import org.elasticsearch.gradle.ReaperService; +import org.elasticsearch.gradle.Version; import org.gradle.api.Named; import org.gradle.api.NamedDomainObjectContainer; import org.gradle.api.Project; @@ -61,6 +62,7 @@ public class ElasticsearchCluster implements TestClusterConfiguration, Named { private final ArchiveOperations archiveOperations; private final ExecOperations execOperations; private final Provider runtimeJava; + private final Function isReleasedVersion; private int nodeIndex = 0; public ElasticsearchCluster( @@ -73,7 +75,8 @@ public ElasticsearchCluster( ExecOperations execOperations, FileOperations fileOperations, File workingDirBase, - Provider runtimeJava + Provider runtimeJava, + Function isReleasedVersion ) { this.path = path; this.clusterName = clusterName; @@ -85,6 +88,7 @@ public ElasticsearchCluster( this.fileOperations = fileOperations; this.workingDirBase = workingDirBase; this.runtimeJava = runtimeJava; + this.isReleasedVersion = isReleasedVersion; this.nodes = project.container(ElasticsearchNode.class); this.nodes.add( new ElasticsearchNode( @@ -98,7 +102,8 @@ public ElasticsearchCluster( execOperations, fileOperations, workingDirBase, - runtimeJava + runtimeJava, + isReleasedVersion ) ); @@ -131,7 +136,8 @@ public void setNumberOfNodes(int numberOfNodes) { execOperations, fileOperations, workingDirBase, - runtimeJava + runtimeJava, + isReleasedVersion ) ); } @@ -401,6 +407,16 @@ public void rolesFile(File rolesYml) { nodes.all(node -> node.rolesFile(rolesYml)); } + @Override + public void requiresFeature(String feature, Version from) { + nodes.all(node -> node.requiresFeature(feature, from)); + } + + @Override + public void requiresFeature(String feature, Version from, Version until) { + nodes.all(node -> node.requiresFeature(feature, from, until)); + } + private void writeUnicastHostsFiles() { String unicastUris = nodes.stream().flatMap(node -> 
node.getAllTransportPortURI().stream()).collect(Collectors.joining("\n")); nodes.forEach(node -> { diff --git a/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/ElasticsearchNode.java b/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/ElasticsearchNode.java index 9704fce7b929d..3ef499d4ef814 100644 --- a/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/ElasticsearchNode.java +++ b/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/ElasticsearchNode.java @@ -148,6 +148,7 @@ public class ElasticsearchNode implements TestClusterConfiguration { private final LazyPropertyList extraJarConfigurations = new LazyPropertyList<>("Extra jar files", this); private final List> credentials = new ArrayList<>(); private final List roleFiles = new ArrayList<>(); + private final List featureFlags = new ArrayList<>(); final LinkedHashMap defaultConfig = new LinkedHashMap<>(); private final Path confPathRepo; @@ -159,6 +160,7 @@ public class ElasticsearchNode implements TestClusterConfiguration { private final Path esStdinFile; private final Path tmpDir; private final Provider runtimeJava; + private final Function isReleasedVersion; private int currentDistro = 0; private TestDistribution testDistribution; @@ -185,7 +187,8 @@ public class ElasticsearchNode implements TestClusterConfiguration { ExecOperations execOperations, FileOperations fileOperations, File workingDirBase, - Provider runtimeJava + Provider runtimeJava, + Function isReleasedVersion ) { this.clusterName = clusterName; this.path = path; @@ -197,6 +200,7 @@ public class ElasticsearchNode implements TestClusterConfiguration { this.execOperations = execOperations; this.fileOperations = fileOperations; this.runtimeJava = runtimeJava; + this.isReleasedVersion = isReleasedVersion; workingDir = workingDirBase.toPath().resolve(safeName(name)).toAbsolutePath(); confPathRepo = workingDir.resolve("repo"); configFile = workingDir.resolve("config/elasticsearch.yml"); @@ -773,6 +777,16 @@ public void rolesFile(File rolesYml) { roleFiles.add(rolesYml); } + @Override + public void requiresFeature(String feature, Version from) { + featureFlags.add(new FeatureFlag(feature, from, null)); + } + + @Override + public void requiresFeature(String feature, Version from, Version until) { + featureFlags.add(new FeatureFlag(feature, from, until)); + } + private void runElasticsearchBinScriptWithInput(String input, String tool, CharSequence... args) { if (Files.exists(getDistroDir().resolve("bin").resolve(tool)) == false && Files.exists(getDistroDir().resolve("bin").resolve(tool + ".bat")) == false) { @@ -820,19 +834,30 @@ private Map getESEnvironment() { defaultEnv.put("ES_PATH_CONF", configFile.getParent().toString()); String systemPropertiesString = ""; if (systemProperties.isEmpty() == false) { - systemPropertiesString = " " - + systemProperties.entrySet() - .stream() - .map(entry -> "-D" + entry.getKey() + "=" + entry.getValue()) - // ES_PATH_CONF is also set as an environment variable and for a reference to ${ES_PATH_CONF} - // to work ES_JAVA_OPTS, we need to make sure that ES_PATH_CONF before ES_JAVA_OPTS. 
Instead, - // we replace the reference with the actual value in other environment variables - .map(p -> p.replace("${ES_PATH_CONF}", configFile.getParent().toString())) - .collect(Collectors.joining(" ")); + systemPropertiesString = " " + systemProperties.entrySet().stream().peek(entry -> { + if (entry.getKey().contains("feature_flag")) { + throw new TestClustersException("Invalid system property `" + entry.getKey() + "`. Use `requiresFeature` instead."); + } + }) + .map(entry -> "-D" + entry.getKey() + "=" + entry.getValue()) + // ES_PATH_CONF is also set as an environment variable and for a reference to ${ES_PATH_CONF} + // to work ES_JAVA_OPTS, we need to make sure that ES_PATH_CONF before ES_JAVA_OPTS. Instead, + // we replace the reference with the actual value in other environment variables + .map(p -> p.replace("${ES_PATH_CONF}", configFile.getParent().toString())) + .collect(Collectors.joining(" ")); } if (systemProperties.containsKey("io.netty.leakDetection.level") == false) { systemPropertiesString = systemPropertiesString + " -Dio.netty.leakDetection.level=paranoid"; } + + String featureFlagsString = ""; + if (featureFlags.isEmpty() == false && isReleasedVersion.apply(getVersion())) { + featureFlagsString = featureFlags.stream() + .filter(f -> getVersion().onOrAfter(f.getFrom()) && (f.getUntil() == null || getVersion().before(f.getUntil()))) + .map(f -> "-D" + f.getFeature() + "=true") + .collect(Collectors.joining(" ")); + } + String jvmArgsString = ""; if (jvmArgs.isEmpty() == false) { jvmArgsString = " " + jvmArgs.stream().peek(argument -> { @@ -846,8 +871,19 @@ private Map getESEnvironment() { String heapSize = System.getProperty("tests.heap.size", "512m"); defaultEnv.put( "ES_JAVA_OPTS", - "-Xms" + heapSize + " -Xmx" + heapSize + " -ea -esa " + systemPropertiesString + " " + jvmArgsString + " " + - // Support passing in additional JVM arguments + "-Xms" + + heapSize + + " -Xmx" + + heapSize + + " -ea -esa " + + systemPropertiesString + + " " + + featureFlagsString + + " " + + jvmArgsString + + " " + + + // Support passing in additional JVM arguments System.getProperty("tests.jvm.argline", "") ); defaultEnv.put("ES_TMPDIR", tmpDir.toString()); @@ -1466,6 +1502,11 @@ public List getExtraConfigFiles() { return extraConfigFiles.getNormalizedCollection(); } + @Nested + public List getFeatureFlags() { + return featureFlags; + } + @Override @Internal public boolean isProcessAlive() { @@ -1599,6 +1640,34 @@ public CharSequence[] getArgs() { } } + private static class FeatureFlag { + private final String feature; + private final Version from; + private final Version until; + + public FeatureFlag(String feature, Version from, Version until) { + this.feature = feature; + this.from = from; + this.until = until; + } + + @Input + public String getFeature() { + return feature; + } + + @Input + public Version getFrom() { + return from; + } + + @Input + @Optional + public Version getUntil() { + return until; + } + } + private static class LinkCreationException extends UncheckedIOException { LinkCreationException(String message, IOException cause) { super(message, cause); diff --git a/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/TestClusterConfiguration.java b/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/TestClusterConfiguration.java index 1d4a377cb302d..126890629e60b 100644 --- a/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/TestClusterConfiguration.java +++ 
b/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/TestClusterConfiguration.java @@ -9,6 +9,7 @@ import org.elasticsearch.gradle.FileSupplier; import org.elasticsearch.gradle.PropertyNormalization; +import org.elasticsearch.gradle.Version; import org.gradle.api.file.FileCollection; import org.gradle.api.file.RegularFile; import org.gradle.api.logging.Logging; @@ -97,6 +98,10 @@ public interface TestClusterConfiguration { void rolesFile(File rolesYml); + void requiresFeature(String feature, Version from); + + void requiresFeature(String feature, Version from, Version until); + String getHttpSocketURI(); String getTransportPortURI(); diff --git a/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/TestClustersPlugin.java b/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/TestClustersPlugin.java index a46453efbc942..2253498b47f72 100644 --- a/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/TestClustersPlugin.java +++ b/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/TestClustersPlugin.java @@ -10,6 +10,7 @@ import org.elasticsearch.gradle.DistributionDownloadPlugin; import org.elasticsearch.gradle.ReaperPlugin; import org.elasticsearch.gradle.ReaperService; +import org.elasticsearch.gradle.Version; import org.elasticsearch.gradle.util.GradleUtils; import org.gradle.api.NamedDomainObjectContainer; import org.gradle.api.Plugin; @@ -30,6 +31,7 @@ import org.gradle.process.ExecOperations; import java.io.File; +import java.util.function.Function; import javax.inject.Inject; @@ -45,6 +47,7 @@ public class TestClustersPlugin implements Plugin { private static final Logger logger = Logging.getLogger(TestClustersPlugin.class); private final ProviderFactory providerFactory; private Provider runtimeJavaProvider; + private Function isReleasedVersion = v -> true; @Inject protected FileSystemOperations getFileSystemOperations() { @@ -75,6 +78,10 @@ public void setRuntimeJava(Provider runtimeJava) { this.runtimeJavaProvider = runtimeJava; } + public void setIsReleasedVersion(Function isReleasedVersion) { + this.isReleasedVersion = isReleasedVersion; + } + @Override public void apply(Project project) { project.getPlugins().apply(DistributionDownloadPlugin.class); @@ -124,7 +131,8 @@ private NamedDomainObjectContainer createTestClustersConta getExecOperations(), getFileOperations(), new File(project.getBuildDir(), "testclusters"), - runtimeJavaProvider + runtimeJavaProvider, + isReleasedVersion ); }); project.getExtensions().add(EXTENSION_NAME, container); diff --git a/modules/data-streams/build.gradle b/modules/data-streams/build.gradle index 9faa8036f1cc3..a8afad52077f4 100644 --- a/modules/data-streams/build.gradle +++ b/modules/data-streams/build.gradle @@ -1,3 +1,4 @@ +import org.elasticsearch.gradle.Version import org.elasticsearch.gradle.internal.info.BuildParams apply plugin: 'elasticsearch.test-with-dependencies' @@ -30,9 +31,7 @@ testClusters.configureEach { setting 'xpack.security.enabled', 'true' keystore 'bootstrap.password', 'x-pack-test-password' user username: "x_pack_rest_user", password: "x-pack-test-password" - if (BuildParams.isSnapshotBuild() == false) { - systemProperty 'es.index_mode_feature_flag_registered', 'true' - } + requiresFeature 'es.index_mode_feature_flag_registered', Version.fromString("8.0.0") } testClusters.matching { it.name == "javaRestTest" }.configureEach { @@ -40,9 +39,7 @@ testClusters.matching { it.name == "javaRestTest" }.configureEach { setting 'xpack.security.enabled', 'false' // disable ILM 
history, since it disturbs tests using _all setting 'indices.lifecycle.history_index_enabled', 'false' - if (BuildParams.isSnapshotBuild() == false) { - systemProperty 'es.index_mode_feature_flag_registered', 'true' - } + requiresFeature 'es.index_mode_feature_flag_registered', Version.fromString("8.0.0") } if (BuildParams.inFipsJvm){ diff --git a/modules/reindex/build.gradle b/modules/reindex/build.gradle index 17265a428a29f..ac839731b7746 100644 --- a/modules/reindex/build.gradle +++ b/modules/reindex/build.gradle @@ -9,6 +9,7 @@ import org.apache.tools.ant.taskdefs.condition.Os import org.elasticsearch.gradle.Architecture import org.elasticsearch.gradle.OS +import org.elasticsearch.gradle.Version import org.elasticsearch.gradle.internal.info.BuildParams import org.elasticsearch.gradle.internal.test.AntFixture import org.elasticsearch.gradle.transform.UnzipTransform @@ -32,9 +33,7 @@ testClusters.configureEach { module ':modules:lang-painless' // Whitelist reindexing from the local node so we can test reindex-from-remote. setting 'reindex.remote.whitelist', '127.0.0.1:*' - if (BuildParams.isSnapshotBuild() == false) { - systemProperty 'es.index_mode_feature_flag_registered', 'true' - } + requiresFeature 'es.index_mode_feature_flag_registered', Version.fromString("8.0.0") } dependencies { diff --git a/qa/full-cluster-restart/build.gradle b/qa/full-cluster-restart/build.gradle index afe83cb7628a8..a3af45b43363e 100644 --- a/qa/full-cluster-restart/build.gradle +++ b/qa/full-cluster-restart/build.gradle @@ -7,6 +7,7 @@ */ +import org.elasticsearch.gradle.Version import org.elasticsearch.gradle.internal.info.BuildParams import org.elasticsearch.gradle.testclusters.StandaloneRestIntegTestTask @@ -28,9 +29,7 @@ BuildParams.bwcVersions.withIndexCompatible { bwcVersion, baseName -> setting 'indices.memory.shard_inactive_time', '60m' setting 'path.repo', "${buildDir}/cluster/shared/repo/${baseName}" setting 'xpack.security.enabled', 'false' - if (BuildParams.isSnapshotBuild() == false && bwcVersion.toString() == project.version) { - systemProperty 'es.index_mode_feature_flag_registered', 'true' - } + requiresFeature 'es.index_mode_feature_flag_registered', Version.fromString("8.0.0") } tasks.register("${baseName}#oldClusterTest", StandaloneRestIntegTestTask) { @@ -47,9 +46,6 @@ BuildParams.bwcVersions.withIndexCompatible { bwcVersion, baseName -> useCluster baseCluster dependsOn "${baseName}#oldClusterTest" doFirst { - if (BuildParams.isSnapshotBuild() == false) { - baseCluster.get().systemProperty 'es.index_mode_feature_flag_registered', 'true' - } baseCluster.get().goToNextVersion() if (bwcVersion.before(BuildParams.bwcVersions.minimumWireCompatibleVersion)) { // When doing a full cluster restart of older versions we actually have to upgrade twice. First to 7.last, then to the current version. diff --git a/qa/mixed-cluster/build.gradle b/qa/mixed-cluster/build.gradle index 53b179d600763..9fa45187f90ff 100644 --- a/qa/mixed-cluster/build.gradle +++ b/qa/mixed-cluster/build.gradle @@ -6,6 +6,8 @@ * Side Public License, v 1. 
*/ + +import org.elasticsearch.gradle.Version import org.elasticsearch.gradle.VersionProperties import org.elasticsearch.gradle.internal.info.BuildParams import org.elasticsearch.gradle.testclusters.StandaloneRestIntegTestTask @@ -31,6 +33,8 @@ BuildParams.bwcVersions.withWireCompatible { bwcVersion, baseName -> numberOfNodes = 4 setting 'path.repo', "${buildDir}/cluster/shared/repo/${baseName}" setting 'xpack.security.enabled', 'false' + requiresFeature 'es.index_mode_feature_flag_registered', Version.fromString("8.0.0") + requiresFeature 'es.random_sampler_feature_flag_registered', Version.fromString("8.1.0") } tasks.register("${baseName}#mixedClusterTest", StandaloneRestIntegTestTask) { @@ -41,18 +45,10 @@ BuildParams.bwcVersions.withWireCompatible { bwcVersion, baseName -> // Getting the endpoints causes a wait for the cluster println "Test cluster endpoints are: ${-> baseCluster.get().allHttpSocketURI.join(",")}" println "Upgrading one node to create a mixed cluster" - if (BuildParams.isSnapshotBuild() == false) { - baseCluster.get().nodes."${baseName}-0".systemProperty 'es.index_mode_feature_flag_registered', 'true' - baseCluster.get().nodes."${baseName}-0".systemProperty 'es.random_sampler_feature_flag_registered', 'true' - } baseCluster.get().nextNodeToNextVersion() // Getting the endpoints causes a wait for the cluster println "Upgrade complete, endpoints are: ${-> baseCluster.get().allHttpSocketURI.join(",")}" println "Upgrading another node to create a mixed cluster" - if (BuildParams.isSnapshotBuild() == false) { - baseCluster.get().nodes."${baseName}-1".systemProperty 'es.index_mode_feature_flag_registered', 'true' - baseCluster.get().nodes."${baseName}-1".systemProperty 'es.random_sampler_feature_flag_registered', 'true' - } baseCluster.get().nextNodeToNextVersion() nonInputProperties.systemProperty('tests.rest.cluster', baseCluster.map(c -> c.allHttpSocketURI.join(","))) nonInputProperties.systemProperty('tests.clustername', baseName) diff --git a/qa/multi-cluster-search/build.gradle b/qa/multi-cluster-search/build.gradle index 818cd917c594a..3f9ba25394649 100644 --- a/qa/multi-cluster-search/build.gradle +++ b/qa/multi-cluster-search/build.gradle @@ -6,6 +6,8 @@ * Side Public License, v 1. */ + +import org.elasticsearch.gradle.Version import org.elasticsearch.gradle.internal.info.BuildParams import org.elasticsearch.gradle.internal.test.RestIntegTestTask @@ -26,9 +28,7 @@ tasks.register('remote-cluster', RestIntegTestTask) { } testClusters.configureEach { - if (BuildParams.isSnapshotBuild() == false) { - systemProperty 'es.index_mode_feature_flag_registered', 'true' - } + requiresFeature 'es.index_mode_feature_flag_registered', Version.fromString("8.0.0") } testClusters.matching{ it.name == 'remote-cluster' }.configureEach { diff --git a/qa/rolling-upgrade/build.gradle b/qa/rolling-upgrade/build.gradle index b5c4d5b99f3fc..f1e71b439fdd6 100644 --- a/qa/rolling-upgrade/build.gradle +++ b/qa/rolling-upgrade/build.gradle @@ -6,6 +6,8 @@ * Side Public License, v 1. 
*/ + +import org.elasticsearch.gradle.Version import org.elasticsearch.gradle.internal.info.BuildParams import org.elasticsearch.gradle.testclusters.StandaloneRestIntegTestTask @@ -36,9 +38,7 @@ BuildParams.bwcVersions.withWireCompatible { bwcVersion, baseName -> setting 'repositories.url.allowed_urls', 'http://snapshot.test*' setting 'path.repo', "${buildDir}/cluster/shared/repo/${baseName}" setting 'xpack.security.enabled', 'false' - if (BuildParams.isSnapshotBuild() == false && bwcVersion.toString() == project.version) { - systemProperty 'es.index_mode_feature_flag_registered', 'true' - } + requiresFeature 'es.index_mode_feature_flag_registered', Version.fromString("8.0.0") } String oldVersion = bwcVersion.toString() @@ -60,9 +60,6 @@ BuildParams.bwcVersions.withWireCompatible { bwcVersion, baseName -> dependsOn "${baseName}#oldClusterTest" useCluster baseCluster doFirst { - if (BuildParams.isSnapshotBuild() == false) { - baseCluster.get().nodes."${baseName}-0".systemProperty 'es.index_mode_feature_flag_registered', 'true' - } baseCluster.get().nextNodeToNextVersion() } systemProperty 'tests.rest.suite', 'mixed_cluster' @@ -76,9 +73,6 @@ BuildParams.bwcVersions.withWireCompatible { bwcVersion, baseName -> dependsOn "${baseName}#oneThirdUpgradedTest" useCluster baseCluster doFirst { - if (BuildParams.isSnapshotBuild() == false) { - baseCluster.get().nodes."${baseName}-1".systemProperty 'es.index_mode_feature_flag_registered', 'true' - } baseCluster.get().nextNodeToNextVersion() } systemProperty 'tests.rest.suite', 'mixed_cluster' @@ -91,9 +85,6 @@ BuildParams.bwcVersions.withWireCompatible { bwcVersion, baseName -> tasks.register("${baseName}#upgradedClusterTest", StandaloneRestIntegTestTask) { dependsOn "${baseName}#twoThirdsUpgradedTest" doFirst { - if (BuildParams.isSnapshotBuild() == false) { - baseCluster.get().nodes."${baseName}-2".systemProperty 'es.index_mode_feature_flag_registered', 'true' - } baseCluster.get().nextNodeToNextVersion() } useCluster testClusters.named(baseName) diff --git a/qa/smoke-test-multinode/build.gradle b/qa/smoke-test-multinode/build.gradle index 368e215235512..26fab744c13b6 100644 --- a/qa/smoke-test-multinode/build.gradle +++ b/qa/smoke-test-multinode/build.gradle @@ -6,6 +6,8 @@ * Side Public License, v 1. 
*/ + +import org.elasticsearch.gradle.Version import org.elasticsearch.gradle.internal.info.BuildParams apply plugin: 'elasticsearch.internal-testclusters' @@ -27,10 +29,8 @@ testClusters.matching { it.name == "integTest" }.configureEach { testClusters.configureEach { setting 'xpack.security.enabled', 'false' - if (BuildParams.isSnapshotBuild() == false) { - systemProperty 'es.index_mode_feature_flag_registered', 'true' - systemProperty 'es.random_sampler_feature_flag_registered', 'true' - } + requiresFeature 'es.index_mode_feature_flag_registered', Version.fromString("8.0.0") + requiresFeature 'es.random_sampler_feature_flag_registered', Version.fromString("8.1.0") } tasks.named("integTest").configure { diff --git a/rest-api-spec/build.gradle b/rest-api-spec/build.gradle index 71e735de95476..f49cde09e3eee 100644 --- a/rest-api-spec/build.gradle +++ b/rest-api-spec/build.gradle @@ -1,3 +1,4 @@ +import org.elasticsearch.gradle.Version import org.elasticsearch.gradle.internal.info.BuildParams apply plugin: 'elasticsearch.build' @@ -35,10 +36,8 @@ artifacts { testClusters.configureEach { module ':modules:mapper-extras' - if (BuildParams.isSnapshotBuild() == false) { - systemProperty 'es.index_mode_feature_flag_registered', 'true' - systemProperty 'es.random_sampler_feature_flag_registered', 'true' - } + requiresFeature 'es.index_mode_feature_flag_registered', Version.fromString("8.0.0") + requiresFeature 'es.random_sampler_feature_flag_registered', Version.fromString("8.1.0") } tasks.named("test").configure { enabled = false } diff --git a/server/src/main/java/org/elasticsearch/rollup/RollupV2.java b/server/src/main/java/org/elasticsearch/rollup/RollupV2.java index 9c9566452c116..65775dcc58e0a 100644 --- a/server/src/main/java/org/elasticsearch/rollup/RollupV2.java +++ b/server/src/main/java/org/elasticsearch/rollup/RollupV2.java @@ -8,8 +8,11 @@ package org.elasticsearch.rollup; +import org.elasticsearch.Build; + public class RollupV2 { - public static final boolean ROLLUP_V2_FEATURE_FLAG_ENABLED = "true".equals(System.getProperty("es.rollup_v2_feature_flag_enabled")); + public static final boolean ROLLUP_V2_FEATURE_FLAG_ENABLED = Build.CURRENT.isSnapshot() + || "true".equals(System.getProperty("es.rollup_v2_feature_flag_enabled")); public static boolean isEnabled() { return ROLLUP_V2_FEATURE_FLAG_ENABLED; diff --git a/x-pack/docs/build.gradle b/x-pack/docs/build.gradle index 8da90eb9e7bfd..482c94e06195b 100644 --- a/x-pack/docs/build.gradle +++ b/x-pack/docs/build.gradle @@ -1,3 +1,4 @@ +import org.elasticsearch.gradle.Version import org.elasticsearch.gradle.internal.info.BuildParams apply plugin: 'elasticsearch.docs-test' @@ -63,9 +64,7 @@ testClusters.matching { it.name == "integTest" }.configureEach { setting 'xpack.security.authc.realms.saml.saml1.attributes.principal', 'uid' setting 'xpack.security.authc.realms.saml.saml1.attributes.name', 'urn:oid:2.5.4.3' - if (BuildParams.isSnapshotBuild() == false) { - systemProperty 'es.user_profile_feature_flag_enabled', 'true' - } + requiresFeature 'es.user_profile_feature_flag_enabled', Version.fromString("8.1.0") user username: 'test_admin' } diff --git a/x-pack/plugin/build.gradle b/x-pack/plugin/build.gradle index 5bf5f256d2169..f79cdc65cdab1 100644 --- a/x-pack/plugin/build.gradle +++ b/x-pack/plugin/build.gradle @@ -1,3 +1,4 @@ +import org.elasticsearch.gradle.Version import org.elasticsearch.gradle.internal.info.BuildParams import org.elasticsearch.gradle.util.GradleUtils import org.elasticsearch.gradle.internal.test.RestIntegTestTask @@ 
-152,11 +153,9 @@ testClusters.configureEach { extraConfigFile nodeCert.name, nodeCert extraConfigFile serviceTokens.name, serviceTokens - if (BuildParams.isSnapshotBuild() == false) { - systemProperty 'es.index_mode_feature_flag_registered', 'true' - systemProperty 'es.random_sampler_feature_flag_registered', 'true' - systemProperty 'es.user_profile_feature_flag_enabled', 'true' - } + requiresFeature 'es.index_mode_feature_flag_registered', Version.fromString("8.0.0") + requiresFeature 'es.random_sampler_feature_flag_registered', Version.fromString("8.1.0") + requiresFeature 'es.user_profile_feature_flag_enabled', Version.fromString("8.1.0") } tasks.register('enforceApiSpecsConvention').configure { diff --git a/x-pack/plugin/ccr/qa/multi-cluster/build.gradle b/x-pack/plugin/ccr/qa/multi-cluster/build.gradle index bc0e0d80edc7e..31cdf04f6dc94 100644 --- a/x-pack/plugin/ccr/qa/multi-cluster/build.gradle +++ b/x-pack/plugin/ccr/qa/multi-cluster/build.gradle @@ -1,3 +1,4 @@ +import org.elasticsearch.gradle.Version import org.elasticsearch.gradle.internal.info.BuildParams import org.elasticsearch.gradle.internal.test.RestIntegTestTask @@ -66,9 +67,7 @@ testClusters.matching {it.name == "follow-cluster" }.configureEach { testClusters.configureEach { - if (BuildParams.isSnapshotBuild() == false) { - systemProperty 'es.index_mode_feature_flag_registered', 'true' - } + requiresFeature 'es.index_mode_feature_flag_registered', Version.fromString("8.0.0") } tasks.named("check").configure { dependsOn "follow-cluster" } diff --git a/x-pack/plugin/ilm/build.gradle b/x-pack/plugin/ilm/build.gradle index b3c64ed4cde8e..e03c22953021d 100644 --- a/x-pack/plugin/ilm/build.gradle +++ b/x-pack/plugin/ilm/build.gradle @@ -1,3 +1,5 @@ +import org.elasticsearch.gradle.internal.info.BuildParams + apply plugin: 'elasticsearch.internal-es-plugin' apply plugin: 'elasticsearch.internal-cluster-test' @@ -20,5 +22,7 @@ dependencies { addQaCheckDependencies() tasks.named("test").configure { - systemProperty 'es.rollup_v2_feature_flag_enabled', 'true' + if (BuildParams.isSnapshotBuild() == false) { + systemProperty 'es.rollup_v2_feature_flag_enabled', 'true' + } } diff --git a/x-pack/plugin/ilm/qa/multi-node/build.gradle b/x-pack/plugin/ilm/qa/multi-node/build.gradle index 4139f24fa4983..f9e2bfb4c2913 100644 --- a/x-pack/plugin/ilm/qa/multi-node/build.gradle +++ b/x-pack/plugin/ilm/qa/multi-node/build.gradle @@ -1,3 +1,4 @@ +import org.elasticsearch.gradle.Version import org.elasticsearch.gradle.util.GradleUtils import org.elasticsearch.gradle.internal.info.BuildParams @@ -14,7 +15,9 @@ File repoDir = file("$buildDir/testclusters/repo") tasks.named("javaRestTest").configure { /* To support taking index snapshots, we have to set path.repo setting */ systemProperty 'tests.path.repo', repoDir - systemProperty 'es.rollup_v2_feature_flag_enabled', 'true' + if (BuildParams.isSnapshotBuild() == false) { + systemProperty 'es.rollup_v2_feature_flag_enabled', 'true' + } } testClusters.configureEach { @@ -38,7 +41,7 @@ testClusters.configureEach { * cached time. So the policy's action date is always after the snapshot's start. 
*/ setting 'thread_pool.estimated_time_interval', '0' - systemProperty 'es.rollup_v2_feature_flag_enabled', 'true' + requiresFeature 'es.rollup_v2_feature_flag_enabled', Version.fromString("8.0.0") } if (BuildParams.inFipsJvm){ diff --git a/x-pack/plugin/mapper-unsigned-long/build.gradle b/x-pack/plugin/mapper-unsigned-long/build.gradle index e301221112f46..eaff7fe362552 100644 --- a/x-pack/plugin/mapper-unsigned-long/build.gradle +++ b/x-pack/plugin/mapper-unsigned-long/build.gradle @@ -1,3 +1,4 @@ +import org.elasticsearch.gradle.Version import org.elasticsearch.gradle.internal.info.BuildParams /* @@ -50,7 +51,5 @@ if (BuildParams.isSnapshotBuild() == false) { } testClusters.configureEach { - if (BuildParams.isSnapshotBuild() == false) { - systemProperty 'es.index_mode_feature_flag_registered', 'true' - } + requiresFeature 'es.index_mode_feature_flag_registered', Version.fromString("8.0.0") } diff --git a/x-pack/plugin/rollup/build.gradle b/x-pack/plugin/rollup/build.gradle index 483a84aea9987..dbc09c44f43f0 100644 --- a/x-pack/plugin/rollup/build.gradle +++ b/x-pack/plugin/rollup/build.gradle @@ -22,5 +22,7 @@ dependencies { addQaCheckDependencies() tasks.named("test").configure { - systemProperty 'es.rollup_v2_feature_flag_enabled', 'true' + if (BuildParams.isSnapshotBuild() == false) { + systemProperty 'es.rollup_v2_feature_flag_enabled', 'true' + } } diff --git a/x-pack/plugin/rollup/qa/rest/build.gradle b/x-pack/plugin/rollup/qa/rest/build.gradle index 4d5c23feb9d20..90fee3972dd4d 100644 --- a/x-pack/plugin/rollup/qa/rest/build.gradle +++ b/x-pack/plugin/rollup/qa/rest/build.gradle @@ -4,6 +4,8 @@ * 2.0; you may not use this file except in compliance with the Elastic License * 2.0. */ + +import org.elasticsearch.gradle.Version import org.elasticsearch.gradle.internal.info.BuildParams apply plugin: 'elasticsearch.internal-yaml-rest-test' @@ -22,8 +24,8 @@ restResources { testClusters.configureEach { testDistribution = 'DEFAULT' setting 'xpack.license.self_generated.type', 'basic' - systemProperty 'es.rollup_v2_feature_flag_enabled', 'true' setting 'xpack.security.enabled', 'false' + requiresFeature 'es.rollup_v2_feature_flag_enabled', Version.fromString("8.0.0") } if (BuildParams.inFipsJvm){ diff --git a/x-pack/plugin/security/qa/operator-privileges-tests/build.gradle b/x-pack/plugin/security/qa/operator-privileges-tests/build.gradle index bd401da91c3d1..5772db6d11efb 100644 --- a/x-pack/plugin/security/qa/operator-privileges-tests/build.gradle +++ b/x-pack/plugin/security/qa/operator-privileges-tests/build.gradle @@ -1,3 +1,5 @@ +import org.elasticsearch.gradle.Version + apply plugin: 'elasticsearch.internal-es-plugin' apply plugin: 'elasticsearch.internal-java-rest-test' @@ -36,10 +38,9 @@ testClusters.configureEach { setting 'xpack.security.operator_privileges.enabled', "true" setting 'path.repo', repoDir.absolutePath - systemProperty 'es.user_profile_feature_flag_enabled', 'true' + requiresFeature 'es.user_profile_feature_flag_enabled', Version.fromString("8.1.0") + requiresFeature 'es.rollup_v2_feature_flag_enabled', Version.fromString("8.0.0") user username: "test_admin", password: 'x-pack-test-password', role: "superuser" user username: "test_operator", password: 'x-pack-test-password', role: "limited_operator" - - systemProperty 'es.rollup_v2_feature_flag_enabled', 'true' } diff --git a/x-pack/plugin/security/qa/profile/build.gradle b/x-pack/plugin/security/qa/profile/build.gradle index c9105fe7394a4..677c286766972 100644 --- a/x-pack/plugin/security/qa/profile/build.gradle 
+++ b/x-pack/plugin/security/qa/profile/build.gradle @@ -1,3 +1,5 @@ +import org.elasticsearch.gradle.Version + apply plugin: 'elasticsearch.internal-java-rest-test' dependencies { @@ -19,7 +21,7 @@ testClusters.matching { it.name == 'javaRestTest' }.configureEach { setting 'xpack.security.authc.token.enabled', 'true' setting 'xpack.security.authc.api_key.enabled', 'true' - systemProperty 'es.user_profile_feature_flag_enabled', 'true' + requiresFeature 'es.user_profile_feature_flag_enabled', Version.fromString("8.1.0") user username: "test_admin", password: 'x-pack-test-password' user username: "rac_user", password: 'x-pack-test-password', role: "rac_role" diff --git a/x-pack/plugin/shutdown/qa/full-cluster-restart/build.gradle b/x-pack/plugin/shutdown/qa/full-cluster-restart/build.gradle index 23b825a8970b6..429b29bbc9fdb 100644 --- a/x-pack/plugin/shutdown/qa/full-cluster-restart/build.gradle +++ b/x-pack/plugin/shutdown/qa/full-cluster-restart/build.gradle @@ -1,3 +1,4 @@ +import org.elasticsearch.gradle.Version import org.elasticsearch.gradle.internal.info.BuildParams import org.elasticsearch.gradle.testclusters.StandaloneRestIntegTestTask @@ -60,9 +61,8 @@ BuildParams.bwcVersions.withIndexCompatible { bwcVersion, baseName -> keystore 'xpack.security.transport.ssl.secure_key_passphrase', 'testnode' setting 'xpack.security.authc.api_key.enabled', 'true' - if (BuildParams.isSnapshotBuild() == false && bwcVersion.toString() == project.version) { - systemProperty 'es.index_mode_feature_flag_registered', 'true' - } + + requiresFeature 'es.index_mode_feature_flag_registered', Version.fromString("8.0.0") } tasks.register("${baseName}#oldClusterTest", StandaloneRestIntegTestTask) { @@ -80,9 +80,6 @@ BuildParams.bwcVersions.withIndexCompatible { bwcVersion, baseName -> useCluster baseCluster dependsOn "${baseName}#oldClusterTest" doFirst { - if (BuildParams.isSnapshotBuild() == false) { - systemProperty 'es.index_mode_feature_flag_registered', 'true' - } baseCluster.get().goToNextVersion() if (bwcVersion.before(BuildParams.bwcVersions.minimumWireCompatibleVersion)) { // When doing a full cluster restart of older versions we actually have to upgrade twice. First to 7.last, then to the current version. 
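The same migration repeats across the build scripts in this patch: instead of hand-guarding feature-flag system properties with BuildParams.isSnapshotBuild(), each test cluster declares the flag together with the version range in which it applies, and the test-clusters plugin injects the corresponding -D...=true property only for released distributions inside that range (per the ElasticsearchNode change above, system properties whose key contains "feature_flag" are now rejected outright, pointing callers at the new API). A representative before/after sketch in the Gradle DSL, using flags that appear in this patch:

    // before: manual snapshot-build guard repeated in every build script
    if (BuildParams.isSnapshotBuild() == false) {
      systemProperty 'es.index_mode_feature_flag_registered', 'true'
    }

    // after: declarative; the flag is passed for versions in [from, until)
    requiresFeature 'es.index_mode_feature_flag_registered', Version.fromString("8.0.0")
    requiresFeature 'es.user_profile_feature_flag_enabled', Version.fromString("8.1.0")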
diff --git a/x-pack/qa/core-rest-tests-with-security/build.gradle b/x-pack/qa/core-rest-tests-with-security/build.gradle index 77303f88e64eb..552dd2bc94f02 100644 --- a/x-pack/qa/core-rest-tests-with-security/build.gradle +++ b/x-pack/qa/core-rest-tests-with-security/build.gradle @@ -1,3 +1,4 @@ +import org.elasticsearch.gradle.Version import org.elasticsearch.gradle.internal.info.BuildParams apply plugin: 'elasticsearch.internal-testclusters' @@ -31,8 +32,6 @@ testClusters.matching { it.name == "integTest" }.configureEach { setting 'xpack.license.self_generated.type', 'trial' setting 'indices.lifecycle.history_index_enabled', 'false' setting 'xpack.security.autoconfiguration.enabled', 'false' - if (BuildParams.isSnapshotBuild() == false) { - systemProperty 'es.index_mode_feature_flag_registered', 'true' - systemProperty 'es.random_sampler_feature_flag_registered', 'true' - } + requiresFeature 'es.index_mode_feature_flag_registered', Version.fromString("8.0.0") + requiresFeature 'es.random_sampler_feature_flag_registered', Version.fromString("8.1.0") } diff --git a/x-pack/qa/full-cluster-restart/build.gradle b/x-pack/qa/full-cluster-restart/build.gradle index 34fdd71678349..3923d439d394d 100644 --- a/x-pack/qa/full-cluster-restart/build.gradle +++ b/x-pack/qa/full-cluster-restart/build.gradle @@ -1,3 +1,4 @@ +import org.elasticsearch.gradle.Version import org.elasticsearch.gradle.internal.info.BuildParams import org.elasticsearch.gradle.testclusters.StandaloneRestIntegTestTask @@ -60,9 +61,8 @@ BuildParams.bwcVersions.withIndexCompatible { bwcVersion, baseName -> keystore 'xpack.security.transport.ssl.secure_key_passphrase', 'testnode' setting 'xpack.security.authc.api_key.enabled', 'true' - if (BuildParams.isSnapshotBuild() == false && bwcVersion.toString() == project.version) { - systemProperty 'es.index_mode_feature_flag_registered', 'true' - } + + requiresFeature 'es.index_mode_feature_flag_registered', Version.fromString("8.0.0") } tasks.register("${baseName}#oldClusterTest", StandaloneRestIntegTestTask) { @@ -83,9 +83,6 @@ BuildParams.bwcVersions.withIndexCompatible { bwcVersion, baseName -> useCluster baseCluster dependsOn "${baseName}#oldClusterTest" doFirst { - if (BuildParams.isSnapshotBuild() == false) { - systemProperty 'es.index_mode_feature_flag_registered', 'true' - } baseCluster.get().goToNextVersion() if (bwcVersion.before(BuildParams.bwcVersions.minimumWireCompatibleVersion)) { // When doing a full cluster restart of older versions we actually have to upgrade twice. First to 7.last, then to the current version. diff --git a/x-pack/qa/runtime-fields/build.gradle b/x-pack/qa/runtime-fields/build.gradle index 52a3f4c1c864f..e2c73dde88977 100644 --- a/x-pack/qa/runtime-fields/build.gradle +++ b/x-pack/qa/runtime-fields/build.gradle @@ -6,6 +6,8 @@ * Side Public License, v 1. 
*/ + +import org.elasticsearch.gradle.Version import org.elasticsearch.gradle.internal.info.BuildParams apply plugin: 'elasticsearch.build' @@ -40,10 +42,9 @@ subprojects { testDistribution = 'DEFAULT' setting 'xpack.license.self_generated.type', 'trial' setting 'xpack.security.enabled', 'false' - if (BuildParams.isSnapshotBuild() == false) { - systemProperty 'es.index_mode_feature_flag_registered', 'true' - systemProperty 'es.random_sampler_feature_flag_registered', 'true' - } + + requiresFeature 'es.index_mode_feature_flag_registered', Version.fromString("8.0.0") + requiresFeature 'es.random_sampler_feature_flag_registered', Version.fromString("8.1.0") } tasks.named("yamlRestTest").configure { diff --git a/x-pack/qa/xpack-prefix-rest-compat/build.gradle b/x-pack/qa/xpack-prefix-rest-compat/build.gradle index ebeab608dd23a..8b6a81202f951 100644 --- a/x-pack/qa/xpack-prefix-rest-compat/build.gradle +++ b/x-pack/qa/xpack-prefix-rest-compat/build.gradle @@ -6,6 +6,8 @@ * Side Public License, v 1. */ + +import org.elasticsearch.gradle.Version import org.elasticsearch.gradle.VersionProperties import org.elasticsearch.gradle.internal.info.BuildParams import org.elasticsearch.gradle.internal.test.rest.CopyRestTestsTask @@ -94,9 +96,7 @@ testClusters.configureEach { extraConfigFile nodeCert.name, nodeCert extraConfigFile serviceTokens.name, serviceTokens - if (BuildParams.isSnapshotBuild() == false) { - systemProperty 'es.index_mode_feature_flag_registered', 'true' - } + requiresFeature 'es.index_mode_feature_flag_registered', Version.fromString("8.0.0") } // transform (again) the (already) transformed x-pack compatibility tests to test the xpack prefixes

From 749ad74d73083d25024756d9d078b1ddd6676b23 Mon Sep 17 00:00:00 2001
From: Yannick Welsch
Date: Tue, 15 Feb 2022 08:37:12 +0100
Subject: [PATCH 092/167] License checks for archive tier (#83894)

Implements an enterprise license check for archive data.
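At its core, the change adds a RepositoryPlugin extension point through which a plugin can veto a restore, plus an allocation decider and a usage tracker tied to the license state. As a condensed sketch of how the extension point is used, mirroring the OldLuceneVersions change further down in this patch:

    // Condensed from the OldLuceneVersions change below: the plugin returns a
    // Consumer<IndexMetadata> that vetoes the restore of an archive index by
    // throwing when the license is not compliant.
    @Override
    public Consumer<IndexMetadata> addPreRestoreCheck() {
        return indexMetadata -> {
            if (isArchiveIndex(indexMetadata.getCreationVersion())
                && ARCHIVE_FEATURE.checkWithoutTracking(getLicenseState()) == false) {
                throw LicenseUtils.newComplianceException("archive");
            }
        };
    }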
Relates #81210 --- .../plugins/RepositoryPlugin.java | 11 + .../repositories/RepositoriesModule.java | 14 +- .../repositories/RepositoriesService.java | 11 +- .../snapshots/RestoreService.java | 61 +++--- ...ClusterStateServiceRandomUpdatesTests.java | 3 +- .../RepositoriesServiceTests.java | 3 +- .../snapshots/SnapshotResiliencyTests.java | 3 +- .../core/LocalStateCompositeXPackPlugin.java | 11 + .../plugin/old-lucene-versions/build.gradle | 2 + .../lucene/bwc/ArchiveLicenseIntegTests.java | 201 ++++++++++++++++++ .../bwc/LocalStateOldLuceneVersions.java | 32 +++ .../lucene/bwc/ArchiveAllocationDecider.java | 62 ++++++ .../xpack/lucene/bwc/ArchiveUsageTracker.java | 43 ++++ ...ShardsOnInvalidLicenseClusterListener.java | 98 +++++++++ .../xpack/lucene/bwc/OldLuceneVersions.java | 93 +++++++- 15 files changed, 612 insertions(+), 36 deletions(-) create mode 100644 x-pack/plugin/old-lucene-versions/src/internalClusterTest/java/org/elasticsearch/xpack/lucene/bwc/ArchiveLicenseIntegTests.java create mode 100644 x-pack/plugin/old-lucene-versions/src/internalClusterTest/java/org/elasticsearch/xpack/lucene/bwc/LocalStateOldLuceneVersions.java create mode 100644 x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/ArchiveAllocationDecider.java create mode 100644 x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/ArchiveUsageTracker.java create mode 100644 x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/FailShardsOnInvalidLicenseClusterListener.java diff --git a/server/src/main/java/org/elasticsearch/plugins/RepositoryPlugin.java b/server/src/main/java/org/elasticsearch/plugins/RepositoryPlugin.java index 41e0e9b3704cb..476baf1c28f63 100644 --- a/server/src/main/java/org/elasticsearch/plugins/RepositoryPlugin.java +++ b/server/src/main/java/org/elasticsearch/plugins/RepositoryPlugin.java @@ -8,6 +8,7 @@ package org.elasticsearch.plugins; +import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.env.Environment; @@ -17,6 +18,7 @@ import java.util.Collections; import java.util.Map; +import java.util.function.Consumer; /** * An extension point for {@link Plugin} implementations to add custom snapshot repositories. @@ -59,4 +61,13 @@ default Map getInternalRepositories( return Collections.emptyMap(); } + /** + * Returns a check that is run on restore. This allows plugins to prevent certain restores from happening. 
+ * + * returns null if no check is provided + */ + default Consumer addPreRestoreCheck() { + return null; + } + } diff --git a/server/src/main/java/org/elasticsearch/repositories/RepositoriesModule.java b/server/src/main/java/org/elasticsearch/repositories/RepositoriesModule.java index 89992b20fe96f..21de3f2f961c6 100644 --- a/server/src/main/java/org/elasticsearch/repositories/RepositoriesModule.java +++ b/server/src/main/java/org/elasticsearch/repositories/RepositoriesModule.java @@ -8,6 +8,7 @@ package org.elasticsearch.repositories; +import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.BigArrays; @@ -18,10 +19,12 @@ import org.elasticsearch.transport.TransportService; import org.elasticsearch.xcontent.NamedXContentRegistry; +import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.function.Consumer; /** * Sets up classes for Snapshot/Restore. @@ -80,6 +83,14 @@ public RepositoriesModule( } } + List> preRestoreChecks = new ArrayList<>(); + for (RepositoryPlugin repoPlugin : repoPlugins) { + Consumer preRestoreCheck = repoPlugin.addPreRestoreCheck(); + if (preRestoreCheck != null) { + preRestoreChecks.add(preRestoreCheck); + } + } + Settings settings = env.settings(); Map repositoryTypes = Collections.unmodifiableMap(factories); Map internalRepositoryTypes = Collections.unmodifiableMap(internalFactories); @@ -89,7 +100,8 @@ public RepositoriesModule( transportService, repositoryTypes, internalRepositoryTypes, - transportService.getThreadPool() + transportService.getThreadPool(), + preRestoreChecks ); } diff --git a/server/src/main/java/org/elasticsearch/repositories/RepositoriesService.java b/server/src/main/java/org/elasticsearch/repositories/RepositoriesService.java index 6d1bcd0a131cd..6b837f20eb045 100644 --- a/server/src/main/java/org/elasticsearch/repositories/RepositoriesService.java +++ b/server/src/main/java/org/elasticsearch/repositories/RepositoriesService.java @@ -56,6 +56,7 @@ import java.util.Map; import java.util.Objects; import java.util.Set; +import java.util.function.Consumer; import java.util.function.Function; import java.util.stream.Collectors; import java.util.stream.Stream; @@ -97,13 +98,16 @@ public class RepositoriesService extends AbstractLifecycleComponent implements C private volatile Map repositories = Collections.emptyMap(); private final RepositoriesStatsArchive repositoriesStatsArchive; + private final List> preRestoreChecks; + public RepositoriesService( Settings settings, ClusterService clusterService, TransportService transportService, Map typesRegistry, Map internalTypesRegistry, - ThreadPool threadPool + ThreadPool threadPool, + List> preRestoreChecks ) { this.typesRegistry = typesRegistry; this.internalTypesRegistry = internalTypesRegistry; @@ -122,6 +126,7 @@ public RepositoriesService( REPOSITORIES_STATS_ARCHIVE_MAX_ARCHIVED_STATS.get(settings), threadPool::relativeTimeInMillis ); + this.preRestoreChecks = preRestoreChecks; } /** @@ -776,6 +781,10 @@ private static RepositoryConflictException newRepositoryConflictException(String ); } + public List> getPreRestoreChecks() { + return preRestoreChecks; + } + @Override protected void doStart() { diff --git a/server/src/main/java/org/elasticsearch/snapshots/RestoreService.java b/server/src/main/java/org/elasticsearch/snapshots/RestoreService.java index 
b9e600e228de5..d1b996978aa31 100644 --- a/server/src/main/java/org/elasticsearch/snapshots/RestoreService.java +++ b/server/src/main/java/org/elasticsearch/snapshots/RestoreService.java @@ -1289,9 +1289,11 @@ public ClusterState execute(ClusterState currentState) { final String localNodeId = clusterService.state().nodes().getLocalNodeId(); for (Map.Entry indexEntry : indicesToRestore.entrySet()) { final IndexId index = indexEntry.getValue(); + final IndexMetadata originalIndexMetadata = metadata.index(index.getName()); + repositoriesService.getPreRestoreChecks().forEach(check -> check.accept(originalIndexMetadata)); IndexMetadata snapshotIndexMetadata = updateIndexSettings( snapshot, - metadata.index(index.getName()), + originalIndexMetadata, request.indexSettings(), request.ignoreIndexSettings() ); @@ -1591,39 +1593,40 @@ private IndexMetadata convertLegacyIndex(IndexMetadata snapshotIndexMetadata, Cl if (snapshotIndexMetadata.getCreationVersion().before(Version.fromString("5.0.0"))) { throw new IllegalArgumentException("can't restore an index created before version 5.0.0"); } + IndexMetadata.Builder convertedIndexMetadata = IndexMetadata.builder(snapshotIndexMetadata); MappingMetadata mappingMetadata = snapshotIndexMetadata.mapping(); - Map loadedMappingSource = mappingMetadata.rawSourceAsMap(); - - // store old mapping under _meta/legacy_mappings - Map legacyMapping = new LinkedHashMap<>(); - boolean sourceOnlySnapshot = snapshotIndexMetadata.getSettings().getAsBoolean("index.source_only", false); - if (sourceOnlySnapshot) { - // actual mapping is under "_meta" (but strip type first) - Object sourceOnlyMeta = mappingMetadata.sourceAsMap().get("_meta"); - if (sourceOnlyMeta instanceof Map sourceOnlyMetaMap) { - legacyMapping.put("legacy_mappings", sourceOnlyMetaMap); + if (mappingMetadata != null) { + Map loadedMappingSource = mappingMetadata.rawSourceAsMap(); + + // store old mapping under _meta/legacy_mappings + Map legacyMapping = new LinkedHashMap<>(); + boolean sourceOnlySnapshot = snapshotIndexMetadata.getSettings().getAsBoolean("index.source_only", false); + if (sourceOnlySnapshot) { + // actual mapping is under "_meta" (but strip type first) + Object sourceOnlyMeta = mappingMetadata.sourceAsMap().get("_meta"); + if (sourceOnlyMeta instanceof Map sourceOnlyMetaMap) { + legacyMapping.put("legacy_mappings", sourceOnlyMetaMap); + } + } else { + legacyMapping.put("legacy_mappings", loadedMappingSource); } - } else { - legacyMapping.put("legacy_mappings", loadedMappingSource); - } - Map newMappingSource = new LinkedHashMap<>(); - newMappingSource.put("_meta", legacyMapping); + Map newMappingSource = new LinkedHashMap<>(); + newMappingSource.put("_meta", legacyMapping); - Map newMapping = new LinkedHashMap<>(); - newMapping.put(mappingMetadata.type(), newMappingSource); + Map newMapping = new LinkedHashMap<>(); + newMapping.put(mappingMetadata.type(), newMappingSource); + + convertedIndexMetadata.putMapping(new MappingMetadata(mappingMetadata.type(), newMapping)); + } + + convertedIndexMetadata.settings( + Settings.builder() + .put(snapshotIndexMetadata.getSettings()) + .put(IndexMetadata.SETTING_INDEX_VERSION_COMPATIBILITY.getKey(), clusterState.getNodes().getSmallestNonClientNodeVersion()) + ); // TODO: _routing? 
Perhaps we don't need to obey any routing here as stuff is read-only anyway and get API will be disabled - return IndexMetadata.builder(snapshotIndexMetadata) - .putMapping(new MappingMetadata(mappingMetadata.type(), newMapping)) - .settings( - Settings.builder() - .put(snapshotIndexMetadata.getSettings()) - .put( - IndexMetadata.SETTING_INDEX_VERSION_COMPATIBILITY.getKey(), - clusterState.getNodes().getSmallestNonClientNodeVersion() - ) - ) - .build(); + return convertedIndexMetadata.build(); } private static IndexMetadata.Builder restoreToCreateNewIndex(IndexMetadata snapshotIndexMetadata, String renamedIndexName) { diff --git a/server/src/test/java/org/elasticsearch/indices/cluster/IndicesClusterStateServiceRandomUpdatesTests.java b/server/src/test/java/org/elasticsearch/indices/cluster/IndicesClusterStateServiceRandomUpdatesTests.java index 57d82cb834b7f..1aa7902112710 100644 --- a/server/src/test/java/org/elasticsearch/indices/cluster/IndicesClusterStateServiceRandomUpdatesTests.java +++ b/server/src/test/java/org/elasticsearch/indices/cluster/IndicesClusterStateServiceRandomUpdatesTests.java @@ -525,7 +525,8 @@ private IndicesClusterStateService createIndicesClusterStateService( transportService, Collections.emptyMap(), Collections.emptyMap(), - threadPool + threadPool, + List.of() ); final PeerRecoveryTargetService recoveryTargetService = new PeerRecoveryTargetService( threadPool, diff --git a/server/src/test/java/org/elasticsearch/repositories/RepositoriesServiceTests.java b/server/src/test/java/org/elasticsearch/repositories/RepositoriesServiceTests.java index 9525055b13b2d..04c8a08462896 100644 --- a/server/src/test/java/org/elasticsearch/repositories/RepositoriesServiceTests.java +++ b/server/src/test/java/org/elasticsearch/repositories/RepositoriesServiceTests.java @@ -93,7 +93,8 @@ public void setUp() throws Exception { transportService, typesRegistry, typesRegistry, - threadPool + threadPool, + List.of() ); repositoriesService.start(); } diff --git a/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java b/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java index b0325195bc17b..9b4a06afa705a 100644 --- a/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java +++ b/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java @@ -1715,7 +1715,8 @@ protected void assertSnapshotOrGenericThread() { } ), emptyMap(), - threadPool + threadPool, + List.of() ); final ActionFilters actionFilters = new ActionFilters(emptySet()); snapshotsService = new SnapshotsService( diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/LocalStateCompositeXPackPlugin.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/LocalStateCompositeXPackPlugin.java index de78339b67492..35dccbb3ef9ed 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/LocalStateCompositeXPackPlugin.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/LocalStateCompositeXPackPlugin.java @@ -20,6 +20,7 @@ import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.coordination.ElectionStrategy; +import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.IndexTemplateMetadata; import org.elasticsearch.cluster.metadata.SingleNodeShutdownMetadata; @@ -104,6 +105,7 @@ import java.util.Optional; 
import java.util.Set; import java.util.function.BiConsumer; +import java.util.function.Consumer; import java.util.function.Function; import java.util.function.LongSupplier; import java.util.function.Predicate; @@ -568,6 +570,15 @@ public Map getInternalRepositories( return internalRepositories; } + @Override + public Consumer addPreRestoreCheck() { + List> checks = filterPlugins(RepositoryPlugin.class).stream() + .map(RepositoryPlugin::addPreRestoreCheck) + .filter(Objects::nonNull) + .collect(Collectors.toList()); + return checks.isEmpty() ? null : imd -> checks.forEach(c -> c.accept(imd)); + } + @Override public void close() throws IOException { IOUtils.close(plugins); diff --git a/x-pack/plugin/old-lucene-versions/build.gradle b/x-pack/plugin/old-lucene-versions/build.gradle index 22ab9d7bf24ce..e59b68f040f6f 100644 --- a/x-pack/plugin/old-lucene-versions/build.gradle +++ b/x-pack/plugin/old-lucene-versions/build.gradle @@ -1,3 +1,4 @@ +apply plugin: 'elasticsearch.internal-cluster-test' apply plugin: 'elasticsearch.internal-es-plugin' apply plugin: 'elasticsearch.internal-test-artifact' @@ -11,6 +12,7 @@ archivesBaseName = 'x-pack-old-lucene-versions' dependencies { compileOnly project(path: xpackModule('core')) + internalClusterTestImplementation(testArtifact(project(xpackModule('core')))) } addQaCheckDependencies() diff --git a/x-pack/plugin/old-lucene-versions/src/internalClusterTest/java/org/elasticsearch/xpack/lucene/bwc/ArchiveLicenseIntegTests.java b/x-pack/plugin/old-lucene-versions/src/internalClusterTest/java/org/elasticsearch/xpack/lucene/bwc/ArchiveLicenseIntegTests.java new file mode 100644 index 0000000000000..0c37eac048853 --- /dev/null +++ b/x-pack/plugin/old-lucene-versions/src/internalClusterTest/java/org/elasticsearch/xpack/lucene/bwc/ArchiveLicenseIntegTests.java @@ -0,0 +1,201 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.lucene.bwc; + +import org.elasticsearch.ElasticsearchSecurityException; +import org.elasticsearch.Version; +import org.elasticsearch.action.admin.cluster.snapshots.restore.RestoreSnapshotRequest; +import org.elasticsearch.action.admin.cluster.snapshots.restore.RestoreSnapshotResponse; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.health.ClusterHealthStatus; +import org.elasticsearch.cluster.metadata.IndexMetadata; +import org.elasticsearch.cluster.metadata.Metadata; +import org.elasticsearch.cluster.metadata.RepositoryMetadata; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.env.Environment; +import org.elasticsearch.indices.recovery.RecoverySettings; +import org.elasticsearch.license.DeleteLicenseAction; +import org.elasticsearch.license.License; +import org.elasticsearch.license.LicensesMetadata; +import org.elasticsearch.license.PostStartBasicAction; +import org.elasticsearch.license.PostStartBasicRequest; +import org.elasticsearch.license.PostStartTrialAction; +import org.elasticsearch.license.PostStartTrialRequest; +import org.elasticsearch.license.PostStartTrialResponse; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.plugins.RepositoryPlugin; +import org.elasticsearch.protocol.xpack.license.DeleteLicenseRequest; +import org.elasticsearch.repositories.IndexId; +import org.elasticsearch.repositories.Repository; +import org.elasticsearch.repositories.RepositoryData; +import org.elasticsearch.repositories.fs.FsRepository; +import org.elasticsearch.snapshots.AbstractSnapshotIntegTestCase; +import org.elasticsearch.snapshots.RestoreService; +import org.elasticsearch.snapshots.SnapshotId; +import org.elasticsearch.snapshots.mockstore.MockRepository; +import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.xcontent.NamedXContentRegistry; +import org.junit.Before; + +import java.io.IOException; +import java.util.Arrays; +import java.util.Collection; +import java.util.Map; + +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.oneOf; + +@ESIntegTestCase.ClusterScope(supportsDedicatedMasters = false, numClientNodes = 0, scope = ESIntegTestCase.Scope.TEST) +public class ArchiveLicenseIntegTests extends AbstractSnapshotIntegTestCase { + + @Override + protected Collection> nodePlugins() { + return Arrays.asList(LocalStateOldLuceneVersions.class, TestRepositoryPlugin.class, MockRepository.Plugin.class); + } + + public static class TestRepositoryPlugin extends Plugin implements RepositoryPlugin { + public static final String FAKE_VERSIONS_TYPE = "fakeversionsrepo"; + + @Override + public Map getRepositories( + Environment env, + NamedXContentRegistry namedXContentRegistry, + ClusterService clusterService, + BigArrays bigArrays, + RecoverySettings recoverySettings + ) { + return Map.of( + FAKE_VERSIONS_TYPE, + metadata -> new FakeVersionsRepo(metadata, env, namedXContentRegistry, clusterService, bigArrays, recoverySettings) + ); + } + + // fakes an old index version format to activate license checks + private static class FakeVersionsRepo extends FsRepository { + FakeVersionsRepo( + RepositoryMetadata metadata, + Environment env, + NamedXContentRegistry namedXContentRegistry, + 
ClusterService clusterService, + BigArrays bigArrays, + RecoverySettings recoverySettings + ) { + super(metadata, env, namedXContentRegistry, clusterService, bigArrays, recoverySettings); + } + + @Override + public IndexMetadata getSnapshotIndexMetaData(RepositoryData repositoryData, SnapshotId snapshotId, IndexId index) + throws IOException { + final IndexMetadata original = super.getSnapshotIndexMetaData(repositoryData, snapshotId, index); + return IndexMetadata.builder(original) + .settings( + Settings.builder() + .put(original.getSettings()) + .put( + IndexMetadata.SETTING_INDEX_VERSION_CREATED.getKey(), + randomBoolean() ? Version.fromString("5.0.0") : Version.fromString("6.0.0") + ) + ) + .build(); + } + } + } + + private static final String repoName = "test-repo"; + private static final String indexName = "test-index"; + private static final String snapshotName = "test-snapshot"; + + @Before + public void createAndRestoreArchive() throws Exception { + createRepository( + repoName, + TestRepositoryPlugin.FAKE_VERSIONS_TYPE, + randomRepositorySettings().put(RestoreService.ALLOW_BWC_INDICES_SETTING.getKey(), true) + ); + createIndex(indexName); + createFullSnapshot(repoName, snapshotName); + + assertAcked(client().admin().indices().prepareDelete(indexName)); + + PostStartTrialRequest request = new PostStartTrialRequest().setType(License.LicenseType.TRIAL.getTypeName()).acknowledge(true); + client().execute(PostStartTrialAction.INSTANCE, request).get(); + } + + public void testFailRestoreOnInvalidLicense() throws Exception { + assertAcked(client().execute(DeleteLicenseAction.INSTANCE, new DeleteLicenseRequest()).get()); + assertAcked(client().execute(PostStartBasicAction.INSTANCE, new PostStartBasicRequest()).get()); + + ensureClusterSizeConsistency(); + ensureClusterStateConsistency(); + + final RestoreSnapshotRequest req = new RestoreSnapshotRequest(repoName, snapshotName).indices(indexName).waitForCompletion(true); + ElasticsearchSecurityException e = expectThrows( + ElasticsearchSecurityException.class, + () -> client().admin().cluster().restoreSnapshot(req).actionGet() + ); + assertThat(e.getMessage(), containsString("current license is non-compliant for [archive]")); + } + + // checks that shards are failed if license becomes invalid after successful restore + public void testShardAllocationOnInvalidLicense() throws Exception { + final RestoreSnapshotRequest req = new RestoreSnapshotRequest(repoName, snapshotName).indices(indexName).waitForCompletion(true); + + final RestoreSnapshotResponse restoreSnapshotResponse = client().admin().cluster().restoreSnapshot(req).get(); + assertThat(restoreSnapshotResponse.getRestoreInfo().failedShards(), equalTo(0)); + ensureGreen(indexName); + + assertAcked(client().execute(DeleteLicenseAction.INSTANCE, new DeleteLicenseRequest()).get()); + assertAcked(client().execute(PostStartBasicAction.INSTANCE, new PostStartBasicRequest()).get()); + + ensureClusterSizeConsistency(); + ensureClusterStateConsistency(); + + // check that shards have been failed as part of invalid license + assertBusy( + () -> assertEquals( + ClusterHealthStatus.RED, + client().admin().cluster().prepareHealth(indexName).get().getIndices().get(indexName).getStatus() + ) + ); + + waitNoPendingTasksOnAll(); + ensureClusterStateConsistency(); + + // add a valid license again + // This is a bit of a hack in tests, as we can't readd a trial license + // We force this by clearing the existing basic license first + updateClusterState( + currentState -> 
ClusterState.builder(currentState) + .metadata(Metadata.builder(currentState.metadata()).removeCustom(LicensesMetadata.TYPE).build()) + .build() + ); + + waitNoPendingTasksOnAll(); + ensureClusterStateConsistency(); + + PostStartTrialRequest request = new PostStartTrialRequest().setType(License.LicenseType.TRIAL.getTypeName()).acknowledge(true); + final PostStartTrialResponse response = client().execute(PostStartTrialAction.INSTANCE, request).get(); + assertThat( + response.getStatus(), + oneOf( + PostStartTrialResponse.Status.UPGRADED_TO_TRIAL, + // The LicenceService automatically generates a license of {@link LicenceService#SELF_GENERATED_LICENSE_TYPE} type + // if there is no license found in the cluster state (see {@link LicenceService#registerOrUpdateSelfGeneratedLicense). + // Since this test explicitly removes the LicensesMetadata from cluster state it is possible that the self generated + // license is created before the PostStartTrialRequest is acked. + PostStartTrialResponse.Status.TRIAL_ALREADY_ACTIVATED + ) + ); + // check if cluster goes green again after valid license has been put in place + ensureGreen(indexName); + } +} diff --git a/x-pack/plugin/old-lucene-versions/src/internalClusterTest/java/org/elasticsearch/xpack/lucene/bwc/LocalStateOldLuceneVersions.java b/x-pack/plugin/old-lucene-versions/src/internalClusterTest/java/org/elasticsearch/xpack/lucene/bwc/LocalStateOldLuceneVersions.java new file mode 100644 index 0000000000000..e4a6110be7693 --- /dev/null +++ b/x-pack/plugin/old-lucene-versions/src/internalClusterTest/java/org/elasticsearch/xpack/lucene/bwc/LocalStateOldLuceneVersions.java @@ -0,0 +1,32 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.lucene.bwc; + +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.license.XPackLicenseState; +import org.elasticsearch.xpack.core.LocalStateCompositeXPackPlugin; + +import java.nio.file.Path; + +public class LocalStateOldLuceneVersions extends LocalStateCompositeXPackPlugin { + + private final OldLuceneVersions plugin; + + public LocalStateOldLuceneVersions(final Settings settings, final Path configPath) { + super(settings, configPath); + this.plugin = new OldLuceneVersions() { + + @Override + protected XPackLicenseState getLicenseState() { + return LocalStateOldLuceneVersions.this.getLicenseState(); + } + + }; + plugins.add(plugin); + } +} diff --git a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/ArchiveAllocationDecider.java b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/ArchiveAllocationDecider.java new file mode 100644 index 0000000000000..fdd3cf1f5f8e5 --- /dev/null +++ b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/ArchiveAllocationDecider.java @@ -0,0 +1,62 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.lucene.bwc; + +import org.elasticsearch.cluster.metadata.IndexMetadata; +import org.elasticsearch.cluster.routing.RoutingNode; +import org.elasticsearch.cluster.routing.ShardRouting; +import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; +import org.elasticsearch.cluster.routing.allocation.decider.AllocationDecider; +import org.elasticsearch.cluster.routing.allocation.decider.Decision; + +import java.util.function.BooleanSupplier; + +import static org.elasticsearch.xpack.lucene.bwc.OldLuceneVersions.isArchiveIndex; + +public class ArchiveAllocationDecider extends AllocationDecider { + + static final String NAME = "archive"; + + private final BooleanSupplier hasValidLicenseSupplier; + + public ArchiveAllocationDecider(BooleanSupplier hasValidLicenseSupplier) { + this.hasValidLicenseSupplier = hasValidLicenseSupplier; + } + + @Override + public Decision canAllocate(ShardRouting shardRouting, RoutingNode node, RoutingAllocation allocation) { + return allowAllocation(allocation.metadata().getIndexSafe(shardRouting.index()), allocation); + } + + @Override + public Decision canAllocate(ShardRouting shardRouting, RoutingAllocation allocation) { + return allowAllocation(allocation.metadata().getIndexSafe(shardRouting.index()), allocation); + } + + @Override + public Decision canAllocate(IndexMetadata indexMetadata, RoutingNode node, RoutingAllocation allocation) { + return allowAllocation(indexMetadata, allocation); + } + + @Override + public Decision canForceAllocatePrimary(ShardRouting shardRouting, RoutingNode node, RoutingAllocation allocation) { + return allowAllocation(allocation.metadata().getIndexSafe(shardRouting.index()), allocation); + } + + private Decision allowAllocation(IndexMetadata indexMetadata, RoutingAllocation allocation) { + if (isArchiveIndex(indexMetadata.getCreationVersion())) { + if (hasValidLicenseSupplier.getAsBoolean()) { + return allocation.decision(Decision.YES, NAME, "valid license for archive functionality"); + } else { + return allocation.decision(Decision.NO, NAME, "invalid license for archive functionality"); + } + } else { + return allocation.decision(Decision.YES, NAME, "decider only applicable for indices backed by archive functionality"); + } + } +} diff --git a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/ArchiveUsageTracker.java b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/ArchiveUsageTracker.java new file mode 100644 index 0000000000000..1dcd658c1666c --- /dev/null +++ b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/ArchiveUsageTracker.java @@ -0,0 +1,43 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.lucene.bwc; + +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.metadata.IndexMetadata; +import org.elasticsearch.license.XPackLicenseState; + +import java.util.function.Supplier; + +import static org.elasticsearch.xpack.lucene.bwc.OldLuceneVersions.ARCHIVE_FEATURE; + +final class ArchiveUsageTracker implements Runnable { + + private final XPackLicenseState licenseState; + private final Supplier clusterStateSupplier; + + ArchiveUsageTracker(XPackLicenseState licenseState, Supplier clusterStateSupplier) { + this.clusterStateSupplier = clusterStateSupplier; + this.licenseState = licenseState; + } + + @Override + public void run() { + if (hasArchiveIndices(clusterStateSupplier.get())) { + ARCHIVE_FEATURE.check(licenseState); + } + } + + private static boolean hasArchiveIndices(ClusterState state) { + for (IndexMetadata indexMetadata : state.metadata()) { + if (OldLuceneVersions.isArchiveIndex(indexMetadata.getCreationVersion())) { + return true; + } + } + return false; + } +} diff --git a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/FailShardsOnInvalidLicenseClusterListener.java b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/FailShardsOnInvalidLicenseClusterListener.java new file mode 100644 index 0000000000000..2cf7160518d74 --- /dev/null +++ b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/FailShardsOnInvalidLicenseClusterListener.java @@ -0,0 +1,98 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.lucene.bwc; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.apache.logging.log4j.message.ParameterizedMessage; +import org.apache.lucene.store.AlreadyClosedException; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.routing.RerouteService; +import org.elasticsearch.common.Priority; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.index.shard.IndexEventListener; +import org.elasticsearch.index.shard.IndexShard; +import org.elasticsearch.index.shard.ShardId; +import org.elasticsearch.license.LicenseStateListener; +import org.elasticsearch.license.XPackLicenseState; + +import java.util.HashSet; +import java.util.Set; + +import static org.elasticsearch.xpack.lucene.bwc.OldLuceneVersions.ARCHIVE_FEATURE; + +public class FailShardsOnInvalidLicenseClusterListener implements LicenseStateListener, IndexEventListener { + + private static final Logger logger = LogManager.getLogger(FailShardsOnInvalidLicenseClusterListener.class); + + private final XPackLicenseState xPackLicenseState; + + private final RerouteService rerouteService; + + final Set shardsToFail = new HashSet<>(); + + private boolean allowed; + + public FailShardsOnInvalidLicenseClusterListener(XPackLicenseState xPackLicenseState, RerouteService rerouteService) { + this.xPackLicenseState = xPackLicenseState; + this.rerouteService = rerouteService; + this.allowed = ARCHIVE_FEATURE.checkWithoutTracking(xPackLicenseState); + xPackLicenseState.addListener(this); + } + + @Override + public synchronized void afterIndexShardStarted(IndexShard indexShard) { + shardsToFail.add(indexShard); + failActiveShardsIfNecessary(); + } + + @Override + public synchronized void beforeIndexShardClosed(ShardId shardId, @Nullable IndexShard indexShard, Settings indexSettings) { + if (indexShard != null) { + shardsToFail.remove(indexShard); + } + } + + @Override + public synchronized void licenseStateChanged() { + final boolean allowed = ARCHIVE_FEATURE.checkWithoutTracking(xPackLicenseState); + if (allowed && this.allowed == false) { + rerouteService.reroute("reroute after license activation", Priority.NORMAL, new ActionListener() { + @Override + public void onResponse(ClusterState clusterState) { + logger.trace("successful reroute after license activation"); + } + + @Override + public void onFailure(Exception e) { + logger.debug("unsuccessful reroute after license activation"); + } + }); + } + this.allowed = allowed; + failActiveShardsIfNecessary(); + } + + private void failActiveShardsIfNecessary() { + assert Thread.holdsLock(this); + if (allowed == false) { + for (IndexShard indexShard : shardsToFail) { + try { + indexShard.failShard("invalid license", null); + } catch (AlreadyClosedException ignored) { + // ignore + } catch (Exception e) { + logger.warn(new ParameterizedMessage("Could not close shard {} due to invalid license", indexShard.shardId()), e); + } + } + shardsToFail.clear(); + } + } +} diff --git a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/OldLuceneVersions.java b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/OldLuceneVersions.java index 40b021f9ea9d8..631de49d4fa1d 100644 --- a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/OldLuceneVersions.java +++ 
b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/OldLuceneVersions.java @@ -10,37 +10,126 @@ import org.apache.lucene.index.SegmentCommitInfo; import org.apache.lucene.index.SegmentInfo; import org.apache.lucene.index.SegmentInfos; +import org.apache.lucene.util.SetOnce; import org.elasticsearch.Version; +import org.elasticsearch.client.internal.Client; +import org.elasticsearch.cluster.metadata.IndexMetadata; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.cluster.routing.allocation.decider.AllocationDecider; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.UUIDs; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.lucene.Lucene; +import org.elasticsearch.common.settings.ClusterSettings; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.env.Environment; +import org.elasticsearch.env.NodeEnvironment; import org.elasticsearch.index.IndexModule; import org.elasticsearch.index.engine.Engine; import org.elasticsearch.index.seqno.SequenceNumbers; import org.elasticsearch.index.shard.IndexEventListener; import org.elasticsearch.index.shard.IndexShard; +import org.elasticsearch.license.License; +import org.elasticsearch.license.LicenseUtils; +import org.elasticsearch.license.LicensedFeature; +import org.elasticsearch.license.XPackLicenseState; +import org.elasticsearch.plugins.ClusterPlugin; import org.elasticsearch.plugins.IndexStorePlugin; import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.plugins.RepositoryPlugin; +import org.elasticsearch.repositories.RepositoriesService; +import org.elasticsearch.script.ScriptService; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.watcher.ResourceWatcherService; +import org.elasticsearch.xcontent.NamedXContentRegistry; +import org.elasticsearch.xpack.core.XPackPlugin; import org.elasticsearch.xpack.lucene.bwc.codecs.BWCCodec; import java.io.IOException; import java.io.UncheckedIOException; +import java.util.Collection; import java.util.HashMap; +import java.util.List; import java.util.Map; +import java.util.function.Consumer; +import java.util.function.Supplier; -public class OldLuceneVersions extends Plugin implements IndexStorePlugin { +public class OldLuceneVersions extends Plugin implements IndexStorePlugin, ClusterPlugin, RepositoryPlugin { + + public static final LicensedFeature.Momentary ARCHIVE_FEATURE = LicensedFeature.momentary( + null, + "archive", + License.OperationMode.ENTERPRISE + ); + + public static boolean isArchiveIndex(Version version) { + return version.before(Version.CURRENT.minimumIndexCompatibilityVersion()); + } + + private final SetOnce failShardsListener = new SetOnce<>(); + + @Override + public Collection createComponents( + final Client client, + final ClusterService clusterService, + final ThreadPool threadPool, + final ResourceWatcherService resourceWatcherService, + final ScriptService scriptService, + final NamedXContentRegistry xContentRegistry, + final Environment environment, + final NodeEnvironment nodeEnvironment, + final NamedWriteableRegistry registry, + final IndexNameExpressionResolver resolver, + final Supplier repositoriesServiceSupplier + ) { + this.failShardsListener.set(new FailShardsOnInvalidLicenseClusterListener(getLicenseState(), clusterService.getRerouteService())); + if 
(DiscoveryNode.isMasterNode(environment.settings())) { + // We periodically look through the indices and, if there are any archive indices, + // mark the feature as used. We do this on each master node so that if one master fails, the + // next one continues reporting usage state. + var usageTracker = new ArchiveUsageTracker(getLicenseState(), clusterService::state); + threadPool.scheduleWithFixedDelay(usageTracker, TimeValue.timeValueMinutes(15), ThreadPool.Names.GENERIC); + } + return List.of(); + } + + // overridable by tests + protected XPackLicenseState getLicenseState() { + return XPackPlugin.getSharedLicenseState(); + } + + @Override + public Collection<AllocationDecider> createAllocationDeciders(Settings settings, ClusterSettings clusterSettings) { + return List.of(new ArchiveAllocationDecider(() -> ARCHIVE_FEATURE.checkWithoutTracking(getLicenseState()))); + } @Override public void onIndexModule(IndexModule indexModule) { - if (indexModule.indexSettings().getIndexVersionCreated().before(Version.CURRENT.minimumIndexCompatibilityVersion())) { + if (isArchiveIndex(indexModule.indexSettings().getIndexVersionCreated())) { indexModule.addIndexEventListener(new IndexEventListener() { @Override public void afterFilesRestoredFromRepository(IndexShard indexShard) { convertToNewFormat(indexShard); } }); + + indexModule.addIndexEventListener(failShardsListener.get()); } } + @Override + public Consumer<IndexMetadata> addPreRestoreCheck() { + return indexMetadata -> { + if (isArchiveIndex(indexMetadata.getCreationVersion())) { + if (ARCHIVE_FEATURE.checkWithoutTracking(getLicenseState()) == false) { + throw LicenseUtils.newComplianceException("archive"); + } + } + }; + } + /** * The trick used to allow newer Lucene versions to read older Lucene indices is to convert the old directory to a directory that new * Lucene versions happily operate on. 
The way newer Lucene versions happily comply with reading older data is to put in place a From 505b0d9451a05d7dc8b9572b7a387f78b53ddb3f Mon Sep 17 00:00:00 2001 From: Ievgen Degtiarenko Date: Tue, 15 Feb 2022 08:43:50 +0100 Subject: [PATCH 093/167] refactor health service creation (#83831) This commit extracts health service creation into a separate method in order to reduce overall service wiring method complexity --- .../java/org/elasticsearch/node/Node.java | 35 ++++++++++--------- 1 file changed, 18 insertions(+), 17 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/node/Node.java b/server/src/main/java/org/elasticsearch/node/Node.java index e86ebc4494b11..e2939d47bcebe 100644 --- a/server/src/main/java/org/elasticsearch/node/Node.java +++ b/server/src/main/java/org/elasticsearch/node/Node.java @@ -96,7 +96,6 @@ import org.elasticsearch.gateway.GatewayService; import org.elasticsearch.gateway.MetaStateService; import org.elasticsearch.gateway.PersistedClusterStateService; -import org.elasticsearch.health.HealthIndicatorService; import org.elasticsearch.health.HealthService; import org.elasticsearch.http.HttpServerTransport; import org.elasticsearch.index.IndexSettingProviders; @@ -865,7 +864,7 @@ protected Node( metadataCreateIndexService, settingsModule.getIndexScopedSettings() ); - final List> builtinTaskExecutors = Arrays.asList(systemIndexMigrationExecutor); + final List> builtinTaskExecutors = List.of(systemIndexMigrationExecutor); final List> pluginTaskExectors = pluginsService.filterPlugins(PersistentTaskPlugin.class) .stream() .map( @@ -879,10 +878,9 @@ protected Node( ) .flatMap(List::stream) .collect(toList()); - final List> allTasksExectors = Stream.of(pluginTaskExectors, builtinTaskExecutors) - .flatMap(List::stream) - .collect(toList()); - final PersistentTasksExecutorRegistry registry = new PersistentTasksExecutorRegistry(allTasksExectors); + final PersistentTasksExecutorRegistry registry = new PersistentTasksExecutorRegistry( + concatLists(pluginTaskExectors, builtinTaskExecutors) + ); final PersistentTasksClusterService persistentTasksClusterService = new PersistentTasksClusterService( settings, registry, @@ -901,15 +899,7 @@ protected Node( clusterService.getClusterSettings() ); - List serverHealthIndicatorServices = List.of( - new InstanceHasMasterHealthIndicatorService(clusterService), - new RepositoryIntegrityHealthIndicatorService(clusterService) - ); - List pluginHealthIndicatorServices = pluginsService.filterPlugins(HealthPlugin.class) - .stream() - .flatMap(plugin -> plugin.getHealthIndicatorServices().stream()) - .toList(); - HealthService healthService = new HealthService(concatLists(serverHealthIndicatorServices, pluginHealthIndicatorServices)); + HealthService healthService = createHealthService(clusterService); modules.add(b -> { b.bind(Node.class).toInstance(this); @@ -1042,6 +1032,18 @@ protected Node( } } + private HealthService createHealthService(ClusterService clusterService) { + var serverHealthIndicatorServices = List.of( + new InstanceHasMasterHealthIndicatorService(clusterService), + new RepositoryIntegrityHealthIndicatorService(clusterService) + ); + var pluginHealthIndicatorServices = pluginsService.filterPlugins(HealthPlugin.class) + .stream() + .flatMap(plugin -> plugin.getHealthIndicatorServices().stream()) + .toList(); + return new HealthService(concatLists(serverHealthIndicatorServices, pluginHealthIndicatorServices)); + } + private RecoveryPlannerService getRecoveryPlannerService( ThreadPool threadPool, ClusterService 
clusterService, @@ -1062,8 +1064,7 @@ private RecoveryPlannerService getRecoveryPlannerService( threadPool, clusterService ); - final RecoveryPlannerPlugin recoveryPlannerPlugin = recoveryPlannerPlugins.get(0); - return recoveryPlannerPlugin.createRecoveryPlannerService(shardSnapshotsService); + return recoveryPlannerPlugins.get(0).createRecoveryPlannerService(shardSnapshotsService); } protected TransportService newTransportService(

From 5d8421744aaafa1c9f56ef509b35523db6817997 Mon Sep 17 00:00:00 2001
From: Quentin Pradet
Date: Tue, 15 Feb 2022 13:00:52 +0400
Subject: [PATCH 094/167] Fix link to benchmark page (#83887)

--- benchmarks/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/benchmarks/README.md b/benchmarks/README.md index a4d238c343c1c..9e86dd49b60f6 100644 --- a/benchmarks/README.md +++ b/benchmarks/README.md @@ -5,7 +5,7 @@ This directory contains the microbenchmark suite of Elasticsearch. It relies on ## Purpose We do not want to microbenchmark everything but the kitchen sink and should typically rely on our -[macrobenchmarks](https://elasticsearch-benchmarks.elastic.co/app/kibana#/dashboard/Nightly-Benchmark-Overview) with +[macrobenchmarks](https://elasticsearch-benchmarks.elastic.co/) with [Rally](http://github.com/elastic/rally). Microbenchmarks are intended to spot performance regressions in performance-critical components. The microbenchmark suite is also handy for ad-hoc microbenchmarks but please remove them again before merging your PR.

From 68a04a36cfa2308302df269f5ed2d0b81cb228da Mon Sep 17 00:00:00 2001
From: Lukas Wegmann
Date: Tue, 15 Feb 2022 11:10:55 +0100
Subject: [PATCH 095/167] SQL: Replace scroll cursors with point-in-time and search_after (#83381)

Resolves #61873

The goal of this PR is to remove the use of the deprecated scroll cursors in SQL. Functionality and APIs should remain the same, with one notable difference: the last page of a search hit query used to always include a scroll cursor if it was non-empty. This is no longer the case: once a result set is exhausted, the PIT is closed and the last page does not include a cursor.

Note that PIT can also be used for aggregation and PIVOT queries, but this is out of scope for this PR and will be implemented in a follow-up.

Additionally, this PR resolves #80523 because the total doc count is no longer required. 
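Conceptually, the new SearchHitCursor pages through hits with the standard point-in-time pattern. A minimal sketch of that pattern against the internal Client API (classes from org.elasticsearch.action.search and org.elasticsearch.search.builder; the actual cursor implementation differs in detail):

    // Sketch of PIT + search_after paging; assumes a Client named client is in
    // scope, error handling omitted.
    String pitId = client.execute(
        OpenPointInTimeAction.INSTANCE,
        new OpenPointInTimeRequest("test").keepAlive(TimeValue.timeValueMinutes(1))
    ).actionGet().getPointInTimeId();
    SearchSourceBuilder source = new SearchSourceBuilder()
        .pointInTimeBuilder(new PointInTimeBuilder(pitId))
        .sort(SortBuilders.fieldSort("_shard_doc")) // stable tiebreaker within the PIT
        .size(1000);
    SearchResponse page = client.search(new SearchRequest().source(source)).actionGet();
    while (page.getHits().getHits().length > 0) {
        SearchHit[] hits = page.getHits().getHits();
        source.searchAfter(hits[hits.length - 1].getSortValues()); // resume after the last hit
        page = client.search(new SearchRequest().source(source)).actionGet();
    }
    // Unlike scroll contexts, the PIT is closed explicitly once the result set is
    // exhausted, which is why the last page no longer carries a cursor.
    client.execute(ClosePointInTimeAction.INSTANCE, new ClosePointInTimeRequest(pitId)).actionGet();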
--- docs/changelog/83381.yaml | 7 + x-pack/plugin/build.gradle | 55 +++-- .../sql/qa/jdbc/JdbcIntegrationTestCase.java | 2 +- .../xpack/sql/qa/mixed_node/SqlCompatIT.java | 55 ++++- .../sql/qa/security/RestSqlSecurityIT.java | 31 ++- .../sql/qa/jdbc/JdbcIntegrationTestCase.java | 2 +- .../xpack/sql/qa/rest/RestSqlTestCase.java | 39 ++- .../xpack/sql/action/SqlCancellationIT.java | 12 +- .../xpack/sql/execution/PlanExecutor.java | 2 +- .../execution/search/CompositeAggCursor.java | 37 +-- .../xpack/sql/execution/search/Querier.java | 158 +++++++----- .../search/SchemaSearchHitRowSet.java | 6 +- .../sql/execution/search/ScrollCursor.java | 176 -------------- .../sql/execution/search/SearchHitCursor.java | 229 ++++++++++++++++++ .../sql/execution/search/SearchHitRowSet.java | 42 ++-- .../xpack/sql/plugin/TextFormatterCursor.java | 4 +- .../xpack/sql/session/Cursor.java | 2 +- .../xpack/sql/session/Cursors.java | 4 +- .../xpack/sql/session/EmptyCursor.java | 2 +- .../xpack/sql/session/ListCursor.java | 2 +- .../xpack/sql/analysis/CancellationTests.java | 36 ++- .../sql/execution/search/QuerierTests.java | 2 +- ...orTests.java => SearchHitCursorTests.java} | 32 +-- .../xpack/sql/plugin/CursorTests.java | 36 +-- .../resources/rest-api-spec/test/sql/sql.yml | 38 +++ 25 files changed, 585 insertions(+), 426 deletions(-) create mode 100644 docs/changelog/83381.yaml delete mode 100644 x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/ScrollCursor.java create mode 100644 x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/SearchHitCursor.java rename x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/{ScrollCursorTests.java => SearchHitCursorTests.java} (72%) diff --git a/docs/changelog/83381.yaml b/docs/changelog/83381.yaml new file mode 100644 index 0000000000000..73066155ac6ba --- /dev/null +++ b/docs/changelog/83381.yaml @@ -0,0 +1,7 @@ +pr: 83381 +summary: Replace scroll cursors with point-in-time and `search_after` +area: SQL +type: enhancement +issues: + - 61873 + - 80523 diff --git a/x-pack/plugin/build.gradle b/x-pack/plugin/build.gradle index f79cdc65cdab1..e184c9ffb2567 100644 --- a/x-pack/plugin/build.gradle +++ b/x-pack/plugin/build.gradle @@ -1,8 +1,8 @@ import org.elasticsearch.gradle.Version +import org.elasticsearch.gradle.VersionProperties import org.elasticsearch.gradle.internal.info.BuildParams -import org.elasticsearch.gradle.util.GradleUtils import org.elasticsearch.gradle.internal.test.RestIntegTestTask -import org.elasticsearch.gradle.VersionProperties +import org.elasticsearch.gradle.util.GradleUtils apply plugin: 'elasticsearch.internal-yaml-rest-test' apply plugin: 'elasticsearch.yaml-rest-compat-test' @@ -77,16 +77,19 @@ tasks.named("yamlRestTest").configure { } tasks.named("yamlRestTestV7CompatTest").configure { - systemProperty 'tests.rest.blacklist', [ - 'unsigned_long/50_script_values/Scripted sort values', - 'unsigned_long/50_script_values/script_score query', - 'unsigned_long/50_script_values/Script query', - 'data_stream/140_data_stream_aliases/Fix IndexNotFoundException error when handling remove alias action', - ].join(',') + systemProperty 'tests.rest.blacklist', [ + 'unsigned_long/50_script_values/Scripted sort values', + 'unsigned_long/50_script_values/script_score query', + 'unsigned_long/50_script_values/Script query', + 'data_stream/140_data_stream_aliases/Fix IndexNotFoundException error when handling remove alias action', + ].join(',') } 
-tasks.named("yamlRestTestV7CompatTransform").configure{ task -> - task.skipTest("vectors/10_dense_vector_basic/Deprecated function signature", "to support it, it would require to almost revert back the #48725 and complicate the code" ) +tasks.named("yamlRestTestV7CompatTransform").configure { task -> + task.skipTest( + "vectors/10_dense_vector_basic/Deprecated function signature", + "to support it, it would require to almost revert back the #48725 and complicate the code" + ) task.skipTest("vectors/30_sparse_vector_basic/Cosine Similarity", "not supported for compatibility") task.skipTest("vectors/30_sparse_vector_basic/Deprecated function signature", "not supported for compatibility") task.skipTest("vectors/30_sparse_vector_basic/Dot Product", "not supported for compatibility") @@ -94,26 +97,42 @@ tasks.named("yamlRestTestV7CompatTransform").configure{ task -> task.skipTest("vectors/35_sparse_vector_l1l2/L2 norm", "not supported for compatibility") task.skipTest("vectors/40_sparse_vector_special_cases/Dimensions can be sorted differently", "not supported for compatibility") task.skipTest("vectors/40_sparse_vector_special_cases/Documents missing a vector field", "not supported for compatibility") - task.skipTest("vectors/40_sparse_vector_special_cases/Query vector has different dimensions from documents' vectors", "not supported for compatibility") + task.skipTest( + "vectors/40_sparse_vector_special_cases/Query vector has different dimensions from documents' vectors", + "not supported for compatibility" + ) task.skipTest("vectors/40_sparse_vector_special_cases/Sparse vectors should error with dense vector functions", "not supported for compatibility") task.skipTest("vectors/40_sparse_vector_special_cases/Vectors of different dimensions and data types", "not supported for compatibility") task.skipTest("vectors/50_vector_stats/Usage stats on vector fields", "not supported for compatibility") - task.skipTest("roles/30_prohibited_role_query/Test use prohibited query inside role query", "put role request with a term lookup (deprecated) and type. Requires validation in REST layer") + task.skipTest( + "roles/30_prohibited_role_query/Test use prohibited query inside role query", + "put role request with a term lookup (deprecated) and type. 
Requires validation in REST layer" + ) task.skipTest("ml/jobs_crud/Test create job with delimited format", "removing undocumented functionality") task.skipTest("ml/datafeeds_crud/Test update datafeed to point to missing job", "behaviour change #44752 - not allowing to update datafeed job_id") - task.skipTest("ml/datafeeds_crud/Test update datafeed to point to different job", "behaviour change #44752 - not allowing to update datafeed job_id") - task.skipTest("ml/datafeeds_crud/Test update datafeed to point to job already attached to another datafeed", "behaviour change #44752 - not allowing to update datafeed job_id") + task.skipTest( + "ml/datafeeds_crud/Test update datafeed to point to different job", + "behaviour change #44752 - not allowing to update datafeed job_id" + ) + task.skipTest( + "ml/datafeeds_crud/Test update datafeed to point to job already attached to another datafeed", + "behaviour change #44752 - not allowing to update datafeed job_id" + ) task.skipTest("rollup/delete_job/Test basic delete_job", "rollup was an experimental feature, also see #41227") task.skipTest("rollup/delete_job/Test delete job twice", "rollup was an experimental feature, also see #41227") task.skipTest("rollup/delete_job/Test delete running job", "rollup was an experimental feature, also see #41227") task.skipTest("rollup/get_jobs/Test basic get_jobs", "rollup was an experimental feature, also see #41227") task.skipTest("rollup/put_job/Test basic put_job", "rollup was an experimental feature, also see #41227") task.skipTest("rollup/start_job/Test start job twice", "rollup was an experimental feature, also see #41227") - task.skipTest("ml/trained_model_cat_apis/Test cat trained models", "A type field was added to cat.ml_trained_models #73660, this is a backwards compatible change. Still this is a cat api, and we don't support them with rest api compatibility. (the test would be very hard to transform too)") + task.skipTest( + "ml/trained_model_cat_apis/Test cat trained models", + "A type field was added to cat.ml_trained_models #73660, this is a backwards compatible change. Still this is a cat api, and we don't support them with rest api compatibility. 
(the test would be very hard to transform too)" + ) task.skipTest("indices.freeze/30_usage/Usage stats on frozen indices", "#70192 -- the freeze index API is removed from 8.0") task.skipTest("indices.freeze/20_stats/Translog stats on frozen indices", "#70192 -- the freeze index API is removed from 8.0") task.skipTest("indices.freeze/10_basic/Basic", "#70192 -- the freeze index API is removed from 8.0") task.skipTest("indices.freeze/10_basic/Test index options", "#70192 -- the freeze index API is removed from 8.0") + task.skipTest("sql/sql/Paging through results", "scrolling through search hit queries no longer produces empty last page in 8.2") task.skipTest("service_accounts/10_basic/Test get service accounts", "new service accounts are added") task.replaceValueInMatch("_type", "_doc") @@ -121,10 +140,12 @@ tasks.named("yamlRestTestV7CompatTransform").configure{ task -> task.addAllowedWarningRegexForTest("Including \\[accept_enterprise\\] in get license.*", "Installing enterprise license") task.addAllowedWarningRegex("bucket_span .* is not an integral .* of the number of seconds in 1d.* This is now deprecated.*") - task.replaceValueTextByKeyValue("catch", + task.replaceValueTextByKeyValue( + "catch", 'bad_request', '/It is no longer possible to freeze indices, but existing frozen indices can still be unfrozen/', - "Cannot freeze write index for data stream") + "Cannot freeze write index for data stream" + ) } diff --git a/x-pack/plugin/sql/qa/jdbc/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/JdbcIntegrationTestCase.java b/x-pack/plugin/sql/qa/jdbc/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/JdbcIntegrationTestCase.java index 34d91f69d8fd4..16bd33ca31d74 100644 --- a/x-pack/plugin/sql/qa/jdbc/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/JdbcIntegrationTestCase.java +++ b/x-pack/plugin/sql/qa/jdbc/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/JdbcIntegrationTestCase.java @@ -31,7 +31,7 @@ public abstract class JdbcIntegrationTestCase extends ESRestTestCase { @After public void checkSearchContent() throws IOException { - // Some context might linger due to fire and forget nature of scroll cleanup + // Some context might linger due to fire and forget nature of PIT cleanup assertNoSearchContexts(); } diff --git a/x-pack/plugin/sql/qa/mixed-node/src/test/java/org/elasticsearch/xpack/sql/qa/mixed_node/SqlCompatIT.java b/x-pack/plugin/sql/qa/mixed-node/src/test/java/org/elasticsearch/xpack/sql/qa/mixed_node/SqlCompatIT.java index 3cc5383defed8..5d93943f4223a 100644 --- a/x-pack/plugin/sql/qa/mixed-node/src/test/java/org/elasticsearch/xpack/sql/qa/mixed_node/SqlCompatIT.java +++ b/x-pack/plugin/sql/qa/mixed-node/src/test/java/org/elasticsearch/xpack/sql/qa/mixed_node/SqlCompatIT.java @@ -11,6 +11,7 @@ import org.elasticsearch.Version; import org.elasticsearch.client.Request; import org.elasticsearch.client.Response; +import org.elasticsearch.client.ResponseException; import org.elasticsearch.client.RestClient; import org.elasticsearch.common.Strings; import org.elasticsearch.common.xcontent.XContentHelper; @@ -21,6 +22,7 @@ import org.elasticsearch.xpack.ql.TestNode; import org.elasticsearch.xpack.ql.TestNodes; import org.elasticsearch.xpack.sql.qa.rest.BaseRestSqlTestCase; +import org.hamcrest.Matchers; import org.junit.AfterClass; import org.junit.Before; @@ -111,8 +113,7 @@ private void testNullsOrderWithMissingOrderSupport(RestClient client) throws IOE assertNull(result.get(2)); } - @SuppressWarnings("unchecked") - private List runOrderByNullsLastQuery(RestClient 
queryClient) throws IOException { + private void indexDocs() throws IOException { Request putIndex = new Request("PUT", "/test"); putIndex.setJsonEntity(""" {"settings":{"index":{"number_of_shards":3}}}"""); @@ -124,17 +125,19 @@ private List runOrderByNullsLastQuery(RestClient queryClient) throws IO for (String doc : Arrays.asList("{\"int\":1,\"kw\":\"foo\"}", "{\"int\":2,\"kw\":\"bar\"}", "{\"kw\":\"bar\"}")) { bulk.append("{\"index\":{}}\n").append(doc).append("\n"); } + indexDocs.setJsonEntity(bulk.toString()); client().performRequest(indexDocs); + } + + @SuppressWarnings("unchecked") + private List runOrderByNullsLastQuery(RestClient queryClient) throws IOException { + indexDocs(); Request query = new Request("POST", "_sql"); query.setJsonEntity(sqlQueryEntityWithOptionalMode("SELECT int FROM test GROUP BY 1 ORDER BY 1 NULLS LAST", bwcVersion)); - Response queryResponse = queryClient.performRequest(query); - - assertEquals(200, queryResponse.getStatusLine().getStatusCode()); + Map result = performRequestAndReadBodyAsJson(queryClient, query); - InputStream content = queryResponse.getEntity().getContent(); - Map result = XContentHelper.convertToMap(JsonXContent.jsonXContent, content, false); List> rows = (List>) result.get("rows"); return rows.stream().map(row -> (Integer) row.get(0)).collect(Collectors.toList()); } @@ -156,4 +159,42 @@ public static String sqlQueryEntityWithOptionalMode(String query, Version bwcVer return Strings.toString(json); } + public void testCursorFromOldNodeFailsOnNewNode() throws IOException { + assertCursorNotCompatibleAcrossVersions(bwcVersion, oldNodesClient, Version.CURRENT, newNodesClient); + } + + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/83726") + public void testCursorFromNewNodeFailsOnOldNode() throws IOException { + assertCursorNotCompatibleAcrossVersions(Version.CURRENT, newNodesClient, bwcVersion, oldNodesClient); + } + + private void assertCursorNotCompatibleAcrossVersions(Version version1, RestClient client1, Version version2, RestClient client2) + throws IOException { + indexDocs(); + + Request req = new Request("POST", "_sql"); + // GROUP BY queries always return a cursor + req.setJsonEntity(sqlQueryEntityWithOptionalMode("SELECT int FROM test GROUP BY 1", bwcVersion)); + Map json = performRequestAndReadBodyAsJson(client1, req); + String cursor = (String) json.get("cursor"); + assertThat(cursor, Matchers.not(Matchers.emptyString())); + + Request scrollReq = new Request("POST", "_sql"); + scrollReq.setJsonEntity("{\"cursor\": \"%s\"}".formatted(cursor)); + ResponseException exception = expectThrows(ResponseException.class, () -> client2.performRequest(scrollReq)); + + assertThat( + exception.getMessage(), + Matchers.containsString("Unsupported cursor version [" + version1 + "], expected [" + version2 + "]") + ); + } + + private Map performRequestAndReadBodyAsJson(RestClient client, Request request) throws IOException { + Response response = client.performRequest(request); + assertEquals(200, response.getStatusLine().getStatusCode()); + try (InputStream content = response.getEntity().getContent()) { + return XContentHelper.convertToMap(JsonXContent.jsonXContent, content, false); + } + } + } diff --git a/x-pack/plugin/sql/qa/server/security/src/test/java/org/elasticsearch/xpack/sql/qa/security/RestSqlSecurityIT.java b/x-pack/plugin/sql/qa/server/security/src/test/java/org/elasticsearch/xpack/sql/qa/security/RestSqlSecurityIT.java index b27d6c2138a01..ab7b594e10b21 100644 --- 
a/x-pack/plugin/sql/qa/server/security/src/test/java/org/elasticsearch/xpack/sql/qa/security/RestSqlSecurityIT.java +++ b/x-pack/plugin/sql/qa/server/security/src/test/java/org/elasticsearch/xpack/sql/qa/security/RestSqlSecurityIT.java @@ -281,18 +281,27 @@ protected AuditLogAsserter createAuditLogAsserter() { } /** - * Test the hijacking a scroll fails. This test is only implemented for - * REST because it is the only API where it is simple to hijack a scroll. + * Test that hijacking a cursor fails. This test is only implemented for + * REST because it is the only API where it is simple to hijack a cursor. * It should exercise the same code as the other APIs but if we were truly * paranoid we'd hack together something to test the others as well. */ - public void testHijackScrollFails() throws Exception { - createUser("full_access", "rest_minimal"); + public void testHijackCursorFails() throws Exception { + createUser("no_read", "read_nothing"); final String mode = randomMode(); + final String query = randomFrom( + List.of( + "SELECT * FROM test", + "SELECT a FROM test GROUP BY a", + "SELECT MAX(a) FROM test GROUP BY a ORDER BY 1", + "SHOW COLUMNS IN test" + ) + ); + Map adminResponse = RestActions.runSql( null, - new StringEntity(query("SELECT * FROM test").mode(mode).fetchSize(1).toString(), ContentType.APPLICATION_JSON), + new StringEntity(query(query).mode(mode).fetchSize(1).toString(), ContentType.APPLICATION_JSON), mode, false ); @@ -303,20 +312,18 @@ public void testHijackCursorFails() throws Exception { ResponseException e = expectThrows( ResponseException.class, () -> RestActions.runSql( - "full_access", + "no_read", new StringEntity(cursor(cursor).mode(mode).toString(), ContentType.APPLICATION_JSON), mode, false ) ); - // TODO return a better error message for bad scrolls - assertThat(e.getMessage(), containsString("No search context found for id")); - assertEquals(404, e.getResponse().getStatusLine().getStatusCode()); + + assertThat(e.getMessage(), containsString("is unauthorized for user")); + assertEquals(403, e.getResponse().getStatusLine().getStatusCode()); createAuditLogAsserter().expectSqlCompositeActionFieldCaps("test_admin", "test") - .expect(true, SQL_ACTION_NAME, "full_access", empty()) - // one scroll access denied per shard - .expect("access_denied", SQL_ACTION_NAME, "full_access", "default_native", empty(), "InternalScrollSearchRequest") + .expect("access_denied", SQL_ACTION_NAME, "no_read", "default_native", empty(), "SqlQueryRequest") .assertLogs(); } diff --git a/x-pack/plugin/sql/qa/server/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/JdbcIntegrationTestCase.java b/x-pack/plugin/sql/qa/server/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/JdbcIntegrationTestCase.java index 900e257e9d56f..2f7bcb71a79f4 100644 --- a/x-pack/plugin/sql/qa/server/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/JdbcIntegrationTestCase.java +++ b/x-pack/plugin/sql/qa/server/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/JdbcIntegrationTestCase.java @@ -31,7 +31,7 @@ public abstract class JdbcIntegrationTestCase extends RemoteClusterAwareSqlRestT @After public void checkSearchContent() throws Exception { - // Some context might linger due to fire and forget nature of scroll cleanup + // Some context might linger due to fire and forget nature of PIT cleanup assertNoSearchContexts(provisioningClient()); } diff --git a/x-pack/plugin/sql/qa/server/src/main/java/org/elasticsearch/xpack/sql/qa/rest/RestSqlTestCase.java
b/x-pack/plugin/sql/qa/server/src/main/java/org/elasticsearch/xpack/sql/qa/rest/RestSqlTestCase.java index 6879422786822..8a236202fbec7 100644 --- a/x-pack/plugin/sql/qa/server/src/main/java/org/elasticsearch/xpack/sql/qa/rest/RestSqlTestCase.java +++ b/x-pack/plugin/sql/qa/server/src/main/java/org/elasticsearch/xpack/sql/qa/rest/RestSqlTestCase.java @@ -254,6 +254,7 @@ public void testNextPageWithDatetimeAndTimezoneParam() throws IOException { expected.put("columns", singletonList(columnInfo(mode, "tz", "integer", JDBCType.INTEGER, 11))); response = runSql(new StringEntity(sqlRequest, ContentType.APPLICATION_JSON), "", mode); } else { + assertNotNull(cursor); response = runSql( new StringEntity(cursor(cursor).mode(mode).toString(), ContentType.APPLICATION_JSON), StringUtils.EMPTY, @@ -270,16 +271,12 @@ ); } expected.put("rows", values); + assertTrue(response.containsKey("cursor") == false || response.get("cursor") != null); cursor = (String) response.remove("cursor"); assertResponse(expected, response); - assertNotNull(cursor); } - Map expected = new HashMap<>(); - expected.put("rows", emptyList()); - assertResponse( - expected, - runSql(new StringEntity(cursor(cursor).mode(mode).toString(), ContentType.APPLICATION_JSON), StringUtils.EMPTY, mode) - ); + + assertNull(cursor); deleteIndex("test_date_timezone"); } @@ -1182,7 +1179,7 @@ private void executeQueryWithNextPage(String format, String expectedHeader, Stri .toString(); String cursor = null; - for (int i = 0; i < 20; i += 2) { + for (int i = 0; i <= 20; i += 2) { Tuple response; if (i == 0) { response = runSqlAsText(StringUtils.EMPTY, new StringEntity(request, ContentType.APPLICATION_JSON), format); @@ -1201,25 +1198,17 @@ private void executeQueryWithNextPage(String format, String expectedHeader, Stri expected.append("---------------+---------------+---------------\n"); } } - expected.append(String.format(Locale.ROOT, expectedLineFormat, "text" + i, i, i + 5)); - expected.append(String.format(Locale.ROOT, expectedLineFormat, "text" + (i + 1), i + 1, i + 6)); + cursor = response.v2(); - assertEquals(expected.toString(), response.v1()); - assertNotNull(cursor); + if (i < 20) { + expected.append(String.format(Locale.ROOT, expectedLineFormat, "text" + i, i, i + 5)); + expected.append(String.format(Locale.ROOT, expectedLineFormat, "text" + (i + 1), i + 1, i + 6)); + assertEquals(expected.toString(), response.v1()); + assertNotNull(cursor); + } else { + assertNull(cursor); + } } - Map expected = new HashMap<>(); - expected.put("rows", emptyList()); - assertResponse( - expected, - runSql(new StringEntity(cursor(cursor).toString(), ContentType.APPLICATION_JSON), StringUtils.EMPTY, Mode.PLAIN.toString()) - ); - - Map response = runSql( - new StringEntity(cursor(cursor).toString(), ContentType.APPLICATION_JSON), - "/close", - Mode.PLAIN.toString() - ); - assertEquals(true, response.get("succeeded")); assertEquals(0, getNumberOfSearchContexts(provisioningClient(), "test")); }
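The reworked assertions above encode the new paging contract: a PIT-backed cursor no longer produces a trailing empty page or needs an explicit close, the final data page simply omits the cursor. A minimal sketch of that client-side loop against the SQL REST endpoint, in the same low-level client style these tests use (the responseAsMap helper is hypothetical shorthand for the XContentHelper.convertToMap(JsonXContent.jsonXContent, content, false) pattern used elsewhere in this patch; query and fetch size are illustrative):

    // Page through the result set until the response carries no cursor.
    Request request = new Request("POST", "/_sql");
    request.setJsonEntity("""
        {"query": "SELECT text, number FROM test", "fetch_size": 2}""");
    Map<String, Object> page = responseAsMap(client().performRequest(request));
    while (page.containsKey("cursor")) {
        Request next = new Request("POST", "/_sql");
        next.setJsonEntity("{\"cursor\": \"%s\"}".formatted(page.get("cursor")));
        page = responseAsMap(client().performRequest(next));
    }
    // No extra empty page and no "/_sql/close" round-trip for an exhausted cursor.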
diff --git a/x-pack/plugin/sql/src/internalClusterTest/java/org/elasticsearch/xpack/sql/action/SqlCancellationIT.java b/x-pack/plugin/sql/src/internalClusterTest/java/org/elasticsearch/xpack/sql/action/SqlCancellationIT.java index 51cbe5f4ca215..1ef55fc6d911e 100644 --- a/x-pack/plugin/sql/src/internalClusterTest/java/org/elasticsearch/xpack/sql/action/SqlCancellationIT.java +++ b/x-pack/plugin/sql/src/internalClusterTest/java/org/elasticsearch/xpack/sql/action/SqlCancellationIT.java @@ -9,7 +9,6 @@ import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.index.IndexRequestBuilder; -import org.elasticsearch.action.search.SearchPhaseExecutionException; import org.elasticsearch.tasks.Task; import org.elasticsearch.tasks.TaskCancelledException; import org.junit.After; @@ -24,7 +23,7 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.greaterThan; public class SqlCancellationIT extends AbstractSqlBlockingIntegTestCase { @@ -91,18 +90,13 @@ public void testCancellation() throws Exception { disableBlocks(plugins); Exception exception = expectThrows(Exception.class, future::get); - Throwable inner = ExceptionsHelper.unwrap(exception, SearchPhaseExecutionException.class); + assertNotNull(ExceptionsHelper.unwrap(exception, TaskCancelledException.class)); if (cancelDuringSearch) { // Make sure we cancelled inside search - assertNotNull(inner); - assertThat(inner, instanceOf(SearchPhaseExecutionException.class)); - assertThat(inner.getCause(), instanceOf(TaskCancelledException.class)); + assertThat(getNumberOfContexts(plugins), greaterThan(0)); } else { // Make sure we were not cancelled inside search - assertNull(inner); assertThat(getNumberOfContexts(plugins), equalTo(0)); - Throwable cancellationException = ExceptionsHelper.unwrap(exception, TaskCancelledException.class); - assertNotNull(cancellationException); } } } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/PlanExecutor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/PlanExecutor.java index c8f8ebbf268ab..19a3a8d18bee0 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/PlanExecutor.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/PlanExecutor.java @@ -124,7 +124,7 @@ public void nextPageInternal(SqlConfiguration cfg, Cursor cursor, ActionListener } public void cleanCursor(Cursor cursor, ActionListener listener) { - cursor.clear(client, listener); + cursor.clear(client, writableRegistry, listener); } public Client client() { diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/CompositeAggCursor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/CompositeAggCursor.java index 3c51a495f3568..5fd156a8a8b84 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/CompositeAggCursor.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/CompositeAggCursor.java @@ -12,9 +12,6 @@ import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.client.internal.Client; -import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.io.stream.BytesStreamOutput; -import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -41,6 +38,11 @@ import java.util.function.BiFunction; import java.util.function.Supplier; +import static org.elasticsearch.xpack.sql.execution.search.Querier.deserializeQuery; +import static org.elasticsearch.xpack.sql.execution.search.Querier.logSearchResponse;
+import static org.elasticsearch.xpack.sql.execution.search.Querier.prepareRequest; +import static org.elasticsearch.xpack.sql.execution.search.Querier.serializeQuery; + /** * Cursor for composite aggregation (GROUP BY). * Stores the query that gets updated/slides across requests. @@ -132,7 +134,7 @@ public void nextPage(SqlConfiguration cfg, Client client, NamedWriteableRegistry log.trace("About to execute composite query {} on {}", StringUtils.toString(query), indices); } - SearchRequest request = Querier.prepareRequest(query, cfg.requestTimeout(), includeFrozen, indices); + SearchRequest request = prepareRequest(query, cfg.requestTimeout(), includeFrozen, indices); client.search(request, new ActionListener.Delegating<>(listener) { @Override @@ -169,7 +171,7 @@ static void handle( ) { if (log.isTraceEnabled()) { - Querier.logSearchResponse(response, log); + logSearchResponse(response, log); } // there are some results if (response.getAggregations().asList().isEmpty() == false) { @@ -244,31 +246,8 @@ private static void updateSourceAfterKey(Map afterKey, SearchSou } } - /** - * Deserializes the search source from a byte array. - */ - private static SearchSourceBuilder deserializeQuery(NamedWriteableRegistry registry, byte[] source) throws IOException { - try (NamedWriteableAwareStreamInput in = new NamedWriteableAwareStreamInput(StreamInput.wrap(source), registry)) { - return new SearchSourceBuilder(in); - } - } - - /** - * Serializes the search source to a byte array. - */ - private static byte[] serializeQuery(SearchSourceBuilder source) throws IOException { - if (source == null) { - return new byte[0]; - } - - try (BytesStreamOutput out = new BytesStreamOutput()) { - source.writeTo(out); - return BytesReference.toBytes(out.bytes()); - } - } - @Override - public void clear(Client client, ActionListener listener) { + public void clear(Client client, NamedWriteableRegistry registry, ActionListener listener) { listener.onResponse(true); }
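The Querier change that follows replaces scroll requests with an explicit point-in-time lifecycle: open a PIT, run the search against the PIT id instead of concrete indices, and close the PIT on failure or once the last page is served. A condensed sketch of that sequence, not the literal Querier code (assumes a Client named client and an illustrative keep-alive; listener plumbing and SQL wiring elided):

    OpenPointInTimeRequest open = new OpenPointInTimeRequest("test").keepAlive(TimeValue.timeValueSeconds(45));
    client.execute(OpenPointInTimeAction.INSTANCE, open, ActionListener.wrap(openResponse -> {
        String pitId = openResponse.getPointInTimeId();
        SearchSourceBuilder source = new SearchSourceBuilder().size(1000)
            .pointInTimeBuilder(new PointInTimeBuilder(pitId)); // the PIT, not the index, scopes the search
        SearchRequest search = new SearchRequest().source(source); // indices stay empty when a PIT is set
        client.search(search, ActionListener.wrap(
            response -> { /* extract hits; reuse pitId for the next page */ },
            // on failure, close the PIT before propagating, mirroring closePointInTimeAfterError below
            failure -> client.execute(ClosePointInTimeAction.INSTANCE, new ClosePointInTimeRequest(pitId),
                ActionListener.wrap(closed -> {}, closeFailure -> {}))
        ));
    }, failure -> { /* surface the open failure */ }));

In the actual diff the keep-alive is cfg.pageTimeout(), so the PIT behind an abandoned cursor expires on its own instead of pinning search contexts indefinitely.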
diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/Querier.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/Querier.java index dbdb23b30d914..298e758a6ff50 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/Querier.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/Querier.java @@ -10,12 +10,20 @@ import org.apache.logging.log4j.Logger; import org.apache.lucene.util.PriorityQueue; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.search.ClosePointInTimeAction; +import org.elasticsearch.action.search.ClosePointInTimeRequest; +import org.elasticsearch.action.search.OpenPointInTimeAction; +import org.elasticsearch.action.search.OpenPointInTimeRequest; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchResponse; -import org.elasticsearch.action.search.ShardSearchFailure; import org.elasticsearch.client.internal.Client; +import org.elasticsearch.client.internal.ParentTaskAssigningClient; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.util.CollectionUtils; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.io.stream.BytesStreamOutput; +import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.core.TimeValue; import org.elasticsearch.core.Tuple; import org.elasticsearch.search.aggregations.Aggregation; @@ -23,6 +31,7 @@ import org.elasticsearch.search.aggregations.MultiBucketConsumerService; import org.elasticsearch.search.aggregations.bucket.MultiBucketsAggregation.Bucket; import org.elasticsearch.search.aggregations.bucket.filter.Filters; +import org.elasticsearch.search.builder.PointInTimeBuilder; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.tasks.TaskCancelledException; import org.elasticsearch.xcontent.XContentBuilder; @@ -122,27 +131,64 @@ public void query(List output, QueryContainer query, String index, Ac List> sortingColumns = query.sortingColumns(); listener = sortingColumns.isEmpty() ? listener : new LocalAggregationSorterListener(listener, sortingColumns, query.limit()); - ActionListener l = null; - if (query.isAggsOnly()) { + if (cfg.task() != null && cfg.task().isCancelled()) { + listener.onFailure(new TaskCancelledException("cancelled")); + } else if (query.isAggsOnly()) { + ActionListener l; if (query.aggs().useImplicitGroupBy()) { l = new ImplicitGroupActionListener(listener, client, cfg, output, query, search); } else { l = new CompositeActionListener(listener, client, cfg, output, query, search); } + client.search(search, l); } else { - search.scroll(cfg.pageTimeout()); - l = new ScrollActionListener(listener, client, cfg, output, query); + searchWithPointInTime(search, new SearchHitActionListener(listener, client, cfg, output, query, sourceBuilder)); } + } - if (cfg.task() != null && cfg.task().isCancelled()) { - listener.onFailure(new TaskCancelledException("cancelled")); - return; + private void searchWithPointInTime(SearchRequest search, ActionListener listener) { + final OpenPointInTimeRequest openPitRequest = new OpenPointInTimeRequest(search.indices()).indicesOptions(search.indicesOptions()) + .keepAlive(cfg.pageTimeout()); + + client.execute(OpenPointInTimeAction.INSTANCE, openPitRequest, wrap(openPointInTimeResponse -> { + String pitId = openPointInTimeResponse.getPointInTimeId(); + search.indices(Strings.EMPTY_ARRAY); + search.source().pointInTimeBuilder(new PointInTimeBuilder(pitId)); + ActionListener closePitOnErrorListener = wrap(searchResponse -> { + try { + listener.onResponse(searchResponse); + } catch (Exception e) { + closePointInTimeAfterError(client, pitId, e, listener); + } + }, searchError -> closePointInTimeAfterError(client, pitId, searchError, listener)); + client.search(search, closePitOnErrorListener); + }, listener::onFailure)); + } + + private static void closePointInTimeAfterError(Client client, String pointInTimeId, Exception e, ActionListener listener) { + closePointInTime(client, pointInTimeId, wrap(r -> listener.onFailure(e), closeError -> { + e.addSuppressed(closeError); + listener.onFailure(e); + })); + } + + public static void closePointInTime(Client client, String pointInTimeId, ActionListener listener) { + if (pointInTimeId != null) { + // request should not be made with the parent task assigned because the parent task might already be canceled + client = client instanceof ParentTaskAssigningClient wrapperClient ?
wrapperClient.unwrap() : client; + + client.execute( + ClosePointInTimeAction.INSTANCE, + new ClosePointInTimeRequest(pointInTimeId), + wrap(clearPointInTimeResponse -> listener.onResponse(clearPointInTimeResponse.isSucceeded()), listener::onFailure) + ); + } else { + listener.onResponse(true); } - client.search(search, l); } - public static SearchRequest prepareRequest(SearchSourceBuilder source, TimeValue timeout, boolean includeFrozen, String... indices) { - source.timeout(timeout); + public static SearchRequest prepareRequest(SearchSourceBuilder source, TimeValue timeOut, boolean includeFrozen, String... indices) { + source.timeout(timeOut); SearchRequest searchRequest = new SearchRequest(INTRODUCING_UNSIGNED_LONG); searchRequest.indices(indices); @@ -181,6 +227,29 @@ protected static void logSearchResponse(SearchResponse response, Logger logger) ); } + /** + * Deserializes the search source from a byte array. + */ + public static SearchSourceBuilder deserializeQuery(NamedWriteableRegistry registry, byte[] source) throws IOException { + try (NamedWriteableAwareStreamInput in = new NamedWriteableAwareStreamInput(StreamInput.wrap(source), registry)) { + return new SearchSourceBuilder(in); + } + } + + /** + * Serializes the search source to a byte array. + */ + public static byte[] serializeQuery(SearchSourceBuilder source) throws IOException { + if (source == null) { + return new byte[0]; + } + + try (BytesStreamOutput out = new BytesStreamOutput()) { + source.writeTo(out); + return BytesReference.toBytes(out.bytes()); + } + } + /** * Listener used for local sorting (typically due to aggregations used inside `ORDER BY`). * @@ -504,24 +573,27 @@ private BucketExtractor createExtractor(FieldExtraction ref, BucketExtractor tot } /** - * Dedicated listener for column retrieval/non-grouped queries (scrolls). + * Dedicated listener for column retrieval/non-grouped queries (search hits). */ - static class ScrollActionListener extends BaseActionListener { + static class SearchHitActionListener extends BaseActionListener { private final QueryContainer query; private final BitSet mask; private final boolean multiValueFieldLeniency; + private final SearchSourceBuilder source; - ScrollActionListener( + SearchHitActionListener( ActionListener listener, Client client, SqlConfiguration cfg, List output, - QueryContainer query + QueryContainer query, + SearchSourceBuilder source ) { super(listener, client, cfg, output); this.query = query; this.mask = query.columnMask(output); this.multiValueFieldLeniency = cfg.multiValueFieldLeniency(); + this.source = source; } @Override @@ -534,12 +606,13 @@ protected void handleResponse(SearchResponse response, ActionListener list exts.add(createExtractor(ref.extraction())); } - ScrollCursor.handle( + SearchHitCursor.handle( + client, response, - () -> new SchemaSearchHitRowSet(schema, exts, mask, query.limit(), response), - p -> listener.onResponse(p), - p -> clear(response.getScrollId(), wrap(success -> listener.onResponse(p), listener::onFailure)), - schema + source, + () -> new SchemaSearchHitRowSet(schema, exts, mask, source.size(), query.limit(), response), + listener, + query.shouldIncludeFrozen() ); } @@ -579,7 +652,7 @@ private HitExtractor createExtractor(FieldExtraction ref) { /** * Base listener class providing clean-up and exception handling. - * Handles both scroll queries (scan/scroll) and regular/composite-aggs queries. + * Handles both search hits and composite-aggs queries. 
*/ abstract static class BaseActionListener extends ActionListener.Delegating { @@ -595,52 +668,13 @@ abstract static class BaseActionListener extends ActionListener.Delegating cleanup(response, e))); - } - } catch (Exception ex) { - cleanup(response, ex); - } + handleResponse(response, delegate); } protected abstract void handleResponse(SearchResponse response, ActionListener listener); - // clean-up the scroll in case of exception - protected final void cleanup(SearchResponse response, Exception ex) { - if (response != null && response.getScrollId() != null) { - client.prepareClearScroll() - .addScrollId(response.getScrollId()) - // in case of failure, report the initial exception instead of the one resulting from cleaning the scroll - .execute(ActionListener.wrap(r -> delegate.onFailure(ex), e -> { - ex.addSuppressed(e); - delegate.onFailure(ex); - })); - } else { - delegate.onFailure(ex); - } - } - - protected final void clear(String scrollId, ActionListener listener) { - if (scrollId != null) { - client.prepareClearScroll() - .addScrollId(scrollId) - .execute( - ActionListener.wrap( - clearScrollResponse -> listener.onResponse(clearScrollResponse.isSucceeded()), - listener::onFailure - ) - ); - } else { - listener.onResponse(false); - } - } } @SuppressWarnings("rawtypes") diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/SchemaSearchHitRowSet.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/SchemaSearchHitRowSet.java index ea6131c564bf3..67712658529fb 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/SchemaSearchHitRowSet.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/SchemaSearchHitRowSet.java @@ -15,15 +15,15 @@ import java.util.List; /** - * Initial results from a scroll search. Distinct from the following pages + * Initial results from a search hit search. Distinct from the following pages * because it has a {@link Schema} available. See {@link SearchHitRowSet} * for the next pages. */ class SchemaSearchHitRowSet extends SearchHitRowSet implements SchemaRowSet { private final Schema schema; - SchemaSearchHitRowSet(Schema schema, List exts, BitSet mask, int limitHits, SearchResponse response) { - super(exts, mask, limitHits, response); + SchemaSearchHitRowSet(Schema schema, List exts, BitSet mask, int sizeRequested, int limitHits, SearchResponse response) { + super(exts, mask, sizeRequested, limitHits, response); this.schema = schema; } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/ScrollCursor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/ScrollCursor.java deleted file mode 100644 index e240ca06375d7..0000000000000 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/ScrollCursor.java +++ /dev/null @@ -1,176 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ -package org.elasticsearch.xpack.sql.execution.search; - -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; -import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.search.ClearScrollRequest; -import org.elasticsearch.action.search.ClearScrollResponse; -import org.elasticsearch.action.search.SearchResponse; -import org.elasticsearch.action.search.SearchScrollRequest; -import org.elasticsearch.client.internal.Client; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.core.Tuple; -import org.elasticsearch.search.SearchHit; -import org.elasticsearch.xpack.ql.execution.search.extractor.HitExtractor; -import org.elasticsearch.xpack.ql.type.Schema; -import org.elasticsearch.xpack.sql.session.Cursor; -import org.elasticsearch.xpack.sql.session.Rows; -import org.elasticsearch.xpack.sql.session.SqlConfiguration; - -import java.io.IOException; -import java.util.BitSet; -import java.util.List; -import java.util.Objects; -import java.util.function.Consumer; -import java.util.function.Supplier; - -import static org.elasticsearch.action.ActionListener.wrap; - -public class ScrollCursor implements Cursor { - - private static final Logger log = LogManager.getLogger(ScrollCursor.class); - - public static final String NAME = "s"; - - private final String scrollId; - private final List extractors; - private final BitSet mask; - private final int limit; - - public ScrollCursor(String scrollId, List extractors, BitSet mask, int limit) { - this.scrollId = scrollId; - this.extractors = extractors; - this.mask = mask; - this.limit = limit; - } - - public ScrollCursor(StreamInput in) throws IOException { - scrollId = in.readString(); - limit = in.readVInt(); - - extractors = in.readNamedWriteableList(HitExtractor.class); - mask = BitSet.valueOf(in.readByteArray()); - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - out.writeString(scrollId); - out.writeVInt(limit); - - out.writeNamedWriteableList(extractors); - out.writeByteArray(mask.toByteArray()); - } - - @Override - public String getWriteableName() { - return NAME; - } - - String scrollId() { - return scrollId; - } - - BitSet mask() { - return mask; - } - - List extractors() { - return extractors; - } - - int limit() { - return limit; - } - - @Override - public void nextPage(SqlConfiguration cfg, Client client, NamedWriteableRegistry registry, ActionListener listener) { - if (log.isTraceEnabled()) { - log.trace("About to execute scroll query {}", scrollId); - } - - SearchScrollRequest request = new SearchScrollRequest(scrollId).scroll(cfg.pageTimeout()); - client.searchScroll(request, wrap(response -> { - handle( - response, - () -> new SearchHitRowSet(extractors, mask, limit, response), - p -> listener.onResponse(p), - p -> clear(client, wrap(success -> listener.onResponse(p), listener::onFailure)), - Schema.EMPTY - ); - }, listener::onFailure)); - } - - @Override - public void clear(Client client, ActionListener listener) { - cleanCursor( - client, - scrollId, - wrap(clearScrollResponse -> listener.onResponse(clearScrollResponse.isSucceeded()), listener::onFailure) - ); - } - - static void handle( - SearchResponse response, - Supplier makeRowHit, - Consumer onPage, - Consumer clearScroll, - Schema schema - ) { - if (log.isTraceEnabled()) { - Querier.logSearchResponse(response, log); - } - SearchHit[] hits = 
response.getHits().getHits(); - // clean-up - if (hits.length > 0) { - SearchHitRowSet rowSet = makeRowHit.get(); - Tuple nextScrollData = rowSet.nextScrollData(); - - if (nextScrollData == null) { - // no more data, let's clean the scroll before continuing - clearScroll.accept(Page.last(rowSet)); - } else { - Cursor next = new ScrollCursor(nextScrollData.v1(), rowSet.extractors(), rowSet.mask(), nextScrollData.v2()); - onPage.accept(new Page(rowSet, next)); - } - } - // no-hits - else { - clearScroll.accept(Page.last(Rows.empty(schema))); - } - } - - @Override - public boolean equals(Object obj) { - if (obj == null || obj.getClass() != getClass()) { - return false; - } - ScrollCursor other = (ScrollCursor) obj; - return Objects.equals(scrollId, other.scrollId) - && Objects.equals(extractors, other.extractors) - && Objects.equals(limit, other.limit); - } - - @Override - public int hashCode() { - return Objects.hash(scrollId, extractors, limit); - } - - @Override - public String toString() { - return "cursor for scroll [" + scrollId + "]"; - } - - public static void cleanCursor(Client client, String scrollId, ActionListener listener) { - ClearScrollRequest clearScrollRequest = new ClearScrollRequest(); - clearScrollRequest.addScrollId(scrollId); - client.clearScroll(clearScrollRequest, listener); - } -} diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/SearchHitCursor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/SearchHitCursor.java new file mode 100644 index 0000000000000..5258492a29af1 --- /dev/null +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/SearchHitCursor.java @@ -0,0 +1,229 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ +package org.elasticsearch.xpack.sql.execution.search; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.search.SearchRequest; +import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.client.internal.Client; +import org.elasticsearch.common.io.stream.NamedWriteable; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.search.SearchHit; +import org.elasticsearch.search.builder.PointInTimeBuilder; +import org.elasticsearch.search.builder.SearchSourceBuilder; +import org.elasticsearch.xpack.ql.execution.search.extractor.HitExtractor; +import org.elasticsearch.xpack.ql.util.StringUtils; +import org.elasticsearch.xpack.sql.session.Cursor; +import org.elasticsearch.xpack.sql.session.SqlConfiguration; +import org.elasticsearch.xpack.sql.util.Check; + +import java.io.IOException; +import java.util.Arrays; +import java.util.BitSet; +import java.util.List; +import java.util.Objects; +import java.util.function.Supplier; + +import static org.elasticsearch.xpack.sql.execution.search.Querier.closePointInTime; +import static org.elasticsearch.xpack.sql.execution.search.Querier.deserializeQuery; +import static org.elasticsearch.xpack.sql.execution.search.Querier.logSearchResponse; +import static org.elasticsearch.xpack.sql.execution.search.Querier.prepareRequest; +import static org.elasticsearch.xpack.sql.execution.search.Querier.serializeQuery; + +public class SearchHitCursor implements Cursor { + + private static final Logger log = LogManager.getLogger(SearchHitCursor.class); + + public static final String NAME = "h"; + + private final byte[] nextQuery; + private final List extractors; + private final BitSet mask; + private final int limit; + private final boolean includeFrozen; + + /** + * @param nextQuery a serialized {@link SearchSourceBuilder} representing the query to fetch the next page. The query is serialized + * because cursors have to be (de)serialized on the transport layer in {@code TextFormat.PLAIN_TEXT.format} which does + * not have all the {@link NamedWriteable}s available that are required to deserialize + * {@link SearchSourceBuilder}. As a workaround, the deserialization of {@code nextQuery} is deferred until the query is + * needed.
+ */ + SearchHitCursor(byte[] nextQuery, List exts, BitSet mask, int remainingLimit, boolean includeFrozen) { + this.nextQuery = nextQuery; + this.extractors = exts; + this.mask = mask; + this.limit = remainingLimit; + this.includeFrozen = includeFrozen; + } + + public SearchHitCursor(StreamInput in) throws IOException { + nextQuery = in.readByteArray(); + limit = in.readVInt(); + + extractors = in.readNamedWriteableList(HitExtractor.class); + mask = BitSet.valueOf(in.readByteArray()); + includeFrozen = in.readBoolean(); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeByteArray(nextQuery); + out.writeVInt(limit); + + out.writeNamedWriteableList(extractors); + out.writeByteArray(mask.toByteArray()); + out.writeBoolean(includeFrozen); + } + + @Override + public String getWriteableName() { + return NAME; + } + + byte[] next() { + return nextQuery; + } + + BitSet mask() { + return mask; + } + + List extractors() { + return extractors; + } + + int limit() { + return limit; + } + + boolean includeFrozen() { + return includeFrozen; + } + + @Override + public void nextPage(SqlConfiguration cfg, Client client, NamedWriteableRegistry registry, ActionListener listener) { + SearchSourceBuilder q; + try { + q = deserializeQuery(registry, nextQuery); + } catch (Exception ex) { + listener.onFailure(ex); + return; + } + + SearchSourceBuilder query = q; + if (log.isTraceEnabled()) { + log.trace("About to execute search hit query {}", StringUtils.toString(query)); + } + + SearchRequest request = prepareRequest(query, cfg.requestTimeout(), includeFrozen); + + client.search( + request, + ActionListener.wrap( + (SearchResponse response) -> handle( + client, + response, + request.source(), + makeRowSet(query.size(), response), + listener, + includeFrozen + ), + listener::onFailure + ) + ); + } + + private Supplier makeRowSet(int sizeRequested, SearchResponse response) { + return () -> new SearchHitRowSet(extractors, mask, sizeRequested, limit, response); + } + + static void handle( + Client client, + SearchResponse response, + SearchSourceBuilder source, + Supplier makeRowSet, + ActionListener listener, + boolean includeFrozen + ) { + + if (log.isTraceEnabled()) { + logSearchResponse(response, log); + } + + SearchHit[] hits = response.getHits().getHits(); + + SearchHitRowSet rowSet = makeRowSet.get(); + + if (rowSet.hasRemaining() == false) { + closePointInTime( + client, + response.pointInTimeId(), + ActionListener.wrap(r -> listener.onResponse(Page.last(rowSet)), listener::onFailure) + ); + } else { + source.pointInTimeBuilder(new PointInTimeBuilder(response.pointInTimeId())); + updateSearchAfter(hits, source); + + byte[] nextQuery; + try { + nextQuery = serializeQuery(source); + } catch (IOException e) { + listener.onFailure(e); + return; + } + + SearchHitCursor nextCursor = new SearchHitCursor( + nextQuery, + rowSet.extractors(), + rowSet.mask(), + rowSet.getRemainingLimit(), + includeFrozen + ); + listener.onResponse(new Page(rowSet, nextCursor)); + } + } + + private static void updateSearchAfter(SearchHit[] hits, SearchSourceBuilder source) { + SearchHit lastHit = hits[hits.length - 1]; + source.searchAfter(lastHit.getSortValues()); + } + + @Override + public void clear(Client client, NamedWriteableRegistry registry, ActionListener listener) { + SearchSourceBuilder query; + try { + query = deserializeQuery(registry, nextQuery); + } catch (IOException e) { + listener.onFailure(e); + return; + } + Check.isTrue(query.pointInTimeBuilder() != null, "Expected cursor 
with point-in-time id but got null"); + closePointInTime(client, query.pointInTimeBuilder().getEncodedId(), listener); + } + + @Override + public int hashCode() { + return Objects.hash(Arrays.hashCode(nextQuery), extractors, limit, mask, includeFrozen); + } + + @Override + public boolean equals(Object obj) { + if (obj == null || obj.getClass() != getClass()) { + return false; + } + SearchHitCursor other = (SearchHitCursor) obj; + return Arrays.equals(nextQuery, other.nextQuery) + && Objects.equals(extractors, other.extractors) + && Objects.equals(limit, other.limit) + && Objects.equals(includeFrozen, other.includeFrozen); + } +} diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/SearchHitRowSet.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/SearchHitRowSet.java index 2b453ff827df5..ba6a9854e4254 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/SearchHitRowSet.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/SearchHitRowSet.java @@ -6,9 +6,7 @@ */ package org.elasticsearch.xpack.sql.execution.search; -import org.apache.lucene.search.TotalHits; import org.elasticsearch.action.search.SearchResponse; -import org.elasticsearch.core.Tuple; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchHits; import org.elasticsearch.xpack.ql.execution.search.extractor.HitExtractor; @@ -37,11 +35,11 @@ class SearchHitRowSet extends ResultRowSet { private final int size; private final int[] indexPerLevel; - private final Tuple nextScrollData; + private final int remainingLimit; private int row = 0; - SearchHitRowSet(List exts, BitSet mask, int limit, SearchResponse response) { + SearchHitRowSet(List exts, BitSet mask, int sizeRequested, int limit, SearchResponse response) { super(exts, mask); this.hits = response.getHits().getHits(); @@ -85,30 +83,22 @@ class SearchHitRowSet extends ResultRowSet { indexPerLevel = new int[maxDepth + 1]; this.innerHit = innerHit; - String scrollId = response.getScrollId(); - - if (scrollId == null) { - /* SearchResponse can contain a null scroll when you start a - * scroll but all results fit in the first page. */ - nextScrollData = null; + // compute remaining limit (only if the limit is specified - that is, positive). + int remaining = limit < 0 ? limit : limit - size; + // either the search returned fewer records than requested or the limit is exhausted + if (size < sizeRequested || remaining == 0) { + remainingLimit = 0; } else { - TotalHits totalHits = response.getHits().getTotalHits(); - - // compute remaining limit (only if the limit is specified - that is, positive). - int remainingLimit = limit < 0 ? 
limit : limit - size; - // if the computed limit is zero, or the size is zero it means either there's nothing left or the limit has been reached - if (size == 0 || remainingLimit == 0 - // or the scroll has ended - || totalHits != null && totalHits.value == hits.length) { - nextScrollData = null; - } else { - nextScrollData = new Tuple<>(scrollId, remainingLimit); - } + remainingLimit = remaining; } } - protected boolean isLimitReached() { - return nextScrollData == null; + public boolean hasRemaining() { + return remainingLimit != 0; + } + + public int getRemainingLimit() { + return remainingLimit; } @Override @@ -218,8 +208,4 @@ protected void doReset() { public int size() { return size; } - - Tuple nextScrollData() { - return nextScrollData; - } } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TextFormatterCursor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TextFormatterCursor.java index b55adc0af34d3..a6ba80e42708b 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TextFormatterCursor.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TextFormatterCursor.java @@ -59,8 +59,8 @@ public void nextPage(SqlConfiguration cfg, Client client, NamedWriteableRegistry } @Override - public void clear(Client client, ActionListener listener) { - delegate.clear(client, listener); + public void clear(Client client, NamedWriteableRegistry registry, ActionListener listener) { + delegate.clear(client, registry, listener); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/session/Cursor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/session/Cursor.java index 5dd5bb203c519..be45132d78314 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/session/Cursor.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/session/Cursor.java @@ -48,5 +48,5 @@ public static Page last(RowSet rowSet) { /** * Cleans the resources associated with the cursor */ - void clear(Client client, ActionListener listener); + void clear(Client client, NamedWriteableRegistry registry, ActionListener listener); } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/session/Cursors.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/session/Cursors.java index 9384e1b5f989e..b94b60a850dab 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/session/Cursors.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/session/Cursors.java @@ -16,7 +16,7 @@ import org.elasticsearch.xpack.sql.common.io.SqlStreamOutput; import org.elasticsearch.xpack.sql.execution.search.CompositeAggCursor; import org.elasticsearch.xpack.sql.execution.search.PivotCursor; -import org.elasticsearch.xpack.sql.execution.search.ScrollCursor; +import org.elasticsearch.xpack.sql.execution.search.SearchHitCursor; import org.elasticsearch.xpack.sql.execution.search.extractor.SqlBucketExtractors; import org.elasticsearch.xpack.sql.execution.search.extractor.SqlHitExtractors; import org.elasticsearch.xpack.sql.expression.function.scalar.Processors; @@ -46,7 +46,7 @@ public static List getNamedWriteables() { // cursors entries.add(new NamedWriteableRegistry.Entry(Cursor.class, EmptyCursor.NAME, in -> Cursor.EMPTY)); - entries.add(new NamedWriteableRegistry.Entry(Cursor.class, ScrollCursor.NAME, ScrollCursor::new)); + entries.add(new NamedWriteableRegistry.Entry(Cursor.class, SearchHitCursor.NAME, 
SearchHitCursor::new)); entries.add(new NamedWriteableRegistry.Entry(Cursor.class, CompositeAggCursor.NAME, CompositeAggCursor::new)); entries.add(new NamedWriteableRegistry.Entry(Cursor.class, PivotCursor.NAME, PivotCursor::new)); entries.add(new NamedWriteableRegistry.Entry(Cursor.class, TextFormatterCursor.NAME, TextFormatterCursor::new)); diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/session/EmptyCursor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/session/EmptyCursor.java index e1ed687e6006c..6e10629c5ffc9 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/session/EmptyCursor.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/session/EmptyCursor.java @@ -38,7 +38,7 @@ public void nextPage(SqlConfiguration cfg, Client client, NamedWriteableRegistry } @Override - public void clear(Client client, ActionListener listener) { + public void clear(Client client, NamedWriteableRegistry registry, ActionListener listener) { // There is nothing to clean listener.onResponse(false); } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/session/ListCursor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/session/ListCursor.java index fe72838695b78..f4cd86ac1b8dc 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/session/ListCursor.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/session/ListCursor.java @@ -87,7 +87,7 @@ public void nextPage(SqlConfiguration cfg, Client client, NamedWriteableRegistry } @Override - public void clear(Client client, ActionListener listener) { + public void clear(Client client, NamedWriteableRegistry registry, ActionListener listener) { listener.onResponse(true); } diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/CancellationTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/CancellationTests.java index 0d359480de949..30e1178a20781 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/CancellationTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/CancellationTests.java @@ -9,6 +9,11 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.fieldcaps.FieldCapabilities; import org.elasticsearch.action.fieldcaps.FieldCapabilitiesResponse; +import org.elasticsearch.action.search.ClosePointInTimeAction; +import org.elasticsearch.action.search.ClosePointInTimeRequest; +import org.elasticsearch.action.search.ClosePointInTimeResponse; +import org.elasticsearch.action.search.OpenPointInTimeAction; +import org.elasticsearch.action.search.OpenPointInTimeResponse; import org.elasticsearch.action.search.SearchAction; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchRequestBuilder; @@ -42,11 +47,13 @@ import java.util.HashMap; import java.util.Map; import java.util.concurrent.CountDownLatch; +import java.util.concurrent.TimeUnit; import static java.util.Collections.emptyMap; import static java.util.Collections.singletonMap; import static org.hamcrest.Matchers.instanceOf; import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.eq; import static org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.times; @@ -158,6 +165,7 @@ public void testCancellationDuringSearch() throws InterruptedException { ClusterService mockClusterService = 
mockClusterService(nodeId); String[] indices = new String[] { "endgame" }; + String pitId = randomAlphaOfLength(10); // Emulation of field capabilities FieldCapabilitiesResponse fieldCapabilitiesResponse = mock(FieldCapabilitiesResponse.class); @@ -170,12 +178,21 @@ public void testCancellationDuringSearch() throws InterruptedException { return null; }).when(client).fieldCaps(any(), any()); + // Emulation of open pit + doAnswer(invocation -> { + @SuppressWarnings("unchecked") + ActionListener listener = (ActionListener) invocation.getArguments()[2]; + listener.onResponse(new OpenPointInTimeResponse(pitId)); + return null; + }).when(client).execute(eq(OpenPointInTimeAction.INSTANCE), any(), any()); + // Emulation of search cancellation ArgumentCaptor searchRequestCaptor = ArgumentCaptor.forClass(SearchRequest.class); when(client.prepareSearch(any())).thenReturn(new SearchRequestBuilder(client, SearchAction.INSTANCE).setIndices(indices)); doAnswer((Answer) invocation -> { @SuppressWarnings("unchecked") SearchRequest request = (SearchRequest) invocation.getArguments()[1]; + assertEquals(pitId, request.pointInTimeBuilder().getEncodedId()); TaskId parentTask = request.getParentTask(); assertNotNull(parentTask); assertEquals(task.getId(), parentTask.getId()); @@ -184,7 +201,18 @@ public void testCancellationDuringSearch() throws InterruptedException { ActionListener listener = (ActionListener) invocation.getArguments()[2]; listener.onFailure(new TaskCancelledException("cancelled")); return null; - }).when(client).execute(any(), searchRequestCaptor.capture(), any()); + }).when(client).execute(eq(SearchAction.INSTANCE), searchRequestCaptor.capture(), any()); + + // Emulation of close pit + doAnswer(invocation -> { + ClosePointInTimeRequest request = (ClosePointInTimeRequest) invocation.getArguments()[1]; + assertEquals(pitId, request.getId()); + + @SuppressWarnings("unchecked") + ActionListener listener = (ActionListener) invocation.getArguments()[2]; + listener.onResponse(new ClosePointInTimeResponse(true, 1)); + return null; + }).when(client).execute(eq(ClosePointInTimeAction.INSTANCE), any(), any()); IndexResolver indexResolver = indexResolver(client); PlanExecutor planExecutor = new PlanExecutor(client, indexResolver, new NamedWriteableRegistry(Collections.emptyList())); @@ -204,10 +232,12 @@ public void onFailure(Exception e) { countDownLatch.countDown(); } }, "", mock(TransportService.class), mockClusterService); - countDownLatch.await(); + assertTrue(countDownLatch.await(5, TimeUnit.SECONDS)); // Final verification to ensure no more interaction verify(client).fieldCaps(any(), any()); - verify(client).execute(any(), any(), any()); + verify(client, times(1)).execute(eq(OpenPointInTimeAction.INSTANCE), any(), any()); + verify(client, times(1)).execute(eq(SearchAction.INSTANCE), any(), any()); + verify(client, times(1)).execute(eq(ClosePointInTimeAction.INSTANCE), any(), any()); verify(client, times(1)).settings(); verify(client, times(1)).threadPool(); verifyNoMoreInteractions(client); diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/QuerierTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/QuerierTests.java index 44016669595cf..bfe2394b8d822 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/QuerierTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/QuerierTests.java @@ -27,7 +27,7 @@ import java.util.concurrent.atomic.AtomicInteger; 
import static java.util.Collections.emptyList; -import static org.elasticsearch.xpack.sql.execution.search.ScrollCursorTests.randomHitExtractor; +import static org.elasticsearch.xpack.sql.execution.search.SearchHitCursorTests.randomHitExtractor; public class QuerierTests extends ESTestCase { diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/ScrollCursorTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/SearchHitCursorTests.java similarity index 72% rename from x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/ScrollCursorTests.java rename to x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/SearchHitCursorTests.java index f4e19175134fe..e7146e1664c88 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/ScrollCursorTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/SearchHitCursorTests.java @@ -21,18 +21,19 @@ import java.util.List; import java.util.function.Supplier; -public class ScrollCursorTests extends AbstractSqlWireSerializingTestCase { - public static ScrollCursor randomScrollCursor() { +public class SearchHitCursorTests extends AbstractSqlWireSerializingTestCase { + public static SearchHitCursor randomSearchHitCursor() { int extractorsSize = between(1, 20); List extractors = new ArrayList<>(extractorsSize); for (int i = 0; i < extractorsSize; i++) { extractors.add(randomHitExtractor(0)); } - return new ScrollCursor( - randomAlphaOfLength(5), + return new SearchHitCursor( + new byte[randomInt(256)], extractors, CompositeAggregationCursorTests.randomBitSet(extractorsSize), - randomIntBetween(10, 1024) + randomIntBetween(10, 1024), + randomBoolean() ); } @@ -46,12 +47,13 @@ static HitExtractor randomHitExtractor(int depth) { } @Override - protected ScrollCursor mutateInstance(ScrollCursor instance) throws IOException { - return new ScrollCursor( - instance.scrollId(), + protected SearchHitCursor mutateInstance(SearchHitCursor instance) throws IOException { + return new SearchHitCursor( + instance.next(), instance.extractors(), randomValueOtherThan(instance.mask(), () -> CompositeAggregationCursorTests.randomBitSet(instance.extractors().size())), - randomValueOtherThan(instance.limit(), () -> randomIntBetween(1, 1024)) + randomValueOtherThan(instance.limit(), () -> randomIntBetween(1, 1024)), + instance.includeFrozen() == false ); } @@ -61,22 +63,22 @@ protected NamedWriteableRegistry getNamedWriteableRegistry() { } @Override - protected ScrollCursor createTestInstance() { - return randomScrollCursor(); + protected SearchHitCursor createTestInstance() { + return randomSearchHitCursor(); } @Override - protected Reader instanceReader() { - return ScrollCursor::new; + protected Reader instanceReader() { + return SearchHitCursor::new; } @Override - protected ScrollCursor copyInstance(ScrollCursor instance, Version version) throws IOException { + protected SearchHitCursor copyInstance(SearchHitCursor instance, Version version) throws IOException { /* Randomly choose between internal protocol round trip and String based * round trips used to toXContent. 
*/ if (randomBoolean()) { return super.copyInstance(instance, version); } - return (ScrollCursor) CursorTests.decodeFromString(Cursors.encodeToString(instance, randomZone())); + return (SearchHitCursor) CursorTests.decodeFromString(Cursors.encodeToString(instance, randomZone())); } } diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/plugin/CursorTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/plugin/CursorTests.java index ea5a55b92e8fb..08e0f6fca8912 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/plugin/CursorTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/plugin/CursorTests.java @@ -7,8 +7,6 @@ package org.elasticsearch.xpack.sql.plugin; import org.elasticsearch.Version; -import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.search.ClearScrollRequest; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.client.internal.Client; import org.elasticsearch.common.logging.LoggerMessageFormat; @@ -16,26 +14,21 @@ import org.elasticsearch.xpack.sql.SqlIllegalArgumentException; import org.elasticsearch.xpack.sql.action.BasicFormatter; import org.elasticsearch.xpack.sql.action.SqlQueryResponse; -import org.elasticsearch.xpack.sql.execution.search.ScrollCursor; -import org.elasticsearch.xpack.sql.execution.search.ScrollCursorTests; +import org.elasticsearch.xpack.sql.execution.search.SearchHitCursorTests; import org.elasticsearch.xpack.sql.proto.ColumnInfo; import org.elasticsearch.xpack.sql.proto.Mode; import org.elasticsearch.xpack.sql.session.Cursor; import org.elasticsearch.xpack.sql.session.Cursors; import org.elasticsearch.xpack.sql.session.CursorsTestUtil; -import org.mockito.ArgumentCaptor; import java.util.ArrayList; -import java.util.BitSet; import java.util.Collections; import java.util.List; import java.util.function.Supplier; import static org.elasticsearch.action.support.PlainActionFuture.newFuture; import static org.elasticsearch.xpack.sql.proto.SqlVersion.DATE_NANOS_SUPPORT_VERSION; -import static org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.verify; import static org.mockito.Mockito.verifyNoMoreInteractions; public class CursorTests extends ESTestCase { @@ -44,26 +37,11 @@ public void testEmptyCursorClearCursor() { Client clientMock = mock(Client.class); Cursor cursor = Cursor.EMPTY; PlainActionFuture future = newFuture(); - cursor.clear(clientMock, future); + cursor.clear(clientMock, writableRegistry(), future); assertFalse(future.actionGet()); verifyNoMoreInteractions(clientMock); } - @SuppressWarnings("unchecked") - public void testScrollCursorClearCursor() { - Client clientMock = mock(Client.class); - ActionListener listenerMock = mock(ActionListener.class); - String cursorString = randomAlphaOfLength(10); - Cursor cursor = new ScrollCursor(cursorString, Collections.emptyList(), new BitSet(0), randomInt()); - - cursor.clear(clientMock, listenerMock); - - ArgumentCaptor request = ArgumentCaptor.forClass(ClearScrollRequest.class); - verify(clientMock).clearScroll(request.capture(), any(ActionListener.class)); - assertEquals(Collections.singletonList(cursorString), request.getValue().getScrollIds()); - verifyNoMoreInteractions(listenerMock); - } - private static SqlQueryResponse createRandomSqlResponse() { int columnCount = between(1, 10); @@ -79,25 +57,25 @@ private static SqlQueryResponse createRandomSqlResponse() { @SuppressWarnings("unchecked") static Cursor 
randomNonEmptyCursor() { - Supplier cursorSupplier = randomFrom(() -> ScrollCursorTests.randomScrollCursor(), () -> { + Supplier cursorSupplier = randomFrom(SearchHitCursorTests::randomSearchHitCursor, () -> { SqlQueryResponse response = createRandomSqlResponse(); if (response.columns() != null && response.rows() != null) { return new TextFormatterCursor( - ScrollCursorTests.randomScrollCursor(), + SearchHitCursorTests.randomSearchHitCursor(), new BasicFormatter(response.columns(), response.rows(), BasicFormatter.FormatOption.CLI) ); } else { - return ScrollCursorTests.randomScrollCursor(); + return SearchHitCursorTests.randomSearchHitCursor(); } }, () -> { SqlQueryResponse response = createRandomSqlResponse(); if (response.columns() != null && response.rows() != null) { return new TextFormatterCursor( - ScrollCursorTests.randomScrollCursor(), + SearchHitCursorTests.randomSearchHitCursor(), new BasicFormatter(response.columns(), response.rows(), BasicFormatter.FormatOption.TEXT) ); } else { - return ScrollCursorTests.randomScrollCursor(); + return SearchHitCursorTests.randomSearchHitCursor(); } }); return cursorSupplier.get(); diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/sql/sql.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/sql/sql.yml index 94de0e786a019..dc09cf91be72b 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/sql/sql.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/sql/sql.yml @@ -38,6 +38,9 @@ setup: --- "Paging through results": + - skip: + version: "8.1.99 - " + reason: PIT cursors introduced in 8.2 no longer return empty last page - do: sql.query: format: json @@ -73,6 +76,41 @@ setup: - is_false: cursor - length: { rows: 0 } +--- +"Paging through results with PIT cursor": + - skip: + version: " - 8.1.99" + reason: PIT cursors introduced in 8.2 no longer return empty last page + - do: + sql.query: + format: json + body: + query: "SELECT * FROM test ORDER BY int asc" + fetch_size: 2 + - match: { columns.0.name: int } + - match: { columns.1.name: str } + - match: { rows.0.0: 1 } + - match: { rows.0.1: test1 } + - match: { rows.1.0: 2 } + - match: { rows.1.1: test2 } + - is_true: cursor + - set: { cursor: cursor } + + - do: + sql.query: + format: json + body: + cursor: "$cursor" + - match: { rows.0.0: 3 } + - match: { rows.0.1: test3 } + - is_false: columns + - is_false: cursor + + - do: + indices.stats: { index: 'test' } + + - match: { indices.test.total.search.open_contexts: 0 } + --- "Getting textual representation": - do: From 909a52e2ec276d6357b21b1c55ce4f02d0353c2f Mon Sep 17 00:00:00 2001 From: Yannick Welsch Date: Tue, 15 Feb 2022 11:25:21 +0100 Subject: [PATCH 096/167] Test doc-value-based searches on older indices (#83844) Tests doc-value-based searches against indices from 5.x / 6.x clusters. 
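For illustration, the reused YAML tests run doc-value-backed searches of
roughly this shape (a sketch only; the authoritative field names, queries
and assertions are the ones in search/390_doc_values_search.yml):

  - do:
      search:
        index: test
        body:
          query:
            term:
              integer: 1
  - match: { hits.total.value: 1 }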
Relates #81210 --- .../rest/yaml/ESClientYamlSuiteTestCase.java | 6 +- .../qa/repository-old-versions/build.gradle | 18 +- .../oldrepos/DocValueOnlyFieldsIT.java | 222 ++++++++++++++++++ 3 files changed, 244 insertions(+), 2 deletions(-) create mode 100644 x-pack/qa/repository-old-versions/src/test/java/org/elasticsearch/oldrepos/DocValueOnlyFieldsIT.java diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ESClientYamlSuiteTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ESClientYamlSuiteTestCase.java index e70434f7225e4..a58c4e21e530c 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ESClientYamlSuiteTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ESClientYamlSuiteTestCase.java @@ -447,7 +447,7 @@ public void test() throws IOException { inFipsJvm() && testCandidate.getTestSection().getSkipSection().getFeatures().contains("fips_140") ); - if (testCandidate.getSetupSection().isEmpty() == false) { + if (skipSetupSections() == false && testCandidate.getSetupSection().isEmpty() == false) { logger.debug("start setup test [{}]", testCandidate.getTestPath()); for (ExecutableSection executableSection : testCandidate.getSetupSection().getExecutableSections()) { executeSection(executableSection); @@ -470,6 +470,10 @@ public void test() throws IOException { } } + protected boolean skipSetupSections() { + return false; + } + /** * Execute an {@link ExecutableSection}, careful to log its place of origin on failure. */ diff --git a/x-pack/qa/repository-old-versions/build.gradle b/x-pack/qa/repository-old-versions/build.gradle index 54e6958c58ac3..2581a4e5736ce 100644 --- a/x-pack/qa/repository-old-versions/build.gradle +++ b/x-pack/qa/repository-old-versions/build.gradle @@ -18,6 +18,7 @@ import org.gradle.api.internal.artifacts.ArtifactAttributes apply plugin: 'elasticsearch.jdk-download' apply plugin: 'elasticsearch.internal-testclusters' apply plugin: 'elasticsearch.standalone-rest-test' +apply plugin: 'elasticsearch.rest-resources' configurations { oldesFixture @@ -37,6 +38,15 @@ jdks { } } +restResources { + restApi { + include '_common', 'search' + } + restTests { + includeCore 'search/390_doc_values_search.yml' + } +} + if (Os.isFamily(Os.FAMILY_WINDOWS)) { logger.warn("Disabling repository-old-versions tests because we can't get the pid file on windows") tasks.named("testingConventions").configure { enabled = false } @@ -91,15 +101,21 @@ if (Os.isFamily(Os.FAMILY_WINDOWS)) { if (Architecture.current() == Architecture.AARCH64) { env 'ES_JAVA_OPTS', '-Xss512k' } + def dataPath = "${baseDir}/data" args 'oldes.OldElasticsearch', baseDir, "${ -> config.getSingleFile().toPath()}", false, - "path.repo: ${repoLocation}" + "path.repo: ${repoLocation}", + "path.data: ${dataPath}" if (version.onOrAfter('6.8.0') && Architecture.current() == Architecture.AARCH64) { // We need to explicitly disable ML when running old ES versions on ARM args 'xpack.ml.enabled: false' } + doFirst { + delete(dataPath) + mkdir(dataPath) + } maxWaitInSeconds 60 waitCondition = { fixture, ant -> // the fixture writes the ports file when Elasticsearch's HTTP service diff --git a/x-pack/qa/repository-old-versions/src/test/java/org/elasticsearch/oldrepos/DocValueOnlyFieldsIT.java b/x-pack/qa/repository-old-versions/src/test/java/org/elasticsearch/oldrepos/DocValueOnlyFieldsIT.java new file mode 100644 index 0000000000000..7df801a174e9d --- /dev/null +++ 
b/x-pack/qa/repository-old-versions/src/test/java/org/elasticsearch/oldrepos/DocValueOnlyFieldsIT.java @@ -0,0 +1,222 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.oldrepos; + +import com.carrotsearch.randomizedtesting.RandomizedTest; +import com.carrotsearch.randomizedtesting.annotations.Name; +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; + +import org.apache.http.HttpHost; +import org.elasticsearch.Build; +import org.elasticsearch.Version; +import org.elasticsearch.client.Request; +import org.elasticsearch.client.RestClient; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.settings.SecureString; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.core.Booleans; +import org.elasticsearch.core.PathUtils; +import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; +import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentFactory; +import org.junit.Before; + +import java.io.IOException; + +/** + * Tests doc-value-based searches against indices imported from clusters older than N-1. + * We reuse the YAML tests in search/390_doc_values_search.yml but have to do the setup + * manually here: the setup runs against the old cluster, which we can only reach through the + * low-level REST client, because the YAML test infrastructure only knows how to talk to + * newer ES versions. + * + * We mimic the setup in search/390_doc_values_search.yml here, but adapt it to work + * against older-version clusters. + */ +public class DocValueOnlyFieldsIT extends ESClientYamlSuiteTestCase { + + final Version oldVersion = Version.fromString(System.getProperty("tests.es.version")); + static boolean setupDone; + + public DocValueOnlyFieldsIT(@Name("yaml") ClientYamlTestCandidate testCandidate) { + super(testCandidate); + } + + @ParametersFactory + public static Iterable<Object[]> parameters() throws Exception { + return ESClientYamlSuiteTestCase.createParameters(); + } + + @Override + protected boolean preserveClusterUponCompletion() { + return true; + } + + @Override + protected Settings restClientSettings() { + String token = basicAuthHeaderValue("admin", new SecureString("admin-password".toCharArray())); + return Settings.builder().put(ThreadContext.PREFIX + ".Authorization", token).build(); + } + + @Override + public void test() throws IOException { + assumeTrue("feature currently only enabled in snapshot builds", Build.CURRENT.isSnapshot()); + super.test(); + } + + @Override + protected boolean skipSetupSections() { + // setup in the YAML file is replaced by the method below + return true; + } + + @Before + public void setupIndex() throws IOException { + assumeTrue("feature currently only enabled in snapshot builds", Build.CURRENT.isSnapshot()); + + final boolean afterRestart = Booleans.parseBoolean(System.getProperty("tests.after_restart")); + if (afterRestart) { + return; + } + + // The following is a bit of a hack. 
While we wish we could make this an @BeforeClass, it does not work because the client() is only + // initialized later, so we do it when running the first test + if (setupDone) { + return; + } + + setupDone = true; + + String repoLocation = PathUtils.get(System.getProperty("tests.repo.location")) + .resolve(RandomizedTest.getContext().getTargetClass().getName()) + .toString(); + + String indexName = "test"; + String repoName = "doc_values_repo"; + String snapshotName = "snap"; + String[] basicTypes = new String[] { + "byte", + "double", + "float", + "half_float", + "integer", + "long", + "short", + "boolean", + "keyword", + "ip", + "geo_point" }; // date is manually added as it needs further configuration + + int oldEsPort = Integer.parseInt(System.getProperty("tests.es.port")); + try (RestClient oldEs = RestClient.builder(new HttpHost("127.0.0.1", oldEsPort)).build()) { + Request createIndex = new Request("PUT", "/" + indexName); + int numberOfShards = randomIntBetween(1, 3); + + boolean multiTypes = oldVersion.before(Version.V_7_0_0); + + XContentBuilder settingsBuilder = XContentFactory.jsonBuilder() + .startObject() + .startObject("settings") + .field("index.number_of_shards", numberOfShards) + .endObject() + .startObject("mappings"); + if (multiTypes) { + settingsBuilder.startObject("doc"); + } + settingsBuilder.field("dynamic", false).startObject("properties"); + for (String type : basicTypes) { + settingsBuilder.startObject(type).field("type", type).endObject(); + } + settingsBuilder.startObject("date").field("type", "date").field("format", "yyyy/MM/dd").endObject(); + if (multiTypes) { + settingsBuilder.endObject(); + } + settingsBuilder.endObject().endObject().endObject(); + + createIndex.setJsonEntity(Strings.toString(settingsBuilder)); + assertOK(oldEs.performRequest(createIndex)); + + Request doc1 = new Request("PUT", "/" + indexName + "/" + "doc" + "/" + "1"); + doc1.addParameter("refresh", "true"); + XContentBuilder bodyDoc1 = XContentFactory.jsonBuilder() + .startObject() + .field("byte", 1) + .field("double", 1.0) + .field("float", 1.0) + .field("half_float", 1.0) + .field("integer", 1) + .field("long", 1) + .field("short", 1) + .field("date", "2017/01/01") + .field("keyword", "key1") + .field("boolean", false) + .field("ip", "192.168.0.1") + .array("geo_point", 13.5, 34.89) + .endObject(); + doc1.setJsonEntity(Strings.toString(bodyDoc1)); + assertOK(oldEs.performRequest(doc1)); + + Request doc2 = new Request("PUT", "/" + indexName + "/" + "doc" + "/" + "2"); + doc2.addParameter("refresh", "true"); + XContentBuilder bodyDoc2 = XContentFactory.jsonBuilder() + .startObject() + .field("byte", 2) + .field("double", 2.0) + .field("float", 2.0) + .field("half_float", 2.0) + .field("integer", 2) + .field("long", 2) + .field("short", 2) + .field("date", "2017/01/02") + .field("keyword", "key2") + .field("boolean", true) + .field("ip", "192.168.0.2") + .array("geo_point", -63.24, 31.0) + .endObject(); + doc2.setJsonEntity(Strings.toString(bodyDoc2)); + assertOK(oldEs.performRequest(doc2)); + + // register repo on old ES and take snapshot + Request createRepoRequest = new Request("PUT", "/_snapshot/" + repoName); + createRepoRequest.setJsonEntity(""" + {"type":"fs","settings":{"location":"%s"}} + """.formatted(repoLocation)); + assertOK(oldEs.performRequest(createRepoRequest)); + + Request createSnapshotRequest = new Request("PUT", "/_snapshot/" + repoName + "/" + snapshotName); + createSnapshotRequest.addParameter("wait_for_completion", "true");
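+            // The JSON body set on the next line limits the snapshot to the single test index; together with + // the wait_for_completion=true parameter added above, this PUT returns only once the snapshot has completed. + 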
createSnapshotRequest.setJsonEntity("{\"indices\":\"" + indexName + "\"}"); + assertOK(oldEs.performRequest(createSnapshotRequest)); + } + + // register repo on new ES and restore snapshot + Request createRepoRequest2 = new Request("PUT", "/_snapshot/" + repoName); + createRepoRequest2.setJsonEntity(""" + {"type":"fs","settings":{"location":"%s","allow_bwc_indices":true}} + """.formatted(repoLocation)); + assertOK(client().performRequest(createRepoRequest2)); + + final Request createRestoreRequest = new Request("POST", "/_snapshot/" + repoName + "/" + snapshotName + "/_restore"); + createRestoreRequest.addParameter("wait_for_completion", "true"); + createRestoreRequest.setJsonEntity("{\"indices\":\"" + indexName + "\"}"); + assertOK(client().performRequest(createRestoreRequest)); + + // add mappings (they will be auto-converted later) + Request putMappingsRequest = new Request("PUT", "/" + indexName + "/_mappings"); + XContentBuilder mappingsBuilder = XContentFactory.jsonBuilder().startObject().startObject("properties"); + for (String type : basicTypes) { + mappingsBuilder.startObject(type).field("type", type).field("index", false).endObject(); + } + mappingsBuilder.startObject("date").field("type", "date").field("index", false).field("format", "yyyy/MM/dd").endObject(); + mappingsBuilder.endObject().endObject(); + putMappingsRequest.setJsonEntity(Strings.toString(mappingsBuilder)); + assertOK(client().performRequest(putMappingsRequest)); + } +} From ad44b8852feffcfa5fbdc53a2c07563cabde8cd0 Mon Sep 17 00:00:00 2001 From: David Kyle Date: Tue, 15 Feb 2022 11:30:32 +0000 Subject: [PATCH 097/167] [ML] Delete ML HLRC client and request/response classes (#83912) --- .../client/MLRequestConverters.java | 907 ----- .../client/MachineLearningClient.java | 2992 ----------------- .../client/RestHighLevelClient.java | 18 - .../InferencePipelineAggregationBuilder.java | 138 - .../client/analytics/ParsedInference.java | 132 - .../client/ml/AbstractResultResponse.java | 49 - .../client/ml/CloseJobRequest.java | 183 - .../client/ml/CloseJobResponse.java | 80 - .../client/ml/DeleteCalendarEventRequest.java | 60 - .../client/ml/DeleteCalendarJobRequest.java | 70 - .../client/ml/DeleteCalendarRequest.java | 48 - .../ml/DeleteDataFrameAnalyticsRequest.java | 83 - .../client/ml/DeleteDatafeedRequest.java | 63 - .../client/ml/DeleteExpiredDataRequest.java | 104 - .../client/ml/DeleteExpiredDataResponse.java | 77 - .../client/ml/DeleteFilterRequest.java | 49 - .../client/ml/DeleteForecastRequest.java | 167 - .../client/ml/DeleteJobRequest.java | 86 - .../client/ml/DeleteJobResponse.java | 104 - .../client/ml/DeleteModelSnapshotRequest.java | 56 - .../ml/DeleteTrainedModelAliasRequest.java | 46 - .../client/ml/DeleteTrainedModelRequest.java | 53 - .../client/ml/EstimateModelMemoryRequest.java | 99 - .../ml/EstimateModelMemoryResponse.java | 69 - .../client/ml/EvaluateDataFrameRequest.java | 142 - .../client/ml/EvaluateDataFrameResponse.java | 103 - .../ml/ExplainDataFrameAnalyticsRequest.java | 61 - .../ml/ExplainDataFrameAnalyticsResponse.java | 83 - .../client/ml/FlushJobRequest.java | 181 - .../client/ml/FlushJobResponse.java | 104 - .../client/ml/ForecastJobRequest.java | 161 - .../client/ml/ForecastJobResponse.java | 90 - .../client/ml/GetBucketsRequest.java | 250 -- .../client/ml/GetBucketsResponse.java | 70 - .../client/ml/GetCalendarEventsRequest.java | 154 - .../client/ml/GetCalendarEventsResponse.java | 79 - .../client/ml/GetCalendarsRequest.java | 87 - .../client/ml/GetCalendarsResponse.java | 77 - 
.../client/ml/GetCategoriesRequest.java | 131 - .../client/ml/GetCategoriesResponse.java | 70 - .../ml/GetDataFrameAnalyticsRequest.java | 111 - .../ml/GetDataFrameAnalyticsResponse.java | 63 - .../ml/GetDataFrameAnalyticsStatsRequest.java | 88 - .../GetDataFrameAnalyticsStatsResponse.java | 101 - .../client/ml/GetDatafeedRequest.java | 148 - .../client/ml/GetDatafeedResponse.java | 80 - .../client/ml/GetDatafeedStatsRequest.java | 134 - .../client/ml/GetDatafeedStatsResponse.java | 79 - .../client/ml/GetFiltersRequest.java | 105 - .../client/ml/GetFiltersResponse.java | 80 - .../client/ml/GetInfluencersRequest.java | 212 -- .../client/ml/GetInfluencersResponse.java | 70 - .../client/ml/GetJobRequest.java | 148 - .../client/ml/GetJobResponse.java | 80 - .../client/ml/GetJobStatsRequest.java | 132 - .../client/ml/GetJobStatsResponse.java | 79 - .../client/ml/GetModelSnapshotsRequest.java | 191 -- .../client/ml/GetModelSnapshotsResponse.java | 71 - .../client/ml/GetOverallBucketsRequest.java | 251 -- .../client/ml/GetOverallBucketsResponse.java | 70 - .../client/ml/GetRecordsRequest.java | 211 -- .../client/ml/GetRecordsResponse.java | 70 - .../client/ml/GetTrainedModelsRequest.java | 198 -- .../client/ml/GetTrainedModelsResponse.java | 74 - .../ml/GetTrainedModelsStatsRequest.java | 92 - .../ml/GetTrainedModelsStatsResponse.java | 74 - .../client/ml/MlInfoRequest.java | 13 - .../client/ml/MlInfoResponse.java | 50 - .../client/ml/NodeAttributes.java | 140 - .../client/ml/OpenJobRequest.java | 116 - .../client/ml/OpenJobResponse.java | 100 - .../client/ml/PostCalendarEventRequest.java | 97 - .../client/ml/PostCalendarEventResponse.java | 81 - .../client/ml/PostDataRequest.java | 213 -- .../client/ml/PostDataResponse.java | 62 - .../client/ml/PreviewDatafeedRequest.java | 136 - .../client/ml/PreviewDatafeedResponse.java | 103 - .../client/ml/PutCalendarJobRequest.java | 70 - .../client/ml/PutCalendarRequest.java | 56 - .../client/ml/PutCalendarResponse.java | 65 - .../ml/PutDataFrameAnalyticsRequest.java | 65 - .../ml/PutDataFrameAnalyticsResponse.java | 46 - .../client/ml/PutDatafeedRequest.java | 68 - .../client/ml/PutDatafeedResponse.java | 59 - .../client/ml/PutFilterRequest.java | 68 - .../client/ml/PutFilterResponse.java | 59 - .../client/ml/PutJobRequest.java | 68 - .../client/ml/PutJobResponse.java | 59 - .../ml/PutTrainedModelAliasRequest.java | 56 - .../client/ml/PutTrainedModelRequest.java | 54 - .../client/ml/PutTrainedModelResponse.java | 51 - .../client/ml/RevertModelSnapshotRequest.java | 103 - .../ml/RevertModelSnapshotResponse.java | 81 - .../client/ml/SetUpgradeModeRequest.java | 75 - .../ml/StartDataFrameAnalyticsRequest.java | 62 - .../ml/StartDataFrameAnalyticsResponse.java | 85 - .../client/ml/StartDatafeedRequest.java | 144 - .../client/ml/StartDatafeedResponse.java | 100 - .../ml/StopDataFrameAnalyticsRequest.java | 89 - .../ml/StopDataFrameAnalyticsResponse.java | 73 - .../client/ml/StopDatafeedRequest.java | 182 - .../client/ml/StopDatafeedResponse.java | 81 - .../ml/UpdateDataFrameAnalyticsRequest.java | 65 - .../client/ml/UpdateDatafeedRequest.java | 64 - .../client/ml/UpdateFilterRequest.java | 140 - .../client/ml/UpdateJobRequest.java | 64 - .../client/ml/UpdateModelSnapshotRequest.java | 118 - .../ml/UpdateModelSnapshotResponse.java | 97 - .../ml/UpgradeJobModelSnapshotRequest.java | 108 - .../ml/UpgradeJobModelSnapshotResponse.java | 89 - .../client/ml/calendars/Calendar.java | 106 - .../client/ml/calendars/ScheduledEvent.java | 123 - 
.../client/ml/datafeed/ChunkingConfig.java | 118 - .../client/ml/datafeed/DatafeedConfig.java | 491 --- .../client/ml/datafeed/DatafeedState.java | 34 - .../client/ml/datafeed/DatafeedStats.java | 142 - .../ml/datafeed/DatafeedTimingStats.java | 166 - .../client/ml/datafeed/DatafeedUpdate.java | 455 --- .../ml/datafeed/DelayedDataCheckConfig.java | 119 - .../client/ml/dataframe/Classification.java | 550 --- .../ml/dataframe/DataFrameAnalysis.java | 16 - .../dataframe/DataFrameAnalyticsConfig.java | 341 -- .../DataFrameAnalyticsConfigUpdate.java | 174 - .../ml/dataframe/DataFrameAnalyticsDest.java | 111 - .../dataframe/DataFrameAnalyticsSource.java | 165 - .../ml/dataframe/DataFrameAnalyticsState.java | 29 - .../ml/dataframe/DataFrameAnalyticsStats.java | 188 -- ...ataFrameAnalysisNamedXContentProvider.java | 26 - .../client/ml/dataframe/OutlierDetection.java | 253 -- .../client/ml/dataframe/PhaseProgress.java | 82 - .../client/ml/dataframe/QueryConfig.java | 71 - .../client/ml/dataframe/Regression.java | 549 --- .../ml/dataframe/evaluation/Evaluation.java | 21 - .../evaluation/EvaluationMetric.java | 32 - .../MlEvaluationNamedXContentProvider.java | 216 -- .../classification/AccuracyMetric.java | 148 - .../classification/AucRocMetric.java | 95 - .../classification/Classification.java | 148 - .../MulticlassConfusionMatrixMetric.java | 289 -- .../classification/PerClassSingleValue.java | 72 - .../classification/PrecisionMetric.java | 135 - .../classification/RecallMetric.java | 135 - .../evaluation/common/AucRocPoint.java | 92 - .../evaluation/common/AucRocResult.java | 99 - .../AbstractConfusionMatrixMetric.java | 33 - .../outlierdetection/AucRocMetric.java | 85 - .../ConfusionMatrixMetric.java | 198 -- .../outlierdetection/OutlierDetection.java | 131 - .../outlierdetection/PrecisionMetric.java | 114 - .../outlierdetection/RecallMetric.java | 114 - .../evaluation/regression/HuberMetric.java | 138 - .../regression/MeanSquaredErrorMetric.java | 118 - .../MeanSquaredLogarithmicErrorMetric.java | 137 - .../evaluation/regression/RSquaredMetric.java | 120 - .../evaluation/regression/Regression.java | 132 - .../ml/dataframe/explain/FieldSelection.java | 161 - .../dataframe/explain/MemoryEstimation.java | 96 - .../ml/dataframe/stats/AnalysisStats.java | 18 - .../AnalysisStatsNamedXContentProvider.java | 37 - .../classification/ClassificationStats.java | 126 - .../stats/classification/Hyperparameters.java | 285 -- .../stats/classification/TimingStats.java | 79 - .../stats/classification/ValidationLoss.java | 78 - .../ml/dataframe/stats/common/DataCounts.java | 102 - .../ml/dataframe/stats/common/FoldValues.java | 79 - .../dataframe/stats/common/MemoryUsage.java | 133 - .../OutlierDetectionStats.java | 98 - .../stats/outlierdetection/Parameters.java | 142 - .../stats/outlierdetection/TimingStats.java | 65 - .../stats/regression/Hyperparameters.java | 270 -- .../stats/regression/RegressionStats.java | 126 - .../stats/regression/TimingStats.java | 79 - .../stats/regression/ValidationLoss.java | 78 - .../InferenceToXContentCompressor.java | 76 - .../MlInferenceNamedXContentProvider.java | 101 - .../ml/inference/NamedXContentObject.java | 23 - .../inference/NamedXContentObjectHelper.java | 60 - .../inference/SimpleBoundedInputStream.java | 55 - .../ml/inference/TrainedModelConfig.java | 504 --- .../ml/inference/TrainedModelDefinition.java | 119 - .../ml/inference/TrainedModelInput.java | 78 - .../ml/inference/TrainedModelStats.java | 128 - .../client/ml/inference/TrainedModelType.java | 26 - 
.../preprocessing/CustomWordEmbedding.java | 151 - .../preprocessing/FrequencyEncoding.java | 170 - .../ml/inference/preprocessing/Multi.java | 110 - .../ml/inference/preprocessing/NGram.java | 224 -- .../preprocessing/OneHotEncoding.java | 144 - .../inference/preprocessing/PreProcessor.java | 21 - .../preprocessing/TargetMeanEncoding.java | 192 -- .../inference/results/FeatureImportance.java | 175 - .../ml/inference/results/TopClassEntry.java | 113 - .../trainedmodel/ClassificationConfig.java | 119 - .../inference/trainedmodel/IndexLocation.java | 77 - .../trainedmodel/InferenceConfig.java | 14 - .../trainedmodel/InferenceStats.java | 160 - .../trainedmodel/RegressionConfig.java | 94 - .../ml/inference/trainedmodel/TargetType.java | 29 - .../inference/trainedmodel/TrainedModel.java | 25 - .../trainedmodel/TrainedModelLocation.java | 13 - .../trainedmodel/ensemble/Ensemble.java | 198 -- .../trainedmodel/ensemble/Exponent.java | 71 - .../ensemble/LogisticRegression.java | 71 - .../ensemble/OutputAggregator.java | 17 - .../trainedmodel/ensemble/WeightedMode.java | 77 - .../trainedmodel/ensemble/WeightedSum.java | 73 - .../langident/LangIdentNeuralNetwork.java | 94 - .../trainedmodel/langident/LangNetLayer.java | 111 - .../ml/inference/trainedmodel/tree/Tree.java | 231 -- .../inference/trainedmodel/tree/TreeNode.java | 286 -- .../client/ml/job/config/AnalysisConfig.java | 446 --- .../client/ml/job/config/AnalysisLimits.java | 131 - .../config/CategorizationAnalyzerConfig.java | 347 -- .../client/ml/job/config/DataDescription.java | 176 - .../config/DefaultDetectorDescription.java | 83 - .../client/ml/job/config/DetectionRule.java | 142 - .../client/ml/job/config/Detector.java | 377 --- .../ml/job/config/DetectorFunction.java | 84 - .../client/ml/job/config/FilterRef.java | 93 - .../client/ml/job/config/Job.java | 627 ---- .../client/ml/job/config/JobState.java | 32 - .../client/ml/job/config/JobUpdate.java | 588 ---- .../client/ml/job/config/MlFilter.java | 170 - .../client/ml/job/config/ModelPlotConfig.java | 93 - .../client/ml/job/config/Operator.java | 60 - .../PerPartitionCategorizationConfig.java | 86 - .../client/ml/job/config/RuleAction.java | 30 - .../client/ml/job/config/RuleCondition.java | 108 - .../client/ml/job/config/RuleScope.java | 123 - .../client/ml/job/process/DataCounts.java | 480 --- .../client/ml/job/process/ModelSizeStats.java | 606 ---- .../client/ml/job/process/ModelSnapshot.java | 361 -- .../client/ml/job/process/Quantiles.java | 104 - .../client/ml/job/process/TimingStats.java | 200 -- .../client/ml/job/results/AnomalyCause.java | 322 -- .../client/ml/job/results/AnomalyRecord.java | 476 --- .../client/ml/job/results/Bucket.java | 249 -- .../ml/job/results/BucketInfluencer.java | 196 -- .../ml/job/results/CategoryDefinition.java | 232 -- .../client/ml/job/results/Influence.java | 91 - .../client/ml/job/results/Influencer.java | 190 -- .../client/ml/job/results/OverallBucket.java | 206 -- .../client/ml/job/results/Result.java | 25 - .../client/ml/job/stats/ForecastStats.java | 165 - .../client/ml/job/stats/JobStats.java | 229 -- .../client/ml/job/stats/SimpleStats.java | 104 - ...icsearch.plugins.spi.NamedXContentProvider | 4 - 247 files changed, 35792 deletions(-) delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/MLRequestConverters.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/MachineLearningClient.java delete mode 100644 
client/rest-high-level/src/main/java/org/elasticsearch/client/analytics/InferencePipelineAggregationBuilder.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/analytics/ParsedInference.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/AbstractResultResponse.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/CloseJobRequest.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/CloseJobResponse.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteCalendarEventRequest.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteCalendarJobRequest.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteCalendarRequest.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteDataFrameAnalyticsRequest.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteDatafeedRequest.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteExpiredDataRequest.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteExpiredDataResponse.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteFilterRequest.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteForecastRequest.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteJobRequest.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteJobResponse.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteModelSnapshotRequest.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteTrainedModelAliasRequest.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteTrainedModelRequest.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/EstimateModelMemoryRequest.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/EstimateModelMemoryResponse.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/EvaluateDataFrameRequest.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/EvaluateDataFrameResponse.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/ExplainDataFrameAnalyticsRequest.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/ExplainDataFrameAnalyticsResponse.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/FlushJobRequest.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/FlushJobResponse.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/ForecastJobRequest.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/ForecastJobResponse.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetBucketsRequest.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetBucketsResponse.java delete mode 100644 
client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetCalendarEventsRequest.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetCalendarEventsResponse.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetCalendarsRequest.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetCalendarsResponse.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetCategoriesRequest.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetCategoriesResponse.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetDataFrameAnalyticsRequest.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetDataFrameAnalyticsResponse.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetDataFrameAnalyticsStatsRequest.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetDataFrameAnalyticsStatsResponse.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetDatafeedRequest.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetDatafeedResponse.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetDatafeedStatsRequest.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetDatafeedStatsResponse.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetFiltersRequest.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetFiltersResponse.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetInfluencersRequest.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetInfluencersResponse.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetJobRequest.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetJobResponse.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetJobStatsRequest.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetJobStatsResponse.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetModelSnapshotsRequest.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetModelSnapshotsResponse.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetOverallBucketsRequest.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetOverallBucketsResponse.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetRecordsRequest.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetRecordsResponse.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetTrainedModelsRequest.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetTrainedModelsResponse.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetTrainedModelsStatsRequest.java delete mode 100644 
client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetTrainedModelsStatsResponse.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/MlInfoRequest.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/MlInfoResponse.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/NodeAttributes.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/OpenJobRequest.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/OpenJobResponse.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PostCalendarEventRequest.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PostCalendarEventResponse.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PostDataRequest.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PostDataResponse.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PreviewDatafeedRequest.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PreviewDatafeedResponse.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutCalendarJobRequest.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutCalendarRequest.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutCalendarResponse.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutDataFrameAnalyticsRequest.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutDataFrameAnalyticsResponse.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutDatafeedRequest.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutDatafeedResponse.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutFilterRequest.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutFilterResponse.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutJobRequest.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutJobResponse.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutTrainedModelAliasRequest.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutTrainedModelRequest.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutTrainedModelResponse.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/RevertModelSnapshotRequest.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/RevertModelSnapshotResponse.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/SetUpgradeModeRequest.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/StartDataFrameAnalyticsRequest.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/StartDataFrameAnalyticsResponse.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/StartDatafeedRequest.java delete mode 100644 
client/rest-high-level/src/main/java/org/elasticsearch/client/ml/StartDatafeedResponse.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/StopDataFrameAnalyticsRequest.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/StopDataFrameAnalyticsResponse.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/StopDatafeedRequest.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/StopDatafeedResponse.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/UpdateDataFrameAnalyticsRequest.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/UpdateDatafeedRequest.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/UpdateFilterRequest.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/UpdateJobRequest.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/UpdateModelSnapshotRequest.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/UpdateModelSnapshotResponse.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/UpgradeJobModelSnapshotRequest.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/UpgradeJobModelSnapshotResponse.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/calendars/Calendar.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/calendars/ScheduledEvent.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/datafeed/ChunkingConfig.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/datafeed/DatafeedConfig.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/datafeed/DatafeedState.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/datafeed/DatafeedStats.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/datafeed/DatafeedTimingStats.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/datafeed/DatafeedUpdate.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/datafeed/DelayedDataCheckConfig.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/Classification.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/DataFrameAnalysis.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/DataFrameAnalyticsConfig.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/DataFrameAnalyticsConfigUpdate.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/DataFrameAnalyticsDest.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/DataFrameAnalyticsSource.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/DataFrameAnalyticsState.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/DataFrameAnalyticsStats.java delete mode 100644 
client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/MlDataFrameAnalysisNamedXContentProvider.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/OutlierDetection.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/PhaseProgress.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/QueryConfig.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/Regression.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/Evaluation.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/EvaluationMetric.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/MlEvaluationNamedXContentProvider.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/classification/AccuracyMetric.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/classification/AucRocMetric.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/classification/Classification.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/classification/MulticlassConfusionMatrixMetric.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/classification/PerClassSingleValue.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/classification/PrecisionMetric.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/classification/RecallMetric.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/common/AucRocPoint.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/common/AucRocResult.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/outlierdetection/AbstractConfusionMatrixMetric.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/outlierdetection/AucRocMetric.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/outlierdetection/ConfusionMatrixMetric.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/outlierdetection/OutlierDetection.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/outlierdetection/PrecisionMetric.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/outlierdetection/RecallMetric.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/regression/HuberMetric.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/regression/MeanSquaredErrorMetric.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/regression/MeanSquaredLogarithmicErrorMetric.java delete mode 100644 
client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/regression/RSquaredMetric.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/regression/Regression.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/explain/FieldSelection.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/explain/MemoryEstimation.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/AnalysisStats.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/AnalysisStatsNamedXContentProvider.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/classification/ClassificationStats.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/classification/Hyperparameters.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/classification/TimingStats.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/classification/ValidationLoss.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/common/DataCounts.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/common/FoldValues.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/common/MemoryUsage.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/outlierdetection/OutlierDetectionStats.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/outlierdetection/Parameters.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/outlierdetection/TimingStats.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/regression/Hyperparameters.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/regression/RegressionStats.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/regression/TimingStats.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/regression/ValidationLoss.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/InferenceToXContentCompressor.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/MlInferenceNamedXContentProvider.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/NamedXContentObject.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/NamedXContentObjectHelper.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/SimpleBoundedInputStream.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/TrainedModelConfig.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/TrainedModelDefinition.java delete mode 100644 
client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/TrainedModelInput.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/TrainedModelStats.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/TrainedModelType.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/preprocessing/CustomWordEmbedding.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/preprocessing/FrequencyEncoding.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/preprocessing/Multi.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/preprocessing/NGram.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/preprocessing/OneHotEncoding.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/preprocessing/PreProcessor.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/preprocessing/TargetMeanEncoding.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/results/FeatureImportance.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/results/TopClassEntry.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/ClassificationConfig.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/IndexLocation.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/InferenceConfig.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/InferenceStats.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/RegressionConfig.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/TargetType.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/TrainedModel.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/TrainedModelLocation.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/ensemble/Ensemble.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/ensemble/Exponent.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/ensemble/LogisticRegression.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/ensemble/OutputAggregator.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/ensemble/WeightedMode.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/ensemble/WeightedSum.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/langident/LangIdentNeuralNetwork.java delete mode 100644 
client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/langident/LangNetLayer.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/tree/Tree.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/tree/TreeNode.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/AnalysisConfig.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/AnalysisLimits.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/CategorizationAnalyzerConfig.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/DataDescription.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/DefaultDetectorDescription.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/DetectionRule.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/Detector.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/DetectorFunction.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/FilterRef.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/Job.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/JobState.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/JobUpdate.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/MlFilter.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/ModelPlotConfig.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/Operator.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/PerPartitionCategorizationConfig.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/RuleAction.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/RuleCondition.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/RuleScope.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/process/DataCounts.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/process/ModelSizeStats.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/process/ModelSnapshot.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/process/Quantiles.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/process/TimingStats.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/results/AnomalyCause.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/results/AnomalyRecord.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/results/Bucket.java delete mode 100644 
client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/results/BucketInfluencer.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/results/CategoryDefinition.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/results/Influence.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/results/Influencer.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/results/OverallBucket.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/results/Result.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/stats/ForecastStats.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/stats/JobStats.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/stats/SimpleStats.java diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/MLRequestConverters.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/MLRequestConverters.java deleted file mode 100644 index 7c036510d0790..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/MLRequestConverters.java +++ /dev/null @@ -1,907 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ - -package org.elasticsearch.client; - -import org.apache.http.HttpEntity; -import org.apache.http.client.methods.HttpDelete; -import org.apache.http.client.methods.HttpGet; -import org.apache.http.client.methods.HttpPost; -import org.apache.http.client.methods.HttpPut; -import org.apache.http.nio.entity.NByteArrayEntity; -import org.apache.lucene.util.BytesRef; -import org.elasticsearch.client.RequestConverters.EndpointBuilder; -import org.elasticsearch.client.core.PageParams; -import org.elasticsearch.client.ml.CloseJobRequest; -import org.elasticsearch.client.ml.DeleteCalendarEventRequest; -import org.elasticsearch.client.ml.DeleteCalendarJobRequest; -import org.elasticsearch.client.ml.DeleteCalendarRequest; -import org.elasticsearch.client.ml.DeleteDataFrameAnalyticsRequest; -import org.elasticsearch.client.ml.DeleteDatafeedRequest; -import org.elasticsearch.client.ml.DeleteExpiredDataRequest; -import org.elasticsearch.client.ml.DeleteFilterRequest; -import org.elasticsearch.client.ml.DeleteForecastRequest; -import org.elasticsearch.client.ml.DeleteJobRequest; -import org.elasticsearch.client.ml.DeleteModelSnapshotRequest; -import org.elasticsearch.client.ml.DeleteTrainedModelAliasRequest; -import org.elasticsearch.client.ml.DeleteTrainedModelRequest; -import org.elasticsearch.client.ml.EstimateModelMemoryRequest; -import org.elasticsearch.client.ml.EvaluateDataFrameRequest; -import org.elasticsearch.client.ml.ExplainDataFrameAnalyticsRequest; -import org.elasticsearch.client.ml.FlushJobRequest; -import org.elasticsearch.client.ml.ForecastJobRequest; -import org.elasticsearch.client.ml.GetBucketsRequest; -import org.elasticsearch.client.ml.GetCalendarEventsRequest; -import org.elasticsearch.client.ml.GetCalendarsRequest; -import org.elasticsearch.client.ml.GetCategoriesRequest; -import 
org.elasticsearch.client.ml.GetDataFrameAnalyticsRequest; -import org.elasticsearch.client.ml.GetDataFrameAnalyticsStatsRequest; -import org.elasticsearch.client.ml.GetDatafeedRequest; -import org.elasticsearch.client.ml.GetDatafeedStatsRequest; -import org.elasticsearch.client.ml.GetFiltersRequest; -import org.elasticsearch.client.ml.GetInfluencersRequest; -import org.elasticsearch.client.ml.GetJobRequest; -import org.elasticsearch.client.ml.GetJobStatsRequest; -import org.elasticsearch.client.ml.GetModelSnapshotsRequest; -import org.elasticsearch.client.ml.GetOverallBucketsRequest; -import org.elasticsearch.client.ml.GetRecordsRequest; -import org.elasticsearch.client.ml.GetTrainedModelsRequest; -import org.elasticsearch.client.ml.GetTrainedModelsStatsRequest; -import org.elasticsearch.client.ml.MlInfoRequest; -import org.elasticsearch.client.ml.OpenJobRequest; -import org.elasticsearch.client.ml.PostCalendarEventRequest; -import org.elasticsearch.client.ml.PostDataRequest; -import org.elasticsearch.client.ml.PreviewDatafeedRequest; -import org.elasticsearch.client.ml.PutCalendarJobRequest; -import org.elasticsearch.client.ml.PutCalendarRequest; -import org.elasticsearch.client.ml.PutDataFrameAnalyticsRequest; -import org.elasticsearch.client.ml.PutDatafeedRequest; -import org.elasticsearch.client.ml.PutFilterRequest; -import org.elasticsearch.client.ml.PutJobRequest; -import org.elasticsearch.client.ml.PutTrainedModelAliasRequest; -import org.elasticsearch.client.ml.PutTrainedModelRequest; -import org.elasticsearch.client.ml.RevertModelSnapshotRequest; -import org.elasticsearch.client.ml.SetUpgradeModeRequest; -import org.elasticsearch.client.ml.StartDataFrameAnalyticsRequest; -import org.elasticsearch.client.ml.StartDatafeedRequest; -import org.elasticsearch.client.ml.StopDataFrameAnalyticsRequest; -import org.elasticsearch.client.ml.StopDatafeedRequest; -import org.elasticsearch.client.ml.UpdateDataFrameAnalyticsRequest; -import org.elasticsearch.client.ml.UpdateDatafeedRequest; -import org.elasticsearch.client.ml.UpdateFilterRequest; -import org.elasticsearch.client.ml.UpdateJobRequest; -import org.elasticsearch.client.ml.UpdateModelSnapshotRequest; -import org.elasticsearch.client.ml.UpgradeJobModelSnapshotRequest; -import org.elasticsearch.common.Strings; -import org.elasticsearch.common.bytes.BytesReference; - -import java.io.IOException; - -import static org.elasticsearch.client.RequestConverters.REQUEST_BODY_CONTENT_TYPE; -import static org.elasticsearch.client.RequestConverters.createContentType; -import static org.elasticsearch.client.RequestConverters.createEntity; - -final class MLRequestConverters { - - private MLRequestConverters() {} - - static Request putJob(PutJobRequest putJobRequest) throws IOException { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml") - .addPathPartAsIs("anomaly_detectors") - .addPathPart(putJobRequest.getJob().getId()) - .build(); - Request request = new Request(HttpPut.METHOD_NAME, endpoint); - request.setEntity(createEntity(putJobRequest, REQUEST_BODY_CONTENT_TYPE)); - return request; - } - - static Request getJob(GetJobRequest getJobRequest) { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml") - .addPathPartAsIs("anomaly_detectors") - .addPathPart(Strings.collectionToCommaDelimitedString(getJobRequest.getJobIds())) - .build(); - Request request = new Request(HttpGet.METHOD_NAME, endpoint); - - RequestConverters.Params params = new RequestConverters.Params(); - if (getJobRequest.getAllowNoMatch() != null) { - 
params.putParam(GetJobRequest.ALLOW_NO_MATCH.getPreferredName(), Boolean.toString(getJobRequest.getAllowNoMatch())); - } - if (getJobRequest.getExcludeGenerated() != null) { - params.putParam(GetJobRequest.EXCLUDE_GENERATED, Boolean.toString(getJobRequest.getExcludeGenerated())); - } - request.addParameters(params.asMap()); - return request; - } - - static Request getJobStats(GetJobStatsRequest getJobStatsRequest) { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml") - .addPathPartAsIs("anomaly_detectors") - .addPathPart(Strings.collectionToCommaDelimitedString(getJobStatsRequest.getJobIds())) - .addPathPartAsIs("_stats") - .build(); - Request request = new Request(HttpGet.METHOD_NAME, endpoint); - - RequestConverters.Params params = new RequestConverters.Params(); - if (getJobStatsRequest.getAllowNoMatch() != null) { - params.putParam("allow_no_match", Boolean.toString(getJobStatsRequest.getAllowNoMatch())); - } - request.addParameters(params.asMap()); - return request; - } - - static Request openJob(OpenJobRequest openJobRequest) throws IOException { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml") - .addPathPartAsIs("anomaly_detectors") - .addPathPart(openJobRequest.getJobId()) - .addPathPartAsIs("_open") - .build(); - Request request = new Request(HttpPost.METHOD_NAME, endpoint); - request.setEntity(createEntity(openJobRequest, REQUEST_BODY_CONTENT_TYPE)); - return request; - } - - static Request closeJob(CloseJobRequest closeJobRequest) throws IOException { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml") - .addPathPartAsIs("anomaly_detectors") - .addPathPart(Strings.collectionToCommaDelimitedString(closeJobRequest.getJobIds())) - .addPathPartAsIs("_close") - .build(); - Request request = new Request(HttpPost.METHOD_NAME, endpoint); - request.setEntity(createEntity(closeJobRequest, REQUEST_BODY_CONTENT_TYPE)); - return request; - } - - static Request deleteExpiredData(DeleteExpiredDataRequest deleteExpiredDataRequest) throws IOException { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml") - .addPathPartAsIs("_delete_expired_data") - .addPathPart(deleteExpiredDataRequest.getJobId()) - .build(); - Request request = new Request(HttpDelete.METHOD_NAME, endpoint); - request.setEntity(createEntity(deleteExpiredDataRequest, REQUEST_BODY_CONTENT_TYPE)); - return request; - } - - static Request deleteJob(DeleteJobRequest deleteJobRequest) { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml") - .addPathPartAsIs("anomaly_detectors") - .addPathPart(deleteJobRequest.getJobId()) - .build(); - Request request = new Request(HttpDelete.METHOD_NAME, endpoint); - - RequestConverters.Params params = new RequestConverters.Params(); - if (deleteJobRequest.getForce() != null) { - params.putParam("force", Boolean.toString(deleteJobRequest.getForce())); - } - if (deleteJobRequest.getWaitForCompletion() != null) { - params.putParam("wait_for_completion", Boolean.toString(deleteJobRequest.getWaitForCompletion())); - } - request.addParameters(params.asMap()); - return request; - } - - static Request flushJob(FlushJobRequest flushJobRequest) throws IOException { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml") - .addPathPartAsIs("anomaly_detectors") - .addPathPart(flushJobRequest.getJobId()) - .addPathPartAsIs("_flush") - .build(); - Request request = new Request(HttpPost.METHOD_NAME, endpoint); - request.setEntity(createEntity(flushJobRequest, REQUEST_BODY_CONTENT_TYPE)); - return request; - } - - static Request 
forecastJob(ForecastJobRequest forecastJobRequest) throws IOException { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml") - .addPathPartAsIs("anomaly_detectors") - .addPathPart(forecastJobRequest.getJobId()) - .addPathPartAsIs("_forecast") - .build(); - Request request = new Request(HttpPost.METHOD_NAME, endpoint); - request.setEntity(createEntity(forecastJobRequest, REQUEST_BODY_CONTENT_TYPE)); - return request; - } - - static Request updateJob(UpdateJobRequest updateJobRequest) throws IOException { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml") - .addPathPartAsIs("anomaly_detectors") - .addPathPart(updateJobRequest.getJobUpdate().getJobId()) - .addPathPartAsIs("_update") - .build(); - Request request = new Request(HttpPost.METHOD_NAME, endpoint); - request.setEntity(createEntity(updateJobRequest.getJobUpdate(), REQUEST_BODY_CONTENT_TYPE)); - return request; - } - - static Request putDatafeed(PutDatafeedRequest putDatafeedRequest) throws IOException { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml") - .addPathPartAsIs("datafeeds") - .addPathPart(putDatafeedRequest.getDatafeed().getId()) - .build(); - Request request = new Request(HttpPut.METHOD_NAME, endpoint); - request.setEntity(createEntity(putDatafeedRequest, REQUEST_BODY_CONTENT_TYPE)); - return request; - } - - static Request updateDatafeed(UpdateDatafeedRequest updateDatafeedRequest) throws IOException { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml") - .addPathPartAsIs("datafeeds") - .addPathPart(updateDatafeedRequest.getDatafeedUpdate().getId()) - .addPathPartAsIs("_update") - .build(); - Request request = new Request(HttpPost.METHOD_NAME, endpoint); - request.setEntity(createEntity(updateDatafeedRequest, REQUEST_BODY_CONTENT_TYPE)); - return request; - } - - static Request getDatafeed(GetDatafeedRequest getDatafeedRequest) { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml") - .addPathPartAsIs("datafeeds") - .addPathPart(Strings.collectionToCommaDelimitedString(getDatafeedRequest.getDatafeedIds())) - .build(); - Request request = new Request(HttpGet.METHOD_NAME, endpoint); - - RequestConverters.Params params = new RequestConverters.Params(); - if (getDatafeedRequest.getAllowNoMatch() != null) { - params.putParam(GetDatafeedRequest.ALLOW_NO_MATCH.getPreferredName(), Boolean.toString(getDatafeedRequest.getAllowNoMatch())); - } - if (getDatafeedRequest.getExcludeGenerated() != null) { - params.putParam(GetDatafeedRequest.EXCLUDE_GENERATED, Boolean.toString(getDatafeedRequest.getExcludeGenerated())); - } - request.addParameters(params.asMap()); - return request; - } - - static Request deleteDatafeed(DeleteDatafeedRequest deleteDatafeedRequest) { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml") - .addPathPartAsIs("datafeeds") - .addPathPart(deleteDatafeedRequest.getDatafeedId()) - .build(); - Request request = new Request(HttpDelete.METHOD_NAME, endpoint); - RequestConverters.Params params = new RequestConverters.Params(); - if (deleteDatafeedRequest.getForce() != null) { - params.putParam("force", Boolean.toString(deleteDatafeedRequest.getForce())); - } - request.addParameters(params.asMap()); - return request; - } - - static Request startDatafeed(StartDatafeedRequest startDatafeedRequest) throws IOException { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml") - .addPathPartAsIs("datafeeds") - .addPathPart(startDatafeedRequest.getDatafeedId()) - .addPathPartAsIs("_start") - .build(); - Request request = new 
Request(HttpPost.METHOD_NAME, endpoint); - request.setEntity(createEntity(startDatafeedRequest, REQUEST_BODY_CONTENT_TYPE)); - return request; - } - - static Request stopDatafeed(StopDatafeedRequest stopDatafeedRequest) throws IOException { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml") - .addPathPartAsIs("datafeeds") - .addPathPart(Strings.collectionToCommaDelimitedString(stopDatafeedRequest.getDatafeedIds())) - .addPathPartAsIs("_stop") - .build(); - Request request = new Request(HttpPost.METHOD_NAME, endpoint); - request.setEntity(createEntity(stopDatafeedRequest, REQUEST_BODY_CONTENT_TYPE)); - return request; - } - - static Request getDatafeedStats(GetDatafeedStatsRequest getDatafeedStatsRequest) { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml") - .addPathPartAsIs("datafeeds") - .addPathPart(Strings.collectionToCommaDelimitedString(getDatafeedStatsRequest.getDatafeedIds())) - .addPathPartAsIs("_stats") - .build(); - Request request = new Request(HttpGet.METHOD_NAME, endpoint); - - RequestConverters.Params params = new RequestConverters.Params(); - if (getDatafeedStatsRequest.getAllowNoMatch() != null) { - params.putParam("allow_no_match", Boolean.toString(getDatafeedStatsRequest.getAllowNoMatch())); - } - request.addParameters(params.asMap()); - return request; - } - - static Request previewDatafeed(PreviewDatafeedRequest previewDatafeedRequest) throws IOException { - EndpointBuilder builder = new EndpointBuilder().addPathPartAsIs("_ml").addPathPartAsIs("datafeeds"); - String endpoint = previewDatafeedRequest.getDatafeedId() != null - ? builder.addPathPart(previewDatafeedRequest.getDatafeedId()).addPathPartAsIs("_preview").build() - : builder.addPathPartAsIs("_preview").build(); - Request request = new Request(HttpPost.METHOD_NAME, endpoint); - if (previewDatafeedRequest.getDatafeedId() == null) { - request.setEntity(createEntity(previewDatafeedRequest, REQUEST_BODY_CONTENT_TYPE)); - } - return request; - } - - static Request deleteForecast(DeleteForecastRequest deleteForecastRequest) { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml") - .addPathPartAsIs("anomaly_detectors") - .addPathPart(deleteForecastRequest.getJobId()) - .addPathPartAsIs("_forecast") - .addPathPart(Strings.collectionToCommaDelimitedString(deleteForecastRequest.getForecastIds())) - .build(); - Request request = new Request(HttpDelete.METHOD_NAME, endpoint); - RequestConverters.Params params = new RequestConverters.Params(); - if (deleteForecastRequest.getAllowNoForecasts() != null) { - params.putParam("allow_no_forecasts", Boolean.toString(deleteForecastRequest.getAllowNoForecasts())); - } - if (deleteForecastRequest.timeout() != null) { - params.putParam("timeout", deleteForecastRequest.timeout().getStringRep()); - } - request.addParameters(params.asMap()); - return request; - } - - static Request deleteModelSnapshot(DeleteModelSnapshotRequest deleteModelSnapshotRequest) { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml") - .addPathPartAsIs("anomaly_detectors") - .addPathPart(deleteModelSnapshotRequest.getJobId()) - .addPathPartAsIs("model_snapshots") - .addPathPart(deleteModelSnapshotRequest.getSnapshotId()) - .build(); - return new Request(HttpDelete.METHOD_NAME, endpoint); - } - - static Request getBuckets(GetBucketsRequest getBucketsRequest) throws IOException { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml") - .addPathPartAsIs("anomaly_detectors") - .addPathPart(getBucketsRequest.getJobId()) - .addPathPartAsIs("results") - 
.addPathPartAsIs("buckets") - .build(); - Request request = new Request(HttpGet.METHOD_NAME, endpoint); - request.setEntity(createEntity(getBucketsRequest, REQUEST_BODY_CONTENT_TYPE)); - return request; - } - - static Request getCategories(GetCategoriesRequest getCategoriesRequest) throws IOException { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml") - .addPathPartAsIs("anomaly_detectors") - .addPathPart(getCategoriesRequest.getJobId()) - .addPathPartAsIs("results") - .addPathPartAsIs("categories") - .build(); - Request request = new Request(HttpGet.METHOD_NAME, endpoint); - request.setEntity(createEntity(getCategoriesRequest, REQUEST_BODY_CONTENT_TYPE)); - return request; - } - - static Request getModelSnapshots(GetModelSnapshotsRequest getModelSnapshotsRequest) throws IOException { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml") - .addPathPartAsIs("anomaly_detectors") - .addPathPart(getModelSnapshotsRequest.getJobId()) - .addPathPartAsIs("model_snapshots") - .build(); - Request request = new Request(HttpGet.METHOD_NAME, endpoint); - request.setEntity(createEntity(getModelSnapshotsRequest, REQUEST_BODY_CONTENT_TYPE)); - return request; - } - - static Request updateModelSnapshot(UpdateModelSnapshotRequest updateModelSnapshotRequest) throws IOException { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml") - .addPathPartAsIs("anomaly_detectors") - .addPathPart(updateModelSnapshotRequest.getJobId()) - .addPathPartAsIs("model_snapshots") - .addPathPart(updateModelSnapshotRequest.getSnapshotId()) - .addPathPartAsIs("_update") - .build(); - Request request = new Request(HttpPost.METHOD_NAME, endpoint); - request.setEntity(createEntity(updateModelSnapshotRequest, REQUEST_BODY_CONTENT_TYPE)); - return request; - } - - static Request upgradeJobSnapshot(UpgradeJobModelSnapshotRequest upgradeJobModelSnapshotRequest) { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml") - .addPathPartAsIs("anomaly_detectors") - .addPathPart(upgradeJobModelSnapshotRequest.getJobId()) - .addPathPartAsIs("model_snapshots") - .addPathPart(upgradeJobModelSnapshotRequest.getSnapshotId()) - .addPathPartAsIs("_upgrade") - .build(); - Request request = new Request(HttpPost.METHOD_NAME, endpoint); - RequestConverters.Params params = new RequestConverters.Params(); - if (upgradeJobModelSnapshotRequest.getTimeout() != null) { - params.putParam( - UpgradeJobModelSnapshotRequest.TIMEOUT.getPreferredName(), - upgradeJobModelSnapshotRequest.getTimeout().getStringRep() - ); - } - if (upgradeJobModelSnapshotRequest.getWaitForCompletion() != null) { - params.putParam( - UpgradeJobModelSnapshotRequest.WAIT_FOR_COMPLETION.getPreferredName(), - upgradeJobModelSnapshotRequest.getWaitForCompletion().toString() - ); - } - request.addParameters(params.asMap()); - return request; - } - - static Request revertModelSnapshot(RevertModelSnapshotRequest revertModelSnapshotsRequest) throws IOException { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml") - .addPathPartAsIs("anomaly_detectors") - .addPathPart(revertModelSnapshotsRequest.getJobId()) - .addPathPartAsIs("model_snapshots") - .addPathPart(revertModelSnapshotsRequest.getSnapshotId()) - .addPathPart("_revert") - .build(); - Request request = new Request(HttpPost.METHOD_NAME, endpoint); - request.setEntity(createEntity(revertModelSnapshotsRequest, REQUEST_BODY_CONTENT_TYPE)); - return request; - } - - static Request getOverallBuckets(GetOverallBucketsRequest getOverallBucketsRequest) throws IOException { - String endpoint 
= new EndpointBuilder().addPathPartAsIs("_ml") - .addPathPartAsIs("anomaly_detectors") - .addPathPart(Strings.collectionToCommaDelimitedString(getOverallBucketsRequest.getJobIds())) - .addPathPartAsIs("results") - .addPathPartAsIs("overall_buckets") - .build(); - Request request = new Request(HttpGet.METHOD_NAME, endpoint); - request.setEntity(createEntity(getOverallBucketsRequest, REQUEST_BODY_CONTENT_TYPE)); - return request; - } - - static Request getRecords(GetRecordsRequest getRecordsRequest) throws IOException { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml") - .addPathPartAsIs("anomaly_detectors") - .addPathPart(getRecordsRequest.getJobId()) - .addPathPartAsIs("results") - .addPathPartAsIs("records") - .build(); - Request request = new Request(HttpGet.METHOD_NAME, endpoint); - request.setEntity(createEntity(getRecordsRequest, REQUEST_BODY_CONTENT_TYPE)); - return request; - } - - static Request postData(PostDataRequest postDataRequest) { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml") - .addPathPartAsIs("anomaly_detectors") - .addPathPart(postDataRequest.getJobId()) - .addPathPartAsIs("_data") - .build(); - Request request = new Request(HttpPost.METHOD_NAME, endpoint); - - RequestConverters.Params params = new RequestConverters.Params(); - if (postDataRequest.getResetStart() != null) { - params.putParam(PostDataRequest.RESET_START.getPreferredName(), postDataRequest.getResetStart()); - } - if (postDataRequest.getResetEnd() != null) { - params.putParam(PostDataRequest.RESET_END.getPreferredName(), postDataRequest.getResetEnd()); - } - BytesReference content = postDataRequest.getContent(); - request.addParameters(params.asMap()); - if (content != null) { - BytesRef source = postDataRequest.getContent().toBytesRef(); - HttpEntity byteEntity = new NByteArrayEntity( - source.bytes, - source.offset, - source.length, - createContentType(postDataRequest.getXContentType()) - ); - request.setEntity(byteEntity); - } - return request; - } - - static Request getInfluencers(GetInfluencersRequest getInfluencersRequest) throws IOException { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml") - .addPathPartAsIs("anomaly_detectors") - .addPathPart(getInfluencersRequest.getJobId()) - .addPathPartAsIs("results") - .addPathPartAsIs("influencers") - .build(); - Request request = new Request(HttpGet.METHOD_NAME, endpoint); - request.setEntity(createEntity(getInfluencersRequest, REQUEST_BODY_CONTENT_TYPE)); - return request; - } - - static Request putCalendar(PutCalendarRequest putCalendarRequest) throws IOException { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml") - .addPathPartAsIs("calendars") - .addPathPart(putCalendarRequest.getCalendar().getId()) - .build(); - Request request = new Request(HttpPut.METHOD_NAME, endpoint); - request.setEntity(createEntity(putCalendarRequest, REQUEST_BODY_CONTENT_TYPE)); - return request; - } - - static Request getCalendars(GetCalendarsRequest getCalendarsRequest) throws IOException { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml") - .addPathPartAsIs("calendars") - .addPathPart(getCalendarsRequest.getCalendarId()) - .build(); - Request request = new Request(HttpGet.METHOD_NAME, endpoint); - request.setEntity(createEntity(getCalendarsRequest, REQUEST_BODY_CONTENT_TYPE)); - return request; - } - - static Request putCalendarJob(PutCalendarJobRequest putCalendarJobRequest) { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml") - .addPathPartAsIs("calendars") - 
.addPathPart(putCalendarJobRequest.getCalendarId()) - .addPathPartAsIs("jobs") - .addPathPart(Strings.collectionToCommaDelimitedString(putCalendarJobRequest.getJobIds())) - .build(); - return new Request(HttpPut.METHOD_NAME, endpoint); - } - - static Request deleteCalendarJob(DeleteCalendarJobRequest deleteCalendarJobRequest) { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml") - .addPathPartAsIs("calendars") - .addPathPart(deleteCalendarJobRequest.getCalendarId()) - .addPathPartAsIs("jobs") - .addPathPart(Strings.collectionToCommaDelimitedString(deleteCalendarJobRequest.getJobIds())) - .build(); - return new Request(HttpDelete.METHOD_NAME, endpoint); - } - - static Request deleteCalendar(DeleteCalendarRequest deleteCalendarRequest) { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml") - .addPathPartAsIs("calendars") - .addPathPart(deleteCalendarRequest.getCalendarId()) - .build(); - Request request = new Request(HttpDelete.METHOD_NAME, endpoint); - return request; - } - - static Request getCalendarEvents(GetCalendarEventsRequest getCalendarEventsRequest) throws IOException { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml") - .addPathPartAsIs("calendars") - .addPathPart(getCalendarEventsRequest.getCalendarId()) - .addPathPartAsIs("events") - .build(); - Request request = new Request(HttpGet.METHOD_NAME, endpoint); - request.setEntity(createEntity(getCalendarEventsRequest, REQUEST_BODY_CONTENT_TYPE)); - return request; - } - - static Request postCalendarEvents(PostCalendarEventRequest postCalendarEventRequest) throws IOException { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml") - .addPathPartAsIs("calendars") - .addPathPart(postCalendarEventRequest.getCalendarId()) - .addPathPartAsIs("events") - .build(); - Request request = new Request(HttpPost.METHOD_NAME, endpoint); - request.setEntity( - createEntity(postCalendarEventRequest, REQUEST_BODY_CONTENT_TYPE, PostCalendarEventRequest.EXCLUDE_CALENDAR_ID_PARAMS) - ); - return request; - } - - static Request deleteCalendarEvent(DeleteCalendarEventRequest deleteCalendarEventRequest) { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml") - .addPathPartAsIs("calendars") - .addPathPart(deleteCalendarEventRequest.getCalendarId()) - .addPathPartAsIs("events") - .addPathPart(deleteCalendarEventRequest.getEventId()) - .build(); - return new Request(HttpDelete.METHOD_NAME, endpoint); - } - - static Request estimateModelMemory(EstimateModelMemoryRequest estimateModelMemoryRequest) throws IOException { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml") - .addPathPartAsIs("anomaly_detectors") - .addPathPartAsIs("_estimate_model_memory") - .build(); - Request request = new Request(HttpPost.METHOD_NAME, endpoint); - request.setEntity(createEntity(estimateModelMemoryRequest, REQUEST_BODY_CONTENT_TYPE)); - return request; - } - - static Request putDataFrameAnalytics(PutDataFrameAnalyticsRequest putRequest) throws IOException { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml", "data_frame", "analytics") - .addPathPart(putRequest.getConfig().getId()) - .build(); - Request request = new Request(HttpPut.METHOD_NAME, endpoint); - request.setEntity(createEntity(putRequest, REQUEST_BODY_CONTENT_TYPE)); - return request; - } - - static Request updateDataFrameAnalytics(UpdateDataFrameAnalyticsRequest updateRequest) throws IOException { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml", "data_frame", "analytics") - 
.addPathPart(updateRequest.getUpdate().getId()) - .addPathPartAsIs("_update") - .build(); - Request request = new Request(HttpPost.METHOD_NAME, endpoint); - request.setEntity(createEntity(updateRequest, REQUEST_BODY_CONTENT_TYPE)); - return request; - } - - static Request getDataFrameAnalytics(GetDataFrameAnalyticsRequest getRequest) { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml", "data_frame", "analytics") - .addPathPart(Strings.collectionToCommaDelimitedString(getRequest.getIds())) - .build(); - Request request = new Request(HttpGet.METHOD_NAME, endpoint); - RequestConverters.Params params = new RequestConverters.Params(); - if (getRequest.getPageParams() != null) { - PageParams pageParams = getRequest.getPageParams(); - if (pageParams.getFrom() != null) { - params.putParam(PageParams.FROM.getPreferredName(), pageParams.getFrom().toString()); - } - if (pageParams.getSize() != null) { - params.putParam(PageParams.SIZE.getPreferredName(), pageParams.getSize().toString()); - } - } - if (getRequest.getAllowNoMatch() != null) { - params.putParam(GetDataFrameAnalyticsRequest.ALLOW_NO_MATCH, Boolean.toString(getRequest.getAllowNoMatch())); - } - if (getRequest.getExcludeGenerated() != null) { - params.putParam(GetDataFrameAnalyticsRequest.EXCLUDE_GENERATED, Boolean.toString(getRequest.getExcludeGenerated())); - } - request.addParameters(params.asMap()); - return request; - } - - static Request getDataFrameAnalyticsStats(GetDataFrameAnalyticsStatsRequest getStatsRequest) { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml", "data_frame", "analytics") - .addPathPart(Strings.collectionToCommaDelimitedString(getStatsRequest.getIds())) - .addPathPartAsIs("_stats") - .build(); - Request request = new Request(HttpGet.METHOD_NAME, endpoint); - RequestConverters.Params params = new RequestConverters.Params(); - if (getStatsRequest.getPageParams() != null) { - PageParams pageParams = getStatsRequest.getPageParams(); - if (pageParams.getFrom() != null) { - params.putParam(PageParams.FROM.getPreferredName(), pageParams.getFrom().toString()); - } - if (pageParams.getSize() != null) { - params.putParam(PageParams.SIZE.getPreferredName(), pageParams.getSize().toString()); - } - } - if (getStatsRequest.getAllowNoMatch() != null) { - params.putParam( - GetDataFrameAnalyticsStatsRequest.ALLOW_NO_MATCH.getPreferredName(), - Boolean.toString(getStatsRequest.getAllowNoMatch()) - ); - } - request.addParameters(params.asMap()); - return request; - } - - static Request startDataFrameAnalytics(StartDataFrameAnalyticsRequest startRequest) { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml", "data_frame", "analytics") - .addPathPart(startRequest.getId()) - .addPathPartAsIs("_start") - .build(); - Request request = new Request(HttpPost.METHOD_NAME, endpoint); - RequestConverters.Params params = new RequestConverters.Params(); - if (startRequest.getTimeout() != null) { - params.withTimeout(startRequest.getTimeout()); - } - request.addParameters(params.asMap()); - return request; - } - - static Request stopDataFrameAnalytics(StopDataFrameAnalyticsRequest stopRequest) { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml", "data_frame", "analytics") - .addPathPart(stopRequest.getId()) - .addPathPartAsIs("_stop") - .build(); - Request request = new Request(HttpPost.METHOD_NAME, endpoint); - RequestConverters.Params params = new RequestConverters.Params(); - if (stopRequest.getTimeout() != null) { - params.withTimeout(stopRequest.getTimeout()); - } - if 
(stopRequest.getAllowNoMatch() != null) { - params.putParam( - StopDataFrameAnalyticsRequest.ALLOW_NO_MATCH.getPreferredName(), - Boolean.toString(stopRequest.getAllowNoMatch()) - ); - } - if (stopRequest.getForce() != null) { - params.putParam(StopDataFrameAnalyticsRequest.FORCE.getPreferredName(), Boolean.toString(stopRequest.getForce())); - } - request.addParameters(params.asMap()); - return request; - } - - static Request deleteDataFrameAnalytics(DeleteDataFrameAnalyticsRequest deleteRequest) { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml", "data_frame", "analytics") - .addPathPart(deleteRequest.getId()) - .build(); - - Request request = new Request(HttpDelete.METHOD_NAME, endpoint); - - RequestConverters.Params params = new RequestConverters.Params(); - if (deleteRequest.getForce() != null) { - params.putParam("force", Boolean.toString(deleteRequest.getForce())); - } - if (deleteRequest.getTimeout() != null) { - params.withTimeout(deleteRequest.getTimeout()); - } - request.addParameters(params.asMap()); - - return request; - } - - static Request evaluateDataFrame(EvaluateDataFrameRequest evaluateRequest) throws IOException { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml", "data_frame", "_evaluate").build(); - Request request = new Request(HttpPost.METHOD_NAME, endpoint); - request.setEntity(createEntity(evaluateRequest, REQUEST_BODY_CONTENT_TYPE)); - return request; - } - - static Request explainDataFrameAnalytics(ExplainDataFrameAnalyticsRequest explainRequest) throws IOException { - EndpointBuilder endpoint = new EndpointBuilder().addPathPartAsIs("_ml", "data_frame", "analytics"); - if (explainRequest.getId() != null) { - endpoint.addPathPart(explainRequest.getId()); - } - endpoint.addPathPartAsIs("_explain"); - - Request request = new Request(HttpPost.METHOD_NAME, endpoint.build()); - if (explainRequest.getConfig() != null) { - request.setEntity(createEntity(explainRequest.getConfig(), REQUEST_BODY_CONTENT_TYPE)); - } - return request; - } - - static Request getTrainedModels(GetTrainedModelsRequest getTrainedModelsRequest) { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml", "trained_models") - .addPathPart(Strings.collectionToCommaDelimitedString(getTrainedModelsRequest.getIds())) - .build(); - RequestConverters.Params params = new RequestConverters.Params(); - if (getTrainedModelsRequest.getPageParams() != null) { - PageParams pageParams = getTrainedModelsRequest.getPageParams(); - if (pageParams.getFrom() != null) { - params.putParam(PageParams.FROM.getPreferredName(), pageParams.getFrom().toString()); - } - if (pageParams.getSize() != null) { - params.putParam(PageParams.SIZE.getPreferredName(), pageParams.getSize().toString()); - } - } - if (getTrainedModelsRequest.getAllowNoMatch() != null) { - params.putParam(GetTrainedModelsRequest.ALLOW_NO_MATCH, Boolean.toString(getTrainedModelsRequest.getAllowNoMatch())); - } - if (getTrainedModelsRequest.getDecompressDefinition() != null) { - params.putParam( - GetTrainedModelsRequest.DECOMPRESS_DEFINITION, - Boolean.toString(getTrainedModelsRequest.getDecompressDefinition()) - ); - } - if (getTrainedModelsRequest.getIncludes().isEmpty() == false) { - params.putParam( - GetTrainedModelsRequest.INCLUDE, - Strings.collectionToCommaDelimitedString(getTrainedModelsRequest.getIncludes()) - ); - } - if (getTrainedModelsRequest.getTags() != null) { - params.putParam(GetTrainedModelsRequest.TAGS, Strings.collectionToCommaDelimitedString(getTrainedModelsRequest.getTags())); - } - if 
(getTrainedModelsRequest.getExcludeGenerated() != null) { - params.putParam(GetTrainedModelsRequest.EXCLUDE_GENERATED, Boolean.toString(getTrainedModelsRequest.getExcludeGenerated())); - } - Request request = new Request(HttpGet.METHOD_NAME, endpoint); - request.addParameters(params.asMap()); - return request; - } - - static Request getTrainedModelsStats(GetTrainedModelsStatsRequest getTrainedModelsStatsRequest) { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml", "trained_models") - .addPathPart(Strings.collectionToCommaDelimitedString(getTrainedModelsStatsRequest.getIds())) - .addPathPart("_stats") - .build(); - RequestConverters.Params params = new RequestConverters.Params(); - if (getTrainedModelsStatsRequest.getPageParams() != null) { - PageParams pageParams = getTrainedModelsStatsRequest.getPageParams(); - if (pageParams.getFrom() != null) { - params.putParam(PageParams.FROM.getPreferredName(), pageParams.getFrom().toString()); - } - if (pageParams.getSize() != null) { - params.putParam(PageParams.SIZE.getPreferredName(), pageParams.getSize().toString()); - } - } - if (getTrainedModelsStatsRequest.getAllowNoMatch() != null) { - params.putParam(GetTrainedModelsStatsRequest.ALLOW_NO_MATCH, Boolean.toString(getTrainedModelsStatsRequest.getAllowNoMatch())); - } - Request request = new Request(HttpGet.METHOD_NAME, endpoint); - request.addParameters(params.asMap()); - return request; - } - - static Request deleteTrainedModel(DeleteTrainedModelRequest deleteRequest) { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml", "trained_models").addPathPart(deleteRequest.getId()).build(); - return new Request(HttpDelete.METHOD_NAME, endpoint); - } - - static Request putTrainedModel(PutTrainedModelRequest putTrainedModelRequest) throws IOException { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml", "trained_models") - .addPathPart(putTrainedModelRequest.getTrainedModelConfig().getModelId()) - .build(); - Request request = new Request(HttpPut.METHOD_NAME, endpoint); - request.setEntity(createEntity(putTrainedModelRequest, REQUEST_BODY_CONTENT_TYPE)); - return request; - } - - static Request putTrainedModelAlias(PutTrainedModelAliasRequest putTrainedModelAliasRequest) throws IOException { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml", "trained_models") - .addPathPart(putTrainedModelAliasRequest.getModelId()) - .addPathPartAsIs("model_aliases") - .addPathPart(putTrainedModelAliasRequest.getModelAlias()) - .build(); - Request request = new Request(HttpPut.METHOD_NAME, endpoint); - RequestConverters.Params params = new RequestConverters.Params(); - if (putTrainedModelAliasRequest.getReassign() != null) { - params.putParam(PutTrainedModelAliasRequest.REASSIGN, Boolean.toString(putTrainedModelAliasRequest.getReassign())); - } - request.addParameters(params.asMap()); - return request; - } - - static Request deleteTrainedModelAlias(DeleteTrainedModelAliasRequest deleteTrainedModelAliasRequest) throws IOException { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml", "trained_models") - .addPathPart(deleteTrainedModelAliasRequest.getModelId()) - .addPathPartAsIs("model_aliases") - .addPathPart(deleteTrainedModelAliasRequest.getModelAlias()) - .build(); - return new Request(HttpDelete.METHOD_NAME, endpoint); - } - - static Request putFilter(PutFilterRequest putFilterRequest) throws IOException { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml") - .addPathPartAsIs("filters") - 
.addPathPart(putFilterRequest.getMlFilter().getId()) - .build(); - Request request = new Request(HttpPut.METHOD_NAME, endpoint); - request.setEntity(createEntity(putFilterRequest, REQUEST_BODY_CONTENT_TYPE)); - return request; - } - - static Request getFilter(GetFiltersRequest getFiltersRequest) { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml") - .addPathPartAsIs("filters") - .addPathPart(getFiltersRequest.getFilterId()) - .build(); - Request request = new Request(HttpGet.METHOD_NAME, endpoint); - RequestConverters.Params params = new RequestConverters.Params(); - if (getFiltersRequest.getSize() != null) { - params.putParam(PageParams.SIZE.getPreferredName(), getFiltersRequest.getSize().toString()); - } - if (getFiltersRequest.getFrom() != null) { - params.putParam(PageParams.FROM.getPreferredName(), getFiltersRequest.getFrom().toString()); - } - request.addParameters(params.asMap()); - return request; - } - - static Request updateFilter(UpdateFilterRequest updateFilterRequest) throws IOException { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml") - .addPathPartAsIs("filters") - .addPathPart(updateFilterRequest.getFilterId()) - .addPathPartAsIs("_update") - .build(); - Request request = new Request(HttpPost.METHOD_NAME, endpoint); - request.setEntity(createEntity(updateFilterRequest, REQUEST_BODY_CONTENT_TYPE)); - return request; - } - - static Request deleteFilter(DeleteFilterRequest deleteFilterRequest) { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml", "filters").addPathPart(deleteFilterRequest.getId()).build(); - Request request = new Request(HttpDelete.METHOD_NAME, endpoint); - return request; - } - - static Request setUpgradeMode(SetUpgradeModeRequest setUpgradeModeRequest) { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml", "set_upgrade_mode").build(); - Request request = new Request(HttpPost.METHOD_NAME, endpoint); - RequestConverters.Params params = new RequestConverters.Params(); - params.putParam(SetUpgradeModeRequest.ENABLED.getPreferredName(), Boolean.toString(setUpgradeModeRequest.isEnabled())); - if (setUpgradeModeRequest.getTimeout() != null) { - params.putParam(SetUpgradeModeRequest.TIMEOUT.getPreferredName(), setUpgradeModeRequest.getTimeout().toString()); - } - request.addParameters(params.asMap()); - return request; - } - - static Request mlInfo(MlInfoRequest infoRequest) { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml", "info").build(); - return new Request(HttpGet.METHOD_NAME, endpoint); - } - -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/MachineLearningClient.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/MachineLearningClient.java deleted file mode 100644 index 1272ca1658578..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/MachineLearningClient.java +++ /dev/null @@ -1,2992 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ -package org.elasticsearch.client; - -import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.support.master.AcknowledgedResponse; -import org.elasticsearch.client.ml.CloseJobRequest; -import org.elasticsearch.client.ml.CloseJobResponse; -import org.elasticsearch.client.ml.DeleteCalendarEventRequest; -import org.elasticsearch.client.ml.DeleteCalendarJobRequest; -import org.elasticsearch.client.ml.DeleteCalendarRequest; -import org.elasticsearch.client.ml.DeleteDataFrameAnalyticsRequest; -import org.elasticsearch.client.ml.DeleteDatafeedRequest; -import org.elasticsearch.client.ml.DeleteExpiredDataRequest; -import org.elasticsearch.client.ml.DeleteExpiredDataResponse; -import org.elasticsearch.client.ml.DeleteFilterRequest; -import org.elasticsearch.client.ml.DeleteForecastRequest; -import org.elasticsearch.client.ml.DeleteJobRequest; -import org.elasticsearch.client.ml.DeleteJobResponse; -import org.elasticsearch.client.ml.DeleteModelSnapshotRequest; -import org.elasticsearch.client.ml.DeleteTrainedModelAliasRequest; -import org.elasticsearch.client.ml.DeleteTrainedModelRequest; -import org.elasticsearch.client.ml.EstimateModelMemoryRequest; -import org.elasticsearch.client.ml.EstimateModelMemoryResponse; -import org.elasticsearch.client.ml.EvaluateDataFrameRequest; -import org.elasticsearch.client.ml.EvaluateDataFrameResponse; -import org.elasticsearch.client.ml.ExplainDataFrameAnalyticsRequest; -import org.elasticsearch.client.ml.ExplainDataFrameAnalyticsResponse; -import org.elasticsearch.client.ml.FlushJobRequest; -import org.elasticsearch.client.ml.FlushJobResponse; -import org.elasticsearch.client.ml.ForecastJobRequest; -import org.elasticsearch.client.ml.ForecastJobResponse; -import org.elasticsearch.client.ml.GetBucketsRequest; -import org.elasticsearch.client.ml.GetBucketsResponse; -import org.elasticsearch.client.ml.GetCalendarEventsRequest; -import org.elasticsearch.client.ml.GetCalendarEventsResponse; -import org.elasticsearch.client.ml.GetCalendarsRequest; -import org.elasticsearch.client.ml.GetCalendarsResponse; -import org.elasticsearch.client.ml.GetCategoriesRequest; -import org.elasticsearch.client.ml.GetCategoriesResponse; -import org.elasticsearch.client.ml.GetDataFrameAnalyticsRequest; -import org.elasticsearch.client.ml.GetDataFrameAnalyticsResponse; -import org.elasticsearch.client.ml.GetDataFrameAnalyticsStatsRequest; -import org.elasticsearch.client.ml.GetDataFrameAnalyticsStatsResponse; -import org.elasticsearch.client.ml.GetDatafeedRequest; -import org.elasticsearch.client.ml.GetDatafeedResponse; -import org.elasticsearch.client.ml.GetDatafeedStatsRequest; -import org.elasticsearch.client.ml.GetDatafeedStatsResponse; -import org.elasticsearch.client.ml.GetFiltersRequest; -import org.elasticsearch.client.ml.GetFiltersResponse; -import org.elasticsearch.client.ml.GetInfluencersRequest; -import org.elasticsearch.client.ml.GetInfluencersResponse; -import org.elasticsearch.client.ml.GetJobRequest; -import org.elasticsearch.client.ml.GetJobResponse; -import org.elasticsearch.client.ml.GetJobStatsRequest; -import org.elasticsearch.client.ml.GetJobStatsResponse; -import org.elasticsearch.client.ml.GetModelSnapshotsRequest; -import org.elasticsearch.client.ml.GetModelSnapshotsResponse; -import org.elasticsearch.client.ml.GetOverallBucketsRequest; -import org.elasticsearch.client.ml.GetOverallBucketsResponse; -import org.elasticsearch.client.ml.GetRecordsRequest; -import org.elasticsearch.client.ml.GetRecordsResponse; -import 
org.elasticsearch.client.ml.GetTrainedModelsRequest; -import org.elasticsearch.client.ml.GetTrainedModelsResponse; -import org.elasticsearch.client.ml.GetTrainedModelsStatsRequest; -import org.elasticsearch.client.ml.GetTrainedModelsStatsResponse; -import org.elasticsearch.client.ml.MlInfoRequest; -import org.elasticsearch.client.ml.MlInfoResponse; -import org.elasticsearch.client.ml.OpenJobRequest; -import org.elasticsearch.client.ml.OpenJobResponse; -import org.elasticsearch.client.ml.PostCalendarEventRequest; -import org.elasticsearch.client.ml.PostCalendarEventResponse; -import org.elasticsearch.client.ml.PostDataRequest; -import org.elasticsearch.client.ml.PostDataResponse; -import org.elasticsearch.client.ml.PreviewDatafeedRequest; -import org.elasticsearch.client.ml.PreviewDatafeedResponse; -import org.elasticsearch.client.ml.PutCalendarJobRequest; -import org.elasticsearch.client.ml.PutCalendarRequest; -import org.elasticsearch.client.ml.PutCalendarResponse; -import org.elasticsearch.client.ml.PutDataFrameAnalyticsRequest; -import org.elasticsearch.client.ml.PutDataFrameAnalyticsResponse; -import org.elasticsearch.client.ml.PutDatafeedRequest; -import org.elasticsearch.client.ml.PutDatafeedResponse; -import org.elasticsearch.client.ml.PutFilterRequest; -import org.elasticsearch.client.ml.PutFilterResponse; -import org.elasticsearch.client.ml.PutJobRequest; -import org.elasticsearch.client.ml.PutJobResponse; -import org.elasticsearch.client.ml.PutTrainedModelAliasRequest; -import org.elasticsearch.client.ml.PutTrainedModelRequest; -import org.elasticsearch.client.ml.PutTrainedModelResponse; -import org.elasticsearch.client.ml.RevertModelSnapshotRequest; -import org.elasticsearch.client.ml.RevertModelSnapshotResponse; -import org.elasticsearch.client.ml.SetUpgradeModeRequest; -import org.elasticsearch.client.ml.StartDataFrameAnalyticsRequest; -import org.elasticsearch.client.ml.StartDataFrameAnalyticsResponse; -import org.elasticsearch.client.ml.StartDatafeedRequest; -import org.elasticsearch.client.ml.StartDatafeedResponse; -import org.elasticsearch.client.ml.StopDataFrameAnalyticsRequest; -import org.elasticsearch.client.ml.StopDataFrameAnalyticsResponse; -import org.elasticsearch.client.ml.StopDatafeedRequest; -import org.elasticsearch.client.ml.StopDatafeedResponse; -import org.elasticsearch.client.ml.UpdateDataFrameAnalyticsRequest; -import org.elasticsearch.client.ml.UpdateDatafeedRequest; -import org.elasticsearch.client.ml.UpdateFilterRequest; -import org.elasticsearch.client.ml.UpdateJobRequest; -import org.elasticsearch.client.ml.UpdateModelSnapshotRequest; -import org.elasticsearch.client.ml.UpdateModelSnapshotResponse; -import org.elasticsearch.client.ml.UpgradeJobModelSnapshotRequest; -import org.elasticsearch.client.ml.UpgradeJobModelSnapshotResponse; -import org.elasticsearch.client.ml.job.stats.JobStats; - -import java.io.IOException; -import java.util.Collections; - -/** - * Machine Learning API client wrapper for the {@link RestHighLevelClient} - *
    - * <p>
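    - * A minimal sketch of how this wrapper is typically obtained, assuming an already-built
    - * {@link RestHighLevelClient} named {@code client}:
    - * <pre>{@code
    - * MachineLearningClient ml = client.machineLearning();
    - * }</pre>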
    - * See the - * X-Pack Machine Learning APIs for additional information. - * - * @deprecated The High Level Rest Client is deprecated in favor of the - * - * Elasticsearch Java API Client - */ -@Deprecated(since = "7.16.0", forRemoval = true) -@SuppressWarnings("removal") -public final class MachineLearningClient { - - private final RestHighLevelClient restHighLevelClient; - - MachineLearningClient(RestHighLevelClient restHighLevelClient) { - this.restHighLevelClient = restHighLevelClient; - } - - /** - * Creates a new Machine Learning Job - *
    - * <p>
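    - * A minimal usage sketch, assuming {@code client} is a configured {@link RestHighLevelClient}
    - * and {@code job} is a fully built {@link org.elasticsearch.client.ml.job.config.Job}:
    - * <pre>{@code
    - * PutJobResponse response = client.machineLearning().putJob(new PutJobRequest(job), RequestOptions.DEFAULT);
    - * }</pre>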
    - * For additional info - * see ML PUT job documentation - * - * @param request The PutJobRequest containing the {@link org.elasticsearch.client.ml.job.config.Job} settings - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return PutJobResponse with enclosed {@link org.elasticsearch.client.ml.job.config.Job} object - * @throws IOException when there is a serialization issue sending the request or receiving the response - */ - public PutJobResponse putJob(PutJobRequest request, RequestOptions options) throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - request, - MLRequestConverters::putJob, - options, - PutJobResponse::fromXContent, - Collections.emptySet() - ); - } - - /** - * Creates a new Machine Learning Job asynchronously and notifies listener on completion - *
    - * <p>
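    - * A sketch of the asynchronous variant under the same assumptions; {@code onSuccess} and
    - * {@code onFailure} are placeholder callbacks, not part of this client:
    - * <pre>{@code
    - * client.machineLearning().putJobAsync(new PutJobRequest(job), RequestOptions.DEFAULT,
    - *     ActionListener.wrap(response -> onSuccess(response), e -> onFailure(e)));
    - * }</pre>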
    - * For additional info - * see ML PUT job documentation - * @param request The request containing the {@link org.elasticsearch.client.ml.job.config.Job} settings - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener Listener to be notified upon request completion - * @return cancellable that may be used to cancel the request - */ - public Cancellable putJobAsync(PutJobRequest request, RequestOptions options, ActionListener listener) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - request, - MLRequestConverters::putJob, - options, - PutJobResponse::fromXContent, - listener, - Collections.emptySet() - ); - } - - /** - * Gets one or more Machine Learning job configuration info. - *
    - * <p>
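    - * A minimal sketch, assuming {@code client} as above and {@code "my-job"} as a placeholder job id:
    - * <pre>{@code
    - * GetJobResponse response = client.machineLearning().getJob(new GetJobRequest("my-job"), RequestOptions.DEFAULT);
    - * }</pre>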
    - * For additional info - * see ML GET job documentation - * - * @param request {@link GetJobRequest} Request containing a list of jobId(s) and additional options - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return {@link GetJobResponse} response object containing - * the {@link org.elasticsearch.client.ml.job.config.Job} objects and the number of jobs found - * @throws IOException when there is a serialization issue sending the request or receiving the response - */ - public GetJobResponse getJob(GetJobRequest request, RequestOptions options) throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - request, - MLRequestConverters::getJob, - options, - GetJobResponse::fromXContent, - Collections.emptySet() - ); - } - - /** - * Gets one or more Machine Learning job configuration info, asynchronously. - *
    - * <p>
    - * For additional info - * see ML GET job documentation - * @param request {@link GetJobRequest} Request containing a list of jobId(s) and additional options - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener Listener to be notified with {@link GetJobResponse} upon request completion - * @return cancellable that may be used to cancel the request - */ - public Cancellable getJobAsync(GetJobRequest request, RequestOptions options, ActionListener listener) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - request, - MLRequestConverters::getJob, - options, - GetJobResponse::fromXContent, - listener, - Collections.emptySet() - ); - } - - /** - * Gets usage statistics for one or more Machine Learning jobs - *
    - * <p>
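    - * A minimal sketch under the same assumptions ({@code client}, placeholder id {@code "my-job"}):
    - * <pre>{@code
    - * GetJobStatsResponse stats = client.machineLearning().getJobStats(new GetJobStatsRequest("my-job"), RequestOptions.DEFAULT);
    - * }</pre>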
    - * For additional info - * see Get job stats docs - * - * @param request {@link GetJobStatsRequest} Request containing a list of jobId(s) and additional options - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return {@link GetJobStatsResponse} response object containing - * the {@link JobStats} objects and the number of jobs found - * @throws IOException when there is a serialization issue sending the request or receiving the response - */ - public GetJobStatsResponse getJobStats(GetJobStatsRequest request, RequestOptions options) throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - request, - MLRequestConverters::getJobStats, - options, - GetJobStatsResponse::fromXContent, - Collections.emptySet() - ); - } - - /** - * Gets usage statistics for one or more Machine Learning jobs, asynchronously. - *
    - * <p>
    - * For additional info - * see Get job stats docs - * @param request {@link GetJobStatsRequest} Request containing a list of jobId(s) and additional options - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener Listener to be notified with {@link GetJobStatsResponse} upon request completion - * @return cancellable that may be used to cancel the request - */ - public Cancellable getJobStatsAsync(GetJobStatsRequest request, RequestOptions options, ActionListener listener) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - request, - MLRequestConverters::getJobStats, - options, - GetJobStatsResponse::fromXContent, - listener, - Collections.emptySet() - ); - } - - /** - * Deletes expired data from Machine Learning Jobs - *
    - * <p>
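    - * A minimal sketch, assuming {@code client} as above and that the no-argument request form
    - * targets all eligible jobs:
    - * <pre>{@code
    - * DeleteExpiredDataResponse response =
    - *     client.machineLearning().deleteExpiredData(new DeleteExpiredDataRequest(), RequestOptions.DEFAULT);
    - * }</pre>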
    - * For additional info - * see ML Delete Expired Data - * documentation - * - * @param request The request to delete expired ML data - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return The action response which contains the acknowledgement or the task id depending on whether the action was set to wait for - * completion - * @throws IOException when there is a serialization issue sending the request or receiving the response - */ - public DeleteExpiredDataResponse deleteExpiredData(DeleteExpiredDataRequest request, RequestOptions options) throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - request, - MLRequestConverters::deleteExpiredData, - options, - DeleteExpiredDataResponse::fromXContent, - Collections.emptySet() - ); - } - - /** - * Deletes expired data from Machine Learning Jobs asynchronously and notifies the listener on completion - *
    - * <p>
    - * For additional info - * see ML Delete Expired Data - * documentation - * @param request The request to delete expired ML data - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener Listener to be notified upon request completion - * @return cancellable that may be used to cancel the request - */ - public Cancellable deleteExpiredDataAsync( - DeleteExpiredDataRequest request, - RequestOptions options, - ActionListener listener - ) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - request, - MLRequestConverters::deleteExpiredData, - options, - DeleteExpiredDataResponse::fromXContent, - listener, - Collections.emptySet() - ); - } - - /** - * Deletes the given Machine Learning Job - *
    - * <p>
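    - * A minimal sketch ({@code client} and placeholder id {@code "my-job"} as assumed above):
    - * <pre>{@code
    - * DeleteJobResponse response = client.machineLearning().deleteJob(new DeleteJobRequest("my-job"), RequestOptions.DEFAULT);
    - * }</pre>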
    - * For additional info - * see ML Delete job documentation - * - * @param request The request to delete the job - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return The action response which contains the acknowledgement or the task id depending on whether the action was set to wait for - * completion - * @throws IOException when there is a serialization issue sending the request or receiving the response - */ - public DeleteJobResponse deleteJob(DeleteJobRequest request, RequestOptions options) throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - request, - MLRequestConverters::deleteJob, - options, - DeleteJobResponse::fromXContent, - Collections.emptySet() - ); - } - - /** - * Deletes the given Machine Learning Job asynchronously and notifies the listener on completion - *
    - * <p>
    - * For additional info - * see ML Delete Job documentation - * - * @param request The request to delete the job - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener Listener to be notified upon request completion - * @return cancellable that may be used to cancel the request - */ - public Cancellable deleteJobAsync(DeleteJobRequest request, RequestOptions options, ActionListener listener) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - request, - MLRequestConverters::deleteJob, - options, - DeleteJobResponse::fromXContent, - listener, - Collections.emptySet() - ); - } - - /** - * Opens a Machine Learning Job. - * When you open a new job, it starts with an empty model. - * When you open an existing job, the most recent model state is automatically loaded. - * The job is ready to resume its analysis from where it left off, once new data is received. - *
    - * <p>
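    - * A minimal sketch ({@code client} and placeholder id {@code "my-job"} as assumed above):
    - * <pre>{@code
    - * OpenJobResponse response = client.machineLearning().openJob(new OpenJobRequest("my-job"), RequestOptions.DEFAULT);
    - * }</pre>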
    - * For additional info - * see ML Open Job documentation - * - * @param request Request containing job_id and additional optional options - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return response containing if the job was successfully opened or not. - * @throws IOException when there is a serialization issue sending the request or receiving the response - */ - public OpenJobResponse openJob(OpenJobRequest request, RequestOptions options) throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - request, - MLRequestConverters::openJob, - options, - OpenJobResponse::fromXContent, - Collections.emptySet() - ); - } - - /** - * Opens a Machine Learning Job asynchronously, notifies listener on completion. - * When you open a new job, it starts with an empty model. - * When you open an existing job, the most recent model state is automatically loaded. - * The job is ready to resume its analysis from where it left off, once new data is received. - *
    - * <p>
    - * For additional info - * see ML Open Job documentation - * - * @param request Request containing job_id and additional optional options - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener Listener to be notified upon request completion - * @return cancellable that may be used to cancel the request - */ - public Cancellable openJobAsync(OpenJobRequest request, RequestOptions options, ActionListener listener) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - request, - MLRequestConverters::openJob, - options, - OpenJobResponse::fromXContent, - listener, - Collections.emptySet() - ); - } - - /** - * Closes one or more Machine Learning Jobs. A job can be opened and closed multiple times throughout its lifecycle. - * A closed job cannot receive data or perform analysis operations, but you can still explore and navigate results. - *
    - * <p>
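    - * A minimal sketch ({@code client} and placeholder id {@code "my-job"} as assumed above):
    - * <pre>{@code
    - * CloseJobResponse response = client.machineLearning().closeJob(new CloseJobRequest("my-job"), RequestOptions.DEFAULT);
    - * }</pre>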
-    /**
-     * Closes one or more Machine Learning Jobs. A job can be opened and closed multiple times throughout its lifecycle.
-     * A closed job cannot receive data or perform analysis operations, but you can still explore and navigate results.
-     * <p>
-     * For additional info
-     * see ML Close Job documentation
-     *
-     * @param request Request containing job_ids and additional options. See {@link CloseJobRequest}
-     * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
-     * @return response containing if the job was successfully closed or not.
-     * @throws IOException when there is a serialization issue sending the request or receiving the response
-     */
-    public CloseJobResponse closeJob(CloseJobRequest request, RequestOptions options) throws IOException {
-        return restHighLevelClient.performRequestAndParseEntity(
-            request,
-            MLRequestConverters::closeJob,
-            options,
-            CloseJobResponse::fromXContent,
-            Collections.emptySet()
-        );
-    }
-
-    /**
-     * Closes one or more Machine Learning Jobs asynchronously, notifies listener on completion.
-     * A closed job cannot receive data or perform analysis operations, but you can still explore and navigate results.
-     * <p>
-     * For additional info
-     * see ML Close Job documentation
-     *
-     * @param request Request containing job_ids and additional options. See {@link CloseJobRequest}
-     * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
-     * @param listener Listener to be notified upon request completion
-     * @return cancellable that may be used to cancel the request
-     */
-    public Cancellable closeJobAsync(CloseJobRequest request, RequestOptions options, ActionListener<CloseJobResponse> listener) {
-        return restHighLevelClient.performRequestAsyncAndParseEntity(
-            request,
-            MLRequestConverters::closeJob,
-            options,
-            CloseJobResponse::fromXContent,
-            listener,
-            Collections.emptySet()
-        );
-    }
-
-    /**
-     * Flushes internally buffered data for the given Machine Learning Job, ensuring all data sent to the job has been processed.
-     * This may cause new results to be calculated depending on the contents of the buffer.
-     * Both flush and close operations are similar,
-     * however the flush is more efficient if you are expecting to send more data for analysis.
-     * When flushing, the job remains open and is available to continue analyzing data.
-     * A close operation additionally prunes and persists the model state to disk and the
-     * job must be opened again before analyzing further data.
-     * <p>
-     * For additional info
-     * see Flush ML job documentation
-     *
-     * @param request The {@link FlushJobRequest} object enclosing the `jobId` and additional request options
-     * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
-     * @throws IOException when there is a serialization issue sending the request or receiving the response
-     */
-    public FlushJobResponse flushJob(FlushJobRequest request, RequestOptions options) throws IOException {
-        return restHighLevelClient.performRequestAndParseEntity(
-            request,
-            MLRequestConverters::flushJob,
-            options,
-            FlushJobResponse::fromXContent,
-            Collections.emptySet()
-        );
-    }
-
-    /**
-     * Flushes internally buffered data for the given Machine Learning Job asynchronously, ensuring all data sent to the job has been
-     * processed. This may cause new results to be calculated depending on the contents of the buffer.
-     * Both flush and close operations are similar,
-     * however the flush is more efficient if you are expecting to send more data for analysis.
-     * When flushing, the job remains open and is available to continue analyzing data.
-     * A close operation additionally prunes and persists the model state to disk and the
-     * job must be opened again before analyzing further data.
-     * <p>
-     * For additional info
-     * see Flush ML job documentation
-     *
-     * @param request The {@link FlushJobRequest} object enclosing the `jobId` and additional request options
-     * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
-     * @param listener Listener to be notified upon request completion
-     * @return cancellable that may be used to cancel the request
-     */
-    public Cancellable flushJobAsync(FlushJobRequest request, RequestOptions options, ActionListener<FlushJobResponse> listener) {
-        return restHighLevelClient.performRequestAsyncAndParseEntity(
-            request,
-            MLRequestConverters::flushJob,
-            options,
-            FlushJobResponse::fromXContent,
-            listener,
-            Collections.emptySet()
-        );
-    }
-
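The flush/close distinction above matters in practice: flush keeps the job open for more data. A minimal sketch, same assumptions as the earlier example:

    FlushJobRequest flushRequest = new FlushJobRequest("my-job");
    flushRequest.setCalcInterim(true);   // also compute interim results for the latest buckets
    FlushJobResponse flushResponse = client.machineLearning().flushJob(flushRequest, RequestOptions.DEFAULT);
    boolean flushed = flushResponse.isFlushed();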
-    /**
-     * Creates a forecast of an existing, opened Machine Learning Job
-     * This predicts the future behavior of a time series by using its historical behavior.
-     * <p>
-     * For additional info
-     * see Forecast ML Job Documentation
-     *
-     * @param request ForecastJobRequest with forecasting options
-     * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
-     * @return response containing forecast acknowledgement and new forecast's ID
-     * @throws IOException when there is a serialization issue sending the request or receiving the response
-     */
-    public ForecastJobResponse forecastJob(ForecastJobRequest request, RequestOptions options) throws IOException {
-        return restHighLevelClient.performRequestAndParseEntity(
-            request,
-            MLRequestConverters::forecastJob,
-            options,
-            ForecastJobResponse::fromXContent,
-            Collections.emptySet()
-        );
-    }
-
-    /**
-     * Creates a forecast of an existing, opened Machine Learning Job asynchronously
-     * This predicts the future behavior of a time series by using its historical behavior.
-     * <p>
-     * For additional info
-     * see Forecast ML Job Documentation
-     *
-     * @param request ForecastJobRequest with forecasting options
-     * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
-     * @param listener Listener to be notified upon request completion
-     * @return cancellable that may be used to cancel the request
-     */
-    public Cancellable forecastJobAsync(ForecastJobRequest request, RequestOptions options, ActionListener<ForecastJobResponse> listener) {
-        return restHighLevelClient.performRequestAsyncAndParseEntity(
-            request,
-            MLRequestConverters::forecastJob,
-            options,
-            ForecastJobResponse::fromXContent,
-            listener,
-            Collections.emptySet()
-        );
-    }
-
-    /**
-     * Deletes Machine Learning Job Forecasts
-     * <p>
-     * For additional info
-     * see Delete Job Forecast documentation
-     *
-     * @param request the {@link DeleteForecastRequest} object enclosing the desired jobId, forecastIDs, and other options
-     * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
-     * @return an AcknowledgedResponse object indicating request success
-     * @throws IOException when there is a serialization issue sending the request or receiving the response
-     */
-    public AcknowledgedResponse deleteForecast(DeleteForecastRequest request, RequestOptions options) throws IOException {
-        return restHighLevelClient.performRequestAndParseEntity(
-            request,
-            MLRequestConverters::deleteForecast,
-            options,
-            AcknowledgedResponse::fromXContent,
-            Collections.emptySet()
-        );
-    }
-
-    /**
-     * Deletes Machine Learning Job Forecasts asynchronously
-     * <p>
-     * For additional info
-     * see Delete Job Forecast documentation
-     *
-     * @param request the {@link DeleteForecastRequest} object enclosing the desired jobId, forecastIDs, and other options
-     * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
-     * @param listener Listener to be notified upon request completion
-     * @return cancellable that may be used to cancel the request
-     */
-    public Cancellable deleteForecastAsync(
-        DeleteForecastRequest request,
-        RequestOptions options,
-        ActionListener<AcknowledgedResponse> listener
-    ) {
-        return restHighLevelClient.performRequestAsyncAndParseEntity(
-            request,
-            MLRequestConverters::deleteForecast,
-            options,
-            AcknowledgedResponse::fromXContent,
-            listener,
-            Collections.emptySet()
-        );
-    }
-
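A forecast is created against an open job and can later be deleted by the id the create call returns. A sketch under the same assumptions (the 48h duration is an arbitrary example):

    ForecastJobRequest forecastRequest = new ForecastJobRequest("my-job");
    forecastRequest.setDuration(TimeValue.timeValueHours(48));   // predict 48 hours ahead
    ForecastJobResponse forecastResponse = client.machineLearning().forecastJob(forecastRequest, RequestOptions.DEFAULT);
    String forecastId = forecastResponse.getForecastId();

    DeleteForecastRequest deleteForecastRequest = new DeleteForecastRequest("my-job");
    deleteForecastRequest.setForecastIds(forecastId);
    AcknowledgedResponse deleted = client.machineLearning().deleteForecast(deleteForecastRequest, RequestOptions.DEFAULT);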
-    /**
-     * Deletes Machine Learning Model Snapshots
-     * <p>
-     * For additional info
-     * see ML Delete Model Snapshot documentation
-     *
-     * @param request The request to delete the model snapshot
-     * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
-     * @return action acknowledgement
-     * @throws IOException when there is a serialization issue sending the request or receiving the response
-     */
-    public AcknowledgedResponse deleteModelSnapshot(DeleteModelSnapshotRequest request, RequestOptions options) throws IOException {
-        return restHighLevelClient.performRequestAndParseEntity(
-            request,
-            MLRequestConverters::deleteModelSnapshot,
-            options,
-            AcknowledgedResponse::fromXContent,
-            Collections.emptySet()
-        );
-    }
-
-    /**
-     * Deletes Machine Learning Model Snapshots asynchronously and notifies the listener on completion
-     * <p>
-     * For additional info
-     * see ML Delete Model Snapshot documentation
-     *
-     * @param request The request to delete the model snapshot
-     * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
-     * @param listener Listener to be notified upon request completion
-     * @return cancellable that may be used to cancel the request
-     */
-    public Cancellable deleteModelSnapshotAsync(
-        DeleteModelSnapshotRequest request,
-        RequestOptions options,
-        ActionListener<AcknowledgedResponse> listener
-    ) {
-        return restHighLevelClient.performRequestAsyncAndParseEntity(
-            request,
-            MLRequestConverters::deleteModelSnapshot,
-            options,
-            AcknowledgedResponse::fromXContent,
-            listener,
-            Collections.emptySet()
-        );
-    }
-
-    /**
-     * Reverts to a particular Machine Learning Model Snapshot
-     * <p>
-     * For additional info
-     * see ML Revert Model Snapshot documentation
-     *
-     * @param request The request to revert to a previous model snapshot
-     * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
-     * @return action acknowledgement
-     * @throws IOException when there is a serialization issue sending the request or receiving the response
-     */
-    public RevertModelSnapshotResponse revertModelSnapshot(RevertModelSnapshotRequest request, RequestOptions options) throws IOException {
-        return restHighLevelClient.performRequestAndParseEntity(
-            request,
-            MLRequestConverters::revertModelSnapshot,
-            options,
-            RevertModelSnapshotResponse::fromXContent,
-            Collections.emptySet()
-        );
-    }
-
-    /**
-     * Reverts to a particular Machine Learning Model Snapshot asynchronously and notifies the listener on completion
-     * <p>
-     * For additional info
-     * see ML Revert Model Snapshot documentation
-     *
-     * @param request The request to revert to a previous model snapshot
-     * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
-     * @param listener Listener to be notified upon request completion
-     * @return cancellable that may be used to cancel the request
-     */
-    public Cancellable revertModelSnapshotAsync(
-        RevertModelSnapshotRequest request,
-        RequestOptions options,
-        ActionListener<RevertModelSnapshotResponse> listener
-    ) {
-        return restHighLevelClient.performRequestAsyncAndParseEntity(
-            request,
-            MLRequestConverters::revertModelSnapshot,
-            options,
-            RevertModelSnapshotResponse::fromXContent,
-            listener,
-            Collections.emptySet()
-        );
-    }
-
-    /**
-     * Creates a new Machine Learning Datafeed
-     * <p>
-     * For additional info
-     * see ML PUT datafeed documentation
-     *
-     * @param request The PutDatafeedRequest containing the {@link org.elasticsearch.client.ml.datafeed.DatafeedConfig} settings
-     * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
-     * @return PutDatafeedResponse with enclosed {@link org.elasticsearch.client.ml.datafeed.DatafeedConfig} object
-     * @throws IOException when there is a serialization issue sending the request or receiving the response
-     */
-    public PutDatafeedResponse putDatafeed(PutDatafeedRequest request, RequestOptions options) throws IOException {
-        return restHighLevelClient.performRequestAndParseEntity(
-            request,
-            MLRequestConverters::putDatafeed,
-            options,
-            PutDatafeedResponse::fromXContent,
-            Collections.emptySet()
-        );
-    }
-
-    /**
-     * Creates a new Machine Learning Datafeed asynchronously and notifies listener on completion
-     * <p>
-     * For additional info
-     * see ML PUT datafeed documentation
-     *
-     * @param request The request containing the {@link org.elasticsearch.client.ml.datafeed.DatafeedConfig} settings
-     * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
-     * @param listener Listener to be notified upon request completion
-     * @return cancellable that may be used to cancel the request
-     */
-    public Cancellable putDatafeedAsync(PutDatafeedRequest request, RequestOptions options, ActionListener<PutDatafeedResponse> listener) {
-        return restHighLevelClient.performRequestAsyncAndParseEntity(
-            request,
-            MLRequestConverters::putDatafeed,
-            options,
-            PutDatafeedResponse::fromXContent,
-            listener,
-            Collections.emptySet()
-        );
-    }
-
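A datafeed ties a job to the indices it analyzes. A minimal sketch of creating one (`my-datafeed` and `my-index` are hypothetical):

    DatafeedConfig.Builder datafeedBuilder = new DatafeedConfig.Builder("my-datafeed", "my-job")
        .setIndices("my-index")
        .setQueryDelay(TimeValue.timeValueMinutes(1));
    PutDatafeedResponse putDatafeedResponse = client.machineLearning()
        .putDatafeed(new PutDatafeedRequest(datafeedBuilder.build()), RequestOptions.DEFAULT);
    DatafeedConfig createdDatafeed = putDatafeedResponse.getResponse();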
-    /**
-     * Updates a Machine Learning Datafeed
-     * <p>
-     * For additional info
-     * see ML Update datafeed documentation
-     *
-     * @param request The UpdateDatafeedRequest containing the {@link org.elasticsearch.client.ml.datafeed.DatafeedUpdate} settings
-     * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
-     * @return PutDatafeedResponse with enclosed, updated {@link org.elasticsearch.client.ml.datafeed.DatafeedConfig} object
-     * @throws IOException when there is a serialization issue sending the request or receiving the response
-     */
-    public PutDatafeedResponse updateDatafeed(UpdateDatafeedRequest request, RequestOptions options) throws IOException {
-        return restHighLevelClient.performRequestAndParseEntity(
-            request,
-            MLRequestConverters::updateDatafeed,
-            options,
-            PutDatafeedResponse::fromXContent,
-            Collections.emptySet()
-        );
-    }
-
-    /**
-     * Updates a Machine Learning Datafeed asynchronously and notifies listener on completion
-     * <p>
-     * For additional info
-     * see ML Update datafeed documentation
-     *
-     * @param request The request containing the {@link org.elasticsearch.client.ml.datafeed.DatafeedUpdate} settings
-     * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
-     * @param listener Listener to be notified upon request completion
-     * @return cancellable that may be used to cancel the request
-     */
-    public Cancellable updateDatafeedAsync(
-        UpdateDatafeedRequest request,
-        RequestOptions options,
-        ActionListener<PutDatafeedResponse> listener
-    ) {
-        return restHighLevelClient.performRequestAsyncAndParseEntity(
-            request,
-            MLRequestConverters::updateDatafeed,
-            options,
-            PutDatafeedResponse::fromXContent,
-            listener,
-            Collections.emptySet()
-        );
-    }
-
-    /**
-     * Gets configuration info for one or more Machine Learning datafeeds.
-     * <p>
-     * For additional info
-     * see ML GET datafeed documentation
-     *
-     * @param request {@link GetDatafeedRequest} Request containing a list of datafeedId(s) and additional options
-     * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
-     * @return {@link GetDatafeedResponse} response object containing
-     * the {@link org.elasticsearch.client.ml.datafeed.DatafeedConfig} objects and the number of datafeeds found
-     * @throws IOException when there is a serialization issue sending the request or receiving the response
-     */
-    public GetDatafeedResponse getDatafeed(GetDatafeedRequest request, RequestOptions options) throws IOException {
-        return restHighLevelClient.performRequestAndParseEntity(
-            request,
-            MLRequestConverters::getDatafeed,
-            options,
-            GetDatafeedResponse::fromXContent,
-            Collections.emptySet()
-        );
-    }
-
-    /**
-     * Gets configuration info for one or more Machine Learning datafeeds, asynchronously.
-     * <p>
-     * For additional info
-     * see ML GET datafeed documentation
-     *
-     * @param request {@link GetDatafeedRequest} Request containing a list of datafeedId(s) and additional options
-     * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
-     * @param listener Listener to be notified with {@link GetDatafeedResponse} upon request completion
-     * @return cancellable that may be used to cancel the request
-     */
-    public Cancellable getDatafeedAsync(GetDatafeedRequest request, RequestOptions options, ActionListener<GetDatafeedResponse> listener) {
-        return restHighLevelClient.performRequestAsyncAndParseEntity(
-            request,
-            MLRequestConverters::getDatafeed,
-            options,
-            GetDatafeedResponse::fromXContent,
-            listener,
-            Collections.emptySet()
-        );
-    }
-
-    /**
-     * Deletes the given Machine Learning Datafeed
-     * <p>
-     * For additional info
-     * see ML Delete Datafeed documentation
-     *
-     * @param request The request to delete the datafeed
-     * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
-     * @return action acknowledgement
-     * @throws IOException when there is a serialization issue sending the request or receiving the response
-     */
-    public AcknowledgedResponse deleteDatafeed(DeleteDatafeedRequest request, RequestOptions options) throws IOException {
-        return restHighLevelClient.performRequestAndParseEntity(
-            request,
-            MLRequestConverters::deleteDatafeed,
-            options,
-            AcknowledgedResponse::fromXContent,
-            Collections.emptySet()
-        );
-    }
-
-    /**
-     * Deletes the given Machine Learning Datafeed asynchronously and notifies the listener on completion
-     * <p>
-     * For additional info
-     * see ML Delete Datafeed documentation
-     *
-     * @param request The request to delete the datafeed
-     * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
-     * @param listener Listener to be notified upon request completion
-     * @return cancellable that may be used to cancel the request
-     */
-    public Cancellable deleteDatafeedAsync(
-        DeleteDatafeedRequest request,
-        RequestOptions options,
-        ActionListener<AcknowledgedResponse> listener
-    ) {
-        return restHighLevelClient.performRequestAsyncAndParseEntity(
-            request,
-            MLRequestConverters::deleteDatafeed,
-            options,
-            AcknowledgedResponse::fromXContent,
-            listener,
-            Collections.emptySet()
-        );
-    }
-
-    /**
-     * Starts the given Machine Learning Datafeed
-     * <p>
-     * For additional info
-     * see ML Start Datafeed documentation
-     *
-     * @param request The request to start the datafeed
-     * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
-     * @return action acknowledgement
-     * @throws IOException when there is a serialization issue sending the request or receiving the response
-     */
-    public StartDatafeedResponse startDatafeed(StartDatafeedRequest request, RequestOptions options) throws IOException {
-        return restHighLevelClient.performRequestAndParseEntity(
-            request,
-            MLRequestConverters::startDatafeed,
-            options,
-            StartDatafeedResponse::fromXContent,
-            Collections.emptySet()
-        );
-    }
-
-    /**
-     * Starts the given Machine Learning Datafeed asynchronously and notifies the listener on completion
-     * <p>
-     * For additional info
-     * see ML Start Datafeed documentation
-     *
-     * @param request The request to start the datafeed
-     * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
-     * @param listener Listener to be notified upon request completion
-     * @return cancellable that may be used to cancel the request
-     */
-    public Cancellable startDatafeedAsync(
-        StartDatafeedRequest request,
-        RequestOptions options,
-        ActionListener<StartDatafeedResponse> listener
-    ) {
-        return restHighLevelClient.performRequestAsyncAndParseEntity(
-            request,
-            MLRequestConverters::startDatafeed,
-            options,
-            StartDatafeedResponse::fromXContent,
-            listener,
-            Collections.emptySet()
-        );
-    }
-
-    /**
-     * Stops the given Machine Learning Datafeed
-     * <p>
-     * For additional info
-     * see ML Stop Datafeed documentation
-     *
-     * @param request The request to stop the datafeed
-     * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
-     * @return action acknowledgement
-     * @throws IOException when there is a serialization issue sending the request or receiving the response
-     */
-    public StopDatafeedResponse stopDatafeed(StopDatafeedRequest request, RequestOptions options) throws IOException {
-        return restHighLevelClient.performRequestAndParseEntity(
-            request,
-            MLRequestConverters::stopDatafeed,
-            options,
-            StopDatafeedResponse::fromXContent,
-            Collections.emptySet()
-        );
-    }
-
-    /**
-     * Stops the given Machine Learning Datafeed asynchronously and notifies the listener on completion
-     * <p>
-     * For additional info
-     * see ML Stop Datafeed documentation
-     *
-     * @param request The request to stop the datafeed
-     * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
-     * @param listener Listener to be notified upon request completion
-     * @return cancellable that may be used to cancel the request
-     */
-    public Cancellable stopDatafeedAsync(
-        StopDatafeedRequest request,
-        RequestOptions options,
-        ActionListener<StopDatafeedResponse> listener
-    ) {
-        return restHighLevelClient.performRequestAsyncAndParseEntity(
-            request,
-            MLRequestConverters::stopDatafeed,
-            options,
-            StopDatafeedResponse::fromXContent,
-            listener,
-            Collections.emptySet()
-        );
-    }
-
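Starting and stopping the datafeed created above, sketched under the same assumptions:

    StartDatafeedRequest startRequest = new StartDatafeedRequest("my-datafeed");
    startRequest.setStart("2022-01-01T00:00:00Z");   // optional: only analyze data from this point on
    StartDatafeedResponse startResponse = client.machineLearning().startDatafeed(startRequest, RequestOptions.DEFAULT);
    boolean started = startResponse.isStarted();

    StopDatafeedRequest stopRequest = new StopDatafeedRequest("my-datafeed");
    StopDatafeedResponse stopResponse = client.machineLearning().stopDatafeed(stopRequest, RequestOptions.DEFAULT);
    boolean stopped = stopResponse.isStopped();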
-    /**
-     * Gets statistics for one or more Machine Learning datafeeds
-     * <p>
-     * For additional info
-     * see Get datafeed stats docs
-     *
-     * @param request {@link GetDatafeedStatsRequest} Request containing a list of datafeedId(s) and additional options
-     * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
-     * @return {@link GetDatafeedStatsResponse} response object containing
-     * the {@link org.elasticsearch.client.ml.datafeed.DatafeedStats} objects and the number of datafeeds found
-     * @throws IOException when there is a serialization issue sending the request or receiving the response
-     */
-    public GetDatafeedStatsResponse getDatafeedStats(GetDatafeedStatsRequest request, RequestOptions options) throws IOException {
-        return restHighLevelClient.performRequestAndParseEntity(
-            request,
-            MLRequestConverters::getDatafeedStats,
-            options,
-            GetDatafeedStatsResponse::fromXContent,
-            Collections.emptySet()
-        );
-    }
-
-    /**
-     * Previews the given Machine Learning Datafeed
-     * <p>
-     * For additional info
-     * see ML Preview Datafeed documentation
-     *
-     * @param request The request to preview the datafeed
-     * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
-     * @return {@link PreviewDatafeedResponse} object containing a {@link org.elasticsearch.common.bytes.BytesReference} of the data in
-     * JSON format
-     * @throws IOException when there is a serialization issue sending the request or receiving the response
-     */
-    public PreviewDatafeedResponse previewDatafeed(PreviewDatafeedRequest request, RequestOptions options) throws IOException {
-        return restHighLevelClient.performRequestAndParseEntity(
-            request,
-            MLRequestConverters::previewDatafeed,
-            options,
-            PreviewDatafeedResponse::fromXContent,
-            Collections.emptySet()
-        );
-    }
-
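Stats and preview are the usual way to check a datafeed before trusting it. A sketch; the accessor names (`datafeedStats()`, `getDataList()`) are assumptions based on the response classes above:

    GetDatafeedStatsRequest statsRequest = new GetDatafeedStatsRequest("my-datafeed");
    GetDatafeedStatsResponse statsResponse = client.machineLearning().getDatafeedStats(statsRequest, RequestOptions.DEFAULT);
    List<DatafeedStats> stats = statsResponse.datafeedStats();

    PreviewDatafeedRequest previewRequest = new PreviewDatafeedRequest("my-datafeed");
    PreviewDatafeedResponse previewResponse = client.machineLearning().previewDatafeed(previewRequest, RequestOptions.DEFAULT);
    List<Map<String, Object>> preview = previewResponse.getDataList();   // semi-parsed view of the JSON preview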
-    /**
-     * Gets statistics for one or more Machine Learning datafeeds, asynchronously.
-     * <p>
-     * For additional info
-     * see Get datafeed stats docs
-     *
-     * @param request {@link GetDatafeedStatsRequest} Request containing a list of datafeedId(s) and additional options
-     * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
-     * @param listener Listener to be notified with {@link GetDatafeedStatsResponse} upon request completion
-     * @return cancellable that may be used to cancel the request
-     */
-    public Cancellable getDatafeedStatsAsync(
-        GetDatafeedStatsRequest request,
-        RequestOptions options,
-        ActionListener<GetDatafeedStatsResponse> listener
-    ) {
-        return restHighLevelClient.performRequestAsyncAndParseEntity(
-            request,
-            MLRequestConverters::getDatafeedStats,
-            options,
-            GetDatafeedStatsResponse::fromXContent,
-            listener,
-            Collections.emptySet()
-        );
-    }
-
-    /**
-     * Previews the given Machine Learning Datafeed asynchronously and notifies the listener on completion
-     * <p>
-     * For additional info
-     * see ML Preview Datafeed documentation
-     *
-     * @param request The request to preview the datafeed
-     * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
-     * @param listener Listener to be notified upon request completion
-     * @return cancellable that may be used to cancel the request
-     */
-    public Cancellable previewDatafeedAsync(
-        PreviewDatafeedRequest request,
-        RequestOptions options,
-        ActionListener<PreviewDatafeedResponse> listener
-    ) {
-        return restHighLevelClient.performRequestAsyncAndParseEntity(
-            request,
-            MLRequestConverters::previewDatafeed,
-            options,
-            PreviewDatafeedResponse::fromXContent,
-            listener,
-            Collections.emptySet()
-        );
-    }
-
-    /**
-     * Updates a Machine Learning {@link org.elasticsearch.client.ml.job.config.Job}
-     * <p>
-     * For additional info
-     * see ML Update Job Documentation
-     *
-     * @param request the {@link UpdateJobRequest} object enclosing the desired updates
-     * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
-     * @return a PutJobResponse object containing the updated job object
-     * @throws IOException when there is a serialization issue sending the request or receiving the response
-     */
-    public PutJobResponse updateJob(UpdateJobRequest request, RequestOptions options) throws IOException {
-        return restHighLevelClient.performRequestAndParseEntity(
-            request,
-            MLRequestConverters::updateJob,
-            options,
-            PutJobResponse::fromXContent,
-            Collections.emptySet()
-        );
-    }
-
-    /**
-     * Updates a Machine Learning {@link org.elasticsearch.client.ml.job.config.Job} asynchronously
-     * <p>
-     * For additional info
-     * see ML Update Job Documentation
-     *
-     * @param request the {@link UpdateJobRequest} object enclosing the desired updates
-     * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
-     * @param listener Listener to be notified upon request completion
-     * @return cancellable that may be used to cancel the request
-     */
-    public Cancellable updateJobAsync(UpdateJobRequest request, RequestOptions options, ActionListener<PutJobResponse> listener) {
-        return restHighLevelClient.performRequestAsyncAndParseEntity(
-            request,
-            MLRequestConverters::updateJob,
-            options,
-            PutJobResponse::fromXContent,
-            listener,
-            Collections.emptySet()
-        );
-    }
-
-    /**
-     * Gets the buckets for a Machine Learning Job.
-     * <p>
-     * For additional info
-     * see ML GET buckets documentation
-     *
-     * @param request The request
-     * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
-     */
-    public GetBucketsResponse getBuckets(GetBucketsRequest request, RequestOptions options) throws IOException {
-        return restHighLevelClient.performRequestAndParseEntity(
-            request,
-            MLRequestConverters::getBuckets,
-            options,
-            GetBucketsResponse::fromXContent,
-            Collections.emptySet()
-        );
-    }
-
-    /**
-     * Gets the buckets for a Machine Learning Job, notifies listener once the requested buckets are retrieved.
-     * <p>
-     * For additional info
-     * see ML GET buckets documentation
-     *
-     * @param request The request
-     * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
-     * @param listener Listener to be notified upon request completion
-     * @return cancellable that may be used to cancel the request
-     */
-    public Cancellable getBucketsAsync(GetBucketsRequest request, RequestOptions options, ActionListener<GetBucketsResponse> listener) {
-        return restHighLevelClient.performRequestAsyncAndParseEntity(
-            request,
-            MLRequestConverters::getBuckets,
-            options,
-            GetBucketsResponse::fromXContent,
-            listener,
-            Collections.emptySet()
-        );
-    }
-
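The result queries share a pattern: a request scoped to a job id, optional score filters, and paging via PageParams (org.elasticsearch.client.ml.job.util). A sketch for buckets, same assumptions as before:

    GetBucketsRequest bucketsRequest = new GetBucketsRequest("my-job");
    bucketsRequest.setAnomalyScore(75.0);                  // only buckets scoring at least 75
    bucketsRequest.setPageParams(new PageParams(0, 100));  // from=0, size=100
    GetBucketsResponse bucketsResponse = client.machineLearning().getBuckets(bucketsRequest, RequestOptions.DEFAULT);
    long count = bucketsResponse.count();
    List<Bucket> buckets = bucketsResponse.buckets();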
-    /**
-     * Gets the categories for a Machine Learning Job.
-     * <p>
-     * For additional info
-     * see ML GET categories documentation
-     *
-     * @param request The request
-     * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
-     * @throws IOException when there is a serialization issue sending the request or receiving the response
-     */
-    public GetCategoriesResponse getCategories(GetCategoriesRequest request, RequestOptions options) throws IOException {
-        return restHighLevelClient.performRequestAndParseEntity(
-            request,
-            MLRequestConverters::getCategories,
-            options,
-            GetCategoriesResponse::fromXContent,
-            Collections.emptySet()
-        );
-    }
-
-    /**
-     * Gets the categories for a Machine Learning Job, notifies listener once the requested categories are retrieved.
-     * <p>
-     * For additional info
-     * see ML GET categories documentation
-     *
-     * @param request The request
-     * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
-     * @param listener Listener to be notified upon request completion
-     * @return cancellable that may be used to cancel the request
-     */
-    public Cancellable getCategoriesAsync(
-        GetCategoriesRequest request,
-        RequestOptions options,
-        ActionListener<GetCategoriesResponse> listener
-    ) {
-        return restHighLevelClient.performRequestAsyncAndParseEntity(
-            request,
-            MLRequestConverters::getCategories,
-            options,
-            GetCategoriesResponse::fromXContent,
-            listener,
-            Collections.emptySet()
-        );
-    }
-
-    /**
-     * Gets the snapshots for a Machine Learning Job.
-     * <p>
-     * For additional info
-     * see ML GET model snapshots documentation
-     *
-     * @param request The request
-     * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
-     * @throws IOException when there is a serialization issue sending the request or receiving the response
-     */
-    public GetModelSnapshotsResponse getModelSnapshots(GetModelSnapshotsRequest request, RequestOptions options) throws IOException {
-        return restHighLevelClient.performRequestAndParseEntity(
-            request,
-            MLRequestConverters::getModelSnapshots,
-            options,
-            GetModelSnapshotsResponse::fromXContent,
-            Collections.emptySet()
-        );
-    }
-
-    /**
-     * Gets the snapshots for a Machine Learning Job, notifies listener once the requested snapshots are retrieved.
-     * <p>
-     * For additional info
-     * see ML GET model snapshots documentation
-     *
-     * @param request The request
-     * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
-     * @param listener Listener to be notified upon request completion
-     * @return cancellable that may be used to cancel the request
-     */
-    public Cancellable getModelSnapshotsAsync(
-        GetModelSnapshotsRequest request,
-        RequestOptions options,
-        ActionListener<GetModelSnapshotsResponse> listener
-    ) {
-        return restHighLevelClient.performRequestAsyncAndParseEntity(
-            request,
-            MLRequestConverters::getModelSnapshots,
-            options,
-            GetModelSnapshotsResponse::fromXContent,
-            listener,
-            Collections.emptySet()
-        );
-    }
-
-    /**
-     * Updates a snapshot for a Machine Learning Job.
-     * <p>
-     * For additional info
-     * see ML UPDATE model snapshots documentation
-     *
-     * @param request The request
-     * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
-     * @throws IOException when there is a serialization issue sending the request or receiving the response
-     */
-    public UpdateModelSnapshotResponse updateModelSnapshot(UpdateModelSnapshotRequest request, RequestOptions options) throws IOException {
-        return restHighLevelClient.performRequestAndParseEntity(
-            request,
-            MLRequestConverters::updateModelSnapshot,
-            options,
-            UpdateModelSnapshotResponse::fromXContent,
-            Collections.emptySet()
-        );
-    }
-
-    /**
-     * Updates a snapshot for a Machine Learning Job, notifies the listener once the snapshot is updated.
-     * <p>
-     * For additional info
-     * see ML UPDATE model snapshots documentation
-     *
-     * @param request The request
-     * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
-     * @param listener Listener to be notified upon request completion
-     * @return cancellable that may be used to cancel the request
-     */
-    public Cancellable updateModelSnapshotAsync(
-        UpdateModelSnapshotRequest request,
-        RequestOptions options,
-        ActionListener<UpdateModelSnapshotResponse> listener
-    ) {
-        return restHighLevelClient.performRequestAsyncAndParseEntity(
-            request,
-            MLRequestConverters::updateModelSnapshot,
-            options,
-            UpdateModelSnapshotResponse::fromXContent,
-            listener,
-            Collections.emptySet()
-        );
-    }
-
-    /**
-     * Upgrades a snapshot for a Machine Learning Job to the current major version.
-     * <p>
-     * For additional info
-     * see ML Upgrade job snapshots documentation
-     *
-     * @param request The request
-     * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
-     * @throws IOException when there is a serialization issue sending the request or receiving the response
-     */
-    public UpgradeJobModelSnapshotResponse upgradeJobSnapshot(UpgradeJobModelSnapshotRequest request, RequestOptions options)
-        throws IOException {
-        return restHighLevelClient.performRequestAndParseEntity(
-            request,
-            MLRequestConverters::upgradeJobSnapshot,
-            options,
-            UpgradeJobModelSnapshotResponse::fromXContent,
-            Collections.emptySet()
-        );
-    }
-
-    /**
-     * Upgrades a snapshot for a Machine Learning Job to the current major version,
-     * notifies listener once the upgrade has started.
-     * <p>
-     * For additional info
-     * see ML Upgrade job snapshots documentation
-     *
-     * @param request The request
-     * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
-     * @param listener Listener to be notified upon request completion
-     * @return cancellable that may be used to cancel the request
-     */
-    public Cancellable upgradeJobSnapshotAsync(
-        UpgradeJobModelSnapshotRequest request,
-        RequestOptions options,
-        ActionListener<UpgradeJobModelSnapshotResponse> listener
-    ) {
-        return restHighLevelClient.performRequestAsyncAndParseEntity(
-            request,
-            MLRequestConverters::upgradeJobSnapshot,
-            options,
-            UpgradeJobModelSnapshotResponse::fromXContent,
-            listener,
-            Collections.emptySet()
-        );
-    }
-
-    /**
-     * Gets overall buckets for a set of Machine Learning Jobs.
-     * <p>
-     * For additional info
-     * see ML GET overall buckets documentation
-     *
-     * @param request The request
-     * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
-     */
-    public GetOverallBucketsResponse getOverallBuckets(GetOverallBucketsRequest request, RequestOptions options) throws IOException {
-        return restHighLevelClient.performRequestAndParseEntity(
-            request,
-            MLRequestConverters::getOverallBuckets,
-            options,
-            GetOverallBucketsResponse::fromXContent,
-            Collections.emptySet()
-        );
-    }
-
-    /**
-     * Gets overall buckets for a set of Machine Learning Jobs, notifies listener once the requested buckets are retrieved.
-     * <p>
-     * For additional info
-     * see ML GET overall buckets documentation
-     *
-     * @param request The request
-     * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
-     * @param listener Listener to be notified upon request completion
-     * @return cancellable that may be used to cancel the request
-     */
-    public Cancellable getOverallBucketsAsync(
-        GetOverallBucketsRequest request,
-        RequestOptions options,
-        ActionListener<GetOverallBucketsResponse> listener
-    ) {
-        return restHighLevelClient.performRequestAsyncAndParseEntity(
-            request,
-            MLRequestConverters::getOverallBuckets,
-            options,
-            GetOverallBucketsResponse::fromXContent,
-            listener,
-            Collections.emptySet()
-        );
-    }
-
-    /**
-     * Gets the records for a Machine Learning Job.
-     * <p>
-     * For additional info
-     * see ML GET records documentation
-     *
-     * @param request the request
-     * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
-     */
-    public GetRecordsResponse getRecords(GetRecordsRequest request, RequestOptions options) throws IOException {
-        return restHighLevelClient.performRequestAndParseEntity(
-            request,
-            MLRequestConverters::getRecords,
-            options,
-            GetRecordsResponse::fromXContent,
-            Collections.emptySet()
-        );
-    }
-
-    /**
-     * Gets the records for a Machine Learning Job, notifies listener once the requested records are retrieved.
-     * <p>
-     * For additional info
-     * see ML GET records documentation
-     *
-     * @param request the request
-     * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
-     * @param listener Listener to be notified upon request completion
-     * @return cancellable that may be used to cancel the request
-     */
-    public Cancellable getRecordsAsync(GetRecordsRequest request, RequestOptions options, ActionListener<GetRecordsResponse> listener) {
-        return restHighLevelClient.performRequestAsyncAndParseEntity(
-            request,
-            MLRequestConverters::getRecords,
-            options,
-            GetRecordsResponse::fromXContent,
-            listener,
-            Collections.emptySet()
-        );
-    }
-
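Records follow the same query pattern as buckets; a sketch under the same assumptions:

    GetRecordsRequest recordsRequest = new GetRecordsRequest("my-job");
    recordsRequest.setRecordScore(80.0);   // only records scoring at least 80
    GetRecordsResponse recordsResponse = client.machineLearning().getRecords(recordsRequest, RequestOptions.DEFAULT);
    List<AnomalyRecord> records = recordsResponse.records();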
-    /**
-     * Sends data to an anomaly detection job for analysis.
-     * <p>
-     * NOTE: The job must have a state of open to receive and process the data.
-     * <p>
-     * For additional info
-     * see ML POST Data documentation
-     *
-     * @param request PostDataRequest containing the data to post and some additional options
-     * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
-     * @return response containing operational progress about the job
-     * @throws IOException when there is a serialization issue sending the request or receiving the response
-     */
-    public PostDataResponse postData(PostDataRequest request, RequestOptions options) throws IOException {
-        return restHighLevelClient.performRequestAndParseEntity(
-            request,
-            MLRequestConverters::postData,
-            options,
-            PostDataResponse::fromXContent,
-            Collections.emptySet()
-        );
-    }
-
-    /**
-     * Sends data to an anomaly detection job for analysis, asynchronously
-     * <p>
-     * NOTE: The job must have a state of open to receive and process the data.
-     * <p>
-     * For additional info
-     * see ML POST Data documentation
-     *
-     * @param request PostDataRequest containing the data to post and some additional options
-     * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
-     * @param listener Listener to be notified upon request completion
-     * @return cancellable that may be used to cancel the request
-     */
-    public Cancellable postDataAsync(PostDataRequest request, RequestOptions options, ActionListener<PostDataResponse> listener) {
-        return restHighLevelClient.performRequestAsyncAndParseEntity(
-            request,
-            MLRequestConverters::postData,
-            options,
-            PostDataResponse::fromXContent,
-            listener,
-            Collections.emptySet()
-        );
-    }
-
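Posting data directly (as opposed to via a datafeed) goes through PostDataRequest.JsonBuilder; a sketch, where the field names are hypothetical and must match the job's data description:

    PostDataRequest.JsonBuilder jsonBuilder = new PostDataRequest.JsonBuilder();
    Map<String, Object> doc = new HashMap<>();
    doc.put("timestamp", 1643673600000L);
    doc.put("total", 42);
    jsonBuilder.addDoc(doc);
    PostDataRequest postDataRequest = new PostDataRequest("my-job", jsonBuilder);   // the job must be open
    PostDataResponse postDataResponse = client.machineLearning().postData(postDataRequest, RequestOptions.DEFAULT);
    DataCounts counts = postDataResponse.getDataCounts();   // e.g. counts.getProcessedRecordCount()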
-    /**
-     * Gets a single or multiple calendars.
-     * <p>
-     * For additional info
-     * see ML GET calendars documentation
-     *
-     * @param request The calendars request
-     * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
-     * @return {@link GetCalendarsResponse} response object containing the {@link org.elasticsearch.client.ml.calendars.Calendar}
-     * objects and the number of calendars found
-     */
-    public GetCalendarsResponse getCalendars(GetCalendarsRequest request, RequestOptions options) throws IOException {
-        return restHighLevelClient.performRequestAndParseEntity(
-            request,
-            MLRequestConverters::getCalendars,
-            options,
-            GetCalendarsResponse::fromXContent,
-            Collections.emptySet()
-        );
-    }
-
-    /**
-     * Gets a single or multiple calendars, notifies listener once the requested calendars are retrieved.
-     * <p>
-     * For additional info
-     * see ML GET calendars documentation
-     *
-     * @param request The calendars request
-     * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
-     * @param listener Listener to be notified upon request completion
-     * @return cancellable that may be used to cancel the request
-     */
-    public Cancellable getCalendarsAsync(
-        GetCalendarsRequest request,
-        RequestOptions options,
-        ActionListener<GetCalendarsResponse> listener
-    ) {
-        return restHighLevelClient.performRequestAsyncAndParseEntity(
-            request,
-            MLRequestConverters::getCalendars,
-            options,
-            GetCalendarsResponse::fromXContent,
-            listener,
-            Collections.emptySet()
-        );
-    }
-
-    /**
-     * Gets the influencers for a Machine Learning Job.
-     * <p>
-     * For additional info
-     * see ML GET influencers documentation
-     *
-     * @param request the request
-     * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
-     */
-    public GetInfluencersResponse getInfluencers(GetInfluencersRequest request, RequestOptions options) throws IOException {
-        return restHighLevelClient.performRequestAndParseEntity(
-            request,
-            MLRequestConverters::getInfluencers,
-            options,
-            GetInfluencersResponse::fromXContent,
-            Collections.emptySet()
-        );
-    }
-
-    /**
-     * Gets the influencers for a Machine Learning Job, notifies listener once the requested influencers are retrieved.
-     * <p>
-     * For additional info
-     * see ML GET influencers documentation
-     *
-     * @param request the request
-     * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
-     * @param listener Listener to be notified upon request completion
-     * @return cancellable that may be used to cancel the request
-     */
-    public Cancellable getInfluencersAsync(
-        GetInfluencersRequest request,
-        RequestOptions options,
-        ActionListener<GetInfluencersResponse> listener
-    ) {
-        return restHighLevelClient.performRequestAsyncAndParseEntity(
-            request,
-            MLRequestConverters::getInfluencers,
-            options,
-            GetInfluencersResponse::fromXContent,
-            listener,
-            Collections.emptySet()
-        );
-    }
-
-    /**
-     * Create a new machine learning calendar
-     * <p>
-     * For additional info
-     * see ML create calendar documentation
-     *
-     * @param request The request
-     * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
-     * @return The {@link PutCalendarResponse} containing the calendar
-     * @throws IOException when there is a serialization issue sending the request or receiving the response
-     */
-    public PutCalendarResponse putCalendar(PutCalendarRequest request, RequestOptions options) throws IOException {
-        return restHighLevelClient.performRequestAndParseEntity(
-            request,
-            MLRequestConverters::putCalendar,
-            options,
-            PutCalendarResponse::fromXContent,
-            Collections.emptySet()
-        );
-    }
-
-    /**
-     * Create a new machine learning calendar, notifies listener with the created calendar
-     * <p>
-     * For additional info
-     * see ML create calendar documentation
-     *
-     * @param request The request
-     * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
-     * @param listener Listener to be notified upon request completion
-     * @return cancellable that may be used to cancel the request
-     */
-    public Cancellable putCalendarAsync(PutCalendarRequest request, RequestOptions options, ActionListener<PutCalendarResponse> listener) {
-        return restHighLevelClient.performRequestAsyncAndParseEntity(
-            request,
-            MLRequestConverters::putCalendar,
-            options,
-            PutCalendarResponse::fromXContent,
-            listener,
-            Collections.emptySet()
-        );
-    }
-
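A calendar groups jobs so scheduled events can suppress results for all of them at once. A sketch with hypothetical ids:

    Calendar calendar = new Calendar("planned-outages", Collections.singletonList("my-job"), "Known maintenance windows");
    PutCalendarResponse putCalendarResponse = client.machineLearning()
        .putCalendar(new PutCalendarRequest(calendar), RequestOptions.DEFAULT);
    Calendar created = putCalendarResponse.getCalendar();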
-    /**
-     * Adds Machine Learning Job(s) to a calendar
-     * <p>
-     * For additional info
-     * see ML Put calendar job documentation
-     *
-     * @param request The request
-     * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
-     * @return The {@link PutCalendarResponse} containing the updated calendar
-     * @throws IOException when there is a serialization issue sending the request or receiving the response
-     */
-    public PutCalendarResponse putCalendarJob(PutCalendarJobRequest request, RequestOptions options) throws IOException {
-        return restHighLevelClient.performRequestAndParseEntity(
-            request,
-            MLRequestConverters::putCalendarJob,
-            options,
-            PutCalendarResponse::fromXContent,
-            Collections.emptySet()
-        );
-    }
-
-    /**
-     * Adds Machine Learning Job(s) to a calendar, notifies listener when completed
-     * <p>
-     * For additional info
-     * see ML Put calendar job documentation
-     *
-     * @param request The request
-     * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
-     * @param listener Listener to be notified upon request completion
-     * @return cancellable that may be used to cancel the request
-     */
-    public Cancellable putCalendarJobAsync(
-        PutCalendarJobRequest request,
-        RequestOptions options,
-        ActionListener<PutCalendarResponse> listener
-    ) {
-        return restHighLevelClient.performRequestAsyncAndParseEntity(
-            request,
-            MLRequestConverters::putCalendarJob,
-            options,
-            PutCalendarResponse::fromXContent,
-            listener,
-            Collections.emptySet()
-        );
-    }
-
-    /**
-     * Removes Machine Learning Job(s) from a calendar
-     * <p>
-     * For additional info
-     * see ML Delete calendar job documentation
-     *
-     * @param request The request
-     * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
-     * @return The {@link PutCalendarResponse} containing the updated calendar
-     * @throws IOException when there is a serialization issue sending the request or receiving the response
-     */
-    public PutCalendarResponse deleteCalendarJob(DeleteCalendarJobRequest request, RequestOptions options) throws IOException {
-        return restHighLevelClient.performRequestAndParseEntity(
-            request,
-            MLRequestConverters::deleteCalendarJob,
-            options,
-            PutCalendarResponse::fromXContent,
-            Collections.emptySet()
-        );
-    }
-
-    /**
-     * Removes Machine Learning Job(s) from a calendar, notifies listener when completed
-     * <p>
-     * For additional info
-     * see ML Delete calendar job documentation
-     *
-     * @param request The request
-     * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
-     * @param listener Listener to be notified upon request completion
-     * @return cancellable that may be used to cancel the request
-     */
-    public Cancellable deleteCalendarJobAsync(
-        DeleteCalendarJobRequest request,
-        RequestOptions options,
-        ActionListener<PutCalendarResponse> listener
-    ) {
-        return restHighLevelClient.performRequestAsyncAndParseEntity(
-            request,
-            MLRequestConverters::deleteCalendarJob,
-            options,
-            PutCalendarResponse::fromXContent,
-            listener,
-            Collections.emptySet()
-        );
-    }
-
-    /**
-     * Deletes the given Machine Learning Calendar
-     * <p>
-     * For additional info see
-     * ML Delete calendar documentation
-     *
-     * @param request The request to delete the calendar
-     * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
-     * @return action acknowledgement
-     * @throws IOException when there is a serialization issue sending the request or receiving the response
-     */
-    public AcknowledgedResponse deleteCalendar(DeleteCalendarRequest request, RequestOptions options) throws IOException {
-        return restHighLevelClient.performRequestAndParseEntity(
-            request,
-            MLRequestConverters::deleteCalendar,
-            options,
-            AcknowledgedResponse::fromXContent,
-            Collections.emptySet()
-        );
-    }
-
-    /**
-     * Deletes the given Machine Learning Calendar asynchronously and notifies the listener on completion
-     * <p>
-     * For additional info see
-     * ML Delete calendar documentation
-     *
-     * @param request The request to delete the calendar
-     * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
-     * @param listener Listener to be notified upon request completion
-     * @return cancellable that may be used to cancel the request
-     */
-    public Cancellable deleteCalendarAsync(
-        DeleteCalendarRequest request,
-        RequestOptions options,
-        ActionListener<AcknowledgedResponse> listener
-    ) {
-        return restHighLevelClient.performRequestAsyncAndParseEntity(
-            request,
-            MLRequestConverters::deleteCalendar,
-            options,
-            AcknowledgedResponse::fromXContent,
-            listener,
-            Collections.emptySet()
-        );
-    }
-
-    /**
-     * Gets the events for a machine learning calendar
-     * <p>
-     * For additional info
-     * see GET Calendar Events API
-     *
-     * @param request The request
-     * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
-     * @return The {@link GetCalendarEventsResponse} containing the scheduled events
-     * @throws IOException when there is a serialization issue sending the request or receiving the response
-     */
-    public GetCalendarEventsResponse getCalendarEvents(GetCalendarEventsRequest request, RequestOptions options) throws IOException {
-        return restHighLevelClient.performRequestAndParseEntity(
-            request,
-            MLRequestConverters::getCalendarEvents,
-            options,
-            GetCalendarEventsResponse::fromXContent,
-            Collections.emptySet()
-        );
-    }
-
-    /**
-     * Gets the events for a machine learning calendar asynchronously, notifies the listener on completion
-     * <p>
-     * For additional info
-     * see GET Calendar Events API
-     *
-     * @param request The request
-     * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
-     * @param listener Listener to be notified upon request completion
-     * @return cancellable that may be used to cancel the request
-     */
-    public Cancellable getCalendarEventsAsync(
-        GetCalendarEventsRequest request,
-        RequestOptions options,
-        ActionListener<GetCalendarEventsResponse> listener
-    ) {
-        return restHighLevelClient.performRequestAsyncAndParseEntity(
-            request,
-            MLRequestConverters::getCalendarEvents,
-            options,
-            GetCalendarEventsResponse::fromXContent,
-            listener,
-            Collections.emptySet()
-        );
-    }
-
-    /**
-     * Creates new events for a machine learning calendar
-     * <p>
-     * For additional info
-     * see Add Events to Calendar API
-     *
-     * @param request The request
-     * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
-     * @return The {@link PostCalendarEventResponse} containing the scheduled events
-     * @throws IOException when there is a serialization issue sending the request or receiving the response
-     */
-    public PostCalendarEventResponse postCalendarEvent(PostCalendarEventRequest request, RequestOptions options) throws IOException {
-        return restHighLevelClient.performRequestAndParseEntity(
-            request,
-            MLRequestConverters::postCalendarEvents,
-            options,
-            PostCalendarEventResponse::fromXContent,
-            Collections.emptySet()
-        );
-    }
-
-    /**
-     * Creates new events for a machine learning calendar asynchronously, notifies the listener on completion
-     * <p>
-     * For additional info
-     * see Add Events to Calendar API
-     *
-     * @param request The request
-     * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
-     * @param listener Listener to be notified upon request completion
-     * @return cancellable that may be used to cancel the request
-     */
-    public Cancellable postCalendarEventAsync(
-        PostCalendarEventRequest request,
-        RequestOptions options,
-        ActionListener<PostCalendarEventResponse> listener
-    ) {
-        return restHighLevelClient.performRequestAsyncAndParseEntity(
-            request,
-            MLRequestConverters::postCalendarEvents,
-            options,
-            PostCalendarEventResponse::fromXContent,
-            listener,
-            Collections.emptySet()
-        );
-    }
-
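Reading back the scheduled events of the calendar above; a sketch, where the `events()` accessor is an assumption in line with the other result responses:

    GetCalendarEventsRequest eventsRequest = new GetCalendarEventsRequest("planned-outages");
    GetCalendarEventsResponse eventsResponse = client.machineLearning().getCalendarEvents(eventsRequest, RequestOptions.DEFAULT);
    List<ScheduledEvent> events = eventsResponse.events();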
-    /**
-     * Removes a Scheduled Event from a calendar
-     * <p>
-     * For additional info
-     * see ML Delete calendar event documentation
-     *
-     * @param request The request
-     * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
-     * @return action acknowledgement
-     * @throws IOException when there is a serialization issue sending the request or receiving the response
-     */
-    public AcknowledgedResponse deleteCalendarEvent(DeleteCalendarEventRequest request, RequestOptions options) throws IOException {
-        return restHighLevelClient.performRequestAndParseEntity(
-            request,
-            MLRequestConverters::deleteCalendarEvent,
-            options,
-            AcknowledgedResponse::fromXContent,
-            Collections.emptySet()
-        );
-    }
-
-    /**
-     * Removes a Scheduled Event from a calendar, notifies listener when completed
-     * <p>
-     * For additional info
-     * see ML Delete calendar event documentation
-     *
-     * @param request The request
-     * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
-     * @param listener Listener to be notified upon request completion
-     * @return cancellable that may be used to cancel the request
-     */
-    public Cancellable deleteCalendarEventAsync(
-        DeleteCalendarEventRequest request,
-        RequestOptions options,
-        ActionListener<AcknowledgedResponse> listener
-    ) {
-        return restHighLevelClient.performRequestAsyncAndParseEntity(
-            request,
-            MLRequestConverters::deleteCalendarEvent,
-            options,
-            AcknowledgedResponse::fromXContent,
-            listener,
-            Collections.emptySet()
-        );
-    }
-
-    /**
-     * Creates a new Machine Learning Filter
-     * <p>
-     * For additional info
-     * see ML PUT Filter documentation
-     *
-     * @param request The PutFilterRequest containing the {@link org.elasticsearch.client.ml.job.config.MlFilter} settings
-     * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
-     * @return PutFilterResponse with enclosed {@link org.elasticsearch.client.ml.job.config.MlFilter} object
-     * @throws IOException when there is a serialization issue sending the request or receiving the response
-     */
-    public PutFilterResponse putFilter(PutFilterRequest request, RequestOptions options) throws IOException {
-        return restHighLevelClient.performRequestAndParseEntity(
-            request,
-            MLRequestConverters::putFilter,
-            options,
-            PutFilterResponse::fromXContent,
-            Collections.emptySet()
-        );
-    }
-
-    /**
-     * Creates a new Machine Learning Filter asynchronously and notifies listener on completion
-     * <p>
-     * For additional info
-     * see ML PUT Filter documentation
-     *
-     * @param request The request containing the {@link org.elasticsearch.client.ml.job.config.MlFilter} settings
-     * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
-     * @param listener Listener to be notified upon request completion
-     * @return cancellable that may be used to cancel the request
-     */
-    public Cancellable putFilterAsync(PutFilterRequest request, RequestOptions options, ActionListener<PutFilterResponse> listener) {
-        return restHighLevelClient.performRequestAsyncAndParseEntity(
-            request,
-            MLRequestConverters::putFilter,
-            options,
-            PutFilterResponse::fromXContent,
-            listener,
-            Collections.emptySet()
-        );
-    }
-
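Filters hold term lists that detector rules can reference. A minimal sketch with hypothetical items:

    MlFilter.Builder filterBuilder = MlFilter.builder("safe_domains")
        .setDescription("Domains known to be safe")
        .setItems("*.google.com", "wikipedia.org");
    PutFilterResponse putFilterResponse = client.machineLearning()
        .putFilter(new PutFilterRequest(filterBuilder.build()), RequestOptions.DEFAULT);
    MlFilter createdFilter = putFilterResponse.getResponse();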
    - * For additional info - * see ML GET Filter documentation - * - * @param request The request - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return GetFilterResponse with enclosed {@link org.elasticsearch.client.ml.job.config.MlFilter} objects - * @throws IOException when there is a serialization issue sending the request or receiving the response - */ - public GetFiltersResponse getFilter(GetFiltersRequest request, RequestOptions options) throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - request, - MLRequestConverters::getFilter, - options, - GetFiltersResponse::fromXContent, - Collections.emptySet() - ); - } - - /** - * Gets Machine Learning Filters asynchronously and notifies listener on completion - *

    - * For additional info - * see ML GET Filter documentation - * - * @param request The request - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener Listener to be notified upon request completion - * @return cancellable that may be used to cancel the request - */ - public Cancellable getFilterAsync(GetFiltersRequest request, RequestOptions options, ActionListener listener) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - request, - MLRequestConverters::getFilter, - options, - GetFiltersResponse::fromXContent, - listener, - Collections.emptySet() - ); - } - - /** - * Updates a Machine Learning Filter - *

-     * For additional info
-     * see
-     * ML Update Filter documentation
-     *
-     * @param request The request
-     * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
-     * @return PutFilterResponse with the updated {@link org.elasticsearch.client.ml.job.config.MlFilter} object
-     * @throws IOException when there is a serialization issue sending the request or receiving the response
-     */
-    public PutFilterResponse updateFilter(UpdateFilterRequest request, RequestOptions options) throws IOException {
-        return restHighLevelClient.performRequestAndParseEntity(
-            request,
-            MLRequestConverters::updateFilter,
-            options,
-            PutFilterResponse::fromXContent,
-            Collections.emptySet()
-        );
-    }
-
-    /**
-     * Updates a Machine Learning Filter asynchronously and notifies listener on completion
-     *
-     * For additional info
-     * see
-     * ML Update Filter documentation
-     *
-     * @param request The request
-     * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
-     * @param listener Listener to be notified upon request completion
-     * @return cancellable that may be used to cancel the request
-     */
-    public Cancellable updateFilterAsync(UpdateFilterRequest request, RequestOptions options, ActionListener listener) {
-        return restHighLevelClient.performRequestAsyncAndParseEntity(
-            request,
-            MLRequestConverters::updateFilter,
-            options,
-            PutFilterResponse::fromXContent,
-            listener,
-            Collections.emptySet()
-        );
-    }
-
-    /**
-     * Deletes the given Machine Learning filter
-     *
-     * For additional info
-     * see
-     * ML Delete Filter documentation
-     *
-     * @param request The request to delete the filter
-     * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
-     * @return action acknowledgement
-     * @throws IOException when there is a serialization issue sending the request or receiving the response
-     */
-    public AcknowledgedResponse deleteFilter(DeleteFilterRequest request, RequestOptions options) throws IOException {
-        return restHighLevelClient.performRequestAndParseEntity(
-            request,
-            MLRequestConverters::deleteFilter,
-            options,
-            AcknowledgedResponse::fromXContent,
-            Collections.emptySet()
-        );
-    }
-
-    /**
-     * Deletes the given Machine Learning filter asynchronously and notifies the listener on completion
-     *
-     * For additional info
-     * see
-     * ML Delete Filter documentation
-     *
-     * @param request The request to delete the filter
-     * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
-     * @param listener Listener to be notified upon request completion
-     * @return cancellable that may be used to cancel the request
-     */
-    public Cancellable deleteFilterAsync(
-        DeleteFilterRequest request,
-        RequestOptions options,
-        ActionListener listener
-    ) {
-        return restHighLevelClient.performRequestAsyncAndParseEntity(
-            request,
-            MLRequestConverters::deleteFilter,
-            options,
-            AcknowledgedResponse::fromXContent,
-            listener,
-            Collections.emptySet()
-        );
-    }
-
-    /**
-     * Gets Machine Learning information about default values and limits.
-     *
-     * For additional info
-     * see Machine Learning info
-     *
-     * @param request The request of Machine Learning info
-     * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
-     * @return response info about default values and limits
-     * @throws IOException when there is a serialization issue sending the request or receiving the response
-     */
-    public MlInfoResponse getMlInfo(MlInfoRequest request, RequestOptions options) throws IOException {
-        return restHighLevelClient.performRequestAndParseEntity(
-            request,
-            MLRequestConverters::mlInfo,
-            options,
-            MlInfoResponse::fromXContent,
-            Collections.emptySet()
-        );
-    }
-
-    /**
-     * Gets Machine Learning information about default values and limits, asynchronously.
-     *
-     * For additional info
-     * see Machine Learning info
-     *
-     * @param request The request of Machine Learning info
-     * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
-     * @param listener Listener to be notified upon request completion
-     * @return cancellable that may be used to cancel the request
-     */
-    public Cancellable getMlInfoAsync(MlInfoRequest request, RequestOptions options, ActionListener listener) {
-        return restHighLevelClient.performRequestAsyncAndParseEntity(
-            request,
-            MLRequestConverters::mlInfo,
-            options,
-            MlInfoResponse::fromXContent,
-            listener,
-            Collections.emptySet()
-        );
-    }
-
-    /**
-     * Sets the ML cluster setting upgrade_mode
-     *
-     * For additional info
-     * see Set Upgrade Mode
-     *
-     * @param request The request to set upgrade mode
-     * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
-     * @return response
-     * @throws IOException when there is a serialization issue sending the request or receiving the response
-     */
-    public AcknowledgedResponse setUpgradeMode(SetUpgradeModeRequest request, RequestOptions options) throws IOException {
-        return restHighLevelClient.performRequestAndParseEntity(
-            request,
-            MLRequestConverters::setUpgradeMode,
-            options,
-            AcknowledgedResponse::fromXContent,
-            Collections.emptySet()
-        );
-    }
-
-    /**
-     * Sets the ML cluster setting upgrade_mode asynchronously
-     *
-     * For additional info
-     * see Set Upgrade Mode
-     *
-     * @param request The request of Machine Learning info
-     * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
-     * @param listener Listener to be notified upon request completion
-     * @return cancellable that may be used to cancel the request
-     */
-    public Cancellable setUpgradeModeAsync(
-        SetUpgradeModeRequest request,
-        RequestOptions options,
-        ActionListener listener
-    ) {
-        return restHighLevelClient.performRequestAsyncAndParseEntity(
-            request,
-            MLRequestConverters::setUpgradeMode,
-            options,
-            AcknowledgedResponse::fromXContent,
-            listener,
-            Collections.emptySet()
-        );
-    }
-
-    /**
-     * Estimate the model memory an analysis config is likely to need given supplied field cardinalities
-     *
-     * For additional info
-     * see Estimate Model Memory
-     *
-     * @param request The {@link EstimateModelMemoryRequest}
-     * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
-     * @return {@link EstimateModelMemoryResponse} response object
-     */
-    public EstimateModelMemoryResponse estimateModelMemory(EstimateModelMemoryRequest request, RequestOptions options) throws IOException {
-        return restHighLevelClient.performRequestAndParseEntity(
-            request,
-            MLRequestConverters::estimateModelMemory,
-            options,
-            EstimateModelMemoryResponse::fromXContent,
-            Collections.emptySet()
-        );
-    }
-
-    /**
-     * Estimate the model memory an analysis config is likely to need given supplied field cardinalities and notifies listener upon
-     * completion
-     *
-     * For additional info
-     * see Estimate Model Memory
-     *
-     * @param request The {@link EstimateModelMemoryRequest}
-     * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
-     * @param listener Listener to be notified upon request completion
-     * @return cancellable that may be used to cancel the request
-     */
-    public Cancellable estimateModelMemoryAsync(
-        EstimateModelMemoryRequest request,
-        RequestOptions options,
-        ActionListener listener
-    ) {
-        return restHighLevelClient.performRequestAsyncAndParseEntity(
-            request,
-            MLRequestConverters::estimateModelMemory,
-            options,
-            EstimateModelMemoryResponse::fromXContent,
-            listener,
-            Collections.emptySet()
-        );
-    }
-
-    /**
-     * Creates a new Data Frame Analytics config
-     *
-     * For additional info
-     * see
-     * PUT Data Frame Analytics documentation
-     *
-     * @param request The {@link PutDataFrameAnalyticsRequest} containing the
-     * {@link org.elasticsearch.client.ml.dataframe.DataFrameAnalyticsConfig}
-     * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
-     * @return The {@link PutDataFrameAnalyticsResponse} containing the created
-     * {@link org.elasticsearch.client.ml.dataframe.DataFrameAnalyticsConfig}
-     * @throws IOException when there is a serialization issue sending the request or receiving the response
-     */
-    public PutDataFrameAnalyticsResponse putDataFrameAnalytics(PutDataFrameAnalyticsRequest request, RequestOptions options)
-        throws IOException {
-        return restHighLevelClient.performRequestAndParseEntity(
-            request,
-            MLRequestConverters::putDataFrameAnalytics,
-            options,
-            PutDataFrameAnalyticsResponse::fromXContent,
-            Collections.emptySet()
-        );
-    }
-
-    /**
-     * Creates a new Data Frame Analytics config asynchronously and notifies listener upon completion
-     *
-     * For additional info
-     * see
-     * PUT Data Frame Analytics documentation
-     *
-     * @param request The {@link PutDataFrameAnalyticsRequest} containing the
-     * {@link org.elasticsearch.client.ml.dataframe.DataFrameAnalyticsConfig}
-     * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
-     * @param listener Listener to be notified upon request completion
-     * @return cancellable that may be used to cancel the request
-     */
-    public Cancellable putDataFrameAnalyticsAsync(
-        PutDataFrameAnalyticsRequest request,
-        RequestOptions options,
-        ActionListener listener
-    ) {
-        return restHighLevelClient.performRequestAsyncAndParseEntity(
-            request,
-            MLRequestConverters::putDataFrameAnalytics,
-            options,
-            PutDataFrameAnalyticsResponse::fromXContent,
-            listener,
-            Collections.emptySet()
-        );
-    }
-
-    /**
-     * Updates a Data Frame Analytics config
-     *
-     * For additional info
-     * see
-     * PUT Data Frame Analytics documentation
-     *
-     * @param request The {@link UpdateDataFrameAnalyticsRequest} containing the
-     * {@link org.elasticsearch.client.ml.dataframe.DataFrameAnalyticsConfigUpdate}
-     * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
-     * @return The {@link PutDataFrameAnalyticsResponse} containing the updated
-     * {@link org.elasticsearch.client.ml.dataframe.DataFrameAnalyticsConfig}
-     * @throws IOException when there is a serialization issue sending the request or receiving the response
-     */
-    public PutDataFrameAnalyticsResponse updateDataFrameAnalytics(UpdateDataFrameAnalyticsRequest request, RequestOptions options)
-        throws IOException {
-        return restHighLevelClient.performRequestAndParseEntity(
-            request,
-            MLRequestConverters::updateDataFrameAnalytics,
-            options,
-            PutDataFrameAnalyticsResponse::fromXContent,
-            Collections.emptySet()
-        );
-    }
-
-    /**
-     * Updates a Data Frame Analytics config asynchronously and notifies listener upon completion
-     *
-     * For additional info
-     * see
-     * Update Data Frame Analytics documentation
-     *
-     * @param request The {@link UpdateDataFrameAnalyticsRequest} containing the
-     * {@link org.elasticsearch.client.ml.dataframe.DataFrameAnalyticsConfigUpdate}
-     * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
-     * @param listener Listener to be notified upon request completion
-     * @return cancellable that may be used to cancel the request
-     */
-    public Cancellable updateDataFrameAnalyticsAsync(
-        UpdateDataFrameAnalyticsRequest request,
-        RequestOptions options,
-        ActionListener listener
-    ) {
-        return restHighLevelClient.performRequestAsyncAndParseEntity(
-            request,
-            MLRequestConverters::updateDataFrameAnalytics,
-            options,
-            PutDataFrameAnalyticsResponse::fromXContent,
-            listener,
-            Collections.emptySet()
-        );
-    }
-
-    /**
-     * Gets a single or multiple Data Frame Analytics configs
-     *
-     * For additional info
-     * see
-     * GET Data Frame Analytics documentation
-     *
-     * @param request The {@link GetDataFrameAnalyticsRequest}
-     * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
-     * @return {@link GetDataFrameAnalyticsResponse} response object containing the
-     * {@link org.elasticsearch.client.ml.dataframe.DataFrameAnalyticsConfig} objects
-     */
-    public GetDataFrameAnalyticsResponse getDataFrameAnalytics(GetDataFrameAnalyticsRequest request, RequestOptions options)
-        throws IOException {
-        return restHighLevelClient.performRequestAndParseEntity(
-            request,
-            MLRequestConverters::getDataFrameAnalytics,
-            options,
-            GetDataFrameAnalyticsResponse::fromXContent,
-            Collections.emptySet()
-        );
-    }
-
-    /**
-     * Gets a single or multiple Data Frame Analytics configs asynchronously and notifies listener upon completion
-     *
-     * For additional info
-     * see
-     * GET Data Frame Analytics documentation
-     *
-     * @param request The {@link GetDataFrameAnalyticsRequest}
-     * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
-     * @param listener Listener to be notified upon request completion
-     * @return cancellable that may be used to cancel the request
-     */
-    public Cancellable getDataFrameAnalyticsAsync(
-        GetDataFrameAnalyticsRequest request,
-        RequestOptions options,
-        ActionListener listener
-    ) {
-        return restHighLevelClient.performRequestAsyncAndParseEntity(
-            request,
-            MLRequestConverters::getDataFrameAnalytics,
-            options,
-            GetDataFrameAnalyticsResponse::fromXContent,
-            listener,
-            Collections.emptySet()
-        );
-    }
-
-    /**
-     * Gets the running statistics of a Data Frame Analytics
-     *
-     * For additional info
-     * see
-     * GET Data Frame Analytics Stats documentation
-     *
-     * @param request The {@link GetDataFrameAnalyticsStatsRequest}
-     * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
-     * @return {@link GetDataFrameAnalyticsStatsResponse} response object
-     */
-    public GetDataFrameAnalyticsStatsResponse getDataFrameAnalyticsStats(GetDataFrameAnalyticsStatsRequest request, RequestOptions options)
-        throws IOException {
-        return restHighLevelClient.performRequestAndParseEntity(
-            request,
-            MLRequestConverters::getDataFrameAnalyticsStats,
-            options,
-            GetDataFrameAnalyticsStatsResponse::fromXContent,
-            Collections.emptySet()
-        );
-    }
-
-    /**
-     * Gets the running statistics of a Data Frame Analytics asynchronously and notifies listener upon completion
-     *
-     * For additional info
-     * see
-     * GET Data Frame Analytics Stats documentation
-     *
-     * @param request The {@link GetDataFrameAnalyticsStatsRequest}
-     * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
-     * @param listener Listener to be notified upon request completion
-     * @return cancellable that may be used to cancel the request
-     */
-    public Cancellable getDataFrameAnalyticsStatsAsync(
-        GetDataFrameAnalyticsStatsRequest request,
-        RequestOptions options,
-        ActionListener listener
-    ) {
-        return restHighLevelClient.performRequestAsyncAndParseEntity(
-            request,
-            MLRequestConverters::getDataFrameAnalyticsStats,
-            options,
-            GetDataFrameAnalyticsStatsResponse::fromXContent,
-            listener,
-            Collections.emptySet()
-        );
-    }
-
-    /**
-     * Starts Data Frame Analytics
-     *
-     * For additional info
-     * see
-     * Start Data Frame Analytics documentation
-     *
-     * @param request The {@link StartDataFrameAnalyticsRequest}
-     * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
-     * @return action acknowledgement
-     * @throws IOException when there is a serialization issue sending the request or receiving the response
-     */
-    public StartDataFrameAnalyticsResponse startDataFrameAnalytics(StartDataFrameAnalyticsRequest request, RequestOptions options)
-        throws IOException {
-        return restHighLevelClient.performRequestAndParseEntity(
-            request,
-            MLRequestConverters::startDataFrameAnalytics,
-            options,
-            StartDataFrameAnalyticsResponse::fromXContent,
-            Collections.emptySet()
-        );
-    }
-
-    /**
-     * Starts Data Frame Analytics asynchronously and notifies listener upon completion
-     *
-     * For additional info
-     * see
-     * Start Data Frame Analytics documentation
-     *
-     * @param request The {@link StartDataFrameAnalyticsRequest}
-     * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
-     * @param listener Listener to be notified upon request completion
-     * @return cancellable that may be used to cancel the request
-     */
-    public Cancellable startDataFrameAnalyticsAsync(
-        StartDataFrameAnalyticsRequest request,
-        RequestOptions options,
-        ActionListener listener
-    ) {
-        return restHighLevelClient.performRequestAsyncAndParseEntity(
-            request,
-            MLRequestConverters::startDataFrameAnalytics,
-            options,
-            StartDataFrameAnalyticsResponse::fromXContent,
-            listener,
-            Collections.emptySet()
-        );
-    }
-
-    /**
-     * Stops Data Frame Analytics
-     *
-     * For additional info
-     * see
-     * Stop Data Frame Analytics documentation
-     *
-     * @param request The {@link StopDataFrameAnalyticsRequest}
-     * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
-     * @return {@link StopDataFrameAnalyticsResponse}
-     * @throws IOException when there is a serialization issue sending the request or receiving the response
-     */
-    public StopDataFrameAnalyticsResponse stopDataFrameAnalytics(StopDataFrameAnalyticsRequest request, RequestOptions options)
-        throws IOException {
-        return restHighLevelClient.performRequestAndParseEntity(
-            request,
-            MLRequestConverters::stopDataFrameAnalytics,
-            options,
-            StopDataFrameAnalyticsResponse::fromXContent,
-            Collections.emptySet()
-        );
-    }
-
-    /**
-     * Stops Data Frame Analytics asynchronously and notifies listener upon completion
-     *
-     * For additional info
-     * see
-     * Stop Data Frame Analytics documentation
-     *
-     * @param request The {@link StopDataFrameAnalyticsRequest}
-     * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
-     * @param listener Listener to be notified upon request completion
-     * @return cancellable that may be used to cancel the request
-     */
-    public Cancellable stopDataFrameAnalyticsAsync(
-        StopDataFrameAnalyticsRequest request,
-        RequestOptions options,
-        ActionListener listener
-    ) {
-        return restHighLevelClient.performRequestAsyncAndParseEntity(
-            request,
-            MLRequestConverters::stopDataFrameAnalytics,
-            options,
-            StopDataFrameAnalyticsResponse::fromXContent,
-            listener,
-            Collections.emptySet()
-        );
-    }
-
-    /**
-     * Deletes the given Data Frame Analytics config
-     *
-     * For additional info
-     * see
-     * DELETE Data Frame Analytics documentation
-     *
-     * @param request The {@link DeleteDataFrameAnalyticsRequest}
-     * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
-     * @return action acknowledgement
-     * @throws IOException when there is a serialization issue sending the request or receiving the response
-     */
-    public AcknowledgedResponse deleteDataFrameAnalytics(DeleteDataFrameAnalyticsRequest request, RequestOptions options)
-        throws IOException {
-        return restHighLevelClient.performRequestAndParseEntity(
-            request,
-            MLRequestConverters::deleteDataFrameAnalytics,
-            options,
-            AcknowledgedResponse::fromXContent,
-            Collections.emptySet()
-        );
-    }
-
-    /**
-     * Deletes the given Data Frame Analytics config asynchronously and notifies listener upon completion
-     *
-     * For additional info
-     * see
-     * DELETE Data Frame Analytics documentation
-     *
-     * @param request The {@link DeleteDataFrameAnalyticsRequest}
-     * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
-     * @param listener Listener to be notified upon request completion
-     * @return cancellable that may be used to cancel the request
-     */
-    public Cancellable deleteDataFrameAnalyticsAsync(
-        DeleteDataFrameAnalyticsRequest request,
-        RequestOptions options,
-        ActionListener listener
-    ) {
-        return restHighLevelClient.performRequestAsyncAndParseEntity(
-            request,
-            MLRequestConverters::deleteDataFrameAnalytics,
-            options,
-            AcknowledgedResponse::fromXContent,
-            listener,
-            Collections.emptySet()
-        );
-    }
-
-    /**
-     * Evaluates the given Data Frame
-     *
-     * For additional info
-     * see
-     * Evaluate Data Frame documentation
-     *
-     * @param request The {@link EvaluateDataFrameRequest}
-     * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
-     * @return {@link EvaluateDataFrameResponse} response object
-     * @throws IOException when there is a serialization issue sending the request or receiving the response
-     */
-    public EvaluateDataFrameResponse evaluateDataFrame(EvaluateDataFrameRequest request, RequestOptions options) throws IOException {
-        return restHighLevelClient.performRequestAndParseEntity(
-            request,
-            MLRequestConverters::evaluateDataFrame,
-            options,
-            EvaluateDataFrameResponse::fromXContent,
-            Collections.emptySet()
-        );
-    }
-
-    /**
-     * Evaluates the given Data Frame asynchronously and notifies listener upon completion
-     *
-     * For additional info
-     * see
-     * Evaluate Data Frame documentation
-     *
-     * @param request The {@link EvaluateDataFrameRequest}
-     * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
-     * @param listener Listener to be notified upon request completion
-     * @return cancellable that may be used to cancel the request
-     */
-    public Cancellable evaluateDataFrameAsync(
-        EvaluateDataFrameRequest request,
-        RequestOptions options,
-        ActionListener listener
-    ) {
-        return restHighLevelClient.performRequestAsyncAndParseEntity(
-            request,
-            MLRequestConverters::evaluateDataFrame,
-            options,
-            EvaluateDataFrameResponse::fromXContent,
-            listener,
-            Collections.emptySet()
-        );
-    }
-
-    /**
-     * Explains the given Data Frame Analytics
-     *
-     * For additional info
-     * see
-     * Explain Data Frame Analytics documentation
-     *
-     * @param request The {@link ExplainDataFrameAnalyticsRequest}
-     * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
-     * @return {@link ExplainDataFrameAnalyticsResponse} response object
-     * @throws IOException when there is a serialization issue sending the request or receiving the response
-     */
-    public ExplainDataFrameAnalyticsResponse explainDataFrameAnalytics(ExplainDataFrameAnalyticsRequest request, RequestOptions options)
-        throws IOException {
-        return restHighLevelClient.performRequestAndParseEntity(
-            request,
-            MLRequestConverters::explainDataFrameAnalytics,
-            options,
-            ExplainDataFrameAnalyticsResponse::fromXContent,
-            Collections.emptySet()
-        );
-    }
-
-    /**
-     * Explains the given Data Frame Analytics asynchronously and notifies listener upon completion
-     *
-     * For additional info
-     * see
-     * Explain Data Frame Analytics documentation
-     *
-     * @param request The {@link ExplainDataFrameAnalyticsRequest}
-     * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
-     * @param listener Listener to be notified upon request completion
-     * @return cancellable that may be used to cancel the request
-     */
-    public Cancellable explainDataFrameAnalyticsAsync(
-        ExplainDataFrameAnalyticsRequest request,
-        RequestOptions options,
-        ActionListener listener
-    ) {
-        return restHighLevelClient.performRequestAsyncAndParseEntity(
-            request,
-            MLRequestConverters::explainDataFrameAnalytics,
-            options,
-            ExplainDataFrameAnalyticsResponse::fromXContent,
-            listener,
-            Collections.emptySet()
-        );
-    }
-
-    /**
-     * Gets trained model configs
-     *
-     * For additional info
-     * see
-     * GET Trained Model Configs documentation
-     *
-     * @param request The {@link GetTrainedModelsRequest}
-     * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
-     * @return {@link GetTrainedModelsResponse} response object
-     */
-    public GetTrainedModelsResponse getTrainedModels(GetTrainedModelsRequest request, RequestOptions options) throws IOException {
-        return restHighLevelClient.performRequestAndParseEntity(
-            request,
-            MLRequestConverters::getTrainedModels,
-            options,
-            GetTrainedModelsResponse::fromXContent,
-            Collections.emptySet()
-        );
-    }
-
-    /**
-     * Gets trained model configs asynchronously and notifies listener upon completion
-     *
-     * For additional info
-     * see
-     * GET Trained Model Configs documentation
-     *
-     * @param request The {@link GetTrainedModelsRequest}
-     * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
-     * @param listener Listener to be notified upon request completion
-     * @return cancellable that may be used to cancel the request
-     */
-    public Cancellable getTrainedModelsAsync(
-        GetTrainedModelsRequest request,
-        RequestOptions options,
-        ActionListener listener
-    ) {
-        return restHighLevelClient.performRequestAsyncAndParseEntity(
-            request,
-            MLRequestConverters::getTrainedModels,
-            options,
-            GetTrainedModelsResponse::fromXContent,
-            listener,
-            Collections.emptySet()
-        );
-    }
-
-    /**
-     * Put trained model config
-     *
-     * For additional info
-     * see
-     * PUT Trained Model Config documentation
-     *
-     * @param request The {@link PutTrainedModelRequest}
-     * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
-     * @return {@link PutTrainedModelResponse} response object
-     */
-    public PutTrainedModelResponse putTrainedModel(PutTrainedModelRequest request, RequestOptions options) throws IOException {
-        return restHighLevelClient.performRequestAndParseEntity(
-            request,
-            MLRequestConverters::putTrainedModel,
-            options,
-            PutTrainedModelResponse::fromXContent,
-            Collections.emptySet()
-        );
-    }
-
-    /**
-     * Put trained model config asynchronously and notifies listener upon completion
-     *
-     * For additional info
-     * see
-     * PUT Trained Model Config documentation
-     *
-     * @param request The {@link PutTrainedModelRequest}
-     * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
-     * @param listener Listener to be notified upon request completion
-     * @return cancellable that may be used to cancel the request
-     */
-    public Cancellable putTrainedModelAsync(
-        PutTrainedModelRequest request,
-        RequestOptions options,
-        ActionListener listener
-    ) {
-        return restHighLevelClient.performRequestAsyncAndParseEntity(
-            request,
-            MLRequestConverters::putTrainedModel,
-            options,
-            PutTrainedModelResponse::fromXContent,
-            listener,
-            Collections.emptySet()
-        );
-    }
-
-    /**
-     * Gets trained model stats
-     *
-     * For additional info
-     * see
-     * GET Trained Model Stats documentation
-     *
-     * @param request The {@link GetTrainedModelsStatsRequest}
-     * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
-     * @return {@link GetTrainedModelsStatsResponse} response object
-     */
-    public GetTrainedModelsStatsResponse getTrainedModelsStats(GetTrainedModelsStatsRequest request, RequestOptions options)
-        throws IOException {
-        return restHighLevelClient.performRequestAndParseEntity(
-            request,
-            MLRequestConverters::getTrainedModelsStats,
-            options,
-            GetTrainedModelsStatsResponse::fromXContent,
-            Collections.emptySet()
-        );
-    }
-
-    /**
-     * Gets trained model stats asynchronously and notifies listener upon completion
-     *
-     * For additional info
-     * see
-     * GET Trained Model Stats documentation
-     *
-     * @param request The {@link GetTrainedModelsStatsRequest}
-     * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
-     * @param listener Listener to be notified upon request completion
-     * @return cancellable that may be used to cancel the request
-     */
-    public Cancellable getTrainedModelsStatsAsync(
-        GetTrainedModelsStatsRequest request,
-        RequestOptions options,
-        ActionListener listener
-    ) {
-        return restHighLevelClient.performRequestAsyncAndParseEntity(
-            request,
-            MLRequestConverters::getTrainedModelsStats,
-            options,
-            GetTrainedModelsStatsResponse::fromXContent,
-            listener,
-            Collections.emptySet()
-        );
-    }
-
-    /**
-     * Deletes the given Trained Model
-     *
-     * For additional info
-     * see
-     * DELETE Trained Model documentation
-     *
-     * @param request The {@link DeleteTrainedModelRequest}
-     * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
-     * @return action acknowledgement
-     * @throws IOException when there is a serialization issue sending the request or receiving the response
-     */
-    public AcknowledgedResponse deleteTrainedModel(DeleteTrainedModelRequest request, RequestOptions options) throws IOException {
-        return restHighLevelClient.performRequestAndParseEntity(
-            request,
-            MLRequestConverters::deleteTrainedModel,
-            options,
-            AcknowledgedResponse::fromXContent,
-            Collections.emptySet()
-        );
-    }
-
-    /**
-     * Deletes the given Trained Model asynchronously and notifies listener upon completion
-     *
-     * For additional info
-     * see
-     * DELETE Trained Model documentation
-     *
-     * @param request The {@link DeleteTrainedModelRequest}
-     * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
-     * @param listener Listener to be notified upon request completion
-     * @return cancellable that may be used to cancel the request
-     */
-    public Cancellable deleteTrainedModelAsync(
-        DeleteTrainedModelRequest request,
-        RequestOptions options,
-        ActionListener listener
-    ) {
-        return restHighLevelClient.performRequestAsyncAndParseEntity(
-            request,
-            MLRequestConverters::deleteTrainedModel,
-            options,
-            AcknowledgedResponse::fromXContent,
-            listener,
-            Collections.emptySet()
-        );
-    }
-
-    /**
-     * Creates or reassigns a trained model alias
-     *
-     * For additional info
-     * see
-     * Put Trained Model Aliases documentation
-     *
-     * @param request The {@link PutTrainedModelAliasRequest}
-     * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
-     * @return action acknowledgement
-     * @throws IOException when there is a serialization issue sending the request or receiving the response
-     */
-    public AcknowledgedResponse putTrainedModelAlias(PutTrainedModelAliasRequest request, RequestOptions options) throws IOException {
-        return restHighLevelClient.performRequestAndParseEntity(
-            request,
-            MLRequestConverters::putTrainedModelAlias,
-            options,
-            AcknowledgedResponse::fromXContent,
-            Collections.emptySet()
-        );
-    }
-
-    /**
-     * Creates or reassigns a trained model alias asynchronously and notifies listener upon completion
-     *
-     * For additional info
-     * see
-     * Put Trained Model Aliases documentation
-     *
-     * @param request The {@link PutTrainedModelAliasRequest}
-     * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
-     * @param listener Listener to be notified upon request completion
-     * @return cancellable that may be used to cancel the request
-     */
-    public Cancellable putTrainedModelAliasAsync(
-        PutTrainedModelAliasRequest request,
-        RequestOptions options,
-        ActionListener listener
-    ) {
-        return restHighLevelClient.performRequestAsyncAndParseEntity(
-            request,
-            MLRequestConverters::putTrainedModelAlias,
-            options,
-            AcknowledgedResponse::fromXContent,
-            listener,
-            Collections.emptySet()
-        );
-    }
-
-    /**
-     * Deletes a trained model alias
-     *
-     * For additional info
-     * see
-     * Delete Trained Model Aliases documentation
-     *
-     * @param request The {@link DeleteTrainedModelAliasRequest}
-     * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
-     * @return action acknowledgement
-     * @throws IOException when there is a serialization issue sending the request or receiving the response
-     */
-    public AcknowledgedResponse deleteTrainedModelAlias(DeleteTrainedModelAliasRequest request, RequestOptions options) throws IOException {
-        return restHighLevelClient.performRequestAndParseEntity(
-            request,
-            MLRequestConverters::deleteTrainedModelAlias,
-            options,
-            AcknowledgedResponse::fromXContent,
-            Collections.emptySet()
-        );
-    }
-
-    /**
-     * Deletes a trained model alias asynchronously and notifies listener upon completion
-     *
-     * For additional info
-     * see
-     * Delete Trained Model Aliases documentation
-     *
-     * @param request The {@link DeleteTrainedModelAliasRequest}
-     * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
-     * @param listener Listener to be notified upon request completion
-     * @return cancellable that may be used to cancel the request
-     */
-    public Cancellable deleteTrainedModelAliasAsync(
-        DeleteTrainedModelAliasRequest request,
-        RequestOptions options,
-        ActionListener listener
-    ) {
-        return restHighLevelClient.performRequestAsyncAndParseEntity(
-            request,
-            MLRequestConverters::deleteTrainedModelAlias,
-            options,
-            AcknowledgedResponse::fromXContent,
-            listener,
-            Collections.emptySet()
-        );
-    }
-}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java
index d94071ccac1d5..f030033392123 100644
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java
@@ -51,8 +51,6 @@
 import org.elasticsearch.action.support.master.AcknowledgedResponse;
 import org.elasticsearch.action.update.UpdateRequest;
 import org.elasticsearch.action.update.UpdateResponse;
-import org.elasticsearch.client.analytics.InferencePipelineAggregationBuilder;
-import org.elasticsearch.client.analytics.ParsedInference;
 import org.elasticsearch.client.analytics.ParsedStringStats;
 import org.elasticsearch.client.analytics.ParsedTopMetrics;
 import org.elasticsearch.client.analytics.StringStatsAggregationBuilder;
@@ -278,7 +276,6 @@ public class RestHighLevelClient implements Closeable {
     private final IndicesClient indicesClient = new IndicesClient(this);
     private final IngestClient ingestClient = new IngestClient(this);
     private final SnapshotClient snapshotClient = new SnapshotClient(this);
-    private final MachineLearningClient machineLearningClient = new MachineLearningClient(this);
     private final SecurityClient securityClient = new SecurityClient(this);
     private final TransformClient transformClient = new TransformClient(this);
     private final EqlClient eqlClient = new EqlClient(this);
@@ -391,20 +388,6 @@ public SearchableSnapshotsClient searchableSnapshots() {
         return searchableSnapshotsClient;
     }

-    /**
-     * Provides methods for accessing the Elastic Licensed Machine Learning APIs that
-     * are shipped with the Elastic Stack distribution of Elasticsearch. All of
-     * these APIs will 404 if run against the OSS distribution of Elasticsearch.
-     *
-     * See the
-     * Machine Learning APIs on elastic.co for more information.
-     *
-     * @return the client wrapper for making Machine Learning API calls
-     */
-    public MachineLearningClient machineLearning() {
-        return machineLearningClient;
-    }
-
     /**
      * Provides methods for accessing the Elastic Licensed Security APIs that
      * are shipped with the Elastic Stack distribution of Elasticsearch. All of
@@ -2730,7 +2713,6 @@ static List getDefaultNamedXContents() {
         map.put(CompositeAggregationBuilder.NAME, (p, c) -> ParsedComposite.fromXContent(p, (String) c));
         map.put(StringStatsAggregationBuilder.NAME, (p, c) -> ParsedStringStats.PARSER.parse(p, (String) c));
         map.put(TopMetricsAggregationBuilder.NAME, (p, c) -> ParsedTopMetrics.PARSER.parse(p, (String) c));
-        map.put(InferencePipelineAggregationBuilder.NAME, (p, c) -> ParsedInference.fromXContent(p, (String) (c)));
         map.put(TimeSeriesAggregationBuilder.NAME, (p, c) -> ParsedTimeSeries.fromXContent(p, (String) (c)));
         List entries = map.entrySet()
             .stream()
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/analytics/InferencePipelineAggregationBuilder.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/analytics/InferencePipelineAggregationBuilder.java
deleted file mode 100644
index 20ba0fa993534..0000000000000
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/analytics/InferencePipelineAggregationBuilder.java
+++ /dev/null
@@ -1,138 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-
-package org.elasticsearch.client.analytics;
-
-import org.elasticsearch.Version;
-import org.elasticsearch.client.ml.inference.trainedmodel.InferenceConfig;
-import org.elasticsearch.common.io.stream.StreamOutput;
-import org.elasticsearch.search.aggregations.PipelineAggregationBuilder;
-import org.elasticsearch.search.aggregations.pipeline.AbstractPipelineAggregationBuilder;
-import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
-import org.elasticsearch.search.builder.SearchSourceBuilder;
-import org.elasticsearch.xcontent.ConstructingObjectParser;
-import org.elasticsearch.xcontent.ParseField;
-import org.elasticsearch.xcontent.XContentBuilder;
-import org.elasticsearch.xcontent.XContentParser;
-
-import java.io.IOException;
-import java.util.Map;
-import java.util.Objects;
-import java.util.TreeMap;
-
-import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg;
-
-/**
- * For building inference pipeline aggregations
- *
- * NOTE: This extends {@linkplain AbstractPipelineAggregationBuilder} for compatibility
- * with {@link SearchSourceBuilder#aggregation(PipelineAggregationBuilder)} but it
- * doesn't support any "server" side things like {@linkplain #doWriteTo(StreamOutput)}
- * or {@linkplain #createInternal(Map)}
- */
-public class InferencePipelineAggregationBuilder extends AbstractPipelineAggregationBuilder {
-
-    public static String NAME = "inference";
-
-    public static final ParseField MODEL_ID = new ParseField("model_id");
-    private static final ParseField INFERENCE_CONFIG = new ParseField("inference_config");
-
-    @SuppressWarnings("unchecked")
-    private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>(
-        NAME,
-        false,
-        (args, name) -> new InferencePipelineAggregationBuilder(name, (String) args[0], (Map) args[1])
-    );
-
-    static {
-        PARSER.declareString(constructorArg(), MODEL_ID);
-        PARSER.declareObject(constructorArg(), (p, c) -> p.mapStrings(), BUCKETS_PATH_FIELD);
-        PARSER.declareNamedObject(
-            InferencePipelineAggregationBuilder::setInferenceConfig,
-            (p, c, n) -> p.namedObject(InferenceConfig.class, n, c),
-            INFERENCE_CONFIG
-        );
-    }
-
-    private final Map bucketPathMap;
-    private final String modelId;
-    private InferenceConfig inferenceConfig;
-
-    public static InferencePipelineAggregationBuilder parse(String pipelineAggregatorName, XContentParser parser) {
-        return PARSER.apply(parser, pipelineAggregatorName);
-    }
-
-    public InferencePipelineAggregationBuilder(String name, String modelId, Map bucketsPath) {
-        super(name, NAME, new TreeMap<>(bucketsPath).values().toArray(new String[] {}));
-        this.modelId = modelId;
-        this.bucketPathMap = bucketsPath;
-    }
-
-    public void setInferenceConfig(InferenceConfig inferenceConfig) {
-        this.inferenceConfig = inferenceConfig;
-    }
-
-    @Override
-    protected void validate(ValidationContext context) {
-        // validation occurs on the server
-    }
-
-    @Override
-    protected void doWriteTo(StreamOutput out) {
-        throw new UnsupportedOperationException();
-    }
-
-    @Override
-    protected PipelineAggregator createInternal(Map metaData) {
-        throw new UnsupportedOperationException();
-    }
-
-    @Override
-    protected boolean overrideBucketsPath() {
-        return true;
-    }
-
-    @Override
-    protected XContentBuilder internalXContent(XContentBuilder builder, Params params) throws IOException {
-        builder.field(MODEL_ID.getPreferredName(), modelId);
-        builder.field(BUCKETS_PATH_FIELD.getPreferredName(), bucketPathMap);
-        if (inferenceConfig != null) {
-            builder.startObject(INFERENCE_CONFIG.getPreferredName());
-            builder.field(inferenceConfig.getName(), inferenceConfig);
-            builder.endObject();
-        }
-        return builder;
-    }
-
-    @Override
-    public String getWriteableName() {
-        return NAME;
-    }
-
-    @Override
-    public int hashCode() {
-        return Objects.hash(super.hashCode(), bucketPathMap, modelId, inferenceConfig);
-    }
-
-    @Override
-    public boolean equals(Object obj) {
-        if (this == obj) return true;
-        if (obj == null || getClass() != obj.getClass()) return false;
-        if (super.equals(obj) == false) return false;
-
-        InferencePipelineAggregationBuilder other = (InferencePipelineAggregationBuilder) obj;
-        return Objects.equals(bucketPathMap, other.bucketPathMap)
-            && Objects.equals(modelId, other.modelId)
-            && Objects.equals(inferenceConfig, other.inferenceConfig);
-    }
-
-    @Override
-    public Version getMinimalSupportedVersion() {
-        return Version.V_7_9_0;
-    }
-}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/analytics/ParsedInference.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/analytics/ParsedInference.java
deleted file mode 100644
index d3e1fcd5c85f0..0000000000000
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/analytics/ParsedInference.java
+++ /dev/null
@@ -1,132 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-
-package org.elasticsearch.client.analytics;
-
-import org.elasticsearch.client.ml.inference.results.FeatureImportance;
-import org.elasticsearch.client.ml.inference.results.TopClassEntry;
-import org.elasticsearch.search.aggregations.ParsedAggregation;
-import org.elasticsearch.xcontent.ConstructingObjectParser;
-import org.elasticsearch.xcontent.ObjectParser;
-import org.elasticsearch.xcontent.ParseField;
-import org.elasticsearch.xcontent.XContentBuilder;
-import org.elasticsearch.xcontent.XContentParseException;
-import org.elasticsearch.xcontent.XContentParser;
-
-import java.io.IOException;
-import java.util.List;
-
-import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg;
-
-/**
- * This class parses the superset of all possible fields that may be written by
- * InferenceResults. The warning field is mutually exclusive with all the other fields.
- *
- * In the case of classification results {@link #getValue()} may return a String,
- * Boolean or a Double. For regression results {@link #getValue()} is always
- * a Double.
- */
-public class ParsedInference extends ParsedAggregation {
-
-    @SuppressWarnings("unchecked")
-    private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>(
-        ParsedInference.class.getSimpleName(),
-        true,
-        args -> new ParsedInference(args[0], (List) args[1], (List) args[2], (String) args[3])
-    );
-
-    public static final ParseField FEATURE_IMPORTANCE = new ParseField("feature_importance");
-    public static final ParseField WARNING = new ParseField("warning");
-    public static final ParseField TOP_CLASSES = new ParseField("top_classes");
-
-    static {
-        PARSER.declareField(optionalConstructorArg(), (p, n) -> {
-            Object o;
-            XContentParser.Token token = p.currentToken();
-            if (token == XContentParser.Token.VALUE_STRING) {
-                o = p.text();
-            } else if (token == XContentParser.Token.VALUE_BOOLEAN) {
-                o = p.booleanValue();
-            } else if (token == XContentParser.Token.VALUE_NUMBER) {
-                o = p.doubleValue();
-            } else {
-                throw new XContentParseException(
-                    p.getTokenLocation(),
-                    "["
-                        + ParsedInference.class.getSimpleName()
-                        + "] failed to parse field ["
-                        + CommonFields.VALUE
-                        + "] "
-                        + "value ["
-                        + token
-                        + "] is not a string, boolean or number"
-                );
-            }
-            return o;
-        }, CommonFields.VALUE, ObjectParser.ValueType.VALUE);
-        PARSER.declareObjectArray(optionalConstructorArg(), (p, c) -> FeatureImportance.fromXContent(p), FEATURE_IMPORTANCE);
-        PARSER.declareObjectArray(optionalConstructorArg(), (p, c) -> TopClassEntry.fromXContent(p), TOP_CLASSES);
-        PARSER.declareString(optionalConstructorArg(), WARNING);
-        declareAggregationFields(PARSER);
-    }
-
-    public static ParsedInference fromXContent(XContentParser parser, final String name) {
-        ParsedInference parsed = PARSER.apply(parser, null);
-        parsed.setName(name);
-        return parsed;
-    }
-
-    private final Object value;
-    private final List featureImportance;
-    private final List topClasses;
-    private final String warning;
-
-    ParsedInference(Object value, List featureImportance, List topClasses, String warning) {
-        this.value = value;
-        this.warning = warning;
-        this.featureImportance = featureImportance;
-        this.topClasses = topClasses;
-    }
-
-    public Object getValue() {
-        return value;
-    }
-
-    public List getFeatureImportance() {
-        return featureImportance;
-    }
-
-    public List getTopClasses() {
-        return topClasses;
-    }
-
-    public String getWarning() {
-        return warning;
-    }
-
-    @Override
-    protected XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException {
-        if (warning != null) {
-            builder.field(WARNING.getPreferredName(), warning);
-        } else {
-            builder.field(CommonFields.VALUE.getPreferredName(), value);
-            if (topClasses != null && topClasses.size() > 0) {
-                builder.field(TOP_CLASSES.getPreferredName(), topClasses);
-            }
-            if (featureImportance != null && featureImportance.size() > 0) {
-                builder.field(FEATURE_IMPORTANCE.getPreferredName(), featureImportance);
-            }
-        }
-        return builder;
-    }
-
-    @Override
-    public String getType() {
-        return InferencePipelineAggregationBuilder.NAME;
-    }
-}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/AbstractResultResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/AbstractResultResponse.java
deleted file mode 100644
index 94e42fc0c8b0a..0000000000000
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/AbstractResultResponse.java
+++ /dev/null
@@ -1,49 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-package org.elasticsearch.client.ml;
-
-import org.elasticsearch.xcontent.ParseField;
-import org.elasticsearch.xcontent.ToXContent;
-import org.elasticsearch.xcontent.ToXContentObject;
-import org.elasticsearch.xcontent.XContentBuilder;
-
-import java.io.IOException;
-import java.util.Collections;
-import java.util.List;
-import java.util.Objects;
-
-/**
- * Abstract class that provides a list of results and their count.
- */
-public abstract class AbstractResultResponse implements ToXContentObject {
-
-    public static final ParseField COUNT = new ParseField("count");
-
-    private final ParseField resultsField;
-    protected final List results;
-    protected final long count;
-
-    AbstractResultResponse(ParseField resultsField, List results, long count) {
-        this.resultsField = Objects.requireNonNull(resultsField, "[results_field] must not be null");
-        this.results = Collections.unmodifiableList(results);
-        this.count = count;
-    }
-
-    @Override
-    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
-        builder.startObject();
-        builder.field(COUNT.getPreferredName(), count);
-        builder.field(resultsField.getPreferredName(), results);
-        builder.endObject();
-        return builder;
-    }
-
-    public long count() {
-        return count;
-    }
-}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/CloseJobRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/CloseJobRequest.java
deleted file mode 100644
index bd55976d7debb..0000000000000
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/CloseJobRequest.java
+++ /dev/null
@@ -1,183 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-package org.elasticsearch.client.ml;
-
-import org.elasticsearch.client.Validatable;
-import org.elasticsearch.common.Strings;
-import org.elasticsearch.core.TimeValue;
-import org.elasticsearch.xcontent.ConstructingObjectParser;
-import org.elasticsearch.xcontent.ObjectParser;
-import org.elasticsearch.xcontent.ParseField;
-import org.elasticsearch.xcontent.ToXContentObject;
-import org.elasticsearch.xcontent.XContentBuilder;
-
-import java.io.IOException;
-import java.security.InvalidParameterException;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.List;
-import java.util.Objects;
-
-/**
- * Request to close Machine Learning Jobs
- */
-public class CloseJobRequest implements ToXContentObject, Validatable {
-
-    public static final ParseField JOB_ID = new ParseField("job_id");
-    public static final ParseField TIMEOUT = new ParseField("timeout");
-    public static final ParseField FORCE = new ParseField("force");
-    public static final ParseField ALLOW_NO_MATCH = new ParseField("allow_no_match");
-
-    @SuppressWarnings("unchecked")
-    public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>(
-        "close_job_request",
-        true,
-        a -> new CloseJobRequest((List) a[0])
-    );
-
-    static {
-        PARSER.declareField(
-            ConstructingObjectParser.constructorArg(),
-            p -> Arrays.asList(Strings.commaDelimitedListToStringArray(p.text())),
-            JOB_ID,
-            ObjectParser.ValueType.STRING_ARRAY
-        );
-        PARSER.declareString((obj, val) -> obj.setTimeout(TimeValue.parseTimeValue(val, TIMEOUT.getPreferredName())), TIMEOUT);
-        PARSER.declareBoolean(CloseJobRequest::setForce, FORCE);
-        PARSER.declareBoolean(CloseJobRequest::setAllowNoMatch, ALLOW_NO_MATCH);
-    }
-
-    private static final String ALL_JOBS = "_all";
-
-    private final List jobIds;
-    private TimeValue timeout;
-    private Boolean force;
-    private Boolean allowNoMatch;
-
-    /**
-     * Explicitly close all jobs
-     *
-     * @return a {@link CloseJobRequest} for all existing jobs
-     */
-    public static CloseJobRequest closeAllJobsRequest() {
-        return new CloseJobRequest(ALL_JOBS);
-    }
-
-    CloseJobRequest(List jobIds) {
-        if (jobIds.isEmpty()) {
-            throw new InvalidParameterException("jobIds must not be empty");
-        }
-        if (jobIds.stream().anyMatch(Objects::isNull)) {
-            throw new NullPointerException("jobIds must not contain null values");
-        }
-        this.jobIds = new ArrayList<>(jobIds);
-    }
-
-    /**
-     * Close the specified Jobs via their unique jobIds
-     *
-     * @param jobIds must be non-null and non-empty and each jobId must be non-null
-     */
-    public CloseJobRequest(String... jobIds) {
-        this(Arrays.asList(jobIds));
-    }
-
-    /**
-     * All the jobIds to be closed
-     */
-    public List getJobIds() {
-        return jobIds;
-    }
-
-    public TimeValue getTimeout() {
-        return timeout;
-    }
-
-    /**
-     * How long to wait for the close request to complete before timing out.
-     *
-     * @param timeout Default value: 30 minutes
-     */
-    public void setTimeout(TimeValue timeout) {
-        this.timeout = timeout;
-    }
-
-    public Boolean getForce() {
-        return force;
-    }
-
-    /**
-     * Should the closing be forced.
-     *
-     * Use to close a failed job, or to forcefully close a job which has not responded to its initial close request.
-     *
-     * @param force When {@code true} forcefully close the job. Defaults to {@code false}
-     */
-    public void setForce(boolean force) {
-        this.force = force;
-    }
-
-    public Boolean getAllowNoMatch() {
-        return this.allowNoMatch;
-    }
-
-    /**
-     * Whether to ignore if a wildcard expression matches no jobs.
-     *
-     * This includes {@code _all} string or when no jobs have been specified
-     *
-     * @param allowNoMatch When {@code true} ignore if wildcard or {@code _all} matches no jobs. Defaults to {@code true}
-     */
-    public void setAllowNoMatch(boolean allowNoMatch) {
-        this.allowNoMatch = allowNoMatch;
-    }
-
-    @Override
-    public int hashCode() {
-        return Objects.hash(jobIds, timeout, force, allowNoMatch);
-    }
-
-    @Override
-    public boolean equals(Object other) {
-        if (this == other) {
-            return true;
-        }
-
-        if (other == null || getClass() != other.getClass()) {
-            return false;
-        }
-
-        CloseJobRequest that = (CloseJobRequest) other;
-        return Objects.equals(jobIds, that.jobIds)
-            && Objects.equals(timeout, that.timeout)
-            && Objects.equals(force, that.force)
-            && Objects.equals(allowNoMatch, that.allowNoMatch);
-    }
-
-    @Override
-    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
-        builder.startObject();
-        builder.field(JOB_ID.getPreferredName(), Strings.collectionToCommaDelimitedString(jobIds));
-        if (timeout != null) {
-            builder.field(TIMEOUT.getPreferredName(), timeout.getStringRep());
-        }
-        if (force != null) {
-            builder.field(FORCE.getPreferredName(), force);
-        }
-        if (allowNoMatch != null) {
-            builder.field(ALLOW_NO_MATCH.getPreferredName(), allowNoMatch);
-        }
-        builder.endObject();
-        return builder;
-    }
-
-    @Override
-    public String toString() {
-        return Strings.toString(this);
-    }
-}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/CloseJobResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/CloseJobResponse.java
deleted file mode 100644
index 827cd87595ad4..0000000000000
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/CloseJobResponse.java
+++ /dev/null
@@ -1,80 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-package org.elasticsearch.client.ml;
-
-import org.elasticsearch.xcontent.ConstructingObjectParser;
-import org.elasticsearch.xcontent.ParseField;
-import org.elasticsearch.xcontent.ToXContentObject;
-import org.elasticsearch.xcontent.XContentBuilder;
-import org.elasticsearch.xcontent.XContentParser;
-
-import java.io.IOException;
-import java.util.Objects;
-
-/**
- * Response indicating if the Job(s) closed or not
- */
-public class CloseJobResponse implements ToXContentObject {
-
-    private static final ParseField CLOSED = new ParseField("closed");
-
-    public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>(
-        "close_job_response",
-        true,
-        (a) -> new CloseJobResponse((Boolean) a[0])
-    );
-
-    static {
-        PARSER.declareBoolean(ConstructingObjectParser.constructorArg(), CLOSED);
-    }
-
-    private boolean closed;
-
-    public CloseJobResponse(boolean closed) {
-        this.closed = closed;
-    }
-
-    public static CloseJobResponse fromXContent(XContentParser parser) throws IOException {
-        return PARSER.parse(parser, null);
-    }
-
-    /**
-     * Has the job closed or not
-     * @return boolean value indicating the job closed status
-     */
-    public boolean isClosed() {
-        return closed;
-    }
-
-    @Override
-    public boolean equals(Object other) {
-        if (this == other) {
-            return true;
-        }
-
-        if (other == null || getClass() != other.getClass()) {
-            return false;
-        }
-
-        CloseJobResponse that = (CloseJobResponse) other;
-        return isClosed() == that.isClosed();
-    }
-
-    @Override
-    public int hashCode() {
-        return Objects.hash(isClosed());
-    }
-
-    @Override
-    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
-        builder.startObject();
-        builder.field(CLOSED.getPreferredName(), closed);
-        builder.endObject();
-        return builder;
-    }
-}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteCalendarEventRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteCalendarEventRequest.java
deleted file mode 100644
index 5ade66ff71820..0000000000000
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteCalendarEventRequest.java
+++ /dev/null
@@ -1,60 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-package org.elasticsearch.client.ml;
-
-import org.elasticsearch.client.Validatable;
-
-import java.util.Objects;
-
-/**
- * Request class for removing an event from an existing calendar
- */
-public class DeleteCalendarEventRequest implements Validatable {
-
-    private final String eventId;
-    private final String calendarId;
-
-    /**
-     * Create a new request referencing an existing Calendar and which event to remove
-     * from it.
-     *
-     * @param calendarId The non-null ID of the calendar
-     * @param eventId Scheduled Event to remove from the calendar, Cannot be null.
- */ - public DeleteCalendarEventRequest(String calendarId, String eventId) { - this.calendarId = Objects.requireNonNull(calendarId, "[calendar_id] must not be null."); - this.eventId = Objects.requireNonNull(eventId, "[event_id] must not be null."); - } - - public String getEventId() { - return eventId; - } - - public String getCalendarId() { - return calendarId; - } - - @Override - public int hashCode() { - return Objects.hash(eventId, calendarId); - } - - @Override - public boolean equals(Object other) { - if (this == other) { - return true; - } - - if (other == null || getClass() != other.getClass()) { - return false; - } - - DeleteCalendarEventRequest that = (DeleteCalendarEventRequest) other; - return Objects.equals(eventId, that.eventId) && Objects.equals(calendarId, that.calendarId); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteCalendarJobRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteCalendarJobRequest.java deleted file mode 100644 index fff975334d95c..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteCalendarJobRequest.java +++ /dev/null @@ -1,70 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.Validatable; - -import java.security.InvalidParameterException; -import java.util.Arrays; -import java.util.List; -import java.util.Objects; - -/** - * Request class for removing Machine Learning Jobs from an existing calendar - */ -public class DeleteCalendarJobRequest implements Validatable { - - private final List jobIds; - private final String calendarId; - - /** - * Create a new request referencing an existing Calendar and which JobIds to remove - * from it. - * - * @param calendarId The non-null ID of the calendar - * @param jobIds JobIds to remove from the calendar, cannot be empty, or contain null values. - * It can be a list of jobs or groups. - */ - public DeleteCalendarJobRequest(String calendarId, String... 
jobIds) { - this.calendarId = Objects.requireNonNull(calendarId, "[calendar_id] must not be null."); - if (jobIds.length == 0) { - throw new InvalidParameterException("jobIds must not be empty."); - } - if (Arrays.stream(jobIds).anyMatch(Objects::isNull)) { - throw new NullPointerException("jobIds must not contain null values."); - } - this.jobIds = Arrays.asList(jobIds); - } - - public List getJobIds() { - return jobIds; - } - - public String getCalendarId() { - return calendarId; - } - - @Override - public int hashCode() { - return Objects.hash(jobIds, calendarId); - } - - @Override - public boolean equals(Object other) { - if (this == other) { - return true; - } - - if (other == null || getClass() != other.getClass()) { - return false; - } - - DeleteCalendarJobRequest that = (DeleteCalendarJobRequest) other; - return Objects.equals(jobIds, that.jobIds) && Objects.equals(calendarId, that.calendarId); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteCalendarRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteCalendarRequest.java deleted file mode 100644 index 8777d202529f6..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteCalendarRequest.java +++ /dev/null @@ -1,48 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ - -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.Validatable; - -import java.util.Objects; - -/** - * Request to delete a Machine Learning Calendar - */ -public class DeleteCalendarRequest implements Validatable { - - private final String calendarId; - - /** - * The constructor requires a single calendar id. - * @param calendarId The calendar to delete. Must be {@code non-null} - */ - public DeleteCalendarRequest(String calendarId) { - this.calendarId = Objects.requireNonNull(calendarId, "[calendar_id] must not be null"); - } - - public String getCalendarId() { - return calendarId; - } - - @Override - public int hashCode() { - return Objects.hash(calendarId); - } - - @Override - public boolean equals(Object obj) { - if (obj == null || getClass() != obj.getClass()) { - return false; - } - - DeleteCalendarRequest other = (DeleteCalendarRequest) obj; - return Objects.equals(calendarId, other.calendarId); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteDataFrameAnalyticsRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteDataFrameAnalyticsRequest.java deleted file mode 100644 index ec19611ec58ae..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteDataFrameAnalyticsRequest.java +++ /dev/null @@ -1,83 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */
-
-package org.elasticsearch.client.ml;
-
-import org.elasticsearch.client.Validatable;
-import org.elasticsearch.client.ValidationException;
-import org.elasticsearch.core.TimeValue;
-
-import java.util.Objects;
-import java.util.Optional;
-
-/**
- * Request to delete a data frame analytics config
- */
-public class DeleteDataFrameAnalyticsRequest implements Validatable {
-
-    private final String id;
-    private Boolean force;
-    private TimeValue timeout;
-
-    public DeleteDataFrameAnalyticsRequest(String id) {
-        this.id = id;
-    }
-
-    public String getId() {
-        return id;
-    }
-
-    public Boolean getForce() {
-        return force;
-    }
-
-    /**
-     * Used to forcefully delete a job that is not stopped.
-     * This method is quicker than stopping and deleting the job.
-     *
-     * @param force When {@code true} forcefully delete a non-stopped job. Defaults to {@code false}
-     */
-    public void setForce(Boolean force) {
-        this.force = force;
-    }
-
-    public TimeValue getTimeout() {
-        return timeout;
-    }
-
-    /**
-     * Sets the time to wait until the job is deleted.
-     *
-     * @param timeout The time to wait until the job is deleted.
-     */
-    public void setTimeout(TimeValue timeout) {
-        this.timeout = timeout;
-    }
-
-    @Override
-    public Optional<ValidationException> validate() {
-        if (id == null) {
-            return Optional.of(ValidationException.withError("data frame analytics id must not be null"));
-        }
-        return Optional.empty();
-    }
-
-    @Override
-    public boolean equals(Object o) {
-        if (this == o) return true;
-        if (o == null || getClass() != o.getClass()) return false;
-
-        DeleteDataFrameAnalyticsRequest other = (DeleteDataFrameAnalyticsRequest) o;
-        return Objects.equals(id, other.id) && Objects.equals(force, other.force) && Objects.equals(timeout, other.timeout);
-    }
-
-    @Override
-    public int hashCode() {
-        return Objects.hash(id, force, timeout);
-    }
-}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteDatafeedRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteDatafeedRequest.java
deleted file mode 100644
index 4ed729e9fd079..0000000000000
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteDatafeedRequest.java
+++ /dev/null
@@ -1,63 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-package org.elasticsearch.client.ml;
-
-import org.elasticsearch.client.Validatable;
-
-import java.util.Objects;
-
-/**
- * Request to delete a Machine Learning Datafeed via its ID
- */
-public class DeleteDatafeedRequest implements Validatable {
-
-    private String datafeedId;
-    private Boolean force;
-
-    public DeleteDatafeedRequest(String datafeedId) {
-        this.datafeedId = Objects.requireNonNull(datafeedId, "[datafeed_id] must not be null");
-    }
-
-    public String getDatafeedId() {
-        return datafeedId;
-    }
-
-    public Boolean getForce() {
-        return force;
-    }
-
-    /**
-     * Used to forcefully delete a started datafeed.
-     * This method is quicker than stopping and deleting the datafeed.
-     *
-     * @param force When {@code true} forcefully delete a started datafeed. Defaults to {@code false}
-     */
-    public void setForce(Boolean force) {
-        this.force = force;
-    }
-
-    @Override
-    public int hashCode() {
-        return Objects.hash(datafeedId, force);
-    }
-
-    @Override
-    public boolean equals(Object obj) {
-        if (this == obj) {
-            return true;
-        }
-
-        if (obj == null || obj.getClass() != getClass()) {
-            return false;
-        }
-
-        DeleteDatafeedRequest other = (DeleteDatafeedRequest) obj;
-        return Objects.equals(datafeedId, other.datafeedId) && Objects.equals(force, other.force);
-    }
-
-}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteExpiredDataRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteExpiredDataRequest.java
deleted file mode 100644
index 66edbcfe8e961..0000000000000
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteExpiredDataRequest.java
+++ /dev/null
@@ -1,104 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-package org.elasticsearch.client.ml;
-
-import org.elasticsearch.client.Validatable;
-import org.elasticsearch.core.TimeValue;
-import org.elasticsearch.xcontent.ToXContentObject;
-import org.elasticsearch.xcontent.XContentBuilder;
-
-import java.io.IOException;
-import java.util.Objects;
-
-/**
- * Request to delete expired model snapshots and forecasts
- */
-public class DeleteExpiredDataRequest implements Validatable, ToXContentObject {
-
-    static final String REQUESTS_PER_SECOND = "requests_per_second";
-    static final String TIMEOUT = "timeout";
-    static final String JOB_ID = "job_id";
-
-    private final String jobId;
-    private final Float requestsPerSecond;
-    private final TimeValue timeout;
-
-    /**
-     * Create a new request to delete expired data
-     */
-    public DeleteExpiredDataRequest() {
-        this(null, null, null);
-    }
-
-    public DeleteExpiredDataRequest(String jobId, Float requestsPerSecond, TimeValue timeout) {
-        this.jobId = jobId;
-        this.requestsPerSecond = requestsPerSecond;
-        this.timeout = timeout;
-    }
-
-    /**
-     * The requests allowed per second in the underlying Delete by Query requests executed.
-     *
-     * `-1.0f` indicates that the standard nightly cleanup behavior should be run.
-     * Throttling scales according to the number of data nodes.
-     * `null` is default and means no throttling will occur.
-     */
-    public Float getRequestsPerSecond() {
-        return requestsPerSecond;
-    }
-
-    /**
-     * Indicates how long the deletion request will run until it times out.
-     *
-     * Default value is 8 hours.
-     */
-    public TimeValue getTimeout() {
-        return timeout;
-    }
-
-    /**
-     * The optional job id
-     *
-     * The default is `null` meaning all jobs.
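A minimal sketch of combining the three parameters above; the `client` handle and the concrete values are assumptions for illustration:

    // Throttle cleanup for a single job to 100 requests/s and give up after one hour.
    DeleteExpiredDataRequest request = new DeleteExpiredDataRequest("my-job", 100.0f, TimeValue.timeValueHours(1));
    DeleteExpiredDataResponse response = client.machineLearning().deleteExpiredData(request, RequestOptions.DEFAULT);
    boolean deleted = response.getDeleted();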
- * @return The job id or null - */ - public String getJobId() { - return jobId; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - DeleteExpiredDataRequest that = (DeleteExpiredDataRequest) o; - return Objects.equals(requestsPerSecond, that.requestsPerSecond) - && Objects.equals(timeout, that.timeout) - && Objects.equals(jobId, that.jobId); - } - - @Override - public int hashCode() { - return Objects.hash(requestsPerSecond, timeout, jobId); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - if (jobId != null) { - builder.field(JOB_ID, jobId); - } - if (requestsPerSecond != null) { - builder.field(REQUESTS_PER_SECOND, requestsPerSecond); - } - if (timeout != null) { - builder.field(TIMEOUT, timeout.getStringRep()); - } - builder.endObject(); - return builder; - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteExpiredDataResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteExpiredDataResponse.java deleted file mode 100644 index 18cd260698198..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteExpiredDataResponse.java +++ /dev/null @@ -1,77 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContent; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Objects; - -/** - * A response acknowledging the deletion of expired data - */ -public class DeleteExpiredDataResponse implements ToXContentObject { - - private static final ParseField DELETED = new ParseField("deleted"); - - public DeleteExpiredDataResponse(boolean deleted) { - this.deleted = deleted; - } - - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "delete_expired_data_response", - true, - a -> new DeleteExpiredDataResponse((Boolean) a[0]) - ); - - static { - PARSER.declareBoolean(ConstructingObjectParser.constructorArg(), DELETED); - } - - public static DeleteExpiredDataResponse fromXContent(XContentParser parser) throws IOException { - return PARSER.parse(parser, null); - } - - private final Boolean deleted; - - public Boolean getDeleted() { - return deleted; - } - - @Override - public int hashCode() { - return Objects.hash(deleted); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { - builder.startObject(); - if (deleted != null) { - builder.field(DELETED.getPreferredName(), deleted); - } - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object obj) { - if (obj == null) { - return false; - } - if (getClass() != obj.getClass()) { - return false; - } - DeleteExpiredDataResponse response = (DeleteExpiredDataResponse) obj; - return 
Objects.equals(deleted, response.deleted); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteFilterRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteFilterRequest.java deleted file mode 100644 index a98ad85c775e0..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteFilterRequest.java +++ /dev/null @@ -1,49 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.Validatable; - -import java.util.Objects; - -/** - * A request to delete a machine learning filter - */ -public class DeleteFilterRequest implements Validatable { - - private final String filterId; - - public DeleteFilterRequest(String filterId) { - this.filterId = Objects.requireNonNull(filterId, "[filter_id] is required"); - } - - public String getId() { - return filterId; - } - - @Override - public int hashCode() { - return Objects.hash(filterId); - } - - @Override - public boolean equals(Object obj) { - if (this == obj) { - return true; - } - if (obj == null) { - return false; - } - if (getClass() != obj.getClass()) { - return false; - } - final DeleteFilterRequest other = (DeleteFilterRequest) obj; - - return Objects.equals(filterId, other.filterId); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteForecastRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteForecastRequest.java deleted file mode 100644 index 11a49bf3aa270..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteForecastRequest.java +++ /dev/null @@ -1,167 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */
-package org.elasticsearch.client.ml;
-
-import org.elasticsearch.client.Validatable;
-import org.elasticsearch.client.ml.job.config.Job;
-import org.elasticsearch.common.Strings;
-import org.elasticsearch.core.TimeValue;
-import org.elasticsearch.xcontent.ConstructingObjectParser;
-import org.elasticsearch.xcontent.ParseField;
-import org.elasticsearch.xcontent.ToXContentObject;
-import org.elasticsearch.xcontent.XContentBuilder;
-
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.List;
-import java.util.Objects;
-
-/**
- * POJO for a delete forecast request
- */
-public class DeleteForecastRequest implements Validatable, ToXContentObject {
-
-    public static final ParseField FORECAST_ID = new ParseField("forecast_id");
-    public static final ParseField ALLOW_NO_FORECASTS = new ParseField("allow_no_forecasts");
-    public static final ParseField TIMEOUT = new ParseField("timeout");
-    public static final String ALL = "_all";
-
-    public static final ConstructingObjectParser<DeleteForecastRequest, Void> PARSER = new ConstructingObjectParser<>(
-        "delete_forecast_request",
-        (a) -> new DeleteForecastRequest((String) a[0])
-    );
-
-    static {
-        PARSER.declareString(ConstructingObjectParser.constructorArg(), Job.ID);
-        PARSER.declareStringOrNull((c, p) -> c.setForecastIds(Strings.commaDelimitedListToStringArray(p)), FORECAST_ID);
-        PARSER.declareBoolean(DeleteForecastRequest::setAllowNoForecasts, ALLOW_NO_FORECASTS);
-        PARSER.declareString(DeleteForecastRequest::timeout, TIMEOUT);
    }
-
-    /**
-     * Create a new {@link DeleteForecastRequest} that explicitly deletes all forecasts
-     *
-     * @param jobId the jobId of the Job whose forecasts to delete
-     */
-    public static DeleteForecastRequest deleteAllForecasts(String jobId) {
-        DeleteForecastRequest request = new DeleteForecastRequest(jobId);
-        request.setForecastIds(ALL);
-        return request;
-    }
-
-    private final String jobId;
-    private List<String> forecastIds = new ArrayList<>();
-    private Boolean allowNoForecasts;
-    private TimeValue timeout;
-
-    /**
-     * Create a new DeleteForecastRequest for the given Job ID
-     *
-     * @param jobId the jobId of the Job whose forecast(s) to delete
-     */
-    public DeleteForecastRequest(String jobId) {
-        this.jobId = Objects.requireNonNull(jobId, Job.ID.getPreferredName());
-    }
-
-    public String getJobId() {
-        return jobId;
-    }
-
-    public List<String> getForecastIds() {
-        return forecastIds;
-    }
-
-    /**
-     * The forecast IDs to delete. Can also be {@link DeleteForecastRequest#ALL} to explicitly delete ALL forecasts
-     *
-     * @param forecastIds forecast IDs to delete
-     */
-    public void setForecastIds(String... forecastIds) {
-        setForecastIds(Arrays.asList(forecastIds));
-    }
-
-    void setForecastIds(List<String> forecastIds) {
-        if (forecastIds.stream().anyMatch(Objects::isNull)) {
-            throw new NullPointerException("forecastIds must not contain null values");
-        }
-        this.forecastIds = new ArrayList<>(forecastIds);
-    }
-
-    public Boolean getAllowNoForecasts() {
-        return allowNoForecasts;
-    }
-
-    /**
-     * Sets the value of "allow_no_forecasts".
-     *
-     * @param allowNoForecasts when {@code true} no error is thrown when {@link DeleteForecastRequest#ALL} does not find any forecasts
-     */
-    public void setAllowNoForecasts(boolean allowNoForecasts) {
-        this.allowNoForecasts = allowNoForecasts;
-    }
-
-    /**
-     * Allows to set the timeout
-     * @param timeout timeout as a string (e.g.
1s) - */ - public void timeout(String timeout) { - this.timeout = TimeValue.parseTimeValue(timeout, this.timeout, getClass().getSimpleName() + ".timeout"); - } - - /** - * Allows to set the timeout - * @param timeout timeout as a {@link TimeValue} - */ - public void timeout(TimeValue timeout) { - this.timeout = timeout; - } - - public TimeValue timeout() { - return timeout; - } - - @Override - public boolean equals(Object other) { - if (this == other) { - return true; - } - - if (other == null || getClass() != other.getClass()) { - return false; - } - - DeleteForecastRequest that = (DeleteForecastRequest) other; - return Objects.equals(jobId, that.jobId) - && Objects.equals(forecastIds, that.forecastIds) - && Objects.equals(allowNoForecasts, that.allowNoForecasts) - && Objects.equals(timeout, that.timeout); - } - - @Override - public int hashCode() { - return Objects.hash(jobId, forecastIds, allowNoForecasts, timeout); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(Job.ID.getPreferredName(), jobId); - if (forecastIds != null) { - builder.field(FORECAST_ID.getPreferredName(), Strings.collectionToCommaDelimitedString(forecastIds)); - } - if (allowNoForecasts != null) { - builder.field(ALLOW_NO_FORECASTS.getPreferredName(), allowNoForecasts); - } - if (timeout != null) { - builder.field(TIMEOUT.getPreferredName(), timeout.getStringRep()); - } - builder.endObject(); - return builder; - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteJobRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteJobRequest.java deleted file mode 100644 index dc4c0cd4d6c86..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteJobRequest.java +++ /dev/null @@ -1,86 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.Validatable; - -import java.util.Objects; - -/** - * Request to delete a Machine Learning Job via its ID - */ -public class DeleteJobRequest implements Validatable { - - private String jobId; - private Boolean force; - private Boolean waitForCompletion; - - public DeleteJobRequest(String jobId) { - this.jobId = Objects.requireNonNull(jobId, "[job_id] must not be null"); - } - - public String getJobId() { - return jobId; - } - - /** - * The jobId which to delete - * @param jobId unique jobId to delete, must not be null - */ - public void setJobId(String jobId) { - this.jobId = Objects.requireNonNull(jobId, "[job_id] must not be null"); - } - - public Boolean getForce() { - return force; - } - - /** - * Used to forcefully delete an opened job. - * This method is quicker than closing and deleting the job. - * - * @param force When {@code true} forcefully delete an opened job. 
Defaults to {@code false} - */ - public void setForce(Boolean force) { - this.force = force; - } - - public Boolean getWaitForCompletion() { - return waitForCompletion; - } - - /** - * Set whether this request should wait until the operation has completed before returning - * @param waitForCompletion When {@code true} the call will wait for the job deletion to complete. - * Otherwise, the deletion will be executed asynchronously and the response - * will contain the task id. - */ - public void setWaitForCompletion(Boolean waitForCompletion) { - this.waitForCompletion = waitForCompletion; - } - - @Override - public int hashCode() { - return Objects.hash(jobId, force); - } - - @Override - public boolean equals(Object obj) { - if (this == obj) { - return true; - } - - if (obj == null || obj.getClass() != getClass()) { - return false; - } - - DeleteJobRequest other = (DeleteJobRequest) obj; - return Objects.equals(jobId, other.jobId) && Objects.equals(force, other.force); - } - -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteJobResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteJobResponse.java deleted file mode 100644 index ad843da43a357..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteJobResponse.java +++ /dev/null @@ -1,104 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.core.Nullable; -import org.elasticsearch.tasks.TaskId; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Objects; - -/** - * Response object that contains the acknowledgement or the task id - * depending on whether the delete job action was requested to wait for completion. 
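A short sketch of exercising both response shapes described here; the `client` handle is an assumption for illustration:

    DeleteJobRequest request = new DeleteJobRequest("my-job");
    request.setWaitForCompletion(false); // return a task id instead of blocking until deletion finishes
    DeleteJobResponse response = client.machineLearning().deleteJob(request, RequestOptions.DEFAULT);
    // With waitForCompletion=false the acknowledgement is null and the task id is set.
    TaskId task = response.getTask();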
- */ -public class DeleteJobResponse implements ToXContentObject { - - private static final ParseField ACKNOWLEDGED = new ParseField("acknowledged"); - private static final ParseField TASK = new ParseField("task"); - - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "delete_job_response", - true, - a -> new DeleteJobResponse((Boolean) a[0], (TaskId) a[1]) - ); - - static { - PARSER.declareBoolean(ConstructingObjectParser.optionalConstructorArg(), ACKNOWLEDGED); - PARSER.declareField(ConstructingObjectParser.optionalConstructorArg(), TaskId.parser(), TASK, ObjectParser.ValueType.STRING); - } - - public static DeleteJobResponse fromXContent(XContentParser parser) throws IOException { - return PARSER.parse(parser, null); - } - - private final Boolean acknowledged; - private final TaskId task; - - DeleteJobResponse(@Nullable Boolean acknowledged, @Nullable TaskId task) { - assert acknowledged != null || task != null; - this.acknowledged = acknowledged; - this.task = task; - } - - /** - * Get the action acknowledgement - * @return {@code null} when the request had {@link DeleteJobRequest#getWaitForCompletion()} set to {@code false} or - * otherwise a {@code boolean} that indicates whether the job was deleted successfully. - */ - public Boolean getAcknowledged() { - return acknowledged; - } - - /** - * Get the task id - * @return {@code null} when the request had {@link DeleteJobRequest#getWaitForCompletion()} set to {@code true} or - * otherwise the id of the job deletion task. - */ - public TaskId getTask() { - return task; - } - - @Override - public int hashCode() { - return Objects.hash(acknowledged, task); - } - - @Override - public boolean equals(Object other) { - if (this == other) { - return true; - } - - if (other == null || getClass() != other.getClass()) { - return false; - } - - DeleteJobResponse that = (DeleteJobResponse) other; - return Objects.equals(acknowledged, that.acknowledged) && Objects.equals(task, that.task); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - if (acknowledged != null) { - builder.field(ACKNOWLEDGED.getPreferredName(), acknowledged); - } - if (task != null) { - builder.field(TASK.getPreferredName(), task.toString()); - } - builder.endObject(); - return builder; - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteModelSnapshotRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteModelSnapshotRequest.java deleted file mode 100644 index 75dda5d47eade..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteModelSnapshotRequest.java +++ /dev/null @@ -1,56 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.Validatable; -import org.elasticsearch.client.ml.job.config.Job; -import org.elasticsearch.client.ml.job.process.ModelSnapshot; - -import java.util.Objects; - -/** - * Request to delete a Machine Learning Model Snapshot Job via its Job and Snapshot IDs - */ -public class DeleteModelSnapshotRequest implements Validatable { - - private final String jobId; - private final String snapshotId; - - public DeleteModelSnapshotRequest(String jobId, String snapshotId) { - this.jobId = Objects.requireNonNull(jobId, "[" + Job.ID + "] must not be null"); - this.snapshotId = Objects.requireNonNull(snapshotId, "[" + ModelSnapshot.SNAPSHOT_ID + "] must not be null"); - } - - public String getJobId() { - return jobId; - } - - public String getSnapshotId() { - return snapshotId; - } - - @Override - public int hashCode() { - return Objects.hash(jobId, snapshotId); - } - - @Override - public boolean equals(Object obj) { - if (this == obj) { - return true; - } - - if (obj == null || obj.getClass() != getClass()) { - return false; - } - - DeleteModelSnapshotRequest other = (DeleteModelSnapshotRequest) obj; - return Objects.equals(jobId, other.jobId) && Objects.equals(snapshotId, other.snapshotId); - } - -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteTrainedModelAliasRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteTrainedModelAliasRequest.java deleted file mode 100644 index aa91a01a0d775..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteTrainedModelAliasRequest.java +++ /dev/null @@ -1,46 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ - -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.Validatable; - -import java.util.Objects; - -public class DeleteTrainedModelAliasRequest implements Validatable { - - private final String modelAlias; - private final String modelId; - - public DeleteTrainedModelAliasRequest(String modelAlias, String modelId) { - this.modelAlias = Objects.requireNonNull(modelAlias); - this.modelId = Objects.requireNonNull(modelId); - } - - public String getModelAlias() { - return modelAlias; - } - - public String getModelId() { - return modelId; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - DeleteTrainedModelAliasRequest request = (DeleteTrainedModelAliasRequest) o; - return Objects.equals(modelAlias, request.modelAlias) && Objects.equals(modelId, request.modelId); - } - - @Override - public int hashCode() { - return Objects.hash(modelAlias, modelId); - } - -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteTrainedModelRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteTrainedModelRequest.java deleted file mode 100644 index a964c8f0ddd63..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteTrainedModelRequest.java +++ /dev/null @@ -1,53 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. 
under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-
-package org.elasticsearch.client.ml;
-
-import org.elasticsearch.client.Validatable;
-import org.elasticsearch.client.ValidationException;
-
-import java.util.Objects;
-import java.util.Optional;
-
-/**
- * Request to delete a trained model
- */
-public class DeleteTrainedModelRequest implements Validatable {
-
-    private final String id;
-
-    public DeleteTrainedModelRequest(String id) {
-        this.id = id;
-    }
-
-    public String getId() {
-        return id;
-    }
-
-    @Override
-    public Optional<ValidationException> validate() {
-        if (id == null) {
-            return Optional.of(ValidationException.withError("trained model id must not be null"));
-        }
-        return Optional.empty();
-    }
-
-    @Override
-    public boolean equals(Object o) {
-        if (this == o) return true;
-        if (o == null || getClass() != o.getClass()) return false;
-
-        DeleteTrainedModelRequest other = (DeleteTrainedModelRequest) o;
-        return Objects.equals(id, other.id);
-    }
-
-    @Override
-    public int hashCode() {
-        return Objects.hash(id);
-    }
-}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/EstimateModelMemoryRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/EstimateModelMemoryRequest.java
deleted file mode 100644
index 2a2a43fb2441e..0000000000000
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/EstimateModelMemoryRequest.java
+++ /dev/null
@@ -1,99 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */ -public class EstimateModelMemoryRequest implements Validatable, ToXContentObject { - - public static final String ANALYSIS_CONFIG = "analysis_config"; - public static final String OVERALL_CARDINALITY = "overall_cardinality"; - public static final String MAX_BUCKET_CARDINALITY = "max_bucket_cardinality"; - - private final AnalysisConfig analysisConfig; - private Map overallCardinality = Collections.emptyMap(); - private Map maxBucketCardinality = Collections.emptyMap(); - - @Override - public Optional validate() { - return Optional.empty(); - } - - public EstimateModelMemoryRequest(AnalysisConfig analysisConfig) { - this.analysisConfig = Objects.requireNonNull(analysisConfig); - } - - public AnalysisConfig getAnalysisConfig() { - return analysisConfig; - } - - public Map getOverallCardinality() { - return overallCardinality; - } - - public void setOverallCardinality(Map overallCardinality) { - this.overallCardinality = Collections.unmodifiableMap(overallCardinality); - } - - public Map getMaxBucketCardinality() { - return maxBucketCardinality; - } - - public void setMaxBucketCardinality(Map maxBucketCardinality) { - this.maxBucketCardinality = Collections.unmodifiableMap(maxBucketCardinality); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(ANALYSIS_CONFIG, analysisConfig); - if (overallCardinality.isEmpty() == false) { - builder.field(OVERALL_CARDINALITY, overallCardinality); - } - if (maxBucketCardinality.isEmpty() == false) { - builder.field(MAX_BUCKET_CARDINALITY, maxBucketCardinality); - } - builder.endObject(); - return builder; - } - - @Override - public int hashCode() { - return Objects.hash(analysisConfig, overallCardinality, maxBucketCardinality); - } - - @Override - public boolean equals(Object other) { - if (this == other) { - return true; - } - - if (other == null || getClass() != other.getClass()) { - return false; - } - - EstimateModelMemoryRequest that = (EstimateModelMemoryRequest) other; - return Objects.equals(analysisConfig, that.analysisConfig) - && Objects.equals(overallCardinality, that.overallCardinality) - && Objects.equals(maxBucketCardinality, that.maxBucketCardinality); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/EstimateModelMemoryResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/EstimateModelMemoryResponse.java deleted file mode 100644 index 806a76fe7eb63..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/EstimateModelMemoryResponse.java +++ /dev/null @@ -1,69 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
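A minimal sketch of supplying cardinalities for the estimate; the `analysisConfig` and `client` values are assumed to already exist and are not part of this class:

    EstimateModelMemoryRequest request = new EstimateModelMemoryRequest(analysisConfig);
    // Keys are field names; values are cardinalities observed in the source data.
    request.setOverallCardinality(Collections.singletonMap("user", 50_000L));
    request.setMaxBucketCardinality(Collections.singletonMap("user", 500L));
    EstimateModelMemoryResponse response = client.machineLearning().estimateModelMemory(request, RequestOptions.DEFAULT);
    ByteSizeValue estimate = response.getModelMemoryEstimate();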
- */ - -package org.elasticsearch.client.ml; - -import org.elasticsearch.common.unit.ByteSizeValue; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.XContentParser; - -import java.util.Objects; - -import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; - -public class EstimateModelMemoryResponse { - - public static final ParseField MODEL_MEMORY_ESTIMATE = new ParseField("model_memory_estimate"); - - static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "estimate_model_memory", - true, - args -> new EstimateModelMemoryResponse((String) args[0]) - ); - - static { - PARSER.declareString(constructorArg(), MODEL_MEMORY_ESTIMATE); - } - - public static EstimateModelMemoryResponse fromXContent(final XContentParser parser) { - return PARSER.apply(parser, null); - } - - private final ByteSizeValue modelMemoryEstimate; - - public EstimateModelMemoryResponse(String modelMemoryEstimate) { - this.modelMemoryEstimate = ByteSizeValue.parseBytesSizeValue(modelMemoryEstimate, MODEL_MEMORY_ESTIMATE.getPreferredName()); - } - - /** - * @return An estimate of the model memory the supplied analysis config is likely to need given the supplied field cardinalities. - */ - public ByteSizeValue getModelMemoryEstimate() { - return modelMemoryEstimate; - } - - @Override - public boolean equals(Object o) { - - if (this == o) { - return true; - } - if (o == null || getClass() != o.getClass()) { - return false; - } - - EstimateModelMemoryResponse other = (EstimateModelMemoryResponse) o; - return Objects.equals(this.modelMemoryEstimate, other.modelMemoryEstimate); - } - - @Override - public int hashCode() { - return Objects.hash(modelMemoryEstimate); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/EvaluateDataFrameRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/EvaluateDataFrameRequest.java deleted file mode 100644 index 5bad5d73a8892..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/EvaluateDataFrameRequest.java +++ /dev/null @@ -1,142 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ - -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.Validatable; -import org.elasticsearch.client.ValidationException; -import org.elasticsearch.client.ml.dataframe.QueryConfig; -import org.elasticsearch.client.ml.dataframe.evaluation.Evaluation; -import org.elasticsearch.core.Nullable; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collections; -import java.util.List; -import java.util.Objects; -import java.util.Optional; - -import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken; -import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; -import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; - -public class EvaluateDataFrameRequest implements ToXContentObject, Validatable { - - private static final ParseField INDEX = new ParseField("index"); - private static final ParseField QUERY = new ParseField("query"); - private static final ParseField EVALUATION = new ParseField("evaluation"); - - @SuppressWarnings("unchecked") - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "evaluate_data_frame_request", - true, - args -> new EvaluateDataFrameRequest((List) args[0], (QueryConfig) args[1], (Evaluation) args[2]) - ); - - static { - PARSER.declareStringArray(constructorArg(), INDEX); - PARSER.declareObject(optionalConstructorArg(), (p, c) -> QueryConfig.fromXContent(p), QUERY); - PARSER.declareObject(constructorArg(), (p, c) -> parseEvaluation(p), EVALUATION); - } - - private static Evaluation parseEvaluation(XContentParser parser) throws IOException { - ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.currentToken(), parser); - ensureExpectedToken(XContentParser.Token.FIELD_NAME, parser.nextToken(), parser); - Evaluation evaluation = parser.namedObject(Evaluation.class, parser.currentName(), null); - ensureExpectedToken(XContentParser.Token.END_OBJECT, parser.nextToken(), parser); - return evaluation; - } - - public static EvaluateDataFrameRequest fromXContent(XContentParser parser) { - return PARSER.apply(parser, null); - } - - private List indices; - private QueryConfig queryConfig; - private Evaluation evaluation; - - public EvaluateDataFrameRequest(String index, @Nullable QueryConfig queryConfig, Evaluation evaluation) { - this(Arrays.asList(index), queryConfig, evaluation); - } - - public EvaluateDataFrameRequest(List indices, @Nullable QueryConfig queryConfig, Evaluation evaluation) { - setIndices(indices); - setQueryConfig(queryConfig); - setEvaluation(evaluation); - } - - public List getIndices() { - return Collections.unmodifiableList(indices); - } - - public final void setIndices(List indices) { - Objects.requireNonNull(indices); - this.indices = new ArrayList<>(indices); - } - - public QueryConfig getQueryConfig() { - return queryConfig; - } - - public final void setQueryConfig(QueryConfig queryConfig) { - this.queryConfig = queryConfig; - } - - public Evaluation getEvaluation() { - return evaluation; - } - - public final void setEvaluation(Evaluation evaluation) { - this.evaluation = evaluation; - } - - @Override - public Optional validate() { - List errors = new ArrayList<>(); - if 
(indices.isEmpty()) { - errors.add("At least one index must be specified"); - } - if (evaluation == null) { - errors.add("evaluation must not be null"); - } - return errors.isEmpty() ? Optional.empty() : Optional.of(ValidationException.withErrors(errors)); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.stringListField(INDEX.getPreferredName(), indices); - if (queryConfig != null) { - builder.field(QUERY.getPreferredName(), queryConfig.getQuery()); - } - builder.startObject(EVALUATION.getPreferredName()).field(evaluation.getName(), evaluation).endObject(); - builder.endObject(); - return builder; - } - - @Override - public int hashCode() { - return Objects.hash(indices, queryConfig, evaluation); - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - EvaluateDataFrameRequest that = (EvaluateDataFrameRequest) o; - return Objects.equals(indices, that.indices) - && Objects.equals(queryConfig, that.queryConfig) - && Objects.equals(evaluation, that.evaluation); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/EvaluateDataFrameResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/EvaluateDataFrameResponse.java deleted file mode 100644 index 82213974297ab..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/EvaluateDataFrameResponse.java +++ /dev/null @@ -1,103 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ - -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.ml.dataframe.evaluation.EvaluationMetric; -import org.elasticsearch.common.Strings; -import org.elasticsearch.xcontent.NamedObjectNotFoundException; -import org.elasticsearch.xcontent.ToXContent; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.LinkedHashMap; -import java.util.List; -import java.util.Map; -import java.util.Objects; -import java.util.stream.Collectors; - -import static org.elasticsearch.client.ml.dataframe.evaluation.MlEvaluationNamedXContentProvider.registeredMetricName; -import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken; - -public class EvaluateDataFrameResponse implements ToXContentObject { - - public static EvaluateDataFrameResponse fromXContent(XContentParser parser) throws IOException { - if (parser.currentToken() == null) { - parser.nextToken(); - } - ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.currentToken(), parser); - ensureExpectedToken(XContentParser.Token.FIELD_NAME, parser.nextToken(), parser); - String evaluationName = parser.currentName(); - parser.nextToken(); - Map metrics = parser.map(LinkedHashMap::new, p -> parseMetric(evaluationName, p)); - List knownMetrics = metrics.values() - .stream() - .filter(Objects::nonNull) // Filter out null values returned by {@link EvaluateDataFrameResponse::parseMetric}. 
- .collect(Collectors.toList()); - ensureExpectedToken(XContentParser.Token.END_OBJECT, parser.nextToken(), parser); - return new EvaluateDataFrameResponse(evaluationName, knownMetrics); - } - - private static EvaluationMetric.Result parseMetric(String evaluationName, XContentParser parser) throws IOException { - String metricName = parser.currentName(); - try { - return parser.namedObject(EvaluationMetric.Result.class, registeredMetricName(evaluationName, metricName), null); - } catch (NamedObjectNotFoundException e) { - parser.skipChildren(); - // Metric name not recognized. Return {@code null} value here and filter it out later. - return null; - } - } - - private final String evaluationName; - private final Map metrics; - - public EvaluateDataFrameResponse(String evaluationName, List metrics) { - this.evaluationName = Objects.requireNonNull(evaluationName); - this.metrics = Objects.requireNonNull(metrics).stream().collect(Collectors.toUnmodifiableMap(m -> m.getMetricName(), m -> m)); - } - - public String getEvaluationName() { - return evaluationName; - } - - public List getMetrics() { - return metrics.values().stream().collect(Collectors.toList()); - } - - @SuppressWarnings("unchecked") - public T getMetricByName(String metricName) { - Objects.requireNonNull(metricName); - return (T) metrics.get(metricName); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { - return builder.startObject().field(evaluationName, metrics).endObject(); - } - - @Override - public boolean equals(Object o) { - if (o == this) return true; - if (o == null || getClass() != o.getClass()) return false; - EvaluateDataFrameResponse that = (EvaluateDataFrameResponse) o; - return Objects.equals(evaluationName, that.evaluationName) && Objects.equals(metrics, that.metrics); - } - - @Override - public int hashCode() { - return Objects.hash(evaluationName, metrics); - } - - @Override - public final String toString() { - return Strings.toString(this); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/ExplainDataFrameAnalyticsRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/ExplainDataFrameAnalyticsRequest.java deleted file mode 100644 index b9df8faacdda8..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/ExplainDataFrameAnalyticsRequest.java +++ /dev/null @@ -1,61 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.Validatable; -import org.elasticsearch.client.ml.dataframe.DataFrameAnalyticsConfig; -import org.elasticsearch.core.Nullable; - -import java.util.Objects; - -/** - * Request to explain the following about a data frame analytics job: - *

- * <ul>
- *     <li>field selection: which fields are included or are not in the analysis</li>
- *     <li>memory estimation: how much memory the job is estimated to require</li>
- * </ul>
    - */ -public class ExplainDataFrameAnalyticsRequest implements Validatable { - - private final String id; - private final DataFrameAnalyticsConfig config; - - public ExplainDataFrameAnalyticsRequest(String id) { - this.id = Objects.requireNonNull(id); - this.config = null; - } - - public ExplainDataFrameAnalyticsRequest(DataFrameAnalyticsConfig config) { - this.id = null; - this.config = Objects.requireNonNull(config); - } - - @Nullable - public String getId() { - return id; - } - - @Nullable - public DataFrameAnalyticsConfig getConfig() { - return config; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - - ExplainDataFrameAnalyticsRequest other = (ExplainDataFrameAnalyticsRequest) o; - return Objects.equals(id, other.id) && Objects.equals(config, other.config); - } - - @Override - public int hashCode() { - return Objects.hash(id, config); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/ExplainDataFrameAnalyticsResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/ExplainDataFrameAnalyticsResponse.java deleted file mode 100644 index d9e15f8ff7031..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/ExplainDataFrameAnalyticsResponse.java +++ /dev/null @@ -1,83 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.ml.dataframe.explain.FieldSelection; -import org.elasticsearch.client.ml.dataframe.explain.MemoryEstimation; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.List; -import java.util.Objects; - -public class ExplainDataFrameAnalyticsResponse implements ToXContentObject { - - public static final ParseField TYPE = new ParseField("explain_data_frame_analytics_response"); - - public static final ParseField FIELD_SELECTION = new ParseField("field_selection"); - public static final ParseField MEMORY_ESTIMATION = new ParseField("memory_estimation"); - - public static ExplainDataFrameAnalyticsResponse fromXContent(XContentParser parser) throws IOException { - return PARSER.parse(parser, null); - } - - @SuppressWarnings("unchecked") - static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - TYPE.getPreferredName(), - true, - args -> new ExplainDataFrameAnalyticsResponse((List) args[0], (MemoryEstimation) args[1]) - ); - - static { - PARSER.declareObjectArray(ConstructingObjectParser.constructorArg(), FieldSelection.PARSER, FIELD_SELECTION); - PARSER.declareObject(ConstructingObjectParser.constructorArg(), MemoryEstimation.PARSER, MEMORY_ESTIMATION); - } - - private final List fieldSelection; - private final MemoryEstimation memoryEstimation; - - public ExplainDataFrameAnalyticsResponse(List fieldSelection, MemoryEstimation memoryEstimation) { - this.fieldSelection = Objects.requireNonNull(fieldSelection); - this.memoryEstimation = 
Objects.requireNonNull(memoryEstimation); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(FIELD_SELECTION.getPreferredName(), fieldSelection); - builder.field(MEMORY_ESTIMATION.getPreferredName(), memoryEstimation); - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object other) { - if (this == other) return true; - if (other == null || getClass() != other.getClass()) return false; - - ExplainDataFrameAnalyticsResponse that = (ExplainDataFrameAnalyticsResponse) other; - return Objects.equals(fieldSelection, that.fieldSelection) && Objects.equals(memoryEstimation, that.memoryEstimation); - } - - @Override - public int hashCode() { - return Objects.hash(fieldSelection, memoryEstimation); - } - - public MemoryEstimation getMemoryEstimation() { - return memoryEstimation; - } - - public List getFieldSelection() { - return fieldSelection; - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/FlushJobRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/FlushJobRequest.java deleted file mode 100644 index a19f787a6458f..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/FlushJobRequest.java +++ /dev/null @@ -1,181 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.Validatable; -import org.elasticsearch.client.ml.job.config.Job; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Objects; - -/** - * Request object to flush a given Machine Learning job. 
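A usage sketch under the same assumptions as the earlier examples (an existing `client` handle):

    FlushJobRequest request = new FlushJobRequest("my-job");
    request.setCalcInterim(true);         // calculate interim results for the most recent buckets
    request.setAdvanceTime("1403481700"); // epoch seconds; epoch millis and ISO strings are also accepted
    FlushJobResponse response = client.machineLearning().flushJob(request, RequestOptions.DEFAULT);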
- */ -public class FlushJobRequest implements Validatable, ToXContentObject { - - public static final ParseField CALC_INTERIM = new ParseField("calc_interim"); - public static final ParseField START = new ParseField("start"); - public static final ParseField END = new ParseField("end"); - public static final ParseField ADVANCE_TIME = new ParseField("advance_time"); - public static final ParseField SKIP_TIME = new ParseField("skip_time"); - - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "flush_job_request", - (a) -> new FlushJobRequest((String) a[0]) - ); - - static { - PARSER.declareString(ConstructingObjectParser.constructorArg(), Job.ID); - PARSER.declareBoolean(FlushJobRequest::setCalcInterim, CALC_INTERIM); - PARSER.declareString(FlushJobRequest::setStart, START); - PARSER.declareString(FlushJobRequest::setEnd, END); - PARSER.declareString(FlushJobRequest::setAdvanceTime, ADVANCE_TIME); - PARSER.declareString(FlushJobRequest::setSkipTime, SKIP_TIME); - } - - private final String jobId; - private Boolean calcInterim; - private String start; - private String end; - private String advanceTime; - private String skipTime; - - /** - * Create new Flush job request - * - * @param jobId The job ID of the job to flush - */ - public FlushJobRequest(String jobId) { - this.jobId = jobId; - } - - public String getJobId() { - return jobId; - } - - public boolean getCalcInterim() { - return calcInterim; - } - - /** - * When {@code true} calculates the interim results for the most recent bucket or all buckets within the latency period. - * - * @param calcInterim defaults to {@code false}. - */ - public void setCalcInterim(boolean calcInterim) { - this.calcInterim = calcInterim; - } - - public String getStart() { - return start; - } - - /** - * When used in conjunction with {@link FlushJobRequest#calcInterim}, - * specifies the start of the range of buckets on which to calculate interim results. - * - * @param start the beginning of the range of buckets; may be an epoch seconds, epoch millis or an ISO string - */ - public void setStart(String start) { - this.start = start; - } - - public String getEnd() { - return end; - } - - /** - * When used in conjunction with {@link FlushJobRequest#calcInterim}, specifies the end of the range - * of buckets on which to calculate interim results - * - * @param end the end of the range of buckets; may be an epoch seconds, epoch millis or an ISO string - */ - public void setEnd(String end) { - this.end = end; - } - - public String getAdvanceTime() { - return advanceTime; - } - - /** - * Specifies to advance to a particular time value. - * Results are generated and the model is updated for data from the specified time interval. - * - * @param advanceTime String representation of a timestamp; may be an epoch seconds, epoch millis or an ISO string - */ - public void setAdvanceTime(String advanceTime) { - this.advanceTime = advanceTime; - } - - public String getSkipTime() { - return skipTime; - } - - /** - * Specifies to skip to a particular time value. - * Results are not generated and the model is not updated for data from the specified time interval. 
- * - * @param skipTime String representation of a timestamp; may be an epoch seconds, epoch millis or an ISO string - */ - public void setSkipTime(String skipTime) { - this.skipTime = skipTime; - } - - @Override - public int hashCode() { - return Objects.hash(jobId, calcInterim, start, end, advanceTime, skipTime); - } - - @Override - public boolean equals(Object obj) { - if (this == obj) { - return true; - } - - if (obj == null || getClass() != obj.getClass()) { - return false; - } - - FlushJobRequest other = (FlushJobRequest) obj; - return Objects.equals(jobId, other.jobId) - && calcInterim == other.calcInterim - && Objects.equals(start, other.start) - && Objects.equals(end, other.end) - && Objects.equals(advanceTime, other.advanceTime) - && Objects.equals(skipTime, other.skipTime); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(Job.ID.getPreferredName(), jobId); - if (calcInterim != null) { - builder.field(CALC_INTERIM.getPreferredName(), calcInterim); - } - if (start != null) { - builder.field(START.getPreferredName(), start); - } - if (end != null) { - builder.field(END.getPreferredName(), end); - } - if (advanceTime != null) { - builder.field(ADVANCE_TIME.getPreferredName(), advanceTime); - } - if (skipTime != null) { - builder.field(SKIP_TIME.getPreferredName(), skipTime); - } - builder.endObject(); - return builder; - } - -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/FlushJobResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/FlushJobResponse.java deleted file mode 100644 index d85ec888b61a4..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/FlushJobResponse.java +++ /dev/null @@ -1,104 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.core.Nullable; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Date; -import java.util.Objects; - -/** - * Response object containing flush acknowledgement and additional data - */ -public class FlushJobResponse implements ToXContentObject { - - public static final ParseField FLUSHED = new ParseField("flushed"); - public static final ParseField LAST_FINALIZED_BUCKET_END = new ParseField("last_finalized_bucket_end"); - - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "flush_job_response", - true, - (a) -> { - boolean flushed = (boolean) a[0]; - Date date = a[1] == null ? 
null : new Date((long) a[1]); - return new FlushJobResponse(flushed, date); - } - ); - - static { - PARSER.declareBoolean(ConstructingObjectParser.constructorArg(), FLUSHED); - PARSER.declareLong(ConstructingObjectParser.optionalConstructorArg(), LAST_FINALIZED_BUCKET_END); - } - - public static FlushJobResponse fromXContent(XContentParser parser) throws IOException { - return PARSER.parse(parser, null); - } - - private final boolean flushed; - private final Date lastFinalizedBucketEnd; - - public FlushJobResponse(boolean flushed, @Nullable Date lastFinalizedBucketEnd) { - this.flushed = flushed; - this.lastFinalizedBucketEnd = lastFinalizedBucketEnd; - } - - /** - * Was the job successfully flushed or not - */ - public boolean isFlushed() { - return flushed; - } - - /** - * Provides the timestamp (in milliseconds-since-the-epoch) of the end of the last bucket that was processed. - */ - @Nullable - public Date getLastFinalizedBucketEnd() { - return lastFinalizedBucketEnd; - } - - @Override - public int hashCode() { - return Objects.hash(flushed, lastFinalizedBucketEnd); - } - - @Override - public boolean equals(Object other) { - if (this == other) { - return true; - } - - if (other == null || getClass() != other.getClass()) { - return false; - } - - FlushJobResponse that = (FlushJobResponse) other; - return that.flushed == flushed && Objects.equals(lastFinalizedBucketEnd, that.lastFinalizedBucketEnd); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(FLUSHED.getPreferredName(), flushed); - if (lastFinalizedBucketEnd != null) { - builder.timeField( - LAST_FINALIZED_BUCKET_END.getPreferredName(), - LAST_FINALIZED_BUCKET_END.getPreferredName() + "_string", - lastFinalizedBucketEnd.getTime() - ); - } - builder.endObject(); - return builder; - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/ForecastJobRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/ForecastJobRequest.java deleted file mode 100644 index 2a7f09c802dc3..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/ForecastJobRequest.java +++ /dev/null @@ -1,161 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
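A flush sketch using the request/response pair above; the job id is illustrative and the flushJob call assumes the same 7.x client wiring.

    FlushJobRequest flushRequest = new FlushJobRequest("my-job");
    flushRequest.setCalcInterim(true);                    // interim results for the latency window
    flushRequest.setAdvanceTime("2022-02-09T00:00:00Z");  // epoch seconds, epoch millis or ISO all work
    FlushJobResponse flushResponse =
        client.machineLearning().flushJob(flushRequest, RequestOptions.DEFAULT);
    if (flushResponse.isFlushed() && flushResponse.getLastFinalizedBucketEnd() != null) {
        System.out.println("flushed through " + flushResponse.getLastFinalizedBucketEnd());
    }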
- */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.Validatable; -import org.elasticsearch.client.ml.job.config.Job; -import org.elasticsearch.common.unit.ByteSizeValue; -import org.elasticsearch.core.TimeValue; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContent; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParseException; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Objects; - -/** - * Pojo for forecasting an existing and open Machine Learning Job - */ -public class ForecastJobRequest implements Validatable, ToXContentObject { - - public static final ParseField DURATION = new ParseField("duration"); - public static final ParseField EXPIRES_IN = new ParseField("expires_in"); - public static final ParseField MAX_MODEL_MEMORY = new ParseField("max_model_memory"); - - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "forecast_job_request", - (a) -> new ForecastJobRequest((String) a[0]) - ); - - static { - PARSER.declareString(ConstructingObjectParser.constructorArg(), Job.ID); - PARSER.declareString((request, val) -> request.setDuration(TimeValue.parseTimeValue(val, DURATION.getPreferredName())), DURATION); - PARSER.declareString( - (request, val) -> request.setExpiresIn(TimeValue.parseTimeValue(val, EXPIRES_IN.getPreferredName())), - EXPIRES_IN - ); - PARSER.declareField(ForecastJobRequest::setMaxModelMemory, (p, c) -> { - if (p.currentToken() == XContentParser.Token.VALUE_STRING) { - return ByteSizeValue.parseBytesSizeValue(p.text(), MAX_MODEL_MEMORY.getPreferredName()); - } else if (p.currentToken() == XContentParser.Token.VALUE_NUMBER) { - return new ByteSizeValue(p.longValue()); - } - throw new XContentParseException("Unsupported token [" + p.currentToken() + "]"); - }, MAX_MODEL_MEMORY, ObjectParser.ValueType.VALUE); - } - - private final String jobId; - private TimeValue duration; - private TimeValue expiresIn; - private ByteSizeValue maxModelMemory; - - /** - * A new forecast request - * - * @param jobId the non-null, existing, and opened jobId to forecast - */ - public ForecastJobRequest(String jobId) { - this.jobId = jobId; - } - - public String getJobId() { - return jobId; - } - - public TimeValue getDuration() { - return duration; - } - - /** - * Set the forecast duration - * - * A period of time that indicates how far into the future to forecast. - * The default value is 1 day. The forecast starts at the last record that was processed. - * - * @param duration TimeValue for the duration of the forecast - */ - public void setDuration(TimeValue duration) { - this.duration = duration; - } - - public TimeValue getExpiresIn() { - return expiresIn; - } - - /** - * Set the forecast expiration - * - * The period of time that forecast results are retained. - * After a forecast expires, the results are deleted. The default value is 14 days. - * If set to a value of 0, the forecast is never automatically deleted. - * - * @param expiresIn TimeValue for the forecast expiration - */ - public void setExpiresIn(TimeValue expiresIn) { - this.expiresIn = expiresIn; - } - - public ByteSizeValue getMaxModelMemory() { - return maxModelMemory; - } - - /** - * Set the amount of memory allowed to be used by this forecast. 
- * - * If the projected forecast memory usage exceeds this amount, the forecast will spool results to disk to keep within the limits. - * @param maxModelMemory A byte sized value less than 500MB and less than 40% of the associated job's configured memory usage. - * Defaults to 20MB. - */ - public ForecastJobRequest setMaxModelMemory(ByteSizeValue maxModelMemory) { - this.maxModelMemory = maxModelMemory; - return this; - } - - @Override - public int hashCode() { - return Objects.hash(jobId, duration, expiresIn, maxModelMemory); - } - - @Override - public boolean equals(Object obj) { - if (this == obj) { - return true; - } - if (obj == null || getClass() != obj.getClass()) { - return false; - } - ForecastJobRequest other = (ForecastJobRequest) obj; - return Objects.equals(jobId, other.jobId) - && Objects.equals(duration, other.duration) - && Objects.equals(expiresIn, other.expiresIn) - && Objects.equals(maxModelMemory, other.maxModelMemory); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { - builder.startObject(); - builder.field(Job.ID.getPreferredName(), jobId); - if (duration != null) { - builder.field(DURATION.getPreferredName(), duration.getStringRep()); - } - if (expiresIn != null) { - builder.field(EXPIRES_IN.getPreferredName(), expiresIn.getStringRep()); - } - if (maxModelMemory != null) { - builder.field(MAX_MODEL_MEMORY.getPreferredName(), maxModelMemory.getStringRep()); - } - builder.endObject(); - return builder; - } - -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/ForecastJobResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/ForecastJobResponse.java deleted file mode 100644 index b1cbd5d863c99..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/ForecastJobResponse.java +++ /dev/null @@ -1,90 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
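A sketch of building the forecast request defined above, staying within its documented limits (duration defaults to 1 day, expires_in to 14 days, max_model_memory to 20MB); the values are illustrative.

    ForecastJobRequest forecastRequest = new ForecastJobRequest("my-job");
    forecastRequest.setDuration(TimeValue.timeValueHours(48));
    forecastRequest.setExpiresIn(TimeValue.timeValueDays(7));   // 0 would mean never auto-delete
    forecastRequest.setMaxModelMemory(new ByteSizeValue(30, ByteSizeUnit.MB)); // stay under 500MB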
- */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Objects; - -/** - * Forecast response object - */ -public class ForecastJobResponse implements ToXContentObject { - - public static final ParseField ACKNOWLEDGED = new ParseField("acknowledged"); - public static final ParseField FORECAST_ID = new ParseField("forecast_id"); - - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "forecast_job_response", - true, - (a) -> new ForecastJobResponse((Boolean) a[0], (String) a[1]) - ); - - static { - PARSER.declareBoolean(ConstructingObjectParser.constructorArg(), ACKNOWLEDGED); - PARSER.declareString(ConstructingObjectParser.constructorArg(), FORECAST_ID); - } - - public static ForecastJobResponse fromXContent(XContentParser parser) throws IOException { - return PARSER.parse(parser, null); - } - - private final boolean acknowledged; - private final String forecastId; - - public ForecastJobResponse(boolean acknowledged, String forecastId) { - this.acknowledged = acknowledged; - this.forecastId = forecastId; - } - - /** - * Forecast creating acknowledgement - * @return {@code true} indicates success, {@code false} otherwise - */ - public boolean isAcknowledged() { - return acknowledged; - } - - /** - * The created forecast ID - */ - public String getForecastId() { - return forecastId; - } - - @Override - public int hashCode() { - return Objects.hash(acknowledged, forecastId); - } - - @Override - public boolean equals(Object obj) { - if (this == obj) { - return true; - } - if (obj == null || getClass() != obj.getClass()) { - return false; - } - ForecastJobResponse other = (ForecastJobResponse) obj; - return Objects.equals(acknowledged, other.acknowledged) && Objects.equals(forecastId, other.forecastId); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(ACKNOWLEDGED.getPreferredName(), acknowledged); - builder.field(FORECAST_ID.getPreferredName(), forecastId); - builder.endObject(); - return builder; - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetBucketsRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetBucketsRequest.java deleted file mode 100644 index 052f177bb8743..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetBucketsRequest.java +++ /dev/null @@ -1,250 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
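Executing that forecast and handling the response above; the forecastJob entry point is assumed from the 7.x client.

    ForecastJobResponse forecastResponse =
        client.machineLearning().forecastJob(forecastRequest, RequestOptions.DEFAULT);
    if (forecastResponse.isAcknowledged()) {
        // the id is the handle for fetching or deleting this forecast later
        String forecastId = forecastResponse.getForecastId();
    }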
- */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.Validatable; -import org.elasticsearch.client.core.PageParams; -import org.elasticsearch.client.ml.job.config.Job; -import org.elasticsearch.client.ml.job.results.Result; -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Objects; - -/** - * A request to retrieve buckets of a given job - */ -public class GetBucketsRequest implements Validatable, ToXContentObject { - - public static final ParseField EXPAND = new ParseField("expand"); - public static final ParseField EXCLUDE_INTERIM = new ParseField("exclude_interim"); - public static final ParseField START = new ParseField("start"); - public static final ParseField END = new ParseField("end"); - public static final ParseField ANOMALY_SCORE = new ParseField("anomaly_score"); - public static final ParseField SORT = new ParseField("sort"); - public static final ParseField DESCENDING = new ParseField("desc"); - - public static final ObjectParser PARSER = new ObjectParser<>("get_buckets_request", GetBucketsRequest::new); - - static { - PARSER.declareString((request, jobId) -> request.jobId = jobId, Job.ID); - PARSER.declareString(GetBucketsRequest::setTimestamp, Result.TIMESTAMP); - PARSER.declareBoolean(GetBucketsRequest::setExpand, EXPAND); - PARSER.declareBoolean(GetBucketsRequest::setExcludeInterim, EXCLUDE_INTERIM); - PARSER.declareStringOrNull(GetBucketsRequest::setStart, START); - PARSER.declareStringOrNull(GetBucketsRequest::setEnd, END); - PARSER.declareObject(GetBucketsRequest::setPageParams, PageParams.PARSER, PageParams.PAGE); - PARSER.declareDouble(GetBucketsRequest::setAnomalyScore, ANOMALY_SCORE); - PARSER.declareString(GetBucketsRequest::setSort, SORT); - PARSER.declareBoolean(GetBucketsRequest::setDescending, DESCENDING); - } - - private String jobId; - private String timestamp; - private Boolean expand; - private Boolean excludeInterim; - private String start; - private String end; - private PageParams pageParams; - private Double anomalyScore; - private String sort; - private Boolean descending; - - private GetBucketsRequest() {} - - /** - * Constructs a request to retrieve buckets of a given job - * @param jobId id of the job to retrieve buckets of - */ - public GetBucketsRequest(String jobId) { - this.jobId = Objects.requireNonNull(jobId); - } - - public String getJobId() { - return jobId; - } - - /** - * Sets the timestamp of a specific bucket to be retrieved. - * @param timestamp String representation of a timestamp; may be an epoch seconds, epoch millis or an ISO string - */ - public void setTimestamp(String timestamp) { - this.timestamp = timestamp; - } - - public String getTimestamp() { - return timestamp; - } - - public boolean isExpand() { - return expand; - } - - /** - * Sets the value of "expand". - * When {@code true}, buckets will be expanded to include their records. - * @param expand value of "expand" to be set - */ - public void setExpand(Boolean expand) { - this.expand = expand; - } - - public Boolean getExcludeInterim() { - return excludeInterim; - } - - /** - * Sets the value of "exclude_interim". - * When {@code true}, interim buckets will be filtered out. 
- * @param excludeInterim value of "exclude_interim" to be set - */ - public void setExcludeInterim(Boolean excludeInterim) { - this.excludeInterim = excludeInterim; - } - - public String getStart() { - return start; - } - - /** - * Sets the value of "start" which is a timestamp. - * Only buckets whose timestamp is on or after the "start" value will be returned. - * @param start String representation of a timestamp; may be an epoch seconds, epoch millis or an ISO string - */ - public void setStart(String start) { - this.start = start; - } - - public String getEnd() { - return end; - } - - /** - * Sets the value of "end" which is a timestamp. - * Only buckets whose timestamp is before the "end" value will be returned. - * @param end String representation of a timestamp; may be an epoch seconds, epoch millis or an ISO string - */ - public void setEnd(String end) { - this.end = end; - } - - public PageParams getPageParams() { - return pageParams; - } - - /** - * Sets the paging parameters - * @param pageParams the paging parameters - */ - public void setPageParams(PageParams pageParams) { - this.pageParams = pageParams; - } - - public Double getAnomalyScore() { - return anomalyScore; - } - - /** - * Sets the value of "anomaly_score". - * Only buckets with "anomaly_score" equal or greater will be returned. - * @param anomalyScore value of "anomaly_score". - */ - public void setAnomalyScore(Double anomalyScore) { - this.anomalyScore = anomalyScore; - } - - public String getSort() { - return sort; - } - - /** - * Sets the value of "sort". - * Specifies the bucket field to sort on. - * @param sort value of "sort". - */ - public void setSort(String sort) { - this.sort = sort; - } - - public Boolean getDescending() { - return descending; - } - - /** - * Sets the value of "desc". - * Specifies the sorting order. 
- * @param descending value of "desc" - */ - public void setDescending(boolean descending) { - this.descending = descending; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(Job.ID.getPreferredName(), jobId); - if (timestamp != null) { - builder.field(Result.TIMESTAMP.getPreferredName(), timestamp); - } - if (expand != null) { - builder.field(EXPAND.getPreferredName(), expand); - } - if (excludeInterim != null) { - builder.field(EXCLUDE_INTERIM.getPreferredName(), excludeInterim); - } - if (start != null) { - builder.field(START.getPreferredName(), start); - } - if (end != null) { - builder.field(END.getPreferredName(), end); - } - if (pageParams != null) { - builder.field(PageParams.PAGE.getPreferredName(), pageParams); - } - if (anomalyScore != null) { - builder.field(ANOMALY_SCORE.getPreferredName(), anomalyScore); - } - if (sort != null) { - builder.field(SORT.getPreferredName(), sort); - } - if (descending != null) { - builder.field(DESCENDING.getPreferredName(), descending); - } - builder.endObject(); - return builder; - } - - @Override - public int hashCode() { - return Objects.hash(jobId, timestamp, expand, excludeInterim, anomalyScore, pageParams, start, end, sort, descending); - } - - @Override - public boolean equals(Object obj) { - if (obj == null) { - return false; - } - if (getClass() != obj.getClass()) { - return false; - } - GetBucketsRequest other = (GetBucketsRequest) obj; - return Objects.equals(jobId, other.jobId) - && Objects.equals(timestamp, other.timestamp) - && Objects.equals(expand, other.expand) - && Objects.equals(excludeInterim, other.excludeInterim) - && Objects.equals(anomalyScore, other.anomalyScore) - && Objects.equals(pageParams, other.pageParams) - && Objects.equals(start, other.start) - && Objects.equals(end, other.end) - && Objects.equals(sort, other.sort) - && Objects.equals(descending, other.descending); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetBucketsResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetBucketsResponse.java deleted file mode 100644 index ced8ad201adbc..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetBucketsResponse.java +++ /dev/null @@ -1,70 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
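Putting the GetBucketsRequest options above together, a hypothetical query for the most anomalous finalized buckets:

    GetBucketsRequest bucketsRequest = new GetBucketsRequest("my-job");
    bucketsRequest.setExcludeInterim(true);         // finalized buckets only
    bucketsRequest.setStart("1454530200000");       // epoch millis...
    bucketsRequest.setEnd("2022-02-09T00:00:00Z");  // ...or ISO timestamps
    bucketsRequest.setAnomalyScore(75.0);           // anomaly_score of 75 or higher
    bucketsRequest.setSort("anomaly_score");
    bucketsRequest.setDescending(true);
    bucketsRequest.setPageParams(new PageParams(0, 100));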
- */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.ml.job.results.Bucket; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.List; -import java.util.Objects; - -/** - * A response containing the requested buckets - */ -public class GetBucketsResponse extends AbstractResultResponse { - - public static final ParseField BUCKETS = new ParseField("buckets"); - - @SuppressWarnings("unchecked") - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "get_buckets_response", - true, - a -> new GetBucketsResponse((List) a[0], (long) a[1]) - ); - - static { - PARSER.declareObjectArray(ConstructingObjectParser.constructorArg(), Bucket.PARSER, BUCKETS); - PARSER.declareLong(ConstructingObjectParser.constructorArg(), COUNT); - } - - public static GetBucketsResponse fromXContent(XContentParser parser) throws IOException { - return PARSER.parse(parser, null); - } - - GetBucketsResponse(List buckets, long count) { - super(BUCKETS, buckets, count); - } - - /** - * The retrieved buckets - * @return the retrieved buckets - */ - public List buckets() { - return results; - } - - @Override - public int hashCode() { - return Objects.hash(count, results); - } - - @Override - public boolean equals(Object obj) { - if (obj == null) { - return false; - } - if (getClass() != obj.getClass()) { - return false; - } - GetBucketsResponse other = (GetBucketsResponse) obj; - return count == other.count && Objects.equals(results, other.results); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetCalendarEventsRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetCalendarEventsRequest.java deleted file mode 100644 index 91bc1f0b537ba..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetCalendarEventsRequest.java +++ /dev/null @@ -1,154 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
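Reading the response above; getBuckets on machineLearning() is the assumed 7.x entry point.

    GetBucketsResponse bucketsResponse =
        client.machineLearning().getBuckets(bucketsRequest, RequestOptions.DEFAULT);
    System.out.println("matched " + bucketsResponse.count() + " buckets");
    for (Bucket bucket : bucketsResponse.buckets()) {
        System.out.println(bucket.getTimestamp() + " scored " + bucket.getAnomalyScore());
    }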
- */ - -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.Validatable; -import org.elasticsearch.client.core.PageParams; -import org.elasticsearch.client.ml.calendars.Calendar; -import org.elasticsearch.client.ml.job.config.Job; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Objects; - -/** - * Get the Scheduled Events for a Calendar - */ -public class GetCalendarEventsRequest implements Validatable, ToXContentObject { - - public static final ParseField START = new ParseField("start"); - public static final ParseField END = new ParseField("end"); - - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "get_calendar_events_request", - a -> new GetCalendarEventsRequest((String) a[0]) - ); - - static { - PARSER.declareString(ConstructingObjectParser.constructorArg(), Calendar.ID); - PARSER.declareString(GetCalendarEventsRequest::setStart, START); - PARSER.declareString(GetCalendarEventsRequest::setEnd, END); - PARSER.declareString(GetCalendarEventsRequest::setJobId, Job.ID); - PARSER.declareObject(GetCalendarEventsRequest::setPageParams, PageParams.PARSER, PageParams.PAGE); - } - - private final String calendarId; - private String start; - private String end; - private String jobId; - private PageParams pageParams; - - /** - * Create a new request to get the ScheduledEvents for the given calendarId. - * - * @param calendarId The ID of the calendar. - * Can be `_all` to get ALL ScheduledEvents for all calendars. - */ - public GetCalendarEventsRequest(String calendarId) { - this.calendarId = Objects.requireNonNull(calendarId, "[calendar_id] must not be null."); - } - - public String getCalendarId() { - return calendarId; - } - - public PageParams getPageParams() { - return pageParams; - } - - /** - * The paging parameters for the gathered ScheduledEvents - * @param pageParams The desired paging params - */ - public void setPageParams(PageParams pageParams) { - this.pageParams = pageParams; - } - - public String getStart() { - return start; - } - - /** - * Specifies to get events with timestamps after this time. - * - * @param start String representation of a timestamp; may be an epoch seconds, epoch millis or an ISO string - */ - public void setStart(String start) { - this.start = start; - } - - public String getEnd() { - return end; - } - - /** - * Specifies to get events with timestamps earlier than this time. - * - * @param end String representation of a timestamp; may be an epoch seconds, epoch millis or an ISO string - */ - public void setEnd(String end) { - this.end = end; - } - - public String getJobId() { - return jobId; - } - - /** - * The jobId for which to get the ScheduledEvents. When this option is used calendarId must be `_all` - * @param jobId The job for which to get the events. 
- */ - public void setJobId(String jobId) { - this.jobId = jobId; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(Calendar.ID.getPreferredName(), calendarId); - if (start != null) { - builder.field(START.getPreferredName(), start); - } - if (end != null) { - builder.field(END.getPreferredName(), end); - } - if (jobId != null) { - builder.field(Job.ID.getPreferredName(), jobId); - } - if (pageParams != null) { - builder.field(PageParams.PAGE.getPreferredName(), pageParams); - } - builder.endObject(); - return builder; - } - - @Override - public int hashCode() { - return Objects.hash(calendarId, start, end, jobId, pageParams); - } - - @Override - public boolean equals(Object obj) { - if (this == obj) { - return true; - } - if (obj == null || getClass() != obj.getClass()) { - return false; - } - GetCalendarEventsRequest other = (GetCalendarEventsRequest) obj; - return Objects.equals(calendarId, other.calendarId) - && Objects.equals(pageParams, other.pageParams) - && Objects.equals(start, other.start) - && Objects.equals(end, other.end) - && Objects.equals(jobId, other.jobId); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetCalendarEventsResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetCalendarEventsResponse.java deleted file mode 100644 index 2c36c1c329e84..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetCalendarEventsResponse.java +++ /dev/null @@ -1,79 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
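Two sketches of the request above, with illustrative ids: a windowed query for one calendar, and the job-scoped form, which per the Javadoc requires the `_all` calendar id.

    GetCalendarEventsRequest eventsRequest = new GetCalendarEventsRequest("planned-outages");
    eventsRequest.setStart("2022-01-01T00:00:00Z");
    eventsRequest.setEnd("2022-02-01T00:00:00Z");
    eventsRequest.setPageParams(new PageParams(0, 50));

    GetCalendarEventsRequest eventsForJob = new GetCalendarEventsRequest("_all");
    eventsForJob.setJobId("my-job");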
- */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.ml.calendars.ScheduledEvent; -import org.elasticsearch.common.Strings; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.List; -import java.util.Objects; - -import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; - -/** - * Contains a {@link List} of the found {@link ScheduledEvent} objects and the total count found - */ -public class GetCalendarEventsResponse extends AbstractResultResponse { - - public static final ParseField RESULTS_FIELD = new ParseField("events"); - - @SuppressWarnings("unchecked") - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "calendar_events_response", - true, - a -> new GetCalendarEventsResponse((List) a[0], (long) a[1]) - ); - - static { - PARSER.declareObjectArray(constructorArg(), ScheduledEvent.PARSER, RESULTS_FIELD); - PARSER.declareLong(constructorArg(), COUNT); - } - - GetCalendarEventsResponse(List events, long count) { - super(RESULTS_FIELD, events, count); - } - - /** - * The collection of {@link ScheduledEvent} objects found in the query - */ - public List events() { - return results; - } - - public static GetCalendarEventsResponse fromXContent(XContentParser parser) throws IOException { - return PARSER.parse(parser, null); - } - - @Override - public int hashCode() { - return Objects.hash(results, count); - } - - @Override - public boolean equals(Object obj) { - if (this == obj) { - return true; - } - - if (obj == null || getClass() != obj.getClass()) { - return false; - } - - GetCalendarEventsResponse other = (GetCalendarEventsResponse) obj; - return Objects.equals(results, other.results) && count == other.count; - } - - @Override - public final String toString() { - return Strings.toString(this); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetCalendarsRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetCalendarsRequest.java deleted file mode 100644 index 55bcbd88964be..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetCalendarsRequest.java +++ /dev/null @@ -1,87 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ - -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.Validatable; -import org.elasticsearch.client.core.PageParams; -import org.elasticsearch.client.ml.calendars.Calendar; -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Objects; - -public class GetCalendarsRequest implements Validatable, ToXContentObject { - - public static final ObjectParser PARSER = new ObjectParser<>( - "get_calendars_request", - GetCalendarsRequest::new - ); - - static { - PARSER.declareString(GetCalendarsRequest::setCalendarId, Calendar.ID); - PARSER.declareObject(GetCalendarsRequest::setPageParams, PageParams.PARSER, PageParams.PAGE); - } - - private String calendarId; - private PageParams pageParams; - - public GetCalendarsRequest() {} - - public GetCalendarsRequest(String calendarId) { - this.calendarId = calendarId; - } - - public String getCalendarId() { - return calendarId; - } - - public void setCalendarId(String calendarId) { - this.calendarId = calendarId; - } - - public PageParams getPageParams() { - return pageParams; - } - - public void setPageParams(PageParams pageParams) { - this.pageParams = pageParams; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - if (calendarId != null) { - builder.field(Calendar.ID.getPreferredName(), calendarId); - } - if (pageParams != null) { - builder.field(PageParams.PAGE.getPreferredName(), pageParams); - } - builder.endObject(); - return builder; - } - - @Override - public int hashCode() { - return Objects.hash(calendarId, pageParams); - } - - @Override - public boolean equals(Object obj) { - if (obj == null) { - return false; - } - if (getClass() != obj.getClass()) { - return false; - } - GetCalendarsRequest other = (GetCalendarsRequest) obj; - return Objects.equals(calendarId, other.calendarId) && Objects.equals(pageParams, other.pageParams); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetCalendarsResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetCalendarsResponse.java deleted file mode 100644 index bf2119692b485..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetCalendarsResponse.java +++ /dev/null @@ -1,77 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
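A paging sketch for the calendar lookup above; getCalendars and the Calendar accessor are assumed from the same client.

    GetCalendarsRequest calendarsRequest = new GetCalendarsRequest(); // no id: every calendar
    calendarsRequest.setPageParams(new PageParams(10, 10));           // second page of ten
    GetCalendarsResponse calendarsResponse =
        client.machineLearning().getCalendars(calendarsRequest, RequestOptions.DEFAULT);
    calendarsResponse.calendars().forEach(calendar -> System.out.println(calendar.getId()));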
- */ - -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.ml.calendars.Calendar; -import org.elasticsearch.common.Strings; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.List; -import java.util.Objects; - -import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; - -public class GetCalendarsResponse extends AbstractResultResponse { - - public static final ParseField RESULTS_FIELD = new ParseField("calendars"); - - @SuppressWarnings("unchecked") - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "calendars_response", - true, - a -> new GetCalendarsResponse((List) a[0], (long) a[1]) - ); - - static { - PARSER.declareObjectArray(constructorArg(), Calendar.PARSER, RESULTS_FIELD); - PARSER.declareLong(constructorArg(), AbstractResultResponse.COUNT); - } - - public static GetCalendarsResponse fromXContent(XContentParser parser) throws IOException { - return PARSER.parse(parser, null); - } - - GetCalendarsResponse(List calendars, long count) { - super(RESULTS_FIELD, calendars, count); - } - - /** - * The collection of {@link Calendar} objects found in the query - */ - public List calendars() { - return results; - } - - @Override - public int hashCode() { - return Objects.hash(results, count); - } - - @Override - public boolean equals(Object obj) { - if (this == obj) { - return true; - } - - if (obj == null || getClass() != obj.getClass()) { - return false; - } - - GetCalendarsResponse other = (GetCalendarsResponse) obj; - return Objects.equals(results, other.results) && count == other.count; - } - - @Override - public final String toString() { - return Strings.toString(this); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetCategoriesRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetCategoriesRequest.java deleted file mode 100644 index 9a00c09ffd847..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetCategoriesRequest.java +++ /dev/null @@ -1,131 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.Validatable; -import org.elasticsearch.client.core.PageParams; -import org.elasticsearch.client.ml.job.config.Job; -import org.elasticsearch.client.ml.job.results.CategoryDefinition; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Objects; - -/** - * A request to retrieve categories of a given job - */ -public class GetCategoriesRequest implements Validatable, ToXContentObject { - - public static final ParseField CATEGORY_ID = CategoryDefinition.CATEGORY_ID; - public static final ParseField PARTITION_FIELD_VALUE = CategoryDefinition.PARTITION_FIELD_VALUE; - - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "get_categories_request", - a -> new GetCategoriesRequest((String) a[0]) - ); - - static { - PARSER.declareString(ConstructingObjectParser.constructorArg(), Job.ID); - PARSER.declareLong(GetCategoriesRequest::setCategoryId, CATEGORY_ID); - PARSER.declareObject(GetCategoriesRequest::setPageParams, PageParams.PARSER, PageParams.PAGE); - PARSER.declareString(GetCategoriesRequest::setPartitionFieldValue, PARTITION_FIELD_VALUE); - } - - private final String jobId; - private Long categoryId; - private PageParams pageParams; - private String partitionFieldValue; - - /** - * Constructs a request to retrieve category information from a given job - * @param jobId id of the job from which to retrieve results - */ - public GetCategoriesRequest(String jobId) { - this.jobId = Objects.requireNonNull(jobId); - } - - public String getJobId() { - return jobId; - } - - public PageParams getPageParams() { - return pageParams; - } - - public Long getCategoryId() { - return categoryId; - } - - /** - * Sets the category id - * @param categoryId the category id - */ - public void setCategoryId(Long categoryId) { - this.categoryId = categoryId; - } - - /** - * Sets the paging parameters - * @param pageParams the paging parameters - */ - public void setPageParams(PageParams pageParams) { - this.pageParams = pageParams; - } - - public String getPartitionFieldValue() { - return partitionFieldValue; - } - - /** - * Sets the partition field value - * @param partitionFieldValue the partition field value - */ - public void setPartitionFieldValue(String partitionFieldValue) { - this.partitionFieldValue = partitionFieldValue; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(Job.ID.getPreferredName(), jobId); - if (categoryId != null) { - builder.field(CATEGORY_ID.getPreferredName(), categoryId); - } - if (pageParams != null) { - builder.field(PageParams.PAGE.getPreferredName(), pageParams); - } - if (partitionFieldValue != null) { - builder.field(PARTITION_FIELD_VALUE.getPreferredName(), partitionFieldValue); - } - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object obj) { - if (obj == null) { - return false; - } - if (getClass() != obj.getClass()) { - return false; - } - GetCategoriesRequest request = (GetCategoriesRequest) obj; - return Objects.equals(jobId, request.jobId) - && Objects.equals(categoryId, request.categoryId) - && Objects.equals(pageParams, request.pageParams) - && Objects.equals(partitionFieldValue, request.partitionFieldValue); - } - - 
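The request above is typically used in one of two shapes, sketched here with hypothetical ids: a single category by id, or a page of categories optionally narrowed to one partition.

    GetCategoriesRequest singleCategory = new GetCategoriesRequest("my-job");
    singleCategory.setCategoryId(1L);                   // exactly one category

    GetCategoriesRequest categoryPage = new GetCategoriesRequest("my-job");
    categoryPage.setPageParams(new PageParams(0, 20));  // a page of categories...
    categoryPage.setPartitionFieldValue("error_logs");  // ...for one partition field value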
@Override - public int hashCode() { - return Objects.hash(jobId, categoryId, pageParams, partitionFieldValue); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetCategoriesResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetCategoriesResponse.java deleted file mode 100644 index d1e542d18e43d..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetCategoriesResponse.java +++ /dev/null @@ -1,70 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.ml.job.results.CategoryDefinition; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.List; -import java.util.Objects; - -/** - * A response containing the requested categories - */ -public class GetCategoriesResponse extends AbstractResultResponse { - - public static final ParseField CATEGORIES = new ParseField("categories"); - - @SuppressWarnings("unchecked") - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "get_categories_response", - true, - a -> new GetCategoriesResponse((List) a[0], (long) a[1]) - ); - - static { - PARSER.declareObjectArray(ConstructingObjectParser.constructorArg(), CategoryDefinition.PARSER, CATEGORIES); - PARSER.declareLong(ConstructingObjectParser.constructorArg(), COUNT); - } - - public static GetCategoriesResponse fromXContent(XContentParser parser) throws IOException { - return PARSER.parse(parser, null); - } - - GetCategoriesResponse(List categories, long count) { - super(CATEGORIES, categories, count); - } - - /** - * The retrieved categories - * @return the retrieved categories - */ - public List categories() { - return results; - } - - @Override - public int hashCode() { - return Objects.hash(count, results); - } - - @Override - public boolean equals(Object obj) { - if (obj == null) { - return false; - } - if (getClass() != obj.getClass()) { - return false; - } - GetCategoriesResponse other = (GetCategoriesResponse) obj; - return count == other.count && Objects.equals(results, other.results); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetDataFrameAnalyticsRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetDataFrameAnalyticsRequest.java deleted file mode 100644 index 9b19280f9729f..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetDataFrameAnalyticsRequest.java +++ /dev/null @@ -1,111 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ - -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.Validatable; -import org.elasticsearch.client.ValidationException; -import org.elasticsearch.client.core.PageParams; -import org.elasticsearch.core.Nullable; - -import java.util.Arrays; -import java.util.List; -import java.util.Objects; -import java.util.Optional; - -public class GetDataFrameAnalyticsRequest implements Validatable { - - public static final String ALLOW_NO_MATCH = "allow_no_match"; - public static final String EXCLUDE_GENERATED = "exclude_generated"; - - private final List ids; - private Boolean allowNoMatch; - private PageParams pageParams; - private Boolean excludeGenerated; - - /** - * Helper method to create a request that will get ALL Data Frame Analytics - * @return new {@link GetDataFrameAnalyticsRequest} object for the id "_all" - */ - public static GetDataFrameAnalyticsRequest getAllDataFrameAnalyticsRequest() { - return new GetDataFrameAnalyticsRequest("_all"); - } - - public GetDataFrameAnalyticsRequest(String... ids) { - this.ids = Arrays.asList(ids); - } - - public List getIds() { - return ids; - } - - public Boolean getAllowNoMatch() { - return allowNoMatch; - } - - /** - * Setting this flag to `true` removes certain fields from the configuration on retrieval. - * - * This is useful when getting the configuration and wanting to put it in another cluster. - * - * Default value is false. - * @param excludeGenerated Boolean value indicating if certain fields should be removed - */ - public void setExcludeGenerated(boolean excludeGenerated) { - this.excludeGenerated = excludeGenerated; - } - - public Boolean getExcludeGenerated() { - return excludeGenerated; - } - - /** - * Whether to ignore if a wildcard expression matches no data frame analytics. - * - * @param allowNoMatch If this is {@code false}, then an error is returned when a wildcard (or {@code _all}) - * does not match any data frame analytics - */ - public GetDataFrameAnalyticsRequest setAllowNoMatch(boolean allowNoMatch) { - this.allowNoMatch = allowNoMatch; - return this; - } - - public PageParams getPageParams() { - return pageParams; - } - - public GetDataFrameAnalyticsRequest setPageParams(@Nullable PageParams pageParams) { - this.pageParams = pageParams; - return this; - } - - @Override - public Optional validate() { - if (ids == null || ids.isEmpty()) { - return Optional.of(ValidationException.withError("data frame analytics id must not be null")); - } - return Optional.empty(); - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - - GetDataFrameAnalyticsRequest other = (GetDataFrameAnalyticsRequest) o; - return Objects.equals(ids, other.ids) - && Objects.equals(allowNoMatch, other.allowNoMatch) - && Objects.equals(excludeGenerated, other.excludeGenerated) - && Objects.equals(pageParams, other.pageParams); - } - - @Override - public int hashCode() { - return Objects.hash(ids, allowNoMatch, excludeGenerated, pageParams); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetDataFrameAnalyticsResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetDataFrameAnalyticsResponse.java deleted file mode 100644 index 8e991efa10d44..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetDataFrameAnalyticsResponse.java +++ /dev/null @@ -1,63 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. 
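A sketch of the wildcard-friendly form of the request above; the printed handling of a validation failure is illustrative.

    GetDataFrameAnalyticsRequest dfaRequest = GetDataFrameAnalyticsRequest.getAllDataFrameAnalyticsRequest();
    dfaRequest.setAllowNoMatch(true);      // an unmatched wildcard is not an error
    dfaRequest.setExcludeGenerated(true);  // strip generated fields so the config can be stored elsewhere
    dfaRequest.setPageParams(new PageParams(0, 100));
    dfaRequest.validate().ifPresent(e -> System.err.println(e.getMessage())); // empty ids fail validation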
under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ - -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.ml.dataframe.DataFrameAnalyticsConfig; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.XContentParser; - -import java.util.List; -import java.util.Objects; - -import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; - -public class GetDataFrameAnalyticsResponse { - - public static final ParseField DATA_FRAME_ANALYTICS = new ParseField("data_frame_analytics"); - - @SuppressWarnings("unchecked") - static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "get_data_frame_analytics", - true, - args -> new GetDataFrameAnalyticsResponse((List) args[0]) - ); - - static { - PARSER.declareObjectArray(constructorArg(), (p, c) -> DataFrameAnalyticsConfig.fromXContent(p), DATA_FRAME_ANALYTICS); - } - - public static GetDataFrameAnalyticsResponse fromXContent(final XContentParser parser) { - return PARSER.apply(parser, null); - } - - private List analytics; - - public GetDataFrameAnalyticsResponse(List analytics) { - this.analytics = analytics; - } - - public List getAnalytics() { - return analytics; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - - GetDataFrameAnalyticsResponse other = (GetDataFrameAnalyticsResponse) o; - return Objects.equals(this.analytics, other.analytics); - } - - @Override - public int hashCode() { - return Objects.hash(analytics); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetDataFrameAnalyticsStatsRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetDataFrameAnalyticsStatsRequest.java deleted file mode 100644 index 50a71f5a4dc33..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetDataFrameAnalyticsStatsRequest.java +++ /dev/null @@ -1,88 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ - -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.Validatable; -import org.elasticsearch.client.ValidationException; -import org.elasticsearch.client.core.PageParams; -import org.elasticsearch.core.Nullable; -import org.elasticsearch.xcontent.ParseField; - -import java.util.Arrays; -import java.util.List; -import java.util.Objects; -import java.util.Optional; - -/** - * Request to get data frame analytics stats - */ -public class GetDataFrameAnalyticsStatsRequest implements Validatable { - - public static final ParseField ALLOW_NO_MATCH = new ParseField("allow_no_match"); - - private final List ids; - private Boolean allowNoMatch; - private PageParams pageParams; - - public GetDataFrameAnalyticsStatsRequest(String... 
ids) { - this.ids = Arrays.asList(ids); - } - - public List getIds() { - return ids; - } - - public Boolean getAllowNoMatch() { - return allowNoMatch; - } - - /** - * Whether to ignore if a wildcard expression matches no data frame analytics. - * - * @param allowNoMatch If this is {@code false}, then an error is returned when a wildcard (or {@code _all}) - * does not match any data frame analytics - */ - public GetDataFrameAnalyticsStatsRequest setAllowNoMatch(boolean allowNoMatch) { - this.allowNoMatch = allowNoMatch; - return this; - } - - public PageParams getPageParams() { - return pageParams; - } - - public GetDataFrameAnalyticsStatsRequest setPageParams(@Nullable PageParams pageParams) { - this.pageParams = pageParams; - return this; - } - - @Override - public Optional validate() { - if (ids == null || ids.isEmpty()) { - return Optional.of(ValidationException.withError("data frame analytics id must not be null")); - } - return Optional.empty(); - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - - GetDataFrameAnalyticsStatsRequest other = (GetDataFrameAnalyticsStatsRequest) o; - return Objects.equals(ids, other.ids) - && Objects.equals(allowNoMatch, other.allowNoMatch) - && Objects.equals(pageParams, other.pageParams); - } - - @Override - public int hashCode() { - return Objects.hash(ids, allowNoMatch, pageParams); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetDataFrameAnalyticsStatsResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetDataFrameAnalyticsStatsResponse.java deleted file mode 100644 index 00284b0802a16..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetDataFrameAnalyticsStatsResponse.java +++ /dev/null @@ -1,101 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
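Fetching stats with the pair above; note that the response surfaces partial task and node failures as lists rather than throwing. The getDataFrameAnalyticsStats call and the id are assumptions.

    GetDataFrameAnalyticsStatsRequest statsRequest = new GetDataFrameAnalyticsStatsRequest("flight-delays");
    GetDataFrameAnalyticsStatsResponse statsResponse =
        client.machineLearning().getDataFrameAnalyticsStats(statsRequest, RequestOptions.DEFAULT);
    for (DataFrameAnalyticsStats stats : statsResponse.getAnalyticsStats()) {
        System.out.println(stats.getId() + ": " + stats.getState());
    }
    statsResponse.getTaskFailures().forEach(failure -> System.err.println(failure));
    statsResponse.getNodeFailures().forEach(failure -> System.err.println(failure));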
- */ - -package org.elasticsearch.client.ml; - -import org.elasticsearch.ElasticsearchException; -import org.elasticsearch.action.TaskOperationFailure; -import org.elasticsearch.client.ml.dataframe.DataFrameAnalyticsStats; -import org.elasticsearch.client.transform.AcknowledgedTasksResponse; -import org.elasticsearch.core.Nullable; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.XContentParser; - -import java.util.Collections; -import java.util.List; -import java.util.Objects; - -import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; -import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; - -public class GetDataFrameAnalyticsStatsResponse { - - public static GetDataFrameAnalyticsStatsResponse fromXContent(XContentParser parser) { - return GetDataFrameAnalyticsStatsResponse.PARSER.apply(parser, null); - } - - private static final ParseField DATA_FRAME_ANALYTICS = new ParseField("data_frame_analytics"); - - @SuppressWarnings("unchecked") - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "get_data_frame_analytics_stats_response", - true, - args -> new GetDataFrameAnalyticsStatsResponse( - (List) args[0], - (List) args[1], - (List) args[2] - ) - ); - - static { - PARSER.declareObjectArray(constructorArg(), (p, c) -> DataFrameAnalyticsStats.fromXContent(p), DATA_FRAME_ANALYTICS); - PARSER.declareObjectArray( - optionalConstructorArg(), - (p, c) -> TaskOperationFailure.fromXContent(p), - AcknowledgedTasksResponse.TASK_FAILURES - ); - PARSER.declareObjectArray( - optionalConstructorArg(), - (p, c) -> ElasticsearchException.fromXContent(p), - AcknowledgedTasksResponse.NODE_FAILURES - ); - } - - private final List analyticsStats; - private final List taskFailures; - private final List nodeFailures; - - public GetDataFrameAnalyticsStatsResponse( - List analyticsStats, - @Nullable List taskFailures, - @Nullable List nodeFailures - ) { - this.analyticsStats = analyticsStats; - this.taskFailures = taskFailures == null ? Collections.emptyList() : Collections.unmodifiableList(taskFailures); - this.nodeFailures = nodeFailures == null ? Collections.emptyList() : Collections.unmodifiableList(nodeFailures); - } - - public List getAnalyticsStats() { - return analyticsStats; - } - - public List getNodeFailures() { - return nodeFailures; - } - - public List getTaskFailures() { - return taskFailures; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - - GetDataFrameAnalyticsStatsResponse other = (GetDataFrameAnalyticsStatsResponse) o; - return Objects.equals(analyticsStats, other.analyticsStats) - && Objects.equals(nodeFailures, other.nodeFailures) - && Objects.equals(taskFailures, other.taskFailures); - } - - @Override - public int hashCode() { - return Objects.hash(analyticsStats, nodeFailures, taskFailures); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetDatafeedRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetDatafeedRequest.java deleted file mode 100644 index 67fa0503a9b77..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetDatafeedRequest.java +++ /dev/null @@ -1,148 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. 
Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.Validatable; -import org.elasticsearch.client.ml.datafeed.DatafeedConfig; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; -import java.util.Objects; - -/** - * Request object to get {@link DatafeedConfig} objects with the matching {@code datafeedId}s. - * - * {@code _all} explicitly gets all the datafeeds in the cluster - * An empty request (no {@code datafeedId}s) implicitly gets all the datafeeds in the cluster - */ -public class GetDatafeedRequest implements Validatable, ToXContentObject { - - public static final ParseField DATAFEED_IDS = new ParseField("datafeed_ids"); - public static final ParseField ALLOW_NO_MATCH = new ParseField("allow_no_match"); - public static final String EXCLUDE_GENERATED = "exclude_generated"; - - private static final String ALL_DATAFEEDS = "_all"; - private final List datafeedIds; - private Boolean allowNoMatch; - private Boolean excludeGenerated; - - @SuppressWarnings("unchecked") - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "get_datafeed_request", - true, - a -> new GetDatafeedRequest(a[0] == null ? new ArrayList<>() : (List) a[0]) - ); - - static { - PARSER.declareStringArray(ConstructingObjectParser.optionalConstructorArg(), DATAFEED_IDS); - PARSER.declareBoolean(GetDatafeedRequest::setAllowNoMatch, ALLOW_NO_MATCH); - } - - /** - * Helper method to create a query that will get ALL datafeeds - * @return new {@link GetDatafeedRequest} object searching for the datafeedId "_all" - */ - public static GetDatafeedRequest getAllDatafeedsRequest() { - return new GetDatafeedRequest(ALL_DATAFEEDS); - } - - /** - * Get the specified {@link DatafeedConfig} configurations via their unique datafeedIds - * @param datafeedIds must not contain any null values - */ - public GetDatafeedRequest(String... datafeedIds) { - this(Arrays.asList(datafeedIds)); - } - - GetDatafeedRequest(List datafeedIds) { - if (datafeedIds.stream().anyMatch(Objects::isNull)) { - throw new NullPointerException("datafeedIds must not contain null values"); - } - this.datafeedIds = new ArrayList<>(datafeedIds); - } - - /** - * All the datafeedIds for which to get configuration information - */ - public List getDatafeedIds() { - return datafeedIds; - } - - /** - * Whether to ignore if a wildcard expression matches no datafeeds. - * - * @param allowNoMatch If this is {@code false}, then an error is returned when a wildcard (or {@code _all}) - * does not match any datafeeds - */ - public void setAllowNoMatch(boolean allowNoMatch) { - this.allowNoMatch = allowNoMatch; - } - - public Boolean getAllowNoMatch() { - return allowNoMatch; - } - - /** - * Setting this flag to `true` removes certain fields from the configuration on retrieval. - * - * This is useful when getting the configuration and wanting to put it in another cluster. - * - * Default value is false. 
- * @param excludeGenerated Boolean value indicating if certain fields should be removed - */ - public void setExcludeGenerated(boolean excludeGenerated) { - this.excludeGenerated = excludeGenerated; - } - - public Boolean getExcludeGenerated() { - return excludeGenerated; - } - - @Override - public int hashCode() { - return Objects.hash(datafeedIds, excludeGenerated, allowNoMatch); - } - - @Override - public boolean equals(Object other) { - if (this == other) { - return true; - } - - if (other == null || other.getClass() != getClass()) { - return false; - } - - GetDatafeedRequest that = (GetDatafeedRequest) other; - return Objects.equals(datafeedIds, that.datafeedIds) - && Objects.equals(allowNoMatch, that.allowNoMatch) - && Objects.equals(excludeGenerated, that.excludeGenerated); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - - if (datafeedIds.isEmpty() == false) { - builder.stringListField(DATAFEED_IDS.getPreferredName(), datafeedIds); - } - - if (allowNoMatch != null) { - builder.field(ALLOW_NO_MATCH.getPreferredName(), allowNoMatch); - } - - builder.endObject(); - return builder; - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetDatafeedResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetDatafeedResponse.java deleted file mode 100644 index b8a6a0d79972a..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetDatafeedResponse.java +++ /dev/null @@ -1,80 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
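(A minimal sketch of how the GetDatafeedRequest deleted above was typically
used; the datafeed ids are illustrative.)

    GetDatafeedRequest request = new GetDatafeedRequest("feed-1", "feed-2");
    request.setAllowNoMatch(false);    // fail if either id matches nothing
    request.setExcludeGenerated(true); // strip generated fields, e.g. before re-creating on another cluster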
- */
-package org.elasticsearch.client.ml;
-
-import org.elasticsearch.client.ml.datafeed.DatafeedConfig;
-import org.elasticsearch.common.Strings;
-import org.elasticsearch.xcontent.ConstructingObjectParser;
-import org.elasticsearch.xcontent.ParseField;
-import org.elasticsearch.xcontent.XContentParser;
-
-import java.io.IOException;
-import java.util.List;
-import java.util.Objects;
-import java.util.stream.Collectors;
-
-import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg;
-
-/**
- * Contains a {@link List} of the found {@link DatafeedConfig} objects and the total count found
- */
-public class GetDatafeedResponse extends AbstractResultResponse<DatafeedConfig> {
-
-    public static final ParseField RESULTS_FIELD = new ParseField("datafeeds");
-
-    @SuppressWarnings("unchecked")
-    public static final ConstructingObjectParser<GetDatafeedResponse, Void> PARSER = new ConstructingObjectParser<>(
-        "get_datafeed_response",
-        true,
-        a -> new GetDatafeedResponse((List<DatafeedConfig.Builder>) a[0], (long) a[1])
-    );
-
-    static {
-        PARSER.declareObjectArray(constructorArg(), DatafeedConfig.PARSER, RESULTS_FIELD);
-        PARSER.declareLong(constructorArg(), AbstractResultResponse.COUNT);
-    }
-
-    GetDatafeedResponse(List<DatafeedConfig.Builder> datafeedBuilders, long count) {
-        super(RESULTS_FIELD, datafeedBuilders.stream().map(DatafeedConfig.Builder::build).collect(Collectors.toList()), count);
-    }
-
-    /**
-     * The collection of {@link DatafeedConfig} objects found in the query
-     */
-    public List<DatafeedConfig> datafeeds() {
-        return results;
-    }
-
-    public static GetDatafeedResponse fromXContent(XContentParser parser) throws IOException {
-        return PARSER.parse(parser, null);
-    }
-
-    @Override
-    public int hashCode() {
-        return Objects.hash(results, count);
-    }
-
-    @Override
-    public boolean equals(Object obj) {
-        if (this == obj) {
-            return true;
-        }
-
-        if (obj == null || getClass() != obj.getClass()) {
-            return false;
-        }
-
-        GetDatafeedResponse other = (GetDatafeedResponse) obj;
-        return Objects.equals(results, other.results) && count == other.count;
-    }
-
-    @Override
-    public final String toString() {
-        return Strings.toString(this);
-    }
-}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetDatafeedStatsRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetDatafeedStatsRequest.java
deleted file mode 100644
index 081504354eb20..0000000000000
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetDatafeedStatsRequest.java
+++ /dev/null
@@ -1,134 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.Validatable; -import org.elasticsearch.client.ml.datafeed.DatafeedConfig; -import org.elasticsearch.common.Strings; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContent; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; -import java.util.Objects; - -/** - * Request object to get {@link org.elasticsearch.client.ml.datafeed.DatafeedStats} by their respective datafeedIds - * - * {@code _all} explicitly gets all the datafeeds' statistics in the cluster - * An empty request (no {@code datafeedId}s) implicitly gets all the datafeeds' statistics in the cluster - */ -public class GetDatafeedStatsRequest implements Validatable, ToXContentObject { - - public static final ParseField ALLOW_NO_MATCH = new ParseField("allow_no_match"); - - @SuppressWarnings("unchecked") - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "get_datafeed_stats_request", - a -> new GetDatafeedStatsRequest((List) a[0]) - ); - - static { - PARSER.declareField( - ConstructingObjectParser.constructorArg(), - p -> Arrays.asList(Strings.commaDelimitedListToStringArray(p.text())), - DatafeedConfig.ID, - ObjectParser.ValueType.STRING_ARRAY - ); - PARSER.declareBoolean(GetDatafeedStatsRequest::setAllowNoMatch, ALLOW_NO_MATCH); - } - - private static final String ALL_DATAFEEDS = "_all"; - - private final List datafeedIds; - private Boolean allowNoMatch; - - /** - * Explicitly gets all datafeeds statistics - * - * @return a {@link GetDatafeedStatsRequest} for all existing datafeeds - */ - public static GetDatafeedStatsRequest getAllDatafeedStatsRequest() { - return new GetDatafeedStatsRequest(ALL_DATAFEEDS); - } - - GetDatafeedStatsRequest(List datafeedIds) { - if (datafeedIds.stream().anyMatch(Objects::isNull)) { - throw new NullPointerException("datafeedIds must not contain null values"); - } - this.datafeedIds = new ArrayList<>(datafeedIds); - } - - /** - * Get the specified Datafeed's statistics via their unique datafeedIds - * - * @param datafeedIds must be non-null and each datafeedId must be non-null - */ - public GetDatafeedStatsRequest(String... datafeedIds) { - this(Arrays.asList(datafeedIds)); - } - - /** - * All the datafeedIds for which to get statistics - */ - public List getDatafeedIds() { - return datafeedIds; - } - - public Boolean getAllowNoMatch() { - return this.allowNoMatch; - } - - /** - * Whether to ignore if a wildcard expression matches no datafeeds. - * - * This includes {@code _all} string or when no datafeeds have been specified - * - * @param allowNoMatch When {@code true} ignore if wildcard or {@code _all} matches no datafeeds. 
Defaults to {@code true} - */ - public void setAllowNoMatch(boolean allowNoMatch) { - this.allowNoMatch = allowNoMatch; - } - - @Override - public int hashCode() { - return Objects.hash(datafeedIds, allowNoMatch); - } - - @Override - public boolean equals(Object other) { - if (this == other) { - return true; - } - - if (other == null || getClass() != other.getClass()) { - return false; - } - - GetDatafeedStatsRequest that = (GetDatafeedStatsRequest) other; - return Objects.equals(datafeedIds, that.datafeedIds) && Objects.equals(allowNoMatch, that.allowNoMatch); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { - builder.startObject(); - builder.field(DatafeedConfig.ID.getPreferredName(), Strings.collectionToCommaDelimitedString(datafeedIds)); - if (allowNoMatch != null) { - builder.field(ALLOW_NO_MATCH.getPreferredName(), allowNoMatch); - } - builder.endObject(); - return builder; - } - -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetDatafeedStatsResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetDatafeedStatsResponse.java deleted file mode 100644 index 94a49fc074c04..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetDatafeedStatsResponse.java +++ /dev/null @@ -1,79 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.ml.datafeed.DatafeedStats; -import org.elasticsearch.common.Strings; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.List; -import java.util.Objects; - -import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; - -/** - * Contains a {@link List} of the found {@link DatafeedStats} objects and the total count found - */ -public class GetDatafeedStatsResponse extends AbstractResultResponse { - - public static final ParseField RESULTS_FIELD = new ParseField("datafeeds"); - - @SuppressWarnings("unchecked") - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "get_datafeed_stats_response", - true, - a -> new GetDatafeedStatsResponse((List) a[0], (long) a[1]) - ); - - static { - PARSER.declareObjectArray(constructorArg(), DatafeedStats.PARSER, RESULTS_FIELD); - PARSER.declareLong(constructorArg(), COUNT); - } - - GetDatafeedStatsResponse(List results, long count) { - super(RESULTS_FIELD, results, count); - } - - /** - * The collection of {@link DatafeedStats} objects found in the query - */ - public List datafeedStats() { - return results; - } - - public static GetDatafeedStatsResponse fromXContent(XContentParser parser) throws IOException { - return PARSER.parse(parser, null); - } - - @Override - public int hashCode() { - return Objects.hash(results, count); - } - - @Override - public boolean equals(Object obj) { - if (this == obj) { - return true; - } - - if (obj == null || getClass() != obj.getClass()) { - return false; - } - - GetDatafeedStatsResponse other = (GetDatafeedStatsResponse) obj; - return 
Objects.equals(results, other.results) && count == other.count; - } - - @Override - public final String toString() { - return Strings.toString(this); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetFiltersRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetFiltersRequest.java deleted file mode 100644 index cafa4d8b331f5..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetFiltersRequest.java +++ /dev/null @@ -1,105 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.Validatable; -import org.elasticsearch.client.core.PageParams; -import org.elasticsearch.client.ml.job.config.MlFilter; -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Objects; - -/** - * A request to retrieve {@link MlFilter}s - */ -public class GetFiltersRequest implements Validatable, ToXContentObject { - - public static final ObjectParser PARSER = new ObjectParser<>("get_filters_request", GetFiltersRequest::new); - - static { - PARSER.declareString(GetFiltersRequest::setFilterId, MlFilter.ID); - PARSER.declareInt(GetFiltersRequest::setFrom, PageParams.FROM); - PARSER.declareInt(GetFiltersRequest::setSize, PageParams.SIZE); - } - - private String filterId; - private Integer from; - private Integer size; - - public String getFilterId() { - return filterId; - } - - public Integer getFrom() { - return from; - } - - public Integer getSize() { - return size; - } - - /** - * Sets the filter id - * @param filterId the filter id - */ - public void setFilterId(String filterId) { - this.filterId = filterId; - } - - /** - * Sets the number of filters to skip. - * @param from set the `from` parameter - */ - public void setFrom(Integer from) { - this.from = from; - } - - /** - * Sets the number of filters to return. 
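(A minimal paging sketch for the GetFiltersRequest being deleted here; the page
bounds are illustrative, the no-arg constructor and setters are from the
removed source.)

    GetFiltersRequest request = new GetFiltersRequest();
    request.setFrom(0);  // skip no filters
    request.setSize(50); // return at most 50 filters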
- * @param size set the `size` parameter - */ - public void setSize(Integer size) { - this.size = size; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - if (filterId != null) { - builder.field(MlFilter.ID.getPreferredName(), filterId); - } - if (from != null) { - builder.field(PageParams.FROM.getPreferredName(), from); - } - if (size != null) { - builder.field(PageParams.SIZE.getPreferredName(), size); - } - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object obj) { - if (obj == null) { - return false; - } - if (getClass() != obj.getClass()) { - return false; - } - GetFiltersRequest request = (GetFiltersRequest) obj; - return Objects.equals(filterId, request.filterId) && Objects.equals(from, request.from) && Objects.equals(size, request.size); - } - - @Override - public int hashCode() { - return Objects.hash(filterId, from, size); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetFiltersResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetFiltersResponse.java deleted file mode 100644 index a0a190d89cfc2..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetFiltersResponse.java +++ /dev/null @@ -1,80 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.ml.job.config.MlFilter; -import org.elasticsearch.common.Strings; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.List; -import java.util.Objects; -import java.util.stream.Collectors; - -import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; - -/** - * Contains a {@link List} of the found {@link MlFilter} objects and the total count found - */ -public class GetFiltersResponse extends AbstractResultResponse { - - public static final ParseField RESULTS_FIELD = new ParseField("filters"); - - @SuppressWarnings("unchecked") - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "get_filters_response", - true, - a -> new GetFiltersResponse((List) a[0], (long) a[1]) - ); - - static { - PARSER.declareObjectArray(constructorArg(), MlFilter.PARSER, RESULTS_FIELD); - PARSER.declareLong(constructorArg(), AbstractResultResponse.COUNT); - } - - GetFiltersResponse(List filters, long count) { - super(RESULTS_FIELD, filters.stream().map(MlFilter.Builder::build).collect(Collectors.toList()), count); - } - - /** - * The collection of {@link MlFilter} objects found in the query - */ - public List filters() { - return results; - } - - public static GetFiltersResponse fromXContent(XContentParser parser) throws IOException { - return PARSER.parse(parser, null); - } - - @Override - public int hashCode() { - return Objects.hash(results, count); - } - - @Override - public boolean equals(Object obj) { - if (this == obj) { - return true; - } - - if (obj == null || getClass() != obj.getClass()) { - return false; - } - - GetFiltersResponse 
other = (GetFiltersResponse) obj; - return Objects.equals(results, other.results) && count == other.count; - } - - @Override - public final String toString() { - return Strings.toString(this); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetInfluencersRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetInfluencersRequest.java deleted file mode 100644 index b174f4c91f1af..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetInfluencersRequest.java +++ /dev/null @@ -1,212 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.Validatable; -import org.elasticsearch.client.core.PageParams; -import org.elasticsearch.client.ml.job.config.Job; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Objects; - -/** - * A request to retrieve influencers of a given job - */ -public class GetInfluencersRequest implements Validatable, ToXContentObject { - - public static final ParseField EXCLUDE_INTERIM = new ParseField("exclude_interim"); - public static final ParseField START = new ParseField("start"); - public static final ParseField END = new ParseField("end"); - public static final ParseField INFLUENCER_SCORE = new ParseField("influencer_score"); - public static final ParseField SORT = new ParseField("sort"); - public static final ParseField DESCENDING = new ParseField("desc"); - - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "get_influencers_request", - a -> new GetInfluencersRequest((String) a[0]) - ); - - static { - PARSER.declareString(ConstructingObjectParser.constructorArg(), Job.ID); - PARSER.declareBoolean(GetInfluencersRequest::setExcludeInterim, EXCLUDE_INTERIM); - PARSER.declareStringOrNull(GetInfluencersRequest::setStart, START); - PARSER.declareStringOrNull(GetInfluencersRequest::setEnd, END); - PARSER.declareObject(GetInfluencersRequest::setPageParams, PageParams.PARSER, PageParams.PAGE); - PARSER.declareDouble(GetInfluencersRequest::setInfluencerScore, INFLUENCER_SCORE); - PARSER.declareString(GetInfluencersRequest::setSort, SORT); - PARSER.declareBoolean(GetInfluencersRequest::setDescending, DESCENDING); - } - - private final String jobId; - private Boolean excludeInterim; - private String start; - private String end; - private Double influencerScore; - private PageParams pageParams; - private String sort; - private Boolean descending; - - /** - * Constructs a request to retrieve influencers of a given job - * @param jobId id of the job to retrieve influencers of - */ - public GetInfluencersRequest(String jobId) { - this.jobId = Objects.requireNonNull(jobId); - } - - public String getJobId() { - return jobId; - } - - public Boolean getExcludeInterim() { - return excludeInterim; - } - - /** - * Sets the value of "exclude_interim". - * When {@code true}, interim influencers will be filtered out. 
- * @param excludeInterim value of "exclude_interim" to be set - */ - public void setExcludeInterim(Boolean excludeInterim) { - this.excludeInterim = excludeInterim; - } - - public String getStart() { - return start; - } - - /** - * Sets the value of "start" which is a timestamp. - * Only influencers whose timestamp is on or after the "start" value will be returned. - * @param start String representation of a timestamp; may be an epoch seconds, epoch millis or an ISO string - */ - public void setStart(String start) { - this.start = start; - } - - public String getEnd() { - return end; - } - - /** - * Sets the value of "end" which is a timestamp. - * Only influencers whose timestamp is before the "end" value will be returned. - * @param end String representation of a timestamp; may be an epoch seconds, epoch millis or an ISO string - */ - public void setEnd(String end) { - this.end = end; - } - - public PageParams getPageParams() { - return pageParams; - } - - /** - * Sets the paging parameters - * @param pageParams The paging parameters - */ - public void setPageParams(PageParams pageParams) { - this.pageParams = pageParams; - } - - public Double getInfluencerScore() { - return influencerScore; - } - - /** - * Sets the value of "influencer_score". - * Only influencers with "influencer_score" equal or greater will be returned. - * @param influencerScore value of "influencer_score". - */ - public void setInfluencerScore(Double influencerScore) { - this.influencerScore = influencerScore; - } - - public String getSort() { - return sort; - } - - /** - * Sets the value of "sort". - * Specifies the influencer field to sort on. - * @param sort value of "sort". - */ - public void setSort(String sort) { - this.sort = sort; - } - - public Boolean getDescending() { - return descending; - } - - /** - * Sets the value of "desc". - * Specifies the sorting order. 
- * @param descending value of "desc" - */ - public void setDescending(Boolean descending) { - this.descending = descending; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(Job.ID.getPreferredName(), jobId); - if (excludeInterim != null) { - builder.field(EXCLUDE_INTERIM.getPreferredName(), excludeInterim); - } - if (start != null) { - builder.field(START.getPreferredName(), start); - } - if (end != null) { - builder.field(END.getPreferredName(), end); - } - if (pageParams != null) { - builder.field(PageParams.PAGE.getPreferredName(), pageParams); - } - if (influencerScore != null) { - builder.field(INFLUENCER_SCORE.getPreferredName(), influencerScore); - } - if (sort != null) { - builder.field(SORT.getPreferredName(), sort); - } - if (descending != null) { - builder.field(DESCENDING.getPreferredName(), descending); - } - builder.endObject(); - return builder; - } - - @Override - public int hashCode() { - return Objects.hash(jobId, excludeInterim, influencerScore, pageParams, start, end, sort, descending); - } - - @Override - public boolean equals(Object obj) { - if (obj == null) { - return false; - } - if (getClass() != obj.getClass()) { - return false; - } - GetInfluencersRequest other = (GetInfluencersRequest) obj; - return Objects.equals(jobId, other.jobId) - && Objects.equals(excludeInterim, other.excludeInterim) - && Objects.equals(influencerScore, other.influencerScore) - && Objects.equals(pageParams, other.pageParams) - && Objects.equals(start, other.start) - && Objects.equals(end, other.end) - && Objects.equals(sort, other.sort) - && Objects.equals(descending, other.descending); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetInfluencersResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetInfluencersResponse.java deleted file mode 100644 index 6d075c7fb535d..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetInfluencersResponse.java +++ /dev/null @@ -1,70 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
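(A minimal sketch of the GetInfluencersRequest deleted above; the job id, time
window and score cutoff are illustrative.)

    GetInfluencersRequest request = new GetInfluencersRequest("my-job");
    request.setStart("2022-01-01T00:00:00Z"); // start/end accept epoch seconds, epoch millis or ISO strings
    request.setEnd("2022-02-01T00:00:00Z");
    request.setInfluencerScore(75.0);         // keep only influencers scoring 75 or higher
    request.setSort("influencer_score");
    request.setDescending(true);              // highest scores first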
- */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.ml.job.results.Influencer; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.List; -import java.util.Objects; - -/** - * A response containing the requested influencers - */ -public class GetInfluencersResponse extends AbstractResultResponse { - - public static final ParseField INFLUENCERS = new ParseField("influencers"); - - @SuppressWarnings("unchecked") - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "get_influencers_response", - true, - a -> new GetInfluencersResponse((List) a[0], (long) a[1]) - ); - - static { - PARSER.declareObjectArray(ConstructingObjectParser.constructorArg(), Influencer.PARSER, INFLUENCERS); - PARSER.declareLong(ConstructingObjectParser.constructorArg(), COUNT); - } - - public static GetInfluencersResponse fromXContent(XContentParser parser) throws IOException { - return PARSER.parse(parser, null); - } - - GetInfluencersResponse(List influencers, long count) { - super(INFLUENCERS, influencers, count); - } - - /** - * The retrieved influencers - * @return the retrieved influencers - */ - public List influencers() { - return results; - } - - @Override - public int hashCode() { - return Objects.hash(count, results); - } - - @Override - public boolean equals(Object obj) { - if (obj == null) { - return false; - } - if (getClass() != obj.getClass()) { - return false; - } - GetInfluencersResponse other = (GetInfluencersResponse) obj; - return count == other.count && Objects.equals(results, other.results); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetJobRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetJobRequest.java deleted file mode 100644 index 037af8a412132..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetJobRequest.java +++ /dev/null @@ -1,148 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.Validatable; -import org.elasticsearch.client.ml.job.config.Job; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; -import java.util.Objects; - -/** - * Request object to get {@link Job} objects with the matching {@code jobId}s or - * {@code groupName}s. 
- * - * {@code _all} explicitly gets all the jobs in the cluster - * An empty request (no {@code jobId}s) implicitly gets all the jobs in the cluster - */ -public class GetJobRequest implements Validatable, ToXContentObject { - - public static final ParseField JOB_IDS = new ParseField("job_ids"); - public static final ParseField ALLOW_NO_MATCH = new ParseField("allow_no_match"); - public static final String EXCLUDE_GENERATED = "exclude_generated"; - - private static final String ALL_JOBS = "_all"; - private final List jobIds; - private Boolean allowNoMatch; - private Boolean excludeGenerated; - - @SuppressWarnings("unchecked") - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "get_job_request", - true, - a -> new GetJobRequest(a[0] == null ? new ArrayList<>() : (List) a[0]) - ); - - static { - PARSER.declareStringArray(ConstructingObjectParser.optionalConstructorArg(), JOB_IDS); - PARSER.declareBoolean(GetJobRequest::setAllowNoMatch, ALLOW_NO_MATCH); - } - - /** - * Helper method to create a query that will get ALL jobs - * @return new {@link GetJobRequest} object searching for the jobId "_all" - */ - public static GetJobRequest getAllJobsRequest() { - return new GetJobRequest(ALL_JOBS); - } - - /** - * Get the specified {@link Job} configurations via their unique jobIds - * @param jobIds must not contain any null values - */ - public GetJobRequest(String... jobIds) { - this(Arrays.asList(jobIds)); - } - - GetJobRequest(List jobIds) { - if (jobIds.stream().anyMatch(Objects::isNull)) { - throw new NullPointerException("jobIds must not contain null values"); - } - this.jobIds = new ArrayList<>(jobIds); - } - - /** - * All the jobIds for which to get configuration information - */ - public List getJobIds() { - return jobIds; - } - - /** - * Whether to ignore if a wildcard expression matches no jobs. - * - * @param allowNoMatch If this is {@code false}, then an error is returned when a wildcard (or {@code _all}) does not match any jobs - */ - public void setAllowNoMatch(boolean allowNoMatch) { - this.allowNoMatch = allowNoMatch; - } - - public Boolean getAllowNoMatch() { - return allowNoMatch; - } - - /** - * Setting this flag to `true` removes certain fields from the configuration on retrieval. - * - * This is useful when getting the configuration and wanting to put it in another cluster. - * - * Default value is false. 
- * @param excludeGenerated Boolean value indicating if certain fields should be removed - */ - public void setExcludeGenerated(boolean excludeGenerated) { - this.excludeGenerated = excludeGenerated; - } - - public Boolean getExcludeGenerated() { - return excludeGenerated; - } - - @Override - public int hashCode() { - return Objects.hash(jobIds, excludeGenerated, allowNoMatch); - } - - @Override - public boolean equals(Object other) { - if (this == other) { - return true; - } - - if (other == null || other.getClass() != getClass()) { - return false; - } - - GetJobRequest that = (GetJobRequest) other; - return Objects.equals(jobIds, that.jobIds) - && Objects.equals(excludeGenerated, that.excludeGenerated) - && Objects.equals(allowNoMatch, that.allowNoMatch); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - - if (jobIds.isEmpty() == false) { - builder.stringListField(JOB_IDS.getPreferredName(), jobIds); - } - - if (allowNoMatch != null) { - builder.field(ALLOW_NO_MATCH.getPreferredName(), allowNoMatch); - } - - builder.endObject(); - return builder; - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetJobResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetJobResponse.java deleted file mode 100644 index ccc40edf3687d..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetJobResponse.java +++ /dev/null @@ -1,80 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
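(A minimal sketch of the two equivalent ways the GetJobRequest deleted above
addressed every job in the cluster.)

    GetJobRequest explicitAll = GetJobRequest.getAllJobsRequest(); // searches for job id "_all"
    GetJobRequest implicitAll = new GetJobRequest();               // an empty id list implicitly means all jobs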
- */
-package org.elasticsearch.client.ml;
-
-import org.elasticsearch.client.ml.job.config.Job;
-import org.elasticsearch.common.Strings;
-import org.elasticsearch.xcontent.ConstructingObjectParser;
-import org.elasticsearch.xcontent.ParseField;
-import org.elasticsearch.xcontent.XContentParser;
-
-import java.io.IOException;
-import java.util.List;
-import java.util.Objects;
-import java.util.stream.Collectors;
-
-import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg;
-
-/**
- * Contains a {@link List} of the found {@link Job} objects and the total count found
- */
-public class GetJobResponse extends AbstractResultResponse<Job> {
-
-    public static final ParseField RESULTS_FIELD = new ParseField("jobs");
-
-    @SuppressWarnings("unchecked")
-    public static final ConstructingObjectParser<GetJobResponse, Void> PARSER = new ConstructingObjectParser<>(
-        "jobs_response",
-        true,
-        a -> new GetJobResponse((List<Job.Builder>) a[0], (long) a[1])
-    );
-
-    static {
-        PARSER.declareObjectArray(constructorArg(), Job.PARSER, RESULTS_FIELD);
-        PARSER.declareLong(constructorArg(), AbstractResultResponse.COUNT);
-    }
-
-    GetJobResponse(List<Job.Builder> jobBuilders, long count) {
-        super(RESULTS_FIELD, jobBuilders.stream().map(Job.Builder::build).collect(Collectors.toList()), count);
-    }
-
-    /**
-     * The collection of {@link Job} objects found in the query
-     */
-    public List<Job> jobs() {
-        return results;
-    }
-
-    public static GetJobResponse fromXContent(XContentParser parser) throws IOException {
-        return PARSER.parse(parser, null);
-    }
-
-    @Override
-    public int hashCode() {
-        return Objects.hash(results, count);
-    }
-
-    @Override
-    public boolean equals(Object obj) {
-        if (this == obj) {
-            return true;
-        }
-
-        if (obj == null || getClass() != obj.getClass()) {
-            return false;
-        }
-
-        GetJobResponse other = (GetJobResponse) obj;
-        return Objects.equals(results, other.results) && count == other.count;
-    }
-
-    @Override
-    public final String toString() {
-        return Strings.toString(this);
-    }
-}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetJobStatsRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetJobStatsRequest.java
deleted file mode 100644
index 009b0239e276d..0000000000000
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetJobStatsRequest.java
+++ /dev/null
@@ -1,132 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.Validatable; -import org.elasticsearch.client.ml.job.config.Job; -import org.elasticsearch.common.Strings; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; -import java.util.Objects; - -/** - * Request object to get {@link org.elasticsearch.client.ml.job.stats.JobStats} by their respective jobIds - * - * {@code _all} explicitly gets all the jobs' statistics in the cluster - * An empty request (no {@code jobId}s) implicitly gets all the jobs' statistics in the cluster - */ -public class GetJobStatsRequest implements Validatable, ToXContentObject { - - public static final ParseField ALLOW_NO_MATCH = new ParseField("allow_no_match"); - - @SuppressWarnings("unchecked") - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "get_jobs_stats_request", - a -> new GetJobStatsRequest((List) a[0]) - ); - - static { - PARSER.declareField( - ConstructingObjectParser.constructorArg(), - p -> Arrays.asList(Strings.commaDelimitedListToStringArray(p.text())), - Job.ID, - ObjectParser.ValueType.STRING_ARRAY - ); - PARSER.declareBoolean(GetJobStatsRequest::setAllowNoMatch, ALLOW_NO_MATCH); - } - - private static final String ALL_JOBS = "_all"; - - private final List jobIds; - private Boolean allowNoMatch; - - /** - * Explicitly gets all jobs statistics - * - * @return a {@link GetJobStatsRequest} for all existing jobs - */ - public static GetJobStatsRequest getAllJobStatsRequest() { - return new GetJobStatsRequest(ALL_JOBS); - } - - GetJobStatsRequest(List jobIds) { - if (jobIds.stream().anyMatch(Objects::isNull)) { - throw new NullPointerException("jobIds must not contain null values"); - } - this.jobIds = new ArrayList<>(jobIds); - } - - /** - * Get the specified Job's statistics via their unique jobIds - * - * @param jobIds must be non-null and each jobId must be non-null - */ - public GetJobStatsRequest(String... jobIds) { - this(Arrays.asList(jobIds)); - } - - /** - * All the jobIds for which to get statistics - */ - public List getJobIds() { - return jobIds; - } - - public Boolean getAllowNoMatch() { - return this.allowNoMatch; - } - - /** - * Whether to ignore if a wildcard expression matches no jobs. - * - * This includes {@code _all} string or when no jobs have been specified - * - * @param allowNoMatch When {@code true} ignore if wildcard or {@code _all} matches no jobs. 
Defaults to {@code true} - */ - public void setAllowNoMatch(boolean allowNoMatch) { - this.allowNoMatch = allowNoMatch; - } - - @Override - public int hashCode() { - return Objects.hash(jobIds, allowNoMatch); - } - - @Override - public boolean equals(Object other) { - if (this == other) { - return true; - } - - if (other == null || getClass() != other.getClass()) { - return false; - } - - GetJobStatsRequest that = (GetJobStatsRequest) other; - return Objects.equals(jobIds, that.jobIds) && Objects.equals(allowNoMatch, that.allowNoMatch); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(Job.ID.getPreferredName(), Strings.collectionToCommaDelimitedString(jobIds)); - if (allowNoMatch != null) { - builder.field(ALLOW_NO_MATCH.getPreferredName(), allowNoMatch); - } - builder.endObject(); - return builder; - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetJobStatsResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetJobStatsResponse.java deleted file mode 100644 index 3443010fe66a4..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetJobStatsResponse.java +++ /dev/null @@ -1,79 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.ml.job.stats.JobStats; -import org.elasticsearch.common.Strings; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.List; -import java.util.Objects; - -import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; - -/** - * Contains a {@link List} of the found {@link JobStats} objects and the total count found - */ -public class GetJobStatsResponse extends AbstractResultResponse { - - public static final ParseField RESULTS_FIELD = new ParseField("jobs"); - - @SuppressWarnings("unchecked") - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "jobs_stats_response", - true, - a -> new GetJobStatsResponse((List) a[0], (long) a[1]) - ); - - static { - PARSER.declareObjectArray(constructorArg(), JobStats.PARSER, RESULTS_FIELD); - PARSER.declareLong(constructorArg(), COUNT); - } - - GetJobStatsResponse(List jobStats, long count) { - super(RESULTS_FIELD, jobStats, count); - } - - /** - * The collection of {@link JobStats} objects found in the query - */ - public List jobStats() { - return results; - } - - public static GetJobStatsResponse fromXContent(XContentParser parser) throws IOException { - return PARSER.parse(parser, null); - } - - @Override - public int hashCode() { - return Objects.hash(results, count); - } - - @Override - public boolean equals(Object obj) { - if (this == obj) { - return true; - } - - if (obj == null || getClass() != obj.getClass()) { - return false; - } - - GetJobStatsResponse other = (GetJobStatsResponse) obj; - return Objects.equals(results, other.results) && count == other.count; - } - - @Override - public final String toString() { - return 
Strings.toString(this); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetModelSnapshotsRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetModelSnapshotsRequest.java deleted file mode 100644 index d6ecbf18a2444..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetModelSnapshotsRequest.java +++ /dev/null @@ -1,191 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.Validatable; -import org.elasticsearch.client.core.PageParams; -import org.elasticsearch.client.ml.job.config.Job; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Objects; - -/** - * A request to retrieve information about model snapshots for a given job - */ -public class GetModelSnapshotsRequest implements Validatable, ToXContentObject { - - public static final ParseField SNAPSHOT_ID = new ParseField("snapshot_id"); - public static final ParseField SORT = new ParseField("sort"); - public static final ParseField START = new ParseField("start"); - public static final ParseField END = new ParseField("end"); - public static final ParseField DESC = new ParseField("desc"); - - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "get_model_snapshots_request", - a -> new GetModelSnapshotsRequest((String) a[0]) - ); - - static { - PARSER.declareString(ConstructingObjectParser.constructorArg(), Job.ID); - PARSER.declareString(GetModelSnapshotsRequest::setSnapshotId, SNAPSHOT_ID); - PARSER.declareString(GetModelSnapshotsRequest::setSort, SORT); - PARSER.declareStringOrNull(GetModelSnapshotsRequest::setStart, START); - PARSER.declareStringOrNull(GetModelSnapshotsRequest::setEnd, END); - PARSER.declareBoolean(GetModelSnapshotsRequest::setDesc, DESC); - PARSER.declareObject(GetModelSnapshotsRequest::setPageParams, PageParams.PARSER, PageParams.PAGE); - } - - private final String jobId; - private String snapshotId; - private String sort; - private String start; - private String end; - private Boolean desc; - private PageParams pageParams; - - /** - * Constructs a request to retrieve snapshot information from a given job - * @param jobId id of the job from which to retrieve results - */ - public GetModelSnapshotsRequest(String jobId) { - this.jobId = Objects.requireNonNull(jobId); - } - - public String getJobId() { - return jobId; - } - - public String getSnapshotId() { - return snapshotId; - } - - /** - * Sets the id of the snapshot to retrieve. - * @param snapshotId the snapshot id - */ - public void setSnapshotId(String snapshotId) { - this.snapshotId = snapshotId; - } - - public String getSort() { - return sort; - } - - /** - * Sets the value of "sort". - * Specifies the snapshot field to sort on. - * @param sort value of "sort". 
- */ - public void setSort(String sort) { - this.sort = sort; - } - - public PageParams getPageParams() { - return pageParams; - } - - /** - * Sets the paging parameters - * @param pageParams the paging parameters - */ - public void setPageParams(PageParams pageParams) { - this.pageParams = pageParams; - } - - public String getStart() { - return start; - } - - /** - * Sets the value of "start" which is a timestamp. - * Only snapshots whose timestamp is on or after the "start" value will be returned. - * @param start String representation of a timestamp; may be an epoch seconds, epoch millis or an ISO string - */ - public void setStart(String start) { - this.start = start; - } - - public String getEnd() { - return end; - } - - /** - * Sets the value of "end" which is a timestamp. - * Only snapshots whose timestamp is before the "end" value will be returned. - * @param end String representation of a timestamp; may be an epoch seconds, epoch millis or an ISO string - */ - public void setEnd(String end) { - this.end = end; - } - - public Boolean getDesc() { - return desc; - } - - /** - * Sets the value of "desc". - * Specifies the sorting order. - * @param desc value of "desc" - */ - public void setDesc(boolean desc) { - this.desc = desc; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(Job.ID.getPreferredName(), jobId); - if (snapshotId != null) { - builder.field(SNAPSHOT_ID.getPreferredName(), snapshotId); - } - if (sort != null) { - builder.field(SORT.getPreferredName(), sort); - } - if (start != null) { - builder.field(START.getPreferredName(), start); - } - if (end != null) { - builder.field(END.getPreferredName(), end); - } - if (desc != null) { - builder.field(DESC.getPreferredName(), desc); - } - if (pageParams != null) { - builder.field(PageParams.PAGE.getPreferredName(), pageParams); - } - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object obj) { - if (obj == null) { - return false; - } - if (getClass() != obj.getClass()) { - return false; - } - GetModelSnapshotsRequest request = (GetModelSnapshotsRequest) obj; - return Objects.equals(jobId, request.jobId) - && Objects.equals(snapshotId, request.snapshotId) - && Objects.equals(sort, request.sort) - && Objects.equals(start, request.start) - && Objects.equals(end, request.end) - && Objects.equals(desc, request.desc) - && Objects.equals(pageParams, request.pageParams); - } - - @Override - public int hashCode() { - return Objects.hash(jobId, snapshotId, pageParams, start, end, sort, desc); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetModelSnapshotsResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetModelSnapshotsResponse.java deleted file mode 100644 index b52055ced3046..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetModelSnapshotsResponse.java +++ /dev/null @@ -1,71 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
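(A minimal sketch of the GetModelSnapshotsRequest deleted above, listing the
newest snapshots first; the job id and page size are illustrative.)

    GetModelSnapshotsRequest request = new GetModelSnapshotsRequest("my-job");
    request.setSort("timestamp");
    request.setDesc(true);                        // newest first
    request.setPageParams(new PageParams(0, 10)); // first page of 10 snapshots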
- */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.ml.job.process.ModelSnapshot; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.List; -import java.util.Objects; -import java.util.stream.Collectors; - -/** - * A response containing the requested snapshots - */ -public class GetModelSnapshotsResponse extends AbstractResultResponse { - - public static final ParseField SNAPSHOTS = new ParseField("model_snapshots"); - - @SuppressWarnings("unchecked") - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "get_model_snapshots_response", - true, - a -> new GetModelSnapshotsResponse((List) a[0], (long) a[1]) - ); - - static { - PARSER.declareObjectArray(ConstructingObjectParser.constructorArg(), ModelSnapshot.PARSER, SNAPSHOTS); - PARSER.declareLong(ConstructingObjectParser.constructorArg(), COUNT); - } - - public static GetModelSnapshotsResponse fromXContent(XContentParser parser) throws IOException { - return PARSER.parse(parser, null); - } - - GetModelSnapshotsResponse(List snapshotBuilders, long count) { - super(SNAPSHOTS, snapshotBuilders.stream().map(ModelSnapshot.Builder::build).collect(Collectors.toList()), count); - } - - /** - * The retrieved snapshots - * @return the retrieved snapshots - */ - public List snapshots() { - return results; - } - - @Override - public int hashCode() { - return Objects.hash(count, results); - } - - @Override - public boolean equals(Object obj) { - if (obj == null) { - return false; - } - if (getClass() != obj.getClass()) { - return false; - } - GetModelSnapshotsResponse other = (GetModelSnapshotsResponse) obj; - return count == other.count && Objects.equals(results, other.results); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetOverallBucketsRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetOverallBucketsRequest.java deleted file mode 100644 index 628fcc804d423..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetOverallBucketsRequest.java +++ /dev/null @@ -1,251 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.Validatable; -import org.elasticsearch.client.ml.job.config.Job; -import org.elasticsearch.common.Strings; -import org.elasticsearch.core.TimeValue; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Arrays; -import java.util.Collections; -import java.util.List; -import java.util.Objects; - -/** - * A request to retrieve overall buckets of set of jobs - */ -public class GetOverallBucketsRequest implements Validatable, ToXContentObject { - - public static final ParseField TOP_N = new ParseField("top_n"); - public static final ParseField BUCKET_SPAN = new ParseField("bucket_span"); - public static final ParseField OVERALL_SCORE = new ParseField("overall_score"); - public static final ParseField EXCLUDE_INTERIM = new ParseField("exclude_interim"); - public static final ParseField START = new ParseField("start"); - public static final ParseField END = new ParseField("end"); - public static final ParseField ALLOW_NO_MATCH = new ParseField("allow_no_match"); - - private static final String ALL_JOBS = "_all"; - - @SuppressWarnings("unchecked") - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "get_overall_buckets_request", - a -> new GetOverallBucketsRequest((String) a[0]) - ); - - static { - PARSER.declareString(ConstructingObjectParser.constructorArg(), Job.ID); - PARSER.declareInt(GetOverallBucketsRequest::setTopN, TOP_N); - PARSER.declareString(GetOverallBucketsRequest::setBucketSpan, BUCKET_SPAN); - PARSER.declareBoolean(GetOverallBucketsRequest::setExcludeInterim, EXCLUDE_INTERIM); - PARSER.declareDouble(GetOverallBucketsRequest::setOverallScore, OVERALL_SCORE); - PARSER.declareStringOrNull(GetOverallBucketsRequest::setStart, START); - PARSER.declareStringOrNull(GetOverallBucketsRequest::setEnd, END); - PARSER.declareBoolean(GetOverallBucketsRequest::setAllowNoMatch, ALLOW_NO_MATCH); - } - - private final List jobIds; - private Integer topN; - private TimeValue bucketSpan; - private Boolean excludeInterim; - private Double overallScore; - private String start; - private String end; - private Boolean allowNoMatch; - - private GetOverallBucketsRequest(String jobId) { - this(Strings.tokenizeToStringArray(jobId, ",")); - } - - /** - * Constructs a request to retrieve overall buckets for a set of jobs - * @param jobIds The job identifiers. Each can be a job identifier, a group name, or a wildcard expression. - */ - public GetOverallBucketsRequest(String... jobIds) { - this(Arrays.asList(jobIds)); - } - - /** - * Constructs a request to retrieve overall buckets for a set of jobs - * @param jobIds The job identifiers. Each can be a job identifier, a group name, or a wildcard expression. - */ - public GetOverallBucketsRequest(List jobIds) { - if (jobIds.stream().anyMatch(Objects::isNull)) { - throw new NullPointerException("jobIds must not contain null values"); - } - if (jobIds.isEmpty()) { - this.jobIds = Collections.singletonList(ALL_JOBS); - } else { - this.jobIds = Collections.unmodifiableList(jobIds); - } - } - - public List getJobIds() { - return jobIds; - } - - public Integer getTopN() { - return topN; - } - - /** - * Sets the value of "top_n". - * @param topN The number of top job bucket scores to be used in the overall_score calculation. Defaults to 1. 
- */ - public void setTopN(Integer topN) { - this.topN = topN; - } - - public TimeValue getBucketSpan() { - return bucketSpan; - } - - /** - * Sets the value of "bucket_span". - * @param bucketSpan The span of the overall buckets. Must be greater or equal to the largest job’s bucket_span. - * Defaults to the largest job’s bucket_span. - */ - public void setBucketSpan(TimeValue bucketSpan) { - this.bucketSpan = bucketSpan; - } - - private void setBucketSpan(String bucketSpan) { - this.bucketSpan = TimeValue.parseTimeValue(bucketSpan, BUCKET_SPAN.getPreferredName()); - } - - public boolean isExcludeInterim() { - return excludeInterim; - } - - /** - * Sets the value of "exclude_interim". - * When {@code true}, interim overall buckets will be filtered out. - * Overall buckets are interim if any of the job buckets within the overall bucket interval are interim. - * @param excludeInterim value of "exclude_interim" to be set - */ - public void setExcludeInterim(Boolean excludeInterim) { - this.excludeInterim = excludeInterim; - } - - public String getStart() { - return start; - } - - /** - * Sets the value of "start" which is a timestamp. - * Only overall buckets whose timestamp is on or after the "start" value will be returned. - * @param start String representation of a timestamp; may be an epoch seconds, epoch millis or an ISO string - */ - public void setStart(String start) { - this.start = start; - } - - public String getEnd() { - return end; - } - - /** - * Sets the value of "end" which is a timestamp. - * Only overall buckets whose timestamp is before the "end" value will be returned. - * @param end String representation of a timestamp; may be an epoch seconds, epoch millis or an ISO string - */ - public void setEnd(String end) { - this.end = end; - } - - public Double getOverallScore() { - return overallScore; - } - - /** - * Sets the value of "overall_score". - * Only buckets with "overall_score" equal or greater will be returned. - * @param overallScore value of "anomaly_score". - */ - public void setOverallScore(double overallScore) { - this.overallScore = overallScore; - } - - /** - * See {@link GetJobRequest#getAllowNoMatch()} - * @param allowNoMatch value of "allow_no_match". - */ - public void setAllowNoMatch(boolean allowNoMatch) { - this.allowNoMatch = allowNoMatch; - } - - /** - * Whether to ignore if a wildcard expression matches no jobs. 
- * - * If this is {@code false}, then an error is returned when a wildcard (or {@code _all}) does not match any jobs - */ - public Boolean getAllowNoMatch() { - return allowNoMatch; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - - if (jobIds.isEmpty() == false) { - builder.field(Job.ID.getPreferredName(), Strings.collectionToCommaDelimitedString(jobIds)); - } - if (topN != null) { - builder.field(TOP_N.getPreferredName(), topN); - } - if (bucketSpan != null) { - builder.field(BUCKET_SPAN.getPreferredName(), bucketSpan.getStringRep()); - } - if (excludeInterim != null) { - builder.field(EXCLUDE_INTERIM.getPreferredName(), excludeInterim); - } - if (start != null) { - builder.field(START.getPreferredName(), start); - } - if (end != null) { - builder.field(END.getPreferredName(), end); - } - if (overallScore != null) { - builder.field(OVERALL_SCORE.getPreferredName(), overallScore); - } - if (allowNoMatch != null) { - builder.field(ALLOW_NO_MATCH.getPreferredName(), allowNoMatch); - } - builder.endObject(); - return builder; - } - - @Override - public int hashCode() { - return Objects.hash(jobIds, topN, bucketSpan, excludeInterim, overallScore, start, end, allowNoMatch); - } - - @Override - public boolean equals(Object obj) { - if (obj == null) { - return false; - } - if (getClass() != obj.getClass()) { - return false; - } - GetOverallBucketsRequest other = (GetOverallBucketsRequest) obj; - return Objects.equals(jobIds, other.jobIds) - && Objects.equals(topN, other.topN) - && Objects.equals(bucketSpan, other.bucketSpan) - && Objects.equals(excludeInterim, other.excludeInterim) - && Objects.equals(overallScore, other.overallScore) - && Objects.equals(start, other.start) - && Objects.equals(end, other.end) - && Objects.equals(allowNoMatch, other.allowNoMatch); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetOverallBucketsResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetOverallBucketsResponse.java deleted file mode 100644 index a75b740c99a14..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetOverallBucketsResponse.java +++ /dev/null @@ -1,70 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.ml.job.results.OverallBucket; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.List; -import java.util.Objects; - -/** - * A response containing the requested overall buckets - */ -public class GetOverallBucketsResponse extends AbstractResultResponse { - - public static final ParseField OVERALL_BUCKETS = new ParseField("overall_buckets"); - - @SuppressWarnings("unchecked") - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "get_overall_buckets_response", - true, - a -> new GetOverallBucketsResponse((List) a[0], (long) a[1]) - ); - - static { - PARSER.declareObjectArray(ConstructingObjectParser.constructorArg(), OverallBucket.PARSER, OVERALL_BUCKETS); - PARSER.declareLong(ConstructingObjectParser.constructorArg(), COUNT); - } - - public static GetOverallBucketsResponse fromXContent(XContentParser parser) throws IOException { - return PARSER.parse(parser, null); - } - - GetOverallBucketsResponse(List overallBuckets, long count) { - super(OVERALL_BUCKETS, overallBuckets, count); - } - - /** - * The retrieved overall buckets - * @return the retrieved overall buckets - */ - public List overallBuckets() { - return results; - } - - @Override - public int hashCode() { - return Objects.hash(count, results); - } - - @Override - public boolean equals(Object obj) { - if (obj == null) { - return false; - } - if (getClass() != obj.getClass()) { - return false; - } - GetOverallBucketsResponse other = (GetOverallBucketsResponse) obj; - return count == other.count && Objects.equals(results, other.results); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetRecordsRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetRecordsRequest.java deleted file mode 100644 index c3ebcd1f86e99..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetRecordsRequest.java +++ /dev/null @@ -1,211 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
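The overall-buckets API removed above aggregated anomaly scores across several jobs. A hedged sketch of typical usage follows; the job ids, span, and score threshold are invented for illustration.

    import org.apache.http.HttpHost;
    import org.elasticsearch.client.RequestOptions;
    import org.elasticsearch.client.RestClient;
    import org.elasticsearch.client.RestHighLevelClient;
    import org.elasticsearch.client.ml.GetOverallBucketsRequest;
    import org.elasticsearch.client.ml.GetOverallBucketsResponse;
    import org.elasticsearch.core.TimeValue;

    public class GetOverallBucketsExample {
        public static void main(String[] args) throws Exception {
            try (RestHighLevelClient client = new RestHighLevelClient(
                    RestClient.builder(new HttpHost("localhost", 9200, "http")))) {
                // Job ids are illustrative; group names and wildcard expressions were also accepted.
                GetOverallBucketsRequest request = new GetOverallBucketsRequest("job-1", "job-2");
                request.setTopN(2);                                 // use the top two job bucket scores per overall bucket
                request.setBucketSpan(TimeValue.timeValueHours(1)); // span of each overall bucket
                request.setOverallScore(75.0);                      // only buckets scoring 75 or higher
                request.setExcludeInterim(true);                    // drop interim buckets
                GetOverallBucketsResponse response =
                    client.machineLearning().getOverallBuckets(request, RequestOptions.DEFAULT);
                response.overallBuckets()
                    .forEach(b -> System.out.println(b.getTimestamp() + " -> " + b.getOverallScore()));
            }
        }
    }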
- */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.Validatable; -import org.elasticsearch.client.core.PageParams; -import org.elasticsearch.client.ml.job.config.Job; -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Objects; - -/** - * A request to retrieve records of a given job - */ -public class GetRecordsRequest implements ToXContentObject, Validatable { - - public static final ParseField EXCLUDE_INTERIM = new ParseField("exclude_interim"); - public static final ParseField START = new ParseField("start"); - public static final ParseField END = new ParseField("end"); - public static final ParseField RECORD_SCORE = new ParseField("record_score"); - public static final ParseField SORT = new ParseField("sort"); - public static final ParseField DESCENDING = new ParseField("desc"); - - public static final ObjectParser PARSER = new ObjectParser<>("get_records_request", GetRecordsRequest::new); - - static { - PARSER.declareString((request, jobId) -> request.jobId = jobId, Job.ID); - PARSER.declareBoolean(GetRecordsRequest::setExcludeInterim, EXCLUDE_INTERIM); - PARSER.declareStringOrNull(GetRecordsRequest::setStart, START); - PARSER.declareStringOrNull(GetRecordsRequest::setEnd, END); - PARSER.declareObject(GetRecordsRequest::setPageParams, PageParams.PARSER, PageParams.PAGE); - PARSER.declareDouble(GetRecordsRequest::setRecordScore, RECORD_SCORE); - PARSER.declareString(GetRecordsRequest::setSort, SORT); - PARSER.declareBoolean(GetRecordsRequest::setDescending, DESCENDING); - } - - private String jobId; - private Boolean excludeInterim; - private String start; - private String end; - private PageParams pageParams; - private Double recordScore; - private String sort; - private Boolean descending; - - private GetRecordsRequest() {} - - /** - * Constructs a request to retrieve records of a given job - * @param jobId id of the job to retrieve records of - */ - public GetRecordsRequest(String jobId) { - this.jobId = Objects.requireNonNull(jobId); - } - - public String getJobId() { - return jobId; - } - - public Boolean getExcludeInterim() { - return excludeInterim; - } - - /** - * Sets the value of "exclude_interim". - * When {@code true}, interim records will be filtered out. - * @param excludeInterim value of "exclude_interim" to be set - */ - public void setExcludeInterim(Boolean excludeInterim) { - this.excludeInterim = excludeInterim; - } - - public String getStart() { - return start; - } - - /** - * Sets the value of "start" which is a timestamp. - * Only records whose timestamp is on or after the "start" value will be returned. - * @param start String representation of a timestamp; may be an epoch seconds, epoch millis or an ISO string - */ - public void setStart(String start) { - this.start = start; - } - - public String getEnd() { - return end; - } - - /** - * Sets the value of "end" which is a timestamp. - * Only records whose timestamp is before the "end" value will be returned. 
- * @param end String representation of a timestamp; may be an epoch seconds, epoch millis or an ISO string - */ - public void setEnd(String end) { - this.end = end; - } - - public PageParams getPageParams() { - return pageParams; - } - - /** - * Sets the paging parameters - * @param pageParams The paging parameters - */ - public void setPageParams(PageParams pageParams) { - this.pageParams = pageParams; - } - - public Double getRecordScore() { - return recordScore; - } - - /** - * Sets the value of "record_score". - * Only records with "record_score" equal or greater will be returned. - * @param recordScore value of "record_score". - */ - public void setRecordScore(Double recordScore) { - this.recordScore = recordScore; - } - - public String getSort() { - return sort; - } - - /** - * Sets the value of "sort". - * Specifies the record field to sort on. - * @param sort value of "sort". - */ - public void setSort(String sort) { - this.sort = sort; - } - - public Boolean getDescending() { - return descending; - } - - /** - * Sets the value of "desc". - * Specifies the sorting order. - * @param descending value of "desc" - */ - public void setDescending(Boolean descending) { - this.descending = descending; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(Job.ID.getPreferredName(), jobId); - if (excludeInterim != null) { - builder.field(EXCLUDE_INTERIM.getPreferredName(), excludeInterim); - } - if (start != null) { - builder.field(START.getPreferredName(), start); - } - if (end != null) { - builder.field(END.getPreferredName(), end); - } - if (pageParams != null) { - builder.field(PageParams.PAGE.getPreferredName(), pageParams); - } - if (recordScore != null) { - builder.field(RECORD_SCORE.getPreferredName(), recordScore); - } - if (sort != null) { - builder.field(SORT.getPreferredName(), sort); - } - if (descending != null) { - builder.field(DESCENDING.getPreferredName(), descending); - } - builder.endObject(); - return builder; - } - - @Override - public int hashCode() { - return Objects.hash(jobId, excludeInterim, recordScore, pageParams, start, end, sort, descending); - } - - @Override - public boolean equals(Object obj) { - if (obj == null) { - return false; - } - if (getClass() != obj.getClass()) { - return false; - } - GetRecordsRequest other = (GetRecordsRequest) obj; - return Objects.equals(jobId, other.jobId) - && Objects.equals(excludeInterim, other.excludeInterim) - && Objects.equals(recordScore, other.recordScore) - && Objects.equals(pageParams, other.pageParams) - && Objects.equals(start, other.start) - && Objects.equals(end, other.end) - && Objects.equals(sort, other.sort) - && Objects.equals(descending, other.descending); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetRecordsResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetRecordsResponse.java deleted file mode 100644 index 3f94a06211ade..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetRecordsResponse.java +++ /dev/null @@ -1,70 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.ml.job.results.AnomalyRecord; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.List; -import java.util.Objects; - -/** - * A response containing the requested records - */ -public class GetRecordsResponse extends AbstractResultResponse { - - public static final ParseField RECORDS = new ParseField("records"); - - @SuppressWarnings("unchecked") - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "get_records_response", - true, - a -> new GetRecordsResponse((List) a[0], (long) a[1]) - ); - - static { - PARSER.declareObjectArray(ConstructingObjectParser.constructorArg(), AnomalyRecord.PARSER, RECORDS); - PARSER.declareLong(ConstructingObjectParser.constructorArg(), COUNT); - } - - public static GetRecordsResponse fromXContent(XContentParser parser) throws IOException { - return PARSER.parse(parser, null); - } - - GetRecordsResponse(List records, long count) { - super(RECORDS, records, count); - } - - /** - * The retrieved records - * @return the retrieved records - */ - public List records() { - return results; - } - - @Override - public int hashCode() { - return Objects.hash(count, results); - } - - @Override - public boolean equals(Object obj) { - if (obj == null) { - return false; - } - if (getClass() != obj.getClass()) { - return false; - } - GetRecordsResponse other = (GetRecordsResponse) obj; - return count == other.count && Objects.equals(results, other.results); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetTrainedModelsRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetTrainedModelsRequest.java deleted file mode 100644 index 50b59c6a92f4f..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetTrainedModelsRequest.java +++ /dev/null @@ -1,198 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
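The records request/response pair removed above fetched individual anomaly records for a job. A minimal sketch under assumed values (job id, score threshold, sort field):

    import org.apache.http.HttpHost;
    import org.elasticsearch.client.RequestOptions;
    import org.elasticsearch.client.RestClient;
    import org.elasticsearch.client.RestHighLevelClient;
    import org.elasticsearch.client.ml.GetRecordsRequest;
    import org.elasticsearch.client.ml.GetRecordsResponse;

    public class GetRecordsExample {
        public static void main(String[] args) throws Exception {
            try (RestHighLevelClient client = new RestHighLevelClient(
                    RestClient.builder(new HttpHost("localhost", 9200, "http")))) {
                GetRecordsRequest request = new GetRecordsRequest("my-job");
                request.setRecordScore(90.0);    // only records scoring 90 or higher
                request.setExcludeInterim(true); // skip interim results
                request.setSort("record_score"); // sort on the record_score field
                request.setDescending(true);     // highest scores first
                GetRecordsResponse response =
                    client.machineLearning().getRecords(request, RequestOptions.DEFAULT);
                response.records()
                    .forEach(r -> System.out.println(r.getTimestamp() + " score=" + r.getRecordScore()));
            }
        }
    }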
- */ - -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.Validatable; -import org.elasticsearch.client.ValidationException; -import org.elasticsearch.client.core.PageParams; -import org.elasticsearch.client.ml.inference.TrainedModelConfig; -import org.elasticsearch.core.Nullable; - -import java.util.Arrays; -import java.util.Collections; -import java.util.HashSet; -import java.util.List; -import java.util.Objects; -import java.util.Optional; -import java.util.Set; - -public class GetTrainedModelsRequest implements Validatable { - - private static final String DEFINITION = "definition"; - private static final String TOTAL_FEATURE_IMPORTANCE = "total_feature_importance"; - private static final String FEATURE_IMPORTANCE_BASELINE = "feature_importance_baseline"; - public static final String ALLOW_NO_MATCH = "allow_no_match"; - public static final String EXCLUDE_GENERATED = "exclude_generated"; - public static final String DECOMPRESS_DEFINITION = "decompress_definition"; - public static final String TAGS = "tags"; - public static final String INCLUDE = "include"; - - private final List ids; - private Boolean allowNoMatch; - private Set includes = new HashSet<>(); - private Boolean decompressDefinition; - private Boolean excludeGenerated; - private PageParams pageParams; - private List tags; - - /** - * Helper method to create a request that will get ALL TrainedModelConfigs - * @return new {@link GetTrainedModelsRequest} object for the id "_all" - */ - public static GetTrainedModelsRequest getAllTrainedModelConfigsRequest() { - return new GetTrainedModelsRequest("_all"); - } - - public GetTrainedModelsRequest(String... ids) { - this.ids = Arrays.asList(ids); - } - - public List getIds() { - return ids; - } - - public Boolean getAllowNoMatch() { - return allowNoMatch; - } - - /** - * Whether to ignore if a wildcard expression matches no trained models. - * - * @param allowNoMatch If this is {@code false}, then an error is returned when a wildcard (or {@code _all}) - * does not match any trained models - */ - public GetTrainedModelsRequest setAllowNoMatch(boolean allowNoMatch) { - this.allowNoMatch = allowNoMatch; - return this; - } - - public PageParams getPageParams() { - return pageParams; - } - - public GetTrainedModelsRequest setPageParams(@Nullable PageParams pageParams) { - this.pageParams = pageParams; - return this; - } - - public Set getIncludes() { - return Collections.unmodifiableSet(includes); - } - - public GetTrainedModelsRequest includeDefinition() { - this.includes.add(DEFINITION); - return this; - } - - public GetTrainedModelsRequest includeTotalFeatureImportance() { - this.includes.add(TOTAL_FEATURE_IMPORTANCE); - return this; - } - - public GetTrainedModelsRequest includeFeatureImportanceBaseline() { - this.includes.add(FEATURE_IMPORTANCE_BASELINE); - return this; - } - - /** - * Whether to include the full model definition. - * - * The full model definition can be very large. - * @deprecated Use {@link GetTrainedModelsRequest#includeDefinition()} - * @param includeDefinition If {@code true}, the definition is included. 
- */ - @Deprecated - public GetTrainedModelsRequest setIncludeDefinition(Boolean includeDefinition) { - if (includeDefinition != null && includeDefinition) { - return this.includeDefinition(); - } - return this; - } - - public Boolean getDecompressDefinition() { - return decompressDefinition; - } - - /** - * Whether or not to decompress the trained model, or keep it in its compressed string form - * - * @param decompressDefinition If {@code true}, the definition is decompressed. - */ - public GetTrainedModelsRequest setDecompressDefinition(Boolean decompressDefinition) { - this.decompressDefinition = decompressDefinition; - return this; - } - - public List getTags() { - return tags; - } - - /** - * The tags that the trained model must match. These correspond to {@link TrainedModelConfig#getTags()}. - * - * The models returned will match ALL tags supplied. - * If none are provided, only the provided ids are used to find models - * @param tags The tags to match when finding models - */ - public GetTrainedModelsRequest setTags(List tags) { - this.tags = tags; - return this; - } - - /** - * See {@link GetTrainedModelsRequest#setTags(List)} - */ - public GetTrainedModelsRequest setTags(String... tags) { - return setTags(Arrays.asList(tags)); - } - - public Boolean getExcludeGenerated() { - return excludeGenerated; - } - - /** - * Setting this flag to `true` removes certain fields from the model definition on retrieval. - * - * This is useful when getting the model and wanting to put it in another cluster. - * - * Default value is false. - * @param excludeGenerated Boolean value indicating if certain fields should be removed from the mode on GET - */ - public GetTrainedModelsRequest setExcludeGenerated(Boolean excludeGenerated) { - this.excludeGenerated = excludeGenerated; - return this; - } - - @Override - public Optional validate() { - if (ids == null || ids.isEmpty()) { - return Optional.of(ValidationException.withError("trained model id must not be null")); - } - return Optional.empty(); - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - - GetTrainedModelsRequest other = (GetTrainedModelsRequest) o; - return Objects.equals(ids, other.ids) - && Objects.equals(allowNoMatch, other.allowNoMatch) - && Objects.equals(decompressDefinition, other.decompressDefinition) - && Objects.equals(includes, other.includes) - && Objects.equals(excludeGenerated, other.excludeGenerated) - && Objects.equals(pageParams, other.pageParams); - } - - @Override - public int hashCode() { - return Objects.hash(ids, allowNoMatch, pageParams, decompressDefinition, includes, excludeGenerated); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetTrainedModelsResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetTrainedModelsResponse.java deleted file mode 100644 index 9fb7cf8f7fd13..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetTrainedModelsResponse.java +++ /dev/null @@ -1,74 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ - -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.ml.inference.TrainedModelConfig; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.XContentParser; - -import java.util.List; -import java.util.Objects; - -import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; - -public class GetTrainedModelsResponse { - - public static final ParseField TRAINED_MODEL_CONFIGS = new ParseField("trained_model_configs"); - public static final ParseField COUNT = new ParseField("count"); - - @SuppressWarnings("unchecked") - static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "get_trained_model_configs", - true, - args -> new GetTrainedModelsResponse((List) args[0], (Long) args[1]) - ); - - static { - PARSER.declareObjectArray(constructorArg(), (p, c) -> TrainedModelConfig.fromXContent(p), TRAINED_MODEL_CONFIGS); - PARSER.declareLong(constructorArg(), COUNT); - } - - public static GetTrainedModelsResponse fromXContent(final XContentParser parser) { - return PARSER.apply(parser, null); - } - - private final List trainedModels; - private final Long count; - - public GetTrainedModelsResponse(List trainedModels, Long count) { - this.trainedModels = trainedModels; - this.count = count; - } - - public List getTrainedModels() { - return trainedModels; - } - - /** - * @return The total count of the trained models that matched the ID pattern. - */ - public Long getCount() { - return count; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - - GetTrainedModelsResponse other = (GetTrainedModelsResponse) o; - return Objects.equals(this.trainedModels, other.trainedModels) && Objects.equals(this.count, other.count); - } - - @Override - public int hashCode() { - return Objects.hash(trainedModels, count); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetTrainedModelsStatsRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetTrainedModelsStatsRequest.java deleted file mode 100644 index 0185f531b0c68..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetTrainedModelsStatsRequest.java +++ /dev/null @@ -1,92 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
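The trained-models request above supported paging, tag filtering, and optional includes via a fluent builder style. A sketch of how the removed classes were typically combined, using the "_all" helper; page size and flags are illustrative:

    import org.apache.http.HttpHost;
    import org.elasticsearch.client.RequestOptions;
    import org.elasticsearch.client.RestClient;
    import org.elasticsearch.client.RestHighLevelClient;
    import org.elasticsearch.client.core.PageParams;
    import org.elasticsearch.client.ml.GetTrainedModelsRequest;
    import org.elasticsearch.client.ml.GetTrainedModelsResponse;

    public class GetTrainedModelsExample {
        public static void main(String[] args) throws Exception {
            try (RestHighLevelClient client = new RestHighLevelClient(
                    RestClient.builder(new HttpHost("localhost", 9200, "http")))) {
                GetTrainedModelsRequest request = GetTrainedModelsRequest.getAllTrainedModelConfigsRequest()
                    .setPageParams(new PageParams(0, 5)) // first five models
                    .setAllowNoMatch(true)               // no error when the expression matches nothing
                    .setExcludeGenerated(false);         // keep generated fields in the configs
                GetTrainedModelsResponse response =
                    client.machineLearning().getTrainedModels(request, RequestOptions.DEFAULT);
                System.out.println(response.getCount() + " total models");
                response.getTrainedModels().forEach(m -> System.out.println(m.getModelId()));
            }
        }
    }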
- */ - -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.Validatable; -import org.elasticsearch.client.ValidationException; -import org.elasticsearch.client.core.PageParams; -import org.elasticsearch.core.Nullable; - -import java.util.Arrays; -import java.util.List; -import java.util.Objects; -import java.util.Optional; - -public class GetTrainedModelsStatsRequest implements Validatable { - - public static final String ALLOW_NO_MATCH = "allow_no_match"; - - private final List ids; - private Boolean allowNoMatch; - private PageParams pageParams; - - /** - * Helper method to create a request that will get ALL TrainedModelStats - * @return new {@link GetTrainedModelsStatsRequest} object for the id "_all" - */ - public static GetTrainedModelsStatsRequest getAllTrainedModelStatsRequest() { - return new GetTrainedModelsStatsRequest("_all"); - } - - public GetTrainedModelsStatsRequest(String... ids) { - this.ids = Arrays.asList(ids); - } - - public List getIds() { - return ids; - } - - public Boolean getAllowNoMatch() { - return allowNoMatch; - } - - /** - * Whether to ignore if a wildcard expression matches no trained models. - * - * @param allowNoMatch If this is {@code false}, then an error is returned when a wildcard (or {@code _all}) - * does not match any trained models - */ - public GetTrainedModelsStatsRequest setAllowNoMatch(boolean allowNoMatch) { - this.allowNoMatch = allowNoMatch; - return this; - } - - public PageParams getPageParams() { - return pageParams; - } - - public GetTrainedModelsStatsRequest setPageParams(@Nullable PageParams pageParams) { - this.pageParams = pageParams; - return this; - } - - @Override - public Optional validate() { - if (ids == null || ids.isEmpty()) { - return Optional.of(ValidationException.withError("trained model id must not be null")); - } - return Optional.empty(); - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - - GetTrainedModelsStatsRequest other = (GetTrainedModelsStatsRequest) o; - return Objects.equals(ids, other.ids) - && Objects.equals(allowNoMatch, other.allowNoMatch) - && Objects.equals(pageParams, other.pageParams); - } - - @Override - public int hashCode() { - return Objects.hash(ids, allowNoMatch, pageParams); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetTrainedModelsStatsResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetTrainedModelsStatsResponse.java deleted file mode 100644 index ca218657cce83..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetTrainedModelsStatsResponse.java +++ /dev/null @@ -1,74 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ - -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.ml.inference.TrainedModelStats; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.XContentParser; - -import java.util.List; -import java.util.Objects; - -import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; - -public class GetTrainedModelsStatsResponse { - - public static final ParseField TRAINED_MODEL_STATS = new ParseField("trained_model_stats"); - public static final ParseField COUNT = new ParseField("count"); - - @SuppressWarnings("unchecked") - static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "get_trained_model_stats", - true, - args -> new GetTrainedModelsStatsResponse((List) args[0], (Long) args[1]) - ); - - static { - PARSER.declareObjectArray(constructorArg(), (p, c) -> TrainedModelStats.fromXContent(p), TRAINED_MODEL_STATS); - PARSER.declareLong(constructorArg(), COUNT); - } - - public static GetTrainedModelsStatsResponse fromXContent(final XContentParser parser) { - return PARSER.apply(parser, null); - } - - private final List trainedModelStats; - private final Long count; - - public GetTrainedModelsStatsResponse(List trainedModelStats, Long count) { - this.trainedModelStats = trainedModelStats; - this.count = count; - } - - public List getTrainedModelStats() { - return trainedModelStats; - } - - /** - * @return The total count of the trained models that matched the ID pattern. - */ - public Long getCount() { - return count; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - - GetTrainedModelsStatsResponse other = (GetTrainedModelsStatsResponse) o; - return Objects.equals(this.trainedModelStats, other.trainedModelStats) && Objects.equals(this.count, other.count); - } - - @Override - public int hashCode() { - return Objects.hash(trainedModelStats, count); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/MlInfoRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/MlInfoRequest.java deleted file mode 100644 index 6c5f1787fd183..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/MlInfoRequest.java +++ /dev/null @@ -1,13 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ - -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.Validatable; - -public class MlInfoRequest implements Validatable {} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/MlInfoResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/MlInfoResponse.java deleted file mode 100644 index 6fa6c6eaaf6be..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/MlInfoResponse.java +++ /dev/null @@ -1,50 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. 
Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ - -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.Validatable; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Map; -import java.util.Objects; - -public class MlInfoResponse implements Validatable { - private final Map info; - - private MlInfoResponse(Map info) { - this.info = info; - } - - public Map getInfo() { - return info; - } - - public static MlInfoResponse fromXContent(XContentParser parser) throws IOException { - Map info = parser.map(); - return new MlInfoResponse(info); - } - - @Override - public int hashCode() { - return Objects.hash(info); - } - - @Override - public boolean equals(Object obj) { - if (obj == null) { - return false; - } - if (getClass() != obj.getClass()) { - return false; - } - MlInfoResponse other = (MlInfoResponse) obj; - return Objects.equals(info, other.info); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/NodeAttributes.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/NodeAttributes.java deleted file mode 100644 index 4a732c9523415..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/NodeAttributes.java +++ /dev/null @@ -1,140 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
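The ML info pair removed above was the simplest of these calls: an empty request and a raw map of server-side defaults and limits in return. A sketch, assuming the pre-removal client method name getMlInfo:

    import org.apache.http.HttpHost;
    import org.elasticsearch.client.RequestOptions;
    import org.elasticsearch.client.RestClient;
    import org.elasticsearch.client.RestHighLevelClient;
    import org.elasticsearch.client.ml.MlInfoRequest;
    import org.elasticsearch.client.ml.MlInfoResponse;

    import java.util.Map;

    public class MlInfoExample {
        public static void main(String[] args) throws Exception {
            try (RestHighLevelClient client = new RestHighLevelClient(
                    RestClient.builder(new HttpHost("localhost", 9200, "http")))) {
                MlInfoResponse response =
                    client.machineLearning().getMlInfo(new MlInfoRequest(), RequestOptions.DEFAULT);
                // The response is an untyped map of ML defaults, limits, and upgrade-mode state.
                for (Map.Entry<String, Object> e : response.getInfo().entrySet()) {
                    System.out.println(e.getKey() + " = " + e.getValue());
                }
            }
        }
    }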
- */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.common.Strings; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Collections; -import java.util.Map; -import java.util.Objects; - -/** - * A Pojo class containing an Elastic Node's attributes - */ -public class NodeAttributes implements ToXContentObject { - - public static final ParseField ID = new ParseField("id"); - public static final ParseField NAME = new ParseField("name"); - public static final ParseField EPHEMERAL_ID = new ParseField("ephemeral_id"); - public static final ParseField TRANSPORT_ADDRESS = new ParseField("transport_address"); - public static final ParseField ATTRIBUTES = new ParseField("attributes"); - - @SuppressWarnings("unchecked") - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>("node", true, (a) -> { - int i = 0; - String id = (String) a[i++]; - String name = (String) a[i++]; - String ephemeralId = (String) a[i++]; - String transportAddress = (String) a[i++]; - Map attributes = (Map) a[i]; - return new NodeAttributes(id, name, ephemeralId, transportAddress, attributes); - }); - - static { - PARSER.declareString(ConstructingObjectParser.constructorArg(), ID); - PARSER.declareString(ConstructingObjectParser.constructorArg(), NAME); - PARSER.declareString(ConstructingObjectParser.constructorArg(), EPHEMERAL_ID); - PARSER.declareString(ConstructingObjectParser.constructorArg(), TRANSPORT_ADDRESS); - PARSER.declareField(ConstructingObjectParser.constructorArg(), (p, c) -> p.mapStrings(), ATTRIBUTES, ObjectParser.ValueType.OBJECT); - } - - private final String id; - private final String name; - private final String ephemeralId; - private final String transportAddress; - private final Map attributes; - - public NodeAttributes(String id, String name, String ephemeralId, String transportAddress, Map attributes) { - this.id = id; - this.name = name; - this.ephemeralId = ephemeralId; - this.transportAddress = transportAddress; - this.attributes = Collections.unmodifiableMap(attributes); - } - - /** - * The unique identifier of the node. - */ - public String getId() { - return id; - } - - /** - * The node name. - */ - public String getName() { - return name; - } - - /** - * The ephemeral id of the node. - */ - public String getEphemeralId() { - return ephemeralId; - } - - /** - * The host and port where transport HTTP connections are accepted. - */ - public String getTransportAddress() { - return transportAddress; - } - - /** - * Additional attributes related to this node e.g., {"ml.max_open_jobs": "10"}. 
- */ - public Map getAttributes() { - return attributes; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(ID.getPreferredName(), id); - builder.field(NAME.getPreferredName(), name); - builder.field(EPHEMERAL_ID.getPreferredName(), ephemeralId); - builder.field(TRANSPORT_ADDRESS.getPreferredName(), transportAddress); - builder.field(ATTRIBUTES.getPreferredName(), attributes); - builder.endObject(); - return builder; - } - - @Override - public int hashCode() { - return Objects.hash(id, name, ephemeralId, transportAddress, attributes); - } - - @Override - public boolean equals(Object other) { - if (this == other) { - return true; - } - - if (other == null || getClass() != other.getClass()) { - return false; - } - - NodeAttributes that = (NodeAttributes) other; - return Objects.equals(id, that.id) - && Objects.equals(name, that.name) - && Objects.equals(ephemeralId, that.ephemeralId) - && Objects.equals(transportAddress, that.transportAddress) - && Objects.equals(attributes, that.attributes); - } - - @Override - public String toString() { - return Strings.toString(this); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/OpenJobRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/OpenJobRequest.java deleted file mode 100644 index c19ff484242ad..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/OpenJobRequest.java +++ /dev/null @@ -1,116 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.Validatable; -import org.elasticsearch.client.ml.job.config.Job; -import org.elasticsearch.common.Strings; -import org.elasticsearch.core.TimeValue; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Objects; - -/** - * Request to open a Machine Learning Job - */ -public class OpenJobRequest implements Validatable, ToXContentObject { - - public static final ParseField TIMEOUT = new ParseField("timeout"); - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "open_job_request", - true, - a -> new OpenJobRequest((String) a[0]) - ); - - static { - PARSER.declareString(ConstructingObjectParser.constructorArg(), Job.ID); - PARSER.declareString((request, val) -> request.setTimeout(TimeValue.parseTimeValue(val, TIMEOUT.getPreferredName())), TIMEOUT); - } - - public static OpenJobRequest fromXContent(XContentParser parser) throws IOException { - return PARSER.parse(parser, null); - } - - private String jobId; - private TimeValue timeout; - - /** - * Create a new request with the desired jobId - * - * @param jobId unique jobId, must not be null - */ - public OpenJobRequest(String jobId) { - this.jobId = Objects.requireNonNull(jobId, "[job_id] must not be null"); - } - - public String getJobId() { - return jobId; - } - - /** - * The jobId to open - * - * @param jobId unique jobId, must not be null - */ - public void setJobId(String jobId) { - this.jobId = Objects.requireNonNull(jobId, "[job_id] must not be null"); - } - - public TimeValue getTimeout() { - return timeout; - } - - /** - * How long to wait for job to open before timing out the request - * - * @param timeout default value of 30 minutes - */ - public void setTimeout(TimeValue timeout) { - this.timeout = timeout; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(Job.ID.getPreferredName(), jobId); - if (timeout != null) { - builder.field(TIMEOUT.getPreferredName(), timeout.getStringRep()); - } - builder.endObject(); - return builder; - } - - @Override - public String toString() { - return Strings.toString(this); - } - - @Override - public int hashCode() { - return Objects.hash(jobId, timeout); - } - - @Override - public boolean equals(Object other) { - if (this == other) { - return true; - } - - if (other == null || getClass() != other.getClass()) { - return false; - } - - OpenJobRequest that = (OpenJobRequest) other; - return Objects.equals(jobId, that.jobId) && Objects.equals(timeout, that.timeout); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/OpenJobResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/OpenJobResponse.java deleted file mode 100644 index a9c6118db26d6..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/OpenJobResponse.java +++ /dev/null @@ -1,100 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. 
Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Objects; - -/** - * Response indicating if the Machine Learning Job is now opened or not - */ -public class OpenJobResponse implements ToXContentObject { - - private static final ParseField OPENED = new ParseField("opened"); - private static final ParseField NODE = new ParseField("node"); - - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "open_job_response", - true, - (a) -> new OpenJobResponse((Boolean) a[0], (String) a[1]) - ); - - static { - PARSER.declareBoolean(ConstructingObjectParser.constructorArg(), OPENED); - PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), NODE); - } - - private final boolean opened; - private final String node; - - OpenJobResponse(boolean opened, String node) { - this.opened = opened; - this.node = node; - } - - public static OpenJobResponse fromXContent(XContentParser parser) throws IOException { - return PARSER.parse(parser, null); - } - - /** - * Has the job opened or not - * - * @return boolean value indicating the job opened status - */ - public boolean isOpened() { - return opened; - } - - /** - * The node that the job was assigned to - * - * @return The ID of a node if the job was assigned to a node. If an empty string is returned - * it means the job was allowed to open lazily and has not yet been assigned to a node. - * If null is returned it means the server version is too old to return node - * information. - */ - public String getNode() { - return node; - } - - @Override - public boolean equals(Object other) { - if (this == other) { - return true; - } - - if (other == null || getClass() != other.getClass()) { - return false; - } - - OpenJobResponse that = (OpenJobResponse) other; - return opened == that.opened && Objects.equals(node, that.node); - } - - @Override - public int hashCode() { - return Objects.hash(opened, node); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(OPENED.getPreferredName(), opened); - if (node != null) { - builder.field(NODE.getPreferredName(), node); - } - builder.endObject(); - return builder; - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PostCalendarEventRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PostCalendarEventRequest.java deleted file mode 100644 index 0752221e9eee0..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PostCalendarEventRequest.java +++ /dev/null @@ -1,97 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
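The open-job pair removed above captured the lazy-assignment semantics in its response: an empty node id meant the job was queued to open lazily, and a null node id meant the server was too old to report one. A sketch with an assumed job id and timeout:

    import org.apache.http.HttpHost;
    import org.elasticsearch.client.RequestOptions;
    import org.elasticsearch.client.RestClient;
    import org.elasticsearch.client.RestHighLevelClient;
    import org.elasticsearch.client.ml.OpenJobRequest;
    import org.elasticsearch.client.ml.OpenJobResponse;
    import org.elasticsearch.core.TimeValue;

    public class OpenJobExample {
        public static void main(String[] args) throws Exception {
            try (RestHighLevelClient client = new RestHighLevelClient(
                    RestClient.builder(new HttpHost("localhost", 9200, "http")))) {
                OpenJobRequest request = new OpenJobRequest("my-job");
                request.setTimeout(TimeValue.timeValueMinutes(10)); // wait up to ten minutes for assignment
                OpenJobResponse response =
                    client.machineLearning().openJob(request, RequestOptions.DEFAULT);
                if (response.isOpened()) {
                    // Empty string: job allowed to open lazily, not yet assigned to a node.
                    System.out.println("opened on node: " + response.getNode());
                }
            }
        }
    }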
- */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.Validatable; -import org.elasticsearch.client.ml.calendars.Calendar; -import org.elasticsearch.client.ml.calendars.ScheduledEvent; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Collections; -import java.util.List; -import java.util.Objects; - -/** - * Request to add a ScheduledEvent to a Machine Learning calendar - */ -public class PostCalendarEventRequest implements Validatable, ToXContentObject { - - private final String calendarId; - private final List scheduledEvents; - - public static final String INCLUDE_CALENDAR_ID_KEY = "include_calendar_id"; - public static final ParseField EVENTS = new ParseField("events"); - - @SuppressWarnings("unchecked") - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "post_calendar_event_request", - a -> new PostCalendarEventRequest((String) a[0], (List) a[1]) - ); - - static { - PARSER.declareString(ConstructingObjectParser.constructorArg(), Calendar.ID); - PARSER.declareObjectArray(ConstructingObjectParser.constructorArg(), (p, c) -> ScheduledEvent.PARSER.apply(p, null), EVENTS); - } - public static final MapParams EXCLUDE_CALENDAR_ID_PARAMS = new MapParams( - Collections.singletonMap(INCLUDE_CALENDAR_ID_KEY, Boolean.toString(false)) - ); - - /** - * Create a new PostCalendarEventRequest with an existing non-null calendarId and a list of Scheduled events - * - * @param calendarId The ID of the calendar, must be non-null - * @param scheduledEvents The non-null, non-empty, list of {@link ScheduledEvent} objects to add to the calendar - */ - public PostCalendarEventRequest(String calendarId, List scheduledEvents) { - this.calendarId = Objects.requireNonNull(calendarId, "[calendar_id] must not be null."); - this.scheduledEvents = Objects.requireNonNull(scheduledEvents, "[events] must not be null."); - if (scheduledEvents.isEmpty()) { - throw new IllegalArgumentException("At least 1 event is required"); - } - } - - public String getCalendarId() { - return calendarId; - } - - public List getScheduledEvents() { - return scheduledEvents; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - if (params.paramAsBoolean(INCLUDE_CALENDAR_ID_KEY, true)) { - builder.field(Calendar.ID.getPreferredName(), calendarId); - } - builder.field(EVENTS.getPreferredName(), scheduledEvents); - builder.endObject(); - return builder; - } - - @Override - public int hashCode() { - return Objects.hash(calendarId, scheduledEvents); - } - - @Override - public boolean equals(Object obj) { - if (this == obj) { - return true; - } - if (obj == null || getClass() != obj.getClass()) { - return false; - } - PostCalendarEventRequest other = (PostCalendarEventRequest) obj; - return Objects.equals(calendarId, other.calendarId) && Objects.equals(scheduledEvents, other.scheduledEvents); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PostCalendarEventResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PostCalendarEventResponse.java deleted file mode 100644 index 4aeb8da98f260..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PostCalendarEventResponse.java +++ /dev/null @@ -1,81 +0,0 @@ 
-/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.ml.calendars.ScheduledEvent; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.List; -import java.util.Objects; - -/** - * Response to adding ScheduledEvent(s) to a Machine Learning calendar - */ -public class PostCalendarEventResponse implements ToXContentObject { - - private final List scheduledEvents; - public static final ParseField EVENTS = new ParseField("events"); - - @SuppressWarnings("unchecked") - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "post_calendar_event_response", - true, - a -> new PostCalendarEventResponse((List) a[0]) - ); - - static { - PARSER.declareObjectArray(ConstructingObjectParser.constructorArg(), (p, c) -> ScheduledEvent.PARSER.apply(p, null), EVENTS); - } - - public static PostCalendarEventResponse fromXContent(XContentParser parser) throws IOException { - return PARSER.parse(parser, null); - } - - /** - * Create a new PostCalendarEventResponse containing the scheduled Events - * - * @param scheduledEvents The list of {@link ScheduledEvent} objects - */ - public PostCalendarEventResponse(List scheduledEvents) { - this.scheduledEvents = scheduledEvents; - } - - public List getScheduledEvents() { - return scheduledEvents; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(EVENTS.getPreferredName(), scheduledEvents); - builder.endObject(); - return builder; - } - - @Override - public int hashCode() { - return Objects.hash(scheduledEvents); - } - - @Override - public boolean equals(Object obj) { - if (this == obj) { - return true; - } - if (obj == null || getClass() != obj.getClass()) { - return false; - } - PostCalendarEventResponse other = (PostCalendarEventResponse) obj; - return Objects.equals(scheduledEvents, other.scheduledEvents); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PostDataRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PostDataRequest.java deleted file mode 100644 index 5918f15c412c4..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PostDataRequest.java +++ /dev/null @@ -1,213 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */
-package org.elasticsearch.client.ml;
-
-import org.elasticsearch.client.Validatable;
-import org.elasticsearch.client.ml.job.config.Job;
-import org.elasticsearch.common.bytes.BytesArray;
-import org.elasticsearch.common.bytes.BytesReference;
-import org.elasticsearch.xcontent.ConstructingObjectParser;
-import org.elasticsearch.xcontent.ParseField;
-import org.elasticsearch.xcontent.ToXContentObject;
-import org.elasticsearch.xcontent.XContentBuilder;
-import org.elasticsearch.xcontent.XContentType;
-
-import java.io.IOException;
-import java.nio.ByteBuffer;
-import java.nio.charset.StandardCharsets;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Map;
-import java.util.Objects;
-
-/**
- * Request to post data to a Machine Learning job
- */
-public class PostDataRequest implements Validatable, ToXContentObject {
-
-    public static final ParseField RESET_START = new ParseField("reset_start");
-    public static final ParseField RESET_END = new ParseField("reset_end");
-    public static final ParseField CONTENT_TYPE = new ParseField("content_type");
-
-    public static final ConstructingObjectParser<PostDataRequest, Void> PARSER = new ConstructingObjectParser<>(
-        "post_data_request",
-        (a) -> new PostDataRequest((String) a[0], XContentType.fromMediaType((String) a[1]), new byte[0])
-    );
-
-    static {
-        PARSER.declareString(ConstructingObjectParser.constructorArg(), Job.ID);
-        PARSER.declareString(ConstructingObjectParser.constructorArg(), CONTENT_TYPE);
-        PARSER.declareStringOrNull(PostDataRequest::setResetEnd, RESET_END);
-        PARSER.declareStringOrNull(PostDataRequest::setResetStart, RESET_START);
-    }
-
-    private final String jobId;
-    private final XContentType xContentType;
-    private final BytesReference content;
-    private String resetStart;
-    private String resetEnd;
-
-    /**
-     * Create a new PostDataRequest object
-     *
-     * @param jobId non-null jobId of the job to post data to
-     * @param xContentType content type of the data to post. Only {@link XContentType#JSON} or {@link XContentType#SMILE} are supported
-     * @param content bulk serialized content in the format of the passed {@link XContentType}
-     */
-    public PostDataRequest(String jobId, XContentType xContentType, BytesReference content) {
-        this.jobId = Objects.requireNonNull(jobId, "job_id must not be null");
-        this.xContentType = Objects.requireNonNull(xContentType, "content_type must not be null");
-        this.content = Objects.requireNonNull(content, "content must not be null");
-    }
-
-    /**
-     * Create a new PostDataRequest object referencing the passed {@code byte[]} content
-     *
-     * @param jobId non-null jobId of the job to post data to
-     * @param xContentType content type of the data to post. Only {@link XContentType#JSON} or {@link XContentType#SMILE} are supported
-     * @param content bulk serialized content in the format of the passed {@link XContentType}
-     */
-    public PostDataRequest(String jobId, XContentType xContentType, byte[] content) {
-        this(jobId, xContentType, new BytesArray(content));
-    }
-
-    /**
-     * Create a new PostDataRequest object referencing the passed {@link JsonBuilder} object
-     *
-     * @param jobId non-null jobId of the job to post data to
-     * @param builder {@link JsonBuilder} object containing documents to be serialized and sent in {@link XContentType#JSON} format
-     */
-    public PostDataRequest(String jobId, JsonBuilder builder) {
-        this(jobId, XContentType.JSON, builder.build());
-    }
-
-    public String getJobId() {
-        return jobId;
-    }
-
-    public String getResetStart() {
-        return resetStart;
-    }
-
-    /**
-     * Specifies the start of the bucket resetting range
-     *
-     * @param resetStart String representation of a timestamp; may be an epoch seconds, epoch millis or an ISO 8601 string
-     */
-    public void setResetStart(String resetStart) {
-        this.resetStart = resetStart;
-    }
-
-    public String getResetEnd() {
-        return resetEnd;
-    }
-
-    /**
-     * Specifies the end of the bucket resetting range
-     *
-     * @param resetEnd String representation of a timestamp; may be an epoch seconds, epoch millis or an ISO 8601 string
-     */
-    public void setResetEnd(String resetEnd) {
-        this.resetEnd = resetEnd;
-    }
-
-    public BytesReference getContent() {
-        return content;
-    }
-
-    public XContentType getXContentType() {
-        return xContentType;
-    }
-
-    @Override
-    public int hashCode() {
-        // We leave out the content for server side parity
-        return Objects.hash(jobId, resetStart, resetEnd, xContentType);
-    }
-
-    @Override
-    public boolean equals(Object obj) {
-        if (obj == this) {
-            return true;
-        }
-
-        if (obj == null || getClass() != obj.getClass()) {
-            return false;
-        }
-
-        // We leave out the content for server side parity
-        PostDataRequest other = (PostDataRequest) obj;
-        return Objects.equals(jobId, other.jobId)
-            && Objects.equals(resetStart, other.resetStart)
-            && Objects.equals(resetEnd, other.resetEnd)
-            && Objects.equals(xContentType, other.xContentType);
-    }
-
-    @Override
-    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
-        builder.startObject();
-        builder.field(Job.ID.getPreferredName(), jobId);
-        builder.field(CONTENT_TYPE.getPreferredName(), xContentType.mediaType());
-        if (resetEnd != null) {
-            builder.field(RESET_END.getPreferredName(), resetEnd);
-        }
-        if (resetStart != null) {
-            builder.field(RESET_START.getPreferredName(), resetStart);
-        }
-        builder.endObject();
-        return builder;
-    }
-
-    /**
-     * Class for incrementally building a bulk document request in {@link XContentType#JSON} format
-     */
-    public static class JsonBuilder {
-
-        private final List<ByteBuffer> bytes = new ArrayList<>();
-
-        /**
-         * Add a document via a {@code byte[]} array
-         *
-         * @param doc {@code byte[]} array of a serialized JSON object
-         */
-        public JsonBuilder addDoc(byte[] doc) {
-            bytes.add(ByteBuffer.wrap(doc));
-            return this;
-        }
-
-        /**
-         * Add a document via a serialized JSON String
-         *
-         * @param doc a serialized JSON String
-         */
-        public JsonBuilder addDoc(String doc) {
-            bytes.add(ByteBuffer.wrap(doc.getBytes(StandardCharsets.UTF_8)));
-            return this;
-        }
-
-        /**
-         * Add a document via an object map
-         *
-         * @param doc document object to add to bulk request
-         * @throws IOException on parsing/serialization errors
-         */
-        public JsonBuilder addDoc(Map<String, Object> doc) throws IOException {
-            try (XContentBuilder builder = XContentBuilder.builder(XContentType.JSON.xContent())) {
-                builder.map(doc);
-                bytes.add(ByteBuffer.wrap(BytesReference.toBytes(BytesReference.bytes(builder))));
-            }
-            return this;
-        }
-
-        private BytesReference build() {
-            ByteBuffer[] buffers = bytes.toArray(new ByteBuffer[bytes.size()]);
-            return BytesReference.fromByteBuffers(buffers);
-        }
-
-    }
-}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PostDataResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PostDataResponse.java
deleted file mode 100644
index 4d8c8886fd896..0000000000000
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PostDataResponse.java
+++ /dev/null
@@ -1,62 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-package org.elasticsearch.client.ml;
-
-import org.elasticsearch.client.ml.job.process.DataCounts;
-import org.elasticsearch.xcontent.ToXContent;
-import org.elasticsearch.xcontent.ToXContentObject;
-import org.elasticsearch.xcontent.XContentBuilder;
-import org.elasticsearch.xcontent.XContentParser;
-
-import java.io.IOException;
-import java.util.Objects;
-
-/**
- * Response object when posting data to a Machine Learning Job
- */
-public class PostDataResponse implements ToXContentObject {
-
-    private DataCounts dataCounts;
-
-    public static PostDataResponse fromXContent(XContentParser parser) throws IOException {
-        return new PostDataResponse(DataCounts.PARSER.parse(parser, null));
-    }
-
-    public PostDataResponse(DataCounts counts) {
-        this.dataCounts = counts;
-    }
-
-    public DataCounts getDataCounts() {
-        return dataCounts;
-    }
-
-    @Override
-    public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException {
-        return dataCounts.toXContent(builder, params);
-    }
-
-    @Override
-    public int hashCode() {
-        return Objects.hashCode(dataCounts);
-    }
-
-    @Override
-    public boolean equals(Object obj) {
-        if (obj == this) {
-            return true;
-        }
-
-        if (obj == null || getClass() != obj.getClass()) {
-            return false;
-        }
-
-        PostDataResponse other = (PostDataResponse) obj;
-        return Objects.equals(dataCounts, other.dataCounts);
-    }
-
-}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PreviewDatafeedRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PreviewDatafeedRequest.java
deleted file mode 100644
index 8bd277fa31efc..0000000000000
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PreviewDatafeedRequest.java
+++ /dev/null
@@ -1,136 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-package org.elasticsearch.client.ml;
-
-import org.elasticsearch.client.Validatable;
-import org.elasticsearch.client.ml.datafeed.DatafeedConfig;
-import org.elasticsearch.client.ml.job.config.Job;
-import org.elasticsearch.common.Strings;
-import org.elasticsearch.core.Nullable;
-import org.elasticsearch.xcontent.ConstructingObjectParser;
-import org.elasticsearch.xcontent.ParseField;
-import org.elasticsearch.xcontent.ToXContentObject;
-import org.elasticsearch.xcontent.XContentBuilder;
-import org.elasticsearch.xcontent.XContentParser;
-
-import java.io.IOException;
-import java.util.Objects;
-
-/**
- * Request to preview a MachineLearning Datafeed
- */
-public class PreviewDatafeedRequest implements Validatable, ToXContentObject {
-
-    private static final ParseField DATAFEED_CONFIG = new ParseField("datafeed_config");
-    private static final ParseField JOB_CONFIG = new ParseField("job_config");
-
-    public static final ConstructingObjectParser<PreviewDatafeedRequest, Void> PARSER = new ConstructingObjectParser<>(
-        "preview_datafeed_request",
-        a -> new PreviewDatafeedRequest((String) a[0], (DatafeedConfig.Builder) a[1], (Job.Builder) a[2])
-    );
-
-    static {
-        PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), DatafeedConfig.ID);
-        PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(), DatafeedConfig.PARSER, DATAFEED_CONFIG);
-        PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(), Job.PARSER, JOB_CONFIG);
-    }
-
-    public static PreviewDatafeedRequest fromXContent(XContentParser parser) throws IOException {
-        return PARSER.parse(parser, null);
-    }
-
-    private final String datafeedId;
-    private final DatafeedConfig datafeedConfig;
-    private final Job jobConfig;
-
-    private PreviewDatafeedRequest(
-        @Nullable String datafeedId,
-        @Nullable DatafeedConfig.Builder datafeedConfig,
-        @Nullable Job.Builder jobConfig
-    ) {
-        this.datafeedId = datafeedId;
-        this.datafeedConfig = datafeedConfig == null ? null : datafeedConfig.build();
-        this.jobConfig = jobConfig == null ? null : jobConfig.build();
-    }
-
-    /**
-     * Create a new request with the desired datafeedId
-     *
-     * @param datafeedId unique datafeedId, must not be null
-     */
-    public PreviewDatafeedRequest(String datafeedId) {
-        this.datafeedId = Objects.requireNonNull(datafeedId, "[datafeed_id] must not be null");
-        this.datafeedConfig = null;
-        this.jobConfig = null;
-    }
-
-    /**
-     * Create a new request to preview the provided datafeed config and optional job config
-     * @param datafeedConfig The datafeed to preview
-     * @param jobConfig The associated job config (required if the datafeed does not refer to an existing job)
-     */
-    public PreviewDatafeedRequest(DatafeedConfig datafeedConfig, Job jobConfig) {
-        this.datafeedId = null;
-        this.datafeedConfig = datafeedConfig;
-        this.jobConfig = jobConfig;
-    }
-
-    public String getDatafeedId() {
-        return datafeedId;
-    }
-
-    public DatafeedConfig getDatafeedConfig() {
-        return datafeedConfig;
-    }
-
-    public Job getJobConfig() {
-        return jobConfig;
-    }
-
-    @Override
-    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
-        builder.startObject();
-        if (datafeedId != null) {
-            builder.field(DatafeedConfig.ID.getPreferredName(), datafeedId);
-        }
-        if (datafeedConfig != null) {
-            builder.field(DATAFEED_CONFIG.getPreferredName(), datafeedConfig);
-        }
-        if (jobConfig != null) {
-            builder.field(JOB_CONFIG.getPreferredName(), jobConfig);
-        }
-        builder.endObject();
-        return builder;
-    }
-
-    @Override
-    public String toString() {
-        return Strings.toString(this);
-    }
-
-    @Override
-    public int hashCode() {
-        return Objects.hash(datafeedId, datafeedConfig, jobConfig);
-    }
-
-    @Override
-    public boolean equals(Object other) {
-        if (this == other) {
-            return true;
-        }
-
-        if (other == null || getClass() != other.getClass()) {
-            return false;
-        }
-
-        PreviewDatafeedRequest that = (PreviewDatafeedRequest) other;
-        return Objects.equals(datafeedId, that.datafeedId)
-            && Objects.equals(datafeedConfig, that.datafeedConfig)
-            && Objects.equals(jobConfig, that.jobConfig);
-    }
-}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PreviewDatafeedResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PreviewDatafeedResponse.java
deleted file mode 100644
index 44ed4e40cd165..0000000000000
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PreviewDatafeedResponse.java
+++ /dev/null
@@ -1,103 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-package org.elasticsearch.client.ml;
-
-import org.elasticsearch.common.Strings;
-import org.elasticsearch.common.bytes.BytesReference;
-import org.elasticsearch.common.io.stream.StreamInput;
-import org.elasticsearch.xcontent.DeprecationHandler;
-import org.elasticsearch.xcontent.NamedXContentRegistry;
-import org.elasticsearch.xcontent.ToXContentObject;
-import org.elasticsearch.xcontent.XContentBuilder;
-import org.elasticsearch.xcontent.XContentFactory;
-import org.elasticsearch.xcontent.XContentParser;
-import org.elasticsearch.xcontent.XContentType;
-
-import java.io.IOException;
-import java.io.InputStream;
-import java.util.Collections;
-import java.util.List;
-import java.util.Map;
-import java.util.Objects;
-import java.util.stream.Collectors;
-
-/**
- * Response containing a datafeed preview in JSON format
- */
-public class PreviewDatafeedResponse implements ToXContentObject {
-
-    private BytesReference preview;
-
-    public static PreviewDatafeedResponse fromXContent(XContentParser parser) throws IOException {
-        try (XContentBuilder builder = XContentFactory.jsonBuilder()) {
-            parser.nextToken();
-            builder.copyCurrentStructure(parser);
-            return new PreviewDatafeedResponse(BytesReference.bytes(builder));
-        }
-    }
-
-    public PreviewDatafeedResponse(BytesReference preview) {
-        this.preview = preview;
-    }
-
-    public BytesReference getPreview() {
-        return preview;
-    }
-
-    /**
-     * Parses the preview to a list of {@link Map} objects
-     * @return List of previewed data
-     * @throws IOException If there is a parsing issue with the {@link BytesReference}
-     * @throws java.lang.ClassCastException If casting the raw {@link Object} entries to a {@link Map} fails
-     */
-    @SuppressWarnings("unchecked")
-    public List<Map<String, Object>> getDataList() throws IOException {
-        try (
-            StreamInput streamInput = preview.streamInput();
-            XContentParser parser = XContentType.JSON.xContent()
-                .createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, streamInput)
-        ) {
-            XContentParser.Token token = parser.nextToken();
-            if (token == XContentParser.Token.START_ARRAY) {
-                return parser.listOrderedMap().stream().map(obj -> (Map<String, Object>) obj).collect(Collectors.toList());
-            } else {
-                return Collections.singletonList(parser.mapOrdered());
-            }
-        }
-    }
-
-    @Override
-    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
-        try (InputStream stream = preview.streamInput()) {
-            builder.rawValue(stream, XContentType.JSON);
-        }
-        return builder;
-    }
-
-    @Override
-    public int hashCode() {
-        return Objects.hash(preview);
-    }
-
-    @Override
-    public boolean equals(Object obj) {
-        if (obj == null) {
-            return false;
-        }
-        if (getClass() != obj.getClass()) {
-            return false;
-        }
-        PreviewDatafeedResponse other = (PreviewDatafeedResponse) obj;
-        return Objects.equals(preview, other.preview);
-    }
-
-    @Override
-    public final String toString() {
-        return Strings.toString(this);
-    }
-}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutCalendarJobRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutCalendarJobRequest.java
deleted file mode 100644
index cba01a764f6ca..0000000000000
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutCalendarJobRequest.java
+++ /dev/null
@@ -1,70 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-package org.elasticsearch.client.ml;
-
-import org.elasticsearch.client.Validatable;
-
-import java.security.InvalidParameterException;
-import java.util.Arrays;
-import java.util.List;
-import java.util.Objects;
-
-/**
- * Request class for adding Machine Learning Jobs to an existing calendar
- */
-public class PutCalendarJobRequest implements Validatable {
-
-    private final List<String> jobIds;
-    private final String calendarId;
-
-    /**
-     * Create a new request referencing an existing Calendar and which JobIds to add
-     * to it.
-     *
-     * @param calendarId The non-null ID of the calendar
-     * @param jobIds JobIds to add to the calendar, cannot be empty, or contain null values.
-     * It can be a list of jobs or groups.
-     */
-    public PutCalendarJobRequest(String calendarId, String... jobIds) {
-        this.calendarId = Objects.requireNonNull(calendarId, "[calendar_id] must not be null.");
-        if (jobIds.length == 0) {
-            throw new InvalidParameterException("jobIds must not be empty.");
-        }
-        if (Arrays.stream(jobIds).anyMatch(Objects::isNull)) {
-            throw new NullPointerException("jobIds must not contain null values.");
-        }
-        this.jobIds = Arrays.asList(jobIds);
-    }
-
-    public List<String> getJobIds() {
-        return jobIds;
-    }
-
-    public String getCalendarId() {
-        return calendarId;
-    }
-
-    @Override
-    public int hashCode() {
-        return Objects.hash(jobIds, calendarId);
-    }
-
-    @Override
-    public boolean equals(Object other) {
-        if (this == other) {
-            return true;
-        }
-
-        if (other == null || getClass() != other.getClass()) {
-            return false;
-        }
-
-        PutCalendarJobRequest that = (PutCalendarJobRequest) other;
-        return Objects.equals(jobIds, that.jobIds) && Objects.equals(calendarId, that.calendarId);
-    }
-}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutCalendarRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutCalendarRequest.java
deleted file mode 100644
index 7a45bc3163732..0000000000000
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutCalendarRequest.java
+++ /dev/null
@@ -1,56 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */ - -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.Validatable; -import org.elasticsearch.client.ml.calendars.Calendar; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Objects; - -/** - * Request to create a new Machine Learning calendar - */ -public class PutCalendarRequest implements Validatable, ToXContentObject { - - private final Calendar calendar; - - public PutCalendarRequest(Calendar calendar) { - this.calendar = calendar; - } - - public Calendar getCalendar() { - return calendar; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - calendar.toXContent(builder, params); - return builder; - } - - @Override - public int hashCode() { - return Objects.hash(calendar); - } - - @Override - public boolean equals(Object obj) { - if (obj == null) { - return false; - } - if (getClass() != obj.getClass()) { - return false; - } - PutCalendarRequest other = (PutCalendarRequest) obj; - return Objects.equals(calendar, other.calendar); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutCalendarResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutCalendarResponse.java deleted file mode 100644 index 3e3170a954815..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutCalendarResponse.java +++ /dev/null @@ -1,65 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ - -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.ml.calendars.Calendar; -import org.elasticsearch.common.Strings; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Objects; - -public class PutCalendarResponse implements ToXContentObject { - - public static PutCalendarResponse fromXContent(XContentParser parser) throws IOException { - return new PutCalendarResponse(Calendar.PARSER.parse(parser, null)); - } - - private final Calendar calendar; - - PutCalendarResponse(Calendar calendar) { - this.calendar = calendar; - } - - public Calendar getCalendar() { - return calendar; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - calendar.toXContent(builder, params); - return builder; - } - - @Override - public int hashCode() { - return Objects.hash(calendar); - } - - @Override - public boolean equals(Object obj) { - if (this == obj) { - return true; - } - - if (obj == null || getClass() != obj.getClass()) { - return false; - } - - PutCalendarResponse other = (PutCalendarResponse) obj; - return Objects.equals(calendar, other.calendar); - } - - @Override - public final String toString() { - return Strings.toString(this); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutDataFrameAnalyticsRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutDataFrameAnalyticsRequest.java deleted file mode 100644 index 33015ed97bf97..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutDataFrameAnalyticsRequest.java +++ /dev/null @@ -1,65 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ - -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.Validatable; -import org.elasticsearch.client.ValidationException; -import org.elasticsearch.client.ml.dataframe.DataFrameAnalyticsConfig; -import org.elasticsearch.common.Strings; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Objects; -import java.util.Optional; - -public class PutDataFrameAnalyticsRequest implements ToXContentObject, Validatable { - - private final DataFrameAnalyticsConfig config; - - public PutDataFrameAnalyticsRequest(DataFrameAnalyticsConfig config) { - this.config = config; - } - - public DataFrameAnalyticsConfig getConfig() { - return config; - } - - @Override - public Optional validate() { - if (config == null) { - return Optional.of(ValidationException.withError("put requires a non-null data frame analytics config")); - } - return Optional.empty(); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - return config.toXContent(builder, params); - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - - PutDataFrameAnalyticsRequest other = (PutDataFrameAnalyticsRequest) o; - return Objects.equals(config, other.config); - } - - @Override - public int hashCode() { - return Objects.hash(config); - } - - @Override - public String toString() { - return Strings.toString(this); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutDataFrameAnalyticsResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutDataFrameAnalyticsResponse.java deleted file mode 100644 index 7387de559c256..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutDataFrameAnalyticsResponse.java +++ /dev/null @@ -1,46 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ - -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.ml.dataframe.DataFrameAnalyticsConfig; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Objects; - -public class PutDataFrameAnalyticsResponse { - - public static PutDataFrameAnalyticsResponse fromXContent(XContentParser parser) throws IOException { - return new PutDataFrameAnalyticsResponse(DataFrameAnalyticsConfig.fromXContent(parser)); - } - - private final DataFrameAnalyticsConfig config; - - public PutDataFrameAnalyticsResponse(DataFrameAnalyticsConfig config) { - this.config = config; - } - - public DataFrameAnalyticsConfig getConfig() { - return config; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - - PutDataFrameAnalyticsResponse other = (PutDataFrameAnalyticsResponse) o; - return Objects.equals(config, other.config); - } - - @Override - public int hashCode() { - return Objects.hash(config); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutDatafeedRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutDatafeedRequest.java deleted file mode 100644 index d079f1b0fc8d6..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutDatafeedRequest.java +++ /dev/null @@ -1,68 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.Validatable; -import org.elasticsearch.client.ml.datafeed.DatafeedConfig; -import org.elasticsearch.common.Strings; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Objects; - -/** - * Request to create a new Machine Learning Datafeed given a {@link DatafeedConfig} configuration - */ -public class PutDatafeedRequest implements Validatable, ToXContentObject { - - private final DatafeedConfig datafeed; - - /** - * Construct a new PutDatafeedRequest - * - * @param datafeed a {@link DatafeedConfig} configuration to create - */ - public PutDatafeedRequest(DatafeedConfig datafeed) { - this.datafeed = datafeed; - } - - public DatafeedConfig getDatafeed() { - return datafeed; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - return datafeed.toXContent(builder, params); - } - - @Override - public boolean equals(Object object) { - if (this == object) { - return true; - } - - if (object == null || getClass() != object.getClass()) { - return false; - } - - PutDatafeedRequest request = (PutDatafeedRequest) object; - return Objects.equals(datafeed, request.datafeed); - } - - @Override - public int hashCode() { - return Objects.hash(datafeed); - } - - @Override - public final String toString() { - return Strings.toString(this); - } - -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutDatafeedResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutDatafeedResponse.java deleted file mode 100644 index 6abaf8deb4be3..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutDatafeedResponse.java +++ /dev/null @@ -1,59 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.ml.datafeed.DatafeedConfig; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Objects; - -/** - * Response containing the newly created {@link DatafeedConfig} - */ -public class PutDatafeedResponse implements ToXContentObject { - - private DatafeedConfig datafeed; - - public static PutDatafeedResponse fromXContent(XContentParser parser) throws IOException { - return new PutDatafeedResponse(DatafeedConfig.PARSER.parse(parser, null).build()); - } - - PutDatafeedResponse(DatafeedConfig datafeed) { - this.datafeed = datafeed; - } - - public DatafeedConfig getResponse() { - return datafeed; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - datafeed.toXContent(builder, params); - return builder; - } - - @Override - public boolean equals(Object object) { - if (this == object) { - return true; - } - if (object == null || getClass() != object.getClass()) { - return false; - } - PutDatafeedResponse response = (PutDatafeedResponse) object; - return Objects.equals(datafeed, response.datafeed); - } - - @Override - public int hashCode() { - return Objects.hash(datafeed); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutFilterRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutFilterRequest.java deleted file mode 100644 index dd08f7a96c9b0..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutFilterRequest.java +++ /dev/null @@ -1,68 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.Validatable; -import org.elasticsearch.client.ml.job.config.MlFilter; -import org.elasticsearch.common.Strings; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Objects; - -/** - * Request to create a new Machine Learning MlFilter given a {@link MlFilter} configuration - */ -public class PutFilterRequest implements Validatable, ToXContentObject { - - private final MlFilter filter; - - /** - * Construct a new PutMlFilterRequest - * - * @param filter a {@link MlFilter} configuration to create - */ - public PutFilterRequest(MlFilter filter) { - this.filter = filter; - } - - public MlFilter getMlFilter() { - return filter; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - return filter.toXContent(builder, params); - } - - @Override - public boolean equals(Object object) { - if (this == object) { - return true; - } - - if (object == null || getClass() != object.getClass()) { - return false; - } - - PutFilterRequest request = (PutFilterRequest) object; - return Objects.equals(filter, request.filter); - } - - @Override - public int hashCode() { - return Objects.hash(filter); - } - - @Override - public final String toString() { - return Strings.toString(this); - } - -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutFilterResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutFilterResponse.java deleted file mode 100644 index 48a850be7d228..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutFilterResponse.java +++ /dev/null @@ -1,59 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.ml.job.config.MlFilter; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Objects; - -/** - * Response containing the newly created {@link MlFilter} - */ -public class PutFilterResponse implements ToXContentObject { - - private MlFilter filter; - - public static PutFilterResponse fromXContent(XContentParser parser) throws IOException { - return new PutFilterResponse(MlFilter.PARSER.parse(parser, null).build()); - } - - PutFilterResponse(MlFilter filter) { - this.filter = filter; - } - - public MlFilter getResponse() { - return filter; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - filter.toXContent(builder, params); - return builder; - } - - @Override - public boolean equals(Object object) { - if (this == object) { - return true; - } - if (object == null || getClass() != object.getClass()) { - return false; - } - PutFilterResponse response = (PutFilterResponse) object; - return Objects.equals(filter, response.filter); - } - - @Override - public int hashCode() { - return Objects.hash(filter); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutJobRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutJobRequest.java deleted file mode 100644 index 04bfc4f3f9169..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutJobRequest.java +++ /dev/null @@ -1,68 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.Validatable; -import org.elasticsearch.client.ml.job.config.Job; -import org.elasticsearch.common.Strings; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Objects; - -/** - * Request to create a new Machine Learning Job given a {@link Job} configuration - */ -public class PutJobRequest implements Validatable, ToXContentObject { - - private final Job job; - - /** - * Construct a new PutJobRequest - * - * @param job a {@link Job} configuration to create - */ - public PutJobRequest(Job job) { - this.job = job; - } - - public Job getJob() { - return job; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - return job.toXContent(builder, params); - } - - @Override - public boolean equals(Object object) { - if (this == object) { - return true; - } - - if (object == null || getClass() != object.getClass()) { - return false; - } - - PutJobRequest request = (PutJobRequest) object; - return Objects.equals(job, request.job); - } - - @Override - public int hashCode() { - return Objects.hash(job); - } - - @Override - public final String toString() { - return Strings.toString(this); - } - -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutJobResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutJobResponse.java deleted file mode 100644 index 532a6f54ba30a..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutJobResponse.java +++ /dev/null @@ -1,59 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.ml.job.config.Job; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Objects; - -/** - * Response containing the newly created {@link Job} - */ -public class PutJobResponse implements ToXContentObject { - - private Job job; - - public static PutJobResponse fromXContent(XContentParser parser) throws IOException { - return new PutJobResponse(Job.PARSER.parse(parser, null).build()); - } - - PutJobResponse(Job job) { - this.job = job; - } - - public Job getResponse() { - return job; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - job.toXContent(builder, params); - return builder; - } - - @Override - public boolean equals(Object object) { - if (this == object) { - return true; - } - if (object == null || getClass() != object.getClass()) { - return false; - } - PutJobResponse response = (PutJobResponse) object; - return Objects.equals(job, response.job); - } - - @Override - public int hashCode() { - return Objects.hash(job); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutTrainedModelAliasRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutTrainedModelAliasRequest.java deleted file mode 100644 index 7988ae35f1c6a..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutTrainedModelAliasRequest.java +++ /dev/null @@ -1,56 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ - -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.Validatable; - -import java.util.Objects; - -public class PutTrainedModelAliasRequest implements Validatable { - - public static final String REASSIGN = "reassign"; - - private final String modelAlias; - private final String modelId; - private final Boolean reassign; - - public PutTrainedModelAliasRequest(String modelAlias, String modelId, Boolean reassign) { - this.modelAlias = Objects.requireNonNull(modelAlias); - this.modelId = Objects.requireNonNull(modelId); - this.reassign = reassign; - } - - public String getModelAlias() { - return modelAlias; - } - - public String getModelId() { - return modelId; - } - - public Boolean getReassign() { - return reassign; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - PutTrainedModelAliasRequest request = (PutTrainedModelAliasRequest) o; - return Objects.equals(modelAlias, request.modelAlias) - && Objects.equals(modelId, request.modelId) - && Objects.equals(reassign, request.reassign); - } - - @Override - public int hashCode() { - return Objects.hash(modelAlias, modelId, reassign); - } - -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutTrainedModelRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutTrainedModelRequest.java deleted file mode 100644 index 5276713c921be..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutTrainedModelRequest.java +++ /dev/null @@ -1,54 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.Validatable; -import org.elasticsearch.client.ml.inference.TrainedModelConfig; -import org.elasticsearch.common.Strings; -import org.elasticsearch.xcontent.ToXContent; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Objects; - -public class PutTrainedModelRequest implements Validatable, ToXContentObject { - - private final TrainedModelConfig config; - - public PutTrainedModelRequest(TrainedModelConfig config) { - this.config = config; - } - - public TrainedModelConfig getTrainedModelConfig() { - return config; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { - return config.toXContent(builder, params); - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - PutTrainedModelRequest request = (PutTrainedModelRequest) o; - return Objects.equals(config, request.config); - } - - @Override - public int hashCode() { - return Objects.hash(config); - } - - @Override - public final String toString() { - return Strings.toString(config); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutTrainedModelResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutTrainedModelResponse.java deleted file mode 100644 index dabcc7d24cc0f..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutTrainedModelResponse.java +++ /dev/null @@ -1,51 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.ml.inference.TrainedModelConfig; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Objects; - -public class PutTrainedModelResponse implements ToXContentObject { - - private final TrainedModelConfig trainedModelConfig; - - public static PutTrainedModelResponse fromXContent(XContentParser parser) throws IOException { - return new PutTrainedModelResponse(TrainedModelConfig.PARSER.parse(parser, null).build()); - } - - public PutTrainedModelResponse(TrainedModelConfig trainedModelConfig) { - this.trainedModelConfig = trainedModelConfig; - } - - public TrainedModelConfig getResponse() { - return trainedModelConfig; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - return trainedModelConfig.toXContent(builder, params); - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - PutTrainedModelResponse response = (PutTrainedModelResponse) o; - return Objects.equals(trainedModelConfig, response.trainedModelConfig); - } - - @Override - public int hashCode() { - return Objects.hash(trainedModelConfig); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/RevertModelSnapshotRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/RevertModelSnapshotRequest.java deleted file mode 100644 index 0295d72b7d9c5..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/RevertModelSnapshotRequest.java +++ /dev/null @@ -1,103 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.Validatable; -import org.elasticsearch.client.ml.job.config.Job; -import org.elasticsearch.client.ml.job.process.ModelSnapshot; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Objects; - -/** - * A request to revert to a specific model snapshot for a given job - */ -public class RevertModelSnapshotRequest implements Validatable, ToXContentObject { - - public static final ParseField DELETE_INTERVENING = new ParseField("delete_intervening_results"); - - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "revert_model_snapshots_request", - a -> new RevertModelSnapshotRequest((String) a[0], (String) a[1]) - ); - - static { - PARSER.declareString(ConstructingObjectParser.constructorArg(), Job.ID); - PARSER.declareString(ConstructingObjectParser.constructorArg(), ModelSnapshot.SNAPSHOT_ID); - PARSER.declareBoolean(RevertModelSnapshotRequest::setDeleteInterveningResults, DELETE_INTERVENING); - } - - private final String jobId; - private final String snapshotId; - private Boolean deleteInterveningResults; - - /** - * Constructs a request to revert to a given model snapshot - * @param jobId id of the job for which to revert the model snapshot - * @param snapshotId id of the snapshot to which to revert - */ - public RevertModelSnapshotRequest(String jobId, String snapshotId) { - this.jobId = Objects.requireNonNull(jobId, "[" + Job.ID + "] must not be null"); - this.snapshotId = Objects.requireNonNull(snapshotId, "[" + ModelSnapshot.SNAPSHOT_ID + "] must not be null"); - } - - public String getJobId() { - return jobId; - } - - public String getSnapshotId() { - return snapshotId; - } - - public Boolean getDeleteInterveningResults() { - return deleteInterveningResults; - } - - /** - * Sets the request flag that indicates whether or not intervening results should be deleted. - * @param deleteInterveningResults Flag that indicates whether or not intervening results should be deleted. 
- */ - public void setDeleteInterveningResults(Boolean deleteInterveningResults) { - this.deleteInterveningResults = deleteInterveningResults; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(Job.ID.getPreferredName(), jobId); - builder.field(ModelSnapshot.SNAPSHOT_ID.getPreferredName(), snapshotId); - if (deleteInterveningResults != null) { - builder.field(DELETE_INTERVENING.getPreferredName(), deleteInterveningResults); - } - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object obj) { - if (obj == null) { - return false; - } - if (getClass() != obj.getClass()) { - return false; - } - RevertModelSnapshotRequest request = (RevertModelSnapshotRequest) obj; - return Objects.equals(jobId, request.jobId) - && Objects.equals(snapshotId, request.snapshotId) - && Objects.equals(deleteInterveningResults, request.deleteInterveningResults); - } - - @Override - public int hashCode() { - return Objects.hash(jobId, snapshotId, deleteInterveningResults); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/RevertModelSnapshotResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/RevertModelSnapshotResponse.java deleted file mode 100644 index 6110569ac9197..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/RevertModelSnapshotResponse.java +++ /dev/null @@ -1,81 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.ml.job.process.ModelSnapshot; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Objects; - -/** - * A response containing the reverted model snapshot - */ -public class RevertModelSnapshotResponse implements ToXContentObject { - - private static final ParseField MODEL = new ParseField("model"); - - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "revert_model_snapshot_response", - true, - a -> new RevertModelSnapshotResponse((ModelSnapshot.Builder) a[0]) - ); - - static { - PARSER.declareObject(ConstructingObjectParser.constructorArg(), ModelSnapshot.PARSER, MODEL); - } - - public static RevertModelSnapshotResponse fromXContent(XContentParser parser) throws IOException { - return PARSER.parse(parser, null); - } - - public RevertModelSnapshotResponse(ModelSnapshot.Builder modelSnapshot) { - this.model = modelSnapshot.build(); - } - - private final ModelSnapshot model; - - /** - * Get full information about the reverted model snapshot - * @return the reverted model snapshot. 
- */ - public ModelSnapshot getModel() { - return model; - } - - @Override - public int hashCode() { - return Objects.hash(model); - } - - @Override - public boolean equals(Object obj) { - if (obj == null) { - return false; - } - if (getClass() != obj.getClass()) { - return false; - } - RevertModelSnapshotResponse other = (RevertModelSnapshotResponse) obj; - return Objects.equals(model, other.model); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - if (model != null) { - builder.field(MODEL.getPreferredName(), model); - } - builder.endObject(); - return builder; - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/SetUpgradeModeRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/SetUpgradeModeRequest.java deleted file mode 100644 index 7e03117fd13d4..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/SetUpgradeModeRequest.java +++ /dev/null @@ -1,75 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.Validatable; -import org.elasticsearch.core.TimeValue; -import org.elasticsearch.xcontent.ParseField; - -import java.util.Objects; - -/** - * Sets ML into upgrade_mode - */ -public class SetUpgradeModeRequest implements Validatable { - - public static final ParseField ENABLED = new ParseField("enabled"); - public static final ParseField TIMEOUT = new ParseField("timeout"); - - private boolean enabled; - private TimeValue timeout; - - /** - * Create a new request - * - * @param enabled whether to enable `upgrade_mode` or not - */ - public SetUpgradeModeRequest(boolean enabled) { - this.enabled = enabled; - } - - public boolean isEnabled() { - return enabled; - } - - public void setEnabled(boolean enabled) { - this.enabled = enabled; - } - - public TimeValue getTimeout() { - return timeout; - } - - /** - * How long to wait for the request to be completed - * - * @param timeout default value of 30 seconds - */ - public void setTimeout(TimeValue timeout) { - this.timeout = timeout; - } - - @Override - public int hashCode() { - return Objects.hash(enabled, timeout); - } - - @Override - public boolean equals(Object other) { - if (this == other) { - return true; - } - - if (other == null || getClass() != other.getClass()) { - return false; - } - - SetUpgradeModeRequest that = (SetUpgradeModeRequest) other; - return Objects.equals(enabled, that.enabled) && Objects.equals(timeout, that.timeout); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/StartDataFrameAnalyticsRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/StartDataFrameAnalyticsRequest.java deleted file mode 100644 index 0bb09846e7047..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/StartDataFrameAnalyticsRequest.java +++ /dev/null @@ -1,62 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. 
Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-
-package org.elasticsearch.client.ml;
-
-import org.elasticsearch.client.Validatable;
-import org.elasticsearch.client.ValidationException;
-import org.elasticsearch.core.Nullable;
-import org.elasticsearch.core.TimeValue;
-
-import java.util.Objects;
-import java.util.Optional;
-
-public class StartDataFrameAnalyticsRequest implements Validatable {
-
-    private final String id;
-    private TimeValue timeout;
-
-    public StartDataFrameAnalyticsRequest(String id) {
-        this.id = id;
-    }
-
-    public String getId() {
-        return id;
-    }
-
-    public TimeValue getTimeout() {
-        return timeout;
-    }
-
-    public StartDataFrameAnalyticsRequest setTimeout(@Nullable TimeValue timeout) {
-        this.timeout = timeout;
-        return this;
-    }
-
-    @Override
-    public Optional<ValidationException> validate() {
-        if (id == null) {
-            return Optional.of(ValidationException.withError("data frame analytics id must not be null"));
-        }
-        return Optional.empty();
-    }
-
-    @Override
-    public boolean equals(Object o) {
-        if (this == o) return true;
-        if (o == null || getClass() != o.getClass()) return false;
-
-        StartDataFrameAnalyticsRequest other = (StartDataFrameAnalyticsRequest) o;
-        return Objects.equals(id, other.id) && Objects.equals(timeout, other.timeout);
-    }
-
-    @Override
-    public int hashCode() {
-        return Objects.hash(id, timeout);
-    }
-}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/StartDataFrameAnalyticsResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/StartDataFrameAnalyticsResponse.java
deleted file mode 100644
index a158ad9eae705..0000000000000
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/StartDataFrameAnalyticsResponse.java
+++ /dev/null
@@ -1,85 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-package org.elasticsearch.client.ml;
-
-import org.elasticsearch.action.support.master.AcknowledgedResponse;
-import org.elasticsearch.xcontent.ConstructingObjectParser;
-import org.elasticsearch.xcontent.ParseField;
-import org.elasticsearch.xcontent.XContentBuilder;
-import org.elasticsearch.xcontent.XContentParser;
-
-import java.io.IOException;
-import java.util.Objects;
-
-/**
- * Response indicating if the Machine Learning Datafeed is now started or not
- */
-public class StartDataFrameAnalyticsResponse extends AcknowledgedResponse {
-
-    private static final ParseField NODE = new ParseField("node");
-
-    public static final ConstructingObjectParser<StartDataFrameAnalyticsResponse, Void> PARSER = new ConstructingObjectParser<>(
-        "start_data_frame_analytics_response",
-        true,
-        (a) -> new StartDataFrameAnalyticsResponse((Boolean) a[0], (String) a[1])
-    );
-
-    static {
-        declareAcknowledgedField(PARSER);
-        PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), NODE);
-    }
-
-    private final String node;
-
-    public StartDataFrameAnalyticsResponse(boolean acknowledged, String node) {
-        super(acknowledged);
-        this.node = node;
-    }
-
-    public static StartDataFrameAnalyticsResponse fromXContent(XContentParser parser) throws IOException {
-        return PARSER.parse(parser, null);
-    }
-
-    /**
-     * The node that the job was assigned to
-     *
-     * @return The ID of a node if the job was assigned to a node. If an empty string is returned
-     * it means the job was allowed to open lazily and has not yet been assigned to a node.
-     * If null is returned it means the server version is too old to return node
-     * information.
-     */
-    public String getNode() {
-        return node;
-    }
-
-    @Override
-    public boolean equals(Object other) {
-        if (this == other) {
-            return true;
-        }
-
-        if (other == null || getClass() != other.getClass()) {
-            return false;
-        }
-
-        StartDataFrameAnalyticsResponse that = (StartDataFrameAnalyticsResponse) other;
-        return isAcknowledged() == that.isAcknowledged() && Objects.equals(node, that.node);
-    }
-
-    @Override
-    public int hashCode() {
-        return Objects.hash(isAcknowledged(), node);
-    }
-
-    @Override
-    public void addCustomFields(XContentBuilder builder, Params params) throws IOException {
-        if (node != null) {
-            builder.field(NODE.getPreferredName(), node);
-        }
-    }
-}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/StartDatafeedRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/StartDatafeedRequest.java
deleted file mode 100644
index 48a9d41192215..0000000000000
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/StartDatafeedRequest.java
+++ /dev/null
@@ -1,144 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.Validatable; -import org.elasticsearch.client.ml.datafeed.DatafeedConfig; -import org.elasticsearch.core.TimeValue; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Objects; - -/** - * Request to start a Datafeed - */ -public class StartDatafeedRequest implements Validatable, ToXContentObject { - - public static final ParseField START = new ParseField("start"); - public static final ParseField END = new ParseField("end"); - public static final ParseField TIMEOUT = new ParseField("timeout"); - - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "start_datafeed_request", - a -> new StartDatafeedRequest((String) a[0]) - ); - - static { - PARSER.declareString(ConstructingObjectParser.constructorArg(), DatafeedConfig.ID); - PARSER.declareString(StartDatafeedRequest::setStart, START); - PARSER.declareString(StartDatafeedRequest::setEnd, END); - PARSER.declareString((params, val) -> params.setTimeout(TimeValue.parseTimeValue(val, TIMEOUT.getPreferredName())), TIMEOUT); - } - - private final String datafeedId; - private String start; - private String end; - private TimeValue timeout; - - /** - * Create a new StartDatafeedRequest for the given DatafeedId - * - * @param datafeedId non-null existing Datafeed ID - */ - public StartDatafeedRequest(String datafeedId) { - this.datafeedId = Objects.requireNonNull(datafeedId, "[datafeed_id] must not be null"); - } - - public String getDatafeedId() { - return datafeedId; - } - - public String getStart() { - return start; - } - - /** - * The time that the datafeed should begin. This value is inclusive. - * - * If you specify a start value that is earlier than the timestamp of the latest processed record, - * the datafeed continues from 1 millisecond after the timestamp of the latest processed record. - * - * If you do not specify a start time and the datafeed is associated with a new job, - * the analysis starts from the earliest time for which data is available. - * - * @param start String representation of a timestamp; may be an epoch seconds, epoch millis or an ISO 8601 string - */ - public void setStart(String start) { - this.start = start; - } - - public String getEnd() { - return end; - } - - /** - * The time that the datafeed should end. This value is exclusive. - * If you do not specify an end time, the datafeed runs continuously. - * - * @param end String representation of a timestamp; may be an epoch seconds, epoch millis or an ISO 8601 string - */ - public void setEnd(String end) { - this.end = end; - } - - public TimeValue getTimeout() { - return timeout; - } - - /** - * Indicates how long to wait for the cluster to respond to the request. 
- * - * @param timeout TimeValue for how long to wait for a response from the cluster - */ - public void setTimeout(TimeValue timeout) { - this.timeout = timeout; - } - - @Override - public int hashCode() { - return Objects.hash(datafeedId, start, end, timeout); - } - - @Override - public boolean equals(Object obj) { - if (this == obj) { - return true; - } - - if (obj == null || obj.getClass() != getClass()) { - return false; - } - - StartDatafeedRequest other = (StartDatafeedRequest) obj; - return Objects.equals(datafeedId, other.datafeedId) - && Objects.equals(start, other.start) - && Objects.equals(end, other.end) - && Objects.equals(timeout, other.timeout); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(DatafeedConfig.ID.getPreferredName(), datafeedId); - if (start != null) { - builder.field(START.getPreferredName(), start); - } - if (end != null) { - builder.field(END.getPreferredName(), end); - } - if (timeout != null) { - builder.field(TIMEOUT.getPreferredName(), timeout.getStringRep()); - } - builder.endObject(); - return builder; - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/StartDatafeedResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/StartDatafeedResponse.java deleted file mode 100644 index 25417797bb6ba..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/StartDatafeedResponse.java +++ /dev/null @@ -1,100 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Objects; - -/** - * Response indicating if the Machine Learning Datafeed is now started or not - */ -public class StartDatafeedResponse implements ToXContentObject { - - private static final ParseField STARTED = new ParseField("started"); - private static final ParseField NODE = new ParseField("node"); - - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "start_datafeed_response", - true, - (a) -> new StartDatafeedResponse((Boolean) a[0], (String) a[1]) - ); - - static { - PARSER.declareBoolean(ConstructingObjectParser.constructorArg(), STARTED); - PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), NODE); - } - - private final boolean started; - private final String node; - - public StartDatafeedResponse(boolean started, String node) { - this.started = started; - this.node = node; - } - - public static StartDatafeedResponse fromXContent(XContentParser parser) throws IOException { - return PARSER.parse(parser, null); - } - - /** - * Has the Datafeed started or not - * - * @return boolean value indicating the Datafeed started status - */ - public boolean isStarted() { - return started; - } - - /** - * The node that the datafeed was assigned to - * - * @return The ID of a node if the datafeed was assigned to a node. If an empty string is returned - * it means the datafeed was allowed to open lazily and has not yet been assigned to a node. - * If null is returned it means the server version is too old to return node - * information. - */ - public String getNode() { - return node; - } - - @Override - public boolean equals(Object other) { - if (this == other) { - return true; - } - - if (other == null || getClass() != other.getClass()) { - return false; - } - - StartDatafeedResponse that = (StartDatafeedResponse) other; - return started == that.started && Objects.equals(node, that.node); - } - - @Override - public int hashCode() { - return Objects.hash(started, node); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(STARTED.getPreferredName(), started); - if (node != null) { - builder.field(NODE.getPreferredName(), node); - } - builder.endObject(); - return builder; - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/StopDataFrameAnalyticsRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/StopDataFrameAnalyticsRequest.java deleted file mode 100644 index c8263bed50fac..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/StopDataFrameAnalyticsRequest.java +++ /dev/null @@ -1,89 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
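And the matching response side, as a sketch with illustrative handling:

    import org.elasticsearch.client.ml.StartDatafeedResponse;

    class StartDatafeedResponseExample {
        static void handle(StartDatafeedResponse response) {
            if (response.isStarted()) {
                // empty string: started lazily, no node assigned yet; null: server too old to report
                System.out.println("datafeed running on node [" + response.getNode() + "]");
            }
        }
    }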
- */ - -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.Validatable; -import org.elasticsearch.client.ValidationException; -import org.elasticsearch.core.Nullable; -import org.elasticsearch.core.TimeValue; -import org.elasticsearch.xcontent.ParseField; - -import java.util.Objects; -import java.util.Optional; - -public class StopDataFrameAnalyticsRequest implements Validatable { - - public static final ParseField ALLOW_NO_MATCH = new ParseField("allow_no_match"); - public static final ParseField FORCE = new ParseField("force"); - - private final String id; - private Boolean allowNoMatch; - private Boolean force; - private TimeValue timeout; - - public StopDataFrameAnalyticsRequest(String id) { - this.id = id; - } - - public String getId() { - return id; - } - - public TimeValue getTimeout() { - return timeout; - } - - public StopDataFrameAnalyticsRequest setTimeout(@Nullable TimeValue timeout) { - this.timeout = timeout; - return this; - } - - public Boolean getAllowNoMatch() { - return allowNoMatch; - } - - public StopDataFrameAnalyticsRequest setAllowNoMatch(boolean allowNoMatch) { - this.allowNoMatch = allowNoMatch; - return this; - } - - public Boolean getForce() { - return force; - } - - public StopDataFrameAnalyticsRequest setForce(boolean force) { - this.force = force; - return this; - } - - @Override - public Optional validate() { - if (id == null) { - return Optional.of(ValidationException.withError("data frame analytics id must not be null")); - } - return Optional.empty(); - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - - StopDataFrameAnalyticsRequest other = (StopDataFrameAnalyticsRequest) o; - return Objects.equals(id, other.id) - && Objects.equals(timeout, other.timeout) - && Objects.equals(allowNoMatch, other.allowNoMatch) - && Objects.equals(force, other.force); - } - - @Override - public int hashCode() { - return Objects.hash(id, timeout, allowNoMatch, force); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/StopDataFrameAnalyticsResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/StopDataFrameAnalyticsResponse.java deleted file mode 100644 index 9c4dc1d67be5c..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/StopDataFrameAnalyticsResponse.java +++ /dev/null @@ -1,73 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
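A minimal sketch of the removed stop request for analytics jobs; the id is a placeholder and the setters chain because each returns this:

    import org.elasticsearch.client.ml.StopDataFrameAnalyticsRequest;
    import org.elasticsearch.core.TimeValue;

    class StopDataFrameAnalyticsExample {
        static StopDataFrameAnalyticsRequest build() {
            return new StopDataFrameAnalyticsRequest("my-analytics") // placeholder id
                .setAllowNoMatch(true)  // don't error when a wildcard matches no jobs
                .setForce(false)        // set true only to force-stop a stuck job
                .setTimeout(TimeValue.timeValueMinutes(1));
        }
    }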
- */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Objects; - -/** - * Response indicating if the Machine Learning Data Frame Analytics is now stopped or not - */ -public class StopDataFrameAnalyticsResponse implements ToXContentObject { - - private static final ParseField STOPPED = new ParseField("stopped"); - - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "stop_data_frame_analytics_response", - true, - args -> new StopDataFrameAnalyticsResponse((Boolean) args[0]) - ); - - static { - PARSER.declareBoolean(ConstructingObjectParser.constructorArg(), STOPPED); - } - - public static StopDataFrameAnalyticsResponse fromXContent(XContentParser parser) throws IOException { - return PARSER.parse(parser, null); - } - - private final boolean stopped; - - public StopDataFrameAnalyticsResponse(boolean stopped) { - this.stopped = stopped; - } - - /** - * Has the Data Frame Analytics stopped or not - * - * @return boolean value indicating the Data Frame Analytics stopped status - */ - public boolean isStopped() { - return stopped; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - - StopDataFrameAnalyticsResponse other = (StopDataFrameAnalyticsResponse) o; - return stopped == other.stopped; - } - - @Override - public int hashCode() { - return Objects.hash(stopped); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - return builder.startObject().field(STOPPED.getPreferredName(), stopped).endObject(); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/StopDatafeedRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/StopDatafeedRequest.java deleted file mode 100644 index 9c62b0a4d2bf4..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/StopDatafeedRequest.java +++ /dev/null @@ -1,182 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.Validatable; -import org.elasticsearch.client.ml.datafeed.DatafeedConfig; -import org.elasticsearch.common.Strings; -import org.elasticsearch.core.TimeValue; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.security.InvalidParameterException; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; -import java.util.Objects; - -/** - * Request to stop Machine Learning Datafeeds - */ -public class StopDatafeedRequest implements Validatable, ToXContentObject { - - public static final ParseField TIMEOUT = new ParseField("timeout"); - public static final ParseField FORCE = new ParseField("force"); - public static final ParseField ALLOW_NO_MATCH = new ParseField("allow_no_match"); - - @SuppressWarnings("unchecked") - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "stop_datafeed_request", - a -> new StopDatafeedRequest((List) a[0]) - ); - - static { - PARSER.declareField( - ConstructingObjectParser.constructorArg(), - p -> Arrays.asList(Strings.commaDelimitedListToStringArray(p.text())), - DatafeedConfig.ID, - ObjectParser.ValueType.STRING_ARRAY - ); - PARSER.declareString((obj, val) -> obj.setTimeout(TimeValue.parseTimeValue(val, TIMEOUT.getPreferredName())), TIMEOUT); - PARSER.declareBoolean(StopDatafeedRequest::setForce, FORCE); - PARSER.declareBoolean(StopDatafeedRequest::setAllowNoMatch, ALLOW_NO_MATCH); - } - - private static final String ALL_DATAFEEDS = "_all"; - - private final List datafeedIds; - private TimeValue timeout; - private Boolean force; - private Boolean allowNoMatch; - - /** - * Explicitly stop all datafeeds - * - * @return a {@link StopDatafeedRequest} for all existing datafeeds - */ - public static StopDatafeedRequest stopAllDatafeedsRequest() { - return new StopDatafeedRequest(ALL_DATAFEEDS); - } - - StopDatafeedRequest(List datafeedIds) { - if (datafeedIds.isEmpty()) { - throw new InvalidParameterException("datafeedIds must not be empty"); - } - if (datafeedIds.stream().anyMatch(Objects::isNull)) { - throw new NullPointerException("datafeedIds must not contain null values"); - } - this.datafeedIds = new ArrayList<>(datafeedIds); - } - - /** - * Close the specified Datafeeds via their unique datafeedIds - * - * @param datafeedIds must be non-null and non-empty and each datafeedId must be non-null - */ - public StopDatafeedRequest(String... datafeedIds) { - this(Arrays.asList(datafeedIds)); - } - - /** - * All the datafeedIds to be stopped - */ - public List getDatafeedIds() { - return datafeedIds; - } - - public TimeValue getTimeout() { - return timeout; - } - - /** - * How long to wait for the stop request to complete before timing out. - * - * @param timeout Default value: 30 minutes - */ - public void setTimeout(TimeValue timeout) { - this.timeout = timeout; - } - - public Boolean getForce() { - return force; - } - - /** - * Should the stopping be forced. - * - * Use to forcefully stop a datafeed - * - * @param force When {@code true} forcefully stop the datafeed. 
Defaults to {@code false} - */ - public void setForce(boolean force) { - this.force = force; - } - - public Boolean getAllowNoMatch() { - return this.allowNoMatch; - } - - /** - * Whether to ignore if a wildcard expression matches no datafeeds. - * - * This includes {@code _all} string. - * - * @param allowNoMatch When {@code true} ignore if wildcard or {@code _all} matches no datafeeds. Defaults to {@code true} - */ - public void setAllowNoMatch(boolean allowNoMatch) { - this.allowNoMatch = allowNoMatch; - } - - @Override - public int hashCode() { - return Objects.hash(datafeedIds, timeout, force, allowNoMatch); - } - - @Override - public boolean equals(Object other) { - if (this == other) { - return true; - } - - if (other == null || getClass() != other.getClass()) { - return false; - } - - StopDatafeedRequest that = (StopDatafeedRequest) other; - return Objects.equals(datafeedIds, that.datafeedIds) - && Objects.equals(timeout, that.timeout) - && Objects.equals(force, that.force) - && Objects.equals(allowNoMatch, that.allowNoMatch); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(DatafeedConfig.ID.getPreferredName(), Strings.collectionToCommaDelimitedString(datafeedIds)); - if (timeout != null) { - builder.field(TIMEOUT.getPreferredName(), timeout.getStringRep()); - } - if (force != null) { - builder.field(FORCE.getPreferredName(), force); - } - if (allowNoMatch != null) { - builder.field(ALLOW_NO_MATCH.getPreferredName(), allowNoMatch); - } - builder.endObject(); - return builder; - } - - @Override - public String toString() { - return Strings.toString(this); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/StopDatafeedResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/StopDatafeedResponse.java deleted file mode 100644 index 864b9ea6d4127..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/StopDatafeedResponse.java +++ /dev/null @@ -1,81 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
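For reference, a sketch of the removed StopDatafeedRequest, using the stop-all factory; explicit ids are shown in the comment:

    import org.elasticsearch.client.ml.StopDatafeedRequest;
    import org.elasticsearch.core.TimeValue;

    class StopDatafeedExample {
        static StopDatafeedRequest buildAll() {
            // explicit ids work too: new StopDatafeedRequest("datafeed-1", "datafeed-2")
            StopDatafeedRequest request = StopDatafeedRequest.stopAllDatafeedsRequest();
            request.setTimeout(TimeValue.timeValueMinutes(5)); // server default is 30 minutes
            request.setAllowNoMatch(true); // covers the _all case when nothing is running
            return request;
        }
    }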
- */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Objects; - -/** - * Response indicating if the Machine Learning Datafeed is now stopped or not - */ -public class StopDatafeedResponse implements ToXContentObject { - - private static final ParseField STOPPED = new ParseField("stopped"); - - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "stop_datafeed_response", - true, - (a) -> new StopDatafeedResponse((Boolean) a[0]) - ); - - static { - PARSER.declareBoolean(ConstructingObjectParser.constructorArg(), STOPPED); - } - - private final boolean stopped; - - public StopDatafeedResponse(boolean stopped) { - this.stopped = stopped; - } - - public static StopDatafeedResponse fromXContent(XContentParser parser) throws IOException { - return PARSER.parse(parser, null); - } - - /** - * Has the Datafeed stopped or not - * - * @return boolean value indicating the Datafeed stopped status - */ - public boolean isStopped() { - return stopped; - } - - @Override - public boolean equals(Object other) { - if (this == other) { - return true; - } - - if (other == null || getClass() != other.getClass()) { - return false; - } - - StopDatafeedResponse that = (StopDatafeedResponse) other; - return isStopped() == that.isStopped(); - } - - @Override - public int hashCode() { - return Objects.hash(isStopped()); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(STOPPED.getPreferredName(), stopped); - builder.endObject(); - return builder; - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/UpdateDataFrameAnalyticsRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/UpdateDataFrameAnalyticsRequest.java deleted file mode 100644 index f14d4b75687bd..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/UpdateDataFrameAnalyticsRequest.java +++ /dev/null @@ -1,65 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ - -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.Validatable; -import org.elasticsearch.client.ValidationException; -import org.elasticsearch.client.ml.dataframe.DataFrameAnalyticsConfigUpdate; -import org.elasticsearch.common.Strings; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Objects; -import java.util.Optional; - -public class UpdateDataFrameAnalyticsRequest implements ToXContentObject, Validatable { - - private final DataFrameAnalyticsConfigUpdate update; - - public UpdateDataFrameAnalyticsRequest(DataFrameAnalyticsConfigUpdate update) { - this.update = update; - } - - public DataFrameAnalyticsConfigUpdate getUpdate() { - return update; - } - - @Override - public Optional validate() { - if (update == null) { - return Optional.of(ValidationException.withError("update requires a non-null data frame analytics config update")); - } - return Optional.empty(); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - return update.toXContent(builder, params); - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - - UpdateDataFrameAnalyticsRequest other = (UpdateDataFrameAnalyticsRequest) o; - return Objects.equals(update, other.update); - } - - @Override - public int hashCode() { - return Objects.hash(update); - } - - @Override - public String toString() { - return Strings.toString(this); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/UpdateDatafeedRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/UpdateDatafeedRequest.java deleted file mode 100644 index d2de264c75411..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/UpdateDatafeedRequest.java +++ /dev/null @@ -1,64 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
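A sketch of the removed update request, assuming the accompanying DataFrameAnalyticsConfigUpdate builder API defined elsewhere in this package; the id and description are placeholders:

    import org.elasticsearch.client.ml.UpdateDataFrameAnalyticsRequest;
    import org.elasticsearch.client.ml.dataframe.DataFrameAnalyticsConfigUpdate;

    class UpdateDataFrameAnalyticsExample {
        static UpdateDataFrameAnalyticsRequest build() {
            DataFrameAnalyticsConfigUpdate update = DataFrameAnalyticsConfigUpdate.builder() // assumed builder API
                .setId("my-analytics")                   // placeholder id; identifies the config to change
                .setDescription("re-labelled analytics") // only fields that are set get updated
                .build();
            return new UpdateDataFrameAnalyticsRequest(update);
        }
    }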
- */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.Validatable; -import org.elasticsearch.client.ml.datafeed.DatafeedUpdate; -import org.elasticsearch.common.Strings; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Objects; - -/** - * Requests an update to a {@link org.elasticsearch.client.ml.datafeed.DatafeedConfig} with the passed {@link DatafeedUpdate} - * settings - */ -public class UpdateDatafeedRequest implements Validatable, ToXContentObject { - - private final DatafeedUpdate update; - - public UpdateDatafeedRequest(DatafeedUpdate update) { - this.update = update; - } - - public DatafeedUpdate getDatafeedUpdate() { - return update; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - return update.toXContent(builder, params); - } - - @Override - public boolean equals(Object o) { - if (this == o) { - return true; - } - - if (o == null || getClass() != o.getClass()) { - return false; - } - - UpdateDatafeedRequest that = (UpdateDatafeedRequest) o; - return Objects.equals(update, that.update); - } - - @Override - public int hashCode() { - return Objects.hash(update); - } - - @Override - public final String toString() { - return Strings.toString(this); - } - -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/UpdateFilterRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/UpdateFilterRequest.java deleted file mode 100644 index 3a76432715067..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/UpdateFilterRequest.java +++ /dev/null @@ -1,140 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.Validatable; -import org.elasticsearch.client.ml.job.config.MlFilter; -import org.elasticsearch.common.Strings; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Collection; -import java.util.Objects; -import java.util.SortedSet; -import java.util.TreeSet; - -/** - * Updates an existing {@link MlFilter} configuration - */ -public class UpdateFilterRequest implements Validatable, ToXContentObject { - - public static final ParseField ADD_ITEMS = new ParseField("add_items"); - public static final ParseField REMOVE_ITEMS = new ParseField("remove_items"); - - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "update_filter_request", - (a) -> new UpdateFilterRequest((String) a[0]) - ); - - static { - PARSER.declareString(ConstructingObjectParser.constructorArg(), MlFilter.ID); - PARSER.declareStringOrNull(UpdateFilterRequest::setDescription, MlFilter.DESCRIPTION); - PARSER.declareStringArray(UpdateFilterRequest::setAddItems, ADD_ITEMS); - PARSER.declareStringArray(UpdateFilterRequest::setRemoveItems, REMOVE_ITEMS); - } - - private String filterId; - private String description; - private SortedSet addItems; - private SortedSet removeItems; - - /** - * Construct a new request referencing a non-null, existing filter_id - * @param filterId Id referencing the filter to update - */ - public UpdateFilterRequest(String filterId) { - this.filterId = Objects.requireNonNull(filterId, "[" + MlFilter.ID.getPreferredName() + "] must not be null"); - } - - public String getFilterId() { - return filterId; - } - - public String getDescription() { - return description; - } - - /** - * The new description of the filter - * @param description the updated filter description - */ - public void setDescription(String description) { - this.description = description; - } - - public SortedSet getAddItems() { - return addItems; - } - - /** - * The collection of items to add to the filter - * @param addItems non-null items to add to the filter, defaults to empty array - */ - public void setAddItems(Collection addItems) { - this.addItems = new TreeSet<>(Objects.requireNonNull(addItems, "[" + ADD_ITEMS.getPreferredName() + "] must not be null")); - } - - public SortedSet getRemoveItems() { - return removeItems; - } - - /** - * The collection of items to remove from the filter - * @param removeItems non-null items to remove from the filter, defaults to empty array - */ - public void setRemoveItems(Collection removeItems) { - this.removeItems = new TreeSet<>(Objects.requireNonNull(removeItems, "[" + REMOVE_ITEMS.getPreferredName() + "] must not be null")); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(MlFilter.ID.getPreferredName(), filterId); - if (description != null) { - builder.field(MlFilter.DESCRIPTION.getPreferredName(), description); - } - if (addItems != null) { - builder.stringListField(ADD_ITEMS.getPreferredName(), addItems); - } - if (removeItems != null) { - builder.stringListField(REMOVE_ITEMS.getPreferredName(), removeItems); - } - builder.endObject(); - return builder; - } - - @Override - public int hashCode() { - return Objects.hash(filterId, description, addItems, removeItems); - 
} - - @Override - public boolean equals(Object obj) { - if (obj == this) { - return true; - } - if (obj == null || getClass() != obj.getClass()) { - return false; - } - - UpdateFilterRequest other = (UpdateFilterRequest) obj; - return Objects.equals(filterId, other.filterId) - && Objects.equals(description, other.description) - && Objects.equals(addItems, other.addItems) - && Objects.equals(removeItems, other.removeItems); - } - - @Override - public final String toString() { - return Strings.toString(this); - } - -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/UpdateJobRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/UpdateJobRequest.java deleted file mode 100644 index aa46d5677c77d..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/UpdateJobRequest.java +++ /dev/null @@ -1,64 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.Validatable; -import org.elasticsearch.client.ml.job.config.JobUpdate; -import org.elasticsearch.common.Strings; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Objects; - -/** - * Updates a {@link org.elasticsearch.client.ml.job.config.Job} with the passed {@link JobUpdate} - * settings - */ -public class UpdateJobRequest implements Validatable, ToXContentObject { - - private final JobUpdate update; - - public UpdateJobRequest(JobUpdate update) { - this.update = update; - } - - public JobUpdate getJobUpdate() { - return update; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - return update.toXContent(builder, params); - } - - @Override - public boolean equals(Object o) { - if (this == o) { - return true; - } - - if (o == null || getClass() != o.getClass()) { - return false; - } - - UpdateJobRequest that = (UpdateJobRequest) o; - return Objects.equals(update, that.update); - } - - @Override - public int hashCode() { - return Objects.hash(update); - } - - @Override - public final String toString() { - return Strings.toString(this); - } - -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/UpdateModelSnapshotRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/UpdateModelSnapshotRequest.java deleted file mode 100644 index a06880369f6fa..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/UpdateModelSnapshotRequest.java +++ /dev/null @@ -1,118 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
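For reference, a minimal sketch of the removed UpdateFilterRequest; the filter id and items are placeholders:

    import org.elasticsearch.client.ml.UpdateFilterRequest;

    import java.util.Arrays;

    class UpdateFilterExample {
        static UpdateFilterRequest build() {
            UpdateFilterRequest request = new UpdateFilterRequest("safe_domains"); // placeholder filter_id
            request.setDescription("Updated safe domains");
            request.setAddItems(Arrays.asList("*.elastic.co"));
            request.setRemoveItems(Arrays.asList("www.example.com"));
            return request;
        }
    }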
- */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.Validatable; -import org.elasticsearch.client.ml.job.config.Job; -import org.elasticsearch.client.ml.job.process.ModelSnapshot; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Objects; - -/** - * A request to update information about an existing model snapshot for a given job - */ -public class UpdateModelSnapshotRequest implements Validatable, ToXContentObject { - - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "update_model_snapshot_request", - a -> new UpdateModelSnapshotRequest((String) a[0], (String) a[1]) - ); - - static { - PARSER.declareString(ConstructingObjectParser.constructorArg(), Job.ID); - PARSER.declareString(ConstructingObjectParser.constructorArg(), ModelSnapshot.SNAPSHOT_ID); - PARSER.declareStringOrNull(UpdateModelSnapshotRequest::setDescription, ModelSnapshot.DESCRIPTION); - PARSER.declareBoolean(UpdateModelSnapshotRequest::setRetain, ModelSnapshot.RETAIN); - } - - private final String jobId; - private String snapshotId; - private String description; - private Boolean retain; - - /** - * Constructs a request to update information for a snapshot of given job - * @param jobId id of the job from which to retrieve results - * @param snapshotId id of the snapshot from which to retrieve results - */ - public UpdateModelSnapshotRequest(String jobId, String snapshotId) { - this.jobId = Objects.requireNonNull(jobId, "[" + Job.ID + "] must not be null"); - this.snapshotId = Objects.requireNonNull(snapshotId, "[" + ModelSnapshot.SNAPSHOT_ID + "] must not be null"); - } - - public String getJobId() { - return jobId; - } - - public String getSnapshotId() { - return snapshotId; - } - - public String getDescription() { - return description; - } - - /** - * The new description of the snapshot. 
- * @param description the updated snapshot description - */ - public void setDescription(String description) { - this.description = description; - } - - public Boolean getRetain() { - return retain; - } - - /** - * The new value of the "retain" property of the snapshot - * @param retain the updated retain property - */ - public void setRetain(boolean retain) { - this.retain = retain; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(Job.ID.getPreferredName(), jobId); - builder.field(ModelSnapshot.SNAPSHOT_ID.getPreferredName(), snapshotId); - if (description != null) { - builder.field(ModelSnapshot.DESCRIPTION.getPreferredName(), description); - } - if (retain != null) { - builder.field(ModelSnapshot.RETAIN.getPreferredName(), retain); - } - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object obj) { - if (obj == null) { - return false; - } - if (getClass() != obj.getClass()) { - return false; - } - UpdateModelSnapshotRequest request = (UpdateModelSnapshotRequest) obj; - return Objects.equals(jobId, request.jobId) - && Objects.equals(snapshotId, request.snapshotId) - && Objects.equals(description, request.description) - && Objects.equals(retain, request.retain); - } - - @Override - public int hashCode() { - return Objects.hash(jobId, snapshotId, description, retain); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/UpdateModelSnapshotResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/UpdateModelSnapshotResponse.java deleted file mode 100644 index 82b73fe260aa9..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/UpdateModelSnapshotResponse.java +++ /dev/null @@ -1,97 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
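A sketch of the removed UpdateModelSnapshotRequest; job id and snapshot id are placeholder values:

    import org.elasticsearch.client.ml.UpdateModelSnapshotRequest;

    class UpdateModelSnapshotExample {
        static UpdateModelSnapshotRequest build() {
            // both ids are placeholders; both constructor arguments must be non-null
            UpdateModelSnapshotRequest request = new UpdateModelSnapshotRequest("my-job", "1541587919");
            request.setDescription("Snapshot kept for the audit trail");
            request.setRetain(true); // shield this snapshot from automatic cleanup
            return request;
        }
    }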
- */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.ml.job.process.ModelSnapshot; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContent; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Objects; - -/** - * A response acknowledging the update of information for an existing model snapshot for a given job - */ -public class UpdateModelSnapshotResponse implements ToXContentObject { - - private static final ParseField ACKNOWLEDGED = new ParseField("acknowledged"); - private static final ParseField MODEL = new ParseField("model"); - - public UpdateModelSnapshotResponse(boolean acknowledged, ModelSnapshot.Builder modelSnapshot) { - this.acknowledged = acknowledged; - this.model = modelSnapshot.build(); - } - - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "update_model_snapshot_response", - true, - a -> new UpdateModelSnapshotResponse((Boolean) a[0], ((ModelSnapshot.Builder) a[1])) - ); - - static { - PARSER.declareBoolean(ConstructingObjectParser.constructorArg(), ACKNOWLEDGED); - PARSER.declareObject(ConstructingObjectParser.constructorArg(), ModelSnapshot.PARSER, MODEL); - } - - public static UpdateModelSnapshotResponse fromXContent(XContentParser parser) throws IOException { - return PARSER.parse(parser, null); - } - - private final Boolean acknowledged; - private final ModelSnapshot model; - - /** - * Get the action acknowledgement - * @return a {@code boolean} that indicates whether the model snapshot was updated successfully. - */ - public Boolean getAcknowledged() { - return acknowledged; - } - - /** - * Get the updated snapshot of the model - * @return the updated model snapshot. - */ - public ModelSnapshot getModel() { - return model; - } - - @Override - public int hashCode() { - return Objects.hash(acknowledged, model); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { - builder.startObject(); - if (acknowledged != null) { - builder.field(ACKNOWLEDGED.getPreferredName(), acknowledged); - } - if (model != null) { - builder.field(MODEL.getPreferredName(), model); - } - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object obj) { - if (obj == null) { - return false; - } - if (getClass() != obj.getClass()) { - return false; - } - UpdateModelSnapshotResponse request = (UpdateModelSnapshotResponse) obj; - return Objects.equals(acknowledged, request.acknowledged) && Objects.equals(model, request.model); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/UpgradeJobModelSnapshotRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/UpgradeJobModelSnapshotRequest.java deleted file mode 100644 index cc1660ed4dc6b..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/UpgradeJobModelSnapshotRequest.java +++ /dev/null @@ -1,108 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.Validatable; -import org.elasticsearch.client.ml.job.config.Job; -import org.elasticsearch.core.TimeValue; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Objects; - -public class UpgradeJobModelSnapshotRequest implements Validatable, ToXContentObject { - - public static final ParseField SNAPSHOT_ID = new ParseField("snapshot_id"); - public static final ParseField TIMEOUT = new ParseField("timeout"); - public static final ParseField WAIT_FOR_COMPLETION = new ParseField("wait_for_completion"); - - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "upgrade_job_snapshot_request", - true, - a -> new UpgradeJobModelSnapshotRequest((String) a[0], (String) a[1], (String) a[2], (Boolean) a[3]) - ); - - static { - PARSER.declareString(ConstructingObjectParser.constructorArg(), Job.ID); - PARSER.declareString(ConstructingObjectParser.constructorArg(), SNAPSHOT_ID); - PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), TIMEOUT); - PARSER.declareBoolean(ConstructingObjectParser.optionalConstructorArg(), WAIT_FOR_COMPLETION); - } - - private final String jobId; - private final String snapshotId; - private final TimeValue timeout; - private final Boolean waitForCompletion; - - UpgradeJobModelSnapshotRequest(String jobId, String snapshotId, String timeout, Boolean waitForCompletion) { - this(jobId, snapshotId, timeout == null ? null : TimeValue.parseTimeValue(timeout, TIMEOUT.getPreferredName()), waitForCompletion); - } - - public UpgradeJobModelSnapshotRequest(String jobId, String snapshotId, TimeValue timeValue, Boolean waitForCompletion) { - this.jobId = Objects.requireNonNull(jobId, Job.ID.getPreferredName()); - this.snapshotId = Objects.requireNonNull(snapshotId, SNAPSHOT_ID.getPreferredName()); - this.timeout = timeValue; - this.waitForCompletion = waitForCompletion; - } - - public static UpgradeJobModelSnapshotRequest fromXContent(XContentParser parser) { - return PARSER.apply(parser, null); - } - - public String getJobId() { - return jobId; - } - - public String getSnapshotId() { - return snapshotId; - } - - public TimeValue getTimeout() { - return timeout; - } - - public Boolean getWaitForCompletion() { - return waitForCompletion; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - UpgradeJobModelSnapshotRequest request = (UpgradeJobModelSnapshotRequest) o; - return Objects.equals(jobId, request.jobId) - && Objects.equals(timeout, request.timeout) - && Objects.equals(waitForCompletion, request.waitForCompletion) - && Objects.equals(snapshotId, request.snapshotId); - } - - @Override - public int hashCode() { - return Objects.hash(jobId, snapshotId, timeout, waitForCompletion); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(Job.ID.getPreferredName(), jobId); - builder.field(SNAPSHOT_ID.getPreferredName(), snapshotId); - if (timeout != null) { - builder.field(TIMEOUT.getPreferredName(), timeout.getStringRep()); - } - if (waitForCompletion != null) { - builder.field(WAIT_FOR_COMPLETION.getPreferredName(), 
waitForCompletion); - } - builder.endObject(); - return builder; - } - -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/UpgradeJobModelSnapshotResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/UpgradeJobModelSnapshotResponse.java deleted file mode 100644 index b260bbaa5d22b..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/UpgradeJobModelSnapshotResponse.java +++ /dev/null @@ -1,89 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Objects; - -public class UpgradeJobModelSnapshotResponse implements ToXContentObject { - - private static final ParseField COMPLETED = new ParseField("completed"); - private static final ParseField NODE = new ParseField("node"); - - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "upgrade_job_snapshot_response", - true, - (a) -> new UpgradeJobModelSnapshotResponse((Boolean) a[0], (String) a[1]) - ); - - static { - PARSER.declareBoolean(ConstructingObjectParser.optionalConstructorArg(), COMPLETED); - PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), NODE); - } - - private final boolean completed; - private final String node; - - public UpgradeJobModelSnapshotResponse(Boolean opened, String node) { - this.completed = opened != null && opened; - this.node = node; - } - - public static UpgradeJobModelSnapshotResponse fromXContent(XContentParser parser) throws IOException { - return PARSER.parse(parser, null); - } - - public boolean isCompleted() { - return completed; - } - - /** - * The node that the job was assigned to - * - * @return The ID of a node if the job was assigned to a node. 
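For reference, a minimal sketch of the removed upgrade request; ids and the timeout are placeholders:

    import org.elasticsearch.client.ml.UpgradeJobModelSnapshotRequest;
    import org.elasticsearch.core.TimeValue;

    class UpgradeSnapshotExample {
        static UpgradeJobModelSnapshotRequest build() {
            // placeholder ids; job id and snapshot id must be non-null
            return new UpgradeJobModelSnapshotRequest(
                "my-job",
                "1541587919",
                TimeValue.timeValueMinutes(30), // timeout for the upgrade task
                true                            // block until the upgrade completes
            );
        }
    }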
- */
-    public String getNode() {
-        return node;
-    }
-
-    @Override
-    public boolean equals(Object other) {
-        if (this == other) {
-            return true;
-        }
-
-        if (other == null || getClass() != other.getClass()) {
-            return false;
-        }
-
-        UpgradeJobModelSnapshotResponse that = (UpgradeJobModelSnapshotResponse) other;
-        return completed == that.completed && Objects.equals(node, that.node);
-    }
-
-    @Override
-    public int hashCode() {
-        return Objects.hash(completed, node);
-    }
-
-    @Override
-    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
-        builder.startObject();
-        builder.field(COMPLETED.getPreferredName(), completed);
-        if (node != null) {
-            builder.field(NODE.getPreferredName(), node);
-        }
-        builder.endObject();
-        return builder;
-    }
-}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/calendars/Calendar.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/calendars/Calendar.java
deleted file mode 100644
index da7c0ac2cac52..0000000000000
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/calendars/Calendar.java
+++ /dev/null
@@ -1,106 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-package org.elasticsearch.client.ml.calendars;
-
-import org.elasticsearch.core.Nullable;
-import org.elasticsearch.xcontent.ConstructingObjectParser;
-import org.elasticsearch.xcontent.ParseField;
-import org.elasticsearch.xcontent.ToXContentObject;
-import org.elasticsearch.xcontent.XContentBuilder;
-
-import java.io.IOException;
-import java.util.Collections;
-import java.util.List;
-import java.util.Objects;
-
-/**
- * A simple calendar object for scheduled (special) events.
- * The calendar consists of a name and a list of job Ids or job groups;
- * the events are stored separately and reference the calendar.
- */ -public class Calendar implements ToXContentObject { - - public static final String CALENDAR_TYPE = "calendar"; - - public static final ParseField JOB_IDS = new ParseField("job_ids"); - public static final ParseField ID = new ParseField("calendar_id"); - public static final ParseField DESCRIPTION = new ParseField("description"); - - @SuppressWarnings("unchecked") - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - CALENDAR_TYPE, - true, - a -> new Calendar((String) a[0], (List) a[1], (String) a[2]) - ); - - static { - PARSER.declareString(ConstructingObjectParser.constructorArg(), ID); - PARSER.declareStringArray(ConstructingObjectParser.constructorArg(), JOB_IDS); - PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), DESCRIPTION); - } - - private final String id; - private final List jobIds; - private final String description; - - /** - * {@code jobIds} can be a mix of job groups and job Ids - * @param id The calendar Id - * @param jobIds List of job Ids or job groups - * @param description An optional description - */ - public Calendar(String id, List jobIds, @Nullable String description) { - this.id = Objects.requireNonNull(id, ID.getPreferredName() + " must not be null"); - this.jobIds = Collections.unmodifiableList(Objects.requireNonNull(jobIds, JOB_IDS.getPreferredName() + " must not be null")); - this.description = description; - } - - public String getId() { - return id; - } - - public List getJobIds() { - return jobIds; - } - - @Nullable - public String getDescription() { - return description; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(ID.getPreferredName(), id); - builder.stringListField(JOB_IDS.getPreferredName(), jobIds); - if (description != null) { - builder.field(DESCRIPTION.getPreferredName(), description); - } - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object obj) { - if (obj == this) { - return true; - } - - if (obj == null || getClass() != obj.getClass()) { - return false; - } - - Calendar other = (Calendar) obj; - return id.equals(other.id) && jobIds.equals(other.jobIds) && Objects.equals(description, other.description); - } - - @Override - public int hashCode() { - return Objects.hash(id, jobIds, description); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/calendars/ScheduledEvent.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/calendars/ScheduledEvent.java deleted file mode 100644 index 8aecc33d32b1a..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/calendars/ScheduledEvent.java +++ /dev/null @@ -1,123 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
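A sketch of constructing the removed Calendar object; the calendar id, job ids and description are placeholders:

    import org.elasticsearch.client.ml.calendars.Calendar;

    import java.util.Arrays;

    class CalendarExample {
        static Calendar build() {
            // ids are placeholders; the list may mix plain job ids and job groups
            return new Calendar("bank-holidays", Arrays.asList("my-job", "transaction-jobs"), "UK bank holidays");
        }
    }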
- */ -package org.elasticsearch.client.ml.calendars; - -import org.elasticsearch.client.common.TimeUtil; -import org.elasticsearch.core.Nullable; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Date; -import java.util.Objects; - -public class ScheduledEvent implements ToXContentObject { - - public static final ParseField DESCRIPTION = new ParseField("description"); - public static final ParseField START_TIME = new ParseField("start_time"); - public static final ParseField END_TIME = new ParseField("end_time"); - public static final ParseField EVENT_ID = new ParseField("event_id"); - public static final String SCHEDULED_EVENT_TYPE = "scheduled_event"; - - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - SCHEDULED_EVENT_TYPE, - true, - a -> new ScheduledEvent((String) a[0], (Date) a[1], (Date) a[2], (String) a[3], (String) a[4]) - ); - - static { - PARSER.declareString(ConstructingObjectParser.constructorArg(), DESCRIPTION); - PARSER.declareField( - ConstructingObjectParser.constructorArg(), - (p) -> TimeUtil.parseTimeField(p, START_TIME.getPreferredName()), - START_TIME, - ObjectParser.ValueType.VALUE - ); - PARSER.declareField( - ConstructingObjectParser.constructorArg(), - (p) -> TimeUtil.parseTimeField(p, END_TIME.getPreferredName()), - END_TIME, - ObjectParser.ValueType.VALUE - ); - PARSER.declareString(ConstructingObjectParser.constructorArg(), Calendar.ID); - PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), EVENT_ID); - } - - private final String description; - private final Date startTime; - private final Date endTime; - private final String calendarId; - private final String eventId; - - ScheduledEvent(String description, Date startTime, Date endTime, String calendarId, @Nullable String eventId) { - this.description = Objects.requireNonNull(description); - this.startTime = Objects.requireNonNull(startTime); - this.endTime = Objects.requireNonNull(endTime); - this.calendarId = Objects.requireNonNull(calendarId); - this.eventId = eventId; - } - - public String getDescription() { - return description; - } - - public Date getStartTime() { - return startTime; - } - - public Date getEndTime() { - return endTime; - } - - public String getCalendarId() { - return calendarId; - } - - public String getEventId() { - return eventId; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(DESCRIPTION.getPreferredName(), description); - builder.timeField(START_TIME.getPreferredName(), START_TIME.getPreferredName() + "_string", startTime.getTime()); - builder.timeField(END_TIME.getPreferredName(), END_TIME.getPreferredName() + "_string", endTime.getTime()); - builder.field(Calendar.ID.getPreferredName(), calendarId); - if (eventId != null) { - builder.field(EVENT_ID.getPreferredName(), eventId); - } - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object obj) { - if (obj == this) { - return true; - } - - if (obj == null || getClass() != obj.getClass()) { - return false; - } - - ScheduledEvent other = (ScheduledEvent) obj; - return Objects.equals(this.description, other.description) - && Objects.equals(this.startTime, other.startTime) - && 
Objects.equals(this.endTime, other.endTime) - && Objects.equals(this.calendarId, other.calendarId); - } - - @Override - public int hashCode() { - return Objects.hash(description, startTime, endTime, calendarId); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/datafeed/ChunkingConfig.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/datafeed/ChunkingConfig.java deleted file mode 100644 index 5f23f2478c070..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/datafeed/ChunkingConfig.java +++ /dev/null @@ -1,118 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.datafeed; - -import org.elasticsearch.core.Nullable; -import org.elasticsearch.core.TimeValue; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Locale; -import java.util.Objects; - -/** - * The description of how searches should be chunked. - */ -public class ChunkingConfig implements ToXContentObject { - - public static final ParseField MODE_FIELD = new ParseField("mode"); - public static final ParseField TIME_SPAN_FIELD = new ParseField("time_span"); - - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "chunking_config", - true, - a -> new ChunkingConfig((Mode) a[0], (TimeValue) a[1]) - ); - - static { - PARSER.declareString(ConstructingObjectParser.constructorArg(), Mode::fromString, MODE_FIELD); - PARSER.declareString( - ConstructingObjectParser.optionalConstructorArg(), - text -> TimeValue.parseTimeValue(text, TIME_SPAN_FIELD.getPreferredName()), - TIME_SPAN_FIELD - ); - - } - - private final Mode mode; - private final TimeValue timeSpan; - - ChunkingConfig(Mode mode, @Nullable TimeValue timeSpan) { - this.mode = Objects.requireNonNull(mode, MODE_FIELD.getPreferredName()); - this.timeSpan = timeSpan; - } - - @Nullable - public TimeValue getTimeSpan() { - return timeSpan; - } - - Mode getMode() { - return mode; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(MODE_FIELD.getPreferredName(), mode); - if (timeSpan != null) { - builder.field(TIME_SPAN_FIELD.getPreferredName(), timeSpan.getStringRep()); - } - builder.endObject(); - return builder; - } - - @Override - public int hashCode() { - return Objects.hash(mode, timeSpan); - } - - @Override - public boolean equals(Object obj) { - if (this == obj) { - return true; - } - - if (obj == null || getClass() != obj.getClass()) { - return false; - } - - ChunkingConfig other = (ChunkingConfig) obj; - return Objects.equals(this.mode, other.mode) && Objects.equals(this.timeSpan, other.timeSpan); - } - - public static ChunkingConfig newAuto() { - return new ChunkingConfig(Mode.AUTO, null); - } - - public static ChunkingConfig newOff() { - return new ChunkingConfig(Mode.OFF, null); - } - - public static ChunkingConfig newManual(TimeValue timeSpan) { - return new ChunkingConfig(Mode.MANUAL, timeSpan); - } 
-
-    public enum Mode {
-        AUTO,
-        MANUAL,
-        OFF;
-
-        public static Mode fromString(String value) {
-            return Mode.valueOf(value.toUpperCase(Locale.ROOT));
-        }
-
-        @Override
-        public String toString() {
-            return name().toLowerCase(Locale.ROOT);
-        }
-    }
-}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/datafeed/DatafeedConfig.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/datafeed/DatafeedConfig.java
deleted file mode 100644
index e1363239f4e44..0000000000000
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/datafeed/DatafeedConfig.java
+++ /dev/null
@@ -1,491 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-package org.elasticsearch.client.ml.datafeed;
-
-import org.elasticsearch.action.support.IndicesOptions;
-import org.elasticsearch.client.ml.job.config.Job;
-import org.elasticsearch.common.bytes.BytesArray;
-import org.elasticsearch.common.bytes.BytesReference;
-import org.elasticsearch.common.xcontent.XContentHelper;
-import org.elasticsearch.core.TimeValue;
-import org.elasticsearch.index.query.QueryBuilder;
-import org.elasticsearch.search.aggregations.AggregatorFactories;
-import org.elasticsearch.search.builder.SearchSourceBuilder;
-import org.elasticsearch.xcontent.ConstructingObjectParser;
-import org.elasticsearch.xcontent.ObjectParser;
-import org.elasticsearch.xcontent.ParseField;
-import org.elasticsearch.xcontent.ToXContentObject;
-import org.elasticsearch.xcontent.XContentBuilder;
-import org.elasticsearch.xcontent.XContentParser;
-import org.elasticsearch.xcontent.XContentType;
-import org.elasticsearch.xcontent.json.JsonXContent;
-
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.Comparator;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Objects;
-
-/**
- * The datafeed configuration object. It specifies which indices
- * to get the data from and offers parameters for customizing different
- * aspects of the process.
- */ -public class DatafeedConfig implements ToXContentObject { - - public static final ParseField ID = new ParseField("datafeed_id"); - public static final ParseField QUERY_DELAY = new ParseField("query_delay"); - public static final ParseField FREQUENCY = new ParseField("frequency"); - public static final ParseField INDEXES = new ParseField("indexes"); - public static final ParseField INDICES = new ParseField("indices"); - public static final ParseField QUERY = new ParseField("query"); - public static final ParseField SCROLL_SIZE = new ParseField("scroll_size"); - public static final ParseField AGGREGATIONS = new ParseField("aggregations"); - public static final ParseField SCRIPT_FIELDS = new ParseField("script_fields"); - public static final ParseField CHUNKING_CONFIG = new ParseField("chunking_config"); - public static final ParseField DELAYED_DATA_CHECK_CONFIG = new ParseField("delayed_data_check_config"); - public static final ParseField MAX_EMPTY_SEARCHES = new ParseField("max_empty_searches"); - public static final ParseField INDICES_OPTIONS = new ParseField("indices_options"); - - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "datafeed_config", - true, - a -> new Builder((String) a[0], (String) a[1]) - ); - - static { - PARSER.declareString(ConstructingObjectParser.constructorArg(), ID); - PARSER.declareString(ConstructingObjectParser.constructorArg(), Job.ID); - - PARSER.declareStringArray(Builder::setIndices, INDEXES); - PARSER.declareStringArray(Builder::setIndices, INDICES); - PARSER.declareString( - (builder, val) -> builder.setQueryDelay(TimeValue.parseTimeValue(val, QUERY_DELAY.getPreferredName())), - QUERY_DELAY - ); - PARSER.declareString( - (builder, val) -> builder.setFrequency(TimeValue.parseTimeValue(val, FREQUENCY.getPreferredName())), - FREQUENCY - ); - PARSER.declareField(Builder::setQuery, DatafeedConfig::parseBytes, QUERY, ObjectParser.ValueType.OBJECT); - PARSER.declareField(Builder::setAggregations, DatafeedConfig::parseBytes, AGGREGATIONS, ObjectParser.ValueType.OBJECT); - PARSER.declareObject(Builder::setScriptFields, (p, c) -> { - List parsedScriptFields = new ArrayList<>(); - while (p.nextToken() != XContentParser.Token.END_OBJECT) { - parsedScriptFields.add(new SearchSourceBuilder.ScriptField(p)); - } - return parsedScriptFields; - }, SCRIPT_FIELDS); - PARSER.declareInt(Builder::setScrollSize, SCROLL_SIZE); - PARSER.declareObject(Builder::setChunkingConfig, ChunkingConfig.PARSER, CHUNKING_CONFIG); - PARSER.declareObject(Builder::setDelayedDataCheckConfig, DelayedDataCheckConfig.PARSER, DELAYED_DATA_CHECK_CONFIG); - PARSER.declareInt(Builder::setMaxEmptySearches, MAX_EMPTY_SEARCHES); - PARSER.declareObject( - Builder::setIndicesOptions, - (p, c) -> IndicesOptions.fromMap(p.map(), new IndicesOptions(IndicesOptions.Option.NONE, IndicesOptions.WildcardStates.NONE)), - INDICES_OPTIONS - ); - PARSER.declareObject(Builder::setRuntimeMappings, (p, c) -> p.map(), SearchSourceBuilder.RUNTIME_MAPPINGS_FIELD); - } - - private static BytesReference parseBytes(XContentParser parser) throws IOException { - XContentBuilder contentBuilder = JsonXContent.contentBuilder(); - contentBuilder.generator().copyCurrentStructure(parser); - return BytesReference.bytes(contentBuilder); - } - - private final String id; - private final String jobId; - private final TimeValue queryDelay; - private final TimeValue frequency; - private final List indices; - private final BytesReference query; - private final BytesReference aggregations; - private 
final List scriptFields; - private final Integer scrollSize; - private final ChunkingConfig chunkingConfig; - private final DelayedDataCheckConfig delayedDataCheckConfig; - private final Integer maxEmptySearches; - private final IndicesOptions indicesOptions; - private final Map runtimeMappings; - - private DatafeedConfig( - String id, - String jobId, - TimeValue queryDelay, - TimeValue frequency, - List indices, - BytesReference query, - BytesReference aggregations, - List scriptFields, - Integer scrollSize, - ChunkingConfig chunkingConfig, - DelayedDataCheckConfig delayedDataCheckConfig, - Integer maxEmptySearches, - IndicesOptions indicesOptions, - Map runtimeMappings - ) { - this.id = id; - this.jobId = jobId; - this.queryDelay = queryDelay; - this.frequency = frequency; - this.indices = indices == null ? null : Collections.unmodifiableList(indices); - this.query = query; - this.aggregations = aggregations; - this.scriptFields = scriptFields == null ? null : Collections.unmodifiableList(scriptFields); - this.scrollSize = scrollSize; - this.chunkingConfig = chunkingConfig; - this.delayedDataCheckConfig = delayedDataCheckConfig; - this.maxEmptySearches = maxEmptySearches; - this.indicesOptions = indicesOptions; - this.runtimeMappings = Collections.unmodifiableMap(runtimeMappings); - } - - public String getId() { - return id; - } - - public String getJobId() { - return jobId; - } - - public TimeValue getQueryDelay() { - return queryDelay; - } - - public TimeValue getFrequency() { - return frequency; - } - - public List getIndices() { - return indices; - } - - public Integer getScrollSize() { - return scrollSize; - } - - public BytesReference getQuery() { - return query; - } - - public BytesReference getAggregations() { - return aggregations; - } - - public List getScriptFields() { - return scriptFields == null ? 
Collections.emptyList() : scriptFields; - } - - public ChunkingConfig getChunkingConfig() { - return chunkingConfig; - } - - public DelayedDataCheckConfig getDelayedDataCheckConfig() { - return delayedDataCheckConfig; - } - - public Integer getMaxEmptySearches() { - return maxEmptySearches; - } - - public IndicesOptions getIndicesOptions() { - return indicesOptions; - } - - public Map getRuntimeMappings() { - return runtimeMappings; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(ID.getPreferredName(), id); - builder.field(Job.ID.getPreferredName(), jobId); - if (queryDelay != null) { - builder.field(QUERY_DELAY.getPreferredName(), queryDelay.getStringRep()); - } - if (frequency != null) { - builder.field(FREQUENCY.getPreferredName(), frequency.getStringRep()); - } - if (indices != null) { - builder.field(INDICES.getPreferredName(), indices); - } - if (query != null) { - builder.field(QUERY.getPreferredName(), asMap(query)); - } - if (aggregations != null) { - builder.field(AGGREGATIONS.getPreferredName(), asMap(aggregations)); - } - if (scriptFields != null) { - builder.startObject(SCRIPT_FIELDS.getPreferredName()); - for (SearchSourceBuilder.ScriptField scriptField : scriptFields) { - scriptField.toXContent(builder, params); - } - builder.endObject(); - } - if (scrollSize != null) { - builder.field(SCROLL_SIZE.getPreferredName(), scrollSize); - } - if (chunkingConfig != null) { - builder.field(CHUNKING_CONFIG.getPreferredName(), chunkingConfig); - } - if (delayedDataCheckConfig != null) { - builder.field(DELAYED_DATA_CHECK_CONFIG.getPreferredName(), delayedDataCheckConfig); - } - if (maxEmptySearches != null) { - builder.field(MAX_EMPTY_SEARCHES.getPreferredName(), maxEmptySearches); - } - if (indicesOptions != null) { - builder.startObject(INDICES_OPTIONS.getPreferredName()); - indicesOptions.toXContent(builder, params); - builder.endObject(); - } - if (runtimeMappings.isEmpty() == false) { - builder.field(SearchSourceBuilder.RUNTIME_MAPPINGS_FIELD.getPreferredName(), runtimeMappings); - } - - builder.endObject(); - return builder; - } - - private static Map asMap(BytesReference bytesReference) { - return bytesReference == null ? null : XContentHelper.convertToMap(bytesReference, true, XContentType.JSON).v2(); - } - - /** - * The lists of indices and types are compared for equality but they are not - * sorted first so this test could fail simply because the indices and types - * lists are in different orders. - * - * Also note this could be a heavy operation when a query or aggregations - * are set as we need to convert the bytes references into maps to correctly - * compare them. 
- */ - @Override - public boolean equals(Object other) { - if (this == other) { - return true; - } - - if (other == null || getClass() != other.getClass()) { - return false; - } - - DatafeedConfig that = (DatafeedConfig) other; - - return Objects.equals(this.id, that.id) - && Objects.equals(this.jobId, that.jobId) - && Objects.equals(this.frequency, that.frequency) - && Objects.equals(this.queryDelay, that.queryDelay) - && Objects.equals(this.indices, that.indices) - && Objects.equals(asMap(this.query), asMap(that.query)) - && Objects.equals(this.scrollSize, that.scrollSize) - && Objects.equals(asMap(this.aggregations), asMap(that.aggregations)) - && Objects.equals(this.scriptFields, that.scriptFields) - && Objects.equals(this.chunkingConfig, that.chunkingConfig) - && Objects.equals(this.delayedDataCheckConfig, that.delayedDataCheckConfig) - && Objects.equals(this.maxEmptySearches, that.maxEmptySearches) - && Objects.equals(this.indicesOptions, that.indicesOptions) - && Objects.equals(this.runtimeMappings, that.runtimeMappings); - } - - /** - * Note this could be a heavy operation when a query or aggregations - * are set as we need to convert the bytes references into maps to - * compute a stable hash code. - */ - @Override - public int hashCode() { - return Objects.hash( - id, - jobId, - frequency, - queryDelay, - indices, - asMap(query), - scrollSize, - asMap(aggregations), - scriptFields, - chunkingConfig, - delayedDataCheckConfig, - maxEmptySearches, - indicesOptions, - runtimeMappings - ); - } - - public static Builder builder(String id, String jobId) { - return new Builder(id, jobId); - } - - public static class Builder { - - private final String id; - private final String jobId; - private TimeValue queryDelay; - private TimeValue frequency; - private List indices; - private BytesReference query; - private BytesReference aggregations; - private List scriptFields; - private Integer scrollSize; - private ChunkingConfig chunkingConfig; - private DelayedDataCheckConfig delayedDataCheckConfig; - private Integer maxEmptySearches; - private IndicesOptions indicesOptions; - private Map runtimeMappings = Collections.emptyMap(); - - public Builder(String id, String jobId) { - this.id = Objects.requireNonNull(id, ID.getPreferredName()); - this.jobId = Objects.requireNonNull(jobId, Job.ID.getPreferredName()); - } - - public Builder(DatafeedConfig config) { - this.id = config.id; - this.jobId = config.jobId; - this.queryDelay = config.queryDelay; - this.frequency = config.frequency; - this.indices = config.indices == null ? null : new ArrayList<>(config.indices); - this.query = config.query; - this.aggregations = config.aggregations; - this.scriptFields = config.scriptFields == null ? null : new ArrayList<>(config.scriptFields); - this.scrollSize = config.scrollSize; - this.chunkingConfig = config.chunkingConfig; - this.delayedDataCheckConfig = config.getDelayedDataCheckConfig(); - this.maxEmptySearches = config.getMaxEmptySearches(); - this.indicesOptions = config.indicesOptions; - this.runtimeMappings = new HashMap<>(config.runtimeMappings); - } - - public Builder setIndices(List indices) { - this.indices = Objects.requireNonNull(indices, INDICES.getPreferredName()); - return this; - } - - public Builder setIndices(String... 
indices) { - return setIndices(Arrays.asList(indices)); - } - - public Builder setQueryDelay(TimeValue queryDelay) { - this.queryDelay = queryDelay; - return this; - } - - public Builder setFrequency(TimeValue frequency) { - this.frequency = frequency; - return this; - } - - private Builder setQuery(BytesReference query) { - this.query = query; - return this; - } - - public Builder setQuery(String queryAsJson) { - this.query = queryAsJson == null ? null : new BytesArray(queryAsJson); - return this; - } - - public Builder setQuery(QueryBuilder query) throws IOException { - this.query = query == null ? null : xContentToBytes(query); - return this; - } - - private Builder setAggregations(BytesReference aggregations) { - this.aggregations = aggregations; - return this; - } - - public Builder setAggregations(String aggsAsJson) { - this.aggregations = aggsAsJson == null ? null : new BytesArray(aggsAsJson); - return this; - } - - public Builder setAggregations(AggregatorFactories.Builder aggregations) throws IOException { - this.aggregations = aggregations == null ? null : xContentToBytes(aggregations); - return this; - } - - public Builder setScriptFields(List scriptFields) { - List sorted = new ArrayList<>(scriptFields); - sorted.sort(Comparator.comparing(SearchSourceBuilder.ScriptField::fieldName)); - this.scriptFields = sorted; - return this; - } - - public Builder setScrollSize(int scrollSize) { - this.scrollSize = scrollSize; - return this; - } - - public Builder setChunkingConfig(ChunkingConfig chunkingConfig) { - this.chunkingConfig = chunkingConfig; - return this; - } - - /** - * This sets the {@link DelayedDataCheckConfig} settings. - * - * See {@link DelayedDataCheckConfig} for more information. - * - * @param delayedDataCheckConfig the delayed data check configuration - * Default value is enabled, with `check_window` being null. This means the true window is - * calculated when the real-time Datafeed runs. 
-         */
-        public Builder setDelayedDataCheckConfig(DelayedDataCheckConfig delayedDataCheckConfig) {
-            this.delayedDataCheckConfig = delayedDataCheckConfig;
-            return this;
-        }
-
-        public Builder setMaxEmptySearches(int maxEmptySearches) {
-            this.maxEmptySearches = maxEmptySearches;
-            return this;
-        }
-
-        public Builder setIndicesOptions(IndicesOptions indicesOptions) {
-            this.indicesOptions = indicesOptions;
-            return this;
-        }
-
-        public Builder setRuntimeMappings(Map<String, Object> runtimeMappings) {
-            this.runtimeMappings = Objects.requireNonNull(runtimeMappings, SearchSourceBuilder.RUNTIME_MAPPINGS_FIELD.getPreferredName());
-            return this;
-        }
-
-        public DatafeedConfig build() {
-            return new DatafeedConfig(
-                id,
-                jobId,
-                queryDelay,
-                frequency,
-                indices,
-                query,
-                aggregations,
-                scriptFields,
-                scrollSize,
-                chunkingConfig,
-                delayedDataCheckConfig,
-                maxEmptySearches,
-                indicesOptions,
-                runtimeMappings
-            );
-        }
-
-        private static BytesReference xContentToBytes(ToXContentObject object) throws IOException {
-            try (XContentBuilder builder = JsonXContent.contentBuilder()) {
-                object.toXContent(builder, ToXContentObject.EMPTY_PARAMS);
-                return BytesReference.bytes(builder);
-            }
-        }
-    }
-}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/datafeed/DatafeedState.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/datafeed/DatafeedState.java
deleted file mode 100644
index 4d309c31ab375..0000000000000
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/datafeed/DatafeedState.java
+++ /dev/null
@@ -1,34 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-package org.elasticsearch.client.ml.datafeed;
-
-import org.elasticsearch.xcontent.ParseField;
-
-import java.util.Locale;
-
-/**
- * Datafeed State POJO
- */
-public enum DatafeedState {
-
-    STARTED,
-    STOPPED,
-    STARTING,
-    STOPPING;
-
-    public static final ParseField STATE = new ParseField("state");
-
-    public static DatafeedState fromString(String name) {
-        return valueOf(name.trim().toUpperCase(Locale.ROOT));
-    }
-
-    @Override
-    public String toString() {
-        return name().toLowerCase(Locale.ROOT);
-    }
-}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/datafeed/DatafeedStats.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/datafeed/DatafeedStats.java
deleted file mode 100644
index b218f749a10f3..0000000000000
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/datafeed/DatafeedStats.java
+++ /dev/null
@@ -1,142 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
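To illustrate the fluent API being deleted in DatafeedConfig above, a minimal sketch that only uses builder methods shown in that class (the datafeed id, job id, and index pattern are hypothetical):

    DatafeedConfig datafeed = DatafeedConfig.builder("datafeed-1", "job-1")
        .setIndices("server-metrics-*")                 // indices to read from
        .setQueryDelay(TimeValue.timeValueSeconds(60))  // lag behind real time
        .setFrequency(TimeValue.timeValueMinutes(1))    // how often to search
        .setQuery("{\"match_all\": {}}")                // raw JSON, stored as a BytesReference
        .setChunkingConfig(ChunkingConfig.newAuto())
        .build();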
- */ -package org.elasticsearch.client.ml.datafeed; - -import org.elasticsearch.client.ml.NodeAttributes; -import org.elasticsearch.core.Nullable; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContent; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Map; -import java.util.Objects; - -/** - * Datafeed Statistics POJO - */ -public class DatafeedStats implements ToXContentObject { - - private final String datafeedId; - private final DatafeedState datafeedState; - @Nullable - private final NodeAttributes node; - @Nullable - private final String assignmentExplanation; - @Nullable - private final DatafeedTimingStats timingStats; - - public static final ParseField ASSIGNMENT_EXPLANATION = new ParseField("assignment_explanation"); - public static final ParseField NODE = new ParseField("node"); - public static final ParseField TIMING_STATS = new ParseField("timing_stats"); - - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>("datafeed_stats", true, a -> { - String datafeedId = (String) a[0]; - DatafeedState datafeedState = DatafeedState.fromString((String) a[1]); - NodeAttributes nodeAttributes = (NodeAttributes) a[2]; - String assignmentExplanation = (String) a[3]; - DatafeedTimingStats timingStats = (DatafeedTimingStats) a[4]; - return new DatafeedStats(datafeedId, datafeedState, nodeAttributes, assignmentExplanation, timingStats); - }); - - static { - PARSER.declareString(ConstructingObjectParser.constructorArg(), DatafeedConfig.ID); - PARSER.declareString(ConstructingObjectParser.constructorArg(), DatafeedState.STATE); - PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(), NodeAttributes.PARSER, NODE); - PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), ASSIGNMENT_EXPLANATION); - PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(), DatafeedTimingStats.PARSER, TIMING_STATS); - } - - public DatafeedStats( - String datafeedId, - DatafeedState datafeedState, - @Nullable NodeAttributes node, - @Nullable String assignmentExplanation, - @Nullable DatafeedTimingStats timingStats - ) { - this.datafeedId = Objects.requireNonNull(datafeedId); - this.datafeedState = Objects.requireNonNull(datafeedState); - this.node = node; - this.assignmentExplanation = assignmentExplanation; - this.timingStats = timingStats; - } - - public String getDatafeedId() { - return datafeedId; - } - - public DatafeedState getDatafeedState() { - return datafeedState; - } - - public NodeAttributes getNode() { - return node; - } - - public String getAssignmentExplanation() { - return assignmentExplanation; - } - - public DatafeedTimingStats getDatafeedTimingStats() { - return timingStats; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { - builder.startObject(); - builder.field(DatafeedConfig.ID.getPreferredName(), datafeedId); - builder.field(DatafeedState.STATE.getPreferredName(), datafeedState.toString()); - if (node != null) { - builder.startObject("node"); - builder.field("id", node.getId()); - builder.field("name", node.getName()); - builder.field("ephemeral_id", node.getEphemeralId()); - builder.field("transport_address", node.getTransportAddress()); - - builder.startObject("attributes"); - for (Map.Entry entry : node.getAttributes().entrySet()) { - if 
(entry.getKey().startsWith("ml.")) { - builder.field(entry.getKey(), entry.getValue()); - } - } - builder.endObject(); - builder.endObject(); - } - if (assignmentExplanation != null) { - builder.field(ASSIGNMENT_EXPLANATION.getPreferredName(), assignmentExplanation); - } - if (timingStats != null) { - builder.field(TIMING_STATS.getPreferredName(), timingStats); - } - builder.endObject(); - return builder; - } - - @Override - public int hashCode() { - return Objects.hash(datafeedId, datafeedState.toString(), node, assignmentExplanation, timingStats); - } - - @Override - public boolean equals(Object obj) { - if (obj == null) { - return false; - } - if (getClass() != obj.getClass()) { - return false; - } - DatafeedStats other = (DatafeedStats) obj; - return Objects.equals(datafeedId, other.datafeedId) - && Objects.equals(this.datafeedState, other.datafeedState) - && Objects.equals(this.node, other.node) - && Objects.equals(this.assignmentExplanation, other.assignmentExplanation) - && Objects.equals(this.timingStats, other.timingStats); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/datafeed/DatafeedTimingStats.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/datafeed/DatafeedTimingStats.java deleted file mode 100644 index 4aa464228f0e8..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/datafeed/DatafeedTimingStats.java +++ /dev/null @@ -1,166 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ -package org.elasticsearch.client.ml.datafeed; - -import org.elasticsearch.common.Strings; -import org.elasticsearch.core.Nullable; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContent; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Objects; - -import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; -import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; - -public class DatafeedTimingStats implements ToXContentObject { - - public static final ParseField JOB_ID = new ParseField("job_id"); - public static final ParseField SEARCH_COUNT = new ParseField("search_count"); - public static final ParseField BUCKET_COUNT = new ParseField("bucket_count"); - public static final ParseField TOTAL_SEARCH_TIME_MS = new ParseField("total_search_time_ms"); - public static final ParseField AVG_SEARCH_TIME_PER_BUCKET_MS = new ParseField("average_search_time_per_bucket_ms"); - public static final ParseField EXPONENTIAL_AVG_SEARCH_TIME_PER_HOUR_MS = new ParseField("exponential_average_search_time_per_hour_ms"); - - public static final ParseField TYPE = new ParseField("datafeed_timing_stats"); - - public static final ConstructingObjectParser PARSER = createParser(); - - @SuppressWarnings("HiddenField") - private static ConstructingObjectParser createParser() { - ConstructingObjectParser parser = new ConstructingObjectParser<>("datafeed_timing_stats", true, args -> { - String jobId = (String) args[0]; - Long searchCount = (Long) args[1]; - Long bucketCount = (Long) args[2]; - Double totalSearchTimeMs = (Double) args[3]; - Double avgSearchTimePerBucketMs = (Double) args[4]; - Double exponentialAvgSearchTimePerHourMs = (Double) args[5]; - return new DatafeedTimingStats( - jobId, - getOrDefault(searchCount, 0L), - getOrDefault(bucketCount, 0L), - getOrDefault(totalSearchTimeMs, 0.0), - avgSearchTimePerBucketMs, - exponentialAvgSearchTimePerHourMs - ); - }); - parser.declareString(constructorArg(), JOB_ID); - parser.declareLong(optionalConstructorArg(), SEARCH_COUNT); - parser.declareLong(optionalConstructorArg(), BUCKET_COUNT); - parser.declareDouble(optionalConstructorArg(), TOTAL_SEARCH_TIME_MS); - parser.declareDouble(optionalConstructorArg(), AVG_SEARCH_TIME_PER_BUCKET_MS); - parser.declareDouble(optionalConstructorArg(), EXPONENTIAL_AVG_SEARCH_TIME_PER_HOUR_MS); - return parser; - } - - private final String jobId; - private long searchCount; - private long bucketCount; - private double totalSearchTimeMs; - private Double avgSearchTimePerBucketMs; - private Double exponentialAvgSearchTimePerHourMs; - - public DatafeedTimingStats( - String jobId, - long searchCount, - long bucketCount, - double totalSearchTimeMs, - @Nullable Double avgSearchTimePerBucketMs, - @Nullable Double exponentialAvgSearchTimePerHourMs - ) { - this.jobId = Objects.requireNonNull(jobId); - this.searchCount = searchCount; - this.bucketCount = bucketCount; - this.totalSearchTimeMs = totalSearchTimeMs; - this.avgSearchTimePerBucketMs = avgSearchTimePerBucketMs; - this.exponentialAvgSearchTimePerHourMs = exponentialAvgSearchTimePerHourMs; - } - - public String getJobId() { - return jobId; - } - - public long getSearchCount() { - return searchCount; - } - - public long getBucketCount() { - return bucketCount; - } - - public double getTotalSearchTimeMs() { - return 
totalSearchTimeMs; - } - - public Double getAvgSearchTimePerBucketMs() { - return avgSearchTimePerBucketMs; - } - - public Double getExponentialAvgSearchTimePerHourMs() { - return exponentialAvgSearchTimePerHourMs; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { - builder.startObject(); - builder.field(JOB_ID.getPreferredName(), jobId); - builder.field(SEARCH_COUNT.getPreferredName(), searchCount); - builder.field(BUCKET_COUNT.getPreferredName(), bucketCount); - builder.field(TOTAL_SEARCH_TIME_MS.getPreferredName(), totalSearchTimeMs); - if (avgSearchTimePerBucketMs != null) { - builder.field(AVG_SEARCH_TIME_PER_BUCKET_MS.getPreferredName(), avgSearchTimePerBucketMs); - } - if (exponentialAvgSearchTimePerHourMs != null) { - builder.field(EXPONENTIAL_AVG_SEARCH_TIME_PER_HOUR_MS.getPreferredName(), exponentialAvgSearchTimePerHourMs); - } - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object obj) { - if (this == obj) { - return true; - } - if (obj == null || getClass() != obj.getClass()) { - return false; - } - - DatafeedTimingStats other = (DatafeedTimingStats) obj; - return Objects.equals(this.jobId, other.jobId) - && this.searchCount == other.searchCount - && this.bucketCount == other.bucketCount - && this.totalSearchTimeMs == other.totalSearchTimeMs - && Objects.equals(this.avgSearchTimePerBucketMs, other.avgSearchTimePerBucketMs) - && Objects.equals(this.exponentialAvgSearchTimePerHourMs, other.exponentialAvgSearchTimePerHourMs); - } - - @Override - public int hashCode() { - return Objects.hash( - jobId, - searchCount, - bucketCount, - totalSearchTimeMs, - avgSearchTimePerBucketMs, - exponentialAvgSearchTimePerHourMs - ); - } - - @Override - public String toString() { - return Strings.toString(this); - } - - private static T getOrDefault(@Nullable T value, T defaultValue) { - return value != null ? value : defaultValue; - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/datafeed/DatafeedUpdate.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/datafeed/DatafeedUpdate.java deleted file mode 100644 index 3b4be882a868b..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/datafeed/DatafeedUpdate.java +++ /dev/null @@ -1,455 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
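Note how the DatafeedTimingStats parser above zero-defaults the optional counters through getOrDefault while leaving the two average fields null when absent; the public constructor takes every value explicitly. A small sketch (the job id is hypothetical):

    DatafeedTimingStats stats = new DatafeedTimingStats("job-1", 5, 10, 10_000.0, null, null);
    long searches = stats.getSearchCount();            // 5
    Double avg = stats.getAvgSearchTimePerBucketMs();  // may be null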
- */ -package org.elasticsearch.client.ml.datafeed; - -import org.elasticsearch.action.support.IndicesOptions; -import org.elasticsearch.common.bytes.BytesArray; -import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.xcontent.XContentHelper; -import org.elasticsearch.core.TimeValue; -import org.elasticsearch.index.query.QueryBuilder; -import org.elasticsearch.search.aggregations.AggregatorFactories; -import org.elasticsearch.search.builder.SearchSourceBuilder; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; -import org.elasticsearch.xcontent.XContentType; -import org.elasticsearch.xcontent.json.JsonXContent; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collections; -import java.util.Comparator; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Objects; - -/** - * A datafeed update contains partial properties to update a {@link DatafeedConfig}. - * The main difference between this class and {@link DatafeedConfig} is that here all - * fields are nullable. - */ -public class DatafeedUpdate implements ToXContentObject { - - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "datafeed_update", - true, - a -> new Builder((String) a[0]) - ); - - static { - PARSER.declareString(ConstructingObjectParser.constructorArg(), DatafeedConfig.ID); - - PARSER.declareStringArray(Builder::setIndices, DatafeedConfig.INDEXES); - PARSER.declareStringArray(Builder::setIndices, DatafeedConfig.INDICES); - PARSER.declareString( - (builder, val) -> builder.setQueryDelay(TimeValue.parseTimeValue(val, DatafeedConfig.QUERY_DELAY.getPreferredName())), - DatafeedConfig.QUERY_DELAY - ); - PARSER.declareString( - (builder, val) -> builder.setFrequency(TimeValue.parseTimeValue(val, DatafeedConfig.FREQUENCY.getPreferredName())), - DatafeedConfig.FREQUENCY - ); - PARSER.declareField(Builder::setQuery, DatafeedUpdate::parseBytes, DatafeedConfig.QUERY, ObjectParser.ValueType.OBJECT); - PARSER.declareField( - Builder::setAggregations, - DatafeedUpdate::parseBytes, - DatafeedConfig.AGGREGATIONS, - ObjectParser.ValueType.OBJECT - ); - PARSER.declareObject(Builder::setScriptFields, (p, c) -> { - List parsedScriptFields = new ArrayList<>(); - while (p.nextToken() != XContentParser.Token.END_OBJECT) { - parsedScriptFields.add(new SearchSourceBuilder.ScriptField(p)); - } - return parsedScriptFields; - }, DatafeedConfig.SCRIPT_FIELDS); - PARSER.declareInt(Builder::setScrollSize, DatafeedConfig.SCROLL_SIZE); - PARSER.declareObject(Builder::setChunkingConfig, ChunkingConfig.PARSER, DatafeedConfig.CHUNKING_CONFIG); - PARSER.declareObject(Builder::setDelayedDataCheckConfig, DelayedDataCheckConfig.PARSER, DatafeedConfig.DELAYED_DATA_CHECK_CONFIG); - PARSER.declareInt(Builder::setMaxEmptySearches, DatafeedConfig.MAX_EMPTY_SEARCHES); - PARSER.declareObject( - Builder::setIndicesOptions, - (p, c) -> IndicesOptions.fromMap(p.map(), new IndicesOptions(IndicesOptions.Option.NONE, IndicesOptions.WildcardStates.NONE)), - DatafeedConfig.INDICES_OPTIONS - ); - PARSER.declareObject(Builder::setRuntimeMappings, (p, c) -> p.map(), SearchSourceBuilder.RUNTIME_MAPPINGS_FIELD); - } - - private static BytesReference 
parseBytes(XContentParser parser) throws IOException { - XContentBuilder contentBuilder = JsonXContent.contentBuilder(); - contentBuilder.generator().copyCurrentStructure(parser); - return BytesReference.bytes(contentBuilder); - } - - private final String id; - private final TimeValue queryDelay; - private final TimeValue frequency; - private final List indices; - private final BytesReference query; - private final BytesReference aggregations; - private final List scriptFields; - private final Integer scrollSize; - private final ChunkingConfig chunkingConfig; - private final DelayedDataCheckConfig delayedDataCheckConfig; - private final Integer maxEmptySearches; - private final IndicesOptions indicesOptions; - private final Map runtimeMappings; - - private DatafeedUpdate( - String id, - TimeValue queryDelay, - TimeValue frequency, - List indices, - BytesReference query, - BytesReference aggregations, - List scriptFields, - Integer scrollSize, - ChunkingConfig chunkingConfig, - DelayedDataCheckConfig delayedDataCheckConfig, - Integer maxEmptySearches, - IndicesOptions indicesOptions, - Map runtimeMappings - ) { - this.id = id; - this.queryDelay = queryDelay; - this.frequency = frequency; - this.indices = indices; - this.query = query; - this.aggregations = aggregations; - this.scriptFields = scriptFields; - this.scrollSize = scrollSize; - this.chunkingConfig = chunkingConfig; - this.delayedDataCheckConfig = delayedDataCheckConfig; - this.maxEmptySearches = maxEmptySearches; - this.indicesOptions = indicesOptions; - this.runtimeMappings = runtimeMappings; - } - - /** - * Get the id of the datafeed to update - */ - public String getId() { - return id; - } - - public Map getRuntimeMappings() { - return runtimeMappings; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(DatafeedConfig.ID.getPreferredName(), id); - if (queryDelay != null) { - builder.field(DatafeedConfig.QUERY_DELAY.getPreferredName(), queryDelay.getStringRep()); - } - if (frequency != null) { - builder.field(DatafeedConfig.FREQUENCY.getPreferredName(), frequency.getStringRep()); - } - addOptionalField(builder, DatafeedConfig.INDICES, indices); - if (query != null) { - builder.field(DatafeedConfig.QUERY.getPreferredName(), asMap(query)); - } - if (aggregations != null) { - builder.field(DatafeedConfig.AGGREGATIONS.getPreferredName(), asMap(aggregations)); - } - if (scriptFields != null) { - builder.startObject(DatafeedConfig.SCRIPT_FIELDS.getPreferredName()); - for (SearchSourceBuilder.ScriptField scriptField : scriptFields) { - scriptField.toXContent(builder, params); - } - builder.endObject(); - } - if (delayedDataCheckConfig != null) { - builder.field(DatafeedConfig.DELAYED_DATA_CHECK_CONFIG.getPreferredName(), delayedDataCheckConfig); - } - addOptionalField(builder, DatafeedConfig.SCROLL_SIZE, scrollSize); - addOptionalField(builder, DatafeedConfig.CHUNKING_CONFIG, chunkingConfig); - addOptionalField(builder, DatafeedConfig.MAX_EMPTY_SEARCHES, maxEmptySearches); - if (indicesOptions != null) { - builder.startObject(DatafeedConfig.INDICES_OPTIONS.getPreferredName()); - indicesOptions.toXContent(builder, params); - builder.endObject(); - } - addOptionalField(builder, SearchSourceBuilder.RUNTIME_MAPPINGS_FIELD, runtimeMappings); - builder.endObject(); - return builder; - } - - private void addOptionalField(XContentBuilder builder, ParseField field, Object value) throws IOException { - if (value != null) { - 
builder.field(field.getPreferredName(), value); - } - } - - public TimeValue getQueryDelay() { - return queryDelay; - } - - public TimeValue getFrequency() { - return frequency; - } - - public List getIndices() { - return indices; - } - - public Integer getScrollSize() { - return scrollSize; - } - - public BytesReference getQuery() { - return query; - } - - public BytesReference getAggregations() { - return aggregations; - } - - public List getScriptFields() { - return scriptFields == null ? Collections.emptyList() : scriptFields; - } - - public ChunkingConfig getChunkingConfig() { - return chunkingConfig; - } - - public DelayedDataCheckConfig getDelayedDataCheckConfig() { - return delayedDataCheckConfig; - } - - public Integer getMaxEmptySearches() { - return maxEmptySearches; - } - - public IndicesOptions getIndicesOptions() { - return indicesOptions; - } - - private static Map asMap(BytesReference bytesReference) { - return bytesReference == null ? null : XContentHelper.convertToMap(bytesReference, true, XContentType.JSON).v2(); - } - - /** - * The lists of indices and types are compared for equality but they are not - * sorted first so this test could fail simply because the indices and types - * lists are in different orders. - * - * Also note this could be a heavy operation when a query or aggregations - * are set as we need to convert the bytes references into maps to correctly - * compare them. - */ - @Override - public boolean equals(Object other) { - if (this == other) { - return true; - } - - if (other == null || getClass() != other.getClass()) { - return false; - } - - DatafeedUpdate that = (DatafeedUpdate) other; - - return Objects.equals(this.id, that.id) - && Objects.equals(this.frequency, that.frequency) - && Objects.equals(this.queryDelay, that.queryDelay) - && Objects.equals(this.indices, that.indices) - && Objects.equals(asMap(this.query), asMap(that.query)) - && Objects.equals(this.scrollSize, that.scrollSize) - && Objects.equals(asMap(this.aggregations), asMap(that.aggregations)) - && Objects.equals(this.delayedDataCheckConfig, that.delayedDataCheckConfig) - && Objects.equals(this.scriptFields, that.scriptFields) - && Objects.equals(this.chunkingConfig, that.chunkingConfig) - && Objects.equals(this.maxEmptySearches, that.maxEmptySearches) - && Objects.equals(this.indicesOptions, that.indicesOptions) - && Objects.equals(this.runtimeMappings, that.runtimeMappings); - } - - /** - * Note this could be a heavy operation when a query or aggregations - * are set as we need to convert the bytes references into maps to - * compute a stable hash code. 
- */ - @Override - public int hashCode() { - return Objects.hash( - id, - frequency, - queryDelay, - indices, - asMap(query), - scrollSize, - asMap(aggregations), - scriptFields, - chunkingConfig, - delayedDataCheckConfig, - maxEmptySearches, - indicesOptions, - runtimeMappings - ); - } - - public static Builder builder(String id) { - return new Builder(id); - } - - public static class Builder { - - private String id; - private TimeValue queryDelay; - private TimeValue frequency; - private List indices; - private BytesReference query; - private BytesReference aggregations; - private List scriptFields; - private Integer scrollSize; - private ChunkingConfig chunkingConfig; - private DelayedDataCheckConfig delayedDataCheckConfig; - private Integer maxEmptySearches; - private IndicesOptions indicesOptions; - private Map runtimeMappings; - - public Builder(String id) { - this.id = Objects.requireNonNull(id, DatafeedConfig.ID.getPreferredName()); - } - - public Builder(DatafeedUpdate config) { - this.id = config.id; - this.queryDelay = config.queryDelay; - this.frequency = config.frequency; - this.indices = config.indices; - this.query = config.query; - this.aggregations = config.aggregations; - this.scriptFields = config.scriptFields; - this.scrollSize = config.scrollSize; - this.chunkingConfig = config.chunkingConfig; - this.delayedDataCheckConfig = config.delayedDataCheckConfig; - this.maxEmptySearches = config.maxEmptySearches; - this.indicesOptions = config.indicesOptions; - this.runtimeMappings = config.runtimeMappings != null ? new HashMap<>(config.runtimeMappings) : null; - } - - public Builder setIndices(List indices) { - this.indices = indices; - return this; - } - - public Builder setIndices(String... indices) { - return setIndices(Arrays.asList(indices)); - } - - public Builder setQueryDelay(TimeValue queryDelay) { - this.queryDelay = queryDelay; - return this; - } - - public Builder setFrequency(TimeValue frequency) { - this.frequency = frequency; - return this; - } - - private Builder setQuery(BytesReference query) { - this.query = query; - return this; - } - - public Builder setQuery(String queryAsJson) { - this.query = queryAsJson == null ? null : new BytesArray(queryAsJson); - return this; - } - - public Builder setQuery(QueryBuilder query) throws IOException { - this.query = query == null ? null : xContentToBytes(query); - return this; - } - - private Builder setAggregations(BytesReference aggregations) { - this.aggregations = aggregations; - return this; - } - - public Builder setAggregations(String aggsAsJson) { - this.aggregations = aggsAsJson == null ? null : new BytesArray(aggsAsJson); - return this; - } - - public Builder setAggregations(AggregatorFactories.Builder aggregations) throws IOException { - this.aggregations = aggregations == null ? 
null : xContentToBytes(aggregations);
-            return this;
-        }
-
-        public Builder setScriptFields(List<SearchSourceBuilder.ScriptField> scriptFields) {
-            List<SearchSourceBuilder.ScriptField> sorted = new ArrayList<>(scriptFields);
-            sorted.sort(Comparator.comparing(SearchSourceBuilder.ScriptField::fieldName));
-            this.scriptFields = sorted;
-            return this;
-        }
-
-        public Builder setScrollSize(int scrollSize) {
-            this.scrollSize = scrollSize;
-            return this;
-        }
-
-        public Builder setChunkingConfig(ChunkingConfig chunkingConfig) {
-            this.chunkingConfig = chunkingConfig;
-            return this;
-        }
-
-        public Builder setDelayedDataCheckConfig(DelayedDataCheckConfig delayedDataCheckConfig) {
-            this.delayedDataCheckConfig = delayedDataCheckConfig;
-            return this;
-        }
-
-        public Builder setMaxEmptySearches(int maxEmptySearches) {
-            this.maxEmptySearches = maxEmptySearches;
-            return this;
-        }
-
-        public Builder setIndicesOptions(IndicesOptions indicesOptions) {
-            this.indicesOptions = indicesOptions;
-            return this;
-        }
-
-        public Builder setRuntimeMappings(Map<String, Object> runtimeMappings) {
-            this.runtimeMappings = runtimeMappings;
-            return this;
-        }
-
-        public DatafeedUpdate build() {
-            return new DatafeedUpdate(
-                id,
-                queryDelay,
-                frequency,
-                indices,
-                query,
-                aggregations,
-                scriptFields,
-                scrollSize,
-                chunkingConfig,
-                delayedDataCheckConfig,
-                maxEmptySearches,
-                indicesOptions,
-                runtimeMappings
-            );
-        }
-
-        private static BytesReference xContentToBytes(ToXContentObject object) throws IOException {
-            try (XContentBuilder builder = JsonXContent.contentBuilder()) {
-                object.toXContent(builder, ToXContentObject.EMPTY_PARAMS);
-                return BytesReference.bytes(builder);
-            }
-        }
-    }
-}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/datafeed/DelayedDataCheckConfig.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/datafeed/DelayedDataCheckConfig.java
deleted file mode 100644
index 4c55662f8b833..0000000000000
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/datafeed/DelayedDataCheckConfig.java
+++ /dev/null
@@ -1,119 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-package org.elasticsearch.client.ml.datafeed;
-
-import org.elasticsearch.core.Nullable;
-import org.elasticsearch.core.TimeValue;
-import org.elasticsearch.xcontent.ConstructingObjectParser;
-import org.elasticsearch.xcontent.ParseField;
-import org.elasticsearch.xcontent.ToXContentObject;
-import org.elasticsearch.xcontent.XContentBuilder;
-
-import java.io.IOException;
-import java.util.Objects;
-
-/**
- * The configuration object containing the delayed data check settings.
- *
- * See {@link DelayedDataCheckConfig#enabledDelayedDataCheckConfig(TimeValue)} for creating a new
- * enabled datacheck with the given check_window
- *
- * See {@link DelayedDataCheckConfig#disabledDelayedDataCheckConfig()} for creating a config for disabling
- * delayed data checking.
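A short sketch of the two factory methods this javadoc points at (defined just below; the one-hour window is an arbitrary example value):

    // Look for delayed data within the last hour of finalized buckets.
    DelayedDataCheckConfig enabled = DelayedDataCheckConfig.enabledDelayedDataCheckConfig(TimeValue.timeValueHours(1));

    // A null window means the appropriate window is calculated when it is used.
    DelayedDataCheckConfig enabledAuto = DelayedDataCheckConfig.enabledDelayedDataCheckConfig(null);

    // Disable the delayed data check entirely.
    DelayedDataCheckConfig disabled = DelayedDataCheckConfig.disabledDelayedDataCheckConfig();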
- */ -public class DelayedDataCheckConfig implements ToXContentObject { - - public static final ParseField ENABLED = new ParseField("enabled"); - public static final ParseField CHECK_WINDOW = new ParseField("check_window"); - - // These parsers follow the pattern that metadata is parsed leniently (to allow for enhancements), whilst config is parsed strictly - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "delayed_data_check_config", - true, - a -> new DelayedDataCheckConfig((Boolean) a[0], (TimeValue) a[1]) - ); - static { - PARSER.declareBoolean(ConstructingObjectParser.constructorArg(), ENABLED); - PARSER.declareString( - ConstructingObjectParser.optionalConstructorArg(), - text -> TimeValue.parseTimeValue(text, CHECK_WINDOW.getPreferredName()), - CHECK_WINDOW - ); - } - - /** - * This creates a new DelayedDataCheckConfig that has a check_window of the passed `timeValue` - * - * We query the index to the latest finalized bucket from this TimeValue in the past looking to see if any data has been indexed - * since the data was read with the Datafeed. - * - * The window must be larger than the {@link org.elasticsearch.client.ml.job.config.AnalysisConfig#bucketSpan}, less than - * 24 hours, and span less than 10,000x buckets. - * - * - * @param timeValue The time length in the past from the latest finalized bucket to look for latent data. - * If `null` is provided, the appropriate window is calculated when it is used - **/ - public static DelayedDataCheckConfig enabledDelayedDataCheckConfig(TimeValue timeValue) { - return new DelayedDataCheckConfig(true, timeValue); - } - - /** - * This creates a new DelayedDataCheckConfig that disables the data check. - */ - public static DelayedDataCheckConfig disabledDelayedDataCheckConfig() { - return new DelayedDataCheckConfig(false, null); - } - - private final boolean enabled; - private final TimeValue checkWindow; - - DelayedDataCheckConfig(Boolean enabled, TimeValue checkWindow) { - this.enabled = enabled; - this.checkWindow = checkWindow; - } - - public boolean isEnabled() { - return enabled; - } - - @Nullable - public TimeValue getCheckWindow() { - return checkWindow; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(ENABLED.getPreferredName(), enabled); - if (checkWindow != null) { - builder.field(CHECK_WINDOW.getPreferredName(), checkWindow.getStringRep()); - } - builder.endObject(); - return builder; - } - - @Override - public int hashCode() { - return Objects.hash(enabled, checkWindow); - } - - @Override - public boolean equals(Object obj) { - if (this == obj) { - return true; - } - if (obj == null || getClass() != obj.getClass()) { - return false; - } - - DelayedDataCheckConfig other = (DelayedDataCheckConfig) obj; - return Objects.equals(this.enabled, other.enabled) && Objects.equals(this.checkWindow, other.checkWindow); - } - -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/Classification.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/Classification.java deleted file mode 100644 index b1fe4a5d1b87c..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/Classification.java +++ /dev/null @@ -1,550 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. 
Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.dataframe; - -import org.elasticsearch.client.ml.inference.NamedXContentObjectHelper; -import org.elasticsearch.client.ml.inference.preprocessing.PreProcessor; -import org.elasticsearch.common.Strings; -import org.elasticsearch.core.Nullable; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.List; -import java.util.Locale; -import java.util.Objects; - -public class Classification implements DataFrameAnalysis { - - public static Classification fromXContent(XContentParser parser) { - return PARSER.apply(parser, null); - } - - public static Builder builder(String dependentVariable) { - return new Builder(dependentVariable); - } - - public static final ParseField NAME = new ParseField("classification"); - - static final ParseField DEPENDENT_VARIABLE = new ParseField("dependent_variable"); - static final ParseField LAMBDA = new ParseField("lambda"); - static final ParseField GAMMA = new ParseField("gamma"); - static final ParseField ETA = new ParseField("eta"); - static final ParseField MAX_TREES = new ParseField("max_trees"); - static final ParseField FEATURE_BAG_FRACTION = new ParseField("feature_bag_fraction"); - static final ParseField NUM_TOP_FEATURE_IMPORTANCE_VALUES = new ParseField("num_top_feature_importance_values"); - static final ParseField PREDICTION_FIELD_NAME = new ParseField("prediction_field_name"); - static final ParseField TRAINING_PERCENT = new ParseField("training_percent"); - static final ParseField CLASS_ASSIGNMENT_OBJECTIVE = new ParseField("class_assignment_objective"); - static final ParseField NUM_TOP_CLASSES = new ParseField("num_top_classes"); - static final ParseField RANDOMIZE_SEED = new ParseField("randomize_seed"); - static final ParseField FEATURE_PROCESSORS = new ParseField("feature_processors"); - static final ParseField ALPHA = new ParseField("alpha"); - static final ParseField ETA_GROWTH_RATE_PER_TREE = new ParseField("eta_growth_rate_per_tree"); - static final ParseField SOFT_TREE_DEPTH_LIMIT = new ParseField("soft_tree_depth_limit"); - static final ParseField SOFT_TREE_DEPTH_TOLERANCE = new ParseField("soft_tree_depth_tolerance"); - static final ParseField DOWNSAMPLE_FACTOR = new ParseField("downsample_factor"); - static final ParseField MAX_OPTIMIZATION_ROUNDS_PER_HYPERPARAMETER = new ParseField("max_optimization_rounds_per_hyperparameter"); - static final ParseField EARLY_STOPPING_ENABLED = new ParseField("early_stopping_enabled"); - - @SuppressWarnings("unchecked") - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - NAME.getPreferredName(), - true, - a -> new Classification( - (String) a[0], - (Double) a[1], - (Double) a[2], - (Double) a[3], - (Integer) a[4], - (Double) a[5], - (Integer) a[6], - (String) a[7], - (Double) a[8], - (Integer) a[9], - (Long) a[10], - (ClassAssignmentObjective) a[11], - (List) a[12], - (Double) a[13], - (Double) a[14], - (Double) a[15], - (Double) a[16], - (Double) a[17], - (Integer) a[18], - (Boolean) a[19] - ) - ); - - static { - PARSER.declareString(ConstructingObjectParser.constructorArg(), DEPENDENT_VARIABLE); - 
PARSER.declareDouble(ConstructingObjectParser.optionalConstructorArg(), LAMBDA); - PARSER.declareDouble(ConstructingObjectParser.optionalConstructorArg(), GAMMA); - PARSER.declareDouble(ConstructingObjectParser.optionalConstructorArg(), ETA); - PARSER.declareInt(ConstructingObjectParser.optionalConstructorArg(), MAX_TREES); - PARSER.declareDouble(ConstructingObjectParser.optionalConstructorArg(), FEATURE_BAG_FRACTION); - PARSER.declareInt(ConstructingObjectParser.optionalConstructorArg(), NUM_TOP_FEATURE_IMPORTANCE_VALUES); - PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), PREDICTION_FIELD_NAME); - PARSER.declareDouble(ConstructingObjectParser.optionalConstructorArg(), TRAINING_PERCENT); - PARSER.declareInt(ConstructingObjectParser.optionalConstructorArg(), NUM_TOP_CLASSES); - PARSER.declareLong(ConstructingObjectParser.optionalConstructorArg(), RANDOMIZE_SEED); - PARSER.declareString( - ConstructingObjectParser.optionalConstructorArg(), - ClassAssignmentObjective::fromString, - CLASS_ASSIGNMENT_OBJECTIVE - ); - PARSER.declareNamedObjects( - ConstructingObjectParser.optionalConstructorArg(), - (p, c, n) -> p.namedObject(PreProcessor.class, n, c), - (classification) -> {}, - FEATURE_PROCESSORS - ); - PARSER.declareDouble(ConstructingObjectParser.optionalConstructorArg(), ALPHA); - PARSER.declareDouble(ConstructingObjectParser.optionalConstructorArg(), ETA_GROWTH_RATE_PER_TREE); - PARSER.declareDouble(ConstructingObjectParser.optionalConstructorArg(), SOFT_TREE_DEPTH_LIMIT); - PARSER.declareDouble(ConstructingObjectParser.optionalConstructorArg(), SOFT_TREE_DEPTH_TOLERANCE); - PARSER.declareDouble(ConstructingObjectParser.optionalConstructorArg(), DOWNSAMPLE_FACTOR); - PARSER.declareInt(ConstructingObjectParser.optionalConstructorArg(), MAX_OPTIMIZATION_ROUNDS_PER_HYPERPARAMETER); - PARSER.declareBoolean(ConstructingObjectParser.optionalConstructorArg(), EARLY_STOPPING_ENABLED); - } - - private final String dependentVariable; - private final Double lambda; - private final Double gamma; - private final Double eta; - private final Integer maxTrees; - private final Double featureBagFraction; - private final Integer numTopFeatureImportanceValues; - private final String predictionFieldName; - private final Double trainingPercent; - private final ClassAssignmentObjective classAssignmentObjective; - private final Integer numTopClasses; - private final Long randomizeSeed; - private final List featureProcessors; - private final Double alpha; - private final Double etaGrowthRatePerTree; - private final Double softTreeDepthLimit; - private final Double softTreeDepthTolerance; - private final Double downsampleFactor; - private final Integer maxOptimizationRoundsPerHyperparameter; - private final Boolean earlyStoppingEnabled; - - private Classification( - String dependentVariable, - @Nullable Double lambda, - @Nullable Double gamma, - @Nullable Double eta, - @Nullable Integer maxTrees, - @Nullable Double featureBagFraction, - @Nullable Integer numTopFeatureImportanceValues, - @Nullable String predictionFieldName, - @Nullable Double trainingPercent, - @Nullable Integer numTopClasses, - @Nullable Long randomizeSeed, - @Nullable ClassAssignmentObjective classAssignmentObjective, - @Nullable List featureProcessors, - @Nullable Double alpha, - @Nullable Double etaGrowthRatePerTree, - @Nullable Double softTreeDepthLimit, - @Nullable Double softTreeDepthTolerance, - @Nullable Double downsampleFactor, - @Nullable Integer maxOptimizationRoundsPerHyperparameter, - @Nullable Boolean 
earlyStoppingEnabled - ) { - this.dependentVariable = Objects.requireNonNull(dependentVariable); - this.lambda = lambda; - this.gamma = gamma; - this.eta = eta; - this.maxTrees = maxTrees; - this.featureBagFraction = featureBagFraction; - this.numTopFeatureImportanceValues = numTopFeatureImportanceValues; - this.predictionFieldName = predictionFieldName; - this.trainingPercent = trainingPercent; - this.classAssignmentObjective = classAssignmentObjective; - this.numTopClasses = numTopClasses; - this.randomizeSeed = randomizeSeed; - this.featureProcessors = featureProcessors; - this.alpha = alpha; - this.etaGrowthRatePerTree = etaGrowthRatePerTree; - this.softTreeDepthLimit = softTreeDepthLimit; - this.softTreeDepthTolerance = softTreeDepthTolerance; - this.downsampleFactor = downsampleFactor; - this.maxOptimizationRoundsPerHyperparameter = maxOptimizationRoundsPerHyperparameter; - this.earlyStoppingEnabled = earlyStoppingEnabled; - } - - @Override - public String getName() { - return NAME.getPreferredName(); - } - - public String getDependentVariable() { - return dependentVariable; - } - - public Double getLambda() { - return lambda; - } - - public Double getGamma() { - return gamma; - } - - public Double getEta() { - return eta; - } - - public Integer getMaxTrees() { - return maxTrees; - } - - public Double getFeatureBagFraction() { - return featureBagFraction; - } - - public Integer getNumTopFeatureImportanceValues() { - return numTopFeatureImportanceValues; - } - - public String getPredictionFieldName() { - return predictionFieldName; - } - - public Double getTrainingPercent() { - return trainingPercent; - } - - public Long getRandomizeSeed() { - return randomizeSeed; - } - - public ClassAssignmentObjective getClassAssignmentObjective() { - return classAssignmentObjective; - } - - public Integer getNumTopClasses() { - return numTopClasses; - } - - public List getFeatureProcessors() { - return featureProcessors; - } - - public Double getAlpha() { - return alpha; - } - - public Double getEtaGrowthRatePerTree() { - return etaGrowthRatePerTree; - } - - public Double getSoftTreeDepthLimit() { - return softTreeDepthLimit; - } - - public Double getSoftTreeDepthTolerance() { - return softTreeDepthTolerance; - } - - public Double getDownsampleFactor() { - return downsampleFactor; - } - - public Integer getMaxOptimizationRoundsPerHyperparameter() { - return maxOptimizationRoundsPerHyperparameter; - } - - public Boolean getEarlyStoppingEnable() { - return earlyStoppingEnabled; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(DEPENDENT_VARIABLE.getPreferredName(), dependentVariable); - if (lambda != null) { - builder.field(LAMBDA.getPreferredName(), lambda); - } - if (gamma != null) { - builder.field(GAMMA.getPreferredName(), gamma); - } - if (eta != null) { - builder.field(ETA.getPreferredName(), eta); - } - if (maxTrees != null) { - builder.field(MAX_TREES.getPreferredName(), maxTrees); - } - if (featureBagFraction != null) { - builder.field(FEATURE_BAG_FRACTION.getPreferredName(), featureBagFraction); - } - if (numTopFeatureImportanceValues != null) { - builder.field(NUM_TOP_FEATURE_IMPORTANCE_VALUES.getPreferredName(), numTopFeatureImportanceValues); - } - if (predictionFieldName != null) { - builder.field(PREDICTION_FIELD_NAME.getPreferredName(), predictionFieldName); - } - if (trainingPercent != null) { - builder.field(TRAINING_PERCENT.getPreferredName(), trainingPercent); - } - if 
(randomizeSeed != null) { - builder.field(RANDOMIZE_SEED.getPreferredName(), randomizeSeed); - } - if (classAssignmentObjective != null) { - builder.field(CLASS_ASSIGNMENT_OBJECTIVE.getPreferredName(), classAssignmentObjective); - } - if (numTopClasses != null) { - builder.field(NUM_TOP_CLASSES.getPreferredName(), numTopClasses); - } - if (featureProcessors != null) { - NamedXContentObjectHelper.writeNamedObjects(builder, params, true, FEATURE_PROCESSORS.getPreferredName(), featureProcessors); - } - if (alpha != null) { - builder.field(ALPHA.getPreferredName(), alpha); - } - if (etaGrowthRatePerTree != null) { - builder.field(ETA_GROWTH_RATE_PER_TREE.getPreferredName(), etaGrowthRatePerTree); - } - if (softTreeDepthLimit != null) { - builder.field(SOFT_TREE_DEPTH_LIMIT.getPreferredName(), softTreeDepthLimit); - } - if (softTreeDepthTolerance != null) { - builder.field(SOFT_TREE_DEPTH_TOLERANCE.getPreferredName(), softTreeDepthTolerance); - } - if (downsampleFactor != null) { - builder.field(DOWNSAMPLE_FACTOR.getPreferredName(), downsampleFactor); - } - if (maxOptimizationRoundsPerHyperparameter != null) { - builder.field(MAX_OPTIMIZATION_ROUNDS_PER_HYPERPARAMETER.getPreferredName(), maxOptimizationRoundsPerHyperparameter); - } - if (earlyStoppingEnabled != null) { - builder.field(EARLY_STOPPING_ENABLED.getPreferredName(), earlyStoppingEnabled); - } - builder.endObject(); - return builder; - } - - @Override - public int hashCode() { - return Objects.hash( - dependentVariable, - lambda, - gamma, - eta, - maxTrees, - featureBagFraction, - numTopFeatureImportanceValues, - predictionFieldName, - trainingPercent, - randomizeSeed, - numTopClasses, - classAssignmentObjective, - featureProcessors, - alpha, - etaGrowthRatePerTree, - softTreeDepthLimit, - softTreeDepthTolerance, - downsampleFactor, - maxOptimizationRoundsPerHyperparameter, - earlyStoppingEnabled - ); - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - Classification that = (Classification) o; - return Objects.equals(dependentVariable, that.dependentVariable) - && Objects.equals(lambda, that.lambda) - && Objects.equals(gamma, that.gamma) - && Objects.equals(eta, that.eta) - && Objects.equals(maxTrees, that.maxTrees) - && Objects.equals(featureBagFraction, that.featureBagFraction) - && Objects.equals(numTopFeatureImportanceValues, that.numTopFeatureImportanceValues) - && Objects.equals(predictionFieldName, that.predictionFieldName) - && Objects.equals(trainingPercent, that.trainingPercent) - && Objects.equals(randomizeSeed, that.randomizeSeed) - && Objects.equals(numTopClasses, that.numTopClasses) - && Objects.equals(classAssignmentObjective, that.classAssignmentObjective) - && Objects.equals(featureProcessors, that.featureProcessors) - && Objects.equals(alpha, that.alpha) - && Objects.equals(etaGrowthRatePerTree, that.etaGrowthRatePerTree) - && Objects.equals(softTreeDepthLimit, that.softTreeDepthLimit) - && Objects.equals(softTreeDepthTolerance, that.softTreeDepthTolerance) - && Objects.equals(downsampleFactor, that.downsampleFactor) - && Objects.equals(maxOptimizationRoundsPerHyperparameter, that.maxOptimizationRoundsPerHyperparameter) - && Objects.equals(earlyStoppingEnabled, that.earlyStoppingEnabled); - } - - @Override - public String toString() { - return Strings.toString(this); - } - - public enum ClassAssignmentObjective { - MAXIMIZE_ACCURACY, - MAXIMIZE_MINIMUM_RECALL; - - public static ClassAssignmentObjective fromString(String 
value) { - return ClassAssignmentObjective.valueOf(value.toUpperCase(Locale.ROOT)); - } - - @Override - public String toString() { - return name().toLowerCase(Locale.ROOT); - } - } - - public static class Builder { - private String dependentVariable; - private Double lambda; - private Double gamma; - private Double eta; - private Integer maxTrees; - private Double featureBagFraction; - private Integer numTopFeatureImportanceValues; - private String predictionFieldName; - private Double trainingPercent; - private Integer numTopClasses; - private Long randomizeSeed; - private ClassAssignmentObjective classAssignmentObjective; - private List featureProcessors; - private Double alpha; - private Double etaGrowthRatePerTree; - private Double softTreeDepthLimit; - private Double softTreeDepthTolerance; - private Double downsampleFactor; - private Integer maxOptimizationRoundsPerHyperparameter; - private Boolean earlyStoppingEnabled; - - private Builder(String dependentVariable) { - this.dependentVariable = Objects.requireNonNull(dependentVariable); - } - - public Builder setLambda(Double lambda) { - this.lambda = lambda; - return this; - } - - public Builder setGamma(Double gamma) { - this.gamma = gamma; - return this; - } - - public Builder setEta(Double eta) { - this.eta = eta; - return this; - } - - public Builder setMaxTrees(Integer maxTrees) { - this.maxTrees = maxTrees; - return this; - } - - public Builder setFeatureBagFraction(Double featureBagFraction) { - this.featureBagFraction = featureBagFraction; - return this; - } - - public Builder setNumTopFeatureImportanceValues(Integer numTopFeatureImportanceValues) { - this.numTopFeatureImportanceValues = numTopFeatureImportanceValues; - return this; - } - - public Builder setPredictionFieldName(String predictionFieldName) { - this.predictionFieldName = predictionFieldName; - return this; - } - - public Builder setTrainingPercent(Double trainingPercent) { - this.trainingPercent = trainingPercent; - return this; - } - - public Builder setRandomizeSeed(Long randomizeSeed) { - this.randomizeSeed = randomizeSeed; - return this; - } - - public Builder setNumTopClasses(Integer numTopClasses) { - this.numTopClasses = numTopClasses; - return this; - } - - public Builder setClassAssignmentObjective(ClassAssignmentObjective classAssignmentObjective) { - this.classAssignmentObjective = classAssignmentObjective; - return this; - } - - public Builder setFeatureProcessors(List featureProcessors) { - this.featureProcessors = featureProcessors; - return this; - } - - public Builder setAlpha(Double alpha) { - this.alpha = alpha; - return this; - } - - public Builder setEtaGrowthRatePerTree(Double etaGrowthRatePerTree) { - this.etaGrowthRatePerTree = etaGrowthRatePerTree; - return this; - } - - public Builder setSoftTreeDepthLimit(Double softTreeDepthLimit) { - this.softTreeDepthLimit = softTreeDepthLimit; - return this; - } - - public Builder setSoftTreeDepthTolerance(Double softTreeDepthTolerance) { - this.softTreeDepthTolerance = softTreeDepthTolerance; - return this; - } - - public Builder setDownsampleFactor(Double downsampleFactor) { - this.downsampleFactor = downsampleFactor; - return this; - } - - public Builder setMaxOptimizationRoundsPerHyperparameter(Integer maxOptimizationRoundsPerHyperparameter) { - this.maxOptimizationRoundsPerHyperparameter = maxOptimizationRoundsPerHyperparameter; - return this; - } - - public Builder setEarlyStoppingEnabled(Boolean earlyStoppingEnabled) { - this.earlyStoppingEnabled = earlyStoppingEnabled; - return this; - } - 
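// A minimal usage sketch of the fluent builder above, using only setters shown in this
// file and the build() method directly below; the dependent-variable name and parameter
// values are hypothetical:
//
//   Classification analysis = Classification.builder("flight_delayed")
//       .setMaxTrees(100)
//       .setTrainingPercent(80.0)
//       .setNumTopClasses(2)
//       .setEarlyStoppingEnabled(true)
//       .build();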
- public Classification build() { - return new Classification( - dependentVariable, - lambda, - gamma, - eta, - maxTrees, - featureBagFraction, - numTopFeatureImportanceValues, - predictionFieldName, - trainingPercent, - numTopClasses, - randomizeSeed, - classAssignmentObjective, - featureProcessors, - alpha, - etaGrowthRatePerTree, - softTreeDepthLimit, - softTreeDepthTolerance, - downsampleFactor, - maxOptimizationRoundsPerHyperparameter, - earlyStoppingEnabled - ); - } - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/DataFrameAnalysis.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/DataFrameAnalysis.java deleted file mode 100644 index e7c13da72880a..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/DataFrameAnalysis.java +++ /dev/null @@ -1,16 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ - -package org.elasticsearch.client.ml.dataframe; - -import org.elasticsearch.xcontent.ToXContentObject; - -public interface DataFrameAnalysis extends ToXContentObject { - - String getName(); -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/DataFrameAnalyticsConfig.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/DataFrameAnalyticsConfig.java deleted file mode 100644 index 53c92d792fe20..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/DataFrameAnalyticsConfig.java +++ /dev/null @@ -1,341 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ - -package org.elasticsearch.client.ml.dataframe; - -import org.elasticsearch.Version; -import org.elasticsearch.client.common.TimeUtil; -import org.elasticsearch.common.Strings; -import org.elasticsearch.common.unit.ByteSizeValue; -import org.elasticsearch.common.xcontent.XContentParserUtils; -import org.elasticsearch.core.Nullable; -import org.elasticsearch.search.fetch.subphase.FetchSourceContext; -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ObjectParser.ValueType; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.time.Instant; -import java.util.Objects; - -public class DataFrameAnalyticsConfig implements ToXContentObject { - - public static DataFrameAnalyticsConfig fromXContent(XContentParser parser) { - return PARSER.apply(parser, null).build(); - } - - public static Builder builder() { - return new Builder(); - } - - static final ParseField ID = new ParseField("id"); - static final ParseField DESCRIPTION = new ParseField("description"); - static final ParseField SOURCE = new ParseField("source"); - static final ParseField DEST = new ParseField("dest"); - static final ParseField ANALYSIS = new ParseField("analysis"); - static final ParseField ANALYZED_FIELDS = new ParseField("analyzed_fields"); - static final ParseField MODEL_MEMORY_LIMIT = new ParseField("model_memory_limit"); - static final ParseField CREATE_TIME = new ParseField("create_time"); - static final ParseField VERSION = new ParseField("version"); - static final ParseField ALLOW_LAZY_START = new ParseField("allow_lazy_start"); - static final ParseField MAX_NUM_THREADS = new ParseField("max_num_threads"); - - private static final ObjectParser PARSER = new ObjectParser<>("data_frame_analytics_config", true, Builder::new); - - static { - PARSER.declareString(Builder::setId, ID); - PARSER.declareString(Builder::setDescription, DESCRIPTION); - PARSER.declareObject(Builder::setSource, (p, c) -> DataFrameAnalyticsSource.fromXContent(p), SOURCE); - PARSER.declareObject(Builder::setDest, (p, c) -> DataFrameAnalyticsDest.fromXContent(p), DEST); - PARSER.declareObject(Builder::setAnalysis, (p, c) -> parseAnalysis(p), ANALYSIS); - PARSER.declareField( - Builder::setAnalyzedFields, - (p, c) -> FetchSourceContext.fromXContent(p), - ANALYZED_FIELDS, - ValueType.OBJECT_ARRAY_BOOLEAN_OR_STRING - ); - PARSER.declareField( - Builder::setModelMemoryLimit, - (p, c) -> ByteSizeValue.parseBytesSizeValue(p.text(), MODEL_MEMORY_LIMIT.getPreferredName()), - MODEL_MEMORY_LIMIT, - ValueType.VALUE - ); - PARSER.declareField( - Builder::setCreateTime, - p -> TimeUtil.parseTimeFieldToInstant(p, CREATE_TIME.getPreferredName()), - CREATE_TIME, - ValueType.VALUE - ); - PARSER.declareString(Builder::setVersion, Version::fromString, VERSION); - PARSER.declareBoolean(Builder::setAllowLazyStart, ALLOW_LAZY_START); - PARSER.declareInt(Builder::setMaxNumThreads, MAX_NUM_THREADS); - } - - private static DataFrameAnalysis parseAnalysis(XContentParser parser) throws IOException { - XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.currentToken(), parser); - XContentParserUtils.ensureExpectedToken(XContentParser.Token.FIELD_NAME, parser.nextToken(), parser); - DataFrameAnalysis analysis = parser.namedObject(DataFrameAnalysis.class, parser.currentName(), true); - 
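// At this point parseAnalysis has consumed the wrapper's START_OBJECT and the single field
// name that selects the concrete DataFrameAnalysis implementation, so the next token must
// close the wrapper. The expected shape is, for example:
//
//   "analysis": { "classification": { "dependent_variable": "label", ... } }
//
// (inner object shown with hypothetical values; the field name varies by analysis type).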
XContentParserUtils.ensureExpectedToken(XContentParser.Token.END_OBJECT, parser.nextToken(), parser); - return analysis; - } - - private final String id; - private final String description; - private final DataFrameAnalyticsSource source; - private final DataFrameAnalyticsDest dest; - private final DataFrameAnalysis analysis; - private final FetchSourceContext analyzedFields; - private final ByteSizeValue modelMemoryLimit; - private final Instant createTime; - private final Version version; - private final Boolean allowLazyStart; - private final Integer maxNumThreads; - - private DataFrameAnalyticsConfig( - @Nullable String id, - @Nullable String description, - @Nullable DataFrameAnalyticsSource source, - @Nullable DataFrameAnalyticsDest dest, - @Nullable DataFrameAnalysis analysis, - @Nullable FetchSourceContext analyzedFields, - @Nullable ByteSizeValue modelMemoryLimit, - @Nullable Instant createTime, - @Nullable Version version, - @Nullable Boolean allowLazyStart, - @Nullable Integer maxNumThreads - ) { - this.id = id; - this.description = description; - this.source = source; - this.dest = dest; - this.analysis = analysis; - this.analyzedFields = analyzedFields; - this.modelMemoryLimit = modelMemoryLimit; - this.createTime = createTime == null ? null : Instant.ofEpochMilli(createTime.toEpochMilli()); - ; - this.version = version; - this.allowLazyStart = allowLazyStart; - this.maxNumThreads = maxNumThreads; - } - - public String getId() { - return id; - } - - public String getDescription() { - return description; - } - - public DataFrameAnalyticsSource getSource() { - return source; - } - - public DataFrameAnalyticsDest getDest() { - return dest; - } - - public DataFrameAnalysis getAnalysis() { - return analysis; - } - - public FetchSourceContext getAnalyzedFields() { - return analyzedFields; - } - - public ByteSizeValue getModelMemoryLimit() { - return modelMemoryLimit; - } - - public Instant getCreateTime() { - return createTime; - } - - public Version getVersion() { - return version; - } - - public Boolean getAllowLazyStart() { - return allowLazyStart; - } - - public Integer getMaxNumThreads() { - return maxNumThreads; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - if (id != null) { - builder.field(ID.getPreferredName(), id); - } - if (description != null) { - builder.field(DESCRIPTION.getPreferredName(), description); - } - if (source != null) { - builder.field(SOURCE.getPreferredName(), source); - } - if (dest != null) { - builder.field(DEST.getPreferredName(), dest); - } - if (analysis != null) { - builder.startObject(ANALYSIS.getPreferredName()).field(analysis.getName(), analysis).endObject(); - } - if (analyzedFields != null) { - builder.field(ANALYZED_FIELDS.getPreferredName(), analyzedFields); - } - if (modelMemoryLimit != null) { - builder.field(MODEL_MEMORY_LIMIT.getPreferredName(), modelMemoryLimit.getStringRep()); - } - if (createTime != null) { - builder.timeField(CREATE_TIME.getPreferredName(), CREATE_TIME.getPreferredName() + "_string", createTime.toEpochMilli()); - } - if (version != null) { - builder.field(VERSION.getPreferredName(), version); - } - if (allowLazyStart != null) { - builder.field(ALLOW_LAZY_START.getPreferredName(), allowLazyStart); - } - if (maxNumThreads != null) { - builder.field(MAX_NUM_THREADS.getPreferredName(), maxNumThreads); - } - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object o) { - if (o == this) return true; 
- if (o == null || getClass() != o.getClass()) return false; - - DataFrameAnalyticsConfig other = (DataFrameAnalyticsConfig) o; - return Objects.equals(id, other.id) - && Objects.equals(description, other.description) - && Objects.equals(source, other.source) - && Objects.equals(dest, other.dest) - && Objects.equals(analysis, other.analysis) - && Objects.equals(analyzedFields, other.analyzedFields) - && Objects.equals(modelMemoryLimit, other.modelMemoryLimit) - && Objects.equals(createTime, other.createTime) - && Objects.equals(version, other.version) - && Objects.equals(allowLazyStart, other.allowLazyStart) - && Objects.equals(maxNumThreads, other.maxNumThreads); - } - - @Override - public int hashCode() { - return Objects.hash( - id, - description, - source, - dest, - analysis, - analyzedFields, - modelMemoryLimit, - createTime, - version, - allowLazyStart, - maxNumThreads - ); - } - - @Override - public String toString() { - return Strings.toString(this); - } - - public static class Builder { - - private String id; - private String description; - private DataFrameAnalyticsSource source; - private DataFrameAnalyticsDest dest; - private DataFrameAnalysis analysis; - private FetchSourceContext analyzedFields; - private ByteSizeValue modelMemoryLimit; - private Instant createTime; - private Version version; - private Boolean allowLazyStart; - private Integer maxNumThreads; - - private Builder() {} - - public Builder setId(String id) { - this.id = Objects.requireNonNull(id); - return this; - } - - public Builder setDescription(String description) { - this.description = description; - return this; - } - - public Builder setSource(DataFrameAnalyticsSource source) { - this.source = Objects.requireNonNull(source); - return this; - } - - public Builder setDest(DataFrameAnalyticsDest dest) { - this.dest = Objects.requireNonNull(dest); - return this; - } - - public Builder setAnalysis(DataFrameAnalysis analysis) { - this.analysis = Objects.requireNonNull(analysis); - return this; - } - - public Builder setAnalyzedFields(FetchSourceContext fields) { - this.analyzedFields = fields; - return this; - } - - public Builder setModelMemoryLimit(ByteSizeValue modelMemoryLimit) { - this.modelMemoryLimit = modelMemoryLimit; - return this; - } - - Builder setCreateTime(Instant createTime) { - this.createTime = createTime; - return this; - } - - Builder setVersion(Version version) { - this.version = version; - return this; - } - - public Builder setAllowLazyStart(Boolean allowLazyStart) { - this.allowLazyStart = allowLazyStart; - return this; - } - - public Builder setMaxNumThreads(Integer maxNumThreads) { - this.maxNumThreads = maxNumThreads; - return this; - } - - public DataFrameAnalyticsConfig build() { - return new DataFrameAnalyticsConfig( - id, - description, - source, - dest, - analysis, - analyzedFields, - modelMemoryLimit, - createTime, - version, - allowLazyStart, - maxNumThreads - ); - } - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/DataFrameAnalyticsConfigUpdate.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/DataFrameAnalyticsConfigUpdate.java deleted file mode 100644 index 4dccee1019ce1..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/DataFrameAnalyticsConfigUpdate.java +++ /dev/null @@ -1,174 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. 
Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.dataframe; - -import org.elasticsearch.common.unit.ByteSizeValue; -import org.elasticsearch.core.Nullable; -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Objects; - -import static org.elasticsearch.xcontent.ObjectParser.ValueType.VALUE; - -public class DataFrameAnalyticsConfigUpdate implements ToXContentObject { - - public static DataFrameAnalyticsConfigUpdate fromXContent(XContentParser parser) { - return PARSER.apply(parser, null).build(); - } - - public static Builder builder() { - return new Builder(); - } - - public static final ObjectParser PARSER = new ObjectParser<>("data_frame_analytics_config_update", true, Builder::new); - - static { - PARSER.declareString(Builder::setId, DataFrameAnalyticsConfig.ID); - PARSER.declareStringOrNull(Builder::setDescription, DataFrameAnalyticsConfig.DESCRIPTION); - PARSER.declareField( - Builder::setModelMemoryLimit, - (p, c) -> ByteSizeValue.parseBytesSizeValue(p.text(), DataFrameAnalyticsConfig.MODEL_MEMORY_LIMIT.getPreferredName()), - DataFrameAnalyticsConfig.MODEL_MEMORY_LIMIT, - VALUE - ); - PARSER.declareBoolean(Builder::setAllowLazyStart, DataFrameAnalyticsConfig.ALLOW_LAZY_START); - PARSER.declareInt(Builder::setMaxNumThreads, DataFrameAnalyticsConfig.MAX_NUM_THREADS); - } - - private final String id; - private final String description; - private final ByteSizeValue modelMemoryLimit; - private final Boolean allowLazyStart; - private final Integer maxNumThreads; - - private DataFrameAnalyticsConfigUpdate( - String id, - @Nullable String description, - @Nullable ByteSizeValue modelMemoryLimit, - @Nullable Boolean allowLazyStart, - @Nullable Integer maxNumThreads - ) { - this.id = id; - this.description = description; - this.modelMemoryLimit = modelMemoryLimit; - this.allowLazyStart = allowLazyStart; - this.maxNumThreads = maxNumThreads; - } - - public String getId() { - return id; - } - - public String getDescription() { - return description; - } - - public ByteSizeValue getModelMemoryLimit() { - return modelMemoryLimit; - } - - public Boolean isAllowLazyStart() { - return allowLazyStart; - } - - public Integer getMaxNumThreads() { - return maxNumThreads; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(DataFrameAnalyticsConfig.ID.getPreferredName(), id); - if (description != null) { - builder.field(DataFrameAnalyticsConfig.DESCRIPTION.getPreferredName(), description); - } - if (modelMemoryLimit != null) { - builder.field(DataFrameAnalyticsConfig.MODEL_MEMORY_LIMIT.getPreferredName(), modelMemoryLimit.getStringRep()); - } - if (allowLazyStart != null) { - builder.field(DataFrameAnalyticsConfig.ALLOW_LAZY_START.getPreferredName(), allowLazyStart); - } - if (maxNumThreads != null) { - builder.field(DataFrameAnalyticsConfig.MAX_NUM_THREADS.getPreferredName(), maxNumThreads); - } - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object other) { - if (this == other) { - return true; - } - - if (other instanceof DataFrameAnalyticsConfigUpdate == 
false) { - return false; - } - - DataFrameAnalyticsConfigUpdate that = (DataFrameAnalyticsConfigUpdate) other; - - return Objects.equals(this.id, that.id) - && Objects.equals(this.description, that.description) - && Objects.equals(this.modelMemoryLimit, that.modelMemoryLimit) - && Objects.equals(this.allowLazyStart, that.allowLazyStart) - && Objects.equals(this.maxNumThreads, that.maxNumThreads); - } - - @Override - public int hashCode() { - return Objects.hash(id, description, modelMemoryLimit, allowLazyStart, maxNumThreads); - } - - public static class Builder { - - private String id; - private String description; - private ByteSizeValue modelMemoryLimit; - private Boolean allowLazyStart; - private Integer maxNumThreads; - - private Builder() {} - - public String getId() { - return id; - } - - public Builder setId(String id) { - this.id = id; - return this; - } - - public Builder setDescription(String description) { - this.description = description; - return this; - } - - public Builder setModelMemoryLimit(ByteSizeValue modelMemoryLimit) { - this.modelMemoryLimit = modelMemoryLimit; - return this; - } - - public Builder setAllowLazyStart(Boolean allowLazyStart) { - this.allowLazyStart = allowLazyStart; - return this; - } - - public Builder setMaxNumThreads(Integer maxNumThreads) { - this.maxNumThreads = maxNumThreads; - return this; - } - - public DataFrameAnalyticsConfigUpdate build() { - return new DataFrameAnalyticsConfigUpdate(id, description, modelMemoryLimit, allowLazyStart, maxNumThreads); - } - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/DataFrameAnalyticsDest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/DataFrameAnalyticsDest.java deleted file mode 100644 index fe576411f131b..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/DataFrameAnalyticsDest.java +++ /dev/null @@ -1,111 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ - -package org.elasticsearch.client.ml.dataframe; - -import org.elasticsearch.common.Strings; -import org.elasticsearch.core.Nullable; -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Objects; - -import static java.util.Objects.requireNonNull; - -public class DataFrameAnalyticsDest implements ToXContentObject { - - public static DataFrameAnalyticsDest fromXContent(XContentParser parser) { - return PARSER.apply(parser, null).build(); - } - - public static Builder builder() { - return new Builder(); - } - - private static final ParseField INDEX = new ParseField("index"); - private static final ParseField RESULTS_FIELD = new ParseField("results_field"); - - private static final ObjectParser PARSER = new ObjectParser<>("data_frame_analytics_dest", true, Builder::new); - - static { - PARSER.declareString(Builder::setIndex, INDEX); - PARSER.declareString(Builder::setResultsField, RESULTS_FIELD); - } - - private final String index; - private final String resultsField; - - private DataFrameAnalyticsDest(String index, @Nullable String resultsField) { - this.index = requireNonNull(index); - this.resultsField = resultsField; - } - - public String getIndex() { - return index; - } - - public String getResultsField() { - return resultsField; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(INDEX.getPreferredName(), index); - if (resultsField != null) { - builder.field(RESULTS_FIELD.getPreferredName(), resultsField); - } - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object o) { - if (o == this) return true; - if (o == null || getClass() != o.getClass()) return false; - - DataFrameAnalyticsDest other = (DataFrameAnalyticsDest) o; - return Objects.equals(index, other.index) && Objects.equals(resultsField, other.resultsField); - } - - @Override - public int hashCode() { - return Objects.hash(index, resultsField); - } - - @Override - public String toString() { - return Strings.toString(this); - } - - public static class Builder { - - private String index; - private String resultsField; - - private Builder() {} - - public Builder setIndex(String index) { - this.index = index; - return this; - } - - public Builder setResultsField(String resultsField) { - this.resultsField = resultsField; - return this; - } - - public DataFrameAnalyticsDest build() { - return new DataFrameAnalyticsDest(index, resultsField); - } - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/DataFrameAnalyticsSource.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/DataFrameAnalyticsSource.java deleted file mode 100644 index da9cf7aa15b44..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/DataFrameAnalyticsSource.java +++ /dev/null @@ -1,165 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ - -package org.elasticsearch.client.ml.dataframe; - -import org.elasticsearch.common.Strings; -import org.elasticsearch.core.Nullable; -import org.elasticsearch.search.builder.SearchSourceBuilder; -import org.elasticsearch.search.fetch.subphase.FetchSourceContext; -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Arrays; -import java.util.List; -import java.util.Map; -import java.util.Objects; - -public class DataFrameAnalyticsSource implements ToXContentObject { - - public static DataFrameAnalyticsSource fromXContent(XContentParser parser) { - return PARSER.apply(parser, null).build(); - } - - public static Builder builder() { - return new Builder(); - } - - private static final ParseField INDEX = new ParseField("index"); - private static final ParseField QUERY = new ParseField("query"); - public static final ParseField _SOURCE = new ParseField("_source"); - - private static final ObjectParser PARSER = new ObjectParser<>("data_frame_analytics_source", true, Builder::new); - - static { - PARSER.declareStringArray(Builder::setIndex, INDEX); - PARSER.declareObject(Builder::setQueryConfig, (p, c) -> QueryConfig.fromXContent(p), QUERY); - PARSER.declareField( - Builder::setSourceFiltering, - (p, c) -> FetchSourceContext.fromXContent(p), - _SOURCE, - ObjectParser.ValueType.OBJECT_ARRAY_BOOLEAN_OR_STRING - ); - PARSER.declareObject(Builder::setRuntimeMappings, (p, c) -> p.map(), SearchSourceBuilder.RUNTIME_MAPPINGS_FIELD); - } - - private final String[] index; - private final QueryConfig queryConfig; - private final FetchSourceContext sourceFiltering; - private final Map runtimeMappings; - - private DataFrameAnalyticsSource( - String[] index, - @Nullable QueryConfig queryConfig, - @Nullable FetchSourceContext sourceFiltering, - @Nullable Map runtimeMappings - ) { - this.index = Objects.requireNonNull(index); - this.queryConfig = queryConfig; - this.sourceFiltering = sourceFiltering; - this.runtimeMappings = runtimeMappings; - } - - public String[] getIndex() { - return index; - } - - public QueryConfig getQueryConfig() { - return queryConfig; - } - - public FetchSourceContext getSourceFiltering() { - return sourceFiltering; - } - - public Map getRuntimeMappings() { - return runtimeMappings; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(INDEX.getPreferredName(), index); - if (queryConfig != null) { - builder.field(QUERY.getPreferredName(), queryConfig.getQuery()); - } - if (sourceFiltering != null) { - builder.field(_SOURCE.getPreferredName(), sourceFiltering); - } - if (runtimeMappings != null) { - builder.field(SearchSourceBuilder.RUNTIME_MAPPINGS_FIELD.getPreferredName(), runtimeMappings); - } - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object o) { - if (o == this) return true; - if (o == null || getClass() != o.getClass()) return false; - - DataFrameAnalyticsSource other = (DataFrameAnalyticsSource) o; - return Arrays.equals(index, other.index) - && Objects.equals(queryConfig, other.queryConfig) - && Objects.equals(sourceFiltering, other.sourceFiltering) - && Objects.equals(runtimeMappings, other.runtimeMappings); - } - - @Override - public int hashCode() { - return 
Objects.hash(Arrays.asList(index), queryConfig, sourceFiltering, runtimeMappings); - } - - @Override - public String toString() { - return Strings.toString(this); - } - - public static class Builder { - - private String[] index; - private QueryConfig queryConfig; - private FetchSourceContext sourceFiltering; - private Map runtimeMappings; - - private Builder() {} - - public Builder setIndex(String... index) { - this.index = index; - return this; - } - - public Builder setIndex(List index) { - this.index = index.toArray(new String[0]); - return this; - } - - public Builder setQueryConfig(QueryConfig queryConfig) { - this.queryConfig = queryConfig; - return this; - } - - public Builder setSourceFiltering(FetchSourceContext sourceFiltering) { - this.sourceFiltering = sourceFiltering; - return this; - } - - public Builder setRuntimeMappings(Map runtimeMappings) { - this.runtimeMappings = runtimeMappings; - return this; - } - - public DataFrameAnalyticsSource build() { - return new DataFrameAnalyticsSource(index, queryConfig, sourceFiltering, runtimeMappings); - } - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/DataFrameAnalyticsState.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/DataFrameAnalyticsState.java deleted file mode 100644 index 157ebe614f761..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/DataFrameAnalyticsState.java +++ /dev/null @@ -1,29 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ - -package org.elasticsearch.client.ml.dataframe; - -import java.util.Locale; - -public enum DataFrameAnalyticsState { - STARTED, - REINDEXING, - ANALYZING, - STOPPING, - STOPPED, - STARTING, - FAILED; - - public static DataFrameAnalyticsState fromString(String name) { - return valueOf(name.trim().toUpperCase(Locale.ROOT)); - } - - public String value() { - return name().toLowerCase(Locale.ROOT); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/DataFrameAnalyticsStats.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/DataFrameAnalyticsStats.java deleted file mode 100644 index 75eb216aed402..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/DataFrameAnalyticsStats.java +++ /dev/null @@ -1,188 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */
-
-package org.elasticsearch.client.ml.dataframe;
-
-import org.elasticsearch.client.ml.NodeAttributes;
-import org.elasticsearch.client.ml.dataframe.stats.AnalysisStats;
-import org.elasticsearch.client.ml.dataframe.stats.common.DataCounts;
-import org.elasticsearch.client.ml.dataframe.stats.common.MemoryUsage;
-import org.elasticsearch.common.inject.internal.ToStringBuilder;
-import org.elasticsearch.common.xcontent.XContentParserUtils;
-import org.elasticsearch.core.Nullable;
-import org.elasticsearch.xcontent.ConstructingObjectParser;
-import org.elasticsearch.xcontent.ParseField;
-import org.elasticsearch.xcontent.XContentParser;
-
-import java.io.IOException;
-import java.util.List;
-import java.util.Objects;
-
-import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg;
-import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg;
-
-public class DataFrameAnalyticsStats {
-
-    public static DataFrameAnalyticsStats fromXContent(XContentParser parser) throws IOException {
-        return PARSER.parse(parser, null);
-    }
-
-    static final ParseField ID = new ParseField("id");
-    static final ParseField STATE = new ParseField("state");
-    static final ParseField FAILURE_REASON = new ParseField("failure_reason");
-    static final ParseField PROGRESS = new ParseField("progress");
-    static final ParseField DATA_COUNTS = new ParseField("data_counts");
-    static final ParseField MEMORY_USAGE = new ParseField("memory_usage");
-    static final ParseField ANALYSIS_STATS = new ParseField("analysis_stats");
-    static final ParseField NODE = new ParseField("node");
-    static final ParseField ASSIGNMENT_EXPLANATION = new ParseField("assignment_explanation");
-
-    @SuppressWarnings("unchecked")
-    private static final ConstructingObjectParser<DataFrameAnalyticsStats, Void> PARSER = new ConstructingObjectParser<>(
-        "data_frame_analytics_stats",
-        true,
-        args -> new DataFrameAnalyticsStats(
-            (String) args[0],
-            (DataFrameAnalyticsState) args[1],
-            (String) args[2],
-            (List<PhaseProgress>) args[3],
-            (DataCounts) args[4],
-            (MemoryUsage) args[5],
-            (AnalysisStats) args[6],
-            (NodeAttributes) args[7],
-            (String) args[8]
-        )
-    );
-
-    static {
-        PARSER.declareString(constructorArg(), ID);
-        PARSER.declareString(constructorArg(), DataFrameAnalyticsState::fromString, STATE);
-        PARSER.declareString(optionalConstructorArg(), FAILURE_REASON);
-        PARSER.declareObjectArray(optionalConstructorArg(), PhaseProgress.PARSER, PROGRESS);
-        PARSER.declareObject(optionalConstructorArg(), DataCounts.PARSER, DATA_COUNTS);
-        PARSER.declareObject(optionalConstructorArg(), MemoryUsage.PARSER, MEMORY_USAGE);
-        PARSER.declareObject(optionalConstructorArg(), (p, c) -> parseAnalysisStats(p), ANALYSIS_STATS);
-        PARSER.declareObject(optionalConstructorArg(), NodeAttributes.PARSER, NODE);
-        PARSER.declareString(optionalConstructorArg(), ASSIGNMENT_EXPLANATION);
-    }
-
-    private static AnalysisStats parseAnalysisStats(XContentParser parser) throws IOException {
-        XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.currentToken(), parser);
-        XContentParserUtils.ensureExpectedToken(XContentParser.Token.FIELD_NAME, parser.nextToken(), parser);
-        AnalysisStats analysisStats = parser.namedObject(AnalysisStats.class, parser.currentName(), true);
-        XContentParserUtils.ensureExpectedToken(XContentParser.Token.END_OBJECT, parser.nextToken(), parser);
-        return analysisStats;
-    }
-
-    private final String id;
-    private final DataFrameAnalyticsState state;
-    private final String failureReason;
-    private final List<PhaseProgress> progress;
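// parseAnalysisStats above mirrors DataFrameAnalyticsConfig.parseAnalysis: it unwraps a
// single-field object whose field name picks the named AnalysisStats implementation, i.e.
// a shape along the lines of (stats type name hypothetical):
//
//   "analysis_stats": { "classification_stats": { ... } }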
private final DataCounts dataCounts; - private final MemoryUsage memoryUsage; - private final AnalysisStats analysisStats; - private final NodeAttributes node; - private final String assignmentExplanation; - - public DataFrameAnalyticsStats( - String id, - DataFrameAnalyticsState state, - @Nullable String failureReason, - @Nullable List progress, - @Nullable DataCounts dataCounts, - @Nullable MemoryUsage memoryUsage, - @Nullable AnalysisStats analysisStats, - @Nullable NodeAttributes node, - @Nullable String assignmentExplanation - ) { - this.id = id; - this.state = state; - this.failureReason = failureReason; - this.progress = progress; - this.dataCounts = dataCounts; - this.memoryUsage = memoryUsage; - this.analysisStats = analysisStats; - this.node = node; - this.assignmentExplanation = assignmentExplanation; - } - - public String getId() { - return id; - } - - public DataFrameAnalyticsState getState() { - return state; - } - - public String getFailureReason() { - return failureReason; - } - - public List getProgress() { - return progress; - } - - @Nullable - public DataCounts getDataCounts() { - return dataCounts; - } - - @Nullable - public MemoryUsage getMemoryUsage() { - return memoryUsage; - } - - @Nullable - public AnalysisStats getAnalysisStats() { - return analysisStats; - } - - public NodeAttributes getNode() { - return node; - } - - public String getAssignmentExplanation() { - return assignmentExplanation; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - - DataFrameAnalyticsStats other = (DataFrameAnalyticsStats) o; - return Objects.equals(id, other.id) - && Objects.equals(state, other.state) - && Objects.equals(failureReason, other.failureReason) - && Objects.equals(progress, other.progress) - && Objects.equals(dataCounts, other.dataCounts) - && Objects.equals(memoryUsage, other.memoryUsage) - && Objects.equals(analysisStats, other.analysisStats) - && Objects.equals(node, other.node) - && Objects.equals(assignmentExplanation, other.assignmentExplanation); - } - - @Override - public int hashCode() { - return Objects.hash(id, state, failureReason, progress, dataCounts, memoryUsage, analysisStats, node, assignmentExplanation); - } - - @Override - public String toString() { - return new ToStringBuilder(getClass()).add("id", id) - .add("state", state) - .add("failureReason", failureReason) - .add("progress", progress) - .add("dataCounts", dataCounts) - .add("memoryUsage", memoryUsage) - .add("analysisStats", analysisStats) - .add("node", node) - .add("assignmentExplanation", assignmentExplanation) - .toString(); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/MlDataFrameAnalysisNamedXContentProvider.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/MlDataFrameAnalysisNamedXContentProvider.java deleted file mode 100644 index 562409b53df8d..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/MlDataFrameAnalysisNamedXContentProvider.java +++ /dev/null @@ -1,26 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ -package org.elasticsearch.client.ml.dataframe; - -import org.elasticsearch.plugins.spi.NamedXContentProvider; -import org.elasticsearch.xcontent.NamedXContentRegistry; - -import java.util.Arrays; -import java.util.List; - -public class MlDataFrameAnalysisNamedXContentProvider implements NamedXContentProvider { - - @Override - public List getNamedXContentParsers() { - return Arrays.asList( - new NamedXContentRegistry.Entry(DataFrameAnalysis.class, OutlierDetection.NAME, (p, c) -> OutlierDetection.fromXContent(p)), - new NamedXContentRegistry.Entry(DataFrameAnalysis.class, Regression.NAME, (p, c) -> Regression.fromXContent(p)), - new NamedXContentRegistry.Entry(DataFrameAnalysis.class, Classification.NAME, (p, c) -> Classification.fromXContent(p)) - ); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/OutlierDetection.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/OutlierDetection.java deleted file mode 100644 index d514de3558db6..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/OutlierDetection.java +++ /dev/null @@ -1,253 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ - -package org.elasticsearch.client.ml.dataframe; - -import org.elasticsearch.common.Strings; -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Locale; -import java.util.Objects; - -public class OutlierDetection implements DataFrameAnalysis { - - public static OutlierDetection fromXContent(XContentParser parser) { - return PARSER.apply(parser, null).build(); - } - - public static OutlierDetection createDefault() { - return builder().build(); - } - - public static Builder builder() { - return new Builder(); - } - - public static final ParseField NAME = new ParseField("outlier_detection"); - static final ParseField N_NEIGHBORS = new ParseField("n_neighbors"); - static final ParseField METHOD = new ParseField("method"); - public static final ParseField FEATURE_INFLUENCE_THRESHOLD = new ParseField("feature_influence_threshold"); - static final ParseField COMPUTE_FEATURE_INFLUENCE = new ParseField("compute_feature_influence"); - static final ParseField OUTLIER_FRACTION = new ParseField("outlier_fraction"); - static final ParseField STANDARDIZATION_ENABLED = new ParseField("standardization_enabled"); - - private static final ObjectParser PARSER = new ObjectParser<>(NAME.getPreferredName(), true, Builder::new); - - static { - PARSER.declareInt(Builder::setNNeighbors, N_NEIGHBORS); - PARSER.declareString(Builder::setMethod, Method::fromString, METHOD); - PARSER.declareDouble(Builder::setFeatureInfluenceThreshold, FEATURE_INFLUENCE_THRESHOLD); - PARSER.declareBoolean(Builder::setComputeFeatureInfluence, COMPUTE_FEATURE_INFLUENCE); - PARSER.declareDouble(Builder::setOutlierFraction, OUTLIER_FRACTION); - PARSER.declareBoolean(Builder::setStandardizationEnabled, STANDARDIZATION_ENABLED); - } - - /** - * The number of neighbors. Leave unspecified for dynamic detection. 
- */ - private final Integer nNeighbors; - - /** - * The method. Leave unspecified for a dynamic mixture of methods. - */ - private final Method method; - - /** - * The min outlier score required to calculate feature influence. Defaults to 0.1. - */ - private final Double featureInfluenceThreshold; - - /** - * Whether to compute feature influence or not. Defaults to true. - */ - private final Boolean computeFeatureInfluence; - - /** - * The proportion of data assumed to be outlying prior to outlier detection. Defaults to 0.05. - */ - private final Double outlierFraction; - - /** - * Whether to perform standardization. - */ - private final Boolean standardizationEnabled; - - private OutlierDetection( - Integer nNeighbors, - Method method, - Double featureInfluenceThreshold, - Boolean computeFeatureInfluence, - Double outlierFraction, - Boolean standardizationEnabled - ) { - this.nNeighbors = nNeighbors; - this.method = method; - this.featureInfluenceThreshold = featureInfluenceThreshold; - this.computeFeatureInfluence = computeFeatureInfluence; - this.outlierFraction = outlierFraction; - this.standardizationEnabled = standardizationEnabled; - } - - @Override - public String getName() { - return NAME.getPreferredName(); - } - - public Integer getNNeighbors() { - return nNeighbors; - } - - public Method getMethod() { - return method; - } - - public Double getFeatureInfluenceThreshold() { - return featureInfluenceThreshold; - } - - public Boolean getComputeFeatureInfluence() { - return computeFeatureInfluence; - } - - public Double getOutlierFraction() { - return outlierFraction; - } - - public Boolean getStandardizationEnabled() { - return standardizationEnabled; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - if (nNeighbors != null) { - builder.field(N_NEIGHBORS.getPreferredName(), nNeighbors); - } - if (method != null) { - builder.field(METHOD.getPreferredName(), method); - } - if (featureInfluenceThreshold != null) { - builder.field(FEATURE_INFLUENCE_THRESHOLD.getPreferredName(), featureInfluenceThreshold); - } - if (computeFeatureInfluence != null) { - builder.field(COMPUTE_FEATURE_INFLUENCE.getPreferredName(), computeFeatureInfluence); - } - if (outlierFraction != null) { - builder.field(OUTLIER_FRACTION.getPreferredName(), outlierFraction); - } - if (standardizationEnabled != null) { - builder.field(STANDARDIZATION_ENABLED.getPreferredName(), standardizationEnabled); - } - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - - OutlierDetection other = (OutlierDetection) o; - return Objects.equals(nNeighbors, other.nNeighbors) - && Objects.equals(method, other.method) - && Objects.equals(featureInfluenceThreshold, other.featureInfluenceThreshold) - && Objects.equals(computeFeatureInfluence, other.computeFeatureInfluence) - && Objects.equals(outlierFraction, other.outlierFraction) - && Objects.equals(standardizationEnabled, other.standardizationEnabled); - } - - @Override - public int hashCode() { - return Objects.hash( - nNeighbors, - method, - featureInfluenceThreshold, - computeFeatureInfluence, - outlierFraction, - standardizationEnabled - ); - } - - @Override - public String toString() { - return Strings.toString(this); - } - - public enum Method { - LOF, - LDOF, - DISTANCE_KTH_NN, - DISTANCE_KNN; - - public static Method fromString(String value) { - return 
Method.valueOf(value.toUpperCase(Locale.ROOT)); - } - - @Override - public String toString() { - return name().toLowerCase(Locale.ROOT); - } - } - - public static class Builder { - - private Integer nNeighbors; - private Method method; - private Double featureInfluenceThreshold; - private Boolean computeFeatureInfluence; - private Double outlierFraction; - private Boolean standardizationEnabled; - - private Builder() {} - - public Builder setNNeighbors(Integer nNeighborsValue) { - this.nNeighbors = nNeighborsValue; - return this; - } - - public Builder setMethod(Method method) { - this.method = method; - return this; - } - - public Builder setFeatureInfluenceThreshold(Double featureInfluenceThreshold) { - this.featureInfluenceThreshold = featureInfluenceThreshold; - return this; - } - - public Builder setComputeFeatureInfluence(Boolean computeFeatureInfluence) { - this.computeFeatureInfluence = computeFeatureInfluence; - return this; - } - - public Builder setOutlierFraction(Double outlierFraction) { - this.outlierFraction = outlierFraction; - return this; - } - - public Builder setStandardizationEnabled(Boolean standardizationEnabled) { - this.standardizationEnabled = standardizationEnabled; - return this; - } - - public OutlierDetection build() { - return new OutlierDetection( - nNeighbors, - method, - featureInfluenceThreshold, - computeFeatureInfluence, - outlierFraction, - standardizationEnabled - ); - } - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/PhaseProgress.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/PhaseProgress.java deleted file mode 100644 index f8d629586d2e7..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/PhaseProgress.java +++ /dev/null @@ -1,82 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ -package org.elasticsearch.client.ml.dataframe; - -import org.elasticsearch.common.inject.internal.ToStringBuilder; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Objects; - -/** - * A class that describes a phase and its progress as a percentage - */ -public class PhaseProgress implements ToXContentObject { - - static final ParseField PHASE = new ParseField("phase"); - static final ParseField PROGRESS_PERCENT = new ParseField("progress_percent"); - - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "phase_progress", - true, - a -> new PhaseProgress((String) a[0], (int) a[1]) - ); - - static { - PARSER.declareString(ConstructingObjectParser.constructorArg(), PHASE); - PARSER.declareInt(ConstructingObjectParser.constructorArg(), PROGRESS_PERCENT); - } - - private final String phase; - private final int progressPercent; - - public PhaseProgress(String phase, int progressPercent) { - this.phase = Objects.requireNonNull(phase); - this.progressPercent = progressPercent; - } - - public String getPhase() { - return phase; - } - - public int getProgressPercent() { - return progressPercent; - } - - @Override - public int hashCode() { - return Objects.hash(phase, progressPercent); - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - PhaseProgress that = (PhaseProgress) o; - return Objects.equals(phase, that.phase) && progressPercent == that.progressPercent; - } - - @Override - public String toString() { - return new ToStringBuilder(getClass()).add(PHASE.getPreferredName(), phase) - .add(PROGRESS_PERCENT.getPreferredName(), progressPercent) - .toString(); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(PhaseProgress.PHASE.getPreferredName(), phase); - builder.field(PhaseProgress.PROGRESS_PERCENT.getPreferredName(), progressPercent); - builder.endObject(); - return builder; - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/QueryConfig.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/QueryConfig.java deleted file mode 100644 index 73dd0d82b2221..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/QueryConfig.java +++ /dev/null @@ -1,71 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ - -package org.elasticsearch.client.ml.dataframe; - -import org.elasticsearch.common.Strings; -import org.elasticsearch.index.query.AbstractQueryBuilder; -import org.elasticsearch.index.query.QueryBuilder; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Objects; - -import static java.util.Objects.requireNonNull; - -/** - * Object for encapsulating the desired Query for a DataFrameAnalysis - */ -public class QueryConfig implements ToXContentObject { - - public static QueryConfig fromXContent(XContentParser parser) throws IOException { - QueryBuilder query = AbstractQueryBuilder.parseInnerQueryBuilder(parser); - return new QueryConfig(query); - } - - private final QueryBuilder query; - - public QueryConfig(QueryBuilder query) { - this.query = requireNonNull(query); - } - - public QueryConfig(QueryConfig queryConfig) { - this(requireNonNull(queryConfig).query); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - query.toXContent(builder, params); - return builder; - } - - public QueryBuilder getQuery() { - return query; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - - QueryConfig other = (QueryConfig) o; - return Objects.equals(query, other.query); - } - - @Override - public int hashCode() { - return Objects.hash(query); - } - - @Override - public String toString() { - return Strings.toString(this); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/Regression.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/Regression.java deleted file mode 100644 index 04f61d09305c2..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/Regression.java +++ /dev/null @@ -1,549 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
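QueryConfig, removed above, is a thin wrapper around any QueryBuilder. A sketch of typical construction, assuming org.elasticsearch.index.query.QueryBuilders is available; the field and value are placeholders:

    // Wrap any QueryBuilder; "status"/"active" are placeholder field and value.
    QueryConfig queryConfig = new QueryConfig(QueryBuilders.termQuery("status", "active"));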
- */ -package org.elasticsearch.client.ml.dataframe; - -import org.elasticsearch.client.ml.inference.NamedXContentObjectHelper; -import org.elasticsearch.client.ml.inference.preprocessing.PreProcessor; -import org.elasticsearch.common.Strings; -import org.elasticsearch.core.Nullable; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.List; -import java.util.Locale; -import java.util.Objects; - -import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; - -public class Regression implements DataFrameAnalysis { - - public static Regression fromXContent(XContentParser parser) { - return PARSER.apply(parser, null); - } - - public static Builder builder(String dependentVariable) { - return new Builder(dependentVariable); - } - - public static final ParseField NAME = new ParseField("regression"); - - static final ParseField DEPENDENT_VARIABLE = new ParseField("dependent_variable"); - static final ParseField LAMBDA = new ParseField("lambda"); - static final ParseField GAMMA = new ParseField("gamma"); - static final ParseField ETA = new ParseField("eta"); - static final ParseField MAX_TREES = new ParseField("max_trees"); - static final ParseField FEATURE_BAG_FRACTION = new ParseField("feature_bag_fraction"); - static final ParseField NUM_TOP_FEATURE_IMPORTANCE_VALUES = new ParseField("num_top_feature_importance_values"); - static final ParseField PREDICTION_FIELD_NAME = new ParseField("prediction_field_name"); - static final ParseField TRAINING_PERCENT = new ParseField("training_percent"); - static final ParseField RANDOMIZE_SEED = new ParseField("randomize_seed"); - static final ParseField LOSS_FUNCTION = new ParseField("loss_function"); - static final ParseField LOSS_FUNCTION_PARAMETER = new ParseField("loss_function_parameter"); - static final ParseField FEATURE_PROCESSORS = new ParseField("feature_processors"); - static final ParseField ALPHA = new ParseField("alpha"); - static final ParseField ETA_GROWTH_RATE_PER_TREE = new ParseField("eta_growth_rate_per_tree"); - static final ParseField SOFT_TREE_DEPTH_LIMIT = new ParseField("soft_tree_depth_limit"); - static final ParseField SOFT_TREE_DEPTH_TOLERANCE = new ParseField("soft_tree_depth_tolerance"); - static final ParseField DOWNSAMPLE_FACTOR = new ParseField("downsample_factor"); - static final ParseField MAX_OPTIMIZATION_ROUNDS_PER_HYPERPARAMETER = new ParseField("max_optimization_rounds_per_hyperparameter"); - static final ParseField EARLY_STOPPING_ENABLED = new ParseField("early_stopping_enabled"); - - @SuppressWarnings("unchecked") - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - NAME.getPreferredName(), - true, - a -> new Regression( - (String) a[0], - (Double) a[1], - (Double) a[2], - (Double) a[3], - (Integer) a[4], - (Double) a[5], - (Integer) a[6], - (String) a[7], - (Double) a[8], - (Long) a[9], - (LossFunction) a[10], - (Double) a[11], - (List) a[12], - (Double) a[13], - (Double) a[14], - (Double) a[15], - (Double) a[16], - (Double) a[17], - (Integer) a[18], - (Boolean) a[19] - ) - ); - - static { - PARSER.declareString(ConstructingObjectParser.constructorArg(), DEPENDENT_VARIABLE); - PARSER.declareDouble(ConstructingObjectParser.optionalConstructorArg(), LAMBDA); - PARSER.declareDouble(ConstructingObjectParser.optionalConstructorArg(), GAMMA); - 
PARSER.declareDouble(ConstructingObjectParser.optionalConstructorArg(), ETA); - PARSER.declareInt(ConstructingObjectParser.optionalConstructorArg(), MAX_TREES); - PARSER.declareDouble(ConstructingObjectParser.optionalConstructorArg(), FEATURE_BAG_FRACTION); - PARSER.declareInt(ConstructingObjectParser.optionalConstructorArg(), NUM_TOP_FEATURE_IMPORTANCE_VALUES); - PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), PREDICTION_FIELD_NAME); - PARSER.declareDouble(ConstructingObjectParser.optionalConstructorArg(), TRAINING_PERCENT); - PARSER.declareLong(ConstructingObjectParser.optionalConstructorArg(), RANDOMIZE_SEED); - PARSER.declareString(optionalConstructorArg(), LossFunction::fromString, LOSS_FUNCTION); - PARSER.declareDouble(ConstructingObjectParser.optionalConstructorArg(), LOSS_FUNCTION_PARAMETER); - PARSER.declareNamedObjects( - ConstructingObjectParser.optionalConstructorArg(), - (p, c, n) -> p.namedObject(PreProcessor.class, n, c), - (regression) -> {}, - FEATURE_PROCESSORS - ); - PARSER.declareDouble(ConstructingObjectParser.optionalConstructorArg(), ALPHA); - PARSER.declareDouble(ConstructingObjectParser.optionalConstructorArg(), ETA_GROWTH_RATE_PER_TREE); - PARSER.declareDouble(ConstructingObjectParser.optionalConstructorArg(), SOFT_TREE_DEPTH_LIMIT); - PARSER.declareDouble(ConstructingObjectParser.optionalConstructorArg(), SOFT_TREE_DEPTH_TOLERANCE); - PARSER.declareDouble(ConstructingObjectParser.optionalConstructorArg(), DOWNSAMPLE_FACTOR); - PARSER.declareInt(ConstructingObjectParser.optionalConstructorArg(), MAX_OPTIMIZATION_ROUNDS_PER_HYPERPARAMETER); - PARSER.declareBoolean(ConstructingObjectParser.optionalConstructorArg(), EARLY_STOPPING_ENABLED); - } - - private final String dependentVariable; - private final Double lambda; - private final Double gamma; - private final Double eta; - private final Integer maxTrees; - private final Double featureBagFraction; - private final Integer numTopFeatureImportanceValues; - private final String predictionFieldName; - private final Double trainingPercent; - private final Long randomizeSeed; - private final LossFunction lossFunction; - private final Double lossFunctionParameter; - private final List featureProcessors; - private final Double alpha; - private final Double etaGrowthRatePerTree; - private final Double softTreeDepthLimit; - private final Double softTreeDepthTolerance; - private final Double downsampleFactor; - private final Integer maxOptimizationRoundsPerHyperparameter; - private final Boolean earlyStoppingEnabled; - - private Regression( - String dependentVariable, - @Nullable Double lambda, - @Nullable Double gamma, - @Nullable Double eta, - @Nullable Integer maxTrees, - @Nullable Double featureBagFraction, - @Nullable Integer numTopFeatureImportanceValues, - @Nullable String predictionFieldName, - @Nullable Double trainingPercent, - @Nullable Long randomizeSeed, - @Nullable LossFunction lossFunction, - @Nullable Double lossFunctionParameter, - @Nullable List featureProcessors, - @Nullable Double alpha, - @Nullable Double etaGrowthRatePerTree, - @Nullable Double softTreeDepthLimit, - @Nullable Double softTreeDepthTolerance, - @Nullable Double downsampleFactor, - @Nullable Integer maxOptimizationRoundsPerHyperparameter, - @Nullable Boolean earlyStoppingEnabled - ) { - this.dependentVariable = Objects.requireNonNull(dependentVariable); - this.lambda = lambda; - this.gamma = gamma; - this.eta = eta; - this.maxTrees = maxTrees; - this.featureBagFraction = featureBagFraction; - 
this.numTopFeatureImportanceValues = numTopFeatureImportanceValues; - this.predictionFieldName = predictionFieldName; - this.trainingPercent = trainingPercent; - this.randomizeSeed = randomizeSeed; - this.lossFunction = lossFunction; - this.lossFunctionParameter = lossFunctionParameter; - this.featureProcessors = featureProcessors; - this.alpha = alpha; - this.etaGrowthRatePerTree = etaGrowthRatePerTree; - this.softTreeDepthLimit = softTreeDepthLimit; - this.softTreeDepthTolerance = softTreeDepthTolerance; - this.downsampleFactor = downsampleFactor; - this.maxOptimizationRoundsPerHyperparameter = maxOptimizationRoundsPerHyperparameter; - this.earlyStoppingEnabled = earlyStoppingEnabled; - } - - @Override - public String getName() { - return NAME.getPreferredName(); - } - - public String getDependentVariable() { - return dependentVariable; - } - - public Double getLambda() { - return lambda; - } - - public Double getGamma() { - return gamma; - } - - public Double getEta() { - return eta; - } - - public Integer getMaxTrees() { - return maxTrees; - } - - public Double getFeatureBagFraction() { - return featureBagFraction; - } - - public Integer getNumTopFeatureImportanceValues() { - return numTopFeatureImportanceValues; - } - - public String getPredictionFieldName() { - return predictionFieldName; - } - - public Double getTrainingPercent() { - return trainingPercent; - } - - public Long getRandomizeSeed() { - return randomizeSeed; - } - - public LossFunction getLossFunction() { - return lossFunction; - } - - public Double getLossFunctionParameter() { - return lossFunctionParameter; - } - - public List getFeatureProcessors() { - return featureProcessors; - } - - public Double getAlpha() { - return alpha; - } - - public Double getEtaGrowthRatePerTree() { - return etaGrowthRatePerTree; - } - - public Double getSoftTreeDepthLimit() { - return softTreeDepthLimit; - } - - public Double getSoftTreeDepthTolerance() { - return softTreeDepthTolerance; - } - - public Double getDownsampleFactor() { - return downsampleFactor; - } - - public Integer getMaxOptimizationRoundsPerHyperparameter() { - return maxOptimizationRoundsPerHyperparameter; - } - - public Boolean getEarlyStoppingEnabled() { - return earlyStoppingEnabled; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(DEPENDENT_VARIABLE.getPreferredName(), dependentVariable); - if (lambda != null) { - builder.field(LAMBDA.getPreferredName(), lambda); - } - if (gamma != null) { - builder.field(GAMMA.getPreferredName(), gamma); - } - if (eta != null) { - builder.field(ETA.getPreferredName(), eta); - } - if (maxTrees != null) { - builder.field(MAX_TREES.getPreferredName(), maxTrees); - } - if (featureBagFraction != null) { - builder.field(FEATURE_BAG_FRACTION.getPreferredName(), featureBagFraction); - } - if (numTopFeatureImportanceValues != null) { - builder.field(NUM_TOP_FEATURE_IMPORTANCE_VALUES.getPreferredName(), numTopFeatureImportanceValues); - } - if (predictionFieldName != null) { - builder.field(PREDICTION_FIELD_NAME.getPreferredName(), predictionFieldName); - } - if (trainingPercent != null) { - builder.field(TRAINING_PERCENT.getPreferredName(), trainingPercent); - } - if (randomizeSeed != null) { - builder.field(RANDOMIZE_SEED.getPreferredName(), randomizeSeed); - } - if (lossFunction != null) { - builder.field(LOSS_FUNCTION.getPreferredName(), lossFunction); - } - if (lossFunctionParameter != null) { - 
builder.field(LOSS_FUNCTION_PARAMETER.getPreferredName(), lossFunctionParameter); - } - if (featureProcessors != null) { - NamedXContentObjectHelper.writeNamedObjects(builder, params, true, FEATURE_PROCESSORS.getPreferredName(), featureProcessors); - } - if (alpha != null) { - builder.field(ALPHA.getPreferredName(), alpha); - } - if (etaGrowthRatePerTree != null) { - builder.field(ETA_GROWTH_RATE_PER_TREE.getPreferredName(), etaGrowthRatePerTree); - } - if (softTreeDepthLimit != null) { - builder.field(SOFT_TREE_DEPTH_LIMIT.getPreferredName(), softTreeDepthLimit); - } - if (softTreeDepthTolerance != null) { - builder.field(SOFT_TREE_DEPTH_TOLERANCE.getPreferredName(), softTreeDepthTolerance); - } - if (downsampleFactor != null) { - builder.field(DOWNSAMPLE_FACTOR.getPreferredName(), downsampleFactor); - } - if (maxOptimizationRoundsPerHyperparameter != null) { - builder.field(MAX_OPTIMIZATION_ROUNDS_PER_HYPERPARAMETER.getPreferredName(), maxOptimizationRoundsPerHyperparameter); - } - if (earlyStoppingEnabled != null) { - builder.field(EARLY_STOPPING_ENABLED.getPreferredName(), earlyStoppingEnabled); - } - builder.endObject(); - return builder; - } - - @Override - public int hashCode() { - return Objects.hash( - dependentVariable, - lambda, - gamma, - eta, - maxTrees, - featureBagFraction, - numTopFeatureImportanceValues, - predictionFieldName, - trainingPercent, - randomizeSeed, - lossFunction, - lossFunctionParameter, - featureProcessors, - alpha, - etaGrowthRatePerTree, - softTreeDepthLimit, - softTreeDepthTolerance, - downsampleFactor, - maxOptimizationRoundsPerHyperparameter, - earlyStoppingEnabled - ); - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - Regression that = (Regression) o; - return Objects.equals(dependentVariable, that.dependentVariable) - && Objects.equals(lambda, that.lambda) - && Objects.equals(gamma, that.gamma) - && Objects.equals(eta, that.eta) - && Objects.equals(maxTrees, that.maxTrees) - && Objects.equals(featureBagFraction, that.featureBagFraction) - && Objects.equals(numTopFeatureImportanceValues, that.numTopFeatureImportanceValues) - && Objects.equals(predictionFieldName, that.predictionFieldName) - && Objects.equals(trainingPercent, that.trainingPercent) - && Objects.equals(randomizeSeed, that.randomizeSeed) - && Objects.equals(lossFunction, that.lossFunction) - && Objects.equals(lossFunctionParameter, that.lossFunctionParameter) - && Objects.equals(featureProcessors, that.featureProcessors) - && Objects.equals(alpha, that.alpha) - && Objects.equals(etaGrowthRatePerTree, that.etaGrowthRatePerTree) - && Objects.equals(softTreeDepthLimit, that.softTreeDepthLimit) - && Objects.equals(softTreeDepthTolerance, that.softTreeDepthTolerance) - && Objects.equals(downsampleFactor, that.downsampleFactor) - && Objects.equals(maxOptimizationRoundsPerHyperparameter, that.maxOptimizationRoundsPerHyperparameter) - && Objects.equals(earlyStoppingEnabled, that.earlyStoppingEnabled); - } - - @Override - public String toString() { - return Strings.toString(this); - } - - public static class Builder { - private String dependentVariable; - private Double lambda; - private Double gamma; - private Double eta; - private Integer maxTrees; - private Double featureBagFraction; - private Integer numTopFeatureImportanceValues; - private String predictionFieldName; - private Double trainingPercent; - private Long randomizeSeed; - private LossFunction lossFunction; - private Double 
lossFunctionParameter; - private List featureProcessors; - private Double alpha; - private Double etaGrowthRatePerTree; - private Double softTreeDepthLimit; - private Double softTreeDepthTolerance; - private Double downsampleFactor; - private Integer maxOptimizationRoundsPerHyperparameter; - private Boolean earlyStoppingEnabled; - - private Builder(String dependentVariable) { - this.dependentVariable = Objects.requireNonNull(dependentVariable); - } - - public Builder setLambda(Double lambda) { - this.lambda = lambda; - return this; - } - - public Builder setGamma(Double gamma) { - this.gamma = gamma; - return this; - } - - public Builder setEta(Double eta) { - this.eta = eta; - return this; - } - - public Builder setMaxTrees(Integer maxTrees) { - this.maxTrees = maxTrees; - return this; - } - - public Builder setFeatureBagFraction(Double featureBagFraction) { - this.featureBagFraction = featureBagFraction; - return this; - } - - public Builder setNumTopFeatureImportanceValues(Integer numTopFeatureImportanceValues) { - this.numTopFeatureImportanceValues = numTopFeatureImportanceValues; - return this; - } - - public Builder setPredictionFieldName(String predictionFieldName) { - this.predictionFieldName = predictionFieldName; - return this; - } - - public Builder setTrainingPercent(Double trainingPercent) { - this.trainingPercent = trainingPercent; - return this; - } - - public Builder setRandomizeSeed(Long randomizeSeed) { - this.randomizeSeed = randomizeSeed; - return this; - } - - public Builder setLossFunction(LossFunction lossFunction) { - this.lossFunction = lossFunction; - return this; - } - - public Builder setLossFunctionParameter(Double lossFunctionParameter) { - this.lossFunctionParameter = lossFunctionParameter; - return this; - } - - public Builder setFeatureProcessors(List featureProcessors) { - this.featureProcessors = featureProcessors; - return this; - } - - public Builder setAlpha(Double alpha) { - this.alpha = alpha; - return this; - } - - public Builder setEtaGrowthRatePerTree(Double etaGrowthRatePerTree) { - this.etaGrowthRatePerTree = etaGrowthRatePerTree; - return this; - } - - public Builder setSoftTreeDepthLimit(Double softTreeDepthLimit) { - this.softTreeDepthLimit = softTreeDepthLimit; - return this; - } - - public Builder setSoftTreeDepthTolerance(Double softTreeDepthTolerance) { - this.softTreeDepthTolerance = softTreeDepthTolerance; - return this; - } - - public Builder setDownsampleFactor(Double downsampleFactor) { - this.downsampleFactor = downsampleFactor; - return this; - } - - public Builder setMaxOptimizationRoundsPerHyperparameter(Integer maxOptimizationRoundsPerHyperparameter) { - this.maxOptimizationRoundsPerHyperparameter = maxOptimizationRoundsPerHyperparameter; - return this; - } - - public Builder setEarlyStoppingEnabled(Boolean earlyStoppingEnabled) { - this.earlyStoppingEnabled = earlyStoppingEnabled; - return this; - } - - public Regression build() { - return new Regression( - dependentVariable, - lambda, - gamma, - eta, - maxTrees, - featureBagFraction, - numTopFeatureImportanceValues, - predictionFieldName, - trainingPercent, - randomizeSeed, - lossFunction, - lossFunctionParameter, - featureProcessors, - alpha, - etaGrowthRatePerTree, - softTreeDepthLimit, - softTreeDepthTolerance, - downsampleFactor, - maxOptimizationRoundsPerHyperparameter, - earlyStoppingEnabled - ); - } - } - - public enum LossFunction { - MSE, - MSLE, - HUBER; - - private static LossFunction fromString(String value) { - return 
LossFunction.valueOf(value.toUpperCase(Locale.ROOT)); - } - - @Override - public String toString() { - return name().toLowerCase(Locale.ROOT); - } - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/Evaluation.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/Evaluation.java deleted file mode 100644 index e5f3189a5920f..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/Evaluation.java +++ /dev/null @@ -1,21 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.dataframe.evaluation; - -import org.elasticsearch.xcontent.ToXContentObject; - -/** - * Defines an evaluation - */ -public interface Evaluation extends ToXContentObject { - - /** - * Returns the evaluation name - */ - String getName(); -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/EvaluationMetric.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/EvaluationMetric.java deleted file mode 100644 index daa1051a92b9b..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/EvaluationMetric.java +++ /dev/null @@ -1,32 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.dataframe.evaluation; - -import org.elasticsearch.xcontent.ToXContentObject; - -/** - * Defines an evaluation metric - */ -public interface EvaluationMetric extends ToXContentObject { - - /** - * Returns the name of the metric - */ - String getName(); - - /** - * The result of an evaluation metric - */ - interface Result extends ToXContentObject { - - /** - * Returns the name of the metric - */ - String getMetricName(); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/MlEvaluationNamedXContentProvider.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/MlEvaluationNamedXContentProvider.java deleted file mode 100644 index da1d66785f386..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/MlEvaluationNamedXContentProvider.java +++ /dev/null @@ -1,216 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
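To make the removed Regression configuration concrete, here is a hedged sketch built with the Builder and LossFunction enum shown above; every hyperparameter value is illustrative, and "price" is a hypothetical dependent-variable field:

    // Sketch only; values chosen for illustration, not as recommendations.
    Regression regression = Regression.builder("price")   // dependent_variable
        .setEta(0.05)
        .setMaxTrees(1000)
        .setTrainingPercent(80.0)
        .setLossFunction(Regression.LossFunction.HUBER)
        .setLossFunctionParameter(1.0)
        .build();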
- */ -package org.elasticsearch.client.ml.dataframe.evaluation; - -import org.elasticsearch.client.ml.dataframe.evaluation.classification.AccuracyMetric; -import org.elasticsearch.client.ml.dataframe.evaluation.classification.AucRocMetric; -import org.elasticsearch.client.ml.dataframe.evaluation.classification.Classification; -import org.elasticsearch.client.ml.dataframe.evaluation.classification.MulticlassConfusionMatrixMetric; -import org.elasticsearch.client.ml.dataframe.evaluation.classification.PrecisionMetric; -import org.elasticsearch.client.ml.dataframe.evaluation.classification.RecallMetric; -import org.elasticsearch.client.ml.dataframe.evaluation.common.AucRocResult; -import org.elasticsearch.client.ml.dataframe.evaluation.outlierdetection.ConfusionMatrixMetric; -import org.elasticsearch.client.ml.dataframe.evaluation.outlierdetection.OutlierDetection; -import org.elasticsearch.client.ml.dataframe.evaluation.regression.HuberMetric; -import org.elasticsearch.client.ml.dataframe.evaluation.regression.MeanSquaredErrorMetric; -import org.elasticsearch.client.ml.dataframe.evaluation.regression.MeanSquaredLogarithmicErrorMetric; -import org.elasticsearch.client.ml.dataframe.evaluation.regression.RSquaredMetric; -import org.elasticsearch.client.ml.dataframe.evaluation.regression.Regression; -import org.elasticsearch.plugins.spi.NamedXContentProvider; -import org.elasticsearch.xcontent.NamedXContentRegistry; -import org.elasticsearch.xcontent.ParseField; - -import java.util.Arrays; -import java.util.List; - -public class MlEvaluationNamedXContentProvider implements NamedXContentProvider { - - /** - * Constructs the name under which a metric (or metric result) is registered. - * The name is prefixed with evaluation name so that registered names are unique. - * - * @param evaluationName name of the evaluation - * @param metricName name of the metric - * @return name appropriate for registering a metric (or metric result) in {@link NamedXContentRegistry} - */ - public static String registeredMetricName(String evaluationName, String metricName) { - return evaluationName + "." 
+ metricName; - } - - @Override - public List getNamedXContentParsers() { - return Arrays.asList( - // Evaluations - new NamedXContentRegistry.Entry(Evaluation.class, new ParseField(OutlierDetection.NAME), OutlierDetection::fromXContent), - new NamedXContentRegistry.Entry(Evaluation.class, new ParseField(Classification.NAME), Classification::fromXContent), - new NamedXContentRegistry.Entry(Evaluation.class, new ParseField(Regression.NAME), Regression::fromXContent), - // Evaluation metrics - new NamedXContentRegistry.Entry( - EvaluationMetric.class, - new ParseField( - registeredMetricName( - OutlierDetection.NAME, - org.elasticsearch.client.ml.dataframe.evaluation.outlierdetection.AucRocMetric.NAME - ) - ), - org.elasticsearch.client.ml.dataframe.evaluation.outlierdetection.AucRocMetric::fromXContent - ), - new NamedXContentRegistry.Entry( - EvaluationMetric.class, - new ParseField( - registeredMetricName( - OutlierDetection.NAME, - org.elasticsearch.client.ml.dataframe.evaluation.outlierdetection.PrecisionMetric.NAME - ) - ), - org.elasticsearch.client.ml.dataframe.evaluation.outlierdetection.PrecisionMetric::fromXContent - ), - new NamedXContentRegistry.Entry( - EvaluationMetric.class, - new ParseField( - registeredMetricName( - OutlierDetection.NAME, - org.elasticsearch.client.ml.dataframe.evaluation.outlierdetection.RecallMetric.NAME - ) - ), - org.elasticsearch.client.ml.dataframe.evaluation.outlierdetection.RecallMetric::fromXContent - ), - new NamedXContentRegistry.Entry( - EvaluationMetric.class, - new ParseField(registeredMetricName(OutlierDetection.NAME, ConfusionMatrixMetric.NAME)), - ConfusionMatrixMetric::fromXContent - ), - new NamedXContentRegistry.Entry( - EvaluationMetric.class, - new ParseField(registeredMetricName(Classification.NAME, AucRocMetric.NAME)), - AucRocMetric::fromXContent - ), - new NamedXContentRegistry.Entry( - EvaluationMetric.class, - new ParseField(registeredMetricName(Classification.NAME, AccuracyMetric.NAME)), - AccuracyMetric::fromXContent - ), - new NamedXContentRegistry.Entry( - EvaluationMetric.class, - new ParseField(registeredMetricName(Classification.NAME, PrecisionMetric.NAME)), - PrecisionMetric::fromXContent - ), - new NamedXContentRegistry.Entry( - EvaluationMetric.class, - new ParseField(registeredMetricName(Classification.NAME, RecallMetric.NAME)), - RecallMetric::fromXContent - ), - new NamedXContentRegistry.Entry( - EvaluationMetric.class, - new ParseField(registeredMetricName(Classification.NAME, MulticlassConfusionMatrixMetric.NAME)), - MulticlassConfusionMatrixMetric::fromXContent - ), - new NamedXContentRegistry.Entry( - EvaluationMetric.class, - new ParseField(registeredMetricName(Regression.NAME, MeanSquaredErrorMetric.NAME)), - MeanSquaredErrorMetric::fromXContent - ), - new NamedXContentRegistry.Entry( - EvaluationMetric.class, - new ParseField(registeredMetricName(Regression.NAME, MeanSquaredLogarithmicErrorMetric.NAME)), - MeanSquaredLogarithmicErrorMetric::fromXContent - ), - new NamedXContentRegistry.Entry( - EvaluationMetric.class, - new ParseField(registeredMetricName(Regression.NAME, HuberMetric.NAME)), - HuberMetric::fromXContent - ), - new NamedXContentRegistry.Entry( - EvaluationMetric.class, - new ParseField(registeredMetricName(Regression.NAME, RSquaredMetric.NAME)), - RSquaredMetric::fromXContent - ), - // Evaluation metrics results - new NamedXContentRegistry.Entry( - EvaluationMetric.Result.class, - new ParseField( - registeredMetricName( - OutlierDetection.NAME, - 
org.elasticsearch.client.ml.dataframe.evaluation.outlierdetection.AucRocMetric.NAME - ) - ), - AucRocResult::fromXContent - ), - new NamedXContentRegistry.Entry( - EvaluationMetric.Result.class, - new ParseField( - registeredMetricName( - OutlierDetection.NAME, - org.elasticsearch.client.ml.dataframe.evaluation.outlierdetection.PrecisionMetric.NAME - ) - ), - org.elasticsearch.client.ml.dataframe.evaluation.outlierdetection.PrecisionMetric.Result::fromXContent - ), - new NamedXContentRegistry.Entry( - EvaluationMetric.Result.class, - new ParseField( - registeredMetricName( - OutlierDetection.NAME, - org.elasticsearch.client.ml.dataframe.evaluation.outlierdetection.RecallMetric.NAME - ) - ), - org.elasticsearch.client.ml.dataframe.evaluation.outlierdetection.RecallMetric.Result::fromXContent - ), - new NamedXContentRegistry.Entry( - EvaluationMetric.Result.class, - new ParseField(registeredMetricName(OutlierDetection.NAME, ConfusionMatrixMetric.NAME)), - ConfusionMatrixMetric.Result::fromXContent - ), - new NamedXContentRegistry.Entry( - EvaluationMetric.Result.class, - new ParseField(registeredMetricName(Classification.NAME, AucRocMetric.NAME)), - AucRocResult::fromXContent - ), - new NamedXContentRegistry.Entry( - EvaluationMetric.Result.class, - new ParseField(registeredMetricName(Classification.NAME, AccuracyMetric.NAME)), - AccuracyMetric.Result::fromXContent - ), - new NamedXContentRegistry.Entry( - EvaluationMetric.Result.class, - new ParseField(registeredMetricName(Classification.NAME, PrecisionMetric.NAME)), - PrecisionMetric.Result::fromXContent - ), - new NamedXContentRegistry.Entry( - EvaluationMetric.Result.class, - new ParseField(registeredMetricName(Classification.NAME, RecallMetric.NAME)), - RecallMetric.Result::fromXContent - ), - new NamedXContentRegistry.Entry( - EvaluationMetric.Result.class, - new ParseField(registeredMetricName(Classification.NAME, MulticlassConfusionMatrixMetric.NAME)), - MulticlassConfusionMatrixMetric.Result::fromXContent - ), - new NamedXContentRegistry.Entry( - EvaluationMetric.Result.class, - new ParseField(registeredMetricName(Regression.NAME, MeanSquaredErrorMetric.NAME)), - MeanSquaredErrorMetric.Result::fromXContent - ), - new NamedXContentRegistry.Entry( - EvaluationMetric.Result.class, - new ParseField(registeredMetricName(Regression.NAME, MeanSquaredLogarithmicErrorMetric.NAME)), - MeanSquaredLogarithmicErrorMetric.Result::fromXContent - ), - new NamedXContentRegistry.Entry( - EvaluationMetric.Result.class, - new ParseField(registeredMetricName(Regression.NAME, HuberMetric.NAME)), - HuberMetric.Result::fromXContent - ), - new NamedXContentRegistry.Entry( - EvaluationMetric.Result.class, - new ParseField(registeredMetricName(Regression.NAME, RSquaredMetric.NAME)), - RSquaredMetric.Result::fromXContent - ) - ); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/classification/AccuracyMetric.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/classification/AccuracyMetric.java deleted file mode 100644 index f95b8a0b77344..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/classification/AccuracyMetric.java +++ /dev/null @@ -1,148 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. 
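The registeredMetricName helper shown above simply joins the two names with a dot, so registeredMetricName("classification", "accuracy") yields "classification.accuracy". For illustration, the provider's parsers were typically wired into a registry like this:

    // Standard NamedXContentRegistry wiring; shown for illustration.
    NamedXContentRegistry registry = new NamedXContentRegistry(
        new MlEvaluationNamedXContentProvider().getNamedXContentParsers()
    );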
Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.dataframe.evaluation.classification; - -import org.elasticsearch.client.ml.dataframe.evaluation.EvaluationMetric; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContent; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Collections; -import java.util.List; -import java.util.Objects; - -import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; - -/** - * {@link AccuracyMetric} is a metric that answers the following two questions: - * - * 1. What is the fraction of documents for which predicted class equals the actual class? - * - * equation: overall_accuracy = 1/n * Σ(y == y') - * where: n = total number of documents - * y = document's actual class - * y' = document's predicted class - * - * 2. For any given class X, what is the fraction of documents for which either - * a) both actual and predicted class are equal to X (true positives) - * or - * b) both actual and predicted class are not equal to X (true negatives) - * - * equation: accuracy(X) = 1/n * (TP(X) + TN(X)) - * where: X = class being examined - * n = total number of documents - * TP(X) = number of true positives wrt X - * TN(X) = number of true negatives wrt X - */ -public class AccuracyMetric implements EvaluationMetric { - - public static final String NAME = "accuracy"; - - private static final ObjectParser PARSER = new ObjectParser<>(NAME, true, AccuracyMetric::new); - - public static AccuracyMetric fromXContent(XContentParser parser) { - return PARSER.apply(parser, null); - } - - public AccuracyMetric() {} - - @Override - public String getName() { - return NAME; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { - builder.startObject(); - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - return true; - } - - @Override - public int hashCode() { - return Objects.hashCode(NAME); - } - - public static class Result implements EvaluationMetric.Result { - - private static final ParseField CLASSES = new ParseField("classes"); - private static final ParseField OVERALL_ACCURACY = new ParseField("overall_accuracy"); - - @SuppressWarnings("unchecked") - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "accuracy_result", - true, - a -> new Result((List) a[0], (double) a[1]) - ); - - static { - PARSER.declareObjectArray(constructorArg(), PerClassSingleValue.PARSER, CLASSES); - PARSER.declareDouble(constructorArg(), OVERALL_ACCURACY); - } - - public static Result fromXContent(XContentParser parser) { - return PARSER.apply(parser, null); - } - - /** List of per-class results. */ - private final List classes; - /** Fraction of documents for which predicted class equals the actual class. 
*/ - private final double overallAccuracy; - - public Result(List classes, double overallAccuracy) { - this.classes = Collections.unmodifiableList(Objects.requireNonNull(classes)); - this.overallAccuracy = overallAccuracy; - } - - @Override - public String getMetricName() { - return NAME; - } - - public List getClasses() { - return classes; - } - - public double getOverallAccuracy() { - return overallAccuracy; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(CLASSES.getPreferredName(), classes); - builder.field(OVERALL_ACCURACY.getPreferredName(), overallAccuracy); - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - Result that = (Result) o; - return Objects.equals(this.classes, that.classes) && this.overallAccuracy == that.overallAccuracy; - } - - @Override - public int hashCode() { - return Objects.hash(classes, overallAccuracy); - } - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/classification/AucRocMetric.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/classification/AucRocMetric.java deleted file mode 100644 index f8a85d7d665b7..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/classification/AucRocMetric.java +++ /dev/null @@ -1,95 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.dataframe.evaluation.classification; - -import org.elasticsearch.client.ml.dataframe.evaluation.EvaluationMetric; -import org.elasticsearch.client.ml.dataframe.evaluation.common.AucRocResult; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContent; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Objects; - -import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; -import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; - -/** - * Area under the curve (AUC) of the receiver operating characteristic (ROC). - * The ROC curve is a plot of the TPR (true positive rate) against - * the FPR (false positive rate) over a varying threshold. 
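As a quick numeric check of the accuracy(X) equation in the AccuracyMetric Javadoc above; the counts are invented:

    // Worked instance of accuracy(X) = 1/n * (TP(X) + TN(X)); made-up counts.
    int n = 100;     // total number of documents
    int tpX = 30;    // true positives with respect to class X
    int tnX = 50;    // true negatives with respect to class X
    double accuracyX = (tpX + tnX) / (double) n;   // 0.8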
- */ -public class AucRocMetric implements EvaluationMetric { - - public static final String NAME = AucRocResult.NAME; - - public static final ParseField CLASS_NAME = new ParseField("class_name"); - public static final ParseField INCLUDE_CURVE = new ParseField("include_curve"); - - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - NAME, - true, - args -> new AucRocMetric((String) args[0], (Boolean) args[1]) - ); - - static { - PARSER.declareString(constructorArg(), CLASS_NAME); - PARSER.declareBoolean(optionalConstructorArg(), INCLUDE_CURVE); - } - - public static AucRocMetric fromXContent(XContentParser parser) { - return PARSER.apply(parser, null); - } - - public static AucRocMetric forClass(String className) { - return new AucRocMetric(className, false); - } - - public static AucRocMetric forClassWithCurve(String className) { - return new AucRocMetric(className, true); - } - - private final String className; - private final Boolean includeCurve; - - public AucRocMetric(String className, Boolean includeCurve) { - this.className = Objects.requireNonNull(className); - this.includeCurve = includeCurve; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { - builder.startObject(); - builder.field(CLASS_NAME.getPreferredName(), className); - if (includeCurve != null) { - builder.field(INCLUDE_CURVE.getPreferredName(), includeCurve); - } - builder.endObject(); - return builder; - } - - @Override - public String getName() { - return NAME; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - AucRocMetric that = (AucRocMetric) o; - return Objects.equals(className, that.className) && Objects.equals(includeCurve, that.includeCurve); - } - - @Override - public int hashCode() { - return Objects.hash(className, includeCurve); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/classification/Classification.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/classification/Classification.java deleted file mode 100644 index 7f394ff30a046..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/classification/Classification.java +++ /dev/null @@ -1,148 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
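The removed AucRocMetric is usually obtained through its two factory methods shown above; "dog" is a hypothetical class name:

    // Factories shown in the class above; "dog" is a placeholder class name.
    AucRocMetric withCurve = AucRocMetric.forClassWithCurve("dog"); // include_curve = true
    AucRocMetric plain = AucRocMetric.forClass("dog");              // include_curve = false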
- */ -package org.elasticsearch.client.ml.dataframe.evaluation.classification; - -import org.elasticsearch.client.ml.dataframe.evaluation.Evaluation; -import org.elasticsearch.client.ml.dataframe.evaluation.EvaluationMetric; -import org.elasticsearch.core.Nullable; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Arrays; -import java.util.Comparator; -import java.util.List; -import java.util.Objects; - -import static org.elasticsearch.client.ml.dataframe.evaluation.MlEvaluationNamedXContentProvider.registeredMetricName; -import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; -import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; - -/** - * Evaluation of classification results. - */ -public class Classification implements Evaluation { - - public static final String NAME = "classification"; - - private static final ParseField ACTUAL_FIELD = new ParseField("actual_field"); - private static final ParseField PREDICTED_FIELD = new ParseField("predicted_field"); - private static final ParseField TOP_CLASSES_FIELD = new ParseField("top_classes_field"); - - private static final ParseField METRICS = new ParseField("metrics"); - - @SuppressWarnings("unchecked") - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - NAME, - true, - a -> new Classification((String) a[0], (String) a[1], (String) a[2], (List) a[3]) - ); - - static { - PARSER.declareString(constructorArg(), ACTUAL_FIELD); - PARSER.declareString(optionalConstructorArg(), PREDICTED_FIELD); - PARSER.declareString(optionalConstructorArg(), TOP_CLASSES_FIELD); - PARSER.declareNamedObjects( - optionalConstructorArg(), - (p, c, n) -> p.namedObject(EvaluationMetric.class, registeredMetricName(NAME, n), c), - METRICS - ); - } - - public static Classification fromXContent(XContentParser parser) { - return PARSER.apply(parser, null); - } - - /** - * The field containing the actual value - */ - private final String actualField; - - /** - * The field containing the predicted value - */ - private final String predictedField; - - /** - * The field containing the array of top classes - */ - private final String topClassesField; - - /** - * The list of metrics to calculate - */ - private final List metrics; - - public Classification(String actualField, String predictedField, String topClassesField) { - this(actualField, predictedField, topClassesField, (List) null); - } - - public Classification(String actualField, String predictedField, String topClassesField, EvaluationMetric... 
metrics) { - this(actualField, predictedField, topClassesField, Arrays.asList(metrics)); - } - - public Classification( - String actualField, - @Nullable String predictedField, - @Nullable String topClassesField, - @Nullable List metrics - ) { - this.actualField = Objects.requireNonNull(actualField); - this.predictedField = predictedField; - this.topClassesField = topClassesField; - if (metrics != null) { - metrics.sort(Comparator.comparing(EvaluationMetric::getName)); - } - this.metrics = metrics; - } - - @Override - public String getName() { - return NAME; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(ACTUAL_FIELD.getPreferredName(), actualField); - if (predictedField != null) { - builder.field(PREDICTED_FIELD.getPreferredName(), predictedField); - } - if (topClassesField != null) { - builder.field(TOP_CLASSES_FIELD.getPreferredName(), topClassesField); - } - if (metrics != null) { - builder.startObject(METRICS.getPreferredName()); - for (EvaluationMetric metric : metrics) { - builder.field(metric.getName(), metric); - } - builder.endObject(); - } - - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - Classification that = (Classification) o; - return Objects.equals(that.actualField, this.actualField) - && Objects.equals(that.predictedField, this.predictedField) - && Objects.equals(that.topClassesField, this.topClassesField) - && Objects.equals(that.metrics, this.metrics); - } - - @Override - public int hashCode() { - return Objects.hash(actualField, predictedField, topClassesField, metrics); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/classification/MulticlassConfusionMatrixMetric.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/classification/MulticlassConfusionMatrixMetric.java deleted file mode 100644 index ae55246c11dc5..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/classification/MulticlassConfusionMatrixMetric.java +++ /dev/null @@ -1,289 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.dataframe.evaluation.classification; - -import org.elasticsearch.client.ml.dataframe.evaluation.EvaluationMetric; -import org.elasticsearch.common.Strings; -import org.elasticsearch.core.Nullable; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Collections; -import java.util.List; -import java.util.Objects; - -import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; - -/** - * Calculates the multiclass confusion matrix. 
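Putting the removed Classification evaluation together, a sketch with hypothetical field names; the trailing varargs are the metrics to compute and may be omitted:

    // Field names are hypothetical; metrics are optional per the constructors above.
    Classification evaluation = new Classification(
        "animal_class",                  // actual_field
        "ml.animal_class_prediction",    // predicted_field
        "ml.top_classes",                // top_classes_field
        new AccuracyMetric(),
        new PrecisionMetric()
    );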
- */ -public class MulticlassConfusionMatrixMetric implements EvaluationMetric { - - public static final String NAME = "multiclass_confusion_matrix"; - - public static final ParseField SIZE = new ParseField("size"); - - private static final ConstructingObjectParser PARSER = createParser(); - - private static ConstructingObjectParser createParser() { - ConstructingObjectParser parser = new ConstructingObjectParser<>( - NAME, - true, - args -> new MulticlassConfusionMatrixMetric((Integer) args[0]) - ); - parser.declareInt(optionalConstructorArg(), SIZE); - return parser; - } - - public static MulticlassConfusionMatrixMetric fromXContent(XContentParser parser) { - return PARSER.apply(parser, null); - } - - private final Integer size; - - public MulticlassConfusionMatrixMetric() { - this(null); - } - - public MulticlassConfusionMatrixMetric(@Nullable Integer size) { - this.size = size; - } - - @Override - public String getName() { - return NAME; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - if (size != null) { - builder.field(SIZE.getPreferredName(), size); - } - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - MulticlassConfusionMatrixMetric that = (MulticlassConfusionMatrixMetric) o; - return Objects.equals(this.size, that.size); - } - - @Override - public int hashCode() { - return Objects.hash(size); - } - - public static class Result implements EvaluationMetric.Result { - - private static final ParseField CONFUSION_MATRIX = new ParseField("confusion_matrix"); - private static final ParseField OTHER_ACTUAL_CLASS_COUNT = new ParseField("other_actual_class_count"); - - @SuppressWarnings("unchecked") - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "multiclass_confusion_matrix_result", - true, - a -> new Result((List) a[0], (Long) a[1]) - ); - - static { - PARSER.declareObjectArray(optionalConstructorArg(), ActualClass.PARSER, CONFUSION_MATRIX); - PARSER.declareLong(optionalConstructorArg(), OTHER_ACTUAL_CLASS_COUNT); - } - - public static Result fromXContent(XContentParser parser) { - return PARSER.apply(parser, null); - } - - private final List confusionMatrix; - private final Long otherActualClassCount; - - public Result(@Nullable List confusionMatrix, @Nullable Long otherActualClassCount) { - this.confusionMatrix = confusionMatrix != null ? 
Collections.unmodifiableList(Objects.requireNonNull(confusionMatrix)) : null; - this.otherActualClassCount = otherActualClassCount; - } - - @Override - public String getMetricName() { - return NAME; - } - - public List getConfusionMatrix() { - return confusionMatrix; - } - - public Long getOtherActualClassCount() { - return otherActualClassCount; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - if (confusionMatrix != null) { - builder.field(CONFUSION_MATRIX.getPreferredName(), confusionMatrix); - } - if (otherActualClassCount != null) { - builder.field(OTHER_ACTUAL_CLASS_COUNT.getPreferredName(), otherActualClassCount); - } - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - Result that = (Result) o; - return Objects.equals(this.confusionMatrix, that.confusionMatrix) - && Objects.equals(this.otherActualClassCount, that.otherActualClassCount); - } - - @Override - public int hashCode() { - return Objects.hash(confusionMatrix, otherActualClassCount); - } - } - - public static class ActualClass implements ToXContentObject { - - private static final ParseField ACTUAL_CLASS = new ParseField("actual_class"); - private static final ParseField ACTUAL_CLASS_DOC_COUNT = new ParseField("actual_class_doc_count"); - private static final ParseField PREDICTED_CLASSES = new ParseField("predicted_classes"); - private static final ParseField OTHER_PREDICTED_CLASS_DOC_COUNT = new ParseField("other_predicted_class_doc_count"); - - @SuppressWarnings("unchecked") - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "multiclass_confusion_matrix_actual_class", - true, - a -> new ActualClass((String) a[0], (Long) a[1], (List) a[2], (Long) a[3]) - ); - - static { - PARSER.declareString(optionalConstructorArg(), ACTUAL_CLASS); - PARSER.declareLong(optionalConstructorArg(), ACTUAL_CLASS_DOC_COUNT); - PARSER.declareObjectArray(optionalConstructorArg(), PredictedClass.PARSER, PREDICTED_CLASSES); - PARSER.declareLong(optionalConstructorArg(), OTHER_PREDICTED_CLASS_DOC_COUNT); - } - - private final String actualClass; - private final Long actualClassDocCount; - private final List predictedClasses; - private final Long otherPredictedClassDocCount; - - public ActualClass( - @Nullable String actualClass, - @Nullable Long actualClassDocCount, - @Nullable List predictedClasses, - @Nullable Long otherPredictedClassDocCount - ) { - this.actualClass = actualClass; - this.actualClassDocCount = actualClassDocCount; - this.predictedClasses = predictedClasses != null ? 
Collections.unmodifiableList(predictedClasses) : null; - this.otherPredictedClassDocCount = otherPredictedClassDocCount; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - if (actualClass != null) { - builder.field(ACTUAL_CLASS.getPreferredName(), actualClass); - } - if (actualClassDocCount != null) { - builder.field(ACTUAL_CLASS_DOC_COUNT.getPreferredName(), actualClassDocCount); - } - if (predictedClasses != null) { - builder.field(PREDICTED_CLASSES.getPreferredName(), predictedClasses); - } - if (otherPredictedClassDocCount != null) { - builder.field(OTHER_PREDICTED_CLASS_DOC_COUNT.getPreferredName(), otherPredictedClassDocCount); - } - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - ActualClass that = (ActualClass) o; - return Objects.equals(this.actualClass, that.actualClass) - && Objects.equals(this.actualClassDocCount, that.actualClassDocCount) - && Objects.equals(this.predictedClasses, that.predictedClasses) - && Objects.equals(this.otherPredictedClassDocCount, that.otherPredictedClassDocCount); - } - - @Override - public int hashCode() { - return Objects.hash(actualClass, actualClassDocCount, predictedClasses, otherPredictedClassDocCount); - } - - @Override - public String toString() { - return Strings.toString(this); - } - } - - public static class PredictedClass implements ToXContentObject { - - private static final ParseField PREDICTED_CLASS = new ParseField("predicted_class"); - private static final ParseField COUNT = new ParseField("count"); - - @SuppressWarnings("unchecked") - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "multiclass_confusion_matrix_predicted_class", - true, - a -> new PredictedClass((String) a[0], (Long) a[1]) - ); - - static { - PARSER.declareString(optionalConstructorArg(), PREDICTED_CLASS); - PARSER.declareLong(optionalConstructorArg(), COUNT); - } - - private final String predictedClass; - private final Long count; - - public PredictedClass(@Nullable String predictedClass, @Nullable Long count) { - this.predictedClass = predictedClass; - this.count = count; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - if (predictedClass != null) { - builder.field(PREDICTED_CLASS.getPreferredName(), predictedClass); - } - if (count != null) { - builder.field(COUNT.getPreferredName(), count); - } - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - PredictedClass that = (PredictedClass) o; - return Objects.equals(this.predictedClass, that.predictedClass) && Objects.equals(this.count, that.count); - } - - @Override - public int hashCode() { - return Objects.hash(predictedClass, count); - } - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/classification/PerClassSingleValue.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/classification/PerClassSingleValue.java deleted file mode 100644 index 703468b5ec282..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/classification/PerClassSingleValue.java +++ /dev/null @@ -1,72 +0,0 @@ 
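A sketch of the multiclass confusion-matrix metric removed above; reading the SIZE field as a cap on the number of reported actual classes is an assumption based on its name, and 10 is arbitrary:

    // size presumably caps the reported classes (assumption); 10 is arbitrary.
    MulticlassConfusionMatrixMetric matrixMetric = new MulticlassConfusionMatrixMetric(10);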
-/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.dataframe.evaluation.classification; - -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Objects; - -import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; - -public class PerClassSingleValue implements ToXContentObject { - private static final ParseField CLASS_NAME = new ParseField("class_name"); - private static final ParseField VALUE = new ParseField("value"); - - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "per_class_result", - true, - a -> new PerClassSingleValue((String) a[0], (double) a[1]) - ); - - static { - PARSER.declareString(constructorArg(), CLASS_NAME); - PARSER.declareDouble(constructorArg(), VALUE); - } - - private final String className; - private final double value; - - public PerClassSingleValue(String className, double value) { - this.className = Objects.requireNonNull(className); - this.value = value; - } - - public String getClassName() { - return className; - } - - public double getValue() { - return value; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(CLASS_NAME.getPreferredName(), className); - builder.field(VALUE.getPreferredName(), value); - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - PerClassSingleValue that = (PerClassSingleValue) o; - return Objects.equals(this.className, that.className) && this.value == that.value; - } - - @Override - public int hashCode() { - return Objects.hash(className, value); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/classification/PrecisionMetric.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/classification/PrecisionMetric.java deleted file mode 100644 index 168eeed66d67d..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/classification/PrecisionMetric.java +++ /dev/null @@ -1,135 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
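PerClassSingleValue, removed above, is the (class name, value) pair that the classification metric results reuse; a trivial illustration with placeholder values:

    // "cat" and 0.95 are placeholder values.
    PerClassSingleValue perClass = new PerClassSingleValue("cat", 0.95);
    String className = perClass.getClassName(); // "cat"
    double value = perClass.getValue();         // 0.95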
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/classification/PrecisionMetric.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/classification/PrecisionMetric.java
deleted file mode 100644
index 168eeed66d67d..0000000000000
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/classification/PrecisionMetric.java
+++ /dev/null
@@ -1,135 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-package org.elasticsearch.client.ml.dataframe.evaluation.classification;
-
-import org.elasticsearch.client.ml.dataframe.evaluation.EvaluationMetric;
-import org.elasticsearch.xcontent.ConstructingObjectParser;
-import org.elasticsearch.xcontent.ObjectParser;
-import org.elasticsearch.xcontent.ParseField;
-import org.elasticsearch.xcontent.XContentBuilder;
-import org.elasticsearch.xcontent.XContentParser;
-
-import java.io.IOException;
-import java.util.Collections;
-import java.util.List;
-import java.util.Objects;
-
-import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg;
-
-/**
- * {@link PrecisionMetric} is a metric that answers the question:
- *   "What fraction of documents classified as X actually belongs to X?"
- * for any given class X
- *
- * equation: precision(X) = TP(X) / (TP(X) + FP(X))
- * where: TP(X) - number of true positives wrt X
- *        FP(X) - number of false positives wrt X
- */
-public class PrecisionMetric implements EvaluationMetric {
-
-    public static final String NAME = "precision";
-
-    private static final ObjectParser<PrecisionMetric, Void> PARSER = new ObjectParser<>(NAME, true, PrecisionMetric::new);
-
-    public static PrecisionMetric fromXContent(XContentParser parser) {
-        return PARSER.apply(parser, null);
-    }
-
-    public PrecisionMetric() {}
-
-    @Override
-    public String getName() {
-        return NAME;
-    }
-
-    @Override
-    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
-        builder.startObject();
-        builder.endObject();
-        return builder;
-    }
-
-    @Override
-    public boolean equals(Object o) {
-        if (this == o) return true;
-        if (o == null || getClass() != o.getClass()) return false;
-        return true;
-    }
-
-    @Override
-    public int hashCode() {
-        return Objects.hashCode(NAME);
-    }
-
-    public static class Result implements EvaluationMetric.Result {
-
-        private static final ParseField CLASSES = new ParseField("classes");
-        private static final ParseField AVG_PRECISION = new ParseField("avg_precision");
-
-        @SuppressWarnings("unchecked")
-        private static final ConstructingObjectParser<Result, Void> PARSER = new ConstructingObjectParser<>(
-            "precision_result",
-            true,
-            a -> new Result((List<PerClassSingleValue>) a[0], (double) a[1])
-        );
-
-        static {
-            PARSER.declareObjectArray(constructorArg(), PerClassSingleValue.PARSER, CLASSES);
-            PARSER.declareDouble(constructorArg(), AVG_PRECISION);
-        }
-
-        public static Result fromXContent(XContentParser parser) {
-            return PARSER.apply(parser, null);
-        }
-
-        /** List of per-class results. */
-        private final List<PerClassSingleValue> classes;
-        /** Average of per-class precisions. */
-        private final double avgPrecision;
-
-        public Result(List<PerClassSingleValue> classes, double avgPrecision) {
-            this.classes = Collections.unmodifiableList(Objects.requireNonNull(classes));
-            this.avgPrecision = avgPrecision;
-        }
-
-        @Override
-        public String getMetricName() {
-            return NAME;
-        }
-
-        public List<PerClassSingleValue> getClasses() {
-            return classes;
-        }
-
-        public double getAvgPrecision() {
-            return avgPrecision;
-        }
-
-        @Override
-        public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
-            builder.startObject();
-            builder.field(CLASSES.getPreferredName(), classes);
-            builder.field(AVG_PRECISION.getPreferredName(), avgPrecision);
-            builder.endObject();
-            return builder;
-        }
-
-        @Override
-        public boolean equals(Object o) {
-            if (this == o) return true;
-            if (o == null || getClass() != o.getClass()) return false;
-            Result that = (Result) o;
-            return Objects.equals(this.classes, that.classes) && this.avgPrecision == that.avgPrecision;
-        }
-
-        @Override
-        public int hashCode() {
-            return Objects.hash(classes, avgPrecision);
-        }
-    }
-}
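For reference, the equation in the Javadoc above and the avg_precision value reported by the Result class can be sketched in plain Java (hypothetical helpers, not part of this patch):

    // precision(X) = TP(X) / (TP(X) + FP(X))
    static double precision(long truePositives, long falsePositives) {
        return (double) truePositives / (truePositives + falsePositives);
    }

    // avg_precision is the arithmetic mean of the per-class precisions
    static double avgPrecision(double[] perClassPrecisions) {
        double sum = 0.0;
        for (double p : perClassPrecisions) {
            sum += p;
        }
        return sum / perClassPrecisions.length;
    }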
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/classification/RecallMetric.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/classification/RecallMetric.java
deleted file mode 100644
index 689d441944e7a..0000000000000
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/classification/RecallMetric.java
+++ /dev/null
@@ -1,135 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-package org.elasticsearch.client.ml.dataframe.evaluation.classification;
-
-import org.elasticsearch.client.ml.dataframe.evaluation.EvaluationMetric;
-import org.elasticsearch.xcontent.ConstructingObjectParser;
-import org.elasticsearch.xcontent.ObjectParser;
-import org.elasticsearch.xcontent.ParseField;
-import org.elasticsearch.xcontent.XContentBuilder;
-import org.elasticsearch.xcontent.XContentParser;
-
-import java.io.IOException;
-import java.util.Collections;
-import java.util.List;
-import java.util.Objects;
-
-import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg;
-
-/**
- * {@link RecallMetric} is a metric that answers the question:
- *   "What fraction of documents belonging to X have been predicted as X by the classifier?"
- * for any given class X
- *
- * equation: recall(X) = TP(X) / (TP(X) + FN(X))
- * where: TP(X) - number of true positives wrt X
- *        FN(X) - number of false negatives wrt X
- */
-public class RecallMetric implements EvaluationMetric {
-
-    public static final String NAME = "recall";
-
-    private static final ObjectParser<RecallMetric, Void> PARSER = new ObjectParser<>(NAME, true, RecallMetric::new);
-
-    public static RecallMetric fromXContent(XContentParser parser) {
-        return PARSER.apply(parser, null);
-    }
-
-    public RecallMetric() {}
-
-    @Override
-    public String getName() {
-        return NAME;
-    }
-
-    @Override
-    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
-        builder.startObject();
-        builder.endObject();
-        return builder;
-    }
-
-    @Override
-    public boolean equals(Object o) {
-        if (this == o) return true;
-        if (o == null || getClass() != o.getClass()) return false;
-        return true;
-    }
-
-    @Override
-    public int hashCode() {
-        return Objects.hashCode(NAME);
-    }
-
-    public static class Result implements EvaluationMetric.Result {
-
-        private static final ParseField CLASSES = new ParseField("classes");
-        private static final ParseField AVG_RECALL = new ParseField("avg_recall");
-
-        @SuppressWarnings("unchecked")
-        private static final ConstructingObjectParser<Result, Void> PARSER = new ConstructingObjectParser<>(
-            "recall_result",
-            true,
-            a -> new Result((List<PerClassSingleValue>) a[0], (double) a[1])
-        );
-
-        static {
-            PARSER.declareObjectArray(constructorArg(), PerClassSingleValue.PARSER, CLASSES);
-            PARSER.declareDouble(constructorArg(), AVG_RECALL);
-        }
-
-        public static Result fromXContent(XContentParser parser) {
-            return PARSER.apply(parser, null);
-        }
-
-        /** List of per-class results. */
-        private final List<PerClassSingleValue> classes;
-        /** Average of per-class recalls. */
-        private final double avgRecall;
-
-        public Result(List<PerClassSingleValue> classes, double avgRecall) {
-            this.classes = Collections.unmodifiableList(Objects.requireNonNull(classes));
-            this.avgRecall = avgRecall;
-        }
-
-        @Override
-        public String getMetricName() {
-            return NAME;
-        }
-
-        public List<PerClassSingleValue> getClasses() {
-            return classes;
-        }
-
-        public double getAvgRecall() {
-            return avgRecall;
-        }
-
-        @Override
-        public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
-            builder.startObject();
-            builder.field(CLASSES.getPreferredName(), classes);
-            builder.field(AVG_RECALL.getPreferredName(), avgRecall);
-            builder.endObject();
-            return builder;
-        }
-
-        @Override
-        public boolean equals(Object o) {
-            if (this == o) return true;
-            if (o == null || getClass() != o.getClass()) return false;
-            Result that = (Result) o;
-            return Objects.equals(this.classes, that.classes) && this.avgRecall == that.avgRecall;
-        }
-
-        @Override
-        public int hashCode() {
-            return Objects.hash(classes, avgRecall);
-        }
-    }
-}
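Likewise, the recall equation from the Javadoc above, as a plain-Java sketch (hypothetical helper):

    // recall(X) = TP(X) / (TP(X) + FN(X))
    static double recall(long truePositives, long falseNegatives) {
        return (double) truePositives / (truePositives + falseNegatives);
    }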
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/common/AucRocPoint.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/common/AucRocPoint.java
deleted file mode 100644
index 08e5122181269..0000000000000
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/common/AucRocPoint.java
+++ /dev/null
@@ -1,92 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-package org.elasticsearch.client.ml.dataframe.evaluation.common;
-
-import org.elasticsearch.common.Strings;
-import org.elasticsearch.xcontent.ConstructingObjectParser;
-import org.elasticsearch.xcontent.ParseField;
-import org.elasticsearch.xcontent.ToXContentObject;
-import org.elasticsearch.xcontent.XContentBuilder;
-import org.elasticsearch.xcontent.XContentParser;
-
-import java.io.IOException;
-import java.util.Objects;
-
-import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg;
-
-public class AucRocPoint implements ToXContentObject {
-
-    public static AucRocPoint fromXContent(XContentParser parser) {
-        return PARSER.apply(parser, null);
-    }
-
-    private static final ParseField TPR = new ParseField("tpr");
-    private static final ParseField FPR = new ParseField("fpr");
-    private static final ParseField THRESHOLD = new ParseField("threshold");
-
-    private static final ConstructingObjectParser<AucRocPoint, Void> PARSER = new ConstructingObjectParser<>(
-        "auc_roc_point",
-        true,
-        args -> new AucRocPoint((double) args[0], (double) args[1], (double) args[2])
-    );
-
-    static {
-        PARSER.declareDouble(constructorArg(), TPR);
-        PARSER.declareDouble(constructorArg(), FPR);
-        PARSER.declareDouble(constructorArg(), THRESHOLD);
-    }
-
-    private final double tpr;
-    private final double fpr;
-    private final double threshold;
-
-    public AucRocPoint(double tpr, double fpr, double threshold) {
-        this.tpr = tpr;
-        this.fpr = fpr;
-        this.threshold = threshold;
-    }
-
-    public double getTruePositiveRate() {
-        return tpr;
-    }
-
-    public double getFalsePositiveRate() {
-        return fpr;
-    }
-
-    public double getThreshold() {
-        return threshold;
-    }
-
-    @Override
-    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
-        return builder.startObject()
-            .field(TPR.getPreferredName(), tpr)
-            .field(FPR.getPreferredName(), fpr)
-            .field(THRESHOLD.getPreferredName(), threshold)
-            .endObject();
-    }
-
-    @Override
-    public boolean equals(Object o) {
-        if (this == o) return true;
-        if (o == null || getClass() != o.getClass()) return false;
-        AucRocPoint that = (AucRocPoint) o;
-        return tpr == that.tpr && fpr == that.fpr && threshold == that.threshold;
-    }
-
-    @Override
-    public int hashCode() {
-        return Objects.hash(tpr, fpr, threshold);
-    }
-
-    @Override
-    public String toString() {
-        return Strings.toString(this);
-    }
-}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/common/AucRocResult.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/common/AucRocResult.java
deleted file mode 100644
index d661115b67291..0000000000000
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/common/AucRocResult.java
+++ /dev/null
@@ -1,99 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-package org.elasticsearch.client.ml.dataframe.evaluation.common;
-
-import org.elasticsearch.client.ml.dataframe.evaluation.EvaluationMetric;
-import org.elasticsearch.common.Strings;
-import org.elasticsearch.core.Nullable;
-import org.elasticsearch.xcontent.ConstructingObjectParser;
-import org.elasticsearch.xcontent.ParseField;
-import org.elasticsearch.xcontent.ToXContent;
-import org.elasticsearch.xcontent.XContentBuilder;
-import org.elasticsearch.xcontent.XContentParser;
-
-import java.io.IOException;
-import java.util.Collections;
-import java.util.List;
-import java.util.Objects;
-
-import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg;
-import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg;
-
-public class AucRocResult implements EvaluationMetric.Result {
-
-    public static final String NAME = "auc_roc";
-
-    public static AucRocResult fromXContent(XContentParser parser) {
-        return PARSER.apply(parser, null);
-    }
-
-    private static final ParseField VALUE = new ParseField("value");
-    private static final ParseField CURVE = new ParseField("curve");
-
-    @SuppressWarnings("unchecked")
-    private static final ConstructingObjectParser<AucRocResult, Void> PARSER = new ConstructingObjectParser<>(
-        NAME,
-        true,
-        args -> new AucRocResult((double) args[0], (List<AucRocPoint>) args[1])
-    );
-
-    static {
-        PARSER.declareDouble(constructorArg(), VALUE);
-        PARSER.declareObjectArray(optionalConstructorArg(), (p, c) -> AucRocPoint.fromXContent(p), CURVE);
-    }
-
-    private final double value;
-    private final List<AucRocPoint> curve;
-
-    public AucRocResult(double value, @Nullable List<AucRocPoint> curve) {
-        this.value = value;
-        this.curve = curve;
-    }
-
-    @Override
-    public String getMetricName() {
-        return NAME;
-    }
-
-    public double getValue() {
-        return value;
-    }
-
-    public List<AucRocPoint> getCurve() {
-        return curve == null ? null : Collections.unmodifiableList(curve);
-    }
-
-    @Override
-    public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException {
-        builder.startObject();
-        builder.field(VALUE.getPreferredName(), value);
-        if (curve != null && curve.isEmpty() == false) {
-            builder.field(CURVE.getPreferredName(), curve);
-        }
-        builder.endObject();
-        return builder;
-    }
-
-    @Override
-    public boolean equals(Object o) {
-        if (this == o) return true;
-        if (o == null || getClass() != o.getClass()) return false;
-        AucRocResult that = (AucRocResult) o;
-        return value == that.value && Objects.equals(curve, that.curve);
-    }
-
-    @Override
-    public int hashCode() {
-        return Objects.hash(value, curve);
-    }
-
-    @Override
-    public String toString() {
-        return Strings.toString(this);
-    }
-}
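The curve held by AucRocResult is a list of (fpr, tpr) points over varying thresholds. The AUC value itself is computed server side, but numerically it corresponds to integrating that curve; a sketch using the trapezoid rule, assuming points sorted by ascending fpr (the array layout is illustrative):

    // points[i] = {fpr, tpr}; AUC = Σ (fpr[i] - fpr[i-1]) * (tpr[i] + tpr[i-1]) / 2
    static double aucFromCurve(double[][] points) {
        double auc = 0.0;
        for (int i = 1; i < points.length; i++) {
            double deltaFpr = points[i][0] - points[i - 1][0];
            auc += deltaFpr * (points[i][1] + points[i - 1][1]) / 2.0;
        }
        return auc;
    }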
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/outlierdetection/AbstractConfusionMatrixMetric.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/outlierdetection/AbstractConfusionMatrixMetric.java
deleted file mode 100644
index e39af0d143c4b..0000000000000
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/outlierdetection/AbstractConfusionMatrixMetric.java
+++ /dev/null
@@ -1,33 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-package org.elasticsearch.client.ml.dataframe.evaluation.outlierdetection;
-
-import org.elasticsearch.client.ml.dataframe.evaluation.EvaluationMetric;
-import org.elasticsearch.xcontent.ParseField;
-import org.elasticsearch.xcontent.ToXContent;
-import org.elasticsearch.xcontent.XContentBuilder;
-
-import java.io.IOException;
-import java.util.List;
-import java.util.Objects;
-
-abstract class AbstractConfusionMatrixMetric implements EvaluationMetric {
-
-    protected static final ParseField AT = new ParseField("at");
-
-    protected final double[] thresholds;
-
-    protected AbstractConfusionMatrixMetric(List<Double> at) {
-        this.thresholds = Objects.requireNonNull(at).stream().mapToDouble(Double::doubleValue).toArray();
-    }
-
-    @Override
-    public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException {
-        return builder.startObject().field(AT.getPreferredName(), thresholds).endObject();
-    }
-}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/outlierdetection/AucRocMetric.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/outlierdetection/AucRocMetric.java
deleted file mode 100644
index 7c8ea07ab6c77..0000000000000
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/outlierdetection/AucRocMetric.java
+++ /dev/null
@@ -1,85 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-package org.elasticsearch.client.ml.dataframe.evaluation.outlierdetection;
-
-import org.elasticsearch.client.ml.dataframe.evaluation.EvaluationMetric;
-import org.elasticsearch.client.ml.dataframe.evaluation.common.AucRocResult;
-import org.elasticsearch.xcontent.ConstructingObjectParser;
-import org.elasticsearch.xcontent.ParseField;
-import org.elasticsearch.xcontent.XContentBuilder;
-import org.elasticsearch.xcontent.XContentParser;
-
-import java.io.IOException;
-import java.util.Objects;
-
-import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg;
-
-/**
- * Area under the curve (AUC) of the receiver operating characteristic (ROC).
- * The ROC curve is a plot of the TPR (true positive rate) against
- * the FPR (false positive rate) over a varying threshold.
- */
-public class AucRocMetric implements EvaluationMetric {
-
-    public static final String NAME = AucRocResult.NAME;
-
-    public static final ParseField INCLUDE_CURVE = new ParseField("include_curve");
-
-    @SuppressWarnings("unchecked")
-    public static final ConstructingObjectParser<AucRocMetric, Void> PARSER = new ConstructingObjectParser<>(
-        NAME,
-        true,
-        args -> new AucRocMetric((Boolean) args[0])
-    );
-
-    static {
-        PARSER.declareBoolean(optionalConstructorArg(), INCLUDE_CURVE);
-    }
-
-    public static AucRocMetric fromXContent(XContentParser parser) {
-        return PARSER.apply(parser, null);
-    }
-
-    public static AucRocMetric withCurve() {
-        return new AucRocMetric(true);
-    }
-
-    private final Boolean includeCurve;
-
-    public AucRocMetric(Boolean includeCurve) {
-        this.includeCurve = includeCurve;
-    }
-
-    @Override
-    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
-        builder.startObject();
-        if (includeCurve != null) {
-            builder.field(INCLUDE_CURVE.getPreferredName(), includeCurve);
-        }
-        builder.endObject();
-        return builder;
-    }
-
-    @Override
-    public String getName() {
-        return NAME;
-    }
-
-    @Override
-    public boolean equals(Object o) {
-        if (this == o) return true;
-        if (o == null || getClass() != o.getClass()) return false;
-        AucRocMetric that = (AucRocMetric) o;
-        return Objects.equals(includeCurve, that.includeCurve);
-    }
-
-    @Override
-    public int hashCode() {
-        return Objects.hash(includeCurve);
-    }
-}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/outlierdetection/ConfusionMatrixMetric.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/outlierdetection/ConfusionMatrixMetric.java
deleted file mode 100644
index 0d4617baeb56f..0000000000000
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/outlierdetection/ConfusionMatrixMetric.java
+++ /dev/null
@@ -1,198 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-package org.elasticsearch.client.ml.dataframe.evaluation.outlierdetection;
-
-import org.elasticsearch.client.ml.dataframe.evaluation.EvaluationMetric;
-import org.elasticsearch.common.Strings;
-import org.elasticsearch.xcontent.ConstructingObjectParser;
-import org.elasticsearch.xcontent.ParseField;
-import org.elasticsearch.xcontent.ToXContent;
-import org.elasticsearch.xcontent.ToXContentObject;
-import org.elasticsearch.xcontent.XContentBuilder;
-import org.elasticsearch.xcontent.XContentParser;
-
-import java.io.IOException;
-import java.util.Arrays;
-import java.util.LinkedHashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Objects;
-
-import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg;
-
-public class ConfusionMatrixMetric extends AbstractConfusionMatrixMetric {
-
-    public static final String NAME = "confusion_matrix";
-
-    @SuppressWarnings("unchecked")
-    private static final ConstructingObjectParser<ConfusionMatrixMetric, Void> PARSER = new ConstructingObjectParser<>(
-        NAME,
-        args -> new ConfusionMatrixMetric((List<Double>) args[0])
-    );
-
-    static {
-        PARSER.declareDoubleArray(constructorArg(), AT);
-    }
-
-    public static ConfusionMatrixMetric fromXContent(XContentParser parser) {
-        return PARSER.apply(parser, null);
-    }
-
-    public static ConfusionMatrixMetric at(Double... at) {
-        return new ConfusionMatrixMetric(Arrays.asList(at));
-    }
-
-    public ConfusionMatrixMetric(List<Double> at) {
-        super(at);
-    }
-
-    @Override
-    public String getName() {
-        return NAME;
-    }
-
-    @Override
-    public boolean equals(Object o) {
-        if (this == o) return true;
-        if (o == null || getClass() != o.getClass()) return false;
-        ConfusionMatrixMetric that = (ConfusionMatrixMetric) o;
-        return Arrays.equals(thresholds, that.thresholds);
-    }
-
-    @Override
-    public int hashCode() {
-        return Arrays.hashCode(thresholds);
-    }
-
-    public static class Result implements EvaluationMetric.Result {
-
-        public static Result fromXContent(XContentParser parser) throws IOException {
-            return new Result(parser.map(LinkedHashMap::new, ConfusionMatrix::fromXContent));
-        }
-
-        private final Map<String, ConfusionMatrix> results;
-
-        public Result(Map<String, ConfusionMatrix> results) {
-            this.results = Objects.requireNonNull(results);
-        }
-
-        @Override
-        public String getMetricName() {
-            return NAME;
-        }
-
-        public ConfusionMatrix getScoreByThreshold(String threshold) {
-            return results.get(threshold);
-        }
-
-        @Override
-        public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException {
-            return builder.map(results);
-        }
-
-        @Override
-        public boolean equals(Object o) {
-            if (this == o) return true;
-            if (o == null || getClass() != o.getClass()) return false;
-            Result that = (Result) o;
-            return Objects.equals(results, that.results);
-        }
-
-        @Override
-        public int hashCode() {
-            return Objects.hash(results);
-        }
-
-        @Override
-        public String toString() {
-            return Strings.toString(this);
-        }
-    }
-
-    public static final class ConfusionMatrix implements ToXContentObject {
-
-        public static ConfusionMatrix fromXContent(XContentParser parser) {
-            return PARSER.apply(parser, null);
-        }
-
-        private static final ParseField TP = new ParseField("tp");
-        private static final ParseField FP = new ParseField("fp");
-        private static final ParseField TN = new ParseField("tn");
-        private static final ParseField FN = new ParseField("fn");
-
-        @SuppressWarnings("unchecked")
-        private static final ConstructingObjectParser<ConfusionMatrix, Void> PARSER = new ConstructingObjectParser<>(
-            "confusion_matrix",
-            true,
-            args -> new ConfusionMatrix((long) args[0], (long) args[1], (long) args[2], (long) args[3])
-        );
-
-        static {
-            PARSER.declareLong(constructorArg(), TP);
-            PARSER.declareLong(constructorArg(), FP);
-            PARSER.declareLong(constructorArg(), TN);
-            PARSER.declareLong(constructorArg(), FN);
-        }
-
-        private final long tp;
-        private final long fp;
-        private final long tn;
-        private final long fn;
-
-        public ConfusionMatrix(long tp, long fp, long tn, long fn) {
-            this.tp = tp;
-            this.fp = fp;
-            this.tn = tn;
-            this.fn = fn;
-        }
-
-        public long getTruePositives() {
-            return tp;
-        }
-
-        public long getFalsePositives() {
-            return fp;
-        }
-
-        public long getTrueNegatives() {
-            return tn;
-        }
-
-        public long getFalseNegatives() {
-            return fn;
-        }
-
-        @Override
-        public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
-            return builder.startObject()
-                .field(TP.getPreferredName(), tp)
-                .field(FP.getPreferredName(), fp)
-                .field(TN.getPreferredName(), tn)
-                .field(FN.getPreferredName(), fn)
-                .endObject();
-        }
-
-        @Override
-        public boolean equals(Object o) {
-            if (this == o) return true;
-            if (o == null || getClass() != o.getClass()) return false;
-            ConfusionMatrix that = (ConfusionMatrix) o;
-            return tp == that.tp && fp == that.fp && tn == that.tn && fn == that.fn;
-        }
-
-        @Override
-        public int hashCode() {
-            return Objects.hash(tp, fp, tn, fn);
-        }
-
-        @Override
-        public String toString() {
-            return Strings.toString(this);
-        }
-    }
-}
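Each "at" threshold turns the predicted probability into a binary decision, and the four counters of the ConfusionMatrix above follow from comparing that decision with the actual label. A plain-Java sketch of the computation (names are hypothetical; the real counting happens server side):

    // Counts tp/fp/tn/fn for one threshold; actual[i] is the true label,
    // score[i] the predicted probability of the positive class.
    static long[] confusionMatrixAt(boolean[] actual, double[] score, double threshold) {
        long tp = 0, fp = 0, tn = 0, fn = 0;
        for (int i = 0; i < actual.length; i++) {
            boolean predicted = score[i] >= threshold;
            if (predicted && actual[i]) tp++;
            else if (predicted) fp++;
            else if (actual[i]) fn++;
            else tn++;
        }
        return new long[] { tp, fp, tn, fn };
    }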
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/outlierdetection/OutlierDetection.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/outlierdetection/OutlierDetection.java
deleted file mode 100644
index 7372e85d0bf05..0000000000000
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/outlierdetection/OutlierDetection.java
+++ /dev/null
@@ -1,131 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-package org.elasticsearch.client.ml.dataframe.evaluation.outlierdetection;
-
-import org.elasticsearch.client.ml.dataframe.evaluation.Evaluation;
-import org.elasticsearch.client.ml.dataframe.evaluation.EvaluationMetric;
-import org.elasticsearch.core.Nullable;
-import org.elasticsearch.xcontent.ConstructingObjectParser;
-import org.elasticsearch.xcontent.ParseField;
-import org.elasticsearch.xcontent.ToXContent;
-import org.elasticsearch.xcontent.XContentBuilder;
-import org.elasticsearch.xcontent.XContentParser;
-
-import java.io.IOException;
-import java.util.Arrays;
-import java.util.Comparator;
-import java.util.List;
-import java.util.Objects;
-
-import static org.elasticsearch.client.ml.dataframe.evaluation.MlEvaluationNamedXContentProvider.registeredMetricName;
-import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg;
-import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg;
-
-/**
- * Evaluation of outlier detection results.
- */
-public class OutlierDetection implements Evaluation {
-
-    public static final String NAME = "outlier_detection";
-
-    private static final ParseField ACTUAL_FIELD = new ParseField("actual_field");
-    private static final ParseField PREDICTED_PROBABILITY_FIELD = new ParseField("predicted_probability_field");
-    private static final ParseField METRICS = new ParseField("metrics");
-
-    @SuppressWarnings("unchecked")
-    public static final ConstructingObjectParser<OutlierDetection, Void> PARSER = new ConstructingObjectParser<>(
-        NAME,
-        true,
-        args -> new OutlierDetection((String) args[0], (String) args[1], (List<EvaluationMetric>) args[2])
-    );
-
-    static {
-        PARSER.declareString(constructorArg(), ACTUAL_FIELD);
-        PARSER.declareString(constructorArg(), PREDICTED_PROBABILITY_FIELD);
-        PARSER.declareNamedObjects(
-            optionalConstructorArg(),
-            (p, c, n) -> p.namedObject(EvaluationMetric.class, registeredMetricName(NAME, n), null),
-            METRICS
-        );
-    }
-
-    public static OutlierDetection fromXContent(XContentParser parser) {
-        return PARSER.apply(parser, null);
-    }
-
-    /**
-     * The field where the actual class is marked up.
-     * The value of this field is assumed to either be 1 or 0, or true or false.
-     */
-    private final String actualField;
-
-    /**
-     * The field of the predicted probability in [0.0, 1.0].
-     */
-    private final String predictedProbabilityField;
-
-    /**
-     * The list of metrics to calculate
-     */
-    private final List<EvaluationMetric> metrics;
-
-    public OutlierDetection(String actualField, String predictedField) {
-        this(actualField, predictedField, (List<EvaluationMetric>) null);
-    }
-
-    public OutlierDetection(String actualField, String predictedProbabilityField, EvaluationMetric... metric) {
-        this(actualField, predictedProbabilityField, Arrays.asList(metric));
-    }
-
-    public OutlierDetection(String actualField, String predictedProbabilityField, @Nullable List<EvaluationMetric> metrics) {
-        this.actualField = Objects.requireNonNull(actualField);
-        this.predictedProbabilityField = Objects.requireNonNull(predictedProbabilityField);
-        if (metrics != null) {
-            metrics.sort(Comparator.comparing(EvaluationMetric::getName));
-        }
-        this.metrics = metrics;
-    }
-
-    @Override
-    public String getName() {
-        return NAME;
-    }
-
-    @Override
-    public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException {
-        builder.startObject();
-        builder.field(ACTUAL_FIELD.getPreferredName(), actualField);
-        builder.field(PREDICTED_PROBABILITY_FIELD.getPreferredName(), predictedProbabilityField);
-
-        if (metrics != null) {
-            builder.startObject(METRICS.getPreferredName());
-            for (EvaluationMetric metric : metrics) {
-                builder.field(metric.getName(), metric);
-            }
-            builder.endObject();
-        }
-
-        builder.endObject();
-        return builder;
-    }
-
-    @Override
-    public boolean equals(Object o) {
-        if (this == o) return true;
-        if (o == null || getClass() != o.getClass()) return false;
-        OutlierDetection that = (OutlierDetection) o;
-        return Objects.equals(actualField, that.actualField)
-            && Objects.equals(predictedProbabilityField, that.predictedProbabilityField)
-            && Objects.equals(metrics, that.metrics);
-    }
-
-    @Override
-    public int hashCode() {
-        return Objects.hash(actualField, predictedProbabilityField, metrics);
-    }
-}
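Before this removal, a caller assembled the evaluation from these classes via the varargs constructor, for example (the field names are illustrative):

    OutlierDetection evaluation = new OutlierDetection(
        "is_outlier",           // actual_field: 0/1 or true/false
        "ml.outlier_score",     // predicted_probability_field in [0.0, 1.0]
        PrecisionMetric.at(0.4, 0.5, 0.6),
        RecallMetric.at(0.4, 0.5, 0.6),
        ConfusionMatrixMetric.at(0.5)
    );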
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/outlierdetection/PrecisionMetric.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/outlierdetection/PrecisionMetric.java
deleted file mode 100644
index 4f992615d79af..0000000000000
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/outlierdetection/PrecisionMetric.java
+++ /dev/null
@@ -1,114 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-package org.elasticsearch.client.ml.dataframe.evaluation.outlierdetection;
-
-import org.elasticsearch.client.ml.dataframe.evaluation.EvaluationMetric;
-import org.elasticsearch.common.Strings;
-import org.elasticsearch.xcontent.ConstructingObjectParser;
-import org.elasticsearch.xcontent.ToXContent;
-import org.elasticsearch.xcontent.XContentBuilder;
-import org.elasticsearch.xcontent.XContentParser;
-
-import java.io.IOException;
-import java.util.Arrays;
-import java.util.LinkedHashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Objects;
-
-import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg;
-
-public class PrecisionMetric extends AbstractConfusionMatrixMetric {
-
-    public static final String NAME = "precision";
-
-    @SuppressWarnings("unchecked")
-    private static final ConstructingObjectParser<PrecisionMetric, Void> PARSER = new ConstructingObjectParser<>(
-        NAME,
-        args -> new PrecisionMetric((List<Double>) args[0])
-    );
-
-    static {
-        PARSER.declareDoubleArray(constructorArg(), AT);
-    }
-
-    public static PrecisionMetric fromXContent(XContentParser parser) {
-        return PARSER.apply(parser, null);
-    }
-
-    public static PrecisionMetric at(Double... at) {
-        return new PrecisionMetric(Arrays.asList(at));
-    }
-
-    public PrecisionMetric(List<Double> at) {
-        super(at);
-    }
-
-    @Override
-    public String getName() {
-        return NAME;
-    }
-
-    @Override
-    public boolean equals(Object o) {
-        if (this == o) return true;
-        if (o == null || getClass() != o.getClass()) return false;
-        PrecisionMetric that = (PrecisionMetric) o;
-        return Arrays.equals(thresholds, that.thresholds);
-    }
-
-    @Override
-    public int hashCode() {
-        return Arrays.hashCode(thresholds);
-    }
-
-    public static class Result implements EvaluationMetric.Result {
-
-        public static Result fromXContent(XContentParser parser) throws IOException {
-            return new Result(parser.map(LinkedHashMap::new, p -> p.doubleValue()));
-        }
-
-        private final Map<String, Double> results;
-
-        public Result(Map<String, Double> results) {
-            this.results = Objects.requireNonNull(results);
-        }
-
-        @Override
-        public String getMetricName() {
-            return NAME;
-        }
-
-        public Double getScoreByThreshold(String threshold) {
-            return results.get(threshold);
-        }
-
-        @Override
-        public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException {
-            return builder.map(results);
-        }
-
-        @Override
-        public boolean equals(Object o) {
-            if (this == o) return true;
-            if (o == null || getClass() != o.getClass()) return false;
-            Result that = (Result) o;
-            return Objects.equals(results, that.results);
-        }
-
-        @Override
-        public int hashCode() {
-            return Objects.hash(results);
-        }
-
-        @Override
-        public String toString() {
-            return Strings.toString(this);
-        }
-    }
-}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/outlierdetection/RecallMetric.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/outlierdetection/RecallMetric.java
deleted file mode 100644
index 531c62f825722..0000000000000
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/outlierdetection/RecallMetric.java
+++ /dev/null
@@ -1,114 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-package org.elasticsearch.client.ml.dataframe.evaluation.outlierdetection;
-
-import org.elasticsearch.client.ml.dataframe.evaluation.EvaluationMetric;
-import org.elasticsearch.common.Strings;
-import org.elasticsearch.xcontent.ConstructingObjectParser;
-import org.elasticsearch.xcontent.ToXContent;
-import org.elasticsearch.xcontent.XContentBuilder;
-import org.elasticsearch.xcontent.XContentParser;
-
-import java.io.IOException;
-import java.util.Arrays;
-import java.util.LinkedHashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Objects;
-
-import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg;
-
-public class RecallMetric extends AbstractConfusionMatrixMetric {
-
-    public static final String NAME = "recall";
-
-    @SuppressWarnings("unchecked")
-    private static final ConstructingObjectParser<RecallMetric, Void> PARSER = new ConstructingObjectParser<>(
-        NAME,
-        args -> new RecallMetric((List<Double>) args[0])
-    );
-
-    static {
-        PARSER.declareDoubleArray(constructorArg(), AT);
-    }
-
-    public static RecallMetric fromXContent(XContentParser parser) {
-        return PARSER.apply(parser, null);
-    }
-
-    public static RecallMetric at(Double... at) {
-        return new RecallMetric(Arrays.asList(at));
-    }
-
-    public RecallMetric(List<Double> at) {
-        super(at);
-    }
-
-    @Override
-    public String getName() {
-        return NAME;
-    }
-
-    @Override
-    public boolean equals(Object o) {
-        if (this == o) return true;
-        if (o == null || getClass() != o.getClass()) return false;
-        RecallMetric that = (RecallMetric) o;
-        return Arrays.equals(thresholds, that.thresholds);
-    }
-
-    @Override
-    public int hashCode() {
-        return Arrays.hashCode(thresholds);
-    }
-
-    public static class Result implements EvaluationMetric.Result {
-
-        public static Result fromXContent(XContentParser parser) throws IOException {
-            return new Result(parser.map(LinkedHashMap::new, p -> p.doubleValue()));
-        }
-
-        private final Map<String, Double> results;
-
-        public Result(Map<String, Double> results) {
-            this.results = Objects.requireNonNull(results);
-        }
-
-        @Override
-        public String getMetricName() {
-            return NAME;
-        }
-
-        public Double getScoreByThreshold(String threshold) {
-            return results.get(threshold);
-        }
-
-        @Override
-        public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException {
-            return builder.map(results);
-        }
-
-        @Override
-        public boolean equals(Object o) {
-            if (this == o) return true;
-            if (o == null || getClass() != o.getClass()) return false;
-            Result that = (Result) o;
-            return Objects.equals(results, that.results);
-        }
-
-        @Override
-        public int hashCode() {
-            return Objects.hash(results);
-        }
-
-        @Override
-        public String toString() {
-            return Strings.toString(this);
-        }
-    }
-}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/regression/HuberMetric.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/regression/HuberMetric.java
deleted file mode 100644
index 038c659324da4..0000000000000
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/regression/HuberMetric.java
+++ /dev/null
@@ -1,138 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-package org.elasticsearch.client.ml.dataframe.evaluation.regression;
-
-import org.elasticsearch.client.ml.dataframe.Regression.LossFunction;
-import org.elasticsearch.client.ml.dataframe.evaluation.EvaluationMetric;
-import org.elasticsearch.core.Nullable;
-import org.elasticsearch.xcontent.ConstructingObjectParser;
-import org.elasticsearch.xcontent.ParseField;
-import org.elasticsearch.xcontent.XContentBuilder;
-import org.elasticsearch.xcontent.XContentParser;
-
-import java.io.IOException;
-import java.util.Objects;
-
-import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg;
-import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg;
-
-/**
- * Calculates the pseudo Huber loss function.
- *
- * equation: huber = 1/n * Σ(δ^2 * (sqrt(1 + a^2 / δ^2) - 1))
- * where: a = y - y´
- *        δ - parameter that controls the steepness
- */
-public class HuberMetric implements EvaluationMetric {
-
-    public static final String NAME = LossFunction.HUBER.toString();
-
-    public static final ParseField DELTA = new ParseField("delta");
-
-    private static final ConstructingObjectParser<HuberMetric, Void> PARSER = new ConstructingObjectParser<>(
-        NAME,
-        true,
-        args -> new HuberMetric((Double) args[0])
-    );
-
-    static {
-        PARSER.declareDouble(optionalConstructorArg(), DELTA);
-    }
-
-    public static HuberMetric fromXContent(XContentParser parser) {
-        return PARSER.apply(parser, null);
-    }
-
-    private final Double delta;
-
-    public HuberMetric(@Nullable Double delta) {
-        this.delta = delta;
-    }
-
-    @Override
-    public String getName() {
-        return NAME;
-    }
-
-    @Override
-    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
-        builder.startObject();
-        if (delta != null) {
-            builder.field(DELTA.getPreferredName(), delta);
-        }
-        builder.endObject();
-        return builder;
-    }
-
-    @Override
-    public boolean equals(Object o) {
-        if (this == o) return true;
-        if (o == null || getClass() != o.getClass()) return false;
-        HuberMetric that = (HuberMetric) o;
-        return Objects.equals(this.delta, that.delta);
-    }
-
-    @Override
-    public int hashCode() {
-        return Objects.hash(delta);
-    }
-
-    public static class Result implements EvaluationMetric.Result {
-
-        public static final ParseField VALUE = new ParseField("value");
-        private final double value;
-
-        public static Result fromXContent(XContentParser parser) {
-            return PARSER.apply(parser, null);
-        }
-
-        private static final ConstructingObjectParser<Result, Void> PARSER = new ConstructingObjectParser<>(
-            NAME + "_result",
-            true,
-            args -> new Result((double) args[0])
-        );
-
-        static {
-            PARSER.declareDouble(constructorArg(), VALUE);
-        }
-
-        public Result(double value) {
-            this.value = value;
-        }
-
-        @Override
-        public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
-            builder.startObject();
-            builder.field(VALUE.getPreferredName(), value);
-            builder.endObject();
-            return builder;
-        }
-
-        public double getValue() {
-            return value;
-        }
-
-        @Override
-        public String getMetricName() {
-            return NAME;
-        }
-
-        @Override
-        public boolean equals(Object o) {
-            if (this == o) return true;
-            if (o == null || getClass() != o.getClass()) return false;
-            Result that = (Result) o;
-            return this.value == that.value;
-        }
-
-        @Override
-        public int hashCode() {
-            return Double.hashCode(value);
-        }
-    }
-}
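For reference, the pseudo-Huber loss from the Javadoc above in plain Java (a sketch; names are hypothetical):

    // huber = 1/n * Σ(δ² * (sqrt(1 + a²/δ²) - 1)), with a = y - y´
    static double pseudoHuber(double[] actual, double[] predicted, double delta) {
        double sum = 0.0;
        for (int i = 0; i < actual.length; i++) {
            double a = actual[i] - predicted[i];
            sum += delta * delta * (Math.sqrt(1.0 + (a * a) / (delta * delta)) - 1.0);
        }
        return sum / actual.length;
    }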
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/regression/MeanSquaredErrorMetric.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/regression/MeanSquaredErrorMetric.java
deleted file mode 100644
index 4c593dc75db4e..0000000000000
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/regression/MeanSquaredErrorMetric.java
+++ /dev/null
@@ -1,118 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-package org.elasticsearch.client.ml.dataframe.evaluation.regression;
-
-import org.elasticsearch.client.ml.dataframe.Regression.LossFunction;
-import org.elasticsearch.client.ml.dataframe.evaluation.EvaluationMetric;
-import org.elasticsearch.xcontent.ConstructingObjectParser;
-import org.elasticsearch.xcontent.ObjectParser;
-import org.elasticsearch.xcontent.ParseField;
-import org.elasticsearch.xcontent.XContentBuilder;
-import org.elasticsearch.xcontent.XContentParser;
-
-import java.io.IOException;
-import java.util.Objects;
-
-import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg;
-
-/**
- * Calculates the mean squared error between two known numerical fields.
- *
- * equation: mse = 1/n * Σ(y - y´)^2
- */
-public class MeanSquaredErrorMetric implements EvaluationMetric {
-
-    public static final String NAME = LossFunction.MSE.toString();
-
-    private static final ObjectParser<MeanSquaredErrorMetric, Void> PARSER = new ObjectParser<>(NAME, true, MeanSquaredErrorMetric::new);
-
-    public static MeanSquaredErrorMetric fromXContent(XContentParser parser) {
-        return PARSER.apply(parser, null);
-    }
-
-    public MeanSquaredErrorMetric() {}
-
-    @Override
-    public String getName() {
-        return NAME;
-    }
-
-    @Override
-    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
-        builder.startObject();
-        builder.endObject();
-        return builder;
-    }
-
-    @Override
-    public boolean equals(Object o) {
-        if (this == o) return true;
-        if (o == null || getClass() != o.getClass()) return false;
-        return true;
-    }
-
-    @Override
-    public int hashCode() {
-        // create static hash code from name as there are currently no unique fields per class instance
-        return Objects.hashCode(NAME);
-    }
-
-    public static class Result implements EvaluationMetric.Result {
-
-        public static final ParseField VALUE = new ParseField("value");
-        private final double value;
-
-        public static Result fromXContent(XContentParser parser) {
-            return PARSER.apply(parser, null);
-        }
-
-        private static final ConstructingObjectParser<Result, Void> PARSER = new ConstructingObjectParser<>(
-            NAME + "_result",
-            true,
-            args -> new Result((double) args[0])
-        );
-
-        static {
-            PARSER.declareDouble(constructorArg(), VALUE);
-        }
-
-        public Result(double value) {
-            this.value = value;
-        }
-
-        @Override
-        public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
-            builder.startObject();
-            builder.field(VALUE.getPreferredName(), value);
-            builder.endObject();
-            return builder;
-        }
-
-        public double getValue() {
-            return value;
-        }
-
-        @Override
-        public String getMetricName() {
-            return NAME;
-        }
-
-        @Override
-        public boolean equals(Object o) {
-            if (this == o) return true;
-            if (o == null || getClass() != o.getClass()) return false;
-            Result that = (Result) o;
-            return this.value == that.value;
-        }
-
-        @Override
-        public int hashCode() {
-            return Double.hashCode(value);
-        }
-    }
-}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/regression/MeanSquaredLogarithmicErrorMetric.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/regression/MeanSquaredLogarithmicErrorMetric.java
deleted file mode 100644
index 676ee74cb3f83..0000000000000
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/regression/MeanSquaredLogarithmicErrorMetric.java
+++ /dev/null
@@ -1,137 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-package org.elasticsearch.client.ml.dataframe.evaluation.regression;
-
-import org.elasticsearch.client.ml.dataframe.Regression.LossFunction;
-import org.elasticsearch.client.ml.dataframe.evaluation.EvaluationMetric;
-import org.elasticsearch.core.Nullable;
-import org.elasticsearch.xcontent.ConstructingObjectParser;
-import org.elasticsearch.xcontent.ParseField;
-import org.elasticsearch.xcontent.XContentBuilder;
-import org.elasticsearch.xcontent.XContentParser;
-
-import java.io.IOException;
-import java.util.Objects;
-
-import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg;
-import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg;
-
-/**
- * Calculates the mean squared logarithmic error between two known numerical fields.
- *
- * equation: msle = 1/n * Σ(log(y + offset) - log(y´ + offset))^2
- * where offset is used to make sure the argument to log function is always positive
- */
-public class MeanSquaredLogarithmicErrorMetric implements EvaluationMetric {
-
-    public static final String NAME = LossFunction.MSLE.toString();
-
-    public static final ParseField OFFSET = new ParseField("offset");
-
-    private static final ConstructingObjectParser<MeanSquaredLogarithmicErrorMetric, Void> PARSER = new ConstructingObjectParser<>(
-        NAME,
-        true,
-        args -> new MeanSquaredLogarithmicErrorMetric((Double) args[0])
-    );
-
-    static {
-        PARSER.declareDouble(optionalConstructorArg(), OFFSET);
-    }
-
-    public static MeanSquaredLogarithmicErrorMetric fromXContent(XContentParser parser) {
-        return PARSER.apply(parser, null);
-    }
-
-    private final Double offset;
-
-    public MeanSquaredLogarithmicErrorMetric(@Nullable Double offset) {
-        this.offset = offset;
-    }
-
-    @Override
-    public String getName() {
-        return NAME;
-    }
-
-    @Override
-    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
-        builder.startObject();
-        if (offset != null) {
-            builder.field(OFFSET.getPreferredName(), offset);
-        }
-        builder.endObject();
-        return builder;
-    }
-
-    @Override
-    public boolean equals(Object o) {
-        if (this == o) return true;
-        if (o == null || getClass() != o.getClass()) return false;
-        MeanSquaredLogarithmicErrorMetric that = (MeanSquaredLogarithmicErrorMetric) o;
-        return Objects.equals(this.offset, that.offset);
-    }
-
-    @Override
-    public int hashCode() {
-        return Objects.hash(offset);
-    }
-
-    public static class Result implements EvaluationMetric.Result {
-
-        public static final ParseField VALUE = new ParseField("value");
-        private final double value;
-
-        public static Result fromXContent(XContentParser parser) {
-            return PARSER.apply(parser, null);
-        }
-
-        private static final ConstructingObjectParser<Result, Void> PARSER = new ConstructingObjectParser<>(
-            NAME + "_result",
-            true,
-            args -> new Result((double) args[0])
-        );
-
-        static {
-            PARSER.declareDouble(constructorArg(), VALUE);
-        }
-
-        public Result(double value) {
-            this.value = value;
-        }
-
-        @Override
-        public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
-            builder.startObject();
-            builder.field(VALUE.getPreferredName(), value);
-            builder.endObject();
-            return builder;
-        }
-
-        public double getValue() {
-            return value;
-        }
-
-        @Override
-        public String getMetricName() {
-            return NAME;
-        }
-
-        @Override
-        public boolean equals(Object o) {
-            if (this == o) return true;
-            if (o == null || getClass() != o.getClass()) return false;
-            Result that = (Result) o;
-            return this.value == that.value;
-        }
-
-        @Override
-        public int hashCode() {
-            return Double.hashCode(value);
-        }
-    }
-}
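The offset parameter above keeps the logarithm's argument positive; in plain Java the metric is (a sketch, hypothetical names):

    // msle = 1/n * Σ(log(y + offset) - log(y´ + offset))²
    static double msle(double[] actual, double[] predicted, double offset) {
        double sum = 0.0;
        for (int i = 0; i < actual.length; i++) {
            double d = Math.log(actual[i] + offset) - Math.log(predicted[i] + offset);
            sum += d * d;
        }
        return sum / actual.length;
    }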
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/regression/RSquaredMetric.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/regression/RSquaredMetric.java
deleted file mode 100644
index 496a3d55c0e51..0000000000000
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/regression/RSquaredMetric.java
+++ /dev/null
@@ -1,120 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-package org.elasticsearch.client.ml.dataframe.evaluation.regression;
-
-import org.elasticsearch.client.ml.dataframe.evaluation.EvaluationMetric;
-import org.elasticsearch.xcontent.ConstructingObjectParser;
-import org.elasticsearch.xcontent.ObjectParser;
-import org.elasticsearch.xcontent.ParseField;
-import org.elasticsearch.xcontent.XContentBuilder;
-import org.elasticsearch.xcontent.XContentParser;
-
-import java.io.IOException;
-import java.util.Objects;
-
-import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg;
-
-/**
- * Calculates R-Squared between two known numerical fields.
- *
- * equation: r_squared = 1 - SSres/SStot
- * such that,
- * SSres = Σ(y - y´)^2
- * SStot = Σ(y - y_mean)^2
- */
-public class RSquaredMetric implements EvaluationMetric {
-
-    public static final String NAME = "r_squared";
-
-    private static final ObjectParser<RSquaredMetric, Void> PARSER = new ObjectParser<>(NAME, true, RSquaredMetric::new);
-
-    public static RSquaredMetric fromXContent(XContentParser parser) {
-        return PARSER.apply(parser, null);
-    }
-
-    public RSquaredMetric() {}
-
-    @Override
-    public String getName() {
-        return NAME;
-    }
-
-    @Override
-    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
-        builder.startObject();
-        builder.endObject();
-        return builder;
-    }
-
-    @Override
-    public boolean equals(Object o) {
-        if (this == o) return true;
-        if (o == null || getClass() != o.getClass()) return false;
-        return true;
-    }
-
-    @Override
-    public int hashCode() {
-        // create static hash code from name as there are currently no unique fields per class instance
-        return Objects.hashCode(NAME);
-    }
-
-    public static class Result implements EvaluationMetric.Result {
-
-        public static final ParseField VALUE = new ParseField("value");
-        private final double value;
-
-        public static Result fromXContent(XContentParser parser) {
-            return PARSER.apply(parser, null);
-        }
-
-        private static final ConstructingObjectParser<Result, Void> PARSER = new ConstructingObjectParser<>(
-            NAME + "_result",
-            true,
-            args -> new Result((double) args[0])
-        );
-
-        static {
-            PARSER.declareDouble(constructorArg(), VALUE);
-        }
-
-        public Result(double value) {
-            this.value = value;
-        }
-
-        @Override
-        public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
-            builder.startObject();
-            builder.field(VALUE.getPreferredName(), value);
-            builder.endObject();
-            return builder;
-        }
-
-        public double getValue() {
-            return value;
-        }
-
-        @Override
-        public String getMetricName() {
-            return NAME;
-        }
-
-        @Override
-        public boolean equals(Object o) {
-            if (this == o) return true;
-            if (o == null || getClass() != o.getClass()) return false;
-            Result that = (Result) o;
-            return this.value == that.value;
-        }
-
-        @Override
-        public int hashCode() {
-            return Double.hashCode(value);
-        }
-    }
-}
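And the R² definition from the Javadoc above, as a plain-Java sketch (hypothetical names):

    // r_squared = 1 - SSres / SStot
    static double rSquared(double[] actual, double[] predicted) {
        double mean = 0.0;
        for (double y : actual) mean += y;
        mean /= actual.length;
        double ssRes = 0.0, ssTot = 0.0;
        for (int i = 0; i < actual.length; i++) {
            ssRes += (actual[i] - predicted[i]) * (actual[i] - predicted[i]);
            ssTot += (actual[i] - mean) * (actual[i] - mean);
        }
        return 1.0 - ssRes / ssTot;
    }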
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/regression/Regression.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/regression/Regression.java
deleted file mode 100644
index 622013957281e..0000000000000
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/regression/Regression.java
+++ /dev/null
@@ -1,132 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-package org.elasticsearch.client.ml.dataframe.evaluation.regression;
-
-import org.elasticsearch.client.ml.dataframe.evaluation.Evaluation;
-import org.elasticsearch.client.ml.dataframe.evaluation.EvaluationMetric;
-import org.elasticsearch.core.Nullable;
-import org.elasticsearch.xcontent.ConstructingObjectParser;
-import org.elasticsearch.xcontent.ParseField;
-import org.elasticsearch.xcontent.ToXContent;
-import org.elasticsearch.xcontent.XContentBuilder;
-import org.elasticsearch.xcontent.XContentParser;
-
-import java.io.IOException;
-import java.util.Arrays;
-import java.util.Comparator;
-import java.util.List;
-import java.util.Objects;
-
-import static org.elasticsearch.client.ml.dataframe.evaluation.MlEvaluationNamedXContentProvider.registeredMetricName;
-import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg;
-import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg;
-
-/**
- * Evaluation of regression results.
- */
-public class Regression implements Evaluation {
-
-    public static final String NAME = "regression";
-
-    private static final ParseField ACTUAL_FIELD = new ParseField("actual_field");
-    private static final ParseField PREDICTED_FIELD = new ParseField("predicted_field");
-    private static final ParseField METRICS = new ParseField("metrics");
-
-    @SuppressWarnings("unchecked")
-    public static final ConstructingObjectParser<Regression, Void> PARSER = new ConstructingObjectParser<>(
-        NAME,
-        true,
-        a -> new Regression((String) a[0], (String) a[1], (List<EvaluationMetric>) a[2])
-    );
-
-    static {
-        PARSER.declareString(constructorArg(), ACTUAL_FIELD);
-        PARSER.declareString(constructorArg(), PREDICTED_FIELD);
-        PARSER.declareNamedObjects(
-            optionalConstructorArg(),
-            (p, c, n) -> p.namedObject(EvaluationMetric.class, registeredMetricName(NAME, n), c),
-            METRICS
-        );
-    }
-
-    public static Regression fromXContent(XContentParser parser) {
-        return PARSER.apply(parser, null);
-    }
-
-    /**
-     * The field containing the actual value
-     * The value of this field is assumed to be numeric
-     */
-    private final String actualField;
-
-    /**
-     * The field containing the predicted value
-     * The value of this field is assumed to be numeric
-     */
-    private final String predictedField;
-
-    /**
-     * The list of metrics to calculate
-     */
-    private final List<EvaluationMetric> metrics;
-
-    public Regression(String actualField, String predictedField) {
-        this(actualField, predictedField, (List<EvaluationMetric>) null);
-    }
-
-    public Regression(String actualField, String predictedField, EvaluationMetric... metrics) {
-        this(actualField, predictedField, Arrays.asList(metrics));
-    }
-
-    public Regression(String actualField, String predictedField, @Nullable List<EvaluationMetric> metrics) {
-        this.actualField = Objects.requireNonNull(actualField);
-        this.predictedField = Objects.requireNonNull(predictedField);
-        if (metrics != null) {
-            metrics.sort(Comparator.comparing(EvaluationMetric::getName));
-        }
-        this.metrics = metrics;
-    }
-
-    @Override
-    public String getName() {
-        return NAME;
-    }
-
-    @Override
-    public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException {
-        builder.startObject();
-        builder.field(ACTUAL_FIELD.getPreferredName(), actualField);
-        builder.field(PREDICTED_FIELD.getPreferredName(), predictedField);
-
-        if (metrics != null) {
-            builder.startObject(METRICS.getPreferredName());
-            for (EvaluationMetric metric : metrics) {
-                builder.field(metric.getName(), metric);
-            }
-            builder.endObject();
-        }
-
-        builder.endObject();
-        return builder;
-    }
-
-    @Override
-    public boolean equals(Object o) {
-        if (this == o) return true;
-        if (o == null || getClass() != o.getClass()) return false;
-        Regression that = (Regression) o;
-        return Objects.equals(that.actualField, this.actualField)
-            && Objects.equals(that.predictedField, this.predictedField)
-            && Objects.equals(that.metrics, this.metrics);
-    }
-
-    @Override
-    public int hashCode() {
-        return Objects.hash(actualField, predictedField, metrics);
-    }
-}
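As with OutlierDetection above, a regression evaluation was built from these now-removed classes, for example (field names are illustrative):

    Regression evaluation = new Regression(
        "price",                  // actual_field, numeric
        "ml.price_prediction",    // predicted_field, numeric
        new MeanSquaredErrorMetric(),
        new HuberMetric(1.0),
        new RSquaredMetric()
    );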
 metrics) { - this(actualField, predictedField, Arrays.asList(metrics)); - } - - public Regression(String actualField, String predictedField, @Nullable List<EvaluationMetric> metrics) { - this.actualField = Objects.requireNonNull(actualField); - this.predictedField = Objects.requireNonNull(predictedField); - if (metrics != null) { - metrics.sort(Comparator.comparing(EvaluationMetric::getName)); - } - this.metrics = metrics; - } - - @Override - public String getName() { - return NAME; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { - builder.startObject(); - builder.field(ACTUAL_FIELD.getPreferredName(), actualField); - builder.field(PREDICTED_FIELD.getPreferredName(), predictedField); - - if (metrics != null) { - builder.startObject(METRICS.getPreferredName()); - for (EvaluationMetric metric : metrics) { - builder.field(metric.getName(), metric); - } - builder.endObject(); - } - - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - Regression that = (Regression) o; - return Objects.equals(that.actualField, this.actualField) - && Objects.equals(that.predictedField, this.predictedField) - && Objects.equals(that.metrics, this.metrics); - } - - @Override - public int hashCode() { - return Objects.hash(actualField, predictedField, metrics); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/explain/FieldSelection.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/explain/FieldSelection.java deleted file mode 100644 index e6a0362e3c0ca..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/explain/FieldSelection.java +++ /dev/null @@ -1,161 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1.
- */ -package org.elasticsearch.client.ml.dataframe.explain; - -import org.elasticsearch.core.Nullable; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Collections; -import java.util.HashSet; -import java.util.List; -import java.util.Locale; -import java.util.Objects; -import java.util.Set; - -public class FieldSelection implements ToXContentObject { - - private static final ParseField NAME = new ParseField("name"); - private static final ParseField MAPPING_TYPES = new ParseField("mapping_types"); - private static final ParseField IS_INCLUDED = new ParseField("is_included"); - private static final ParseField IS_REQUIRED = new ParseField("is_required"); - private static final ParseField FEATURE_TYPE = new ParseField("feature_type"); - private static final ParseField REASON = new ParseField("reason"); - - public enum FeatureType { - CATEGORICAL, - NUMERICAL; - - public static FeatureType fromString(String value) { - return FeatureType.valueOf(value.toUpperCase(Locale.ROOT)); - } - - @Override - public String toString() { - return name().toLowerCase(Locale.ROOT); - } - } - - @SuppressWarnings("unchecked") - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "field_selection", - true, - a -> new FieldSelection( - (String) a[0], - new HashSet<>((List) a[1]), - (boolean) a[2], - (boolean) a[3], - (FeatureType) a[4], - (String) a[5] - ) - ); - - static { - PARSER.declareString(ConstructingObjectParser.constructorArg(), NAME); - PARSER.declareStringArray(ConstructingObjectParser.constructorArg(), MAPPING_TYPES); - PARSER.declareBoolean(ConstructingObjectParser.constructorArg(), IS_INCLUDED); - PARSER.declareBoolean(ConstructingObjectParser.constructorArg(), IS_REQUIRED); - PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), FeatureType::fromString, FEATURE_TYPE); - PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), REASON); - } - - private final String name; - private final Set mappingTypes; - private final boolean isIncluded; - private final boolean isRequired; - private final FeatureType featureType; - private final String reason; - - public static FieldSelection included(String name, Set mappingTypes, boolean isRequired, FeatureType featureType) { - return new FieldSelection(name, mappingTypes, true, isRequired, featureType, null); - } - - public static FieldSelection excluded(String name, Set mappingTypes, String reason) { - return new FieldSelection(name, mappingTypes, false, false, null, reason); - } - - FieldSelection( - String name, - Set mappingTypes, - boolean isIncluded, - boolean isRequired, - @Nullable FeatureType featureType, - @Nullable String reason - ) { - this.name = Objects.requireNonNull(name); - this.mappingTypes = Collections.unmodifiableSet(mappingTypes); - this.isIncluded = isIncluded; - this.isRequired = isRequired; - this.featureType = featureType; - this.reason = reason; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(NAME.getPreferredName(), name); - builder.field(MAPPING_TYPES.getPreferredName(), mappingTypes); - builder.field(IS_INCLUDED.getPreferredName(), isIncluded); - builder.field(IS_REQUIRED.getPreferredName(), isRequired); - if (featureType != null) { - 
builder.field(FEATURE_TYPE.getPreferredName(), featureType); - } - if (reason != null) { - builder.field(REASON.getPreferredName(), reason); - } - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - FieldSelection that = (FieldSelection) o; - return Objects.equals(name, that.name) - && Objects.equals(mappingTypes, that.mappingTypes) - && isIncluded == that.isIncluded - && isRequired == that.isRequired - && Objects.equals(featureType, that.featureType) - && Objects.equals(reason, that.reason); - } - - @Override - public int hashCode() { - return Objects.hash(name, mappingTypes, isIncluded, isRequired, featureType, reason); - } - - public String getName() { - return name; - } - - public Set getMappingTypes() { - return mappingTypes; - } - - public boolean isIncluded() { - return isIncluded; - } - - public boolean isRequired() { - return isRequired; - } - - @Nullable - public FeatureType getFeatureType() { - return featureType; - } - - @Nullable - public String getReason() { - return reason; - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/explain/MemoryEstimation.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/explain/MemoryEstimation.java deleted file mode 100644 index 54525134853aa..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/explain/MemoryEstimation.java +++ /dev/null @@ -1,96 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ -package org.elasticsearch.client.ml.dataframe.explain; - -import org.elasticsearch.common.unit.ByteSizeValue; -import org.elasticsearch.core.Nullable; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Objects; - -import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; - -public class MemoryEstimation implements ToXContentObject { - - public static final ParseField EXPECTED_MEMORY_WITHOUT_DISK = new ParseField("expected_memory_without_disk"); - public static final ParseField EXPECTED_MEMORY_WITH_DISK = new ParseField("expected_memory_with_disk"); - - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "memory_estimation", - true, - a -> new MemoryEstimation((ByteSizeValue) a[0], (ByteSizeValue) a[1]) - ); - - static { - PARSER.declareField( - optionalConstructorArg(), - (p, c) -> ByteSizeValue.parseBytesSizeValue(p.text(), EXPECTED_MEMORY_WITHOUT_DISK.getPreferredName()), - EXPECTED_MEMORY_WITHOUT_DISK, - ObjectParser.ValueType.VALUE - ); - PARSER.declareField( - optionalConstructorArg(), - (p, c) -> ByteSizeValue.parseBytesSizeValue(p.text(), EXPECTED_MEMORY_WITH_DISK.getPreferredName()), - EXPECTED_MEMORY_WITH_DISK, - ObjectParser.ValueType.VALUE - ); - } - - private final ByteSizeValue expectedMemoryWithoutDisk; - private final ByteSizeValue expectedMemoryWithDisk; - - public MemoryEstimation(@Nullable ByteSizeValue expectedMemoryWithoutDisk, @Nullable ByteSizeValue expectedMemoryWithDisk) { - this.expectedMemoryWithoutDisk = expectedMemoryWithoutDisk; - this.expectedMemoryWithDisk = expectedMemoryWithDisk; - } - - public ByteSizeValue getExpectedMemoryWithoutDisk() { - return expectedMemoryWithoutDisk; - } - - public ByteSizeValue getExpectedMemoryWithDisk() { - return expectedMemoryWithDisk; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - if (expectedMemoryWithoutDisk != null) { - builder.field(EXPECTED_MEMORY_WITHOUT_DISK.getPreferredName(), expectedMemoryWithoutDisk.getStringRep()); - } - if (expectedMemoryWithDisk != null) { - builder.field(EXPECTED_MEMORY_WITH_DISK.getPreferredName(), expectedMemoryWithDisk.getStringRep()); - } - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object other) { - if (this == other) { - return true; - } - if (other == null || getClass() != other.getClass()) { - return false; - } - - MemoryEstimation that = (MemoryEstimation) other; - return Objects.equals(expectedMemoryWithoutDisk, that.expectedMemoryWithoutDisk) - && Objects.equals(expectedMemoryWithDisk, that.expectedMemoryWithDisk); - } - - @Override - public int hashCode() { - return Objects.hash(expectedMemoryWithoutDisk, expectedMemoryWithDisk); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/AnalysisStats.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/AnalysisStats.java deleted file mode 100644 index dcd21d6f6b3e1..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/AnalysisStats.java +++ /dev/null @@ -1,18 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. 
under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.dataframe.stats; - -import org.elasticsearch.xcontent.ToXContentObject; - -/** - * Statistics for the data frame analysis - */ -public interface AnalysisStats extends ToXContentObject { - - String getName(); -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/AnalysisStatsNamedXContentProvider.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/AnalysisStatsNamedXContentProvider.java deleted file mode 100644 index 4da0981fa87d3..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/AnalysisStatsNamedXContentProvider.java +++ /dev/null @@ -1,37 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.dataframe.stats; - -import org.elasticsearch.client.ml.dataframe.stats.classification.ClassificationStats; -import org.elasticsearch.client.ml.dataframe.stats.outlierdetection.OutlierDetectionStats; -import org.elasticsearch.client.ml.dataframe.stats.regression.RegressionStats; -import org.elasticsearch.plugins.spi.NamedXContentProvider; -import org.elasticsearch.xcontent.NamedXContentRegistry; - -import java.util.Arrays; -import java.util.List; - -public class AnalysisStatsNamedXContentProvider implements NamedXContentProvider { - - @Override - public List<NamedXContentRegistry.Entry> getNamedXContentParsers() { - return Arrays.asList( - new NamedXContentRegistry.Entry( - AnalysisStats.class, - ClassificationStats.NAME, - (p, c) -> ClassificationStats.PARSER.apply(p, null) - ), - new NamedXContentRegistry.Entry( - AnalysisStats.class, - OutlierDetectionStats.NAME, - (p, c) -> OutlierDetectionStats.PARSER.apply(p, null) - ), - new NamedXContentRegistry.Entry(AnalysisStats.class, RegressionStats.NAME, (p, c) -> RegressionStats.PARSER.apply(p, null)) - ); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/classification/ClassificationStats.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/classification/ClassificationStats.java deleted file mode 100644 index e8367ae13c95e..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/classification/ClassificationStats.java +++ /dev/null @@ -1,126 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1.
- */ -package org.elasticsearch.client.ml.dataframe.stats.classification; - -import org.elasticsearch.client.common.TimeUtil; -import org.elasticsearch.client.ml.dataframe.stats.AnalysisStats; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContent; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.time.Instant; -import java.util.Objects; - -public class ClassificationStats implements AnalysisStats { - - public static final ParseField NAME = new ParseField("classification_stats"); - - public static final ParseField TIMESTAMP = new ParseField("timestamp"); - public static final ParseField ITERATION = new ParseField("iteration"); - public static final ParseField HYPERPARAMETERS = new ParseField("hyperparameters"); - public static final ParseField TIMING_STATS = new ParseField("timing_stats"); - public static final ParseField VALIDATION_LOSS = new ParseField("validation_loss"); - - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - NAME.getPreferredName(), - true, - a -> new ClassificationStats((Instant) a[0], (Integer) a[1], (Hyperparameters) a[2], (TimingStats) a[3], (ValidationLoss) a[4]) - ); - - static { - PARSER.declareField( - ConstructingObjectParser.constructorArg(), - p -> TimeUtil.parseTimeFieldToInstant(p, TIMESTAMP.getPreferredName()), - TIMESTAMP, - ObjectParser.ValueType.VALUE - ); - PARSER.declareInt(ConstructingObjectParser.optionalConstructorArg(), ITERATION); - PARSER.declareObject(ConstructingObjectParser.constructorArg(), Hyperparameters.PARSER, HYPERPARAMETERS); - PARSER.declareObject(ConstructingObjectParser.constructorArg(), TimingStats.PARSER, TIMING_STATS); - PARSER.declareObject(ConstructingObjectParser.constructorArg(), ValidationLoss.PARSER, VALIDATION_LOSS); - } - - private final Instant timestamp; - private final Integer iteration; - private final Hyperparameters hyperparameters; - private final TimingStats timingStats; - private final ValidationLoss validationLoss; - - public ClassificationStats( - Instant timestamp, - Integer iteration, - Hyperparameters hyperparameters, - TimingStats timingStats, - ValidationLoss validationLoss - ) { - this.timestamp = Instant.ofEpochMilli(Objects.requireNonNull(timestamp).toEpochMilli()); - this.iteration = iteration; - this.hyperparameters = Objects.requireNonNull(hyperparameters); - this.timingStats = Objects.requireNonNull(timingStats); - this.validationLoss = Objects.requireNonNull(validationLoss); - } - - public Instant getTimestamp() { - return timestamp; - } - - public Integer getIteration() { - return iteration; - } - - public Hyperparameters getHyperparameters() { - return hyperparameters; - } - - public TimingStats getTimingStats() { - return timingStats; - } - - public ValidationLoss getValidationLoss() { - return validationLoss; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { - builder.startObject(); - builder.timeField(TIMESTAMP.getPreferredName(), TIMESTAMP.getPreferredName() + "_string", timestamp.toEpochMilli()); - if (iteration != null) { - builder.field(ITERATION.getPreferredName(), iteration); - } - builder.field(HYPERPARAMETERS.getPreferredName(), hyperparameters); - builder.field(TIMING_STATS.getPreferredName(), timingStats); - builder.field(VALIDATION_LOSS.getPreferredName(), validationLoss); - 
builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - ClassificationStats that = (ClassificationStats) o; - return Objects.equals(timestamp, that.timestamp) - && Objects.equals(iteration, that.iteration) - && Objects.equals(hyperparameters, that.hyperparameters) - && Objects.equals(timingStats, that.timingStats) - && Objects.equals(validationLoss, that.validationLoss); - } - - @Override - public int hashCode() { - return Objects.hash(timestamp, iteration, hyperparameters, timingStats, validationLoss); - } - - @Override - public String getName() { - return NAME.getPreferredName(); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/classification/Hyperparameters.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/classification/Hyperparameters.java deleted file mode 100644 index c136928aeb76f..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/classification/Hyperparameters.java +++ /dev/null @@ -1,285 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.dataframe.stats.classification; - -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Objects; - -import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; - -public class Hyperparameters implements ToXContentObject { - - public static final ParseField CLASS_ASSIGNMENT_OBJECTIVE = new ParseField("class_assignment_objective"); - public static final ParseField ALPHA = new ParseField("alpha"); - public static final ParseField DOWNSAMPLE_FACTOR = new ParseField("downsample_factor"); - public static final ParseField ETA = new ParseField("eta"); - public static final ParseField ETA_GROWTH_RATE_PER_TREE = new ParseField("eta_growth_rate_per_tree"); - public static final ParseField FEATURE_BAG_FRACTION = new ParseField("feature_bag_fraction"); - public static final ParseField GAMMA = new ParseField("gamma"); - public static final ParseField LAMBDA = new ParseField("lambda"); - public static final ParseField MAX_ATTEMPTS_TO_ADD_TREE = new ParseField("max_attempts_to_add_tree"); - public static final ParseField MAX_OPTIMIZATION_ROUNDS_PER_HYPERPARAMETER = new ParseField( - "max_optimization_rounds_per_hyperparameter" - ); - public static final ParseField MAX_TREES = new ParseField("max_trees"); - public static final ParseField NUM_FOLDS = new ParseField("num_folds"); - public static final ParseField NUM_SPLITS_PER_FEATURE = new ParseField("num_splits_per_feature"); - public static final ParseField SOFT_TREE_DEPTH_LIMIT = new ParseField("soft_tree_depth_limit"); - public static final ParseField SOFT_TREE_DEPTH_TOLERANCE = new ParseField("soft_tree_depth_tolerance"); - - public static ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "classification_hyperparameters", - true, - a -> new 
Hyperparameters( - (String) a[0], - (Double) a[1], - (Double) a[2], - (Double) a[3], - (Double) a[4], - (Double) a[5], - (Double) a[6], - (Double) a[7], - (Integer) a[8], - (Integer) a[9], - (Integer) a[10], - (Integer) a[11], - (Integer) a[12], - (Double) a[13], - (Double) a[14] - ) - ); - - static { - PARSER.declareString(optionalConstructorArg(), CLASS_ASSIGNMENT_OBJECTIVE); - PARSER.declareDouble(optionalConstructorArg(), ALPHA); - PARSER.declareDouble(optionalConstructorArg(), DOWNSAMPLE_FACTOR); - PARSER.declareDouble(optionalConstructorArg(), ETA); - PARSER.declareDouble(optionalConstructorArg(), ETA_GROWTH_RATE_PER_TREE); - PARSER.declareDouble(optionalConstructorArg(), FEATURE_BAG_FRACTION); - PARSER.declareDouble(optionalConstructorArg(), GAMMA); - PARSER.declareDouble(optionalConstructorArg(), LAMBDA); - PARSER.declareInt(optionalConstructorArg(), MAX_ATTEMPTS_TO_ADD_TREE); - PARSER.declareInt(optionalConstructorArg(), MAX_OPTIMIZATION_ROUNDS_PER_HYPERPARAMETER); - PARSER.declareInt(optionalConstructorArg(), MAX_TREES); - PARSER.declareInt(optionalConstructorArg(), NUM_FOLDS); - PARSER.declareInt(optionalConstructorArg(), NUM_SPLITS_PER_FEATURE); - PARSER.declareDouble(optionalConstructorArg(), SOFT_TREE_DEPTH_LIMIT); - PARSER.declareDouble(optionalConstructorArg(), SOFT_TREE_DEPTH_TOLERANCE); - } - - private final String classAssignmentObjective; - private final Double alpha; - private final Double downsampleFactor; - private final Double eta; - private final Double etaGrowthRatePerTree; - private final Double featureBagFraction; - private final Double gamma; - private final Double lambda; - private final Integer maxAttemptsToAddTree; - private final Integer maxOptimizationRoundsPerHyperparameter; - private final Integer maxTrees; - private final Integer numFolds; - private final Integer numSplitsPerFeature; - private final Double softTreeDepthLimit; - private final Double softTreeDepthTolerance; - - public Hyperparameters( - String classAssignmentObjective, - Double alpha, - Double downsampleFactor, - Double eta, - Double etaGrowthRatePerTree, - Double featureBagFraction, - Double gamma, - Double lambda, - Integer maxAttemptsToAddTree, - Integer maxOptimizationRoundsPerHyperparameter, - Integer maxTrees, - Integer numFolds, - Integer numSplitsPerFeature, - Double softTreeDepthLimit, - Double softTreeDepthTolerance - ) { - this.classAssignmentObjective = classAssignmentObjective; - this.alpha = alpha; - this.downsampleFactor = downsampleFactor; - this.eta = eta; - this.etaGrowthRatePerTree = etaGrowthRatePerTree; - this.featureBagFraction = featureBagFraction; - this.gamma = gamma; - this.lambda = lambda; - this.maxAttemptsToAddTree = maxAttemptsToAddTree; - this.maxOptimizationRoundsPerHyperparameter = maxOptimizationRoundsPerHyperparameter; - this.maxTrees = maxTrees; - this.numFolds = numFolds; - this.numSplitsPerFeature = numSplitsPerFeature; - this.softTreeDepthLimit = softTreeDepthLimit; - this.softTreeDepthTolerance = softTreeDepthTolerance; - } - - public String getClassAssignmentObjective() { - return classAssignmentObjective; - } - - public Double getAlpha() { - return alpha; - } - - public Double getDownsampleFactor() { - return downsampleFactor; - } - - public Double getEta() { - return eta; - } - - public Double getEtaGrowthRatePerTree() { - return etaGrowthRatePerTree; - } - - public Double getFeatureBagFraction() { - return featureBagFraction; - } - - public Double getGamma() { - return gamma; - } - - public Double getLambda() { - return lambda; - } - - public 
Integer getMaxAttemptsToAddTree() { - return maxAttemptsToAddTree; - } - - public Integer getMaxOptimizationRoundsPerHyperparameter() { - return maxOptimizationRoundsPerHyperparameter; - } - - public Integer getMaxTrees() { - return maxTrees; - } - - public Integer getNumFolds() { - return numFolds; - } - - public Integer getNumSplitsPerFeature() { - return numSplitsPerFeature; - } - - public Double getSoftTreeDepthLimit() { - return softTreeDepthLimit; - } - - public Double getSoftTreeDepthTolerance() { - return softTreeDepthTolerance; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - if (classAssignmentObjective != null) { - builder.field(CLASS_ASSIGNMENT_OBJECTIVE.getPreferredName(), classAssignmentObjective); - } - if (alpha != null) { - builder.field(ALPHA.getPreferredName(), alpha); - } - if (downsampleFactor != null) { - builder.field(DOWNSAMPLE_FACTOR.getPreferredName(), downsampleFactor); - } - if (eta != null) { - builder.field(ETA.getPreferredName(), eta); - } - if (etaGrowthRatePerTree != null) { - builder.field(ETA_GROWTH_RATE_PER_TREE.getPreferredName(), etaGrowthRatePerTree); - } - if (featureBagFraction != null) { - builder.field(FEATURE_BAG_FRACTION.getPreferredName(), featureBagFraction); - } - if (gamma != null) { - builder.field(GAMMA.getPreferredName(), gamma); - } - if (lambda != null) { - builder.field(LAMBDA.getPreferredName(), lambda); - } - if (maxAttemptsToAddTree != null) { - builder.field(MAX_ATTEMPTS_TO_ADD_TREE.getPreferredName(), maxAttemptsToAddTree); - } - if (maxOptimizationRoundsPerHyperparameter != null) { - builder.field(MAX_OPTIMIZATION_ROUNDS_PER_HYPERPARAMETER.getPreferredName(), maxOptimizationRoundsPerHyperparameter); - } - if (maxTrees != null) { - builder.field(MAX_TREES.getPreferredName(), maxTrees); - } - if (numFolds != null) { - builder.field(NUM_FOLDS.getPreferredName(), numFolds); - } - if (numSplitsPerFeature != null) { - builder.field(NUM_SPLITS_PER_FEATURE.getPreferredName(), numSplitsPerFeature); - } - if (softTreeDepthLimit != null) { - builder.field(SOFT_TREE_DEPTH_LIMIT.getPreferredName(), softTreeDepthLimit); - } - if (softTreeDepthTolerance != null) { - builder.field(SOFT_TREE_DEPTH_TOLERANCE.getPreferredName(), softTreeDepthTolerance); - } - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - - Hyperparameters that = (Hyperparameters) o; - return Objects.equals(classAssignmentObjective, that.classAssignmentObjective) - && Objects.equals(alpha, that.alpha) - && Objects.equals(downsampleFactor, that.downsampleFactor) - && Objects.equals(eta, that.eta) - && Objects.equals(etaGrowthRatePerTree, that.etaGrowthRatePerTree) - && Objects.equals(featureBagFraction, that.featureBagFraction) - && Objects.equals(gamma, that.gamma) - && Objects.equals(lambda, that.lambda) - && Objects.equals(maxAttemptsToAddTree, that.maxAttemptsToAddTree) - && Objects.equals(maxOptimizationRoundsPerHyperparameter, that.maxOptimizationRoundsPerHyperparameter) - && Objects.equals(maxTrees, that.maxTrees) - && Objects.equals(numFolds, that.numFolds) - && Objects.equals(numSplitsPerFeature, that.numSplitsPerFeature) - && Objects.equals(softTreeDepthLimit, that.softTreeDepthLimit) - && Objects.equals(softTreeDepthTolerance, that.softTreeDepthTolerance); - } - - @Override - public int hashCode() { - return Objects.hash( - 
classAssignmentObjective, - alpha, - downsampleFactor, - eta, - etaGrowthRatePerTree, - featureBagFraction, - gamma, - lambda, - maxAttemptsToAddTree, - maxOptimizationRoundsPerHyperparameter, - maxTrees, - numFolds, - numSplitsPerFeature, - softTreeDepthLimit, - softTreeDepthTolerance - ); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/classification/TimingStats.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/classification/TimingStats.java deleted file mode 100644 index 9afeeeeb3a4f8..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/classification/TimingStats.java +++ /dev/null @@ -1,79 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.dataframe.stats.classification; - -import org.elasticsearch.core.TimeValue; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Objects; - -public class TimingStats implements ToXContentObject { - - public static final ParseField ELAPSED_TIME = new ParseField("elapsed_time"); - public static final ParseField ITERATION_TIME = new ParseField("iteration_time"); - - public static ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "classification_timing_stats", - true, - a -> new TimingStats( - a[0] == null ? null : TimeValue.timeValueMillis((long) a[0]), - a[1] == null ? 
null : TimeValue.timeValueMillis((long) a[1]) - ) - ); - - static { - PARSER.declareLong(ConstructingObjectParser.optionalConstructorArg(), ELAPSED_TIME); - PARSER.declareLong(ConstructingObjectParser.optionalConstructorArg(), ITERATION_TIME); - } - - private final TimeValue elapsedTime; - private final TimeValue iterationTime; - - public TimingStats(TimeValue elapsedTime, TimeValue iterationTime) { - this.elapsedTime = elapsedTime; - this.iterationTime = iterationTime; - } - - public TimeValue getElapsedTime() { - return elapsedTime; - } - - public TimeValue getIterationTime() { - return iterationTime; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - if (elapsedTime != null) { - builder.humanReadableField(ELAPSED_TIME.getPreferredName(), ELAPSED_TIME.getPreferredName() + "_string", elapsedTime); - } - if (iterationTime != null) { - builder.humanReadableField(ITERATION_TIME.getPreferredName(), ITERATION_TIME.getPreferredName() + "_string", iterationTime); - } - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - TimingStats that = (TimingStats) o; - return Objects.equals(elapsedTime, that.elapsedTime) && Objects.equals(iterationTime, that.iterationTime); - } - - @Override - public int hashCode() { - return Objects.hash(elapsedTime, iterationTime); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/classification/ValidationLoss.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/classification/ValidationLoss.java deleted file mode 100644 index ca781c8205300..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/classification/ValidationLoss.java +++ /dev/null @@ -1,78 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ -package org.elasticsearch.client.ml.dataframe.stats.classification; - -import org.elasticsearch.client.ml.dataframe.stats.common.FoldValues; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.List; -import java.util.Objects; - -public class ValidationLoss implements ToXContentObject { - - public static final ParseField LOSS_TYPE = new ParseField("loss_type"); - public static final ParseField FOLD_VALUES = new ParseField("fold_values"); - - @SuppressWarnings("unchecked") - public static ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "classification_validation_loss", - true, - a -> new ValidationLoss((String) a[0], (List) a[1]) - ); - - static { - PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), LOSS_TYPE); - PARSER.declareObjectArray(ConstructingObjectParser.optionalConstructorArg(), FoldValues.PARSER, FOLD_VALUES); - } - - private final String lossType; - private final List foldValues; - - public ValidationLoss(String lossType, List values) { - this.lossType = lossType; - this.foldValues = values; - } - - public String getLossType() { - return lossType; - } - - public List getFoldValues() { - return foldValues; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - if (lossType != null) { - builder.field(LOSS_TYPE.getPreferredName(), lossType); - } - if (foldValues != null) { - builder.field(FOLD_VALUES.getPreferredName(), foldValues); - } - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - ValidationLoss that = (ValidationLoss) o; - return Objects.equals(lossType, that.lossType) && Objects.equals(foldValues, that.foldValues); - } - - @Override - public int hashCode() { - return Objects.hash(lossType, foldValues); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/common/DataCounts.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/common/DataCounts.java deleted file mode 100644 index 82c4fccb09c8f..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/common/DataCounts.java +++ /dev/null @@ -1,102 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ - -package org.elasticsearch.client.ml.dataframe.stats.common; - -import org.elasticsearch.common.inject.internal.ToStringBuilder; -import org.elasticsearch.core.Nullable; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Objects; - -import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; - -public class DataCounts implements ToXContentObject { - - public static final String TYPE_VALUE = "analytics_data_counts"; - - public static final ParseField TRAINING_DOCS_COUNT = new ParseField("training_docs_count"); - public static final ParseField TEST_DOCS_COUNT = new ParseField("test_docs_count"); - public static final ParseField SKIPPED_DOCS_COUNT = new ParseField("skipped_docs_count"); - - public static final ConstructingObjectParser<DataCounts, Void> PARSER = new ConstructingObjectParser<>(TYPE_VALUE, true, a -> { - Long trainingDocsCount = (Long) a[0]; - Long testDocsCount = (Long) a[1]; - Long skippedDocsCount = (Long) a[2]; - return new DataCounts(getOrDefault(trainingDocsCount, 0L), getOrDefault(testDocsCount, 0L), getOrDefault(skippedDocsCount, 0L)); - }); - - static { - PARSER.declareLong(optionalConstructorArg(), TRAINING_DOCS_COUNT); - PARSER.declareLong(optionalConstructorArg(), TEST_DOCS_COUNT); - PARSER.declareLong(optionalConstructorArg(), SKIPPED_DOCS_COUNT); - } - - private final long trainingDocsCount; - private final long testDocsCount; - private final long skippedDocsCount; - - public DataCounts(long trainingDocsCount, long testDocsCount, long skippedDocsCount) { - this.trainingDocsCount = trainingDocsCount; - this.testDocsCount = testDocsCount; - this.skippedDocsCount = skippedDocsCount; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(TRAINING_DOCS_COUNT.getPreferredName(), trainingDocsCount); - builder.field(TEST_DOCS_COUNT.getPreferredName(), testDocsCount); - builder.field(SKIPPED_DOCS_COUNT.getPreferredName(), skippedDocsCount); - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - DataCounts that = (DataCounts) o; - return trainingDocsCount == that.trainingDocsCount - && testDocsCount == that.testDocsCount - && skippedDocsCount == that.skippedDocsCount; - } - - @Override - public int hashCode() { - return Objects.hash(trainingDocsCount, testDocsCount, skippedDocsCount); - } - - @Override - public String toString() { - return new ToStringBuilder(getClass()).add(TRAINING_DOCS_COUNT.getPreferredName(), trainingDocsCount) - .add(TEST_DOCS_COUNT.getPreferredName(), testDocsCount) - .add(SKIPPED_DOCS_COUNT.getPreferredName(), skippedDocsCount) - .toString(); - } - - public long getTrainingDocsCount() { - return trainingDocsCount; - } - - public long getTestDocsCount() { - return testDocsCount; - } - - public long getSkippedDocsCount() { - return skippedDocsCount; - } - - private static <T> T getOrDefault(@Nullable T value, T defaultValue) { - return value != null ?
value : defaultValue; - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/common/FoldValues.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/common/FoldValues.java deleted file mode 100644 index d9f9fbc74fe70..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/common/FoldValues.java +++ /dev/null @@ -1,79 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.dataframe.stats.common; - -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Arrays; -import java.util.List; -import java.util.Objects; - -public class FoldValues implements ToXContentObject { - - public static final ParseField FOLD = new ParseField("fold"); - public static final ParseField VALUES = new ParseField("values"); - - @SuppressWarnings("unchecked") - public static ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "fold_values", - true, - a -> new FoldValues((int) a[0], (List) a[1]) - ); - - static { - PARSER.declareInt(ConstructingObjectParser.constructorArg(), FOLD); - PARSER.declareDoubleArray(ConstructingObjectParser.constructorArg(), VALUES); - } - - private final int fold; - private final double[] values; - - private FoldValues(int fold, List values) { - this(fold, values.stream().mapToDouble(Double::doubleValue).toArray()); - } - - public FoldValues(int fold, double[] values) { - this.fold = fold; - this.values = values; - } - - public int getFold() { - return fold; - } - - public double[] getValues() { - return values; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(FOLD.getPreferredName(), fold); - builder.array(VALUES.getPreferredName(), values); - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object o) { - if (o == this) return true; - if (o == null || getClass() != o.getClass()) return false; - - FoldValues other = (FoldValues) o; - return fold == other.fold && Arrays.equals(values, other.values); - } - - @Override - public int hashCode() { - return Objects.hash(fold, Arrays.hashCode(values)); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/common/MemoryUsage.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/common/MemoryUsage.java deleted file mode 100644 index a856df9c3130b..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/common/MemoryUsage.java +++ /dev/null @@ -1,133 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ -package org.elasticsearch.client.ml.dataframe.stats.common; - -import org.elasticsearch.client.common.TimeUtil; -import org.elasticsearch.common.inject.internal.ToStringBuilder; -import org.elasticsearch.core.Nullable; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.time.Instant; -import java.util.Locale; -import java.util.Objects; - -public class MemoryUsage implements ToXContentObject { - - static final ParseField TIMESTAMP = new ParseField("timestamp"); - static final ParseField PEAK_USAGE_BYTES = new ParseField("peak_usage_bytes"); - static final ParseField STATUS = new ParseField("status"); - static final ParseField MEMORY_REESTIMATE_BYTES = new ParseField("memory_reestimate_bytes"); - - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "analytics_memory_usage", - true, - a -> new MemoryUsage((Instant) a[0], (long) a[1], (Status) a[2], (Long) a[3]) - ); - - static { - PARSER.declareField( - ConstructingObjectParser.optionalConstructorArg(), - p -> TimeUtil.parseTimeFieldToInstant(p, TIMESTAMP.getPreferredName()), - TIMESTAMP, - ObjectParser.ValueType.VALUE - ); - PARSER.declareLong(ConstructingObjectParser.constructorArg(), PEAK_USAGE_BYTES); - PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), Status::fromString, STATUS); - PARSER.declareLong(ConstructingObjectParser.optionalConstructorArg(), MEMORY_REESTIMATE_BYTES); - } - - @Nullable - private final Instant timestamp; - private final long peakUsageBytes; - private final Status status; - private final Long memoryReestimateBytes; - - public MemoryUsage(@Nullable Instant timestamp, long peakUsageBytes, Status status, @Nullable Long memoryReestimateBytes) { - this.timestamp = timestamp == null ? 
null : Instant.ofEpochMilli(Objects.requireNonNull(timestamp).toEpochMilli()); - this.peakUsageBytes = peakUsageBytes; - this.status = status; - this.memoryReestimateBytes = memoryReestimateBytes; - } - - @Nullable - public Instant getTimestamp() { - return timestamp; - } - - public long getPeakUsageBytes() { - return peakUsageBytes; - } - - public Status getStatus() { - return status; - } - - public Long getMemoryReestimateBytes() { - return memoryReestimateBytes; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - if (timestamp != null) { - builder.timeField(TIMESTAMP.getPreferredName(), TIMESTAMP.getPreferredName() + "_string", timestamp.toEpochMilli()); - } - builder.field(PEAK_USAGE_BYTES.getPreferredName(), peakUsageBytes); - builder.field(STATUS.getPreferredName(), status); - if (memoryReestimateBytes != null) { - builder.field(MEMORY_REESTIMATE_BYTES.getPreferredName(), memoryReestimateBytes); - } - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object o) { - if (o == this) return true; - if (o == null || getClass() != o.getClass()) return false; - - MemoryUsage other = (MemoryUsage) o; - return Objects.equals(timestamp, other.timestamp) - && peakUsageBytes == other.peakUsageBytes - && Objects.equals(status, other.status) - && Objects.equals(memoryReestimateBytes, other.memoryReestimateBytes); - } - - @Override - public int hashCode() { - return Objects.hash(timestamp, peakUsageBytes, status, memoryReestimateBytes); - } - - @Override - public String toString() { - return new ToStringBuilder(getClass()).add(TIMESTAMP.getPreferredName(), timestamp == null ? null : timestamp.getEpochSecond()) - .add(PEAK_USAGE_BYTES.getPreferredName(), peakUsageBytes) - .add(STATUS.getPreferredName(), status) - .add(MEMORY_REESTIMATE_BYTES.getPreferredName(), memoryReestimateBytes) - .toString(); - } - - public enum Status { - OK, - HARD_LIMIT; - - public static Status fromString(String value) { - return valueOf(value.toUpperCase(Locale.ROOT)); - } - - @Override - public String toString() { - return name().toLowerCase(Locale.ROOT); - } - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/outlierdetection/OutlierDetectionStats.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/outlierdetection/OutlierDetectionStats.java deleted file mode 100644 index 8481aecf808a0..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/outlierdetection/OutlierDetectionStats.java +++ /dev/null @@ -1,98 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ -package org.elasticsearch.client.ml.dataframe.stats.outlierdetection; - -import org.elasticsearch.client.common.TimeUtil; -import org.elasticsearch.client.ml.dataframe.stats.AnalysisStats; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContent; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.time.Instant; -import java.util.Objects; - -public class OutlierDetectionStats implements AnalysisStats { - - public static final ParseField NAME = new ParseField("outlier_detection_stats"); - - public static final ParseField TIMESTAMP = new ParseField("timestamp"); - public static final ParseField PARAMETERS = new ParseField("parameters"); - public static final ParseField TIMING_STATS = new ParseField("timing_stats"); - - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - NAME.getPreferredName(), - true, - a -> new OutlierDetectionStats((Instant) a[0], (Parameters) a[1], (TimingStats) a[2]) - ); - - static { - PARSER.declareField( - ConstructingObjectParser.constructorArg(), - p -> TimeUtil.parseTimeFieldToInstant(p, TIMESTAMP.getPreferredName()), - TIMESTAMP, - ObjectParser.ValueType.VALUE - ); - PARSER.declareObject(ConstructingObjectParser.constructorArg(), Parameters.PARSER, PARAMETERS); - PARSER.declareObject(ConstructingObjectParser.constructorArg(), TimingStats.PARSER, TIMING_STATS); - } - - private final Instant timestamp; - private final Parameters parameters; - private final TimingStats timingStats; - - public OutlierDetectionStats(Instant timestamp, Parameters parameters, TimingStats timingStats) { - this.timestamp = Instant.ofEpochMilli(Objects.requireNonNull(timestamp).toEpochMilli()); - this.parameters = Objects.requireNonNull(parameters); - this.timingStats = Objects.requireNonNull(timingStats); - } - - public Instant getTimestamp() { - return timestamp; - } - - public Parameters getParameters() { - return parameters; - } - - public TimingStats getTimingStats() { - return timingStats; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { - builder.startObject(); - builder.timeField(TIMESTAMP.getPreferredName(), TIMESTAMP.getPreferredName() + "_string", timestamp.toEpochMilli()); - builder.field(PARAMETERS.getPreferredName(), parameters); - builder.field(TIMING_STATS.getPreferredName(), timingStats); - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - OutlierDetectionStats that = (OutlierDetectionStats) o; - return Objects.equals(timestamp, that.timestamp) - && Objects.equals(parameters, that.parameters) - && Objects.equals(timingStats, that.timingStats); - } - - @Override - public int hashCode() { - return Objects.hash(timestamp, parameters, timingStats); - } - - @Override - public String getName() { - return NAME.getPreferredName(); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/outlierdetection/Parameters.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/outlierdetection/Parameters.java deleted file mode 100644 index aef6ad0833d42..0000000000000 --- 
a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/outlierdetection/Parameters.java +++ /dev/null @@ -1,142 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.dataframe.stats.outlierdetection; - -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Objects; - -import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; - -public class Parameters implements ToXContentObject { - - public static final ParseField N_NEIGHBORS = new ParseField("n_neighbors"); - public static final ParseField METHOD = new ParseField("method"); - public static final ParseField FEATURE_INFLUENCE_THRESHOLD = new ParseField("feature_influence_threshold"); - public static final ParseField COMPUTE_FEATURE_INFLUENCE = new ParseField("compute_feature_influence"); - public static final ParseField OUTLIER_FRACTION = new ParseField("outlier_fraction"); - public static final ParseField STANDARDIZATION_ENABLED = new ParseField("standardization_enabled"); - - @SuppressWarnings("unchecked") - public static ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "outlier_detection_parameters", - true, - a -> new Parameters((Integer) a[0], (String) a[1], (Boolean) a[2], (Double) a[3], (Double) a[4], (Boolean) a[5]) - ); - - static { - PARSER.declareInt(optionalConstructorArg(), N_NEIGHBORS); - PARSER.declareString(optionalConstructorArg(), METHOD); - PARSER.declareBoolean(optionalConstructorArg(), COMPUTE_FEATURE_INFLUENCE); - PARSER.declareDouble(optionalConstructorArg(), FEATURE_INFLUENCE_THRESHOLD); - PARSER.declareDouble(optionalConstructorArg(), OUTLIER_FRACTION); - PARSER.declareBoolean(optionalConstructorArg(), STANDARDIZATION_ENABLED); - } - - private final Integer nNeighbors; - private final String method; - private final Boolean computeFeatureInfluence; - private final Double featureInfluenceThreshold; - private final Double outlierFraction; - private final Boolean standardizationEnabled; - - public Parameters( - Integer nNeighbors, - String method, - Boolean computeFeatureInfluence, - Double featureInfluenceThreshold, - Double outlierFraction, - Boolean standardizationEnabled - ) { - this.nNeighbors = nNeighbors; - this.method = method; - this.computeFeatureInfluence = computeFeatureInfluence; - this.featureInfluenceThreshold = featureInfluenceThreshold; - this.outlierFraction = outlierFraction; - this.standardizationEnabled = standardizationEnabled; - } - - public Integer getnNeighbors() { - return nNeighbors; - } - - public String getMethod() { - return method; - } - - public Boolean getComputeFeatureInfluence() { - return computeFeatureInfluence; - } - - public Double getFeatureInfluenceThreshold() { - return featureInfluenceThreshold; - } - - public Double getOutlierFraction() { - return outlierFraction; - } - - public Boolean getStandardizationEnabled() { - return standardizationEnabled; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws 
IOException { - builder.startObject(); - if (nNeighbors != null) { - builder.field(N_NEIGHBORS.getPreferredName(), nNeighbors); - } - if (method != null) { - builder.field(METHOD.getPreferredName(), method); - } - if (computeFeatureInfluence != null) { - builder.field(COMPUTE_FEATURE_INFLUENCE.getPreferredName(), computeFeatureInfluence); - } - if (featureInfluenceThreshold != null) { - builder.field(FEATURE_INFLUENCE_THRESHOLD.getPreferredName(), featureInfluenceThreshold); - } - if (outlierFraction != null) { - builder.field(OUTLIER_FRACTION.getPreferredName(), outlierFraction); - } - if (standardizationEnabled != null) { - builder.field(STANDARDIZATION_ENABLED.getPreferredName(), standardizationEnabled); - } - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - - Parameters that = (Parameters) o; - return Objects.equals(nNeighbors, that.nNeighbors) - && Objects.equals(method, that.method) - && Objects.equals(computeFeatureInfluence, that.computeFeatureInfluence) - && Objects.equals(featureInfluenceThreshold, that.featureInfluenceThreshold) - && Objects.equals(outlierFraction, that.outlierFraction) - && Objects.equals(standardizationEnabled, that.standardizationEnabled); - } - - @Override - public int hashCode() { - return Objects.hash( - nNeighbors, - method, - computeFeatureInfluence, - featureInfluenceThreshold, - outlierFraction, - standardizationEnabled - ); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/outlierdetection/TimingStats.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/outlierdetection/TimingStats.java deleted file mode 100644 index 72d96fa4d71cf..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/outlierdetection/TimingStats.java +++ /dev/null @@ -1,65 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.dataframe.stats.outlierdetection; - -import org.elasticsearch.core.TimeValue; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Objects; - -public class TimingStats implements ToXContentObject { - - public static final ParseField ELAPSED_TIME = new ParseField("elapsed_time"); - - public static ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "outlier_detection_timing_stats", - true, - a -> new TimingStats(a[0] == null ? 
null : TimeValue.timeValueMillis((long) a[0])) - ); - - static { - PARSER.declareLong(ConstructingObjectParser.optionalConstructorArg(), ELAPSED_TIME); - } - - private final TimeValue elapsedTime; - - public TimingStats(TimeValue elapsedTime) { - this.elapsedTime = elapsedTime; - } - - public TimeValue getElapsedTime() { - return elapsedTime; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - if (elapsedTime != null) { - builder.humanReadableField(ELAPSED_TIME.getPreferredName(), ELAPSED_TIME.getPreferredName() + "_string", elapsedTime); - } - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - TimingStats that = (TimingStats) o; - return Objects.equals(elapsedTime, that.elapsedTime); - } - - @Override - public int hashCode() { - return Objects.hash(elapsedTime); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/regression/Hyperparameters.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/regression/Hyperparameters.java deleted file mode 100644 index bd89928f035c8..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/regression/Hyperparameters.java +++ /dev/null @@ -1,270 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ -package org.elasticsearch.client.ml.dataframe.stats.regression; - -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Objects; - -import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; - -public class Hyperparameters implements ToXContentObject { - - public static final ParseField ALPHA = new ParseField("alpha"); - public static final ParseField DOWNSAMPLE_FACTOR = new ParseField("downsample_factor"); - public static final ParseField ETA = new ParseField("eta"); - public static final ParseField ETA_GROWTH_RATE_PER_TREE = new ParseField("eta_growth_rate_per_tree"); - public static final ParseField FEATURE_BAG_FRACTION = new ParseField("feature_bag_fraction"); - public static final ParseField GAMMA = new ParseField("gamma"); - public static final ParseField LAMBDA = new ParseField("lambda"); - public static final ParseField MAX_ATTEMPTS_TO_ADD_TREE = new ParseField("max_attempts_to_add_tree"); - public static final ParseField MAX_OPTIMIZATION_ROUNDS_PER_HYPERPARAMETER = new ParseField( - "max_optimization_rounds_per_hyperparameter" - ); - public static final ParseField MAX_TREES = new ParseField("max_trees"); - public static final ParseField NUM_FOLDS = new ParseField("num_folds"); - public static final ParseField NUM_SPLITS_PER_FEATURE = new ParseField("num_splits_per_feature"); - public static final ParseField SOFT_TREE_DEPTH_LIMIT = new ParseField("soft_tree_depth_limit"); - public static final ParseField SOFT_TREE_DEPTH_TOLERANCE = new ParseField("soft_tree_depth_tolerance"); - - public static ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "regression_hyperparameters", - true, - a -> new Hyperparameters( - (Double) a[0], - (Double) a[1], - (Double) a[2], - (Double) a[3], - (Double) a[4], - (Double) a[5], - (Double) a[6], - (Integer) a[7], - (Integer) a[8], - (Integer) a[9], - (Integer) a[10], - (Integer) a[11], - (Double) a[12], - (Double) a[13] - ) - ); - - static { - PARSER.declareDouble(optionalConstructorArg(), ALPHA); - PARSER.declareDouble(optionalConstructorArg(), DOWNSAMPLE_FACTOR); - PARSER.declareDouble(optionalConstructorArg(), ETA); - PARSER.declareDouble(optionalConstructorArg(), ETA_GROWTH_RATE_PER_TREE); - PARSER.declareDouble(optionalConstructorArg(), FEATURE_BAG_FRACTION); - PARSER.declareDouble(optionalConstructorArg(), GAMMA); - PARSER.declareDouble(optionalConstructorArg(), LAMBDA); - PARSER.declareInt(optionalConstructorArg(), MAX_ATTEMPTS_TO_ADD_TREE); - PARSER.declareInt(optionalConstructorArg(), MAX_OPTIMIZATION_ROUNDS_PER_HYPERPARAMETER); - PARSER.declareInt(optionalConstructorArg(), MAX_TREES); - PARSER.declareInt(optionalConstructorArg(), NUM_FOLDS); - PARSER.declareInt(optionalConstructorArg(), NUM_SPLITS_PER_FEATURE); - PARSER.declareDouble(optionalConstructorArg(), SOFT_TREE_DEPTH_LIMIT); - PARSER.declareDouble(optionalConstructorArg(), SOFT_TREE_DEPTH_TOLERANCE); - } - - private final Double alpha; - private final Double downsampleFactor; - private final Double eta; - private final Double etaGrowthRatePerTree; - private final Double featureBagFraction; - private final Double gamma; - private final Double lambda; - private final Integer maxAttemptsToAddTree; - private final Integer maxOptimizationRoundsPerHyperparameter; - private final Integer maxTrees; - private final Integer numFolds; - 
private final Integer numSplitsPerFeature; - private final Double softTreeDepthLimit; - private final Double softTreeDepthTolerance; - - public Hyperparameters( - Double alpha, - Double downsampleFactor, - Double eta, - Double etaGrowthRatePerTree, - Double featureBagFraction, - Double gamma, - Double lambda, - Integer maxAttemptsToAddTree, - Integer maxOptimizationRoundsPerHyperparameter, - Integer maxTrees, - Integer numFolds, - Integer numSplitsPerFeature, - Double softTreeDepthLimit, - Double softTreeDepthTolerance - ) { - this.alpha = alpha; - this.downsampleFactor = downsampleFactor; - this.eta = eta; - this.etaGrowthRatePerTree = etaGrowthRatePerTree; - this.featureBagFraction = featureBagFraction; - this.gamma = gamma; - this.lambda = lambda; - this.maxAttemptsToAddTree = maxAttemptsToAddTree; - this.maxOptimizationRoundsPerHyperparameter = maxOptimizationRoundsPerHyperparameter; - this.maxTrees = maxTrees; - this.numFolds = numFolds; - this.numSplitsPerFeature = numSplitsPerFeature; - this.softTreeDepthLimit = softTreeDepthLimit; - this.softTreeDepthTolerance = softTreeDepthTolerance; - } - - public Double getAlpha() { - return alpha; - } - - public Double getDownsampleFactor() { - return downsampleFactor; - } - - public Double getEta() { - return eta; - } - - public Double getEtaGrowthRatePerTree() { - return etaGrowthRatePerTree; - } - - public Double getFeatureBagFraction() { - return featureBagFraction; - } - - public Double getGamma() { - return gamma; - } - - public Double getLambda() { - return lambda; - } - - public Integer getMaxAttemptsToAddTree() { - return maxAttemptsToAddTree; - } - - public Integer getMaxOptimizationRoundsPerHyperparameter() { - return maxOptimizationRoundsPerHyperparameter; - } - - public Integer getMaxTrees() { - return maxTrees; - } - - public Integer getNumFolds() { - return numFolds; - } - - public Integer getNumSplitsPerFeature() { - return numSplitsPerFeature; - } - - public Double getSoftTreeDepthLimit() { - return softTreeDepthLimit; - } - - public Double getSoftTreeDepthTolerance() { - return softTreeDepthTolerance; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - if (alpha != null) { - builder.field(ALPHA.getPreferredName(), alpha); - } - if (downsampleFactor != null) { - builder.field(DOWNSAMPLE_FACTOR.getPreferredName(), downsampleFactor); - } - if (eta != null) { - builder.field(ETA.getPreferredName(), eta); - } - if (etaGrowthRatePerTree != null) { - builder.field(ETA_GROWTH_RATE_PER_TREE.getPreferredName(), etaGrowthRatePerTree); - } - if (featureBagFraction != null) { - builder.field(FEATURE_BAG_FRACTION.getPreferredName(), featureBagFraction); - } - if (gamma != null) { - builder.field(GAMMA.getPreferredName(), gamma); - } - if (lambda != null) { - builder.field(LAMBDA.getPreferredName(), lambda); - } - if (maxAttemptsToAddTree != null) { - builder.field(MAX_ATTEMPTS_TO_ADD_TREE.getPreferredName(), maxAttemptsToAddTree); - } - if (maxOptimizationRoundsPerHyperparameter != null) { - builder.field(MAX_OPTIMIZATION_ROUNDS_PER_HYPERPARAMETER.getPreferredName(), maxOptimizationRoundsPerHyperparameter); - } - if (maxTrees != null) { - builder.field(MAX_TREES.getPreferredName(), maxTrees); - } - if (numFolds != null) { - builder.field(NUM_FOLDS.getPreferredName(), numFolds); - } - if (numSplitsPerFeature != null) { - builder.field(NUM_SPLITS_PER_FEATURE.getPreferredName(), numSplitsPerFeature); - } - if (softTreeDepthLimit != null) { - 
builder.field(SOFT_TREE_DEPTH_LIMIT.getPreferredName(), softTreeDepthLimit); - } - if (softTreeDepthTolerance != null) { - builder.field(SOFT_TREE_DEPTH_TOLERANCE.getPreferredName(), softTreeDepthTolerance); - } - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - - Hyperparameters that = (Hyperparameters) o; - return Objects.equals(alpha, that.alpha) - && Objects.equals(downsampleFactor, that.downsampleFactor) - && Objects.equals(eta, that.eta) - && Objects.equals(etaGrowthRatePerTree, that.etaGrowthRatePerTree) - && Objects.equals(featureBagFraction, that.featureBagFraction) - && Objects.equals(gamma, that.gamma) - && Objects.equals(lambda, that.lambda) - && Objects.equals(maxAttemptsToAddTree, that.maxAttemptsToAddTree) - && Objects.equals(maxOptimizationRoundsPerHyperparameter, that.maxOptimizationRoundsPerHyperparameter) - && Objects.equals(maxTrees, that.maxTrees) - && Objects.equals(numFolds, that.numFolds) - && Objects.equals(numSplitsPerFeature, that.numSplitsPerFeature) - && Objects.equals(softTreeDepthLimit, that.softTreeDepthLimit) - && Objects.equals(softTreeDepthTolerance, that.softTreeDepthTolerance); - } - - @Override - public int hashCode() { - return Objects.hash( - alpha, - downsampleFactor, - eta, - etaGrowthRatePerTree, - featureBagFraction, - gamma, - lambda, - maxAttemptsToAddTree, - maxOptimizationRoundsPerHyperparameter, - maxTrees, - numFolds, - numSplitsPerFeature, - softTreeDepthLimit, - softTreeDepthTolerance - ); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/regression/RegressionStats.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/regression/RegressionStats.java deleted file mode 100644 index 8507a2c88f3a9..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/regression/RegressionStats.java +++ /dev/null @@ -1,126 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ -package org.elasticsearch.client.ml.dataframe.stats.regression; - -import org.elasticsearch.client.common.TimeUtil; -import org.elasticsearch.client.ml.dataframe.stats.AnalysisStats; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContent; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.time.Instant; -import java.util.Objects; - -public class RegressionStats implements AnalysisStats { - - public static final ParseField NAME = new ParseField("regression_stats"); - - public static final ParseField TIMESTAMP = new ParseField("timestamp"); - public static final ParseField ITERATION = new ParseField("iteration"); - public static final ParseField HYPERPARAMETERS = new ParseField("hyperparameters"); - public static final ParseField TIMING_STATS = new ParseField("timing_stats"); - public static final ParseField VALIDATION_LOSS = new ParseField("validation_loss"); - - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - NAME.getPreferredName(), - true, - a -> new RegressionStats((Instant) a[0], (Integer) a[1], (Hyperparameters) a[2], (TimingStats) a[3], (ValidationLoss) a[4]) - ); - - static { - PARSER.declareField( - ConstructingObjectParser.constructorArg(), - p -> TimeUtil.parseTimeFieldToInstant(p, TIMESTAMP.getPreferredName()), - TIMESTAMP, - ObjectParser.ValueType.VALUE - ); - PARSER.declareInt(ConstructingObjectParser.optionalConstructorArg(), ITERATION); - PARSER.declareObject(ConstructingObjectParser.constructorArg(), Hyperparameters.PARSER, HYPERPARAMETERS); - PARSER.declareObject(ConstructingObjectParser.constructorArg(), TimingStats.PARSER, TIMING_STATS); - PARSER.declareObject(ConstructingObjectParser.constructorArg(), ValidationLoss.PARSER, VALIDATION_LOSS); - } - - private final Instant timestamp; - private final Integer iteration; - private final Hyperparameters hyperparameters; - private final TimingStats timingStats; - private final ValidationLoss validationLoss; - - public RegressionStats( - Instant timestamp, - Integer iteration, - Hyperparameters hyperparameters, - TimingStats timingStats, - ValidationLoss validationLoss - ) { - this.timestamp = Instant.ofEpochMilli(Objects.requireNonNull(timestamp).toEpochMilli()); - this.iteration = iteration; - this.hyperparameters = Objects.requireNonNull(hyperparameters); - this.timingStats = Objects.requireNonNull(timingStats); - this.validationLoss = Objects.requireNonNull(validationLoss); - } - - public Instant getTimestamp() { - return timestamp; - } - - public Integer getIteration() { - return iteration; - } - - public Hyperparameters getHyperparameters() { - return hyperparameters; - } - - public TimingStats getTimingStats() { - return timingStats; - } - - public ValidationLoss getValidationLoss() { - return validationLoss; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { - builder.startObject(); - builder.timeField(TIMESTAMP.getPreferredName(), TIMESTAMP.getPreferredName() + "_string", timestamp.toEpochMilli()); - if (iteration != null) { - builder.field(ITERATION.getPreferredName(), iteration); - } - builder.field(HYPERPARAMETERS.getPreferredName(), hyperparameters); - builder.field(TIMING_STATS.getPreferredName(), timingStats); - builder.field(VALIDATION_LOSS.getPreferredName(), validationLoss); - builder.endObject(); - 
return builder; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - RegressionStats that = (RegressionStats) o; - return Objects.equals(timestamp, that.timestamp) - && Objects.equals(iteration, that.iteration) - && Objects.equals(hyperparameters, that.hyperparameters) - && Objects.equals(timingStats, that.timingStats) - && Objects.equals(validationLoss, that.validationLoss); - } - - @Override - public int hashCode() { - return Objects.hash(timestamp, iteration, hyperparameters, timingStats, validationLoss); - } - - @Override - public String getName() { - return NAME.getPreferredName(); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/regression/TimingStats.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/regression/TimingStats.java deleted file mode 100644 index 7a06a2aa3b4d5..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/regression/TimingStats.java +++ /dev/null @@ -1,79 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.dataframe.stats.regression; - -import org.elasticsearch.core.TimeValue; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Objects; - -public class TimingStats implements ToXContentObject { - - public static final ParseField ELAPSED_TIME = new ParseField("elapsed_time"); - public static final ParseField ITERATION_TIME = new ParseField("iteration_time"); - - public static ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "regression_timing_stats", - true, - a -> new TimingStats( - a[0] == null ? null : TimeValue.timeValueMillis((long) a[0]), - a[1] == null ? 
null : TimeValue.timeValueMillis((long) a[1]) - ) - ); - - static { - PARSER.declareLong(ConstructingObjectParser.optionalConstructorArg(), ELAPSED_TIME); - PARSER.declareLong(ConstructingObjectParser.optionalConstructorArg(), ITERATION_TIME); - } - - private final TimeValue elapsedTime; - private final TimeValue iterationTime; - - public TimingStats(TimeValue elapsedTime, TimeValue iterationTime) { - this.elapsedTime = elapsedTime; - this.iterationTime = iterationTime; - } - - public TimeValue getElapsedTime() { - return elapsedTime; - } - - public TimeValue getIterationTime() { - return iterationTime; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - if (elapsedTime != null) { - builder.humanReadableField(ELAPSED_TIME.getPreferredName(), ELAPSED_TIME.getPreferredName() + "_string", elapsedTime); - } - if (iterationTime != null) { - builder.humanReadableField(ITERATION_TIME.getPreferredName(), ITERATION_TIME.getPreferredName() + "_string", iterationTime); - } - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - TimingStats that = (TimingStats) o; - return Objects.equals(elapsedTime, that.elapsedTime) && Objects.equals(iterationTime, that.iterationTime); - } - - @Override - public int hashCode() { - return Objects.hash(elapsedTime, iterationTime); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/regression/ValidationLoss.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/regression/ValidationLoss.java deleted file mode 100644 index 2fabaad16ffc5..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/regression/ValidationLoss.java +++ /dev/null @@ -1,78 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */
-package org.elasticsearch.client.ml.dataframe.stats.regression;
-
-import org.elasticsearch.client.ml.dataframe.stats.common.FoldValues;
-import org.elasticsearch.xcontent.ConstructingObjectParser;
-import org.elasticsearch.xcontent.ParseField;
-import org.elasticsearch.xcontent.ToXContentObject;
-import org.elasticsearch.xcontent.XContentBuilder;
-
-import java.io.IOException;
-import java.util.List;
-import java.util.Objects;
-
-public class ValidationLoss implements ToXContentObject {
-
-    public static final ParseField LOSS_TYPE = new ParseField("loss_type");
-    public static final ParseField FOLD_VALUES = new ParseField("fold_values");
-
-    @SuppressWarnings("unchecked")
-    public static ConstructingObjectParser<ValidationLoss, Void> PARSER = new ConstructingObjectParser<>(
-        "regression_validation_loss",
-        true,
-        a -> new ValidationLoss((String) a[0], (List<FoldValues>) a[1])
-    );
-
-    static {
-        PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), LOSS_TYPE);
-        PARSER.declareObjectArray(ConstructingObjectParser.optionalConstructorArg(), FoldValues.PARSER, FOLD_VALUES);
-    }
-
-    private final String lossType;
-    private final List<FoldValues> foldValues;
-
-    public ValidationLoss(String lossType, List<FoldValues> values) {
-        this.lossType = lossType;
-        this.foldValues = values;
-    }
-
-    public String getLossType() {
-        return lossType;
-    }
-
-    public List<FoldValues> getFoldValues() {
-        return foldValues;
-    }
-
-    @Override
-    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
-        builder.startObject();
-        if (lossType != null) {
-            builder.field(LOSS_TYPE.getPreferredName(), lossType);
-        }
-        if (foldValues != null) {
-            builder.field(FOLD_VALUES.getPreferredName(), foldValues);
-        }
-        builder.endObject();
-        return builder;
-    }
-
-    @Override
-    public boolean equals(Object o) {
-        if (this == o) return true;
-        if (o == null || getClass() != o.getClass()) return false;
-        ValidationLoss that = (ValidationLoss) o;
-        return Objects.equals(lossType, that.lossType) && Objects.equals(foldValues, that.foldValues);
-    }
-
-    @Override
-    public int hashCode() {
-        return Objects.hash(lossType, foldValues);
-    }
-}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/InferenceToXContentCompressor.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/InferenceToXContentCompressor.java
deleted file mode 100644
index af06d177d9bf9..0000000000000
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/InferenceToXContentCompressor.java
+++ /dev/null
@@ -1,76 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-
-package org.elasticsearch.client.ml.inference;
-
-import org.elasticsearch.common.bytes.BytesArray;
-import org.elasticsearch.common.bytes.BytesReference;
-import org.elasticsearch.common.io.Streams;
-import org.elasticsearch.common.io.stream.BytesStreamOutput;
-import org.elasticsearch.common.xcontent.XContentHelper;
-import org.elasticsearch.core.CheckedFunction;
-import org.elasticsearch.xcontent.DeprecationHandler;
-import org.elasticsearch.xcontent.NamedXContentRegistry;
-import org.elasticsearch.xcontent.ToXContentObject;
-import org.elasticsearch.xcontent.XContentParser;
-import org.elasticsearch.xcontent.XContentType;
-
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.OutputStream;
-import java.nio.charset.StandardCharsets;
-import java.util.Base64;
-import java.util.zip.GZIPInputStream;
-import java.util.zip.GZIPOutputStream;
-
-/**
- * Collection of helper methods. Similar to CompressedXContent, but this utilizes GZIP.
- */
-public final class InferenceToXContentCompressor {
-    private static final int BUFFER_SIZE = 4096;
-    private static final long MAX_INFLATED_BYTES = 1_000_000_000; // 1 gb maximum
-
-    private InferenceToXContentCompressor() {}
-
-    public static <T extends ToXContentObject> String deflate(T objectToCompress) throws IOException {
-        BytesReference reference = XContentHelper.toXContent(objectToCompress, XContentType.JSON, false);
-        return deflate(reference);
-    }
-
-    public static <T> T inflate(
-        String compressedString,
-        CheckedFunction<XContentParser, T, IOException> parserFunction,
-        NamedXContentRegistry xContentRegistry
-    ) throws IOException {
-        try (
-            XContentParser parser = XContentHelper.createParser(
-                xContentRegistry,
-                DeprecationHandler.THROW_UNSUPPORTED_OPERATION,
-                inflate(compressedString, MAX_INFLATED_BYTES),
-                XContentType.JSON
-            )
-        ) {
-            return parserFunction.apply(parser);
-        }
-    }
-
-    static BytesReference inflate(String compressedString, long streamSize) throws IOException {
-        byte[] compressedBytes = Base64.getDecoder().decode(compressedString.getBytes(StandardCharsets.UTF_8));
-        InputStream gzipStream = new GZIPInputStream(new BytesArray(compressedBytes).streamInput(), BUFFER_SIZE);
-        InputStream inflateStream = new SimpleBoundedInputStream(gzipStream, streamSize);
-        return Streams.readFully(inflateStream);
-    }
-
-    private static String deflate(BytesReference reference) throws IOException {
-        BytesStreamOutput out = new BytesStreamOutput();
-        try (OutputStream compressedOutput = new GZIPOutputStream(out, BUFFER_SIZE)) {
-            reference.writeTo(compressedOutput);
-        }
-        return new String(Base64.getEncoder().encode(BytesReference.toBytes(out.bytes())), StandardCharsets.UTF_8);
-    }
-}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/MlInferenceNamedXContentProvider.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/MlInferenceNamedXContentProvider.java
deleted file mode 100644
index 271b882f697e3..0000000000000
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/MlInferenceNamedXContentProvider.java
+++ /dev/null
@@ -1,101 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-package org.elasticsearch.client.ml.inference;
-
-import org.elasticsearch.client.ml.inference.preprocessing.CustomWordEmbedding;
-import org.elasticsearch.client.ml.inference.preprocessing.FrequencyEncoding;
-import org.elasticsearch.client.ml.inference.preprocessing.Multi;
-import org.elasticsearch.client.ml.inference.preprocessing.NGram;
-import org.elasticsearch.client.ml.inference.preprocessing.OneHotEncoding;
-import org.elasticsearch.client.ml.inference.preprocessing.PreProcessor;
-import org.elasticsearch.client.ml.inference.preprocessing.TargetMeanEncoding;
-import org.elasticsearch.client.ml.inference.trainedmodel.ClassificationConfig;
-import org.elasticsearch.client.ml.inference.trainedmodel.IndexLocation;
-import org.elasticsearch.client.ml.inference.trainedmodel.InferenceConfig;
-import org.elasticsearch.client.ml.inference.trainedmodel.RegressionConfig;
-import org.elasticsearch.client.ml.inference.trainedmodel.TrainedModel;
-import org.elasticsearch.client.ml.inference.trainedmodel.TrainedModelLocation;
-import org.elasticsearch.client.ml.inference.trainedmodel.ensemble.Ensemble;
-import org.elasticsearch.client.ml.inference.trainedmodel.ensemble.Exponent;
-import org.elasticsearch.client.ml.inference.trainedmodel.ensemble.LogisticRegression;
-import org.elasticsearch.client.ml.inference.trainedmodel.ensemble.OutputAggregator;
-import org.elasticsearch.client.ml.inference.trainedmodel.ensemble.WeightedMode;
-import org.elasticsearch.client.ml.inference.trainedmodel.ensemble.WeightedSum;
-import org.elasticsearch.client.ml.inference.trainedmodel.langident.LangIdentNeuralNetwork;
-import org.elasticsearch.client.ml.inference.trainedmodel.tree.Tree;
-import org.elasticsearch.plugins.spi.NamedXContentProvider;
-import org.elasticsearch.xcontent.NamedXContentRegistry;
-import org.elasticsearch.xcontent.ParseField;
-
-import java.util.ArrayList;
-import java.util.List;
-
-public class MlInferenceNamedXContentProvider implements NamedXContentProvider {
-
-    @Override
-    public List<NamedXContentRegistry.Entry> getNamedXContentParsers() {
-        List<NamedXContentRegistry.Entry> namedXContent = new ArrayList<>();
-
-        // PreProcessing
-        namedXContent.add(
-            new NamedXContentRegistry.Entry(PreProcessor.class, new ParseField(OneHotEncoding.NAME), OneHotEncoding::fromXContent)
-        );
-        namedXContent.add(
-            new NamedXContentRegistry.Entry(PreProcessor.class, new ParseField(TargetMeanEncoding.NAME), TargetMeanEncoding::fromXContent)
-        );
-        namedXContent.add(
-            new NamedXContentRegistry.Entry(PreProcessor.class, new ParseField(FrequencyEncoding.NAME), FrequencyEncoding::fromXContent)
-        );
-        namedXContent.add(
-            new NamedXContentRegistry.Entry(PreProcessor.class, new ParseField(CustomWordEmbedding.NAME), CustomWordEmbedding::fromXContent)
-        );
-        namedXContent.add(new NamedXContentRegistry.Entry(PreProcessor.class, new ParseField(NGram.NAME), NGram::fromXContent));
-        namedXContent.add(new NamedXContentRegistry.Entry(PreProcessor.class, new ParseField(Multi.NAME), Multi::fromXContent));
-
-        // Model
-        namedXContent.add(new NamedXContentRegistry.Entry(TrainedModel.class, new ParseField(Tree.NAME), Tree::fromXContent));
-        namedXContent.add(new NamedXContentRegistry.Entry(TrainedModel.class, new ParseField(Ensemble.NAME), Ensemble::fromXContent));
-        namedXContent.add(
-            new NamedXContentRegistry.Entry(
-                TrainedModel.class,
-                new ParseField(LangIdentNeuralNetwork.NAME),
-                LangIdentNeuralNetwork::fromXContent
-            )
-        );
-
-        // Inference Config
-        namedXContent.add(
-            new NamedXContentRegistry.Entry(InferenceConfig.class, ClassificationConfig.NAME,
ClassificationConfig::fromXContent) - ); - namedXContent.add(new NamedXContentRegistry.Entry(InferenceConfig.class, RegressionConfig.NAME, RegressionConfig::fromXContent)); - - // Aggregating output - namedXContent.add( - new NamedXContentRegistry.Entry(OutputAggregator.class, new ParseField(WeightedMode.NAME), WeightedMode::fromXContent) - ); - namedXContent.add( - new NamedXContentRegistry.Entry(OutputAggregator.class, new ParseField(WeightedSum.NAME), WeightedSum::fromXContent) - ); - namedXContent.add( - new NamedXContentRegistry.Entry( - OutputAggregator.class, - new ParseField(LogisticRegression.NAME), - LogisticRegression::fromXContent - ) - ); - namedXContent.add(new NamedXContentRegistry.Entry(OutputAggregator.class, new ParseField(Exponent.NAME), Exponent::fromXContent)); - - // location - namedXContent.add( - new NamedXContentRegistry.Entry(TrainedModelLocation.class, new ParseField(IndexLocation.INDEX), IndexLocation::fromXContent) - ); - - return namedXContent; - } - -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/NamedXContentObject.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/NamedXContentObject.java deleted file mode 100644 index 1a6eb8afdac24..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/NamedXContentObject.java +++ /dev/null @@ -1,23 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.inference; - -import org.elasticsearch.xcontent.ToXContentObject; - -/** - * Simple interface for XContent Objects that are named. - * - * This affords more general handling when serializing and de-serializing this type of XContent when it is used in a NamedObjects - * parser. - */ -public interface NamedXContentObject extends ToXContentObject { - /** - * @return The name of the XContentObject that is to be serialized - */ - String getName(); -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/NamedXContentObjectHelper.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/NamedXContentObjectHelper.java deleted file mode 100644 index b0c4015e186a0..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/NamedXContentObjectHelper.java +++ /dev/null @@ -1,60 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */
-package org.elasticsearch.client.ml.inference;
-
-import org.elasticsearch.xcontent.ToXContent;
-import org.elasticsearch.xcontent.XContentBuilder;
-
-import java.io.IOException;
-import java.util.List;
-
-public final class NamedXContentObjectHelper {
-
-    private NamedXContentObjectHelper() {}
-
-    public static XContentBuilder writeNamedObjects(
-        XContentBuilder builder,
-        ToXContent.Params params,
-        boolean useExplicitOrder,
-        String namedObjectsName,
-        List<? extends NamedXContentObject> namedObjects
-    ) throws IOException {
-        if (useExplicitOrder) {
-            builder.startArray(namedObjectsName);
-        } else {
-            builder.startObject(namedObjectsName);
-        }
-        for (NamedXContentObject object : namedObjects) {
-            if (useExplicitOrder) {
-                builder.startObject();
-            }
-            builder.field(object.getName(), object, params);
-            if (useExplicitOrder) {
-                builder.endObject();
-            }
-        }
-        if (useExplicitOrder) {
-            builder.endArray();
-        } else {
-            builder.endObject();
-        }
-        return builder;
-    }
-
-    public static XContentBuilder writeNamedObject(
-        XContentBuilder builder,
-        ToXContent.Params params,
-        String namedObjectName,
-        NamedXContentObject namedObject
-    ) throws IOException {
-        builder.startObject(namedObjectName);
-        builder.field(namedObject.getName(), namedObject, params);
-        builder.endObject();
-        return builder;
-    }
-}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/SimpleBoundedInputStream.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/SimpleBoundedInputStream.java
deleted file mode 100644
index f0c274d49592a..0000000000000
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/SimpleBoundedInputStream.java
+++ /dev/null
@@ -1,55 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-package org.elasticsearch.client.ml.inference;
-
-import java.io.IOException;
-import java.io.InputStream;
-import java.util.Objects;
-
-/**
- * This is a pared down bounded input stream.
- * Only read is specifically enforced.
- */
-final class SimpleBoundedInputStream extends InputStream {
-
-    private final InputStream in;
-    private final long maxBytes;
-    private long numBytes;
-
-    SimpleBoundedInputStream(InputStream inputStream, long maxBytes) {
-        this.in = Objects.requireNonNull(inputStream, "inputStream");
-        if (maxBytes < 0) {
-            throw new IllegalArgumentException("[maxBytes] must be greater than or equal to 0");
-        }
-        this.maxBytes = maxBytes;
-    }
-
-    /**
-     * A simple wrapper around the injected input stream that restricts the total number of bytes able to be read.
-     * @return The byte read. -1 on internal stream completion or when maxBytes is exceeded.
-     * @throws IOException on failure
-     */
-    @Override
-    public int read() throws IOException {
-        // We have reached the maximum, signal stream completion.
- if (numBytes >= maxBytes) { - return -1; - } - numBytes++; - return in.read(); - } - - /** - * Delegates `close` to the wrapped InputStream - * @throws IOException on failure - */ - @Override - public void close() throws IOException { - in.close(); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/TrainedModelConfig.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/TrainedModelConfig.java deleted file mode 100644 index 8defbcfce2e83..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/TrainedModelConfig.java +++ /dev/null @@ -1,504 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.inference; - -import org.elasticsearch.Version; -import org.elasticsearch.client.common.TimeUtil; -import org.elasticsearch.client.ml.inference.trainedmodel.InferenceConfig; -import org.elasticsearch.client.ml.inference.trainedmodel.TrainedModelLocation; -import org.elasticsearch.common.Strings; -import org.elasticsearch.common.unit.ByteSizeValue; -import org.elasticsearch.core.Nullable; -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.time.Instant; -import java.util.Arrays; -import java.util.Collections; -import java.util.List; -import java.util.Map; -import java.util.Objects; - -import static org.elasticsearch.client.ml.inference.NamedXContentObjectHelper.writeNamedObject; - -public class TrainedModelConfig implements ToXContentObject { - - public static final String NAME = "trained_model_config"; - - public static final ParseField MODEL_ID = new ParseField("model_id"); - public static final ParseField MODEL_TYPE = new ParseField("model_type"); - public static final ParseField CREATED_BY = new ParseField("created_by"); - public static final ParseField VERSION = new ParseField("version"); - public static final ParseField DESCRIPTION = new ParseField("description"); - public static final ParseField CREATE_TIME = new ParseField("create_time"); - public static final ParseField DEFINITION = new ParseField("definition"); - public static final ParseField COMPRESSED_DEFINITION = new ParseField("compressed_definition"); - public static final ParseField TAGS = new ParseField("tags"); - public static final ParseField METADATA = new ParseField("metadata"); - public static final ParseField INPUT = new ParseField("input"); - @Deprecated - public static final ParseField ESTIMATED_HEAP_MEMORY_USAGE_BYTES = new ParseField("estimated_heap_memory_usage_bytes"); - public static final ParseField MODEL_SIZE_BYTES = new ParseField("model_size_bytes", "estimated_heap_memory_usage_bytes"); - public static final ParseField ESTIMATED_OPERATIONS = new ParseField("estimated_operations"); - public static final ParseField LICENSE_LEVEL = new ParseField("license_level"); - public static final ParseField DEFAULT_FIELD_MAP = new ParseField("default_field_map"); - public static final ParseField INFERENCE_CONFIG = new 
ParseField("inference_config"); - public static final ParseField LOCATION = new ParseField("location"); - - public static final ObjectParser PARSER = new ObjectParser<>(NAME, true, TrainedModelConfig.Builder::new); - static { - PARSER.declareString(TrainedModelConfig.Builder::setModelId, MODEL_ID); - PARSER.declareString(TrainedModelConfig.Builder::setModelType, MODEL_TYPE); - PARSER.declareString(TrainedModelConfig.Builder::setCreatedBy, CREATED_BY); - PARSER.declareString(TrainedModelConfig.Builder::setVersion, VERSION); - PARSER.declareString(TrainedModelConfig.Builder::setDescription, DESCRIPTION); - PARSER.declareField( - TrainedModelConfig.Builder::setCreateTime, - (p, c) -> TimeUtil.parseTimeFieldToInstant(p, CREATE_TIME.getPreferredName()), - CREATE_TIME, - ObjectParser.ValueType.VALUE - ); - PARSER.declareObject(TrainedModelConfig.Builder::setDefinition, (p, c) -> TrainedModelDefinition.fromXContent(p), DEFINITION); - PARSER.declareString(TrainedModelConfig.Builder::setCompressedDefinition, COMPRESSED_DEFINITION); - PARSER.declareStringArray(TrainedModelConfig.Builder::setTags, TAGS); - PARSER.declareObject(TrainedModelConfig.Builder::setMetadata, (p, c) -> p.map(), METADATA); - PARSER.declareObject(TrainedModelConfig.Builder::setInput, (p, c) -> TrainedModelInput.fromXContent(p), INPUT); - PARSER.declareLong(TrainedModelConfig.Builder::setModelSize, MODEL_SIZE_BYTES); - PARSER.declareLong(TrainedModelConfig.Builder::setEstimatedOperations, ESTIMATED_OPERATIONS); - PARSER.declareString(TrainedModelConfig.Builder::setLicenseLevel, LICENSE_LEVEL); - PARSER.declareObject(TrainedModelConfig.Builder::setDefaultFieldMap, (p, c) -> p.mapStrings(), DEFAULT_FIELD_MAP); - PARSER.declareNamedObject( - TrainedModelConfig.Builder::setInferenceConfig, - (p, c, n) -> p.namedObject(InferenceConfig.class, n, null), - INFERENCE_CONFIG - ); - PARSER.declareNamedObject( - TrainedModelConfig.Builder::setLocation, - (p, c, n) -> p.namedObject(TrainedModelLocation.class, n, null), - LOCATION - ); - } - - public static TrainedModelConfig fromXContent(XContentParser parser) throws IOException { - return PARSER.parse(parser, null).build(); - } - - private final String modelId; - private final TrainedModelType modelType; - private final String createdBy; - private final Version version; - private final String description; - private final Instant createTime; - private final TrainedModelDefinition definition; - private final String compressedDefinition; - private final List tags; - private final Map metadata; - private final TrainedModelInput input; - private final Long modelSize; - private final Long estimatedOperations; - private final String licenseLevel; - private final Map defaultFieldMap; - private final InferenceConfig inferenceConfig; - private final TrainedModelLocation location; - - TrainedModelConfig( - String modelId, - TrainedModelType modelType, - String createdBy, - Version version, - String description, - Instant createTime, - TrainedModelDefinition definition, - String compressedDefinition, - List tags, - Map metadata, - TrainedModelInput input, - Long modelSize, - Long estimatedOperations, - String licenseLevel, - Map defaultFieldMap, - InferenceConfig inferenceConfig, - TrainedModelLocation location - ) { - this.modelId = modelId; - this.modelType = modelType; - this.createdBy = createdBy; - this.version = version; - this.createTime = createTime == null ? 
null : Instant.ofEpochMilli(createTime.toEpochMilli()); - this.definition = definition; - this.compressedDefinition = compressedDefinition; - this.description = description; - this.tags = tags == null ? null : Collections.unmodifiableList(tags); - this.metadata = metadata == null ? null : Collections.unmodifiableMap(metadata); - this.input = input; - this.modelSize = modelSize; - this.estimatedOperations = estimatedOperations; - this.licenseLevel = licenseLevel; - this.defaultFieldMap = defaultFieldMap == null ? null : Collections.unmodifiableMap(defaultFieldMap); - this.inferenceConfig = inferenceConfig; - this.location = location; - } - - public String getModelId() { - return modelId; - } - - public TrainedModelType getModelType() { - return modelType; - } - - public String getCreatedBy() { - return createdBy; - } - - public Version getVersion() { - return version; - } - - public String getDescription() { - return description; - } - - public Instant getCreateTime() { - return createTime; - } - - public List getTags() { - return tags; - } - - public Map getMetadata() { - return metadata; - } - - public TrainedModelDefinition getDefinition() { - return definition; - } - - public String getCompressedDefinition() { - return compressedDefinition; - } - - @Nullable - public TrainedModelLocation getLocation() { - return location; - } - - public TrainedModelInput getInput() { - return input; - } - - /** - * @deprecated use {@link TrainedModelConfig#getModelSize()} instead - * @return the {@link ByteSizeValue} of the model size if available. - */ - @Deprecated - public ByteSizeValue getEstimatedHeapMemory() { - return modelSize == null ? null : new ByteSizeValue(modelSize); - } - - /** - * @deprecated use {@link TrainedModelConfig#getModelSizeBytes()} instead - * @return the model size in bytes if available. - */ - @Deprecated - public Long getEstimatedHeapMemoryBytes() { - return modelSize; - } - - /** - * @return the {@link ByteSizeValue} of the model size if available. - */ - public ByteSizeValue getModelSize() { - return modelSize == null ? null : new ByteSizeValue(modelSize); - } - - /** - * @return the model size in bytes if available. 
- */ - public Long getModelSizeBytes() { - return modelSize; - } - - public String getLicenseLevel() { - return licenseLevel; - } - - public Map getDefaultFieldMap() { - return defaultFieldMap; - } - - public InferenceConfig getInferenceConfig() { - return inferenceConfig; - } - - public static Builder builder() { - return new Builder(); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - if (modelId != null) { - builder.field(MODEL_ID.getPreferredName(), modelId); - } - if (modelType != null) { - builder.field(MODEL_TYPE.getPreferredName(), modelType.toString()); - } - if (createdBy != null) { - builder.field(CREATED_BY.getPreferredName(), createdBy); - } - if (version != null) { - builder.field(VERSION.getPreferredName(), version.toString()); - } - if (description != null) { - builder.field(DESCRIPTION.getPreferredName(), description); - } - if (createTime != null) { - builder.timeField(CREATE_TIME.getPreferredName(), CREATE_TIME.getPreferredName() + "_string", createTime.toEpochMilli()); - } - if (definition != null) { - builder.field(DEFINITION.getPreferredName(), definition); - } - if (tags != null) { - builder.field(TAGS.getPreferredName(), tags); - } - if (metadata != null) { - builder.field(METADATA.getPreferredName(), metadata); - } - if (input != null) { - builder.field(INPUT.getPreferredName(), input); - } - if (modelSize != null) { - builder.field(MODEL_SIZE_BYTES.getPreferredName(), modelSize); - } - if (estimatedOperations != null) { - builder.field(ESTIMATED_OPERATIONS.getPreferredName(), estimatedOperations); - } - if (compressedDefinition != null) { - builder.field(COMPRESSED_DEFINITION.getPreferredName(), compressedDefinition); - } - if (licenseLevel != null) { - builder.field(LICENSE_LEVEL.getPreferredName(), licenseLevel); - } - if (defaultFieldMap != null) { - builder.field(DEFAULT_FIELD_MAP.getPreferredName(), defaultFieldMap); - } - if (inferenceConfig != null) { - writeNamedObject(builder, params, INFERENCE_CONFIG.getPreferredName(), inferenceConfig); - } - if (location != null) { - writeNamedObject(builder, params, LOCATION.getPreferredName(), location); - } - builder.endObject(); - return builder; - } - - @Override - public String toString() { - return Strings.toString(this); - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - TrainedModelConfig that = (TrainedModelConfig) o; - return Objects.equals(modelId, that.modelId) - && Objects.equals(modelType, that.modelType) - && Objects.equals(createdBy, that.createdBy) - && Objects.equals(version, that.version) - && Objects.equals(description, that.description) - && Objects.equals(createTime, that.createTime) - && Objects.equals(definition, that.definition) - && Objects.equals(compressedDefinition, that.compressedDefinition) - && Objects.equals(tags, that.tags) - && Objects.equals(input, that.input) - && Objects.equals(modelSize, that.modelSize) - && Objects.equals(estimatedOperations, that.estimatedOperations) - && Objects.equals(licenseLevel, that.licenseLevel) - && Objects.equals(defaultFieldMap, that.defaultFieldMap) - && Objects.equals(inferenceConfig, that.inferenceConfig) - && Objects.equals(metadata, that.metadata) - && Objects.equals(location, that.location); - } - - @Override - public int hashCode() { - return Objects.hash( - modelId, - modelType, - createdBy, - version, - createTime, - definition, - compressedDefinition, - 
description, - tags, - modelSize, - estimatedOperations, - metadata, - licenseLevel, - input, - inferenceConfig, - defaultFieldMap, - location - ); - } - - public static class Builder { - - private String modelId; - private TrainedModelType modelType; - private String createdBy; - private Version version; - private String description; - private Instant createTime; - private Map metadata; - private List tags; - private TrainedModelDefinition definition; - private String compressedDefinition; - private TrainedModelInput input; - private Long modelSize; - private Long estimatedOperations; - private String licenseLevel; - private Map defaultFieldMap; - private InferenceConfig inferenceConfig; - private TrainedModelLocation location; - - public Builder setModelId(String modelId) { - this.modelId = modelId; - return this; - } - - public Builder setModelType(String modelType) { - this.modelType = TrainedModelType.fromString(modelType); - return this; - } - - public Builder setModelType(TrainedModelType modelType) { - this.modelType = modelType; - return this; - } - - private Builder setCreatedBy(String createdBy) { - this.createdBy = createdBy; - return this; - } - - private Builder setVersion(Version version) { - this.version = version; - return this; - } - - private Builder setVersion(String version) { - return this.setVersion(Version.fromString(version)); - } - - public Builder setDescription(String description) { - this.description = description; - return this; - } - - private Builder setCreateTime(Instant createTime) { - this.createTime = createTime; - return this; - } - - public Builder setTags(List tags) { - this.tags = tags; - return this; - } - - public Builder setTags(String... tags) { - return setTags(Arrays.asList(tags)); - } - - public Builder setMetadata(Map metadata) { - this.metadata = metadata; - return this; - } - - public Builder setDefinition(TrainedModelDefinition.Builder definition) { - this.definition = definition == null ? 
null : definition.build(); - return this; - } - - public Builder setCompressedDefinition(String compressedDefinition) { - this.compressedDefinition = compressedDefinition; - return this; - } - - public Builder setDefinition(TrainedModelDefinition definition) { - this.definition = definition; - return this; - } - - public Builder setLocation(TrainedModelLocation location) { - this.location = location; - return this; - } - - public Builder setInput(TrainedModelInput input) { - this.input = input; - return this; - } - - private Builder setModelSize(Long modelSize) { - this.modelSize = modelSize; - return this; - } - - private Builder setEstimatedOperations(Long estimatedOperations) { - this.estimatedOperations = estimatedOperations; - return this; - } - - private Builder setLicenseLevel(String licenseLevel) { - this.licenseLevel = licenseLevel; - return this; - } - - public Builder setDefaultFieldMap(Map defaultFieldMap) { - this.defaultFieldMap = defaultFieldMap; - return this; - } - - public Builder setInferenceConfig(InferenceConfig inferenceConfig) { - this.inferenceConfig = inferenceConfig; - return this; - } - - public TrainedModelConfig build() { - return new TrainedModelConfig( - modelId, - modelType, - createdBy, - version, - description, - createTime, - definition, - compressedDefinition, - tags, - metadata, - input, - modelSize, - estimatedOperations, - licenseLevel, - defaultFieldMap, - inferenceConfig, - location - ); - } - } - -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/TrainedModelDefinition.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/TrainedModelDefinition.java deleted file mode 100644 index 3ca84bc62cbd5..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/TrainedModelDefinition.java +++ /dev/null @@ -1,119 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ -package org.elasticsearch.client.ml.inference; - -import org.elasticsearch.client.ml.inference.preprocessing.PreProcessor; -import org.elasticsearch.client.ml.inference.trainedmodel.TrainedModel; -import org.elasticsearch.common.Strings; -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Collections; -import java.util.List; -import java.util.Objects; - -public class TrainedModelDefinition implements ToXContentObject { - - public static final String NAME = "trained_model_doc"; - - public static final ParseField TRAINED_MODEL = new ParseField("trained_model"); - public static final ParseField PREPROCESSORS = new ParseField("preprocessors"); - - public static final ObjectParser PARSER = new ObjectParser<>(NAME, true, TrainedModelDefinition.Builder::new); - static { - PARSER.declareNamedObject( - TrainedModelDefinition.Builder::setTrainedModel, - (p, c, n) -> p.namedObject(TrainedModel.class, n, null), - TRAINED_MODEL - ); - PARSER.declareNamedObjects( - TrainedModelDefinition.Builder::setPreProcessors, - (p, c, n) -> p.namedObject(PreProcessor.class, n, null), - (trainedModelDefBuilder) -> {/* Does not matter client side*/ }, - PREPROCESSORS - ); - } - - public static TrainedModelDefinition.Builder fromXContent(XContentParser parser) throws IOException { - return PARSER.parse(parser, null); - } - - private final TrainedModel trainedModel; - private final List preProcessors; - - TrainedModelDefinition(TrainedModel trainedModel, List preProcessors) { - this.trainedModel = trainedModel; - this.preProcessors = preProcessors == null ? 
Collections.emptyList() : Collections.unmodifiableList(preProcessors);
-    }
-
-    @Override
-    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
-        builder.startObject();
-        NamedXContentObjectHelper.writeNamedObjects(
-            builder,
-            params,
-            false,
-            TRAINED_MODEL.getPreferredName(),
-            Collections.singletonList(trainedModel)
-        );
-        NamedXContentObjectHelper.writeNamedObjects(builder, params, true, PREPROCESSORS.getPreferredName(), preProcessors);
-        builder.endObject();
-        return builder;
-    }
-
-    public TrainedModel getTrainedModel() {
-        return trainedModel;
-    }
-
-    public List<PreProcessor> getPreProcessors() {
-        return preProcessors;
-    }
-
-    @Override
-    public String toString() {
-        return Strings.toString(this);
-    }
-
-    @Override
-    public boolean equals(Object o) {
-        if (this == o) return true;
-        if (o == null || getClass() != o.getClass()) return false;
-        TrainedModelDefinition that = (TrainedModelDefinition) o;
-        return Objects.equals(trainedModel, that.trainedModel) && Objects.equals(preProcessors, that.preProcessors);
-    }
-
-    @Override
-    public int hashCode() {
-        return Objects.hash(trainedModel, preProcessors);
-    }
-
-    public static class Builder {
-
-        private List<PreProcessor> preProcessors;
-        private TrainedModel trainedModel;
-
-        public Builder setPreProcessors(List<PreProcessor> preProcessors) {
-            this.preProcessors = preProcessors;
-            return this;
-        }
-
-        public Builder setTrainedModel(TrainedModel trainedModel) {
-            this.trainedModel = trainedModel;
-            return this;
-        }
-
-        public TrainedModelDefinition build() {
-            return new TrainedModelDefinition(this.trainedModel, this.preProcessors);
-        }
-    }
-
-}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/TrainedModelInput.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/TrainedModelInput.java
deleted file mode 100644
index d6e2d0559396c..0000000000000
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/TrainedModelInput.java
+++ /dev/null
@@ -1,78 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-package org.elasticsearch.client.ml.inference;
-
-import org.elasticsearch.xcontent.ConstructingObjectParser;
-import org.elasticsearch.xcontent.ParseField;
-import org.elasticsearch.xcontent.ToXContentObject;
-import org.elasticsearch.xcontent.XContentBuilder;
-import org.elasticsearch.xcontent.XContentParser;
-
-import java.io.IOException;
-import java.util.Arrays;
-import java.util.List;
-import java.util.Objects;
-
-public class TrainedModelInput implements ToXContentObject {
-
-    public static final String NAME = "trained_model_config_input";
-    public static final ParseField FIELD_NAMES = new ParseField("field_names");
-
-    @SuppressWarnings("unchecked")
-    public static final ConstructingObjectParser<TrainedModelInput, Void> PARSER = new ConstructingObjectParser<>(
-        NAME,
-        true,
-        a -> new TrainedModelInput((List<String>) a[0])
-    );
-
-    static {
-        PARSER.declareStringArray(ConstructingObjectParser.constructorArg(), FIELD_NAMES);
-    }
-
-    private final List<String> fieldNames;
-
-    public TrainedModelInput(List<String> fieldNames) {
-        this.fieldNames = fieldNames;
-    }
-
-    public TrainedModelInput(String... fieldNames) {
-        this(Arrays.asList(fieldNames));
-    }
-
-    public static TrainedModelInput fromXContent(XContentParser parser) throws IOException {
-        return PARSER.parse(parser, null);
-    }
-
-    public List<String> getFieldNames() {
-        return fieldNames;
-    }
-
-    @Override
-    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
-        builder.startObject();
-        if (fieldNames != null) {
-            builder.field(FIELD_NAMES.getPreferredName(), fieldNames);
-        }
-        builder.endObject();
-        return builder;
-    }
-
-    @Override
-    public boolean equals(Object o) {
-        if (this == o) return true;
-        if (o == null || getClass() != o.getClass()) return false;
-        TrainedModelInput that = (TrainedModelInput) o;
-        return Objects.equals(fieldNames, that.fieldNames);
-    }
-
-    @Override
-    public int hashCode() {
-        return Objects.hash(fieldNames);
-    }
-
-}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/TrainedModelStats.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/TrainedModelStats.java
deleted file mode 100644
index bd45805e70603..0000000000000
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/TrainedModelStats.java
+++ /dev/null
@@ -1,128 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-package org.elasticsearch.client.ml.inference;
-
-import org.elasticsearch.client.ml.inference.trainedmodel.InferenceStats;
-import org.elasticsearch.core.Nullable;
-import org.elasticsearch.ingest.IngestStats;
-import org.elasticsearch.xcontent.ConstructingObjectParser;
-import org.elasticsearch.xcontent.ParseField;
-import org.elasticsearch.xcontent.ToXContentObject;
-import org.elasticsearch.xcontent.XContentBuilder;
-import org.elasticsearch.xcontent.XContentParser;
-
-import java.io.IOException;
-import java.util.Map;
-import java.util.Objects;
-
-import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg;
-import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg;
-
-public class TrainedModelStats implements ToXContentObject {
-
-    public static final ParseField MODEL_ID = new ParseField("model_id");
-    public static final ParseField PIPELINE_COUNT = new ParseField("pipeline_count");
-    public static final ParseField INGEST_STATS = new ParseField("ingest");
-    public static final ParseField INFERENCE_STATS = new ParseField("inference_stats");
-
-    private final String modelId;
-    private final Map<String, Object> ingestStats;
-    private final int pipelineCount;
-    private final InferenceStats inferenceStats;
-
-    @SuppressWarnings("unchecked")
-    static final ConstructingObjectParser<TrainedModelStats, Void> PARSER = new ConstructingObjectParser<>(
-        "trained_model_stats",
-        true,
-        args -> new TrainedModelStats((String) args[0], (Map<String, Object>) args[1], (Integer) args[2], (InferenceStats) args[3])
-    );
-
-    static {
-        PARSER.declareString(constructorArg(), MODEL_ID);
-        PARSER.declareObject(optionalConstructorArg(), (p, c) -> p.mapOrdered(), INGEST_STATS);
-        PARSER.declareInt(constructorArg(), PIPELINE_COUNT);
-        PARSER.declareObject(optionalConstructorArg(), InferenceStats.PARSER, INFERENCE_STATS);
-    }
-
-    public static TrainedModelStats fromXContent(XContentParser parser) {
-        return
PARSER.apply(parser, null); - } - - public TrainedModelStats(String modelId, Map ingestStats, int pipelineCount, InferenceStats inferenceStats) { - this.modelId = modelId; - this.ingestStats = ingestStats; - this.pipelineCount = pipelineCount; - this.inferenceStats = inferenceStats; - } - - /** - * The model id for which the stats apply - */ - public String getModelId() { - return modelId; - } - - /** - * Ingest level statistics. See {@link IngestStats#toXContent(XContentBuilder, Params)} for fields and format - * If there are no ingest pipelines referencing the model, then the ingest statistics could be null. - */ - @Nullable - public Map getIngestStats() { - return ingestStats; - } - - /** - * The total number of pipelines that reference the trained model - */ - public int getPipelineCount() { - return pipelineCount; - } - - /** - * Inference statistics - */ - public InferenceStats getInferenceStats() { - return inferenceStats; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(MODEL_ID.getPreferredName(), modelId); - builder.field(PIPELINE_COUNT.getPreferredName(), pipelineCount); - if (ingestStats != null) { - builder.field(INGEST_STATS.getPreferredName(), ingestStats); - } - if (inferenceStats != null) { - builder.field(INFERENCE_STATS.getPreferredName(), inferenceStats); - } - builder.endObject(); - return builder; - } - - @Override - public int hashCode() { - return Objects.hash(modelId, ingestStats, pipelineCount, inferenceStats); - } - - @Override - public boolean equals(Object obj) { - if (obj == null) { - return false; - } - if (getClass() != obj.getClass()) { - return false; - } - TrainedModelStats other = (TrainedModelStats) obj; - return Objects.equals(this.modelId, other.modelId) - && Objects.equals(this.ingestStats, other.ingestStats) - && Objects.equals(this.pipelineCount, other.pipelineCount) - && Objects.equals(this.inferenceStats, other.inferenceStats); - } - -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/TrainedModelType.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/TrainedModelType.java deleted file mode 100644 index e34c01d880b87..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/TrainedModelType.java +++ /dev/null @@ -1,26 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
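
All of the removed classes above share one parsing convention: a static ConstructingObjectParser whose second constructor argument is true, which makes the parser lenient and silently skip unknown fields, keeping an older client compatible with responses from a newer server. A minimal sketch of that convention, assuming the same org.elasticsearch.xcontent dependency used by the code above; ExampleDoc and its field are hypothetical names used only for illustration:

import org.elasticsearch.xcontent.ConstructingObjectParser;
import org.elasticsearch.xcontent.ParseField;

public class ExampleDoc {
    private static final ParseField ID = new ParseField("id");

    // true = lenient: unknown fields in the response are ignored rather than rejected
    public static final ConstructingObjectParser<ExampleDoc, Void> PARSER = new ConstructingObjectParser<>(
        "example_doc",
        true,
        a -> new ExampleDoc((String) a[0])
    );

    static {
        PARSER.declareString(ConstructingObjectParser.constructorArg(), ID);
    }

    private final String id;

    public ExampleDoc(String id) {
        this.id = id;
    }
}
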
- */ - -package org.elasticsearch.client.ml.inference; - -import java.util.Locale; - -public enum TrainedModelType { - TREE_ENSEMBLE, - LANG_IDENT, - PYTORCH; - - public static TrainedModelType fromString(String name) { - return valueOf(name.trim().toUpperCase(Locale.ROOT)); - } - - @Override - public String toString() { - return name().toLowerCase(Locale.ROOT); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/preprocessing/CustomWordEmbedding.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/preprocessing/CustomWordEmbedding.java deleted file mode 100644 index 3ad8170b3ce9f..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/preprocessing/CustomWordEmbedding.java +++ /dev/null @@ -1,151 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.inference.preprocessing; - -import org.elasticsearch.core.CheckedFunction; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContent; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; -import java.util.Objects; - -/** - * This is a pre-processor that embeds text into a numerical vector. - * - * It calculates a set of features based on script type, ngram hashes, and most common script values. - * - * The features are then concatenated with specific quantization scales and weights into a vector of length 80. 
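
The TrainedModelType enum just removed round-trips its wire form through Locale.ROOT, so parsing tolerates surrounding whitespace and any case, while serialization is always lowercase. A hypothetical round trip, assuming the enum above is on the classpath:

// fromString() trims and uppercases with Locale.ROOT before valueOf():
TrainedModelType type = TrainedModelType.fromString(" lang_ident ");  // -> LANG_IDENT
String wireForm = type.toString();                                    // -> "lang_ident"
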
- * - * This is a fork and a port of: https://github.com/google/cld3/blob/06f695f1c8ee530104416aab5dcf2d6a1414a56a/src/embedding_network.cc - */ -public class CustomWordEmbedding implements PreProcessor { - - public static final String NAME = "custom_word_embedding"; - static final ParseField FIELD = new ParseField("field"); - static final ParseField DEST_FIELD = new ParseField("dest_field"); - static final ParseField EMBEDDING_WEIGHTS = new ParseField("embedding_weights"); - static final ParseField EMBEDDING_QUANT_SCALES = new ParseField("embedding_quant_scales"); - - public static final ConstructingObjectParser<CustomWordEmbedding, Void> PARSER = new ConstructingObjectParser<>( - NAME, - true, - a -> new CustomWordEmbedding((short[][]) a[0], (byte[][]) a[1], (String) a[2], (String) a[3]) - ); - static { - PARSER.declareField(ConstructingObjectParser.constructorArg(), (p, c) -> { - List<List<Short>> listOfListOfShorts = parseArrays(EMBEDDING_QUANT_SCALES.getPreferredName(), XContentParser::shortValue, p); - short[][] primitiveShorts = new short[listOfListOfShorts.size()][]; - int i = 0; - for (List<Short> shorts : listOfListOfShorts) { - short[] innerShorts = new short[shorts.size()]; - for (int j = 0; j < shorts.size(); j++) { - innerShorts[j] = shorts.get(j); - } - primitiveShorts[i++] = innerShorts; - } - return primitiveShorts; - }, EMBEDDING_QUANT_SCALES, ObjectParser.ValueType.VALUE_ARRAY); - PARSER.declareField(ConstructingObjectParser.constructorArg(), (p, c) -> { - List<byte[]> values = new ArrayList<>(); - while (p.nextToken() != XContentParser.Token.END_ARRAY) { - values.add(p.binaryValue()); - } - byte[][] primitiveBytes = new byte[values.size()][]; - int i = 0; - for (byte[] bytes : values) { - primitiveBytes[i++] = bytes; - } - return primitiveBytes; - }, EMBEDDING_WEIGHTS, ObjectParser.ValueType.VALUE_ARRAY); - PARSER.declareString(ConstructingObjectParser.constructorArg(), FIELD); - PARSER.declareString(ConstructingObjectParser.constructorArg(), DEST_FIELD); - } - - private static <T> List<List<T>> parseArrays( - String fieldName, - CheckedFunction<XContentParser, T, IOException> fromParser, - XContentParser p - ) throws IOException { - if (p.currentToken() != XContentParser.Token.START_ARRAY) { - throw new IllegalArgumentException("unexpected token [" + p.currentToken() + "] for [" + fieldName + "]"); - } - List<List<T>> values = new ArrayList<>(); - while (p.nextToken() != XContentParser.Token.END_ARRAY) { - if (p.currentToken() != XContentParser.Token.START_ARRAY) { - throw new IllegalArgumentException("unexpected token [" + p.currentToken() + "] for [" + fieldName + "]"); - } - List<T> innerList = new ArrayList<>(); - while (p.nextToken() != XContentParser.Token.END_ARRAY) { - if (p.currentToken().isValue() == false) { - throw new IllegalStateException( - "expected non-null value but got [" + p.currentToken() + "] " + "for [" + fieldName + "]" - ); - } - innerList.add(fromParser.apply(p)); - } - values.add(innerList); - } - return values; - } - - public static CustomWordEmbedding fromXContent(XContentParser parser) { - return PARSER.apply(parser, null); - } - - private final short[][] embeddingsQuantScales; - private final byte[][] embeddingsWeights; - private final String fieldName; - private final String destField; - - CustomWordEmbedding(short[][] embeddingsQuantScales, byte[][] embeddingsWeights, String fieldName, String destField) { - this.embeddingsQuantScales = embeddingsQuantScales; - this.embeddingsWeights = embeddingsWeights; - this.fieldName = fieldName; - this.destField = destField; - } - - @Override - public String getName() { - return NAME; - } - - 
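
The two declareField lambdas and parseArrays above exist only to turn a JSON array of arrays into primitive short[][] and byte[][] matrices, unboxing by hand because Java generics cannot hold primitives. A self-contained sketch of the same conversion; the class and method names are illustrative, not from this commit:

import java.util.List;

public class NestedArrayDemo {

    // Mirrors the List<List<Short>> -> short[][] unboxing in the parser above.
    static short[][] toPrimitive(List<List<Short>> rows) {
        short[][] out = new short[rows.size()][];
        int i = 0;
        for (List<Short> row : rows) {
            short[] inner = new short[row.size()];
            for (int j = 0; j < row.size(); j++) {
                inner[j] = row.get(j); // auto-unboxing, one element at a time
            }
            out[i++] = inner;
        }
        return out;
    }

    public static void main(String[] args) {
        short[][] m = toPrimitive(List.of(List.of((short) 1, (short) 2), List.of((short) 3)));
        System.out.println(m.length); // 2 rows; rows may be ragged (m[1] has length 1)
    }
}
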
@Override - public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { - builder.startObject(); - builder.field(FIELD.getPreferredName(), fieldName); - builder.field(DEST_FIELD.getPreferredName(), destField); - builder.field(EMBEDDING_QUANT_SCALES.getPreferredName(), embeddingsQuantScales); - builder.field(EMBEDDING_WEIGHTS.getPreferredName(), embeddingsWeights); - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - CustomWordEmbedding that = (CustomWordEmbedding) o; - return Objects.equals(fieldName, that.fieldName) - && Objects.equals(destField, that.destField) - && Arrays.deepEquals(embeddingsWeights, that.embeddingsWeights) - && Arrays.deepEquals(embeddingsQuantScales, that.embeddingsQuantScales); - } - - @Override - public int hashCode() { - return Objects.hash(fieldName, destField, Arrays.deepHashCode(embeddingsQuantScales), Arrays.deepHashCode(embeddingsWeights)); - } - -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/preprocessing/FrequencyEncoding.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/preprocessing/FrequencyEncoding.java deleted file mode 100644 index 81d3cfa05cb45..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/preprocessing/FrequencyEncoding.java +++ /dev/null @@ -1,170 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.inference.preprocessing; - -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContent; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Collections; -import java.util.HashMap; -import java.util.Map; -import java.util.Objects; - -/** - * PreProcessor for frequency encoding a set of categorical values for a given field. 
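
Because CustomWordEmbedding's state is nested primitive arrays, its equals and hashCode above must go through Arrays.deepEquals and Arrays.deepHashCode; the shallow variants compare the inner arrays by reference and would report false negatives. A standalone illustration (names are illustrative):

import java.util.Arrays;

public class DeepEqualsDemo {
    public static void main(String[] args) {
        byte[][] a = { { 1, 2 }, { 3 } };
        byte[][] b = { { 1, 2 }, { 3 } };
        System.out.println(Arrays.equals(a, b));     // false: inner arrays compared by reference
        System.out.println(Arrays.deepEquals(a, b)); // true: recurses into the inner arrays
    }
}
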
- */ -public class FrequencyEncoding implements PreProcessor { - - public static final String NAME = "frequency_encoding"; - public static final ParseField FIELD = new ParseField("field"); - public static final ParseField FEATURE_NAME = new ParseField("feature_name"); - public static final ParseField FREQUENCY_MAP = new ParseField("frequency_map"); - public static final ParseField CUSTOM = new ParseField("custom"); - - @SuppressWarnings("unchecked") - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - NAME, - true, - a -> new FrequencyEncoding((String) a[0], (String) a[1], (Map) a[2], (Boolean) a[3]) - ); - static { - PARSER.declareString(ConstructingObjectParser.constructorArg(), FIELD); - PARSER.declareString(ConstructingObjectParser.constructorArg(), FEATURE_NAME); - PARSER.declareObject( - ConstructingObjectParser.constructorArg(), - (p, c) -> p.map(HashMap::new, XContentParser::doubleValue), - FREQUENCY_MAP - ); - PARSER.declareBoolean(ConstructingObjectParser.optionalConstructorArg(), CUSTOM); - } - - public static FrequencyEncoding fromXContent(XContentParser parser) { - return PARSER.apply(parser, null); - } - - private final String field; - private final String featureName; - private final Map frequencyMap; - private final Boolean custom; - - FrequencyEncoding(String field, String featureName, Map frequencyMap, Boolean custom) { - this.field = Objects.requireNonNull(field); - this.featureName = Objects.requireNonNull(featureName); - this.frequencyMap = Collections.unmodifiableMap(Objects.requireNonNull(frequencyMap)); - this.custom = custom; - } - - /** - * @return Field name on which to frequency encode - */ - public String getField() { - return field; - } - - /** - * @return Map of Value: frequency for the frequency encoding - */ - public Map getFrequencyMap() { - return frequencyMap; - } - - /** - * @return The encoded feature name - */ - public String getFeatureName() { - return featureName; - } - - @Override - public String getName() { - return NAME; - } - - public Boolean getCustom() { - return custom; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { - builder.startObject(); - builder.field(FIELD.getPreferredName(), field); - builder.field(FEATURE_NAME.getPreferredName(), featureName); - builder.field(FREQUENCY_MAP.getPreferredName(), frequencyMap); - if (custom != null) { - builder.field(CUSTOM.getPreferredName(), custom); - } - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - FrequencyEncoding that = (FrequencyEncoding) o; - return Objects.equals(field, that.field) - && Objects.equals(featureName, that.featureName) - && Objects.equals(custom, that.custom) - && Objects.equals(frequencyMap, that.frequencyMap); - } - - @Override - public int hashCode() { - return Objects.hash(field, featureName, frequencyMap, custom); - } - - public Builder builder(String fieldName) { - return new Builder(fieldName); - } - - public static class Builder { - - private String field; - private String featureName; - private Map frequencyMap = new HashMap<>(); - private Boolean custom; - - public Builder(String field) { - this.field = field; - } - - public Builder setField(String field) { - this.field = field; - return this; - } - - public Builder setFeatureName(String featureName) { - this.featureName = featureName; - return this; - } - - public 
Builder setFrequencyMap(Map frequencyMap) { - this.frequencyMap = new HashMap<>(frequencyMap); - return this; - } - - public Builder addFrequency(String valueName, double frequency) { - this.frequencyMap.put(valueName, frequency); - return this; - } - - public Builder setCustom(boolean custom) { - this.custom = custom; - return this; - } - - public FrequencyEncoding build() { - return new FrequencyEncoding(field, featureName, frequencyMap, custom); - } - } - -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/preprocessing/Multi.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/preprocessing/Multi.java deleted file mode 100644 index bf9f1aba2c057..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/preprocessing/Multi.java +++ /dev/null @@ -1,110 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ - -package org.elasticsearch.client.ml.inference.preprocessing; - -import org.elasticsearch.client.ml.inference.NamedXContentObjectHelper; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContent; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.List; -import java.util.Objects; - -/** - * Multi-PreProcessor for chaining together multiple processors - */ -public class Multi implements PreProcessor { - - public static final String NAME = "multi_encoding"; - public static final ParseField PROCESSORS = new ParseField("processors"); - public static final ParseField CUSTOM = new ParseField("custom"); - - @SuppressWarnings("unchecked") - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - NAME, - true, - a -> new Multi((List) a[0], (Boolean) a[1]) - ); - static { - PARSER.declareNamedObjects( - ConstructingObjectParser.constructorArg(), - (p, c, n) -> p.namedObject(PreProcessor.class, n, null), - (_unused) -> {/* Does not matter client side*/ }, - PROCESSORS - ); - PARSER.declareBoolean(ConstructingObjectParser.optionalConstructorArg(), CUSTOM); - } - - public static Multi fromXContent(XContentParser parser) { - return PARSER.apply(parser, null); - } - - private final List processors; - private final Boolean custom; - - Multi(List processors, Boolean custom) { - this.processors = Objects.requireNonNull(processors, PROCESSORS.getPreferredName()); - this.custom = custom; - } - - @Override - public String getName() { - return NAME; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { - builder.startObject(); - NamedXContentObjectHelper.writeNamedObjects(builder, params, true, PROCESSORS.getPreferredName(), processors); - if (custom != null) { - builder.field(CUSTOM.getPreferredName(), custom); - } - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - Multi multi = (Multi) o; - return Objects.equals(multi.processors, processors) && 
Objects.equals(custom, multi.custom); - } - - @Override - public int hashCode() { - return Objects.hash(custom, processors); - } - - public static Builder builder(List processors) { - return new Builder(processors); - } - - public static class Builder { - private final List processors; - private Boolean custom; - - public Builder(List processors) { - this.processors = processors; - } - - public Builder setCustom(boolean custom) { - this.custom = custom; - return this; - } - - public Multi build() { - return new Multi(processors, custom); - } - } - -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/preprocessing/NGram.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/preprocessing/NGram.java deleted file mode 100644 index bd831a6bf8d54..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/preprocessing/NGram.java +++ /dev/null @@ -1,224 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.inference.preprocessing; - -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.Collections; -import java.util.List; -import java.util.Objects; -import java.util.function.IntFunction; -import java.util.stream.IntStream; - -/** - * PreProcessor for n-gram encoding a string - */ -public class NGram implements PreProcessor { - - public static final String NAME = "n_gram_encoding"; - public static final ParseField FIELD = new ParseField("field"); - public static final ParseField FEATURE_PREFIX = new ParseField("feature_prefix"); - public static final ParseField NGRAMS = new ParseField("n_grams"); - public static final ParseField START = new ParseField("start"); - public static final ParseField LENGTH = new ParseField("length"); - public static final ParseField CUSTOM = new ParseField("custom"); - - @SuppressWarnings("unchecked") - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser( - NAME, - true, - a -> new NGram((String) a[0], (List) a[1], (Integer) a[2], (Integer) a[3], (Boolean) a[4], (String) a[5]) - ); - static { - PARSER.declareString(ConstructingObjectParser.constructorArg(), FIELD); - PARSER.declareIntArray(ConstructingObjectParser.constructorArg(), NGRAMS); - PARSER.declareInt(ConstructingObjectParser.optionalConstructorArg(), START); - PARSER.declareInt(ConstructingObjectParser.optionalConstructorArg(), LENGTH); - PARSER.declareBoolean(ConstructingObjectParser.optionalConstructorArg(), CUSTOM); - PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), FEATURE_PREFIX); - } - - public static NGram fromXContent(XContentParser parser) { - return PARSER.apply(parser, null); - } - - private final String field; - private final String featurePrefix; - private final List nGrams; - private final Integer start; - private final Integer length; - private final Boolean custom; - - NGram(String field, List nGrams, Integer start, Integer length, Boolean custom, String 
featurePrefix) { - this.field = field; - this.featurePrefix = featurePrefix; - this.nGrams = nGrams; - this.start = start; - this.length = length; - this.custom = custom; - } - - @Override - public String getName() { - return NAME; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - if (field != null) { - builder.field(FIELD.getPreferredName(), field); - } - if (featurePrefix != null) { - builder.field(FEATURE_PREFIX.getPreferredName(), featurePrefix); - } - if (nGrams != null) { - builder.field(NGRAMS.getPreferredName(), nGrams); - } - if (start != null) { - builder.field(START.getPreferredName(), start); - } - if (length != null) { - builder.field(LENGTH.getPreferredName(), length); - } - if (custom != null) { - builder.field(CUSTOM.getPreferredName(), custom); - } - builder.endObject(); - return builder; - } - - public String getField() { - return field; - } - - public String getFeaturePrefix() { - return featurePrefix; - } - - public List getnGrams() { - return nGrams; - } - - public Integer getStart() { - return start; - } - - public Integer getLength() { - return length; - } - - public Boolean getCustom() { - return custom; - } - - public List outputFields() { - return allPossibleNGramOutputFeatureNames(); - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - NGram nGram = (NGram) o; - return Objects.equals(field, nGram.field) - && Objects.equals(featurePrefix, nGram.featurePrefix) - && Objects.equals(nGrams, nGram.nGrams) - && Objects.equals(start, nGram.start) - && Objects.equals(length, nGram.length) - && Objects.equals(custom, nGram.custom); - } - - @Override - public int hashCode() { - return Objects.hash(field, featurePrefix, start, length, custom, nGrams); - } - - private String nGramFeature(int nGram, int pos) { - return featurePrefix + "." 
+ nGram + pos; - } - - private List allPossibleNGramOutputFeatureNames() { - int totalNgrams = 0; - for (int nGram : nGrams) { - totalNgrams += (length - (nGram - 1)); - } - if (totalNgrams <= 0) { - return Collections.emptyList(); - } - List ngramOutputs = new ArrayList<>(totalNgrams); - - for (int nGram : nGrams) { - IntFunction func = i -> nGramFeature(nGram, i); - IntStream.range(0, (length - (nGram - 1))).mapToObj(func).forEach(ngramOutputs::add); - } - return ngramOutputs; - } - - public static Builder builder(String field) { - return new Builder(field); - } - - public static class Builder { - - private String field; - private String featurePrefix; - private List nGrams; - private Integer start; - private Integer length; - private Boolean custom; - - public Builder(String field) { - this.field = field; - } - - public Builder setField(String field) { - this.field = field; - return this; - } - - public Builder setCustom(boolean custom) { - this.custom = custom; - return this; - } - - public Builder setFeaturePrefix(String featurePrefix) { - this.featurePrefix = featurePrefix; - return this; - } - - public Builder setnGrams(List nGrams) { - this.nGrams = nGrams; - return this; - } - - public Builder setStart(Integer start) { - this.start = start; - return this; - } - - public Builder setLength(Integer length) { - this.length = length; - return this; - } - - public Builder setCustom(Boolean custom) { - this.custom = custom; - return this; - } - - public NGram build() { - return new NGram(field, nGrams, start, length, custom, featurePrefix); - } - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/preprocessing/OneHotEncoding.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/preprocessing/OneHotEncoding.java deleted file mode 100644 index 461c62fd54c0d..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/preprocessing/OneHotEncoding.java +++ /dev/null @@ -1,144 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.inference.preprocessing; - -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContent; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Collections; -import java.util.HashMap; -import java.util.Map; -import java.util.Objects; - -/** - * PreProcessor for one hot encoding a set of categorical values for a given field. 
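
The NGram pre-processor above derives its output feature names by concatenating featurePrefix, a dot, the n-gram size, and the position, and emits length - (nGram - 1) features per configured size. A runnable restatement of that arithmetic (standalone; the class name is illustrative):

import java.util.ArrayList;
import java.util.List;

public class NGramFeatureNames {

    // Same naming scheme as nGramFeature() above: prefix + "." + n + pos, no separator between n and pos.
    static List<String> featureNames(String prefix, List<Integer> nGrams, int length) {
        List<String> names = new ArrayList<>();
        for (int n : nGrams) {
            for (int pos = 0; pos < length - (n - 1); pos++) {
                names.add(prefix + "." + n + pos);
            }
        }
        return names;
    }

    public static void main(String[] args) {
        // A window of length 3 yields three 1-grams and two 2-grams:
        System.out.println(featureNames("f", List.of(1, 2), 3));
        // [f.10, f.11, f.12, f.20, f.21]
    }
}
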
- */ -public class OneHotEncoding implements PreProcessor { - - public static final String NAME = "one_hot_encoding"; - public static final ParseField FIELD = new ParseField("field"); - public static final ParseField HOT_MAP = new ParseField("hot_map"); - public static final ParseField CUSTOM = new ParseField("custom"); - - @SuppressWarnings("unchecked") - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - NAME, - true, - a -> new OneHotEncoding((String) a[0], (Map) a[1], (Boolean) a[2]) - ); - static { - PARSER.declareString(ConstructingObjectParser.constructorArg(), FIELD); - PARSER.declareObject(ConstructingObjectParser.constructorArg(), (p, c) -> p.mapStrings(), HOT_MAP); - PARSER.declareBoolean(ConstructingObjectParser.optionalConstructorArg(), CUSTOM); - } - - public static OneHotEncoding fromXContent(XContentParser parser) { - return PARSER.apply(parser, null); - } - - private final String field; - private final Map hotMap; - private final Boolean custom; - - OneHotEncoding(String field, Map hotMap, Boolean custom) { - this.field = Objects.requireNonNull(field); - this.hotMap = Collections.unmodifiableMap(Objects.requireNonNull(hotMap)); - this.custom = custom; - } - - /** - * @return Field name on which to one hot encode - */ - public String getField() { - return field; - } - - /** - * @return Map of Value: ColumnName for the one hot encoding - */ - public Map getHotMap() { - return hotMap; - } - - @Override - public String getName() { - return NAME; - } - - public Boolean getCustom() { - return custom; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { - builder.startObject(); - builder.field(FIELD.getPreferredName(), field); - builder.field(HOT_MAP.getPreferredName(), hotMap); - if (custom != null) { - builder.field(CUSTOM.getPreferredName(), custom); - } - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - OneHotEncoding that = (OneHotEncoding) o; - return Objects.equals(field, that.field) && Objects.equals(hotMap, that.hotMap) && Objects.equals(custom, that.custom); - } - - @Override - public int hashCode() { - return Objects.hash(field, hotMap, custom); - } - - public static Builder builder(String field) { - return new Builder(field); - } - - public static class Builder { - - private String field; - private Map hotMap = new HashMap<>(); - private Boolean custom; - - public Builder(String field) { - this.field = field; - } - - public Builder setField(String field) { - this.field = field; - return this; - } - - public Builder setHotMap(Map hotMap) { - this.hotMap = new HashMap<>(hotMap); - return this; - } - - public Builder addOneHot(String valueName, String oneHotFeatureName) { - this.hotMap.put(valueName, oneHotFeatureName); - return this; - } - - public Builder setCustom(boolean custom) { - this.custom = custom; - return this; - } - - public OneHotEncoding build() { - return new OneHotEncoding(field, hotMap, custom); - } - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/preprocessing/PreProcessor.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/preprocessing/PreProcessor.java deleted file mode 100644 index a9e21874313ad..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/preprocessing/PreProcessor.java +++ 
/dev/null @@ -1,21 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.inference.preprocessing; - -import org.elasticsearch.client.ml.inference.NamedXContentObject; - -/** - * Describes a pre-processor for a defined machine learning model - */ -public interface PreProcessor extends NamedXContentObject { - - /** - * @return The name of the pre-processor - */ - String getName(); -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/preprocessing/TargetMeanEncoding.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/preprocessing/TargetMeanEncoding.java deleted file mode 100644 index 713b2a67ba3de..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/preprocessing/TargetMeanEncoding.java +++ /dev/null @@ -1,192 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.inference.preprocessing; - -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContent; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Collections; -import java.util.HashMap; -import java.util.Map; -import java.util.Objects; - -/** - * PreProcessor for target mean encoding a set of categorical values for a given field. 
- */ -public class TargetMeanEncoding implements PreProcessor { - - public static final String NAME = "target_mean_encoding"; - public static final ParseField FIELD = new ParseField("field"); - public static final ParseField FEATURE_NAME = new ParseField("feature_name"); - public static final ParseField TARGET_MAP = new ParseField("target_map"); - public static final ParseField DEFAULT_VALUE = new ParseField("default_value"); - public static final ParseField CUSTOM = new ParseField("custom"); - - @SuppressWarnings("unchecked") - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - NAME, - true, - a -> new TargetMeanEncoding((String) a[0], (String) a[1], (Map) a[2], (Double) a[3], (Boolean) a[4]) - ); - static { - PARSER.declareString(ConstructingObjectParser.constructorArg(), FIELD); - PARSER.declareString(ConstructingObjectParser.constructorArg(), FEATURE_NAME); - PARSER.declareObject( - ConstructingObjectParser.constructorArg(), - (p, c) -> p.map(HashMap::new, XContentParser::doubleValue), - TARGET_MAP - ); - PARSER.declareDouble(ConstructingObjectParser.constructorArg(), DEFAULT_VALUE); - PARSER.declareBoolean(ConstructingObjectParser.optionalConstructorArg(), CUSTOM); - } - - public static TargetMeanEncoding fromXContent(XContentParser parser) { - return PARSER.apply(parser, null); - } - - private final String field; - private final String featureName; - private final Map meanMap; - private final double defaultValue; - private final Boolean custom; - - TargetMeanEncoding(String field, String featureName, Map meanMap, Double defaultValue, Boolean custom) { - this.field = Objects.requireNonNull(field); - this.featureName = Objects.requireNonNull(featureName); - this.meanMap = Collections.unmodifiableMap(Objects.requireNonNull(meanMap)); - this.defaultValue = Objects.requireNonNull(defaultValue); - this.custom = custom; - } - - /** - * @return Field name on which to target mean encode - */ - public String getField() { - return field; - } - - /** - * @return Map of Value: targetMean for the target mean encoding - */ - public Map getMeanMap() { - return meanMap; - } - - /** - * @return The default value to set when a previously unobserved value is seen - */ - public double getDefaultValue() { - return defaultValue; - } - - /** - * @return The feature name for the encoded value - */ - public String getFeatureName() { - return featureName; - } - - public Boolean getCustom() { - return custom; - } - - @Override - public String getName() { - return NAME; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { - builder.startObject(); - builder.field(FIELD.getPreferredName(), field); - builder.field(FEATURE_NAME.getPreferredName(), featureName); - builder.field(TARGET_MAP.getPreferredName(), meanMap); - builder.field(DEFAULT_VALUE.getPreferredName(), defaultValue); - if (custom != null) { - builder.field(CUSTOM.getPreferredName(), custom); - } - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - TargetMeanEncoding that = (TargetMeanEncoding) o; - return Objects.equals(field, that.field) - && Objects.equals(featureName, that.featureName) - && Objects.equals(meanMap, that.meanMap) - && Objects.equals(defaultValue, that.defaultValue) - && Objects.equals(custom, that.custom); - } - - @Override - public int hashCode() { - return Objects.hash(field, featureName, 
meanMap, defaultValue, custom); - } - - public Builder builder(String fieldName) { - return new Builder(fieldName); - } - - public static class Builder { - - private String field; - private String featureName; - private Map meanMap = new HashMap<>(); - private double defaultValue; - private Boolean custom; - - public Builder(String field) { - this.field = field; - } - - public String getField() { - return field; - } - - public Builder setField(String field) { - this.field = field; - return this; - } - - public Builder setFeatureName(String featureName) { - this.featureName = featureName; - return this; - } - - public Builder setMeanMap(Map meanMap) { - this.meanMap = meanMap; - return this; - } - - public Builder addMeanMapEntry(String valueName, double meanEncoding) { - this.meanMap.put(valueName, meanEncoding); - return this; - } - - public Builder setDefaultValue(double defaultValue) { - this.defaultValue = defaultValue; - return this; - } - - public Builder setCustom(boolean custom) { - this.custom = custom; - return this; - } - - public TargetMeanEncoding build() { - return new TargetMeanEncoding(field, featureName, meanMap, defaultValue, custom); - } - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/results/FeatureImportance.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/results/FeatureImportance.java deleted file mode 100644 index d68dfc88488a7..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/results/FeatureImportance.java +++ /dev/null @@ -1,175 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
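
Functionally, the removed TargetMeanEncoding maps a categorical value to the mean of the training target for that category, falling back to default_value for categories never seen at training time. The real transform runs server side; this standalone sketch only restates the lookup semantics implied by the fields above:

import java.util.Map;

public class TargetMeanEncodeDemo {

    static double encode(Map<String, Double> meanMap, double defaultValue, String value) {
        return meanMap.getOrDefault(value, defaultValue);
    }

    public static void main(String[] args) {
        Map<String, Double> means = Map.of("US", 5.5, "DE", 3.2);
        System.out.println(encode(means, 4.0, "US")); // 5.5
        System.out.println(encode(means, 4.0, "FR")); // 4.0 (unseen category -> default)
    }
}
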
- */ - -package org.elasticsearch.client.ml.inference.results; - -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParseException; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Collections; -import java.util.List; -import java.util.Objects; - -import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; -import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; - -public class FeatureImportance implements ToXContentObject { - - public static final String IMPORTANCE = "importance"; - public static final String FEATURE_NAME = "feature_name"; - public static final String CLASSES = "classes"; - - @SuppressWarnings("unchecked") - private static final ConstructingObjectParser<FeatureImportance, Void> PARSER = new ConstructingObjectParser<>( - "feature_importance", - true, - a -> new FeatureImportance((String) a[0], (Double) a[1], (List<ClassImportance>) a[2]) - ); - - static { - PARSER.declareString(constructorArg(), new ParseField(FeatureImportance.FEATURE_NAME)); - PARSER.declareDouble(optionalConstructorArg(), new ParseField(FeatureImportance.IMPORTANCE)); - PARSER.declareObjectArray( - optionalConstructorArg(), - (p, c) -> ClassImportance.fromXContent(p), - new ParseField(FeatureImportance.CLASSES) - ); - } - - public static FeatureImportance fromXContent(XContentParser parser) { - return PARSER.apply(parser, null); - } - - private final List<ClassImportance> classImportance; - private final Double importance; - private final String featureName; - - public FeatureImportance(String featureName, Double importance, List<ClassImportance> classImportance) { - this.featureName = Objects.requireNonNull(featureName); - this.importance = importance; - this.classImportance = classImportance == null ? 
null : Collections.unmodifiableList(classImportance); - } - - public List getClassImportance() { - return classImportance; - } - - public Double getImportance() { - return importance; - } - - public String getFeatureName() { - return featureName; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(FEATURE_NAME, featureName); - if (importance != null) { - builder.field(IMPORTANCE, importance); - } - if (classImportance != null && classImportance.isEmpty() == false) { - builder.field(CLASSES, classImportance); - } - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object object) { - if (object == this) { - return true; - } - if (object == null || getClass() != object.getClass()) { - return false; - } - FeatureImportance that = (FeatureImportance) object; - return Objects.equals(featureName, that.featureName) - && Objects.equals(importance, that.importance) - && Objects.equals(classImportance, that.classImportance); - } - - @Override - public int hashCode() { - return Objects.hash(featureName, importance, classImportance); - } - - public static class ClassImportance implements ToXContentObject { - - static final String CLASS_NAME = "class_name"; - - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "feature_importance_class_importance", - true, - a -> new ClassImportance(a[0], (Double) a[1]) - ); - - static { - PARSER.declareField(ConstructingObjectParser.constructorArg(), (p, c) -> { - if (p.currentToken() == XContentParser.Token.VALUE_STRING) { - return p.text(); - } else if (p.currentToken() == XContentParser.Token.VALUE_NUMBER) { - return p.numberValue(); - } else if (p.currentToken() == XContentParser.Token.VALUE_BOOLEAN) { - return p.booleanValue(); - } - throw new XContentParseException("Unsupported token [" + p.currentToken() + "]"); - }, new ParseField(CLASS_NAME), ObjectParser.ValueType.VALUE); - PARSER.declareDouble(constructorArg(), new ParseField(FeatureImportance.IMPORTANCE)); - } - - public static ClassImportance fromXContent(XContentParser parser) { - return PARSER.apply(parser, null); - } - - private final Object className; - private final double importance; - - public ClassImportance(Object className, double importance) { - this.className = className; - this.importance = importance; - } - - public Object getClassName() { - return className; - } - - public double getImportance() { - return importance; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(CLASS_NAME, className); - builder.field(IMPORTANCE, importance); - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - ClassImportance that = (ClassImportance) o; - return Double.compare(that.importance, importance) == 0 && Objects.equals(className, that.className); - } - - @Override - public int hashCode() { - return Objects.hash(className, importance); - } - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/results/TopClassEntry.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/results/TopClassEntry.java deleted file mode 100644 index 889677f6dd99f..0000000000000 --- 
a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/results/TopClassEntry.java +++ /dev/null @@ -1,113 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ - -package org.elasticsearch.client.ml.inference.results; - -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContent; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParseException; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Objects; - -import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; - -public class TopClassEntry implements ToXContentObject { - - public static final ParseField CLASS_NAME = new ParseField("class_name"); - public static final ParseField CLASS_PROBABILITY = new ParseField("class_probability"); - public static final ParseField CLASS_SCORE = new ParseField("class_score"); - - public static final String NAME = "top_class"; - - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - NAME, - true, - a -> new TopClassEntry(a[0], (Double) a[1], (Double) a[2]) - ); - - static { - PARSER.declareField(constructorArg(), (p, n) -> { - Object o; - XContentParser.Token token = p.currentToken(); - if (token == XContentParser.Token.VALUE_STRING) { - o = p.text(); - } else if (token == XContentParser.Token.VALUE_BOOLEAN) { - o = p.booleanValue(); - } else if (token == XContentParser.Token.VALUE_NUMBER) { - o = p.doubleValue(); - } else { - throw new XContentParseException( - p.getTokenLocation(), - "[" + NAME + "] failed to parse field [" + CLASS_NAME + "] value [" + token + "] is not a string, boolean or number" - ); - } - return o; - }, CLASS_NAME, ObjectParser.ValueType.VALUE); - PARSER.declareDouble(constructorArg(), CLASS_PROBABILITY); - PARSER.declareDouble(constructorArg(), CLASS_SCORE); - } - - public static TopClassEntry fromXContent(XContentParser parser) throws IOException { - return PARSER.parse(parser, null); - } - - private final Object classification; - private final double probability; - private final double score; - - public TopClassEntry(Object classification, double probability, double score) { - this.classification = Objects.requireNonNull(classification); - this.probability = probability; - this.score = score; - } - - public Object getClassification() { - return classification; - } - - public double getProbability() { - return probability; - } - - public double getScore() { - return score; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { - builder.startObject(); - builder.field(CLASS_NAME.getPreferredName(), classification); - builder.field(CLASS_PROBABILITY.getPreferredName(), probability); - builder.field(CLASS_SCORE.getPreferredName(), score); - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object object) { - if (object == this) { - return true; - } - if (object == null || getClass() != object.getClass()) { - 
return false; - } - TopClassEntry that = (TopClassEntry) object; - return Objects.equals(classification, that.classification) && probability == that.probability && score == that.score; - } - - @Override - public int hashCode() { - return Objects.hash(classification, probability, score); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/ClassificationConfig.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/ClassificationConfig.java deleted file mode 100644 index 1e63677bfd70b..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/ClassificationConfig.java +++ /dev/null @@ -1,119 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.inference.trainedmodel; - -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContent; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Objects; - -import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; - -public class ClassificationConfig implements InferenceConfig { - - public static final ParseField NAME = new ParseField("classification"); - - public static final ParseField RESULTS_FIELD = new ParseField("results_field"); - public static final ParseField NUM_TOP_CLASSES = new ParseField("num_top_classes"); - public static final ParseField TOP_CLASSES_RESULTS_FIELD = new ParseField("top_classes_results_field"); - public static final ParseField NUM_TOP_FEATURE_IMPORTANCE_VALUES = new ParseField("num_top_feature_importance_values"); - - private final Integer numTopClasses; - private final String topClassesResultsField; - private final String resultsField; - private final Integer numTopFeatureImportanceValues; - - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - NAME.getPreferredName(), - true, - args -> new ClassificationConfig((Integer) args[0], (String) args[1], (String) args[2], (Integer) args[3]) - ); - - static { - PARSER.declareInt(optionalConstructorArg(), NUM_TOP_CLASSES); - PARSER.declareString(optionalConstructorArg(), RESULTS_FIELD); - PARSER.declareString(optionalConstructorArg(), TOP_CLASSES_RESULTS_FIELD); - PARSER.declareInt(optionalConstructorArg(), NUM_TOP_FEATURE_IMPORTANCE_VALUES); - } - - public static ClassificationConfig fromXContent(XContentParser parser) { - return PARSER.apply(parser, null); - } - - public ClassificationConfig() { - this(null, null, null, null); - } - - public ClassificationConfig(Integer numTopClasses, String resultsField, String topClassesResultsField, Integer featureImportance) { - this.numTopClasses = numTopClasses; - this.topClassesResultsField = topClassesResultsField; - this.resultsField = resultsField; - this.numTopFeatureImportanceValues = featureImportance; - } - - public Integer getNumTopClasses() { - return numTopClasses; - } - - public String getTopClassesResultsField() { - return topClassesResultsField; - } - - public String getResultsField() 
{ - return resultsField; - } - - public Integer getNumTopFeatureImportanceValues() { - return numTopFeatureImportanceValues; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - ClassificationConfig that = (ClassificationConfig) o; - return Objects.equals(numTopClasses, that.numTopClasses) - && Objects.equals(topClassesResultsField, that.topClassesResultsField) - && Objects.equals(resultsField, that.resultsField) - && Objects.equals(numTopFeatureImportanceValues, that.numTopFeatureImportanceValues); - } - - @Override - public int hashCode() { - return Objects.hash(numTopClasses, topClassesResultsField, resultsField, numTopFeatureImportanceValues); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { - builder.startObject(); - if (numTopClasses != null) { - builder.field(NUM_TOP_CLASSES.getPreferredName(), numTopClasses); - } - if (topClassesResultsField != null) { - builder.field(TOP_CLASSES_RESULTS_FIELD.getPreferredName(), topClassesResultsField); - } - if (resultsField != null) { - builder.field(RESULTS_FIELD.getPreferredName(), resultsField); - } - if (numTopFeatureImportanceValues != null) { - builder.field(NUM_TOP_FEATURE_IMPORTANCE_VALUES.getPreferredName(), numTopFeatureImportanceValues); - } - builder.endObject(); - return builder; - } - - @Override - public String getName() { - return NAME.getPreferredName(); - } - -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/IndexLocation.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/IndexLocation.java deleted file mode 100644 index 44bcac9e67554..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/IndexLocation.java +++ /dev/null @@ -1,77 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
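
A hypothetical construction of the ClassificationConfig removed above, showing how the four nullable settings map onto the JSON fields written by its toXContent, which emits only non-null values. This assumes the class above, so it does not compile on its own:

ClassificationConfig config = new ClassificationConfig(2, "my_results", "my_top_classes", 3);
// Serialized form:
// {
//   "num_top_classes": 2,
//   "top_classes_results_field": "my_top_classes",
//   "results_field": "my_results",
//   "num_top_feature_importance_values": 3
// }
// new ClassificationConfig() leaves all four settings null and serializes as {}.
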
- */ - -package org.elasticsearch.client.ml.inference.trainedmodel; - -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Objects; - -public class IndexLocation implements TrainedModelLocation { - - public static final String INDEX = "index"; - private static final ParseField NAME = new ParseField("name"); - - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - INDEX, - true, - a -> new IndexLocation((String) a[0]) - ); - - static { - PARSER.declareString(ConstructingObjectParser.constructorArg(), NAME); - } - - public static IndexLocation fromXContent(XContentParser parser) throws IOException { - return PARSER.parse(parser, null); - } - - private final String index; - - public IndexLocation(String index) { - this.index = Objects.requireNonNull(index); - } - - public String getIndex() { - return index; - } - - @Override - public String getName() { - return INDEX; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(NAME.getPreferredName(), index); - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object o) { - if (this == o) { - return true; - } - if (o == null || getClass() != o.getClass()) { - return false; - } - IndexLocation that = (IndexLocation) o; - return Objects.equals(index, that.index); - } - - @Override - public int hashCode() { - return Objects.hash(index); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/InferenceConfig.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/InferenceConfig.java deleted file mode 100644 index 1b444cc14cbe2..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/InferenceConfig.java +++ /dev/null @@ -1,14 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.inference.trainedmodel; - -import org.elasticsearch.client.ml.inference.NamedXContentObject; - -public interface InferenceConfig extends NamedXContentObject { - -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/InferenceStats.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/InferenceStats.java deleted file mode 100644 index d668f7a2aa6fc..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/InferenceStats.java +++ /dev/null @@ -1,160 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
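
For reference, the IndexLocation removed above serializes as a single name field and, like the other NamedXContentObject implementations in this package, would be wrapped under its getName() key ("index") when written as a named object. A hypothetical example, assuming the class above:

IndexLocation location = new IndexLocation("trained-models");
// toXContent alone:          { "name": "trained-models" }
// wrapped as a named object: { "index": { "name": "trained-models" } }
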
- */ - -package org.elasticsearch.client.ml.inference.trainedmodel; - -import org.elasticsearch.client.common.TimeUtil; -import org.elasticsearch.core.Nullable; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.time.Instant; -import java.util.Objects; - -public class InferenceStats implements ToXContentObject { - - public static final String NAME = "inference_stats"; - public static final ParseField MISSING_ALL_FIELDS_COUNT = new ParseField("missing_all_fields_count"); - public static final ParseField INFERENCE_COUNT = new ParseField("inference_count"); - public static final ParseField CACHE_MISS_COUNT = new ParseField("cache_miss_count"); - public static final ParseField FAILURE_COUNT = new ParseField("failure_count"); - public static final ParseField TIMESTAMP = new ParseField("timestamp"); - - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - NAME, - true, - a -> new InferenceStats((Long) a[0], (Long) a[1], (Long) a[2], (Long) a[3], (Instant) a[4]) - ); - static { - PARSER.declareLong(ConstructingObjectParser.constructorArg(), MISSING_ALL_FIELDS_COUNT); - PARSER.declareLong(ConstructingObjectParser.constructorArg(), INFERENCE_COUNT); - PARSER.declareLong(ConstructingObjectParser.constructorArg(), FAILURE_COUNT); - PARSER.declareLong(ConstructingObjectParser.optionalConstructorArg(), CACHE_MISS_COUNT); - PARSER.declareField( - ConstructingObjectParser.constructorArg(), - p -> TimeUtil.parseTimeFieldToInstant(p, TIMESTAMP.getPreferredName()), - TIMESTAMP, - ObjectParser.ValueType.VALUE - ); - } - - private final long missingAllFieldsCount; - private final long inferenceCount; - private final long failureCount; - private final long cacheMissCount; - private final Instant timeStamp; - - private InferenceStats(Long missingAllFieldsCount, Long inferenceCount, Long failureCount, Long cacheMissCount, Instant instant) { - this( - unboxOrZero(missingAllFieldsCount), - unboxOrZero(inferenceCount), - unboxOrZero(failureCount), - unboxOrZero(cacheMissCount), - instant - ); - } - - public InferenceStats(long missingAllFieldsCount, long inferenceCount, long failureCount, long cacheMissCount, Instant timeStamp) { - this.missingAllFieldsCount = missingAllFieldsCount; - this.inferenceCount = inferenceCount; - this.failureCount = failureCount; - this.cacheMissCount = cacheMissCount; - this.timeStamp = timeStamp == null - ? Instant.ofEpochMilli(Instant.now().toEpochMilli()) - : Instant.ofEpochMilli(timeStamp.toEpochMilli()); - } - - /** - * How many times this model attempted to infer with all its fields missing - */ - public long getMissingAllFieldsCount() { - return missingAllFieldsCount; - } - - /** - * How many inference calls were made against this model - */ - public long getInferenceCount() { - return inferenceCount; - } - - /** - * How many inference failures occurred. - */ - public long getFailureCount() { - return failureCount; - } - - /** - * How many cache misses occurred when inferring this model - */ - public long getCacheMissCount() { - return cacheMissCount; - } - - /** - * The timestamp of these statistics. 
- */ - public Instant getTimeStamp() { - return timeStamp; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(FAILURE_COUNT.getPreferredName(), failureCount); - builder.field(INFERENCE_COUNT.getPreferredName(), inferenceCount); - builder.field(CACHE_MISS_COUNT.getPreferredName(), cacheMissCount); - builder.field(MISSING_ALL_FIELDS_COUNT.getPreferredName(), missingAllFieldsCount); - builder.timeField(TIMESTAMP.getPreferredName(), TIMESTAMP.getPreferredName() + "_string", timeStamp.toEpochMilli()); - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - InferenceStats that = (InferenceStats) o; - return missingAllFieldsCount == that.missingAllFieldsCount - && inferenceCount == that.inferenceCount - && failureCount == that.failureCount - && cacheMissCount == that.cacheMissCount - && Objects.equals(timeStamp, that.timeStamp); - } - - @Override - public int hashCode() { - return Objects.hash(missingAllFieldsCount, inferenceCount, failureCount, cacheMissCount, timeStamp); - } - - @Override - public String toString() { - return "InferenceStats{" - + "missingAllFieldsCount=" - + missingAllFieldsCount - + ", inferenceCount=" - + inferenceCount - + ", failureCount=" - + failureCount - + ", cacheMissCount=" - + cacheMissCount - + ", timeStamp=" - + timeStamp - + '}'; - } - - private static long unboxOrZero(@Nullable Long value) { - return value == null ? 0L : value; - } - -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/RegressionConfig.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/RegressionConfig.java deleted file mode 100644 index 496cceb4e5a17..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/RegressionConfig.java +++ /dev/null @@ -1,94 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ -package org.elasticsearch.client.ml.inference.trainedmodel; - -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContent; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Objects; - -import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; - -public class RegressionConfig implements InferenceConfig { - - public static final ParseField NAME = new ParseField("regression"); - public static final ParseField RESULTS_FIELD = new ParseField("results_field"); - public static final ParseField NUM_TOP_FEATURE_IMPORTANCE_VALUES = new ParseField("num_top_feature_importance_values"); - - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - NAME.getPreferredName(), - true, - args -> new RegressionConfig((String) args[0], (Integer) args[1]) - ); - - static { - PARSER.declareString(optionalConstructorArg(), RESULTS_FIELD); - PARSER.declareInt(optionalConstructorArg(), NUM_TOP_FEATURE_IMPORTANCE_VALUES); - } - - public static RegressionConfig fromXContent(XContentParser parser) { - return PARSER.apply(parser, null); - } - - private final String resultsField; - private final Integer numTopFeatureImportanceValues; - - public RegressionConfig() { - this(null, null); - } - - public RegressionConfig(String resultsField, Integer numTopFeatureImportanceValues) { - this.resultsField = resultsField; - this.numTopFeatureImportanceValues = numTopFeatureImportanceValues; - } - - public Integer getNumTopFeatureImportanceValues() { - return numTopFeatureImportanceValues; - } - - public String getResultsField() { - return resultsField; - } - - @Override - public String getName() { - return NAME.getPreferredName(); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { - builder.startObject(); - if (resultsField != null) { - builder.field(RESULTS_FIELD.getPreferredName(), resultsField); - } - if (numTopFeatureImportanceValues != null) { - builder.field(NUM_TOP_FEATURE_IMPORTANCE_VALUES.getPreferredName(), numTopFeatureImportanceValues); - } - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - RegressionConfig that = (RegressionConfig) o; - return Objects.equals(this.resultsField, that.resultsField) - && Objects.equals(this.numTopFeatureImportanceValues, that.numTopFeatureImportanceValues); - } - - @Override - public int hashCode() { - return Objects.hash(resultsField, numTopFeatureImportanceValues); - } - -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/TargetType.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/TargetType.java deleted file mode 100644 index 4f5ce1aecadcc..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/TargetType.java +++ /dev/null @@ -1,29 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. 
Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.inference.trainedmodel; - -import org.elasticsearch.xcontent.ParseField; - -import java.util.Locale; - -public enum TargetType { - - REGRESSION, - CLASSIFICATION; - - public static final ParseField TARGET_TYPE = new ParseField("target_type"); - - public static TargetType fromString(String name) { - return valueOf(name.trim().toUpperCase(Locale.ROOT)); - } - - @Override - public String toString() { - return name().toLowerCase(Locale.ROOT); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/TrainedModel.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/TrainedModel.java deleted file mode 100644 index 76d5538708f52..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/TrainedModel.java +++ /dev/null @@ -1,25 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.inference.trainedmodel; - -import org.elasticsearch.client.ml.inference.NamedXContentObject; - -import java.util.List; - -public interface TrainedModel extends NamedXContentObject { - - /** - * @return List of featureNames expected by the model. In the order that they are expected - */ - List getFeatureNames(); - - /** - * @return The name of the model - */ - String getName(); -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/TrainedModelLocation.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/TrainedModelLocation.java deleted file mode 100644 index cb86b0d121c1f..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/TrainedModelLocation.java +++ /dev/null @@ -1,13 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ - -package org.elasticsearch.client.ml.inference.trainedmodel; - -import org.elasticsearch.client.ml.inference.NamedXContentObject; - -public interface TrainedModelLocation extends NamedXContentObject {} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/ensemble/Ensemble.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/ensemble/Ensemble.java deleted file mode 100644 index c19e50b46c824..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/ensemble/Ensemble.java +++ /dev/null @@ -1,198 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. 
Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.inference.trainedmodel.ensemble; - -import org.elasticsearch.client.ml.inference.NamedXContentObjectHelper; -import org.elasticsearch.client.ml.inference.trainedmodel.TargetType; -import org.elasticsearch.client.ml.inference.trainedmodel.TrainedModel; -import org.elasticsearch.core.Nullable; -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContent; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Arrays; -import java.util.Collections; -import java.util.List; -import java.util.Objects; - -public class Ensemble implements TrainedModel { - - public static final String NAME = "ensemble"; - public static final ParseField FEATURE_NAMES = new ParseField("feature_names"); - public static final ParseField TRAINED_MODELS = new ParseField("trained_models"); - public static final ParseField AGGREGATE_OUTPUT = new ParseField("aggregate_output"); - public static final ParseField CLASSIFICATION_LABELS = new ParseField("classification_labels"); - public static final ParseField CLASSIFICATION_WEIGHTS = new ParseField("classification_weights"); - - private static final ObjectParser PARSER = new ObjectParser<>(NAME, true, Ensemble.Builder::new); - - static { - PARSER.declareStringArray(Ensemble.Builder::setFeatureNames, FEATURE_NAMES); - PARSER.declareNamedObjects( - Ensemble.Builder::setTrainedModels, - (p, c, n) -> p.namedObject(TrainedModel.class, n, null), - (ensembleBuilder) -> { /* Noop does not matter client side */ }, - TRAINED_MODELS - ); - PARSER.declareNamedObject( - Ensemble.Builder::setOutputAggregator, - (p, c, n) -> p.namedObject(OutputAggregator.class, n, null), - AGGREGATE_OUTPUT - ); - PARSER.declareString(Ensemble.Builder::setTargetType, TargetType.TARGET_TYPE); - PARSER.declareStringArray(Ensemble.Builder::setClassificationLabels, CLASSIFICATION_LABELS); - PARSER.declareDoubleArray(Ensemble.Builder::setClassificationWeights, CLASSIFICATION_WEIGHTS); - } - - public static Ensemble fromXContent(XContentParser parser) { - return PARSER.apply(parser, null).build(); - } - - private final List featureNames; - private final List models; - private final OutputAggregator outputAggregator; - private final TargetType targetType; - private final List classificationLabels; - private final double[] classificationWeights; - - Ensemble( - List featureNames, - List models, - @Nullable OutputAggregator outputAggregator, - TargetType targetType, - @Nullable List classificationLabels, - @Nullable double[] classificationWeights - ) { - this.featureNames = featureNames; - this.models = models; - this.outputAggregator = outputAggregator; - this.targetType = targetType; - this.classificationLabels = classificationLabels; - this.classificationWeights = classificationWeights; - } - - @Override - public List getFeatureNames() { - return featureNames; - } - - @Override - public String getName() { - return NAME; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { - builder.startObject(); - if (featureNames != null && featureNames.isEmpty() == false) { - 
builder.field(FEATURE_NAMES.getPreferredName(), featureNames); - } - if (models != null) { - NamedXContentObjectHelper.writeNamedObjects(builder, params, true, TRAINED_MODELS.getPreferredName(), models); - } - if (outputAggregator != null) { - NamedXContentObjectHelper.writeNamedObjects( - builder, - params, - false, - AGGREGATE_OUTPUT.getPreferredName(), - Collections.singletonList(outputAggregator) - ); - } - if (targetType != null) { - builder.field(TargetType.TARGET_TYPE.getPreferredName(), targetType); - } - if (classificationLabels != null) { - builder.field(CLASSIFICATION_LABELS.getPreferredName(), classificationLabels); - } - if (classificationWeights != null) { - builder.field(CLASSIFICATION_WEIGHTS.getPreferredName(), classificationWeights); - } - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - Ensemble that = (Ensemble) o; - return Objects.equals(featureNames, that.featureNames) - && Objects.equals(models, that.models) - && Objects.equals(targetType, that.targetType) - && Objects.equals(classificationLabels, that.classificationLabels) - && Arrays.equals(classificationWeights, that.classificationWeights) - && Objects.equals(outputAggregator, that.outputAggregator); - } - - @Override - public int hashCode() { - return Objects.hash( - featureNames, - models, - outputAggregator, - classificationLabels, - targetType, - Arrays.hashCode(classificationWeights) - ); - } - - public static Builder builder() { - return new Builder(); - } - - public static class Builder { - private List featureNames = Collections.emptyList(); - private List trainedModels; - private OutputAggregator outputAggregator; - private TargetType targetType; - private List classificationLabels; - private double[] classificationWeights; - - public Builder setFeatureNames(List featureNames) { - this.featureNames = featureNames; - return this; - } - - public Builder setTrainedModels(List trainedModels) { - this.trainedModels = trainedModels; - return this; - } - - public Builder setOutputAggregator(OutputAggregator outputAggregator) { - this.outputAggregator = outputAggregator; - return this; - } - - public Builder setTargetType(TargetType targetType) { - this.targetType = targetType; - return this; - } - - public Builder setClassificationLabels(List classificationLabels) { - this.classificationLabels = classificationLabels; - return this; - } - - public Builder setClassificationWeights(List classificationWeights) { - this.classificationWeights = classificationWeights.stream().mapToDouble(Double::doubleValue).toArray(); - return this; - } - - private void setTargetType(String targetType) { - this.targetType = TargetType.fromString(targetType); - } - - public Ensemble build() { - return new Ensemble(featureNames, trainedModels, outputAggregator, targetType, classificationLabels, classificationWeights); - } - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/ensemble/Exponent.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/ensemble/Exponent.java deleted file mode 100644 index 22fc6f7ef3f55..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/ensemble/Exponent.java +++ /dev/null @@ -1,71 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. 
Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.inference.trainedmodel.ensemble; - -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.List; -import java.util.Objects; - -public class Exponent implements OutputAggregator { - - public static final String NAME = "exponent"; - public static final ParseField WEIGHTS = new ParseField("weights"); - - @SuppressWarnings("unchecked") - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - NAME, - true, - a -> new Exponent((List) a[0]) - ); - static { - PARSER.declareDoubleArray(ConstructingObjectParser.optionalConstructorArg(), WEIGHTS); - } - - public static Exponent fromXContent(XContentParser parser) { - return PARSER.apply(parser, null); - } - - private final List weights; - - public Exponent(List weights) { - this.weights = weights; - } - - @Override - public String getName() { - return NAME; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - if (weights != null) { - builder.field(WEIGHTS.getPreferredName(), weights); - } - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - Exponent that = (Exponent) o; - return Objects.equals(weights, that.weights); - } - - @Override - public int hashCode() { - return Objects.hash(weights); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/ensemble/LogisticRegression.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/ensemble/LogisticRegression.java deleted file mode 100644 index 19693a728d2ee..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/ensemble/LogisticRegression.java +++ /dev/null @@ -1,71 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ -package org.elasticsearch.client.ml.inference.trainedmodel.ensemble; - -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.List; -import java.util.Objects; - -public class LogisticRegression implements OutputAggregator { - - public static final String NAME = "logistic_regression"; - public static final ParseField WEIGHTS = new ParseField("weights"); - - @SuppressWarnings("unchecked") - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - NAME, - true, - a -> new LogisticRegression((List) a[0]) - ); - static { - PARSER.declareDoubleArray(ConstructingObjectParser.optionalConstructorArg(), WEIGHTS); - } - - public static LogisticRegression fromXContent(XContentParser parser) { - return PARSER.apply(parser, null); - } - - private final List weights; - - public LogisticRegression(List weights) { - this.weights = weights; - } - - @Override - public String getName() { - return NAME; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - if (weights != null) { - builder.field(WEIGHTS.getPreferredName(), weights); - } - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - LogisticRegression that = (LogisticRegression) o; - return Objects.equals(weights, that.weights); - } - - @Override - public int hashCode() { - return Objects.hash(weights); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/ensemble/OutputAggregator.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/ensemble/OutputAggregator.java deleted file mode 100644 index 8a0f50d3a8bb6..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/ensemble/OutputAggregator.java +++ /dev/null @@ -1,17 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.inference.trainedmodel.ensemble; - -import org.elasticsearch.client.ml.inference.NamedXContentObject; - -public interface OutputAggregator extends NamedXContentObject { - /** - * @return The name of the output aggregator - */ - String getName(); -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/ensemble/WeightedMode.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/ensemble/WeightedMode.java deleted file mode 100644 index 422dfb0a21fc1..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/ensemble/WeightedMode.java +++ /dev/null @@ -1,77 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. 
Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.inference.trainedmodel.ensemble; - -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContent; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.List; -import java.util.Objects; - -public class WeightedMode implements OutputAggregator { - - public static final String NAME = "weighted_mode"; - public static final ParseField WEIGHTS = new ParseField("weights"); - public static final ParseField NUM_CLASSES = new ParseField("num_classes"); - - @SuppressWarnings("unchecked") - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - NAME, - true, - a -> new WeightedMode((Integer) a[0], (List) a[1]) - ); - static { - PARSER.declareInt(ConstructingObjectParser.constructorArg(), NUM_CLASSES); - PARSER.declareDoubleArray(ConstructingObjectParser.optionalConstructorArg(), WEIGHTS); - } - - public static WeightedMode fromXContent(XContentParser parser) { - return PARSER.apply(parser, null); - } - - private final List weights; - private final int numClasses; - - public WeightedMode(int numClasses, List weights) { - this.weights = weights; - this.numClasses = numClasses; - } - - @Override - public String getName() { - return NAME; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { - builder.startObject(); - if (weights != null) { - builder.field(WEIGHTS.getPreferredName(), weights); - } - builder.field(NUM_CLASSES.getPreferredName(), numClasses); - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - WeightedMode that = (WeightedMode) o; - return Objects.equals(weights, that.weights) && numClasses == that.numClasses; - } - - @Override - public int hashCode() { - return Objects.hash(weights, numClasses); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/ensemble/WeightedSum.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/ensemble/WeightedSum.java deleted file mode 100644 index a36c13b138f78..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/ensemble/WeightedSum.java +++ /dev/null @@ -1,73 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ -package org.elasticsearch.client.ml.inference.trainedmodel.ensemble; - -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContent; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.List; -import java.util.Objects; - -public class WeightedSum implements OutputAggregator { - - public static final String NAME = "weighted_sum"; - public static final ParseField WEIGHTS = new ParseField("weights"); - - @SuppressWarnings("unchecked") - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - NAME, - true, - a -> new WeightedSum((List) a[0]) - ); - - static { - PARSER.declareDoubleArray(ConstructingObjectParser.optionalConstructorArg(), WEIGHTS); - } - - public static WeightedSum fromXContent(XContentParser parser) { - return PARSER.apply(parser, null); - } - - private final List weights; - - public WeightedSum(List weights) { - this.weights = weights; - } - - @Override - public String getName() { - return NAME; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { - builder.startObject(); - if (weights != null) { - builder.field(WEIGHTS.getPreferredName(), weights); - } - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - WeightedSum that = (WeightedSum) o; - return Objects.equals(weights, that.weights); - } - - @Override - public int hashCode() { - return Objects.hash(weights); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/langident/LangIdentNeuralNetwork.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/langident/LangIdentNeuralNetwork.java deleted file mode 100644 index 89f5625331cd7..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/langident/LangIdentNeuralNetwork.java +++ /dev/null @@ -1,94 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ -package org.elasticsearch.client.ml.inference.trainedmodel.langident; - -import org.elasticsearch.client.ml.inference.trainedmodel.TrainedModel; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContent; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Collections; -import java.util.List; -import java.util.Objects; - -import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; - -/** - * Shallow, fully connected, feed forward NN modeled after and ported from https://github.com/google/cld3 - */ -public class LangIdentNeuralNetwork implements TrainedModel { - - public static final String NAME = "lang_ident_neural_network"; - public static final ParseField EMBEDDED_VECTOR_FEATURE_NAME = new ParseField("embedded_vector_feature_name"); - public static final ParseField HIDDEN_LAYER = new ParseField("hidden_layer"); - public static final ParseField SOFTMAX_LAYER = new ParseField("softmax_layer"); - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - NAME, - true, - a -> new LangIdentNeuralNetwork((String) a[0], (LangNetLayer) a[1], (LangNetLayer) a[2]) - ); - - static { - PARSER.declareString(constructorArg(), EMBEDDED_VECTOR_FEATURE_NAME); - PARSER.declareObject(constructorArg(), LangNetLayer.PARSER::apply, HIDDEN_LAYER); - PARSER.declareObject(constructorArg(), LangNetLayer.PARSER::apply, SOFTMAX_LAYER); - } - - public static LangIdentNeuralNetwork fromXContent(XContentParser parser) { - return PARSER.apply(parser, null); - } - - private final LangNetLayer hiddenLayer; - private final LangNetLayer softmaxLayer; - private final String embeddedVectorFeatureName; - - LangIdentNeuralNetwork(String embeddedVectorFeatureName, LangNetLayer hiddenLayer, LangNetLayer softmaxLayer) { - this.embeddedVectorFeatureName = embeddedVectorFeatureName; - this.hiddenLayer = hiddenLayer; - this.softmaxLayer = softmaxLayer; - } - - @Override - public List getFeatureNames() { - return Collections.singletonList(embeddedVectorFeatureName); - } - - @Override - public String getName() { - return NAME; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { - builder.startObject(); - builder.field(EMBEDDED_VECTOR_FEATURE_NAME.getPreferredName(), embeddedVectorFeatureName); - builder.field(HIDDEN_LAYER.getPreferredName(), hiddenLayer); - builder.field(SOFTMAX_LAYER.getPreferredName(), softmaxLayer); - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - LangIdentNeuralNetwork that = (LangIdentNeuralNetwork) o; - return Objects.equals(embeddedVectorFeatureName, that.embeddedVectorFeatureName) - && Objects.equals(hiddenLayer, that.hiddenLayer) - && Objects.equals(softmaxLayer, that.softmaxLayer); - } - - @Override - public int hashCode() { - return Objects.hash(embeddedVectorFeatureName, hiddenLayer, softmaxLayer); - } - -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/langident/LangNetLayer.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/langident/LangNetLayer.java deleted file mode 100644 index 9737a577725f7..0000000000000 --- 
a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/langident/LangNetLayer.java +++ /dev/null @@ -1,111 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.inference.trainedmodel.langident; - -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Arrays; -import java.util.List; -import java.util.Objects; - -import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; - -/** - * Represents a single layer in the compressed Lang Net - */ -public class LangNetLayer implements ToXContentObject { - - public static final ParseField NAME = new ParseField("lang_net_layer"); - - private static final ParseField NUM_ROWS = new ParseField("num_rows"); - private static final ParseField NUM_COLS = new ParseField("num_cols"); - private static final ParseField WEIGHTS = new ParseField("weights"); - private static final ParseField BIAS = new ParseField("bias"); - - @SuppressWarnings("unchecked") - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - NAME.getPreferredName(), - true, - a -> new LangNetLayer((List) a[0], (int) a[1], (int) a[2], (List) a[3]) - ); - - static { - PARSER.declareDoubleArray(constructorArg(), WEIGHTS); - PARSER.declareInt(constructorArg(), NUM_COLS); - PARSER.declareInt(constructorArg(), NUM_ROWS); - PARSER.declareDoubleArray(constructorArg(), BIAS); - } - - private final double[] weights; - private final int weightRows; - private final int weightCols; - private final double[] bias; - - private LangNetLayer(List weights, int numCols, int numRows, List bias) { - this( - weights.stream().mapToDouble(Double::doubleValue).toArray(), - numCols, - numRows, - bias.stream().mapToDouble(Double::doubleValue).toArray() - ); - } - - LangNetLayer(double[] weights, int numCols, int numRows, double[] bias) { - this.weights = weights; - this.weightCols = numCols; - this.weightRows = numRows; - this.bias = bias; - } - - double[] getWeights() { - return weights; - } - - int getWeightRows() { - return weightRows; - } - - int getWeightCols() { - return weightCols; - } - - double[] getBias() { - return bias; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(NUM_COLS.getPreferredName(), weightCols); - builder.field(NUM_ROWS.getPreferredName(), weightRows); - builder.field(WEIGHTS.getPreferredName(), weights); - builder.field(BIAS.getPreferredName(), bias); - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - LangNetLayer that = (LangNetLayer) o; - return Arrays.equals(weights, that.weights) - && Arrays.equals(bias, that.bias) - && Objects.equals(weightCols, that.weightCols) - && Objects.equals(weightRows, that.weightRows); - } - - @Override - public int hashCode() { - return Objects.hash(Arrays.hashCode(weights), 
Arrays.hashCode(bias), weightCols, weightRows); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/tree/Tree.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/tree/Tree.java deleted file mode 100644 index 7d0b633693e7d..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/tree/Tree.java +++ /dev/null @@ -1,231 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.inference.trainedmodel.tree; - -import org.elasticsearch.client.ml.inference.trainedmodel.TargetType; -import org.elasticsearch.client.ml.inference.trainedmodel.TrainedModel; -import org.elasticsearch.common.Strings; -import org.elasticsearch.core.Nullable; -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collections; -import java.util.List; -import java.util.Objects; -import java.util.stream.Collectors; - -public class Tree implements TrainedModel { - - public static final String NAME = "tree"; - - public static final ParseField FEATURE_NAMES = new ParseField("feature_names"); - public static final ParseField TREE_STRUCTURE = new ParseField("tree_structure"); - public static final ParseField CLASSIFICATION_LABELS = new ParseField("classification_labels"); - - private static final ObjectParser PARSER = new ObjectParser<>(NAME, true, Builder::new); - - static { - PARSER.declareStringArray(Builder::setFeatureNames, FEATURE_NAMES); - PARSER.declareObjectArray(Builder::setNodes, (p, c) -> TreeNode.fromXContent(p), TREE_STRUCTURE); - PARSER.declareString(Builder::setTargetType, TargetType.TARGET_TYPE); - PARSER.declareStringArray(Builder::setClassificationLabels, CLASSIFICATION_LABELS); - } - - public static Tree fromXContent(XContentParser parser) { - return PARSER.apply(parser, null).build(); - } - - private final List featureNames; - private final List nodes; - private final TargetType targetType; - private final List classificationLabels; - - Tree(List featureNames, List nodes, TargetType targetType, List classificationLabels) { - this.featureNames = featureNames; - this.nodes = nodes; - this.targetType = targetType; - this.classificationLabels = classificationLabels; - } - - @Override - public String getName() { - return NAME; - } - - @Override - public List getFeatureNames() { - return featureNames; - } - - public List getNodes() { - return nodes; - } - - @Nullable - public List getClassificationLabels() { - return classificationLabels; - } - - public TargetType getTargetType() { - return targetType; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - if (featureNames != null) { - builder.field(FEATURE_NAMES.getPreferredName(), featureNames); - } - if (nodes != null) { - builder.field(TREE_STRUCTURE.getPreferredName(), nodes); - } - if (classificationLabels != null) { - 
builder.field(CLASSIFICATION_LABELS.getPreferredName(), classificationLabels); - } - if (targetType != null) { - builder.field(TargetType.TARGET_TYPE.getPreferredName(), targetType.toString()); - } - builder.endObject(); - return builder; - } - - @Override - public String toString() { - return Strings.toString(this); - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - Tree that = (Tree) o; - return Objects.equals(featureNames, that.featureNames) - && Objects.equals(classificationLabels, that.classificationLabels) - && Objects.equals(targetType, that.targetType) - && Objects.equals(nodes, that.nodes); - } - - @Override - public int hashCode() { - return Objects.hash(featureNames, nodes, targetType, classificationLabels); - } - - public static Builder builder() { - return new Builder(); - } - - public static class Builder { - private List featureNames; - private ArrayList nodes; - private int numNodes; - private TargetType targetType; - private List classificationLabels; - - public Builder() { - nodes = new ArrayList<>(); - // allocate space in the root node and set to a leaf - nodes.add(null); - addLeaf(0, 0.0); - numNodes = 1; - } - - public Builder setFeatureNames(List featureNames) { - this.featureNames = featureNames; - return this; - } - - public Builder addNode(TreeNode.Builder node) { - nodes.add(node); - return this; - } - - public Builder setNodes(List nodes) { - this.nodes = new ArrayList<>(nodes); - return this; - } - - public Builder setNodes(TreeNode.Builder... nodes) { - return setNodes(Arrays.asList(nodes)); - } - - public Builder setTargetType(TargetType targetType) { - this.targetType = targetType; - return this; - } - - public Builder setClassificationLabels(List classificationLabels) { - this.classificationLabels = classificationLabels; - return this; - } - - private void setTargetType(String targetType) { - this.targetType = TargetType.fromString(targetType); - } - - /** - * Add a decision node. Space for the child nodes is allocated - * @param nodeIndex Where to place the node. This is either 0 (root) or an existing child node index - * @param featureIndex The feature index the decision is made on - * @param isDefaultLeft Default left branch if the feature is missing - * @param decisionThreshold The decision threshold - * @return The created node - */ - public TreeNode.Builder addJunction(int nodeIndex, int featureIndex, boolean isDefaultLeft, double decisionThreshold) { - int leftChild = numNodes++; - int rightChild = numNodes++; - nodes.ensureCapacity(nodeIndex + 1); - for (int i = nodes.size(); i < nodeIndex + 1; i++) { - nodes.add(null); - } - - TreeNode.Builder node = TreeNode.builder(nodeIndex) - .setDefaultLeft(isDefaultLeft) - .setLeftChild(leftChild) - .setRightChild(rightChild) - .setSplitFeature(featureIndex) - .setThreshold(decisionThreshold); - nodes.set(nodeIndex, node); - - // allocate space for the child nodes - while (nodes.size() <= rightChild) { - nodes.add(null); - } - - return node; - } - - /** - * Sets the node at {@code nodeIndex} to a leaf node. 
- * @param nodeIndex The index as allocated by a call to {@link #addJunction(int, int, boolean, double)} - * @param value The prediction value - * @return this - */ - public Builder addLeaf(int nodeIndex, double value) { - for (int i = nodes.size(); i < nodeIndex + 1; i++) { - nodes.add(null); - } - nodes.set(nodeIndex, TreeNode.builder(nodeIndex).setLeafValue(Collections.singletonList(value))); - return this; - } - - public Tree build() { - return new Tree( - featureNames, - nodes.stream().map(TreeNode.Builder::build).collect(Collectors.toList()), - targetType, - classificationLabels - ); - } - } - -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/tree/TreeNode.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/tree/TreeNode.java deleted file mode 100644 index cb7d9a0f8f211..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/tree/TreeNode.java +++ /dev/null @@ -1,286 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.inference.trainedmodel.tree; - -import org.elasticsearch.client.ml.job.config.Operator; -import org.elasticsearch.common.Strings; -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.List; -import java.util.Objects; - -public class TreeNode implements ToXContentObject { - - public static final String NAME = "tree_node"; - - public static final ParseField DECISION_TYPE = new ParseField("decision_type"); - public static final ParseField THRESHOLD = new ParseField("threshold"); - public static final ParseField LEFT_CHILD = new ParseField("left_child"); - public static final ParseField RIGHT_CHILD = new ParseField("right_child"); - public static final ParseField DEFAULT_LEFT = new ParseField("default_left"); - public static final ParseField SPLIT_FEATURE = new ParseField("split_feature"); - public static final ParseField NODE_INDEX = new ParseField("node_index"); - public static final ParseField SPLIT_GAIN = new ParseField("split_gain"); - public static final ParseField LEAF_VALUE = new ParseField("leaf_value"); - public static final ParseField NUMBER_SAMPLES = new ParseField("number_samples"); - - private static final ObjectParser PARSER = new ObjectParser<>(NAME, true, Builder::new); - static { - PARSER.declareDouble(Builder::setThreshold, THRESHOLD); - PARSER.declareField(Builder::setOperator, p -> Operator.fromString(p.text()), DECISION_TYPE, ObjectParser.ValueType.STRING); - PARSER.declareInt(Builder::setLeftChild, LEFT_CHILD); - PARSER.declareInt(Builder::setRightChild, RIGHT_CHILD); - PARSER.declareBoolean(Builder::setDefaultLeft, DEFAULT_LEFT); - PARSER.declareInt(Builder::setSplitFeature, SPLIT_FEATURE); - PARSER.declareInt(Builder::setNodeIndex, NODE_INDEX); - PARSER.declareDouble(Builder::setSplitGain, SPLIT_GAIN); - PARSER.declareDoubleArray(Builder::setLeafValue, LEAF_VALUE); - PARSER.declareLong(Builder::setNumberSamples, 
NUMBER_SAMPLES); - } - - public static Builder fromXContent(XContentParser parser) { - return PARSER.apply(parser, null); - } - - private final Operator operator; - private final Double threshold; - private final Integer splitFeature; - private final int nodeIndex; - private final Double splitGain; - private final List leafValue; - private final Boolean defaultLeft; - private final Integer leftChild; - private final Integer rightChild; - private final Long numberSamples; - - TreeNode( - Operator operator, - Double threshold, - Integer splitFeature, - int nodeIndex, - Double splitGain, - List leafValue, - Boolean defaultLeft, - Integer leftChild, - Integer rightChild, - Long numberSamples - ) { - this.operator = operator; - this.threshold = threshold; - this.splitFeature = splitFeature; - this.nodeIndex = nodeIndex; - this.splitGain = splitGain; - this.leafValue = leafValue; - this.defaultLeft = defaultLeft; - this.leftChild = leftChild; - this.rightChild = rightChild; - this.numberSamples = numberSamples; - } - - public Operator getOperator() { - return operator; - } - - public Double getThreshold() { - return threshold; - } - - public Integer getSplitFeature() { - return splitFeature; - } - - public Integer getNodeIndex() { - return nodeIndex; - } - - public Double getSplitGain() { - return splitGain; - } - - public List getLeafValue() { - return leafValue; - } - - public Boolean isDefaultLeft() { - return defaultLeft; - } - - public Integer getLeftChild() { - return leftChild; - } - - public Integer getRightChild() { - return rightChild; - } - - public Long getNumberSamples() { - return numberSamples; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - addOptionalField(builder, DECISION_TYPE, operator); - addOptionalField(builder, THRESHOLD, threshold); - addOptionalField(builder, SPLIT_FEATURE, splitFeature); - addOptionalField(builder, SPLIT_GAIN, splitGain); - addOptionalField(builder, NODE_INDEX, nodeIndex); - addOptionalField(builder, LEAF_VALUE, leafValue); - addOptionalField(builder, DEFAULT_LEFT, defaultLeft); - addOptionalField(builder, LEFT_CHILD, leftChild); - addOptionalField(builder, RIGHT_CHILD, rightChild); - addOptionalField(builder, NUMBER_SAMPLES, numberSamples); - builder.endObject(); - return builder; - } - - private void addOptionalField(XContentBuilder builder, ParseField field, Object value) throws IOException { - if (value != null) { - builder.field(field.getPreferredName(), value); - } - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - TreeNode that = (TreeNode) o; - return Objects.equals(operator, that.operator) - && Objects.equals(threshold, that.threshold) - && Objects.equals(splitFeature, that.splitFeature) - && Objects.equals(nodeIndex, that.nodeIndex) - && Objects.equals(splitGain, that.splitGain) - && Objects.equals(leafValue, that.leafValue) - && Objects.equals(defaultLeft, that.defaultLeft) - && Objects.equals(leftChild, that.leftChild) - && Objects.equals(rightChild, that.rightChild) - && Objects.equals(numberSamples, that.numberSamples); - } - - @Override - public int hashCode() { - return Objects.hash( - operator, - threshold, - splitFeature, - splitGain, - nodeIndex, - leafValue, - defaultLeft, - leftChild, - rightChild, - numberSamples - ); - } - - @Override - public String toString() { - return Strings.toString(this); - } - - public static Builder builder(int 
nodeIndex) { - return new Builder(nodeIndex); - } - - public static class Builder { - private Operator operator; - private Double threshold; - private Integer splitFeature; - private int nodeIndex; - private Double splitGain; - private List leafValue; - private Boolean defaultLeft; - private Integer leftChild; - private Integer rightChild; - private Long numberSamples; - - public Builder(int nodeIndex) { - this.nodeIndex = nodeIndex; - } - - private Builder() {} - - public Builder setOperator(Operator operator) { - this.operator = operator; - return this; - } - - public Builder setThreshold(Double threshold) { - this.threshold = threshold; - return this; - } - - public Builder setSplitFeature(Integer splitFeature) { - this.splitFeature = splitFeature; - return this; - } - - public Builder setNodeIndex(int nodeIndex) { - this.nodeIndex = nodeIndex; - return this; - } - - public Builder setSplitGain(Double splitGain) { - this.splitGain = splitGain; - return this; - } - - public Builder setLeafValue(List leafValue) { - this.leafValue = leafValue; - return this; - } - - public Builder setDefaultLeft(Boolean defaultLeft) { - this.defaultLeft = defaultLeft; - return this; - } - - public Builder setLeftChild(Integer leftChild) { - this.leftChild = leftChild; - return this; - } - - public Integer getLeftChild() { - return leftChild; - } - - public Builder setRightChild(Integer rightChild) { - this.rightChild = rightChild; - return this; - } - - public Integer getRightChild() { - return rightChild; - } - - public Builder setNumberSamples(Long numberSamples) { - this.numberSamples = numberSamples; - return this; - } - - public TreeNode build() { - return new TreeNode( - operator, - threshold, - splitFeature, - nodeIndex, - splitGain, - leafValue, - defaultLeft, - leftChild, - rightChild, - numberSamples - ); - } - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/AnalysisConfig.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/AnalysisConfig.java deleted file mode 100644 index 09b8ef16eeda4..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/AnalysisConfig.java +++ /dev/null @@ -1,446 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.job.config; - -import org.elasticsearch.core.TimeValue; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.Collections; -import java.util.HashSet; -import java.util.List; -import java.util.Objects; -import java.util.Set; -import java.util.function.Function; - -/** - * Analysis configuration options that describe which fields are - * analyzed and which functions are used to detect anomalies. - *
    - * The configuration can contain multiple detectors, a new anomaly detector will - * be created for each detector configuration. The fields - * bucketSpan, summaryCountFieldName and categorizationFieldName - * apply to all detectors. - *
    - * If a value has not been set it will be null - * Object wrappers are used around integral types & booleans so they can take - * null values. - */ -public class AnalysisConfig implements ToXContentObject { - /** - * Serialisation names - */ - public static final ParseField ANALYSIS_CONFIG = new ParseField("analysis_config"); - public static final ParseField BUCKET_SPAN = new ParseField("bucket_span"); - public static final ParseField CATEGORIZATION_FIELD_NAME = new ParseField("categorization_field_name"); - public static final ParseField CATEGORIZATION_FILTERS = new ParseField("categorization_filters"); - public static final ParseField CATEGORIZATION_ANALYZER = CategorizationAnalyzerConfig.CATEGORIZATION_ANALYZER; - public static final ParseField PER_PARTITION_CATEGORIZATION = new ParseField("per_partition_categorization"); - public static final ParseField LATENCY = new ParseField("latency"); - public static final ParseField SUMMARY_COUNT_FIELD_NAME = new ParseField("summary_count_field_name"); - public static final ParseField DETECTORS = new ParseField("detectors"); - public static final ParseField INFLUENCERS = new ParseField("influencers"); - public static final ParseField MULTIVARIATE_BY_FIELDS = new ParseField("multivariate_by_fields"); - public static final ParseField MODEL_PRUNE_WINDOW = new ParseField("model_prune_window"); - - @SuppressWarnings("unchecked") - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - ANALYSIS_CONFIG.getPreferredName(), - true, - a -> new AnalysisConfig.Builder((List) a[0]) - ); - - static { - PARSER.declareObjectArray(ConstructingObjectParser.constructorArg(), (p, c) -> (Detector.PARSER).apply(p, c).build(), DETECTORS); - PARSER.declareString( - (builder, val) -> builder.setBucketSpan(TimeValue.parseTimeValue(val, BUCKET_SPAN.getPreferredName())), - BUCKET_SPAN - ); - PARSER.declareString(Builder::setCategorizationFieldName, CATEGORIZATION_FIELD_NAME); - PARSER.declareStringArray(Builder::setCategorizationFilters, CATEGORIZATION_FILTERS); - // This one is nasty - the syntax for analyzers takes either names or objects at many levels, hence it's not - // possible to simply declare whether the field is a string or object and a completely custom parser is required - PARSER.declareField( - Builder::setCategorizationAnalyzerConfig, - (p, c) -> CategorizationAnalyzerConfig.buildFromXContentFragment(p), - CATEGORIZATION_ANALYZER, - ObjectParser.ValueType.OBJECT_OR_STRING - ); - PARSER.declareObject( - Builder::setPerPartitionCategorizationConfig, - PerPartitionCategorizationConfig.PARSER, - PER_PARTITION_CATEGORIZATION - ); - PARSER.declareString((builder, val) -> builder.setLatency(TimeValue.parseTimeValue(val, LATENCY.getPreferredName())), LATENCY); - PARSER.declareString(Builder::setSummaryCountFieldName, SUMMARY_COUNT_FIELD_NAME); - PARSER.declareStringArray(Builder::setInfluencers, INFLUENCERS); - PARSER.declareBoolean(Builder::setMultivariateByFields, MULTIVARIATE_BY_FIELDS); - PARSER.declareString( - (builder, val) -> builder.setModelPruneWindow(TimeValue.parseTimeValue(val, MODEL_PRUNE_WINDOW.getPreferredName())), - MODEL_PRUNE_WINDOW - ); - } - - /** - * These values apply to all detectors - */ - private final TimeValue bucketSpan; - private final String categorizationFieldName; - private final List categorizationFilters; - private final CategorizationAnalyzerConfig categorizationAnalyzerConfig; - private final PerPartitionCategorizationConfig perPartitionCategorizationConfig; - private final TimeValue latency; 
- private final String summaryCountFieldName; - private final List detectors; - private final List influencers; - private final Boolean multivariateByFields; - private final TimeValue modelPruneWindow; - - private AnalysisConfig( - TimeValue bucketSpan, - String categorizationFieldName, - List categorizationFilters, - CategorizationAnalyzerConfig categorizationAnalyzerConfig, - PerPartitionCategorizationConfig perPartitionCategorizationConfig, - TimeValue latency, - String summaryCountFieldName, - List detectors, - List influencers, - Boolean multivariateByFields, - TimeValue modelPruneWindow - ) { - this.detectors = Collections.unmodifiableList(detectors); - this.bucketSpan = bucketSpan; - this.latency = latency; - this.categorizationFieldName = categorizationFieldName; - this.categorizationAnalyzerConfig = categorizationAnalyzerConfig; - this.perPartitionCategorizationConfig = perPartitionCategorizationConfig; - this.categorizationFilters = categorizationFilters == null ? null : Collections.unmodifiableList(categorizationFilters); - this.summaryCountFieldName = summaryCountFieldName; - this.influencers = Collections.unmodifiableList(influencers); - this.multivariateByFields = multivariateByFields; - this.modelPruneWindow = modelPruneWindow; - } - - /** - * The analysis bucket span - * - * @return The bucketspan or null if not set - */ - public TimeValue getBucketSpan() { - return bucketSpan; - } - - public String getCategorizationFieldName() { - return categorizationFieldName; - } - - public List getCategorizationFilters() { - return categorizationFilters; - } - - public CategorizationAnalyzerConfig getCategorizationAnalyzerConfig() { - return categorizationAnalyzerConfig; - } - - public PerPartitionCategorizationConfig getPerPartitionCategorizationConfig() { - return perPartitionCategorizationConfig; - } - - /** - * The latency interval during which out-of-order records should be handled. - * - * @return The latency interval or null if not set - */ - public TimeValue getLatency() { - return latency; - } - - /** - * The name of the field that contains counts for pre-summarised input - * - * @return The field name or null if not set - */ - public String getSummaryCountFieldName() { - return summaryCountFieldName; - } - - /** - * The list of analysis detectors. 
In a valid configuration the list should - * contain at least 1 {@link Detector} - * - * @return The Detectors used in this job - */ - public List getDetectors() { - return detectors; - } - - /** - * The list of influence field names - */ - public List getInfluencers() { - return influencers; - } - - public Boolean getMultivariateByFields() { - return multivariateByFields; - } - - public TimeValue getModelPruneWindow() { - return modelPruneWindow; - } - - private static void addIfNotNull(Set fields, String field) { - if (field != null) { - fields.add(field); - } - } - - public List fields() { - return collectNonNullAndNonEmptyDetectorFields(Detector::getFieldName); - } - - private List collectNonNullAndNonEmptyDetectorFields(Function fieldGetter) { - Set fields = new HashSet<>(); - - for (Detector d : getDetectors()) { - addIfNotNull(fields, fieldGetter.apply(d)); - } - - // remove empty strings - fields.remove(""); - - return new ArrayList<>(fields); - } - - public List byFields() { - return collectNonNullAndNonEmptyDetectorFields(Detector::getByFieldName); - } - - public List overFields() { - return collectNonNullAndNonEmptyDetectorFields(Detector::getOverFieldName); - } - - public List partitionFields() { - return collectNonNullAndNonEmptyDetectorFields(Detector::getPartitionFieldName); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - if (bucketSpan != null) { - builder.field(BUCKET_SPAN.getPreferredName(), bucketSpan.getStringRep()); - } - if (categorizationFieldName != null) { - builder.field(CATEGORIZATION_FIELD_NAME.getPreferredName(), categorizationFieldName); - } - if (categorizationFilters != null) { - builder.field(CATEGORIZATION_FILTERS.getPreferredName(), categorizationFilters); - } - if (categorizationAnalyzerConfig != null) { - // This cannot be builder.field(CATEGORIZATION_ANALYZER.getPreferredName(), categorizationAnalyzerConfig, params); - // because that always writes categorizationAnalyzerConfig as an object, and in the case of a global analyzer it - // gets written as a single string. 
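The client-side construction path this patch removes looked roughly like the following; a minimal sketch, with the detector and bucket span values invented for illustration:

```java
import java.util.Collections;
import org.elasticsearch.core.TimeValue;

Detector detector = new Detector.Builder("mean", "responsetime")
    .setByFieldName("airline")
    .build();

AnalysisConfig config = AnalysisConfig.builder(Collections.singletonList(detector))
    .setBucketSpan(TimeValue.timeValueMinutes(15))   // written out via getStringRep(), e.g. "15m"
    .build();
```

Note that `Builder#setDetectors` (below) re-assigns sequential `detector_index` values, so any indices supplied by the caller are overwritten.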
- categorizationAnalyzerConfig.toXContent(builder, params); - } - if (perPartitionCategorizationConfig != null) { - builder.field(PER_PARTITION_CATEGORIZATION.getPreferredName(), perPartitionCategorizationConfig); - } - if (latency != null) { - builder.field(LATENCY.getPreferredName(), latency.getStringRep()); - } - if (summaryCountFieldName != null) { - builder.field(SUMMARY_COUNT_FIELD_NAME.getPreferredName(), summaryCountFieldName); - } - builder.startArray(DETECTORS.getPreferredName()); - for (Detector detector : detectors) { - detector.toXContent(builder, params); - } - builder.endArray(); - builder.field(INFLUENCERS.getPreferredName(), influencers); - if (multivariateByFields != null) { - builder.field(MULTIVARIATE_BY_FIELDS.getPreferredName(), multivariateByFields); - } - if (modelPruneWindow != null) { - builder.field(MODEL_PRUNE_WINDOW.getPreferredName(), modelPruneWindow.getStringRep()); - } - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object object) { - if (this == object) { - return true; - } - - if (object == null || getClass() != object.getClass()) { - return false; - } - - AnalysisConfig that = (AnalysisConfig) object; - return Objects.equals(latency, that.latency) - && Objects.equals(bucketSpan, that.bucketSpan) - && Objects.equals(categorizationFieldName, that.categorizationFieldName) - && Objects.equals(categorizationFilters, that.categorizationFilters) - && Objects.equals(categorizationAnalyzerConfig, that.categorizationAnalyzerConfig) - && Objects.equals(perPartitionCategorizationConfig, that.perPartitionCategorizationConfig) - && Objects.equals(summaryCountFieldName, that.summaryCountFieldName) - && Objects.equals(detectors, that.detectors) - && Objects.equals(influencers, that.influencers) - && Objects.equals(multivariateByFields, that.multivariateByFields) - && Objects.equals(modelPruneWindow, that.modelPruneWindow); - } - - @Override - public int hashCode() { - return Objects.hash( - bucketSpan, - categorizationFieldName, - categorizationFilters, - categorizationAnalyzerConfig, - perPartitionCategorizationConfig, - latency, - summaryCountFieldName, - detectors, - influencers, - multivariateByFields, - modelPruneWindow - ); - } - - public static Builder builder(List detectors) { - return new Builder(detectors); - } - - public static class Builder { - - private List detectors; - private TimeValue bucketSpan; - private TimeValue latency; - private String categorizationFieldName; - private List categorizationFilters; - private CategorizationAnalyzerConfig categorizationAnalyzerConfig; - private PerPartitionCategorizationConfig perPartitionCategorizationConfig; - private String summaryCountFieldName; - private List influencers = new ArrayList<>(); - private Boolean multivariateByFields; - private TimeValue modelPruneWindow; - - public Builder(List detectors) { - setDetectors(detectors); - } - - public Builder(AnalysisConfig analysisConfig) { - this.detectors = new ArrayList<>(analysisConfig.detectors); - this.bucketSpan = analysisConfig.bucketSpan; - this.latency = analysisConfig.latency; - this.categorizationFieldName = analysisConfig.categorizationFieldName; - this.categorizationFilters = analysisConfig.categorizationFilters == null - ? 
null - : new ArrayList<>(analysisConfig.categorizationFilters); - this.categorizationAnalyzerConfig = analysisConfig.categorizationAnalyzerConfig; - this.perPartitionCategorizationConfig = analysisConfig.perPartitionCategorizationConfig; - this.summaryCountFieldName = analysisConfig.summaryCountFieldName; - this.influencers = new ArrayList<>(analysisConfig.influencers); - this.multivariateByFields = analysisConfig.multivariateByFields; - this.modelPruneWindow = analysisConfig.modelPruneWindow; - } - - public Builder setDetectors(List detectors) { - Objects.requireNonNull(detectors, "[" + DETECTORS.getPreferredName() + "] must not be null"); - // We always assign sequential IDs to the detectors that are correct for this analysis config - int detectorIndex = 0; - List sequentialIndexDetectors = new ArrayList<>(detectors.size()); - for (Detector origDetector : detectors) { - Detector.Builder builder = new Detector.Builder(origDetector); - builder.setDetectorIndex(detectorIndex++); - sequentialIndexDetectors.add(builder.build()); - } - this.detectors = sequentialIndexDetectors; - return this; - } - - public Builder setDetector(int detectorIndex, Detector detector) { - detectors.set(detectorIndex, detector); - return this; - } - - public Builder setBucketSpan(TimeValue bucketSpan) { - this.bucketSpan = bucketSpan; - return this; - } - - public Builder setLatency(TimeValue latency) { - this.latency = latency; - return this; - } - - public Builder setCategorizationFieldName(String categorizationFieldName) { - this.categorizationFieldName = categorizationFieldName; - return this; - } - - public Builder setCategorizationFilters(List categorizationFilters) { - this.categorizationFilters = categorizationFilters; - return this; - } - - public Builder setCategorizationAnalyzerConfig(CategorizationAnalyzerConfig categorizationAnalyzerConfig) { - this.categorizationAnalyzerConfig = categorizationAnalyzerConfig; - return this; - } - - public Builder setPerPartitionCategorizationConfig(PerPartitionCategorizationConfig perPartitionCategorizationConfig) { - this.perPartitionCategorizationConfig = perPartitionCategorizationConfig; - return this; - } - - public Builder setSummaryCountFieldName(String summaryCountFieldName) { - this.summaryCountFieldName = summaryCountFieldName; - return this; - } - - public Builder setInfluencers(List influencers) { - this.influencers = Objects.requireNonNull(influencers, INFLUENCERS.getPreferredName()); - return this; - } - - public Builder setMultivariateByFields(Boolean multivariateByFields) { - this.multivariateByFields = multivariateByFields; - return this; - } - - public Builder setModelPruneWindow(TimeValue modelPruneWindow) { - this.modelPruneWindow = modelPruneWindow; - return this; - } - - public AnalysisConfig build() { - - return new AnalysisConfig( - bucketSpan, - categorizationFieldName, - categorizationFilters, - categorizationAnalyzerConfig, - perPartitionCategorizationConfig, - latency, - summaryCountFieldName, - detectors, - influencers, - multivariateByFields, - modelPruneWindow - ); - } - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/AnalysisLimits.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/AnalysisLimits.java deleted file mode 100644 index f4172c843dd39..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/AnalysisLimits.java +++ /dev/null @@ -1,131 +0,0 @@ -/* - * Copyright Elasticsearch B.V. 
and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.job.config; - -import org.elasticsearch.common.unit.ByteSizeValue; -import org.elasticsearch.core.Nullable; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Objects; - -/** - * Analysis limits for autodetect. In particular, - * this is a collection of parameters that allow limiting - * the resources used by the job. - */ -public class AnalysisLimits implements ToXContentObject { - - /** - * Serialisation field names - */ - public static final ParseField MODEL_MEMORY_LIMIT = new ParseField("model_memory_limit"); - public static final ParseField CATEGORIZATION_EXAMPLES_LIMIT = new ParseField("categorization_examples_limit"); - - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "analysis_limits", - true, - a -> new AnalysisLimits((Long) a[0], (Long) a[1]) - ); - - static { - PARSER.declareField(ConstructingObjectParser.optionalConstructorArg(), p -> { - if (p.currentToken() == XContentParser.Token.VALUE_STRING) { - return ByteSizeValue.parseBytesSizeValue(p.text(), MODEL_MEMORY_LIMIT.getPreferredName()).getMb(); - } else if (p.currentToken() == XContentParser.Token.VALUE_NUMBER) { - return p.longValue(); - } - throw new IllegalArgumentException("Unsupported token [" + p.currentToken() + "]"); - }, MODEL_MEMORY_LIMIT, ObjectParser.ValueType.VALUE); - PARSER.declareLong(ConstructingObjectParser.optionalConstructorArg(), CATEGORIZATION_EXAMPLES_LIMIT); - } - - /** - * The model memory limit in MiBs. - * It is initialised to null, which implies that the server-side default will be used. - */ - private final Long modelMemoryLimit; - - /** - * It is initialised to null. - * A value of null will result in the server-side default being used. - */ - private final Long categorizationExamplesLimit; - - public AnalysisLimits(Long categorizationExamplesLimit) { - this(null, categorizationExamplesLimit); - } - - public AnalysisLimits(Long modelMemoryLimit, Long categorizationExamplesLimit) { - this.modelMemoryLimit = modelMemoryLimit; - this.categorizationExamplesLimit = categorizationExamplesLimit; - } - - /** - * Maximum size of the model in MB before the anomaly detector - * will drop new samples to prevent the model using any more - * memory. 
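The two-branch parser above means `model_memory_limit` was accepted either as a byte-size string or as a plain number of megabytes, while serialization always normalizes to the string form. A small sketch with invented values:

```java
AnalysisLimits limits = new AnalysisLimits(512L, 4L);
// toXContent produces: {"model_memory_limit":"512mb","categorization_examples_limit":4}
// On input, "model_memory_limit": "512mb" and "model_memory_limit": 512 parse identically.
```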
- * - * @return The set memory limit or null if not set - */ - @Nullable - public Long getModelMemoryLimit() { - return modelMemoryLimit; - } - - /** - * Gets the limit to the number of examples that are stored per category - * - * @return the limit or null if not set - */ - @Nullable - public Long getCategorizationExamplesLimit() { - return categorizationExamplesLimit; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - if (modelMemoryLimit != null) { - builder.field(MODEL_MEMORY_LIMIT.getPreferredName(), modelMemoryLimit + "mb"); - } - if (categorizationExamplesLimit != null) { - builder.field(CATEGORIZATION_EXAMPLES_LIMIT.getPreferredName(), categorizationExamplesLimit); - } - builder.endObject(); - return builder; - } - - /** - * Overridden equality test - */ - @Override - public boolean equals(Object other) { - if (this == other) { - return true; - } - - if (other instanceof AnalysisLimits == false) { - return false; - } - - AnalysisLimits that = (AnalysisLimits) other; - return Objects.equals(this.modelMemoryLimit, that.modelMemoryLimit) - && Objects.equals(this.categorizationExamplesLimit, that.categorizationExamplesLimit); - } - - @Override - public int hashCode() { - return Objects.hash(modelMemoryLimit, categorizationExamplesLimit); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/CategorizationAnalyzerConfig.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/CategorizationAnalyzerConfig.java deleted file mode 100644 index 8782bacc83f32..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/CategorizationAnalyzerConfig.java +++ /dev/null @@ -1,347 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.job.config; - -import org.elasticsearch.action.admin.indices.analyze.AnalyzeAction; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.rest.action.admin.indices.RestAnalyzeAction; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentFragment; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.Collections; -import java.util.List; -import java.util.Map; -import java.util.Objects; - -/** - * Configuration for the categorization analyzer. - * - * The syntax is a subset of what can be supplied to the {@linkplain RestAnalyzeAction _analyze endpoint}. - * To summarize, the first option is to specify the name of an out-of-the-box analyzer: - * - * "categorization_analyzer" : "standard" - * - * - * The second option is to specify a custom analyzer by combining the char_filters, tokenizer - * and token_filters fields. In turn, each of these can be specified as the name of an out-of-the-box - * one or as an object defining a custom one. 
For example: - * - * "char_filters" : [ - * "html_strip", - * { "type" : "pattern_replace", "pattern": "SQL: .*" } - * ], - * "tokenizer" : "thai", - * "token_filters" : [ - * "lowercase", - * { "type" : "pattern_replace", "pattern": "^[0-9].*" } - * ] - * - */ -public class CategorizationAnalyzerConfig implements ToXContentFragment { - - public static final ParseField CATEGORIZATION_ANALYZER = new ParseField("categorization_analyzer"); - private static final ParseField TOKENIZER = AnalyzeAction.Fields.TOKENIZER; - private static final ParseField TOKEN_FILTERS = AnalyzeAction.Fields.TOKEN_FILTERS; - private static final ParseField CHAR_FILTERS = AnalyzeAction.Fields.CHAR_FILTERS; - - /** - * This method is only used in the unit tests - in production code this config is always parsed as a fragment. - */ - static CategorizationAnalyzerConfig buildFromXContentObject(XContentParser parser) throws IOException { - - if (parser.nextToken() != XContentParser.Token.START_OBJECT) { - throw new IllegalArgumentException("Expected start object but got [" + parser.currentToken() + "]"); - } - if (parser.nextToken() != XContentParser.Token.FIELD_NAME) { - throw new IllegalArgumentException("Expected field name but got [" + parser.currentToken() + "]"); - } - parser.nextToken(); - CategorizationAnalyzerConfig categorizationAnalyzerConfig = buildFromXContentFragment(parser); - parser.nextToken(); - return categorizationAnalyzerConfig; - } - - /** - * Parse a categorization_analyzer configuration. A custom parser is needed due to the - * complexity of the format, with many elements able to be specified as either the name of a built-in - * element or an object containing a custom definition. - */ - static CategorizationAnalyzerConfig buildFromXContentFragment(XContentParser parser) throws IOException { - - CategorizationAnalyzerConfig.Builder builder = new CategorizationAnalyzerConfig.Builder(); - - XContentParser.Token token = parser.currentToken(); - if (token == XContentParser.Token.VALUE_STRING) { - builder.setAnalyzer(parser.text()); - } else if (token != XContentParser.Token.START_OBJECT) { - throw new IllegalArgumentException("[" + CATEGORIZATION_ANALYZER + "] should be analyzer's name or settings [" + token + "]"); - } else { - String currentFieldName = null; - while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { - if (token == XContentParser.Token.FIELD_NAME) { - currentFieldName = parser.currentName(); - } else if (CHAR_FILTERS.match(currentFieldName, parser.getDeprecationHandler()) - && token == XContentParser.Token.START_ARRAY) { - while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { - if (token == XContentParser.Token.VALUE_STRING) { - builder.addCharFilter(parser.text()); - } else if (token == XContentParser.Token.START_OBJECT) { - builder.addCharFilter(parser.map()); - } else { - throw new IllegalArgumentException( - "[" - + currentFieldName - + "] in [" - + CATEGORIZATION_ANALYZER - + "] array element should contain char_filter's name or settings [" - + token - + "]" - ); - } - } - } else if (TOKENIZER.match(currentFieldName, parser.getDeprecationHandler())) { - if (token == XContentParser.Token.VALUE_STRING) { - builder.setTokenizer(parser.text()); - } else if (token == XContentParser.Token.START_OBJECT) { - builder.setTokenizer(parser.map()); - } else { - throw new IllegalArgumentException( - "[" - + currentFieldName - + "] in [" - + CATEGORIZATION_ANALYZER - + "] should be tokenizer's name or settings [" - + token - + "]" - ); - } - } else if 
(TOKEN_FILTERS.match(currentFieldName, parser.getDeprecationHandler()) - && token == XContentParser.Token.START_ARRAY) { - while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { - if (token == XContentParser.Token.VALUE_STRING) { - builder.addTokenFilter(parser.text()); - } else if (token == XContentParser.Token.START_OBJECT) { - builder.addTokenFilter(parser.map()); - } else { - throw new IllegalArgumentException( - "[" - + currentFieldName - + "] in [" - + CATEGORIZATION_ANALYZER - + "] array element should contain token_filter's name or settings [" - + token - + "]" - ); - } - } - } - } - } - - return builder.build(); - } - - /** - * Simple store of either a name of a built-in analyzer element or a custom definition. - */ - public static final class NameOrDefinition implements ToXContentFragment { - - // Exactly one of these two members is not null - public final String name; - public final Settings definition; - - NameOrDefinition(String name) { - this.name = Objects.requireNonNull(name); - this.definition = null; - } - - NameOrDefinition(ParseField field, Map definition) { - this.name = null; - Objects.requireNonNull(definition); - try { - this.definition = Settings.builder().loadFromMap(definition).build(); - } catch (Exception e) { - throw new IllegalArgumentException("Failed to parse [" + definition + "] in [" + field.getPreferredName() + "]", e); - } - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - if (definition == null) { - builder.value(name); - } else { - builder.startObject(); - definition.toXContent(builder, params); - builder.endObject(); - } - return builder; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - NameOrDefinition that = (NameOrDefinition) o; - return Objects.equals(name, that.name) && Objects.equals(definition, that.definition); - } - - @Override - public int hashCode() { - return Objects.hash(name, definition); - } - - @Override - public String toString() { - if (definition == null) { - return name; - } else { - return definition.toDelimitedString(';'); - } - } - } - - private final String analyzer; - private final List charFilters; - private final NameOrDefinition tokenizer; - private final List tokenFilters; - - private CategorizationAnalyzerConfig( - String analyzer, - List charFilters, - NameOrDefinition tokenizer, - List tokenFilters - ) { - this.analyzer = analyzer; - this.charFilters = Collections.unmodifiableList(charFilters); - this.tokenizer = tokenizer; - this.tokenFilters = Collections.unmodifiableList(tokenFilters); - } - - public String getAnalyzer() { - return analyzer; - } - - public List getCharFilters() { - return charFilters; - } - - public NameOrDefinition getTokenizer() { - return tokenizer; - } - - public List getTokenFilters() { - return tokenFilters; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - if (analyzer != null) { - builder.field(CATEGORIZATION_ANALYZER.getPreferredName(), analyzer); - } else { - builder.startObject(CATEGORIZATION_ANALYZER.getPreferredName()); - if (charFilters.isEmpty() == false) { - builder.startArray(CHAR_FILTERS.getPreferredName()); - for (NameOrDefinition charFilter : charFilters) { - charFilter.toXContent(builder, params); - } - builder.endArray(); - } - if (tokenizer != null) { - builder.field(TOKENIZER.getPreferredName(), tokenizer); - } - if 
(tokenFilters.isEmpty() == false) { - builder.startArray(TOKEN_FILTERS.getPreferredName()); - for (NameOrDefinition tokenFilter : tokenFilters) { - tokenFilter.toXContent(builder, params); - } - builder.endArray(); - } - builder.endObject(); - } - return builder; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - CategorizationAnalyzerConfig that = (CategorizationAnalyzerConfig) o; - return Objects.equals(analyzer, that.analyzer) - && Objects.equals(charFilters, that.charFilters) - && Objects.equals(tokenizer, that.tokenizer) - && Objects.equals(tokenFilters, that.tokenFilters); - } - - @Override - public int hashCode() { - return Objects.hash(analyzer, charFilters, tokenizer, tokenFilters); - } - - public static class Builder { - - private String analyzer; - private List charFilters = new ArrayList<>(); - private NameOrDefinition tokenizer; - private List tokenFilters = new ArrayList<>(); - - public Builder() {} - - public Builder(CategorizationAnalyzerConfig categorizationAnalyzerConfig) { - this.analyzer = categorizationAnalyzerConfig.analyzer; - this.charFilters = new ArrayList<>(categorizationAnalyzerConfig.charFilters); - this.tokenizer = categorizationAnalyzerConfig.tokenizer; - this.tokenFilters = new ArrayList<>(categorizationAnalyzerConfig.tokenFilters); - } - - public Builder setAnalyzer(String analyzer) { - this.analyzer = analyzer; - return this; - } - - public Builder addCharFilter(String charFilter) { - this.charFilters.add(new NameOrDefinition(charFilter)); - return this; - } - - public Builder addCharFilter(Map charFilter) { - this.charFilters.add(new NameOrDefinition(CHAR_FILTERS, charFilter)); - return this; - } - - public Builder setTokenizer(String tokenizer) { - this.tokenizer = new NameOrDefinition(tokenizer); - return this; - } - - public Builder setTokenizer(Map tokenizer) { - this.tokenizer = new NameOrDefinition(TOKENIZER, tokenizer); - return this; - } - - public Builder addTokenFilter(String tokenFilter) { - this.tokenFilters.add(new NameOrDefinition(tokenFilter)); - return this; - } - - public Builder addTokenFilter(Map tokenFilter) { - this.tokenFilters.add(new NameOrDefinition(TOKEN_FILTERS, tokenFilter)); - return this; - } - - /** - * Create a config - */ - public CategorizationAnalyzerConfig build() { - return new CategorizationAnalyzerConfig(analyzer, charFilters, tokenizer, tokenFilters); - } - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/DataDescription.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/DataDescription.java deleted file mode 100644 index d460cf9bd81a4..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/DataDescription.java +++ /dev/null @@ -1,176 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
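To make the custom-parser discussion in `CategorizationAnalyzerConfig` above concrete, this is roughly how a custom analyzer was assembled with the builder this patch deletes; the char filter, tokenizer, and token filter names are illustrative:

```java
CategorizationAnalyzerConfig analyzer = new CategorizationAnalyzerConfig.Builder()
    .addCharFilter("html_strip")    // by-name form; serializes as a bare string
    .setTokenizer("classic")        // illustrative tokenizer name
    .addTokenFilter("lowercase")
    .build();
// Serializes as an object listing the parts, whereas
// new CategorizationAnalyzerConfig.Builder().setAnalyzer("standard").build()
// serializes the whole field as the single string "standard".
```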
- */ -package org.elasticsearch.client.ml.job.config; - -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Locale; -import java.util.Objects; - -/** - * Describes the format of the data used in the job and how it should - * be interpreted by the ML job. - *
    - * {@link #getTimeField()} is the name of the field containing the timestamp and - * {@link #getTimeFormat()} is the format code for the date string in as described by - * {@link java.time.format.DateTimeFormatter}. - */ -public class DataDescription implements ToXContentObject { - /** - * Enum of the acceptable data formats. - */ - public enum DataFormat { - XCONTENT; - - /** - * Case-insensitive from string method. - * Works with either XCONTENT, XContent, etc. - * - * @param value String representation - * @return The data format - */ - public static DataFormat forString(String value) { - return DataFormat.valueOf(value.toUpperCase(Locale.ROOT)); - } - - @Override - public String toString() { - return name().toLowerCase(Locale.ROOT); - } - } - - private static final ParseField DATA_DESCRIPTION_FIELD = new ParseField("data_description"); - private static final ParseField TIME_FIELD_NAME_FIELD = new ParseField("time_field"); - private static final ParseField TIME_FORMAT_FIELD = new ParseField("time_format"); - - /** - * Special time format string for epoch times (seconds) - */ - public static final String EPOCH = "epoch"; - - /** - * Special time format string for epoch times (milli-seconds) - */ - public static final String EPOCH_MS = "epoch_ms"; - - /** - * By default autodetect expects the timestamp in a field with this name - */ - public static final String DEFAULT_TIME_FIELD = "time"; - - private final String timeFieldName; - private final String timeFormat; - - public static final ObjectParser PARSER = new ObjectParser<>( - DATA_DESCRIPTION_FIELD.getPreferredName(), - true, - Builder::new - ); - - static { - PARSER.declareString(Builder::setTimeField, TIME_FIELD_NAME_FIELD); - PARSER.declareString(Builder::setTimeFormat, TIME_FORMAT_FIELD); - } - - public DataDescription(String timeFieldName, String timeFormat) { - this.timeFieldName = timeFieldName; - this.timeFormat = timeFormat; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(TIME_FIELD_NAME_FIELD.getPreferredName(), timeFieldName); - builder.field(TIME_FORMAT_FIELD.getPreferredName(), timeFormat); - builder.endObject(); - return builder; - } - - /** - * The format of the data to be processed. - * Always {@link DataDescription.DataFormat#XCONTENT} - * - * @return The data format - */ - public DataFormat getFormat() { - return DataFormat.XCONTENT; - } - - /** - * The name of the field containing the timestamp - * - * @return A String if set or null - */ - public String getTimeField() { - return timeFieldName; - } - - /** - * Either {@value #EPOCH}, {@value #EPOCH_MS} or a SimpleDateTime format string. - * If not set (is null or an empty string) or set to - * {@value #EPOCH_MS} (the default) then the date is assumed to be in - * milliseconds from the epoch. 
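A sketch of the builder usage this deletion removes; the defaults noted in the comments come from the `Builder` field initializers below:

```java
DataDescription dataDescription = new DataDescription.Builder()
    .setTimeField("timestamp")                  // default is "time"
    .setTimeFormat(DataDescription.EPOCH_MS)    // default; epoch milliseconds
    .build();
// Serializes as: {"time_field":"timestamp","time_format":"epoch_ms"}
```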
- * - * @return A String if set or null - */ - public String getTimeFormat() { - return timeFormat; - } - - /** - * Overridden equality test - */ - @Override - public boolean equals(Object other) { - if (this == other) { - return true; - } - - if (other instanceof DataDescription == false) { - return false; - } - - DataDescription that = (DataDescription) other; - - return Objects.equals(this.timeFieldName, that.timeFieldName) && Objects.equals(this.timeFormat, that.timeFormat); - } - - @Override - public int hashCode() { - return Objects.hash(timeFieldName, timeFormat); - } - - public static class Builder { - - private String timeFieldName = DEFAULT_TIME_FIELD; - private String timeFormat = EPOCH_MS; - - public Builder setFormat(DataFormat format) { - Objects.requireNonNull(format); - return this; - } - - public Builder setTimeField(String fieldName) { - timeFieldName = Objects.requireNonNull(fieldName); - return this; - } - - public Builder setTimeFormat(String format) { - timeFormat = Objects.requireNonNull(format); - return this; - } - - public DataDescription build() { - return new DataDescription(timeFieldName, timeFormat); - } - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/DefaultDetectorDescription.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/DefaultDetectorDescription.java deleted file mode 100644 index 66ea72f928d54..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/DefaultDetectorDescription.java +++ /dev/null @@ -1,83 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ -package org.elasticsearch.client.ml.job.config; - -import org.elasticsearch.common.Strings; - -public final class DefaultDetectorDescription { - private static final String BY_TOKEN = " by "; - private static final String OVER_TOKEN = " over "; - - private static final String USE_NULL_OPTION = " usenull="; - private static final String PARTITION_FIELD_OPTION = " partitionfield="; - private static final String EXCLUDE_FREQUENT_OPTION = " excludefrequent="; - - private DefaultDetectorDescription() {} - - /** - * Returns the default description for the given {@code detector} - * - * @param detector the {@code Detector} for which a default description is requested - * @return the default description - */ - public static String of(Detector detector) { - StringBuilder sb = new StringBuilder(); - appendOn(detector, sb); - return sb.toString(); - } - - /** - * Appends to the given {@code StringBuilder} the default description - * for the given {@code detector} - * - * @param detector the {@code Detector} for which a default description is requested - * @param sb the {@code StringBuilder} to append to - */ - public static void appendOn(Detector detector, StringBuilder sb) { - if (isNotNullOrEmpty(detector.getFunction().getFullName())) { - sb.append(detector.getFunction()); - if (isNotNullOrEmpty(detector.getFieldName())) { - sb.append('(').append(quoteField(detector.getFieldName())).append(')'); - } - } else if (isNotNullOrEmpty(detector.getFieldName())) { - sb.append(quoteField(detector.getFieldName())); - } - - if (isNotNullOrEmpty(detector.getByFieldName())) { - sb.append(BY_TOKEN).append(quoteField(detector.getByFieldName())); - } - - if (isNotNullOrEmpty(detector.getOverFieldName())) { - sb.append(OVER_TOKEN).append(quoteField(detector.getOverFieldName())); - } - - if (detector.isUseNull()) { - sb.append(USE_NULL_OPTION).append(detector.isUseNull()); - } - - if (isNotNullOrEmpty(detector.getPartitionFieldName())) { - sb.append(PARTITION_FIELD_OPTION).append(quoteField(detector.getPartitionFieldName())); - } - - if (detector.getExcludeFrequent() != null) { - sb.append(EXCLUDE_FREQUENT_OPTION).append(detector.getExcludeFrequent()); - } - } - - private static String quoteField(String field) { - if (field.matches("\\w*")) { - return field; - } else { - return "\"" + field.replace("\\", "\\\\").replace("\"", "\\\"") + "\""; - } - } - - private static boolean isNotNullOrEmpty(String arg) { - return Strings.isNullOrEmpty(arg) == false; - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/DetectionRule.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/DetectionRule.java deleted file mode 100644 index e23cad0c024aa..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/DetectionRule.java +++ /dev/null @@ -1,142 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
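Tracing `DefaultDetectorDescription.appendOn` above: a detector with a function, an analysis field, a 'by' field, and a partition field containing a space would be described as below. The field names are invented; the quoting comes from `quoteField`, which only quotes names that are not purely word characters:

```java
Detector detector = new Detector.Builder("mean", "responsetime")
    .setByFieldName("airline")
    .setPartitionFieldName("data centre")
    .build();

String description = DefaultDetectorDescription.of(detector);
// -> mean(responsetime) by airline partitionfield="data centre"
```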
- */ -package org.elasticsearch.client.ml.job.config; - -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Arrays; -import java.util.Collections; -import java.util.EnumSet; -import java.util.List; -import java.util.Objects; - -public class DetectionRule implements ToXContentObject { - - public static final ParseField DETECTION_RULE_FIELD = new ParseField("detection_rule"); - public static final ParseField ACTIONS_FIELD = new ParseField("actions"); - public static final ParseField SCOPE_FIELD = new ParseField("scope"); - public static final ParseField CONDITIONS_FIELD = new ParseField("conditions"); - - public static final ObjectParser PARSER = new ObjectParser<>( - DETECTION_RULE_FIELD.getPreferredName(), - true, - Builder::new - ); - - static { - PARSER.declareStringArray(Builder::setActions, ACTIONS_FIELD); - PARSER.declareObject(Builder::setScope, RuleScope.parser(), SCOPE_FIELD); - PARSER.declareObjectArray(Builder::setConditions, RuleCondition.PARSER, CONDITIONS_FIELD); - } - - private final EnumSet actions; - private final RuleScope scope; - private final List conditions; - - private DetectionRule(EnumSet actions, RuleScope scope, List conditions) { - this.actions = Objects.requireNonNull(actions); - this.scope = Objects.requireNonNull(scope); - this.conditions = Collections.unmodifiableList(conditions); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(ACTIONS_FIELD.getPreferredName(), actions); - if (scope.isEmpty() == false) { - builder.field(SCOPE_FIELD.getPreferredName(), scope); - } - if (conditions.isEmpty() == false) { - builder.field(CONDITIONS_FIELD.getPreferredName(), conditions); - } - builder.endObject(); - return builder; - } - - public EnumSet getActions() { - return actions; - } - - public RuleScope getScope() { - return scope; - } - - public List getConditions() { - return conditions; - } - - @Override - public boolean equals(Object obj) { - if (this == obj) { - return true; - } - - if (obj instanceof DetectionRule == false) { - return false; - } - - DetectionRule other = (DetectionRule) obj; - return Objects.equals(actions, other.actions) && Objects.equals(scope, other.scope) && Objects.equals(conditions, other.conditions); - } - - @Override - public int hashCode() { - return Objects.hash(actions, scope, conditions); - } - - public static class Builder { - private EnumSet actions = EnumSet.of(RuleAction.SKIP_RESULT); - private RuleScope scope = new RuleScope(); - private List conditions = Collections.emptyList(); - - public Builder(RuleScope.Builder scope) { - this.scope = scope.build(); - } - - public Builder(List conditions) { - this.conditions = Objects.requireNonNull(conditions); - } - - Builder() {} - - public Builder setActions(List actions) { - this.actions.clear(); - actions.stream().map(RuleAction::fromString).forEach(this.actions::add); - return this; - } - - public Builder setActions(EnumSet actions) { - this.actions = Objects.requireNonNull(actions, ACTIONS_FIELD.getPreferredName()); - return this; - } - - public Builder setActions(RuleAction... 
actions) { - this.actions.clear(); - Arrays.stream(actions).forEach(this.actions::add); - return this; - } - - public Builder setScope(RuleScope scope) { - this.scope = Objects.requireNonNull(scope); - return this; - } - - public Builder setConditions(List conditions) { - this.conditions = Objects.requireNonNull(conditions); - return this; - } - - public DetectionRule build() { - return new DetectionRule(actions, scope, conditions); - } - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/Detector.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/Detector.java deleted file mode 100644 index f20d67a238008..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/Detector.java +++ /dev/null @@ -1,377 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.job.config; - -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.Collections; -import java.util.List; -import java.util.Locale; -import java.util.Objects; - -/** - * Defines the fields and functions used in the analysis. A combination of field_name, - * by_field_name and over_field_name can be used depending on the specific - * function chosen. For more information see the - * create anomaly detection - * jobs API and detector functions. - */ -public class Detector implements ToXContentObject { - - public enum ExcludeFrequent { - ALL, - NONE, - BY, - OVER; - - /** - * Case-insensitive from string method. - * Works with either ALL, All, etc. 
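For reference, constructing one of the `DetectionRule` objects shown above looked roughly like this; the empty conditions list is just for illustration, and `SKIP_RESULT` is also the default action:

```java
import java.util.Collections;

DetectionRule rule = new DetectionRule.Builder(Collections.emptyList())  // no conditions
    .setActions(RuleAction.SKIP_RESULT)   // varargs overload shown above
    .build();
```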
- * - * @param value String representation - * @return The data format - */ - public static ExcludeFrequent forString(String value) { - return valueOf(value.toUpperCase(Locale.ROOT)); - } - - @Override - public String toString() { - return name().toLowerCase(Locale.ROOT); - } - } - - public static final ParseField DETECTOR_DESCRIPTION_FIELD = new ParseField("detector_description"); - public static final ParseField FUNCTION_FIELD = new ParseField("function"); - public static final ParseField FIELD_NAME_FIELD = new ParseField("field_name"); - public static final ParseField BY_FIELD_NAME_FIELD = new ParseField("by_field_name"); - public static final ParseField OVER_FIELD_NAME_FIELD = new ParseField("over_field_name"); - public static final ParseField PARTITION_FIELD_NAME_FIELD = new ParseField("partition_field_name"); - public static final ParseField USE_NULL_FIELD = new ParseField("use_null"); - public static final ParseField EXCLUDE_FREQUENT_FIELD = new ParseField("exclude_frequent"); - public static final ParseField CUSTOM_RULES_FIELD = new ParseField("custom_rules"); - public static final ParseField DETECTOR_INDEX = new ParseField("detector_index"); - - public static final ObjectParser PARSER = new ObjectParser<>("detector", true, Builder::new); - - static { - PARSER.declareString(Builder::setDetectorDescription, DETECTOR_DESCRIPTION_FIELD); - PARSER.declareString(Builder::setFunction, FUNCTION_FIELD); - PARSER.declareString(Builder::setFieldName, FIELD_NAME_FIELD); - PARSER.declareString(Builder::setByFieldName, BY_FIELD_NAME_FIELD); - PARSER.declareString(Builder::setOverFieldName, OVER_FIELD_NAME_FIELD); - PARSER.declareString(Builder::setPartitionFieldName, PARTITION_FIELD_NAME_FIELD); - PARSER.declareBoolean(Builder::setUseNull, USE_NULL_FIELD); - PARSER.declareString(Builder::setExcludeFrequent, ExcludeFrequent::forString, EXCLUDE_FREQUENT_FIELD); - PARSER.declareObjectArray(Builder::setRules, (p, c) -> DetectionRule.PARSER.apply(p, c).build(), CUSTOM_RULES_FIELD); - PARSER.declareInt(Builder::setDetectorIndex, DETECTOR_INDEX); - } - - private final String detectorDescription; - private final DetectorFunction function; - private final String fieldName; - private final String byFieldName; - private final String overFieldName; - private final String partitionFieldName; - private final boolean useNull; - private final ExcludeFrequent excludeFrequent; - private final List rules; - private final int detectorIndex; - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(DETECTOR_DESCRIPTION_FIELD.getPreferredName(), detectorDescription); - builder.field(FUNCTION_FIELD.getPreferredName(), function); - if (fieldName != null) { - builder.field(FIELD_NAME_FIELD.getPreferredName(), fieldName); - } - if (byFieldName != null) { - builder.field(BY_FIELD_NAME_FIELD.getPreferredName(), byFieldName); - } - if (overFieldName != null) { - builder.field(OVER_FIELD_NAME_FIELD.getPreferredName(), overFieldName); - } - if (partitionFieldName != null) { - builder.field(PARTITION_FIELD_NAME_FIELD.getPreferredName(), partitionFieldName); - } - if (useNull) { - builder.field(USE_NULL_FIELD.getPreferredName(), useNull); - } - if (excludeFrequent != null) { - builder.field(EXCLUDE_FREQUENT_FIELD.getPreferredName(), excludeFrequent); - } - if (rules.isEmpty() == false) { - builder.field(CUSTOM_RULES_FIELD.getPreferredName(), rules); - } - // negative means unknown - if (detectorIndex >= 0) { - 
builder.field(DETECTOR_INDEX.getPreferredName(), detectorIndex); - } - builder.endObject(); - return builder; - } - - private Detector( - String detectorDescription, - DetectorFunction function, - String fieldName, - String byFieldName, - String overFieldName, - String partitionFieldName, - boolean useNull, - ExcludeFrequent excludeFrequent, - List rules, - int detectorIndex - ) { - this.function = function; - this.fieldName = fieldName; - this.byFieldName = byFieldName; - this.overFieldName = overFieldName; - this.partitionFieldName = partitionFieldName; - this.useNull = useNull; - this.excludeFrequent = excludeFrequent; - this.rules = Collections.unmodifiableList(rules); - this.detectorDescription = detectorDescription != null ? detectorDescription : DefaultDetectorDescription.of(this); - this.detectorIndex = detectorIndex; - } - - public String getDetectorDescription() { - return detectorDescription; - } - - /** - * The analysis function used e.g. count, rare, min etc. - * - * @return The function or null if not set - */ - public DetectorFunction getFunction() { - return function; - } - - /** - * The Analysis field - * - * @return The field to analyse - */ - public String getFieldName() { - return fieldName; - } - - /** - * The 'by' field or null if not set. - * - * @return The 'by' field - */ - public String getByFieldName() { - return byFieldName; - } - - /** - * The 'over' field or null if not set. - * - * @return The 'over' field - */ - public String getOverFieldName() { - return overFieldName; - } - - /** - * Segments the analysis along another field to have completely - * independent baselines for each instance of partitionfield - * - * @return The Partition Field - */ - public String getPartitionFieldName() { - return partitionFieldName; - } - - /** - * Where there isn't a value for the 'by' or 'over' field should a new - * series be used as the 'null' series. 
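As the `toXContent` method above shows, only fields that were actually set are written, and a negative `detector_index` (the "unknown" sentinel) is omitted entirely. A minimal count detector therefore serializes to just a description and a function:

```java
Detector detector = new Detector.Builder(DetectorFunction.COUNT, null).build();
// detectorIndex defaults to -1 (unknown), so it is not written:
// {"detector_description":"count","function":"count"}
```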
- * - * @return true if the 'null' series should be created - */ - public boolean isUseNull() { - return useNull; - } - - /** - * Excludes frequently-occurring metrics from the analysis; - * can apply to 'by' field, 'over' field, or both - * - * @return the value that the user set - */ - public ExcludeFrequent getExcludeFrequent() { - return excludeFrequent; - } - - public List getRules() { - return rules; - } - - /** - * @return the detector index or a negative number if unknown - */ - public int getDetectorIndex() { - return detectorIndex; - } - - @Override - public boolean equals(Object other) { - if (this == other) { - return true; - } - - if (other instanceof Detector == false) { - return false; - } - - Detector that = (Detector) other; - - return Objects.equals(this.detectorDescription, that.detectorDescription) - && Objects.equals(this.function, that.function) - && Objects.equals(this.fieldName, that.fieldName) - && Objects.equals(this.byFieldName, that.byFieldName) - && Objects.equals(this.overFieldName, that.overFieldName) - && Objects.equals(this.partitionFieldName, that.partitionFieldName) - && Objects.equals(this.useNull, that.useNull) - && Objects.equals(this.excludeFrequent, that.excludeFrequent) - && Objects.equals(this.rules, that.rules) - && this.detectorIndex == that.detectorIndex; - } - - @Override - public int hashCode() { - return Objects.hash( - detectorDescription, - function, - fieldName, - byFieldName, - overFieldName, - partitionFieldName, - useNull, - excludeFrequent, - rules, - detectorIndex - ); - } - - public static Builder builder() { - return new Builder(); - } - - public static class Builder { - - private String detectorDescription; - private DetectorFunction function; - private String fieldName; - private String byFieldName; - private String overFieldName; - private String partitionFieldName; - private boolean useNull = false; - private ExcludeFrequent excludeFrequent; - private List rules = Collections.emptyList(); - // negative means unknown - private int detectorIndex = -1; - - public Builder() {} - - public Builder(Detector detector) { - detectorDescription = detector.detectorDescription; - function = detector.function; - fieldName = detector.fieldName; - byFieldName = detector.byFieldName; - overFieldName = detector.overFieldName; - partitionFieldName = detector.partitionFieldName; - useNull = detector.useNull; - excludeFrequent = detector.excludeFrequent; - rules = new ArrayList<>(detector.rules); - detectorIndex = detector.detectorIndex; - } - - public Builder(String function, String fieldName) { - this(DetectorFunction.fromString(function), fieldName); - } - - public Builder(DetectorFunction function, String fieldName) { - this.function = function; - this.fieldName = fieldName; - } - - public Builder setDetectorDescription(String detectorDescription) { - this.detectorDescription = detectorDescription; - return this; - } - - public Builder setFunction(String function) { - this.function = DetectorFunction.fromString(function); - return this; - } - - public Builder setFieldName(String fieldName) { - this.fieldName = fieldName; - return this; - } - - public Builder setByFieldName(String byFieldName) { - this.byFieldName = byFieldName; - return this; - } - - public Builder setOverFieldName(String overFieldName) { - this.overFieldName = overFieldName; - return this; - } - - public Builder setPartitionFieldName(String partitionFieldName) { - this.partitionFieldName = partitionFieldName; - return this; - } - - public Builder setUseNull(boolean useNull) { - 
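The shortcut mechanism in `DetectorFunction` above means abbreviated function names in job configs resolve to the same enum constants, for example:

```java
DetectorFunction fn = DetectorFunction.fromString("dc");  // shortcut for distinct_count
assert fn == DetectorFunction.DISTINCT_COUNT;
assert fn.getFullName().equals("distinct_count");
```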
this.useNull = useNull; - return this; - } - - public Builder setExcludeFrequent(ExcludeFrequent excludeFrequent) { - this.excludeFrequent = excludeFrequent; - return this; - } - - public Builder setRules(List rules) { - this.rules = rules; - return this; - } - - public Builder setDetectorIndex(int detectorIndex) { - this.detectorIndex = detectorIndex; - return this; - } - - public Detector build() { - return new Detector( - detectorDescription, - function, - fieldName, - byFieldName, - overFieldName, - partitionFieldName, - useNull, - excludeFrequent, - rules, - detectorIndex - ); - } - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/DetectorFunction.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/DetectorFunction.java deleted file mode 100644 index c33ffffd34f1a..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/DetectorFunction.java +++ /dev/null @@ -1,84 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.job.config; - -import java.util.Arrays; -import java.util.Collections; -import java.util.Locale; -import java.util.Set; -import java.util.stream.Collectors; - -public enum DetectorFunction { - - COUNT, - LOW_COUNT, - HIGH_COUNT, - NON_ZERO_COUNT("nzc"), - LOW_NON_ZERO_COUNT("low_nzc"), - HIGH_NON_ZERO_COUNT("high_nzc"), - DISTINCT_COUNT("dc"), - LOW_DISTINCT_COUNT("low_dc"), - HIGH_DISTINCT_COUNT("high_dc"), - RARE, - FREQ_RARE, - INFO_CONTENT, - LOW_INFO_CONTENT, - HIGH_INFO_CONTENT, - METRIC, - MEAN, - LOW_MEAN, - HIGH_MEAN, - AVG, - LOW_AVG, - HIGH_AVG, - MEDIAN, - LOW_MEDIAN, - HIGH_MEDIAN, - MIN, - MAX, - SUM, - LOW_SUM, - HIGH_SUM, - NON_NULL_SUM, - LOW_NON_NULL_SUM, - HIGH_NON_NULL_SUM, - VARP, - LOW_VARP, - HIGH_VARP, - TIME_OF_DAY, - TIME_OF_WEEK, - LAT_LONG; - - private Set shortcuts; - - DetectorFunction() { - shortcuts = Collections.emptySet(); - } - - DetectorFunction(String... shortcuts) { - this.shortcuts = Arrays.stream(shortcuts).collect(Collectors.toSet()); - } - - public String getFullName() { - return name().toLowerCase(Locale.ROOT); - } - - @Override - public String toString() { - return getFullName(); - } - - public static DetectorFunction fromString(String op) { - for (DetectorFunction function : values()) { - if (function.getFullName().equals(op) || function.shortcuts.contains(op)) { - return function; - } - } - throw new IllegalArgumentException("Unknown detector function [" + op + "]"); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/FilterRef.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/FilterRef.java deleted file mode 100644 index e9c0fbece98c3..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/FilterRef.java +++ /dev/null @@ -1,93 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. 
Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.job.config; - -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Locale; -import java.util.Objects; - -public class FilterRef implements ToXContentObject { - - public static final ParseField FILTER_REF_FIELD = new ParseField("filter_ref"); - public static final ParseField FILTER_ID = new ParseField("filter_id"); - public static final ParseField FILTER_TYPE = new ParseField("filter_type"); - - public enum FilterType { - INCLUDE, - EXCLUDE; - - public static FilterType fromString(String value) { - return valueOf(value.toUpperCase(Locale.ROOT)); - } - - @Override - public String toString() { - return name().toLowerCase(Locale.ROOT); - } - } - - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - FILTER_REF_FIELD.getPreferredName(), - true, - a -> new FilterRef((String) a[0], (FilterType) a[1]) - ); - - static { - PARSER.declareString(ConstructingObjectParser.constructorArg(), FILTER_ID); - PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), FilterType::fromString, FILTER_TYPE); - } - - private final String filterId; - private final FilterType filterType; - - public FilterRef(String filterId, FilterType filterType) { - this.filterId = Objects.requireNonNull(filterId); - this.filterType = filterType == null ? FilterType.INCLUDE : filterType; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(FILTER_ID.getPreferredName(), filterId); - builder.field(FILTER_TYPE.getPreferredName(), filterType); - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object obj) { - if (this == obj) { - return true; - } - - if (obj instanceof FilterRef == false) { - return false; - } - - FilterRef other = (FilterRef) obj; - return Objects.equals(filterId, other.filterId) && Objects.equals(filterType, other.filterType); - } - - @Override - public int hashCode() { - return Objects.hash(filterId, filterType); - } - - public String getFilterId() { - return filterId; - } - - public FilterType getFilterType() { - return filterType; - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/Job.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/Job.java deleted file mode 100644 index cfea39be07735..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/Job.java +++ /dev/null @@ -1,627 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ -package org.elasticsearch.client.ml.job.config; - -import org.elasticsearch.client.common.TimeUtil; -import org.elasticsearch.common.Strings; -import org.elasticsearch.core.TimeValue; -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ObjectParser.ValueType; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.Collections; -import java.util.Date; -import java.util.LinkedHashMap; -import java.util.List; -import java.util.Map; -import java.util.Objects; - -/** - * This class represents a configured and created Job. The creation time is set - * to the time the object was constructed and the finished time and last - * data time fields are {@code null} until the job has seen some data or it is - * finished respectively. - */ -public class Job implements ToXContentObject { - - public static final String ANOMALY_DETECTOR_JOB_TYPE = "anomaly_detector"; - - /* - * Field names used in serialization - */ - public static final ParseField ID = new ParseField("job_id"); - public static final ParseField JOB_TYPE = new ParseField("job_type"); - public static final ParseField GROUPS = new ParseField("groups"); - public static final ParseField ANALYSIS_CONFIG = AnalysisConfig.ANALYSIS_CONFIG; - public static final ParseField ANALYSIS_LIMITS = new ParseField("analysis_limits"); - public static final ParseField CREATE_TIME = new ParseField("create_time"); - public static final ParseField CUSTOM_SETTINGS = new ParseField("custom_settings"); - public static final ParseField DATA_DESCRIPTION = new ParseField("data_description"); - public static final ParseField DESCRIPTION = new ParseField("description"); - public static final ParseField FINISHED_TIME = new ParseField("finished_time"); - public static final ParseField MODEL_PLOT_CONFIG = new ParseField("model_plot_config"); - public static final ParseField RENORMALIZATION_WINDOW_DAYS = new ParseField("renormalization_window_days"); - public static final ParseField BACKGROUND_PERSIST_INTERVAL = new ParseField("background_persist_interval"); - public static final ParseField MODEL_SNAPSHOT_RETENTION_DAYS = new ParseField("model_snapshot_retention_days"); - public static final ParseField DAILY_MODEL_SNAPSHOT_RETENTION_AFTER_DAYS = new ParseField("daily_model_snapshot_retention_after_days"); - public static final ParseField RESULTS_RETENTION_DAYS = new ParseField("results_retention_days"); - public static final ParseField MODEL_SNAPSHOT_ID = new ParseField("model_snapshot_id"); - public static final ParseField RESULTS_INDEX_NAME = new ParseField("results_index_name"); - public static final ParseField DELETING = new ParseField("deleting"); - public static final ParseField ALLOW_LAZY_OPEN = new ParseField("allow_lazy_open"); - - public static final ObjectParser PARSER = new ObjectParser<>("job_details", true, Builder::new); - - static { - PARSER.declareString(Builder::setId, ID); - PARSER.declareString(Builder::setJobType, JOB_TYPE); - PARSER.declareStringArray(Builder::setGroups, GROUPS); - PARSER.declareStringOrNull(Builder::setDescription, DESCRIPTION); - PARSER.declareField( - Builder::setCreateTime, - (p) -> TimeUtil.parseTimeField(p, CREATE_TIME.getPreferredName()), - CREATE_TIME, - ValueType.VALUE - ); - PARSER.declareField( - Builder::setFinishedTime, - (p) -> TimeUtil.parseTimeField(p, FINISHED_TIME.getPreferredName()), - FINISHED_TIME, - 
ValueType.VALUE - ); - PARSER.declareObject(Builder::setAnalysisConfig, AnalysisConfig.PARSER, ANALYSIS_CONFIG); - PARSER.declareObject(Builder::setAnalysisLimits, AnalysisLimits.PARSER, ANALYSIS_LIMITS); - PARSER.declareObject(Builder::setDataDescription, DataDescription.PARSER, DATA_DESCRIPTION); - PARSER.declareObject(Builder::setModelPlotConfig, ModelPlotConfig.PARSER, MODEL_PLOT_CONFIG); - PARSER.declareLong(Builder::setRenormalizationWindowDays, RENORMALIZATION_WINDOW_DAYS); - PARSER.declareString( - (builder, val) -> builder.setBackgroundPersistInterval( - TimeValue.parseTimeValue(val, BACKGROUND_PERSIST_INTERVAL.getPreferredName()) - ), - BACKGROUND_PERSIST_INTERVAL - ); - PARSER.declareLong(Builder::setResultsRetentionDays, RESULTS_RETENTION_DAYS); - PARSER.declareLong(Builder::setModelSnapshotRetentionDays, MODEL_SNAPSHOT_RETENTION_DAYS); - PARSER.declareLong(Builder::setDailyModelSnapshotRetentionAfterDays, DAILY_MODEL_SNAPSHOT_RETENTION_AFTER_DAYS); - PARSER.declareField(Builder::setCustomSettings, (p, c) -> p.mapOrdered(), CUSTOM_SETTINGS, ValueType.OBJECT); - PARSER.declareStringOrNull(Builder::setModelSnapshotId, MODEL_SNAPSHOT_ID); - PARSER.declareString(Builder::setResultsIndexName, RESULTS_INDEX_NAME); - PARSER.declareBoolean(Builder::setDeleting, DELETING); - PARSER.declareBoolean(Builder::setAllowLazyOpen, ALLOW_LAZY_OPEN); - } - - private final String jobId; - private final String jobType; - - private final List groups; - private final String description; - private final Date createTime; - private final Date finishedTime; - private final AnalysisConfig analysisConfig; - private final AnalysisLimits analysisLimits; - private final DataDescription dataDescription; - private final ModelPlotConfig modelPlotConfig; - private final Long renormalizationWindowDays; - private final TimeValue backgroundPersistInterval; - private final Long modelSnapshotRetentionDays; - private final Long dailyModelSnapshotRetentionAfterDays; - private final Long resultsRetentionDays; - private final Map customSettings; - private final String modelSnapshotId; - private final String resultsIndexName; - private final Boolean deleting; - private final Boolean allowLazyOpen; - - private Job( - String jobId, - String jobType, - List groups, - String description, - Date createTime, - Date finishedTime, - AnalysisConfig analysisConfig, - AnalysisLimits analysisLimits, - DataDescription dataDescription, - ModelPlotConfig modelPlotConfig, - Long renormalizationWindowDays, - TimeValue backgroundPersistInterval, - Long modelSnapshotRetentionDays, - Long dailyModelSnapshotRetentionAfterDays, - Long resultsRetentionDays, - Map customSettings, - String modelSnapshotId, - String resultsIndexName, - Boolean deleting, - Boolean allowLazyOpen - ) { - - this.jobId = jobId; - this.jobType = jobType; - this.groups = Collections.unmodifiableList(groups); - this.description = description; - this.createTime = createTime; - this.finishedTime = finishedTime; - this.analysisConfig = analysisConfig; - this.analysisLimits = analysisLimits; - this.dataDescription = dataDescription; - this.modelPlotConfig = modelPlotConfig; - this.renormalizationWindowDays = renormalizationWindowDays; - this.backgroundPersistInterval = backgroundPersistInterval; - this.modelSnapshotRetentionDays = modelSnapshotRetentionDays; - this.dailyModelSnapshotRetentionAfterDays = dailyModelSnapshotRetentionAfterDays; - this.resultsRetentionDays = resultsRetentionDays; - this.customSettings = customSettings == null ? 
null : Collections.unmodifiableMap(customSettings); - this.modelSnapshotId = modelSnapshotId; - this.resultsIndexName = resultsIndexName; - this.deleting = deleting; - this.allowLazyOpen = allowLazyOpen; - } - - /** - * Return the Job Id. - * - * @return The job Id string - */ - public String getId() { - return jobId; - } - - public String getJobType() { - return jobType; - } - - public List getGroups() { - return groups; - } - - /** - * Private version of getResultsIndexName so that a job can be built from another - * job and pass index name validation - * - * @return The job's index name, minus prefix - */ - private String getResultsIndexNameNoPrefix() { - return resultsIndexName; - } - - /** - * The job description - * - * @return job description - */ - public String getDescription() { - return description; - } - - /** - * The Job creation time. This name is preferred when serialising to the - * REST API. - * - * @return The date the job was created - */ - public Date getCreateTime() { - return createTime; - } - - /** - * The time the job was finished or null if not finished. - * - * @return The date the job was last retired or null - */ - public Date getFinishedTime() { - return finishedTime; - } - - /** - * The analysis configuration object - * - * @return The AnalysisConfig - */ - public AnalysisConfig getAnalysisConfig() { - return analysisConfig; - } - - /** - * The analysis options object - * - * @return The AnalysisLimits - */ - public AnalysisLimits getAnalysisLimits() { - return analysisLimits; - } - - public ModelPlotConfig getModelPlotConfig() { - return modelPlotConfig; - } - - /** - * If not set the input data is assumed to be csv with a '_time' field in - * epoch format. - * - * @return A DataDescription or null - * @see DataDescription - */ - public DataDescription getDataDescription() { - return dataDescription; - } - - /** - * The duration of the renormalization window in days - * - * @return renormalization window in days - */ - public Long getRenormalizationWindowDays() { - return renormalizationWindowDays; - } - - /** - * The background persistence interval - * - * @return background persistence interval - */ - public TimeValue getBackgroundPersistInterval() { - return backgroundPersistInterval; - } - - public Long getModelSnapshotRetentionDays() { - return modelSnapshotRetentionDays; - } - - public Long getDailyModelSnapshotRetentionAfterDays() { - return dailyModelSnapshotRetentionAfterDays; - } - - public Long getResultsRetentionDays() { - return resultsRetentionDays; - } - - public Map getCustomSettings() { - return customSettings; - } - - public String getModelSnapshotId() { - return modelSnapshotId; - } - - public Boolean getDeleting() { - return deleting; - } - - public Boolean getAllowLazyOpen() { - return allowLazyOpen; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - final String humanReadableSuffix = "_string"; - - builder.field(ID.getPreferredName(), jobId); - builder.field(JOB_TYPE.getPreferredName(), jobType); - - if (groups.isEmpty() == false) { - builder.field(GROUPS.getPreferredName(), groups); - } - if (description != null) { - builder.field(DESCRIPTION.getPreferredName(), description); - } - if (createTime != null) { - builder.timeField(CREATE_TIME.getPreferredName(), CREATE_TIME.getPreferredName() + humanReadableSuffix, createTime.getTime()); - } - if (finishedTime != null) { - builder.timeField( - FINISHED_TIME.getPreferredName(), - 
FINISHED_TIME.getPreferredName() + humanReadableSuffix, - finishedTime.getTime() - ); - } - builder.field(ANALYSIS_CONFIG.getPreferredName(), analysisConfig, params); - if (analysisLimits != null) { - builder.field(ANALYSIS_LIMITS.getPreferredName(), analysisLimits, params); - } - if (dataDescription != null) { - builder.field(DATA_DESCRIPTION.getPreferredName(), dataDescription, params); - } - if (modelPlotConfig != null) { - builder.field(MODEL_PLOT_CONFIG.getPreferredName(), modelPlotConfig, params); - } - if (renormalizationWindowDays != null) { - builder.field(RENORMALIZATION_WINDOW_DAYS.getPreferredName(), renormalizationWindowDays); - } - if (backgroundPersistInterval != null) { - builder.field(BACKGROUND_PERSIST_INTERVAL.getPreferredName(), backgroundPersistInterval.getStringRep()); - } - if (modelSnapshotRetentionDays != null) { - builder.field(MODEL_SNAPSHOT_RETENTION_DAYS.getPreferredName(), modelSnapshotRetentionDays); - } - if (dailyModelSnapshotRetentionAfterDays != null) { - builder.field(DAILY_MODEL_SNAPSHOT_RETENTION_AFTER_DAYS.getPreferredName(), dailyModelSnapshotRetentionAfterDays); - } - if (resultsRetentionDays != null) { - builder.field(RESULTS_RETENTION_DAYS.getPreferredName(), resultsRetentionDays); - } - if (customSettings != null) { - builder.field(CUSTOM_SETTINGS.getPreferredName(), customSettings); - } - if (modelSnapshotId != null) { - builder.field(MODEL_SNAPSHOT_ID.getPreferredName(), modelSnapshotId); - } - if (resultsIndexName != null) { - builder.field(RESULTS_INDEX_NAME.getPreferredName(), resultsIndexName); - } - if (deleting != null) { - builder.field(DELETING.getPreferredName(), deleting); - } - if (allowLazyOpen != null) { - builder.field(ALLOW_LAZY_OPEN.getPreferredName(), allowLazyOpen); - } - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object other) { - if (this == other) { - return true; - } - - if (other == null || getClass() != other.getClass()) { - return false; - } - - Job that = (Job) other; - return Objects.equals(this.jobId, that.jobId) - && Objects.equals(this.jobType, that.jobType) - && Objects.equals(this.groups, that.groups) - && Objects.equals(this.description, that.description) - && Objects.equals(this.createTime, that.createTime) - && Objects.equals(this.finishedTime, that.finishedTime) - && Objects.equals(this.analysisConfig, that.analysisConfig) - && Objects.equals(this.analysisLimits, that.analysisLimits) - && Objects.equals(this.dataDescription, that.dataDescription) - && Objects.equals(this.modelPlotConfig, that.modelPlotConfig) - && Objects.equals(this.renormalizationWindowDays, that.renormalizationWindowDays) - && Objects.equals(this.backgroundPersistInterval, that.backgroundPersistInterval) - && Objects.equals(this.modelSnapshotRetentionDays, that.modelSnapshotRetentionDays) - && Objects.equals(this.dailyModelSnapshotRetentionAfterDays, that.dailyModelSnapshotRetentionAfterDays) - && Objects.equals(this.resultsRetentionDays, that.resultsRetentionDays) - && Objects.equals(this.customSettings, that.customSettings) - && Objects.equals(this.modelSnapshotId, that.modelSnapshotId) - && Objects.equals(this.resultsIndexName, that.resultsIndexName) - && Objects.equals(this.deleting, that.deleting) - && Objects.equals(this.allowLazyOpen, that.allowLazyOpen); - } - - @Override - public int hashCode() { - return Objects.hash( - jobId, - jobType, - groups, - description, - createTime, - finishedTime, - analysisConfig, - analysisLimits, - dataDescription, - modelPlotConfig, - renormalizationWindowDays, 
- backgroundPersistInterval, - modelSnapshotRetentionDays, - dailyModelSnapshotRetentionAfterDays, - resultsRetentionDays, - customSettings, - modelSnapshotId, - resultsIndexName, - deleting, - allowLazyOpen - ); - } - - @Override - public final String toString() { - return Strings.toString(this); - } - - public static Builder builder(String id) { - return new Builder(id); - } - - public static class Builder { - - private String id; - private String jobType = ANOMALY_DETECTOR_JOB_TYPE; - private List groups = Collections.emptyList(); - private String description; - private AnalysisConfig analysisConfig; - private AnalysisLimits analysisLimits; - private DataDescription dataDescription; - private Date createTime; - private Date finishedTime; - private ModelPlotConfig modelPlotConfig; - private Long renormalizationWindowDays; - private TimeValue backgroundPersistInterval; - private Long modelSnapshotRetentionDays; - private Long dailyModelSnapshotRetentionAfterDays; - private Long resultsRetentionDays; - private Map customSettings; - private String modelSnapshotId; - private String resultsIndexName; - private Boolean deleting; - private Boolean allowLazyOpen; - - private Builder() {} - - public Builder(String id) { - this.id = id; - } - - public Builder(Job job) { - this.id = job.getId(); - this.jobType = job.getJobType(); - this.groups = new ArrayList<>(job.getGroups()); - this.description = job.getDescription(); - this.analysisConfig = job.getAnalysisConfig(); - this.analysisLimits = job.getAnalysisLimits(); - this.dataDescription = job.getDataDescription(); - this.createTime = job.getCreateTime(); - this.finishedTime = job.getFinishedTime(); - this.modelPlotConfig = job.getModelPlotConfig(); - this.renormalizationWindowDays = job.getRenormalizationWindowDays(); - this.backgroundPersistInterval = job.getBackgroundPersistInterval(); - this.modelSnapshotRetentionDays = job.getModelSnapshotRetentionDays(); - this.dailyModelSnapshotRetentionAfterDays = job.getDailyModelSnapshotRetentionAfterDays(); - this.resultsRetentionDays = job.getResultsRetentionDays(); - this.customSettings = job.getCustomSettings() == null ? null : new LinkedHashMap<>(job.getCustomSettings()); - this.modelSnapshotId = job.getModelSnapshotId(); - this.resultsIndexName = job.getResultsIndexNameNoPrefix(); - this.deleting = job.getDeleting(); - this.allowLazyOpen = job.getAllowLazyOpen(); - } - - public Builder setId(String id) { - this.id = id; - return this; - } - - public String getId() { - return id; - } - - public Builder setJobType(String jobType) { - this.jobType = jobType; - return this; - } - - public Builder setGroups(List groups) { - this.groups = groups == null ? 
Collections.emptyList() : groups; - return this; - } - - public Builder setCustomSettings(Map customSettings) { - this.customSettings = customSettings; - return this; - } - - public Builder setDescription(String description) { - this.description = description; - return this; - } - - public Builder setAnalysisConfig(AnalysisConfig.Builder configBuilder) { - analysisConfig = Objects.requireNonNull(configBuilder, ANALYSIS_CONFIG.getPreferredName()).build(); - return this; - } - - public Builder setAnalysisLimits(AnalysisLimits analysisLimits) { - this.analysisLimits = Objects.requireNonNull(analysisLimits, ANALYSIS_LIMITS.getPreferredName()); - return this; - } - - Builder setCreateTime(Date createTime) { - this.createTime = createTime; - return this; - } - - Builder setFinishedTime(Date finishedTime) { - this.finishedTime = finishedTime; - return this; - } - - public Builder setDataDescription(DataDescription.Builder descriptionBuilder) { - dataDescription = Objects.requireNonNull(descriptionBuilder, DATA_DESCRIPTION.getPreferredName()).build(); - return this; - } - - public Builder setModelPlotConfig(ModelPlotConfig modelPlotConfig) { - this.modelPlotConfig = modelPlotConfig; - return this; - } - - public Builder setBackgroundPersistInterval(TimeValue backgroundPersistInterval) { - this.backgroundPersistInterval = backgroundPersistInterval; - return this; - } - - public Builder setRenormalizationWindowDays(Long renormalizationWindowDays) { - this.renormalizationWindowDays = renormalizationWindowDays; - return this; - } - - public Builder setModelSnapshotRetentionDays(Long modelSnapshotRetentionDays) { - this.modelSnapshotRetentionDays = modelSnapshotRetentionDays; - return this; - } - - public Builder setDailyModelSnapshotRetentionAfterDays(Long dailyModelSnapshotRetentionAfterDays) { - this.dailyModelSnapshotRetentionAfterDays = dailyModelSnapshotRetentionAfterDays; - return this; - } - - public Builder setResultsRetentionDays(Long resultsRetentionDays) { - this.resultsRetentionDays = resultsRetentionDays; - return this; - } - - public Builder setModelSnapshotId(String modelSnapshotId) { - this.modelSnapshotId = modelSnapshotId; - return this; - } - - public Builder setResultsIndexName(String resultsIndexName) { - this.resultsIndexName = resultsIndexName; - return this; - } - - Builder setDeleting(Boolean deleting) { - this.deleting = deleting; - return this; - } - - Builder setAllowLazyOpen(Boolean allowLazyOpen) { - this.allowLazyOpen = allowLazyOpen; - return this; - } - - /** - * Builds a job. 
- * - * @return The job - */ - public Job build() { - Objects.requireNonNull(id, "[" + ID.getPreferredName() + "] must not be null"); - Objects.requireNonNull(jobType, "[" + JOB_TYPE.getPreferredName() + "] must not be null"); - return new Job( - id, - jobType, - groups, - description, - createTime, - finishedTime, - analysisConfig, - analysisLimits, - dataDescription, - modelPlotConfig, - renormalizationWindowDays, - backgroundPersistInterval, - modelSnapshotRetentionDays, - dailyModelSnapshotRetentionAfterDays, - resultsRetentionDays, - customSettings, - modelSnapshotId, - resultsIndexName, - deleting, - allowLazyOpen - ); - } - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/JobState.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/JobState.java deleted file mode 100644 index 1a248ef137d53..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/JobState.java +++ /dev/null @@ -1,32 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.job.config; - -import java.util.Locale; - -/** - * Jobs whether running or complete are in one of these states. - * When a job is created it is initialised in the state closed - * i.e. it is not running. - */ -public enum JobState { - - CLOSING, - CLOSED, - OPENED, - FAILED, - OPENING; - - public static JobState fromString(String name) { - return valueOf(name.trim().toUpperCase(Locale.ROOT)); - } - - public String value() { - return name().toLowerCase(Locale.ROOT); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/JobUpdate.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/JobUpdate.java deleted file mode 100644 index f0d70a2509a39..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/JobUpdate.java +++ /dev/null @@ -1,588 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
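The Job class and Builder deleted above expose a fluent configuration API whose build() enforces only a non-null job_id and job_type (job_type defaults to "anomaly_detector"). A server-side job additionally requires an analysis_config and data_description, supplied through setAnalysisConfig(AnalysisConfig.Builder) and setDataDescription(DataDescription.Builder); those nested builders live elsewhere in this patch, so this sketch exercises only the members shown here:

import org.elasticsearch.client.ml.job.config.Job;
import org.elasticsearch.client.ml.job.config.JobState;
import org.elasticsearch.core.TimeValue;

import java.util.List;

public class JobConfigDemo {
    public static void main(String[] args) {
        Job job = Job.builder("event-rate-job")                        // illustrative job id
            .setDescription("Hourly event-rate anomaly detection")
            .setGroups(List.of("ops"))
            .setBackgroundPersistInterval(TimeValue.timeValueHours(1))
            .setResultsRetentionDays(30L)
            .build();                                                  // validates only id and job_type

        System.out.println(job.getId() + " type=" + job.getJobType()); // type=anomaly_detector

        // The companion JobState enum parses case-insensitively.
        JobState state = JobState.fromString("opened");
        System.out.println(state.value());                             // opened
    }
}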
- */ -package org.elasticsearch.client.ml.job.config; - -import org.elasticsearch.core.Nullable; -import org.elasticsearch.core.TimeValue; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.List; -import java.util.Map; -import java.util.Objects; - -/** - * POJO for updating an existing Machine Learning {@link Job} - */ -public class JobUpdate implements ToXContentObject { - public static final ParseField DETECTORS = new ParseField("detectors"); - - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "job_update", - true, - args -> new Builder((String) args[0]) - ); - - static { - PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), Job.ID); - PARSER.declareStringArray(Builder::setGroups, Job.GROUPS); - PARSER.declareStringOrNull(Builder::setDescription, Job.DESCRIPTION); - PARSER.declareObjectArray(Builder::setDetectorUpdates, DetectorUpdate.PARSER, DETECTORS); - PARSER.declareObject(Builder::setModelPlotConfig, ModelPlotConfig.PARSER, Job.MODEL_PLOT_CONFIG); - PARSER.declareObject(Builder::setAnalysisLimits, AnalysisLimits.PARSER, Job.ANALYSIS_LIMITS); - PARSER.declareString( - (builder, val) -> builder.setBackgroundPersistInterval( - TimeValue.parseTimeValue(val, Job.BACKGROUND_PERSIST_INTERVAL.getPreferredName()) - ), - Job.BACKGROUND_PERSIST_INTERVAL - ); - PARSER.declareLong(Builder::setRenormalizationWindowDays, Job.RENORMALIZATION_WINDOW_DAYS); - PARSER.declareLong(Builder::setResultsRetentionDays, Job.RESULTS_RETENTION_DAYS); - PARSER.declareLong(Builder::setModelSnapshotRetentionDays, Job.MODEL_SNAPSHOT_RETENTION_DAYS); - PARSER.declareLong(Builder::setDailyModelSnapshotRetentionAfterDays, Job.DAILY_MODEL_SNAPSHOT_RETENTION_AFTER_DAYS); - PARSER.declareStringArray(Builder::setCategorizationFilters, AnalysisConfig.CATEGORIZATION_FILTERS); - PARSER.declareObject( - Builder::setPerPartitionCategorizationConfig, - PerPartitionCategorizationConfig.PARSER, - AnalysisConfig.PER_PARTITION_CATEGORIZATION - ); - PARSER.declareField(Builder::setCustomSettings, (p, c) -> p.map(), Job.CUSTOM_SETTINGS, ObjectParser.ValueType.OBJECT); - PARSER.declareBoolean(Builder::setAllowLazyOpen, Job.ALLOW_LAZY_OPEN); - PARSER.declareString( - (builder, val) -> builder.setModelPruneWindow( - TimeValue.parseTimeValue(val, AnalysisConfig.MODEL_PRUNE_WINDOW.getPreferredName()) - ), - AnalysisConfig.MODEL_PRUNE_WINDOW - ); - } - - private final String jobId; - private final List groups; - private final String description; - private final List detectorUpdates; - private final ModelPlotConfig modelPlotConfig; - private final AnalysisLimits analysisLimits; - private final Long renormalizationWindowDays; - private final TimeValue backgroundPersistInterval; - private final Long modelSnapshotRetentionDays; - private final Long dailyModelSnapshotRetentionAfterDays; - private final Long resultsRetentionDays; - private final List categorizationFilters; - private final PerPartitionCategorizationConfig perPartitionCategorizationConfig; - private final Map customSettings; - private final Boolean allowLazyOpen; - private final TimeValue modelPruneWindow; - - private JobUpdate( - String jobId, - @Nullable List groups, - @Nullable String description, - @Nullable List detectorUpdates, - @Nullable ModelPlotConfig 
modelPlotConfig, - @Nullable AnalysisLimits analysisLimits, - @Nullable TimeValue backgroundPersistInterval, - @Nullable Long renormalizationWindowDays, - @Nullable Long resultsRetentionDays, - @Nullable Long modelSnapshotRetentionDays, - @Nullable Long dailyModelSnapshotRetentionAfterDays, - @Nullable List categorizationFilters, - @Nullable PerPartitionCategorizationConfig perPartitionCategorizationConfig, - @Nullable Map customSettings, - @Nullable Boolean allowLazyOpen, - @Nullable TimeValue modelPruneWindow - ) { - this.jobId = jobId; - this.groups = groups; - this.description = description; - this.detectorUpdates = detectorUpdates; - this.modelPlotConfig = modelPlotConfig; - this.analysisLimits = analysisLimits; - this.renormalizationWindowDays = renormalizationWindowDays; - this.backgroundPersistInterval = backgroundPersistInterval; - this.modelSnapshotRetentionDays = modelSnapshotRetentionDays; - this.dailyModelSnapshotRetentionAfterDays = dailyModelSnapshotRetentionAfterDays; - this.resultsRetentionDays = resultsRetentionDays; - this.categorizationFilters = categorizationFilters; - this.perPartitionCategorizationConfig = perPartitionCategorizationConfig; - this.customSettings = customSettings; - this.allowLazyOpen = allowLazyOpen; - this.modelPruneWindow = modelPruneWindow; - } - - public String getJobId() { - return jobId; - } - - public List getGroups() { - return groups; - } - - public String getDescription() { - return description; - } - - public List getDetectorUpdates() { - return detectorUpdates; - } - - public ModelPlotConfig getModelPlotConfig() { - return modelPlotConfig; - } - - public AnalysisLimits getAnalysisLimits() { - return analysisLimits; - } - - public Long getRenormalizationWindowDays() { - return renormalizationWindowDays; - } - - public TimeValue getBackgroundPersistInterval() { - return backgroundPersistInterval; - } - - public Long getModelSnapshotRetentionDays() { - return modelSnapshotRetentionDays; - } - - public Long getResultsRetentionDays() { - return resultsRetentionDays; - } - - public List getCategorizationFilters() { - return categorizationFilters; - } - - public PerPartitionCategorizationConfig getPerPartitionCategorizationConfig() { - return perPartitionCategorizationConfig; - } - - public Map getCustomSettings() { - return customSettings; - } - - public Boolean getAllowLazyOpen() { - return allowLazyOpen; - } - - public TimeValue getModelPruneWindow() { - return modelPruneWindow; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(Job.ID.getPreferredName(), jobId); - if (groups != null) { - builder.field(Job.GROUPS.getPreferredName(), groups); - } - if (description != null) { - builder.field(Job.DESCRIPTION.getPreferredName(), description); - } - if (detectorUpdates != null) { - builder.field(DETECTORS.getPreferredName(), detectorUpdates); - } - if (modelPlotConfig != null) { - builder.field(Job.MODEL_PLOT_CONFIG.getPreferredName(), modelPlotConfig); - } - if (analysisLimits != null) { - builder.field(Job.ANALYSIS_LIMITS.getPreferredName(), analysisLimits); - } - if (renormalizationWindowDays != null) { - builder.field(Job.RENORMALIZATION_WINDOW_DAYS.getPreferredName(), renormalizationWindowDays); - } - if (backgroundPersistInterval != null) { - builder.field(Job.BACKGROUND_PERSIST_INTERVAL.getPreferredName(), backgroundPersistInterval); - } - if (modelSnapshotRetentionDays != null) { - 
builder.field(Job.MODEL_SNAPSHOT_RETENTION_DAYS.getPreferredName(), modelSnapshotRetentionDays); - } - if (dailyModelSnapshotRetentionAfterDays != null) { - builder.field(Job.DAILY_MODEL_SNAPSHOT_RETENTION_AFTER_DAYS.getPreferredName(), dailyModelSnapshotRetentionAfterDays); - } - if (resultsRetentionDays != null) { - builder.field(Job.RESULTS_RETENTION_DAYS.getPreferredName(), resultsRetentionDays); - } - if (categorizationFilters != null) { - builder.field(AnalysisConfig.CATEGORIZATION_FILTERS.getPreferredName(), categorizationFilters); - } - if (perPartitionCategorizationConfig != null) { - builder.field(AnalysisConfig.PER_PARTITION_CATEGORIZATION.getPreferredName(), perPartitionCategorizationConfig); - } - if (customSettings != null) { - builder.field(Job.CUSTOM_SETTINGS.getPreferredName(), customSettings); - } - if (allowLazyOpen != null) { - builder.field(Job.ALLOW_LAZY_OPEN.getPreferredName(), allowLazyOpen); - } - if (modelPruneWindow != null) { - builder.field(AnalysisConfig.MODEL_PRUNE_WINDOW.getPreferredName(), modelPruneWindow); - } - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object other) { - if (this == other) { - return true; - } - - if (other == null || getClass() != other.getClass()) { - return false; - } - - JobUpdate that = (JobUpdate) other; - - return Objects.equals(this.jobId, that.jobId) - && Objects.equals(this.groups, that.groups) - && Objects.equals(this.description, that.description) - && Objects.equals(this.detectorUpdates, that.detectorUpdates) - && Objects.equals(this.modelPlotConfig, that.modelPlotConfig) - && Objects.equals(this.analysisLimits, that.analysisLimits) - && Objects.equals(this.renormalizationWindowDays, that.renormalizationWindowDays) - && Objects.equals(this.backgroundPersistInterval, that.backgroundPersistInterval) - && Objects.equals(this.modelSnapshotRetentionDays, that.modelSnapshotRetentionDays) - && Objects.equals(this.dailyModelSnapshotRetentionAfterDays, that.dailyModelSnapshotRetentionAfterDays) - && Objects.equals(this.resultsRetentionDays, that.resultsRetentionDays) - && Objects.equals(this.categorizationFilters, that.categorizationFilters) - && Objects.equals(this.perPartitionCategorizationConfig, that.perPartitionCategorizationConfig) - && Objects.equals(this.customSettings, that.customSettings) - && Objects.equals(this.allowLazyOpen, that.allowLazyOpen) - && Objects.equals(this.modelPruneWindow, that.modelPruneWindow); - } - - @Override - public int hashCode() { - return Objects.hash( - jobId, - groups, - description, - detectorUpdates, - modelPlotConfig, - analysisLimits, - renormalizationWindowDays, - backgroundPersistInterval, - modelSnapshotRetentionDays, - dailyModelSnapshotRetentionAfterDays, - resultsRetentionDays, - categorizationFilters, - perPartitionCategorizationConfig, - customSettings, - allowLazyOpen, - modelPruneWindow - ); - } - - public static class DetectorUpdate implements ToXContentObject { - @SuppressWarnings("unchecked") - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "detector_update", - true, - a -> new DetectorUpdate((int) a[0], (String) a[1], (List) a[2]) - ); - - static { - PARSER.declareInt(ConstructingObjectParser.optionalConstructorArg(), Detector.DETECTOR_INDEX); - PARSER.declareStringOrNull(ConstructingObjectParser.optionalConstructorArg(), Job.DESCRIPTION); - PARSER.declareObjectArray( - ConstructingObjectParser.optionalConstructorArg(), - (parser, parseFieldMatcher) -> DetectionRule.PARSER.apply(parser, 
parseFieldMatcher).build(), - Detector.CUSTOM_RULES_FIELD - ); - } - - private final int detectorIndex; - private final String description; - private final List rules; - - /** - * A detector update to apply to the Machine Learning Job - * - * @param detectorIndex The identifier of the detector to update. - * @param description The new description for the detector. - * @param rules The new list of rules for the detector. - */ - public DetectorUpdate(int detectorIndex, String description, List rules) { - this.detectorIndex = detectorIndex; - this.description = description; - this.rules = rules; - } - - public int getDetectorIndex() { - return detectorIndex; - } - - public String getDescription() { - return description; - } - - public List getRules() { - return rules; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - - builder.field(Detector.DETECTOR_INDEX.getPreferredName(), detectorIndex); - if (description != null) { - builder.field(Job.DESCRIPTION.getPreferredName(), description); - } - if (rules != null) { - builder.field(Detector.CUSTOM_RULES_FIELD.getPreferredName(), rules); - } - builder.endObject(); - - return builder; - } - - @Override - public int hashCode() { - return Objects.hash(detectorIndex, description, rules); - } - - @Override - public boolean equals(Object other) { - if (this == other) { - return true; - } - - if (other == null || getClass() != other.getClass()) { - return false; - } - - DetectorUpdate that = (DetectorUpdate) other; - return this.detectorIndex == that.detectorIndex - && Objects.equals(this.description, that.description) - && Objects.equals(this.rules, that.rules); - } - } - - public static class Builder { - - private final String jobId; - private List groups; - private String description; - private List detectorUpdates; - private ModelPlotConfig modelPlotConfig; - private AnalysisLimits analysisLimits; - private Long renormalizationWindowDays; - private TimeValue backgroundPersistInterval; - private Long modelSnapshotRetentionDays; - private Long dailyModelSnapshotRetentionAfterDays; - private Long resultsRetentionDays; - private List categorizationFilters; - private PerPartitionCategorizationConfig perPartitionCategorizationConfig; - private Map customSettings; - private Boolean allowLazyOpen; - private TimeValue modelPruneWindow; - - /** - * New {@link JobUpdate.Builder} object for the existing job - * - * @param jobId non-null `jobId` for referencing an exising {@link Job} - */ - public Builder(String jobId) { - this.jobId = jobId; - } - - /** - * Set the job groups - * - * Updates the {@link Job#groups} setting - * - * @param groups A list of group names - */ - public Builder setGroups(List groups) { - this.groups = groups; - return this; - } - - /** - * Set the job description - * - * Updates the {@link Job#description} setting - * - * @param description the desired Machine Learning job description - */ - public Builder setDescription(String description) { - this.description = description; - return this; - } - - /** - * The detector updates to apply to the job - * - * Updates the {@link AnalysisConfig#detectors} setting - * - * @param detectorUpdates list of {@link JobUpdate.DetectorUpdate} objects - */ - public Builder setDetectorUpdates(List detectorUpdates) { - this.detectorUpdates = detectorUpdates; - return this; - } - - /** - * Enables/disables the model plot config setting through {@link ModelPlotConfig#enabled} - * - * Updates the {@link 
Job#modelPlotConfig} setting - * - * @param modelPlotConfig {@link ModelPlotConfig} object with updated fields - */ - public Builder setModelPlotConfig(ModelPlotConfig modelPlotConfig) { - this.modelPlotConfig = modelPlotConfig; - return this; - } - - /** - * Sets new {@link AnalysisLimits} for the {@link Job} - * - * Updates the {@link Job#analysisLimits} setting - * - * @param analysisLimits Updates to {@link AnalysisLimits} - */ - public Builder setAnalysisLimits(AnalysisLimits analysisLimits) { - this.analysisLimits = analysisLimits; - return this; - } - - /** - * Advanced configuration option. The period over which adjustments to the score are applied, as new data is seen - * - * Updates the {@link Job#renormalizationWindowDays} setting - * - * @param renormalizationWindowDays number of renormalization window days - */ - public Builder setRenormalizationWindowDays(Long renormalizationWindowDays) { - this.renormalizationWindowDays = renormalizationWindowDays; - return this; - } - - /** - * Advanced configuration option. The time between each periodic persistence of the model - * - * Updates the {@link Job#backgroundPersistInterval} setting - * - * @param backgroundPersistInterval the time between background persistence - */ - public Builder setBackgroundPersistInterval(TimeValue backgroundPersistInterval) { - this.backgroundPersistInterval = backgroundPersistInterval; - return this; - } - - /** - * The time in days that model snapshots are retained for the job. - * - * Updates the {@link Job#modelSnapshotRetentionDays} setting - * - * @param modelSnapshotRetentionDays number of days to keep a model snapshot - */ - public Builder setModelSnapshotRetentionDays(Long modelSnapshotRetentionDays) { - this.modelSnapshotRetentionDays = modelSnapshotRetentionDays; - return this; - } - - /** - * The time in days after which only one model snapshot per day is retained for the job. - * - * Updates the {@link Job#dailyModelSnapshotRetentionAfterDays} setting - * - * @param dailyModelSnapshotRetentionAfterDays number of days to keep a model snapshot - */ - public Builder setDailyModelSnapshotRetentionAfterDays(Long dailyModelSnapshotRetentionAfterDays) { - this.dailyModelSnapshotRetentionAfterDays = dailyModelSnapshotRetentionAfterDays; - return this; - } - - /** - * Advanced configuration option. The number of days for which job results are retained - * - * Updates the {@link Job#resultsRetentionDays} setting - * - * @param resultsRetentionDays number of days to keep results. - */ - public Builder setResultsRetentionDays(Long resultsRetentionDays) { - this.resultsRetentionDays = resultsRetentionDays; - return this; - } - - /** - * Sets the categorization filters on the {@link Job} - * - * Updates the {@link AnalysisConfig#categorizationFilters} setting. - * Requires {@link AnalysisConfig#categorizationFieldName} to have been set on the existing Job. - * - * @param categorizationFilters list of categorization filters for the Job's {@link AnalysisConfig} - */ - public Builder setCategorizationFilters(List categorizationFilters) { - this.categorizationFilters = categorizationFilters; - return this; - } - - /** - * Sets the per-partition categorization options on the {@link Job} - * - * Updates the {@link AnalysisConfig#perPartitionCategorizationConfig} setting. - * Requires {@link AnalysisConfig#perPartitionCategorizationConfig} to have been set on the existing Job. 
- * - * @param perPartitionCategorizationConfig per-partition categorization options for the Job's {@link AnalysisConfig} - */ - public Builder setPerPartitionCategorizationConfig(PerPartitionCategorizationConfig perPartitionCategorizationConfig) { - this.perPartitionCategorizationConfig = perPartitionCategorizationConfig; - return this; - } - - /** - * Contains custom meta data about the job. - * - * Updates the {@link Job#customSettings} setting - * - * @param customSettings custom settings map for the job - */ - public Builder setCustomSettings(Map customSettings) { - this.customSettings = customSettings; - return this; - } - - public Builder setAllowLazyOpen(boolean allowLazyOpen) { - this.allowLazyOpen = allowLazyOpen; - return this; - } - - public Builder setModelPruneWindow(TimeValue modelPruneWindow) { - this.modelPruneWindow = modelPruneWindow; - return this; - } - - public JobUpdate build() { - return new JobUpdate( - jobId, - groups, - description, - detectorUpdates, - modelPlotConfig, - analysisLimits, - backgroundPersistInterval, - renormalizationWindowDays, - resultsRetentionDays, - modelSnapshotRetentionDays, - dailyModelSnapshotRetentionAfterDays, - categorizationFilters, - perPartitionCategorizationConfig, - customSettings, - allowLazyOpen, - modelPruneWindow - ); - } - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/MlFilter.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/MlFilter.java deleted file mode 100644 index 7079ff69ea106..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/MlFilter.java +++ /dev/null @@ -1,170 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.job.config; - -import org.elasticsearch.core.Nullable; -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Arrays; -import java.util.Collections; -import java.util.List; -import java.util.Objects; -import java.util.SortedSet; -import java.util.TreeSet; - -/** - * An MlFilter Object - * - * A filter contains a list of strings. - * It can be used by one or more jobs. - * - * Specifically, filters are referenced in the custom_rules property of detector configuration objects. 
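JobUpdate, removed above, is a sparse partial-update document: its toXContent emits only the fields that were explicitly set, so everything left null is untouched on the server. A sketch of assembling one through the Builder shown in this diff (the job id and values are illustrative):

import org.elasticsearch.client.ml.job.config.JobUpdate;
import org.elasticsearch.core.TimeValue;

import java.util.List;

public class JobUpdateDemo {
    public static void main(String[] args) {
        JobUpdate update = new JobUpdate.Builder("event-rate-job")
            .setDescription("Retuned retention settings")
            .setGroups(List.of("ops", "tier-1"))
            .setResultsRetentionDays(60L)
            .setModelSnapshotRetentionDays(10L)
            .setAllowLazyOpen(true)
            .setModelPruneWindow(TimeValue.timeValueDays(30))
            .build();

        // Only the six fields set above appear in the rendered JSON;
        // detectors, analysis limits, etc. are omitted because they are null.
        System.out.println(update.getJobId());
    }
}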
- */ -public class MlFilter implements ToXContentObject { - - public static final ParseField TYPE = new ParseField("type"); - public static final ParseField ID = new ParseField("filter_id"); - public static final ParseField DESCRIPTION = new ParseField("description"); - public static final ParseField ITEMS = new ParseField("items"); - - // For QueryPage - public static final ParseField RESULTS_FIELD = new ParseField("filters"); - - public static final ObjectParser PARSER = new ObjectParser<>(TYPE.getPreferredName(), true, Builder::new); - - static { - PARSER.declareString((builder, s) -> {}, TYPE); - PARSER.declareString(Builder::setId, ID); - PARSER.declareStringOrNull(Builder::setDescription, DESCRIPTION); - PARSER.declareStringArray(Builder::setItems, ITEMS); - } - - private final String id; - private final String description; - private final SortedSet items; - - private MlFilter(String id, String description, SortedSet items) { - this.id = Objects.requireNonNull(id); - this.description = description; - this.items = Collections.unmodifiableSortedSet(items); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(ID.getPreferredName(), id); - if (description != null) { - builder.field(DESCRIPTION.getPreferredName(), description); - } - builder.field(ITEMS.getPreferredName(), items); - // Don't include TYPE as it's fixed - builder.endObject(); - return builder; - } - - public String getId() { - return id; - } - - public String getDescription() { - return description; - } - - public SortedSet getItems() { - return items; - } - - @Override - public boolean equals(Object obj) { - if (obj == this) { - return true; - } - - if (obj instanceof MlFilter == false) { - return false; - } - - MlFilter other = (MlFilter) obj; - return id.equals(other.id) && Objects.equals(description, other.description) && items.equals(other.items); - } - - @Override - public int hashCode() { - return Objects.hash(id, description, items); - } - - /** - * Creates a new Builder object for creating an MlFilter object - * @param filterId The ID of the filter to create - */ - public static Builder builder(String filterId) { - return new Builder().setId(filterId); - } - - public static class Builder { - - private String id; - private String description; - private SortedSet items = new TreeSet<>(); - - private Builder() {} - - /** - * Set the ID of the filter - * @param id The id desired - */ - public Builder setId(String id) { - this.id = Objects.requireNonNull(id); - return this; - } - - @Nullable - public String getId() { - return id; - } - - /** - * Set the description of the filter - * @param description The description desired - */ - public Builder setDescription(String description) { - this.description = description; - return this; - } - - public Builder setItems(SortedSet items) { - this.items = Objects.requireNonNull(items); - return this; - } - - public Builder setItems(List items) { - this.items = new TreeSet<>(items); - return this; - } - - /** - * The items of the filter. - * - * A wildcard * can be used at the beginning or the end of an item. Up to 10000 items are allowed in each filter. - * - * @param items String list of items to be applied in the filter - */ - public Builder setItems(String... 
items) { - setItems(Arrays.asList(items)); - return this; - } - - public MlFilter build() { - return new MlFilter(id, description, items); - } - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/ModelPlotConfig.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/ModelPlotConfig.java deleted file mode 100644 index 4581409d64989..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/ModelPlotConfig.java +++ /dev/null @@ -1,93 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.job.config; - -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Objects; - -public class ModelPlotConfig implements ToXContentObject { - - private static final ParseField TYPE_FIELD = new ParseField("model_plot_config"); - private static final ParseField ENABLED_FIELD = new ParseField("enabled"); - private static final ParseField TERMS_FIELD = new ParseField("terms"); - private static final ParseField ANNOTATIONS_ENABLED_FIELD = new ParseField("annotations_enabled"); - - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - TYPE_FIELD.getPreferredName(), - true, - a -> new ModelPlotConfig((boolean) a[0], (String) a[1], (Boolean) a[2]) - ); - - static { - PARSER.declareBoolean(ConstructingObjectParser.constructorArg(), ENABLED_FIELD); - PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), TERMS_FIELD); - PARSER.declareBoolean(ConstructingObjectParser.optionalConstructorArg(), ANNOTATIONS_ENABLED_FIELD); - } - - private final boolean enabled; - private final String terms; - private final Boolean annotationsEnabled; - - public ModelPlotConfig(boolean enabled, String terms, Boolean annotationsEnabled) { - this.enabled = enabled; - this.terms = terms; - this.annotationsEnabled = annotationsEnabled; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(ENABLED_FIELD.getPreferredName(), enabled); - if (terms != null) { - builder.field(TERMS_FIELD.getPreferredName(), terms); - } - if (annotationsEnabled != null) { - builder.field(ANNOTATIONS_ENABLED_FIELD.getPreferredName(), annotationsEnabled); - } - builder.endObject(); - return builder; - } - - public boolean isEnabled() { - return enabled; - } - - public String getTerms() { - return this.terms; - } - - public Boolean annotationsEnabled() { - return annotationsEnabled; - } - - @Override - public boolean equals(Object other) { - if (this == other) { - return true; - } - - if (other instanceof ModelPlotConfig == false) { - return false; - } - - ModelPlotConfig that = (ModelPlotConfig) other; - return this.enabled == that.enabled - && Objects.equals(this.terms, that.terms) - && Objects.equals(this.annotationsEnabled, that.annotationsEnabled); - } - - @Override - public int hashCode() { - return Objects.hash(enabled, terms, annotationsEnabled); - } -} diff 
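MlFilter and ModelPlotConfig, both deleted above, are small immutable value objects. MlFilter keeps its items in an unmodifiable sorted set (the varargs setItems wraps the arguments in a TreeSet, so duplicates collapse and order is canonical), and ModelPlotConfig omits its optional terms and annotations_enabled fields from the JSON when they are null. A sketch with illustrative ids:

import org.elasticsearch.client.ml.job.config.MlFilter;
import org.elasticsearch.client.ml.job.config.ModelPlotConfig;

public class FilterAndPlotDemo {
    public static void main(String[] args) {
        // Wildcards (*) are allowed at the start or end of an item.
        MlFilter filter = MlFilter.builder("safe_domains")
            .setDescription("Domains to ignore")
            .setItems("*.elastic.co", "internal.example.com")
            .build();

        ModelPlotConfig plotConfig = new ModelPlotConfig(true, null, true);

        System.out.println(filter.getItems());       // sorted, de-duplicated set
        System.out.println(plotConfig.isEnabled());  // true
    }
}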
--git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/Operator.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/Operator.java deleted file mode 100644 index 3d7ac2af70a66..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/Operator.java +++ /dev/null @@ -1,60 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.job.config; - -import org.elasticsearch.xcontent.ParseField; - -import java.util.Locale; - -/** - * Enum representing logical comparisons on doubles - */ -public enum Operator { - GT { - @Override - public boolean test(double lhs, double rhs) { - return Double.compare(lhs, rhs) > 0; - } - }, - GTE { - @Override - public boolean test(double lhs, double rhs) { - return Double.compare(lhs, rhs) >= 0; - } - }, - LT { - @Override - public boolean test(double lhs, double rhs) { - return Double.compare(lhs, rhs) < 0; - } - }, - LTE { - @Override - public boolean test(double lhs, double rhs) { - return Double.compare(lhs, rhs) <= 0; - } - }; - // EQ was considered but given the oddity of such a - // condition and the fact that it would be a numerically - // unstable condition, it was rejected. - - public static final ParseField OPERATOR_FIELD = new ParseField("operator"); - - public boolean test(double lhs, double rhs) { - return false; - } - - public static Operator fromString(String name) { - return valueOf(name.trim().toUpperCase(Locale.ROOT)); - } - - @Override - public String toString() { - return name().toLowerCase(Locale.ROOT); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/PerPartitionCategorizationConfig.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/PerPartitionCategorizationConfig.java deleted file mode 100644 index 86cd40f45c601..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/PerPartitionCategorizationConfig.java +++ /dev/null @@ -1,86 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
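The Operator enum above implements the four inequalities through Double.compare and, per its own comment, deliberately omits EQ as numerically unstable. A brief sketch of the members shown:

import org.elasticsearch.client.ml.job.config.Operator;

public class OperatorDemo {
    public static void main(String[] args) {
        assert Operator.GT.test(2.0, 1.0);
        assert Operator.LTE.test(1.0, 1.0);

        // fromString trims and upper-cases before valueOf.
        assert Operator.fromString(" lt ") == Operator.LT;

        System.out.println(Operator.GTE); // prints "gte"
    }
}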
- */ - -package org.elasticsearch.client.ml.job.config; - -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContent; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Objects; - -public class PerPartitionCategorizationConfig implements ToXContentObject { - - public static final ParseField TYPE_FIELD = new ParseField("per_partition_categorization"); - public static final ParseField ENABLED_FIELD = new ParseField("enabled"); - public static final ParseField STOP_ON_WARN = new ParseField("stop_on_warn"); - - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - TYPE_FIELD.getPreferredName(), - true, - a -> new PerPartitionCategorizationConfig((boolean) a[0], (Boolean) a[1]) - ); - - static { - PARSER.declareBoolean(ConstructingObjectParser.constructorArg(), ENABLED_FIELD); - PARSER.declareBoolean(ConstructingObjectParser.optionalConstructorArg(), STOP_ON_WARN); - } - - private final boolean enabled; - private final boolean stopOnWarn; - - public PerPartitionCategorizationConfig() { - this(false, null); - } - - public PerPartitionCategorizationConfig(boolean enabled, Boolean stopOnWarn) { - this.enabled = enabled; - this.stopOnWarn = (stopOnWarn == null) ? false : stopOnWarn; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { - builder.startObject(); - builder.field(ENABLED_FIELD.getPreferredName(), enabled); - if (enabled) { - builder.field(STOP_ON_WARN.getPreferredName(), stopOnWarn); - } - builder.endObject(); - return builder; - } - - public boolean isEnabled() { - return enabled; - } - - public boolean isStopOnWarn() { - return stopOnWarn; - } - - @Override - public boolean equals(Object other) { - if (this == other) { - return true; - } - - if (other instanceof PerPartitionCategorizationConfig == false) { - return false; - } - - PerPartitionCategorizationConfig that = (PerPartitionCategorizationConfig) other; - return this.enabled == that.enabled && this.stopOnWarn == that.stopOnWarn; - } - - @Override - public int hashCode() { - return Objects.hash(enabled, stopOnWarn); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/RuleAction.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/RuleAction.java deleted file mode 100644 index 21d9c99134660..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/RuleAction.java +++ /dev/null @@ -1,30 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.job.config; - -import java.util.Locale; - -public enum RuleAction { - SKIP_RESULT, - SKIP_MODEL_UPDATE; - - /** - * Case-insensitive from string method. 
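PerPartitionCategorizationConfig, removed above, coerces a null stop_on_warn to false and serializes it only when the feature is enabled. A short sketch of those defaults:

import org.elasticsearch.client.ml.job.config.PerPartitionCategorizationConfig;

public class PerPartitionDemo {
    public static void main(String[] args) {
        // No-arg constructor delegates to (false, null).
        PerPartitionCategorizationConfig disabled = new PerPartitionCategorizationConfig();
        // Explicit null stop_on_warn is coerced to false.
        PerPartitionCategorizationConfig enabled = new PerPartitionCategorizationConfig(true, null);

        assert disabled.isEnabled() == false;
        assert enabled.isStopOnWarn() == false;
        // toXContent writes stop_on_warn only when enabled == true.
    }
}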
- * - * @param value String representation - * @return The rule action - */ - public static RuleAction fromString(String value) { - return RuleAction.valueOf(value.toUpperCase(Locale.ROOT)); - } - - @Override - public String toString() { - return name().toLowerCase(Locale.ROOT); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/RuleCondition.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/RuleCondition.java deleted file mode 100644 index d7f9054c23485..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/RuleCondition.java +++ /dev/null @@ -1,108 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.job.config; - -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Locale; -import java.util.Objects; - -public class RuleCondition implements ToXContentObject { - - public static final ParseField RULE_CONDITION_FIELD = new ParseField("rule_condition"); - - public static final ParseField APPLIES_TO_FIELD = new ParseField("applies_to"); - public static final ParseField VALUE_FIELD = new ParseField("value"); - - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - RULE_CONDITION_FIELD.getPreferredName(), - true, - a -> new RuleCondition((AppliesTo) a[0], (Operator) a[1], (double) a[2]) - ); - - static { - PARSER.declareString(ConstructingObjectParser.constructorArg(), AppliesTo::fromString, APPLIES_TO_FIELD); - PARSER.declareString(ConstructingObjectParser.constructorArg(), Operator::fromString, Operator.OPERATOR_FIELD); - PARSER.declareDouble(ConstructingObjectParser.constructorArg(), VALUE_FIELD); - } - - private final AppliesTo appliesTo; - private final Operator operator; - private final double value; - - public RuleCondition(AppliesTo appliesTo, Operator operator, double value) { - this.appliesTo = appliesTo; - this.operator = operator; - this.value = value; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(APPLIES_TO_FIELD.getPreferredName(), appliesTo); - builder.field(Operator.OPERATOR_FIELD.getPreferredName(), operator); - builder.field(VALUE_FIELD.getPreferredName(), value); - builder.endObject(); - return builder; - } - - public AppliesTo getAppliesTo() { - return appliesTo; - } - - public Operator getOperator() { - return operator; - } - - public double getValue() { - return value; - } - - @Override - public boolean equals(Object obj) { - if (this == obj) { - return true; - } - - if (obj instanceof RuleCondition == false) { - return false; - } - - RuleCondition other = (RuleCondition) obj; - return appliesTo == other.appliesTo && operator == other.operator && value == other.value; - } - - @Override - public int hashCode() { - return Objects.hash(appliesTo, operator, value); - } - - public static RuleCondition createTime(Operator operator, long epochSeconds) { - return new 
RuleCondition(AppliesTo.TIME, operator, epochSeconds); - } - - public enum AppliesTo { - ACTUAL, - TYPICAL, - DIFF_FROM_TYPICAL, - TIME; - - public static AppliesTo fromString(String value) { - return valueOf(value.toUpperCase(Locale.ROOT)); - } - - @Override - public String toString() { - return name().toLowerCase(Locale.ROOT); - } - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/RuleScope.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/RuleScope.java deleted file mode 100644 index 352d240943a9c..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/RuleScope.java +++ /dev/null @@ -1,123 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.job.config; - -import org.elasticsearch.common.Strings; -import org.elasticsearch.xcontent.ContextParser; -import org.elasticsearch.xcontent.DeprecationHandler; -import org.elasticsearch.xcontent.NamedXContentRegistry; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentFactory; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Collections; -import java.util.HashMap; -import java.util.Map; -import java.util.Objects; -import java.util.Set; -import java.util.stream.Collectors; - -public class RuleScope implements ToXContentObject { - - public static ContextParser parser() { - return (p, c) -> { - Map unparsedScope = p.map(); - if (unparsedScope.isEmpty()) { - return new RuleScope(); - } - Map scope = new HashMap<>(); - for (Map.Entry entry : unparsedScope.entrySet()) { - try (XContentBuilder builder = XContentFactory.jsonBuilder()) { - @SuppressWarnings("unchecked") - Map value = (Map) entry.getValue(); - builder.map(value); - try ( - XContentParser scopeParser = XContentFactory.xContent(builder.contentType()) - .createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.IGNORE_DEPRECATIONS, Strings.toString(builder)) - ) { - scope.put(entry.getKey(), FilterRef.PARSER.parse(scopeParser, null)); - } - } - } - return new RuleScope(scope); - }; - } - - private final Map scope; - - public RuleScope() { - scope = Collections.emptyMap(); - } - - public RuleScope(Map scope) { - this.scope = Collections.unmodifiableMap(scope); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - return builder.map(scope); - } - - public boolean isEmpty() { - return scope.isEmpty(); - } - - public Set getReferencedFilters() { - return scope.values().stream().map(FilterRef::getFilterId).collect(Collectors.toSet()); - } - - @Override - public boolean equals(Object obj) { - if (this == obj) { - return true; - } - - if (obj instanceof RuleScope == false) { - return false; - } - - RuleScope other = (RuleScope) obj; - return Objects.equals(scope, other.scope); - } - - @Override - public int hashCode() { - return Objects.hash(scope); - } - - public static Builder builder() { - return new Builder(); - } - - public static class Builder { - - private Map scope = new HashMap<>(); - - public Builder() {} - - public 
Builder(RuleScope otherScope) { - scope = new HashMap<>(otherScope.scope); - } - - public Builder exclude(String field, String filterId) { - scope.put(field, new FilterRef(filterId, FilterRef.FilterType.EXCLUDE)); - return this; - } - - public Builder include(String field, String filterId) { - scope.put(field, new FilterRef(filterId, FilterRef.FilterType.INCLUDE)); - return this; - } - - public RuleScope build() { - return new RuleScope(scope); - } - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/process/DataCounts.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/process/DataCounts.java deleted file mode 100644 index 20cc5db284302..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/process/DataCounts.java +++ /dev/null @@ -1,480 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.job.process; - -import org.elasticsearch.client.common.TimeUtil; -import org.elasticsearch.client.ml.job.config.Job; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ObjectParser.ValueType; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.time.Instant; -import java.util.Date; -import java.util.Objects; - -/** - * Job processed record counts. - *

    - * The getInput... methods return the actual number of - * fields/records sent to the API including invalid records. - * The getProcessed... methods are the number sent to the - * Engine. - *

    - * The inputRecordCount field is calculated so it - * should not be set in deserialization but it should be serialised - * so the field is visible. - */ -public class DataCounts implements ToXContentObject { - - public static final ParseField PROCESSED_RECORD_COUNT = new ParseField("processed_record_count"); - public static final ParseField PROCESSED_FIELD_COUNT = new ParseField("processed_field_count"); - public static final ParseField INPUT_BYTES = new ParseField("input_bytes"); - public static final ParseField INPUT_RECORD_COUNT = new ParseField("input_record_count"); - public static final ParseField INPUT_FIELD_COUNT = new ParseField("input_field_count"); - public static final ParseField INVALID_DATE_COUNT = new ParseField("invalid_date_count"); - public static final ParseField MISSING_FIELD_COUNT = new ParseField("missing_field_count"); - public static final ParseField OUT_OF_ORDER_TIME_COUNT = new ParseField("out_of_order_timestamp_count"); - public static final ParseField EMPTY_BUCKET_COUNT = new ParseField("empty_bucket_count"); - public static final ParseField SPARSE_BUCKET_COUNT = new ParseField("sparse_bucket_count"); - public static final ParseField BUCKET_COUNT = new ParseField("bucket_count"); - public static final ParseField EARLIEST_RECORD_TIME = new ParseField("earliest_record_timestamp"); - public static final ParseField LATEST_RECORD_TIME = new ParseField("latest_record_timestamp"); - public static final ParseField LAST_DATA_TIME = new ParseField("last_data_time"); - public static final ParseField LATEST_EMPTY_BUCKET_TIME = new ParseField("latest_empty_bucket_timestamp"); - public static final ParseField LATEST_SPARSE_BUCKET_TIME = new ParseField("latest_sparse_bucket_timestamp"); - public static final ParseField LOG_TIME = new ParseField("log_time"); - - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "data_counts", - true, - a -> new DataCounts( - (String) a[0], - (long) a[1], - (long) a[2], - (long) a[3], - (long) a[4], - (long) a[5], - (long) a[6], - (long) a[7], - (long) a[8], - (long) a[9], - (long) a[10], - (Date) a[11], - (Date) a[12], - (Date) a[13], - (Date) a[14], - (Date) a[15], - (Instant) a[16] - ) - ); - - static { - PARSER.declareString(ConstructingObjectParser.constructorArg(), Job.ID); - PARSER.declareLong(ConstructingObjectParser.constructorArg(), PROCESSED_RECORD_COUNT); - PARSER.declareLong(ConstructingObjectParser.constructorArg(), PROCESSED_FIELD_COUNT); - PARSER.declareLong(ConstructingObjectParser.constructorArg(), INPUT_BYTES); - PARSER.declareLong(ConstructingObjectParser.constructorArg(), INPUT_FIELD_COUNT); - PARSER.declareLong(ConstructingObjectParser.constructorArg(), INVALID_DATE_COUNT); - PARSER.declareLong(ConstructingObjectParser.constructorArg(), MISSING_FIELD_COUNT); - PARSER.declareLong(ConstructingObjectParser.constructorArg(), OUT_OF_ORDER_TIME_COUNT); - PARSER.declareLong(ConstructingObjectParser.constructorArg(), EMPTY_BUCKET_COUNT); - PARSER.declareLong(ConstructingObjectParser.constructorArg(), SPARSE_BUCKET_COUNT); - PARSER.declareLong(ConstructingObjectParser.constructorArg(), BUCKET_COUNT); - PARSER.declareField( - ConstructingObjectParser.optionalConstructorArg(), - (p) -> TimeUtil.parseTimeField(p, EARLIEST_RECORD_TIME.getPreferredName()), - EARLIEST_RECORD_TIME, - ValueType.VALUE - ); - PARSER.declareField( - ConstructingObjectParser.optionalConstructorArg(), - (p) -> TimeUtil.parseTimeField(p, LATEST_RECORD_TIME.getPreferredName()), - LATEST_RECORD_TIME, - ValueType.VALUE - ); 
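        // A minimal usage sketch of the lenient parser declared above, assuming the
        // same xcontent helpers that RuleScope.parser() uses elsewhere in this diff;
        // the job id and counts are illustrative, and the JSON keys mirror the
        // ParseField constants of this class:
        //
        //   String json = "{\"job_id\":\"my-job\",\"processed_record_count\":100,"
        //       + "\"processed_field_count\":200,\"input_bytes\":1024,\"input_field_count\":220,"
        //       + "\"invalid_date_count\":1,\"missing_field_count\":0,"
        //       + "\"out_of_order_timestamp_count\":2,\"empty_bucket_count\":0,"
        //       + "\"sparse_bucket_count\":0,\"bucket_count\":50}";
        //   try (XContentParser p = XContentFactory.xContent(XContentType.JSON)
        //       .createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.IGNORE_DEPRECATIONS, json)) {
        //       DataCounts counts = PARSER.apply(p, null);
        //       // input_record_count is derived rather than parsed:
        //       assert counts.getInputRecordCount() == 103; // 100 processed + 1 invalid date + 2 out of order
        //   }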
- PARSER.declareField( - ConstructingObjectParser.optionalConstructorArg(), - (p) -> TimeUtil.parseTimeField(p, LAST_DATA_TIME.getPreferredName()), - LAST_DATA_TIME, - ValueType.VALUE - ); - PARSER.declareField( - ConstructingObjectParser.optionalConstructorArg(), - (p) -> TimeUtil.parseTimeField(p, LATEST_EMPTY_BUCKET_TIME.getPreferredName()), - LATEST_EMPTY_BUCKET_TIME, - ValueType.VALUE - ); - PARSER.declareField( - ConstructingObjectParser.optionalConstructorArg(), - (p) -> TimeUtil.parseTimeField(p, LATEST_SPARSE_BUCKET_TIME.getPreferredName()), - LATEST_SPARSE_BUCKET_TIME, - ValueType.VALUE - ); - PARSER.declareField( - ConstructingObjectParser.optionalConstructorArg(), - p -> TimeUtil.parseTimeFieldToInstant(p, LOG_TIME.getPreferredName()), - LOG_TIME, - ValueType.VALUE - ); - } - - private final String jobId; - private long processedRecordCount; - private long processedFieldCount; - private long inputBytes; - private long inputFieldCount; - private long invalidDateCount; - private long missingFieldCount; - private long outOfOrderTimeStampCount; - private long emptyBucketCount; - private long sparseBucketCount; - private long bucketCount; - private Date earliestRecordTimeStamp; - private Date latestRecordTimeStamp; - private Date lastDataTimeStamp; - private Date latestEmptyBucketTimeStamp; - private Date latestSparseBucketTimeStamp; - private Instant logTime; - - public DataCounts( - String jobId, - long processedRecordCount, - long processedFieldCount, - long inputBytes, - long inputFieldCount, - long invalidDateCount, - long missingFieldCount, - long outOfOrderTimeStampCount, - long emptyBucketCount, - long sparseBucketCount, - long bucketCount, - Date earliestRecordTimeStamp, - Date latestRecordTimeStamp, - Date lastDataTimeStamp, - Date latestEmptyBucketTimeStamp, - Date latestSparseBucketTimeStamp, - Instant logTime - ) { - this.jobId = jobId; - this.processedRecordCount = processedRecordCount; - this.processedFieldCount = processedFieldCount; - this.inputBytes = inputBytes; - this.inputFieldCount = inputFieldCount; - this.invalidDateCount = invalidDateCount; - this.missingFieldCount = missingFieldCount; - this.outOfOrderTimeStampCount = outOfOrderTimeStampCount; - this.emptyBucketCount = emptyBucketCount; - this.sparseBucketCount = sparseBucketCount; - this.bucketCount = bucketCount; - this.latestRecordTimeStamp = latestRecordTimeStamp; - this.earliestRecordTimeStamp = earliestRecordTimeStamp; - this.lastDataTimeStamp = lastDataTimeStamp; - this.latestEmptyBucketTimeStamp = latestEmptyBucketTimeStamp; - this.latestSparseBucketTimeStamp = latestSparseBucketTimeStamp; - this.logTime = logTime == null ? null : Instant.ofEpochMilli(logTime.toEpochMilli()); - } - - DataCounts(String jobId) { - this.jobId = jobId; - } - - public String getJobId() { - return jobId; - } - - /** - * Number of records processed by this job. - * This value is the number of records sent passed on to - * the engine i.e. {@linkplain #getInputRecordCount()} minus - * records with bad dates or out of order - * - * @return Number of records processed by this job {@code long} - */ - public long getProcessedRecordCount() { - return processedRecordCount; - } - - /** - * Number of data points (processed record count * the number - * of analysed fields) processed by this job. This count does - * not include the time field. 
- * - * @return Number of data points processed by this job {@code long} - */ - public long getProcessedFieldCount() { - return processedFieldCount; - } - - /** - * Total number of input records read. - * This = processed record count + date parse error records count - * + out of order record count. - *

    - * Records with missing fields are counted as they are still written. - * - * @return Total number of input records read {@code long} - */ - public long getInputRecordCount() { - return processedRecordCount + outOfOrderTimeStampCount + invalidDateCount; - } - - /** - * The total number of bytes sent to this job. - * This value includes the bytes from any records - * that have been discarded for any reason - * e.g. because the date cannot be read - * - * @return Volume in bytes - */ - public long getInputBytes() { - return inputBytes; - } - - /** - * The total number of fields sent to the job - * including fields that aren't analysed. - * - * @return The total number of fields sent to the job - */ - public long getInputFieldCount() { - return inputFieldCount; - } - - /** - * The number of records with an invalid date field that could - * not be parsed or converted to epoch time. - * - * @return The number of records with an invalid date field - */ - public long getInvalidDateCount() { - return invalidDateCount; - } - - /** - * The number of missing fields that had been - * configured for analysis. - * - * @return The number of missing fields - */ - public long getMissingFieldCount() { - return missingFieldCount; - } - - /** - * The number of records with a timestamp that is - * before the time of the latest record. Records should - * be in ascending chronological order - * - * @return The number of records with a timestamp that is before the time of the latest record - */ - public long getOutOfOrderTimeStampCount() { - return outOfOrderTimeStampCount; - } - - /** - * The number of buckets with no records in it. Used to measure general data fitness and/or - * configuration problems (bucket span). - * - * @return Number of empty buckets processed by this job {@code long} - */ - public long getEmptyBucketCount() { - return emptyBucketCount; - } - - /** - * The number of buckets with few records compared to the overall counts. - * Used to measure general data fitness and/or configuration problems (bucket span). - * - * @return Number of sparse buckets processed by this job {@code long} - */ - public long getSparseBucketCount() { - return sparseBucketCount; - } - - /** - * The number of buckets overall. - * - * @return Number of buckets processed by this job {@code long} - */ - public long getBucketCount() { - return bucketCount; - } - - /** - * The time of the first record seen. - * - * @return The first record time - */ - public Date getEarliestRecordTimeStamp() { - return earliestRecordTimeStamp; - } - - /** - * The time of the latest record seen. - * - * @return Latest record time - */ - public Date getLatestRecordTimeStamp() { - return latestRecordTimeStamp; - } - - /** - * The wall clock time the latest record was seen. - * - * @return Wall clock time of the lastest record - */ - public Date getLastDataTimeStamp() { - return lastDataTimeStamp; - } - - /** - * The time of the latest empty bucket seen. - * - * @return Latest empty bucket time - */ - public Date getLatestEmptyBucketTimeStamp() { - return latestEmptyBucketTimeStamp; - } - - /** - * The time of the latest sparse bucket seen. - * - * @return Latest sparse bucket time - */ - public Date getLatestSparseBucketTimeStamp() { - return latestSparseBucketTimeStamp; - } - - /** - * The wall clock time at the point when this instance was created. 
- * - * @return The wall clock time - */ - public Instant getLogTime() { - return logTime; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - - builder.field(Job.ID.getPreferredName(), jobId); - builder.field(PROCESSED_RECORD_COUNT.getPreferredName(), processedRecordCount); - builder.field(PROCESSED_FIELD_COUNT.getPreferredName(), processedFieldCount); - builder.field(INPUT_BYTES.getPreferredName(), inputBytes); - builder.field(INPUT_FIELD_COUNT.getPreferredName(), inputFieldCount); - builder.field(INVALID_DATE_COUNT.getPreferredName(), invalidDateCount); - builder.field(MISSING_FIELD_COUNT.getPreferredName(), missingFieldCount); - builder.field(OUT_OF_ORDER_TIME_COUNT.getPreferredName(), outOfOrderTimeStampCount); - builder.field(EMPTY_BUCKET_COUNT.getPreferredName(), emptyBucketCount); - builder.field(SPARSE_BUCKET_COUNT.getPreferredName(), sparseBucketCount); - builder.field(BUCKET_COUNT.getPreferredName(), bucketCount); - if (earliestRecordTimeStamp != null) { - builder.timeField( - EARLIEST_RECORD_TIME.getPreferredName(), - EARLIEST_RECORD_TIME.getPreferredName() + "_string", - earliestRecordTimeStamp.getTime() - ); - } - if (latestRecordTimeStamp != null) { - builder.timeField( - LATEST_RECORD_TIME.getPreferredName(), - LATEST_RECORD_TIME.getPreferredName() + "_string", - latestRecordTimeStamp.getTime() - ); - } - if (lastDataTimeStamp != null) { - builder.timeField( - LAST_DATA_TIME.getPreferredName(), - LAST_DATA_TIME.getPreferredName() + "_string", - lastDataTimeStamp.getTime() - ); - } - if (latestEmptyBucketTimeStamp != null) { - builder.timeField( - LATEST_EMPTY_BUCKET_TIME.getPreferredName(), - LATEST_EMPTY_BUCKET_TIME.getPreferredName() + "_string", - latestEmptyBucketTimeStamp.getTime() - ); - } - if (latestSparseBucketTimeStamp != null) { - builder.timeField( - LATEST_SPARSE_BUCKET_TIME.getPreferredName(), - LATEST_SPARSE_BUCKET_TIME.getPreferredName() + "_string", - latestSparseBucketTimeStamp.getTime() - ); - } - builder.field(INPUT_RECORD_COUNT.getPreferredName(), getInputRecordCount()); - if (logTime != null) { - builder.timeField(LOG_TIME.getPreferredName(), LOG_TIME.getPreferredName() + "_string", logTime.toEpochMilli()); - } - - builder.endObject(); - return builder; - } - - /** - * Equality test - */ - @Override - public boolean equals(Object other) { - if (this == other) { - return true; - } - - if (other == null || getClass() != other.getClass()) { - return false; - } - - DataCounts that = (DataCounts) other; - - return Objects.equals(this.jobId, that.jobId) - && this.processedRecordCount == that.processedRecordCount - && this.processedFieldCount == that.processedFieldCount - && this.inputBytes == that.inputBytes - && this.inputFieldCount == that.inputFieldCount - && this.invalidDateCount == that.invalidDateCount - && this.missingFieldCount == that.missingFieldCount - && this.outOfOrderTimeStampCount == that.outOfOrderTimeStampCount - && this.emptyBucketCount == that.emptyBucketCount - && this.sparseBucketCount == that.sparseBucketCount - && this.bucketCount == that.bucketCount - && Objects.equals(this.latestRecordTimeStamp, that.latestRecordTimeStamp) - && Objects.equals(this.earliestRecordTimeStamp, that.earliestRecordTimeStamp) - && Objects.equals(this.lastDataTimeStamp, that.lastDataTimeStamp) - && Objects.equals(this.latestEmptyBucketTimeStamp, that.latestEmptyBucketTimeStamp) - && Objects.equals(this.latestSparseBucketTimeStamp, that.latestSparseBucketTimeStamp) - && 
Objects.equals(this.logTime, that.logTime); - } - - @Override - public int hashCode() { - return Objects.hash( - jobId, - processedRecordCount, - processedFieldCount, - inputBytes, - inputFieldCount, - invalidDateCount, - missingFieldCount, - outOfOrderTimeStampCount, - lastDataTimeStamp, - emptyBucketCount, - sparseBucketCount, - bucketCount, - latestRecordTimeStamp, - earliestRecordTimeStamp, - latestEmptyBucketTimeStamp, - latestSparseBucketTimeStamp, - logTime - ); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/process/ModelSizeStats.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/process/ModelSizeStats.java deleted file mode 100644 index db3a3fa011738..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/process/ModelSizeStats.java +++ /dev/null @@ -1,606 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.job.process; - -import org.elasticsearch.client.common.TimeUtil; -import org.elasticsearch.client.ml.job.config.Job; -import org.elasticsearch.client.ml.job.results.Result; -import org.elasticsearch.core.Nullable; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ObjectParser.ValueType; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Date; -import java.util.Locale; -import java.util.Objects; - -/** - * Provide access to the C++ model size stats for the Java process. 
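 A minimal construction sketch (the job id and byte counts are illustrative;
 the defaults asserted below are the ones set in the Builder constructor):

     ModelSizeStats stats = new ModelSizeStats.Builder("my-job") // illustrative job id
         .setModelBytes(65536L)
         .setPeakModelBytes(131072L)
         .build();
     assert stats.getMemoryStatus() == ModelSizeStats.MemoryStatus.OK; // Builder default
     assert stats.getLogTime() != null; // Builder defaults logTime to new Date()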
- */ -public class ModelSizeStats implements ToXContentObject { - - /** - * Result type - */ - public static final String RESULT_TYPE_VALUE = "model_size_stats"; - public static final ParseField RESULT_TYPE_FIELD = new ParseField(RESULT_TYPE_VALUE); - - /** - * Field Names - */ - public static final ParseField MODEL_BYTES_FIELD = new ParseField("model_bytes"); - public static final ParseField PEAK_MODEL_BYTES_FIELD = new ParseField("peak_model_bytes"); - public static final ParseField MODEL_BYTES_EXCEEDED_FIELD = new ParseField("model_bytes_exceeded"); - public static final ParseField MODEL_BYTES_MEMORY_LIMIT_FIELD = new ParseField("model_bytes_memory_limit"); - public static final ParseField TOTAL_BY_FIELD_COUNT_FIELD = new ParseField("total_by_field_count"); - public static final ParseField TOTAL_OVER_FIELD_COUNT_FIELD = new ParseField("total_over_field_count"); - public static final ParseField TOTAL_PARTITION_FIELD_COUNT_FIELD = new ParseField("total_partition_field_count"); - public static final ParseField BUCKET_ALLOCATION_FAILURES_COUNT_FIELD = new ParseField("bucket_allocation_failures_count"); - public static final ParseField MEMORY_STATUS_FIELD = new ParseField("memory_status"); - public static final ParseField ASSIGNMENT_MEMORY_BASIS_FIELD = new ParseField("assignment_memory_basis"); - public static final ParseField CATEGORIZED_DOC_COUNT_FIELD = new ParseField("categorized_doc_count"); - public static final ParseField TOTAL_CATEGORY_COUNT_FIELD = new ParseField("total_category_count"); - public static final ParseField FREQUENT_CATEGORY_COUNT_FIELD = new ParseField("frequent_category_count"); - public static final ParseField RARE_CATEGORY_COUNT_FIELD = new ParseField("rare_category_count"); - public static final ParseField DEAD_CATEGORY_COUNT_FIELD = new ParseField("dead_category_count"); - public static final ParseField FAILED_CATEGORY_COUNT_FIELD = new ParseField("failed_category_count"); - public static final ParseField CATEGORIZATION_STATUS_FIELD = new ParseField("categorization_status"); - public static final ParseField LOG_TIME_FIELD = new ParseField("log_time"); - public static final ParseField TIMESTAMP_FIELD = new ParseField("timestamp"); - - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - RESULT_TYPE_VALUE, - true, - a -> new Builder((String) a[0]) - ); - - static { - PARSER.declareString(ConstructingObjectParser.constructorArg(), Job.ID); - PARSER.declareLong(Builder::setModelBytes, MODEL_BYTES_FIELD); - PARSER.declareLong(Builder::setPeakModelBytes, PEAK_MODEL_BYTES_FIELD); - PARSER.declareLong(Builder::setModelBytesExceeded, MODEL_BYTES_EXCEEDED_FIELD); - PARSER.declareLong(Builder::setModelBytesMemoryLimit, MODEL_BYTES_MEMORY_LIMIT_FIELD); - PARSER.declareLong(Builder::setBucketAllocationFailuresCount, BUCKET_ALLOCATION_FAILURES_COUNT_FIELD); - PARSER.declareLong(Builder::setTotalByFieldCount, TOTAL_BY_FIELD_COUNT_FIELD); - PARSER.declareLong(Builder::setTotalOverFieldCount, TOTAL_OVER_FIELD_COUNT_FIELD); - PARSER.declareLong(Builder::setTotalPartitionFieldCount, TOTAL_PARTITION_FIELD_COUNT_FIELD); - PARSER.declareField(Builder::setMemoryStatus, p -> MemoryStatus.fromString(p.text()), MEMORY_STATUS_FIELD, ValueType.STRING); - PARSER.declareField( - Builder::setAssignmentMemoryBasis, - p -> AssignmentMemoryBasis.fromString(p.text()), - ASSIGNMENT_MEMORY_BASIS_FIELD, - ValueType.STRING - ); - PARSER.declareLong(Builder::setCategorizedDocCount, CATEGORIZED_DOC_COUNT_FIELD); - PARSER.declareLong(Builder::setTotalCategoryCount, 
TOTAL_CATEGORY_COUNT_FIELD); - PARSER.declareLong(Builder::setFrequentCategoryCount, FREQUENT_CATEGORY_COUNT_FIELD); - PARSER.declareLong(Builder::setRareCategoryCount, RARE_CATEGORY_COUNT_FIELD); - PARSER.declareLong(Builder::setDeadCategoryCount, DEAD_CATEGORY_COUNT_FIELD); - PARSER.declareLong(Builder::setFailedCategoryCount, FAILED_CATEGORY_COUNT_FIELD); - PARSER.declareField( - Builder::setCategorizationStatus, - p -> CategorizationStatus.fromString(p.text()), - CATEGORIZATION_STATUS_FIELD, - ValueType.STRING - ); - PARSER.declareField( - Builder::setLogTime, - (p) -> TimeUtil.parseTimeField(p, LOG_TIME_FIELD.getPreferredName()), - LOG_TIME_FIELD, - ValueType.VALUE - ); - PARSER.declareField( - Builder::setTimestamp, - (p) -> TimeUtil.parseTimeField(p, TIMESTAMP_FIELD.getPreferredName()), - TIMESTAMP_FIELD, - ValueType.VALUE - ); - } - - /** - * The status of the memory monitored by the ResourceMonitor. OK is default, - * SOFT_LIMIT means that the models have done some aggressive pruning to - * keep the memory below the limit, and HARD_LIMIT means that samples have - * been dropped - */ - public enum MemoryStatus { - OK, - SOFT_LIMIT, - HARD_LIMIT; - - public static MemoryStatus fromString(String statusName) { - return valueOf(statusName.trim().toUpperCase(Locale.ROOT)); - } - - @Override - public String toString() { - return name().toLowerCase(Locale.ROOT); - } - } - - /** - * Where will we get the memory requirement from when assigning this job to - * a node? There are three possibilities: - * 1. The job's model_memory_limit - * 2. The current model memory, i.e. what's reported in model_bytes of this object - * 3. The peak model memory, i.e. what's reported in peak_model_bytes of this object - * The field storing this enum can also be null, which means the - * assignment code will decide on the fly - this was the old behaviour prior - * to 7.11. - */ - public enum AssignmentMemoryBasis { - MODEL_MEMORY_LIMIT, - CURRENT_MODEL_BYTES, - PEAK_MODEL_BYTES; - - public static AssignmentMemoryBasis fromString(String statusName) { - return valueOf(statusName.trim().toUpperCase(Locale.ROOT)); - } - - @Override - public String toString() { - return name().toLowerCase(Locale.ROOT); - } - } - - /** - * The status of categorization for a job. 
OK is default, WARN - * means that inappropriate numbers of categories are being found - */ - public enum CategorizationStatus { - OK, - WARN; - - public static CategorizationStatus fromString(String statusName) { - return valueOf(statusName.trim().toUpperCase(Locale.ROOT)); - } - - @Override - public String toString() { - return name().toLowerCase(Locale.ROOT); - } - } - - private final String jobId; - private final long modelBytes; - private final Long peakModelBytes; - private final Long modelBytesExceeded; - private final Long modelBytesMemoryLimit; - private final long totalByFieldCount; - private final long totalOverFieldCount; - private final long totalPartitionFieldCount; - private final long bucketAllocationFailuresCount; - private final MemoryStatus memoryStatus; - private final AssignmentMemoryBasis assignmentMemoryBasis; - private final long categorizedDocCount; - private final long totalCategoryCount; - private final long frequentCategoryCount; - private final long rareCategoryCount; - private final long deadCategoryCount; - private final long failedCategoryCount; - private final CategorizationStatus categorizationStatus; - private final Date timestamp; - private final Date logTime; - - private ModelSizeStats( - String jobId, - long modelBytes, - Long peakModelBytes, - Long modelBytesExceeded, - Long modelBytesMemoryLimit, - long totalByFieldCount, - long totalOverFieldCount, - long totalPartitionFieldCount, - long bucketAllocationFailuresCount, - MemoryStatus memoryStatus, - AssignmentMemoryBasis assignmentMemoryBasis, - long categorizedDocCount, - long totalCategoryCount, - long frequentCategoryCount, - long rareCategoryCount, - long deadCategoryCount, - long failedCategoryCount, - CategorizationStatus categorizationStatus, - Date timestamp, - Date logTime - ) { - this.jobId = jobId; - this.modelBytes = modelBytes; - this.peakModelBytes = peakModelBytes; - this.modelBytesExceeded = modelBytesExceeded; - this.modelBytesMemoryLimit = modelBytesMemoryLimit; - this.totalByFieldCount = totalByFieldCount; - this.totalOverFieldCount = totalOverFieldCount; - this.totalPartitionFieldCount = totalPartitionFieldCount; - this.bucketAllocationFailuresCount = bucketAllocationFailuresCount; - this.memoryStatus = memoryStatus; - this.assignmentMemoryBasis = assignmentMemoryBasis; - this.categorizedDocCount = categorizedDocCount; - this.totalCategoryCount = totalCategoryCount; - this.frequentCategoryCount = frequentCategoryCount; - this.rareCategoryCount = rareCategoryCount; - this.deadCategoryCount = deadCategoryCount; - this.failedCategoryCount = failedCategoryCount; - this.categorizationStatus = categorizationStatus; - this.timestamp = timestamp; - this.logTime = logTime; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - - builder.field(Job.ID.getPreferredName(), jobId); - builder.field(Result.RESULT_TYPE.getPreferredName(), RESULT_TYPE_VALUE); - builder.field(MODEL_BYTES_FIELD.getPreferredName(), modelBytes); - if (peakModelBytes != null) { - builder.field(PEAK_MODEL_BYTES_FIELD.getPreferredName(), peakModelBytes); - } - if (modelBytesExceeded != null) { - builder.field(MODEL_BYTES_EXCEEDED_FIELD.getPreferredName(), modelBytesExceeded); - } - if (modelBytesMemoryLimit != null) { - builder.field(MODEL_BYTES_MEMORY_LIMIT_FIELD.getPreferredName(), modelBytesMemoryLimit); - } - builder.field(TOTAL_BY_FIELD_COUNT_FIELD.getPreferredName(), totalByFieldCount); - 
builder.field(TOTAL_OVER_FIELD_COUNT_FIELD.getPreferredName(), totalOverFieldCount); - builder.field(TOTAL_PARTITION_FIELD_COUNT_FIELD.getPreferredName(), totalPartitionFieldCount); - builder.field(BUCKET_ALLOCATION_FAILURES_COUNT_FIELD.getPreferredName(), bucketAllocationFailuresCount); - builder.field(MEMORY_STATUS_FIELD.getPreferredName(), memoryStatus); - if (assignmentMemoryBasis != null) { - builder.field(ASSIGNMENT_MEMORY_BASIS_FIELD.getPreferredName(), assignmentMemoryBasis); - } - builder.field(CATEGORIZED_DOC_COUNT_FIELD.getPreferredName(), categorizedDocCount); - builder.field(TOTAL_CATEGORY_COUNT_FIELD.getPreferredName(), totalCategoryCount); - builder.field(FREQUENT_CATEGORY_COUNT_FIELD.getPreferredName(), frequentCategoryCount); - builder.field(RARE_CATEGORY_COUNT_FIELD.getPreferredName(), rareCategoryCount); - builder.field(DEAD_CATEGORY_COUNT_FIELD.getPreferredName(), deadCategoryCount); - builder.field(FAILED_CATEGORY_COUNT_FIELD.getPreferredName(), failedCategoryCount); - builder.field(CATEGORIZATION_STATUS_FIELD.getPreferredName(), categorizationStatus); - builder.timeField(LOG_TIME_FIELD.getPreferredName(), LOG_TIME_FIELD.getPreferredName() + "_string", logTime.getTime()); - if (timestamp != null) { - builder.timeField(TIMESTAMP_FIELD.getPreferredName(), TIMESTAMP_FIELD.getPreferredName() + "_string", timestamp.getTime()); - } - - builder.endObject(); - return builder; - } - - public String getJobId() { - return jobId; - } - - public long getModelBytes() { - return modelBytes; - } - - public Long getPeakModelBytes() { - return peakModelBytes; - } - - public Long getModelBytesExceeded() { - return modelBytesExceeded; - } - - public Long getModelBytesMemoryLimit() { - return modelBytesMemoryLimit; - } - - public long getTotalByFieldCount() { - return totalByFieldCount; - } - - public long getTotalPartitionFieldCount() { - return totalPartitionFieldCount; - } - - public long getTotalOverFieldCount() { - return totalOverFieldCount; - } - - public long getBucketAllocationFailuresCount() { - return bucketAllocationFailuresCount; - } - - public MemoryStatus getMemoryStatus() { - return memoryStatus; - } - - @Nullable - public AssignmentMemoryBasis getAssignmentMemoryBasis() { - return assignmentMemoryBasis; - } - - public long getCategorizedDocCount() { - return categorizedDocCount; - } - - public long getTotalCategoryCount() { - return totalCategoryCount; - } - - public long getFrequentCategoryCount() { - return frequentCategoryCount; - } - - public long getRareCategoryCount() { - return rareCategoryCount; - } - - public long getDeadCategoryCount() { - return deadCategoryCount; - } - - public long getFailedCategoryCount() { - return failedCategoryCount; - } - - public CategorizationStatus getCategorizationStatus() { - return categorizationStatus; - } - - /** - * The timestamp of the last processed record when this instance was created. - * - * @return The record time - */ - public Date getTimestamp() { - return timestamp; - } - - /** - * The wall clock time at the point when this instance was created. 
- * - * @return The wall clock time - */ - public Date getLogTime() { - return logTime; - } - - @Override - public int hashCode() { - return Objects.hash( - jobId, - modelBytes, - peakModelBytes, - modelBytesExceeded, - modelBytesMemoryLimit, - totalByFieldCount, - totalOverFieldCount, - totalPartitionFieldCount, - this.bucketAllocationFailuresCount, - memoryStatus, - assignmentMemoryBasis, - categorizedDocCount, - totalCategoryCount, - frequentCategoryCount, - rareCategoryCount, - deadCategoryCount, - failedCategoryCount, - categorizationStatus, - timestamp, - logTime - ); - } - - /** - * Compare all the fields. - */ - @Override - public boolean equals(Object other) { - if (this == other) { - return true; - } - - if (other == null || getClass() != other.getClass()) { - return false; - } - - ModelSizeStats that = (ModelSizeStats) other; - - return this.modelBytes == that.modelBytes - && Objects.equals(this.peakModelBytes, that.peakModelBytes) - && Objects.equals(this.modelBytesExceeded, that.modelBytesExceeded) - && Objects.equals(this.modelBytesMemoryLimit, that.modelBytesMemoryLimit) - && this.totalByFieldCount == that.totalByFieldCount - && this.totalOverFieldCount == that.totalOverFieldCount - && this.totalPartitionFieldCount == that.totalPartitionFieldCount - && this.bucketAllocationFailuresCount == that.bucketAllocationFailuresCount - && Objects.equals(this.memoryStatus, that.memoryStatus) - && Objects.equals(this.assignmentMemoryBasis, that.assignmentMemoryBasis) - && this.categorizedDocCount == that.categorizedDocCount - && this.totalCategoryCount == that.totalCategoryCount - && this.frequentCategoryCount == that.frequentCategoryCount - && this.rareCategoryCount == that.rareCategoryCount - && this.deadCategoryCount == that.deadCategoryCount - && this.failedCategoryCount == that.failedCategoryCount - && Objects.equals(this.categorizationStatus, that.categorizationStatus) - && Objects.equals(this.timestamp, that.timestamp) - && Objects.equals(this.logTime, that.logTime) - && Objects.equals(this.jobId, that.jobId); - } - - public static class Builder { - - private final String jobId; - private long modelBytes; - private Long peakModelBytes; - private Long modelBytesExceeded; - private Long modelBytesMemoryLimit; - private long totalByFieldCount; - private long totalOverFieldCount; - private long totalPartitionFieldCount; - private long bucketAllocationFailuresCount; - private MemoryStatus memoryStatus; - private AssignmentMemoryBasis assignmentMemoryBasis; - private long categorizedDocCount; - private long totalCategoryCount; - private long frequentCategoryCount; - private long rareCategoryCount; - private long deadCategoryCount; - private long failedCategoryCount; - private CategorizationStatus categorizationStatus; - private Date timestamp; - private Date logTime; - - public Builder(String jobId) { - this.jobId = jobId; - memoryStatus = MemoryStatus.OK; - categorizationStatus = CategorizationStatus.OK; - logTime = new Date(); - } - - public Builder(ModelSizeStats modelSizeStats) { - this.jobId = modelSizeStats.jobId; - this.modelBytes = modelSizeStats.modelBytes; - this.peakModelBytes = modelSizeStats.peakModelBytes; - this.modelBytesExceeded = modelSizeStats.modelBytesExceeded; - this.modelBytesMemoryLimit = modelSizeStats.modelBytesMemoryLimit; - this.totalByFieldCount = modelSizeStats.totalByFieldCount; - this.totalOverFieldCount = modelSizeStats.totalOverFieldCount; - this.totalPartitionFieldCount = modelSizeStats.totalPartitionFieldCount; - this.bucketAllocationFailuresCount = 
modelSizeStats.bucketAllocationFailuresCount; - this.memoryStatus = modelSizeStats.memoryStatus; - this.assignmentMemoryBasis = modelSizeStats.assignmentMemoryBasis; - this.categorizedDocCount = modelSizeStats.categorizedDocCount; - this.totalCategoryCount = modelSizeStats.totalCategoryCount; - this.frequentCategoryCount = modelSizeStats.frequentCategoryCount; - this.rareCategoryCount = modelSizeStats.rareCategoryCount; - this.deadCategoryCount = modelSizeStats.deadCategoryCount; - this.failedCategoryCount = modelSizeStats.failedCategoryCount; - this.categorizationStatus = modelSizeStats.categorizationStatus; - this.timestamp = modelSizeStats.timestamp; - this.logTime = modelSizeStats.logTime; - } - - public Builder setModelBytes(long modelBytes) { - this.modelBytes = modelBytes; - return this; - } - - public Builder setPeakModelBytes(long peakModelBytes) { - this.peakModelBytes = peakModelBytes; - return this; - } - - public Builder setModelBytesExceeded(long modelBytesExceeded) { - this.modelBytesExceeded = modelBytesExceeded; - return this; - } - - public Builder setModelBytesMemoryLimit(long modelBytesMemoryLimit) { - this.modelBytesMemoryLimit = modelBytesMemoryLimit; - return this; - } - - public Builder setTotalByFieldCount(long totalByFieldCount) { - this.totalByFieldCount = totalByFieldCount; - return this; - } - - public Builder setTotalPartitionFieldCount(long totalPartitionFieldCount) { - this.totalPartitionFieldCount = totalPartitionFieldCount; - return this; - } - - public Builder setTotalOverFieldCount(long totalOverFieldCount) { - this.totalOverFieldCount = totalOverFieldCount; - return this; - } - - public Builder setBucketAllocationFailuresCount(long bucketAllocationFailuresCount) { - this.bucketAllocationFailuresCount = bucketAllocationFailuresCount; - return this; - } - - public Builder setMemoryStatus(MemoryStatus memoryStatus) { - Objects.requireNonNull(memoryStatus, "[" + MEMORY_STATUS_FIELD.getPreferredName() + "] must not be null"); - this.memoryStatus = memoryStatus; - return this; - } - - public Builder setAssignmentMemoryBasis(AssignmentMemoryBasis assignmentMemoryBasis) { - this.assignmentMemoryBasis = assignmentMemoryBasis; - return this; - } - - public Builder setCategorizedDocCount(long categorizedDocCount) { - this.categorizedDocCount = categorizedDocCount; - return this; - } - - public Builder setTotalCategoryCount(long totalCategoryCount) { - this.totalCategoryCount = totalCategoryCount; - return this; - } - - public Builder setFrequentCategoryCount(long frequentCategoryCount) { - this.frequentCategoryCount = frequentCategoryCount; - return this; - } - - public Builder setRareCategoryCount(long rareCategoryCount) { - this.rareCategoryCount = rareCategoryCount; - return this; - } - - public Builder setDeadCategoryCount(long deadCategoryCount) { - this.deadCategoryCount = deadCategoryCount; - return this; - } - - public Builder setFailedCategoryCount(long failedCategoryCount) { - this.failedCategoryCount = failedCategoryCount; - return this; - } - - public Builder setCategorizationStatus(CategorizationStatus categorizationStatus) { - Objects.requireNonNull(categorizationStatus, "[" + CATEGORIZATION_STATUS_FIELD.getPreferredName() + "] must not be null"); - this.categorizationStatus = categorizationStatus; - return this; - } - - public Builder setTimestamp(Date timestamp) { - this.timestamp = timestamp; - return this; - } - - public Builder setLogTime(Date logTime) { - this.logTime = logTime; - return this; - } - - public ModelSizeStats build() { - return 
new ModelSizeStats( - jobId, - modelBytes, - peakModelBytes, - modelBytesExceeded, - modelBytesMemoryLimit, - totalByFieldCount, - totalOverFieldCount, - totalPartitionFieldCount, - bucketAllocationFailuresCount, - memoryStatus, - assignmentMemoryBasis, - categorizedDocCount, - totalCategoryCount, - frequentCategoryCount, - rareCategoryCount, - deadCategoryCount, - failedCategoryCount, - categorizationStatus, - timestamp, - logTime - ); - } - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/process/ModelSnapshot.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/process/ModelSnapshot.java deleted file mode 100644 index e21b8f512a143..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/process/ModelSnapshot.java +++ /dev/null @@ -1,361 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.job.process; - -import org.elasticsearch.Version; -import org.elasticsearch.client.common.TimeUtil; -import org.elasticsearch.client.ml.job.config.Job; -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ObjectParser.ValueType; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Date; -import java.util.Objects; - -/** - * ModelSnapshot Result POJO - */ -public class ModelSnapshot implements ToXContentObject { - /** - * Field Names - */ - public static final ParseField TIMESTAMP = new ParseField("timestamp"); - public static final ParseField DESCRIPTION = new ParseField("description"); - public static final ParseField SNAPSHOT_DOC_COUNT = new ParseField("snapshot_doc_count"); - public static final ParseField LATEST_RECORD_TIME = new ParseField("latest_record_time_stamp"); - public static final ParseField LATEST_RESULT_TIME = new ParseField("latest_result_time_stamp"); - public static final ParseField QUANTILES = new ParseField("quantiles"); - public static final ParseField RETAIN = new ParseField("retain"); - public static final ParseField MIN_VERSION = new ParseField("min_version"); - public static final ParseField SNAPSHOT_ID = new ParseField("snapshot_id"); - - public static final ObjectParser PARSER = new ObjectParser<>("model_snapshot", true, Builder::new); - - static { - PARSER.declareString(Builder::setJobId, Job.ID); - PARSER.declareString(Builder::setMinVersion, MIN_VERSION); - PARSER.declareField( - Builder::setTimestamp, - (p) -> TimeUtil.parseTimeField(p, TIMESTAMP.getPreferredName()), - TIMESTAMP, - ValueType.VALUE - ); - PARSER.declareString(Builder::setDescription, DESCRIPTION); - PARSER.declareString(Builder::setSnapshotId, SNAPSHOT_ID); - PARSER.declareInt(Builder::setSnapshotDocCount, SNAPSHOT_DOC_COUNT); - PARSER.declareObject(Builder::setModelSizeStats, ModelSizeStats.PARSER, ModelSizeStats.RESULT_TYPE_FIELD); - PARSER.declareField( - Builder::setLatestRecordTimeStamp, - (p) -> TimeUtil.parseTimeField(p, LATEST_RECORD_TIME.getPreferredName()), - LATEST_RECORD_TIME, - ValueType.VALUE - ); - PARSER.declareField( - Builder::setLatestResultTimeStamp, - (p) -> 
TimeUtil.parseTimeField(p, LATEST_RESULT_TIME.getPreferredName()), - LATEST_RESULT_TIME, - ValueType.VALUE - ); - PARSER.declareObject(Builder::setQuantiles, Quantiles.PARSER, QUANTILES); - PARSER.declareBoolean(Builder::setRetain, RETAIN); - } - - private final String jobId; - - /** - * The minimum version a node should have to be able - * to read this model snapshot. - */ - private final Version minVersion; - - private final Date timestamp; - private final String description; - private final String snapshotId; - private final int snapshotDocCount; - private final ModelSizeStats modelSizeStats; - private final Date latestRecordTimeStamp; - private final Date latestResultTimeStamp; - private final Quantiles quantiles; - private final boolean retain; - - private ModelSnapshot( - String jobId, - Version minVersion, - Date timestamp, - String description, - String snapshotId, - int snapshotDocCount, - ModelSizeStats modelSizeStats, - Date latestRecordTimeStamp, - Date latestResultTimeStamp, - Quantiles quantiles, - boolean retain - ) { - this.jobId = jobId; - this.minVersion = minVersion; - this.timestamp = timestamp; - this.description = description; - this.snapshotId = snapshotId; - this.snapshotDocCount = snapshotDocCount; - this.modelSizeStats = modelSizeStats; - this.latestRecordTimeStamp = latestRecordTimeStamp; - this.latestResultTimeStamp = latestResultTimeStamp; - this.quantiles = quantiles; - this.retain = retain; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(Job.ID.getPreferredName(), jobId); - builder.field(MIN_VERSION.getPreferredName(), minVersion); - if (timestamp != null) { - builder.timeField(TIMESTAMP.getPreferredName(), TIMESTAMP.getPreferredName() + "_string", timestamp.getTime()); - } - if (description != null) { - builder.field(DESCRIPTION.getPreferredName(), description); - } - if (snapshotId != null) { - builder.field(SNAPSHOT_ID.getPreferredName(), snapshotId); - } - builder.field(SNAPSHOT_DOC_COUNT.getPreferredName(), snapshotDocCount); - if (modelSizeStats != null) { - builder.field(ModelSizeStats.RESULT_TYPE_FIELD.getPreferredName(), modelSizeStats); - } - if (latestRecordTimeStamp != null) { - builder.timeField( - LATEST_RECORD_TIME.getPreferredName(), - LATEST_RECORD_TIME.getPreferredName() + "_string", - latestRecordTimeStamp.getTime() - ); - } - if (latestResultTimeStamp != null) { - builder.timeField( - LATEST_RESULT_TIME.getPreferredName(), - LATEST_RESULT_TIME.getPreferredName() + "_string", - latestResultTimeStamp.getTime() - ); - } - if (quantiles != null) { - builder.field(QUANTILES.getPreferredName(), quantiles); - } - builder.field(RETAIN.getPreferredName(), retain); - builder.endObject(); - return builder; - } - - public String getJobId() { - return jobId; - } - - public Version getMinVersion() { - return minVersion; - } - - public Date getTimestamp() { - return timestamp; - } - - public String getDescription() { - return description; - } - - public String getSnapshotId() { - return snapshotId; - } - - public int getSnapshotDocCount() { - return snapshotDocCount; - } - - public ModelSizeStats getModelSizeStats() { - return modelSizeStats; - } - - public Quantiles getQuantiles() { - return quantiles; - } - - public boolean getRetain() { - return retain; - } - - public Date getLatestRecordTimeStamp() { - return latestRecordTimeStamp; - } - - public Date getLatestResultTimeStamp() { - return latestResultTimeStamp; - } - - @Override - public int 
hashCode() { - return Objects.hash( - jobId, - minVersion, - timestamp, - description, - snapshotId, - quantiles, - snapshotDocCount, - modelSizeStats, - latestRecordTimeStamp, - latestResultTimeStamp, - retain - ); - } - - /** - * Compare all the fields. - */ - @Override - public boolean equals(Object other) { - if (this == other) { - return true; - } - - if (other == null || getClass() != other.getClass()) { - return false; - } - - ModelSnapshot that = (ModelSnapshot) other; - - return Objects.equals(this.jobId, that.jobId) - && Objects.equals(this.minVersion, that.minVersion) - && Objects.equals(this.timestamp, that.timestamp) - && Objects.equals(this.description, that.description) - && Objects.equals(this.snapshotId, that.snapshotId) - && this.snapshotDocCount == that.snapshotDocCount - && Objects.equals(this.modelSizeStats, that.modelSizeStats) - && Objects.equals(this.quantiles, that.quantiles) - && Objects.equals(this.latestRecordTimeStamp, that.latestRecordTimeStamp) - && Objects.equals(this.latestResultTimeStamp, that.latestResultTimeStamp) - && this.retain == that.retain; - } - - public static class Builder { - private String jobId; - - // Stored snapshot documents created prior to 6.3.0 will have no value for min_version. - private Version minVersion = Version.fromString("6.3.0"); - - private Date timestamp; - private String description; - private String snapshotId; - private int snapshotDocCount; - private ModelSizeStats modelSizeStats; - private Date latestRecordTimeStamp; - private Date latestResultTimeStamp; - private Quantiles quantiles; - private boolean retain; - - public Builder() {} - - public Builder(String jobId) { - this.jobId = jobId; - } - - public Builder(ModelSnapshot modelSnapshot) { - this.jobId = modelSnapshot.jobId; - this.timestamp = modelSnapshot.timestamp; - this.description = modelSnapshot.description; - this.snapshotId = modelSnapshot.snapshotId; - this.snapshotDocCount = modelSnapshot.snapshotDocCount; - this.modelSizeStats = modelSnapshot.modelSizeStats; - this.latestRecordTimeStamp = modelSnapshot.latestRecordTimeStamp; - this.latestResultTimeStamp = modelSnapshot.latestResultTimeStamp; - this.quantiles = modelSnapshot.quantiles; - this.retain = modelSnapshot.retain; - this.minVersion = modelSnapshot.minVersion; - } - - public Builder setJobId(String jobId) { - this.jobId = jobId; - return this; - } - - Builder setMinVersion(Version minVersion) { - this.minVersion = minVersion; - return this; - } - - Builder setMinVersion(String minVersion) { - this.minVersion = Version.fromString(minVersion); - return this; - } - - public Builder setTimestamp(Date timestamp) { - this.timestamp = timestamp; - return this; - } - - public Builder setDescription(String description) { - this.description = description; - return this; - } - - public Builder setSnapshotId(String snapshotId) { - this.snapshotId = snapshotId; - return this; - } - - public Builder setSnapshotDocCount(int snapshotDocCount) { - this.snapshotDocCount = snapshotDocCount; - return this; - } - - public Builder setModelSizeStats(ModelSizeStats.Builder modelSizeStats) { - this.modelSizeStats = modelSizeStats.build(); - return this; - } - - public Builder setModelSizeStats(ModelSizeStats modelSizeStats) { - this.modelSizeStats = modelSizeStats; - return this; - } - - public Builder setLatestRecordTimeStamp(Date latestRecordTimeStamp) { - this.latestRecordTimeStamp = latestRecordTimeStamp; - return this; - } - - public Builder setLatestResultTimeStamp(Date latestResultTimeStamp) { - 
this.latestResultTimeStamp = latestResultTimeStamp; - return this; - } - - public Builder setQuantiles(Quantiles quantiles) { - this.quantiles = quantiles; - return this; - } - - public Builder setRetain(boolean value) { - this.retain = value; - return this; - } - - public ModelSnapshot build() { - return new ModelSnapshot( - jobId, - minVersion, - timestamp, - description, - snapshotId, - snapshotDocCount, - modelSizeStats, - latestRecordTimeStamp, - latestResultTimeStamp, - quantiles, - retain - ); - } - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/process/Quantiles.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/process/Quantiles.java deleted file mode 100644 index 968447bcfa4dd..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/process/Quantiles.java +++ /dev/null @@ -1,104 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.job.process; - -import org.elasticsearch.client.ml.job.config.Job; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ObjectParser.ValueType; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Date; -import java.util.Objects; - -/** - * Quantiles Result POJO - */ -public class Quantiles implements ToXContentObject { - - /** - * Field Names - */ - public static final ParseField TIMESTAMP = new ParseField("timestamp"); - public static final ParseField QUANTILE_STATE = new ParseField("quantile_state"); - - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "quantiles", - true, - a -> new Quantiles((String) a[0], (Date) a[1], (String) a[2]) - ); - - static { - PARSER.declareString(ConstructingObjectParser.constructorArg(), Job.ID); - PARSER.declareField(ConstructingObjectParser.optionalConstructorArg(), p -> new Date(p.longValue()), TIMESTAMP, ValueType.LONG); - PARSER.declareString(ConstructingObjectParser.constructorArg(), QUANTILE_STATE); - } - - private final String jobId; - private final Date timestamp; - private final String quantileState; - - public Quantiles(String jobId, Date timestamp, String quantileState) { - this.jobId = jobId; - this.timestamp = Objects.requireNonNull(timestamp); - this.quantileState = Objects.requireNonNull(quantileState); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(Job.ID.getPreferredName(), jobId); - if (timestamp != null) { - builder.field(TIMESTAMP.getPreferredName(), timestamp.getTime()); - } - if (quantileState != null) { - builder.field(QUANTILE_STATE.getPreferredName(), quantileState); - } - builder.endObject(); - return builder; - } - - public String getJobId() { - return jobId; - } - - public Date getTimestamp() { - return timestamp; - } - - public String getQuantileState() { - return quantileState; - } - - @Override - public int hashCode() { - return Objects.hash(jobId, timestamp, quantileState); - } - - /** - * Compare all the fields. 
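 A small construction sketch (values illustrative). Note that the constructor
 requires a non-null timestamp and quantile state, so the null checks in
 toXContent above are purely defensive:

     Quantiles quantiles = new Quantiles("my-job", new Date(), "<serialized model state>");
     assert quantiles.getTimestamp() != null; // enforced via Objects.requireNonNull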
- */ - @Override - public boolean equals(Object other) { - if (this == other) { - return true; - } - - if (other == null || getClass() != other.getClass()) { - return false; - } - - Quantiles that = (Quantiles) other; - - return Objects.equals(this.jobId, that.jobId) - && Objects.equals(this.timestamp, that.timestamp) - && Objects.equals(this.quantileState, that.quantileState); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/process/TimingStats.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/process/TimingStats.java deleted file mode 100644 index 60ed9252affde..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/process/TimingStats.java +++ /dev/null @@ -1,200 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.job.process; - -import org.elasticsearch.client.ml.job.config.Job; -import org.elasticsearch.common.Strings; -import org.elasticsearch.core.Nullable; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContent; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Objects; - -import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; -import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; - -/** - * Stats that give more insight into timing of various operations performed as part of anomaly detection job. 
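 A minimal parsing sketch (reusing the xcontent helper pattern from
 RuleScope above; the job id is illustrative) showing how the optional
 timing fields of the class below default when absent from the document:

     String json = "{\"job_id\":\"my-job\"}";
     try (XContentParser p = XContentFactory.xContent(XContentType.JSON)
         .createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.IGNORE_DEPRECATIONS, json)) {
         TimingStats stats = TimingStats.PARSER.apply(p, null);
         assert stats.getBucketCount() == 0L;                  // getOrDefault(bucketCount, 0L)
         assert stats.getTotalBucketProcessingTimeMs() == 0.0; // getOrDefault(total..., 0.0)
         assert stats.getMinBucketProcessingTimeMs() == null;  // nullable, no default
     }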
- */ -public class TimingStats implements ToXContentObject { - - public static final ParseField BUCKET_COUNT = new ParseField("bucket_count"); - public static final ParseField TOTAL_BUCKET_PROCESSING_TIME_MS = new ParseField("total_bucket_processing_time_ms"); - public static final ParseField MIN_BUCKET_PROCESSING_TIME_MS = new ParseField("minimum_bucket_processing_time_ms"); - public static final ParseField MAX_BUCKET_PROCESSING_TIME_MS = new ParseField("maximum_bucket_processing_time_ms"); - public static final ParseField AVG_BUCKET_PROCESSING_TIME_MS = new ParseField("average_bucket_processing_time_ms"); - public static final ParseField EXPONENTIAL_AVG_BUCKET_PROCESSING_TIME_MS = new ParseField( - "exponential_average_bucket_processing_time_ms" - ); - public static final ParseField EXPONENTIAL_AVG_BUCKET_PROCESSING_TIME_PER_HOUR_MS = new ParseField( - "exponential_average_bucket_processing_time_per_hour_ms" - ); - - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>("timing_stats", true, args -> { - String jobId = (String) args[0]; - Long bucketCount = (Long) args[1]; - Double totalBucketProcessingTimeMs = (Double) args[2]; - Double minBucketProcessingTimeMs = (Double) args[3]; - Double maxBucketProcessingTimeMs = (Double) args[4]; - Double avgBucketProcessingTimeMs = (Double) args[5]; - Double exponentialAvgBucketProcessingTimeMs = (Double) args[6]; - Double exponentialAvgBucketProcessingTimePerHourMs = (Double) args[7]; - return new TimingStats( - jobId, - getOrDefault(bucketCount, 0L), - getOrDefault(totalBucketProcessingTimeMs, 0.0), - minBucketProcessingTimeMs, - maxBucketProcessingTimeMs, - avgBucketProcessingTimeMs, - exponentialAvgBucketProcessingTimeMs, - exponentialAvgBucketProcessingTimePerHourMs - ); - }); - - static { - PARSER.declareString(constructorArg(), Job.ID); - PARSER.declareLong(optionalConstructorArg(), BUCKET_COUNT); - PARSER.declareDouble(optionalConstructorArg(), TOTAL_BUCKET_PROCESSING_TIME_MS); - PARSER.declareDouble(optionalConstructorArg(), MIN_BUCKET_PROCESSING_TIME_MS); - PARSER.declareDouble(optionalConstructorArg(), MAX_BUCKET_PROCESSING_TIME_MS); - PARSER.declareDouble(optionalConstructorArg(), AVG_BUCKET_PROCESSING_TIME_MS); - PARSER.declareDouble(optionalConstructorArg(), EXPONENTIAL_AVG_BUCKET_PROCESSING_TIME_MS); - PARSER.declareDouble(optionalConstructorArg(), EXPONENTIAL_AVG_BUCKET_PROCESSING_TIME_PER_HOUR_MS); - } - - private final String jobId; - private long bucketCount; - private double totalBucketProcessingTimeMs; - private Double minBucketProcessingTimeMs; - private Double maxBucketProcessingTimeMs; - private Double avgBucketProcessingTimeMs; - private Double exponentialAvgBucketProcessingTimeMs; - private Double exponentialAvgBucketProcessingTimePerHourMs; - - public TimingStats( - String jobId, - long bucketCount, - double totalBucketProcessingTimeMs, - @Nullable Double minBucketProcessingTimeMs, - @Nullable Double maxBucketProcessingTimeMs, - @Nullable Double avgBucketProcessingTimeMs, - @Nullable Double exponentialAvgBucketProcessingTimeMs, - @Nullable Double exponentialAvgBucketProcessingTimePerHourMs - ) { - this.jobId = jobId; - this.bucketCount = bucketCount; - this.totalBucketProcessingTimeMs = totalBucketProcessingTimeMs; - this.minBucketProcessingTimeMs = minBucketProcessingTimeMs; - this.maxBucketProcessingTimeMs = maxBucketProcessingTimeMs; - this.avgBucketProcessingTimeMs = avgBucketProcessingTimeMs; - this.exponentialAvgBucketProcessingTimeMs = exponentialAvgBucketProcessingTimeMs; - 
this.exponentialAvgBucketProcessingTimePerHourMs = exponentialAvgBucketProcessingTimePerHourMs; - } - - public String getJobId() { - return jobId; - } - - public long getBucketCount() { - return bucketCount; - } - - public double getTotalBucketProcessingTimeMs() { - return totalBucketProcessingTimeMs; - } - - public Double getMinBucketProcessingTimeMs() { - return minBucketProcessingTimeMs; - } - - public Double getMaxBucketProcessingTimeMs() { - return maxBucketProcessingTimeMs; - } - - public Double getAvgBucketProcessingTimeMs() { - return avgBucketProcessingTimeMs; - } - - public Double getExponentialAvgBucketProcessingTimeMs() { - return exponentialAvgBucketProcessingTimeMs; - } - - public Double getExponentialAvgBucketProcessingTimePerHourMs() { - return exponentialAvgBucketProcessingTimePerHourMs; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { - builder.startObject(); - builder.field(Job.ID.getPreferredName(), jobId); - builder.field(BUCKET_COUNT.getPreferredName(), bucketCount); - builder.field(TOTAL_BUCKET_PROCESSING_TIME_MS.getPreferredName(), totalBucketProcessingTimeMs); - if (minBucketProcessingTimeMs != null) { - builder.field(MIN_BUCKET_PROCESSING_TIME_MS.getPreferredName(), minBucketProcessingTimeMs); - } - if (maxBucketProcessingTimeMs != null) { - builder.field(MAX_BUCKET_PROCESSING_TIME_MS.getPreferredName(), maxBucketProcessingTimeMs); - } - if (avgBucketProcessingTimeMs != null) { - builder.field(AVG_BUCKET_PROCESSING_TIME_MS.getPreferredName(), avgBucketProcessingTimeMs); - } - if (exponentialAvgBucketProcessingTimeMs != null) { - builder.field(EXPONENTIAL_AVG_BUCKET_PROCESSING_TIME_MS.getPreferredName(), exponentialAvgBucketProcessingTimeMs); - } - if (exponentialAvgBucketProcessingTimePerHourMs != null) { - builder.field( - EXPONENTIAL_AVG_BUCKET_PROCESSING_TIME_PER_HOUR_MS.getPreferredName(), - exponentialAvgBucketProcessingTimePerHourMs - ); - } - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object o) { - if (o == this) return true; - if (o == null || getClass() != o.getClass()) return false; - TimingStats that = (TimingStats) o; - return Objects.equals(this.jobId, that.jobId) - && this.bucketCount == that.bucketCount - && this.totalBucketProcessingTimeMs == that.totalBucketProcessingTimeMs - && Objects.equals(this.minBucketProcessingTimeMs, that.minBucketProcessingTimeMs) - && Objects.equals(this.maxBucketProcessingTimeMs, that.maxBucketProcessingTimeMs) - && Objects.equals(this.avgBucketProcessingTimeMs, that.avgBucketProcessingTimeMs) - && Objects.equals(this.exponentialAvgBucketProcessingTimeMs, that.exponentialAvgBucketProcessingTimeMs) - && Objects.equals(this.exponentialAvgBucketProcessingTimePerHourMs, that.exponentialAvgBucketProcessingTimePerHourMs); - } - - @Override - public int hashCode() { - return Objects.hash( - jobId, - bucketCount, - totalBucketProcessingTimeMs, - minBucketProcessingTimeMs, - maxBucketProcessingTimeMs, - avgBucketProcessingTimeMs, - exponentialAvgBucketProcessingTimeMs, - exponentialAvgBucketProcessingTimePerHourMs - ); - } - - @Override - public String toString() { - return Strings.toString(this); - } - - private static T getOrDefault(@Nullable T value, T defaultValue) { - return value != null ? 
value : defaultValue; - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/results/AnomalyCause.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/results/AnomalyCause.java deleted file mode 100644 index 5be75c52b19a6..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/results/AnomalyCause.java +++ /dev/null @@ -1,322 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.job.results; - -import org.elasticsearch.client.ml.job.config.DetectorFunction; -import org.elasticsearch.common.geo.GeoPoint; -import org.elasticsearch.core.Nullable; -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Collections; -import java.util.List; -import java.util.Objects; - -/** - * Anomaly Cause POJO. - * Used as a nested level inside population anomaly records. - */ -public class AnomalyCause implements ToXContentObject { - - public static final ParseField ANOMALY_CAUSE = new ParseField("anomaly_cause"); - - /** - * Result fields - */ - public static final ParseField PROBABILITY = new ParseField("probability"); - public static final ParseField OVER_FIELD_NAME = new ParseField("over_field_name"); - public static final ParseField OVER_FIELD_VALUE = new ParseField("over_field_value"); - public static final ParseField BY_FIELD_NAME = new ParseField("by_field_name"); - public static final ParseField BY_FIELD_VALUE = new ParseField("by_field_value"); - public static final ParseField CORRELATED_BY_FIELD_VALUE = new ParseField("correlated_by_field_value"); - public static final ParseField PARTITION_FIELD_NAME = new ParseField("partition_field_name"); - public static final ParseField PARTITION_FIELD_VALUE = new ParseField("partition_field_value"); - public static final ParseField FUNCTION = new ParseField("function"); - public static final ParseField FUNCTION_DESCRIPTION = new ParseField("function_description"); - public static final ParseField TYPICAL = new ParseField("typical"); - public static final ParseField ACTUAL = new ParseField("actual"); - public static final ParseField INFLUENCERS = new ParseField("influencers"); - - /** - * Metric Results - */ - public static final ParseField FIELD_NAME = new ParseField("field_name"); - - public static final ObjectParser PARSER = new ObjectParser<>( - ANOMALY_CAUSE.getPreferredName(), - true, - AnomalyCause::new - ); - - static { - PARSER.declareDouble(AnomalyCause::setProbability, PROBABILITY); - PARSER.declareString(AnomalyCause::setByFieldName, BY_FIELD_NAME); - PARSER.declareString(AnomalyCause::setByFieldValue, BY_FIELD_VALUE); - PARSER.declareString(AnomalyCause::setCorrelatedByFieldValue, CORRELATED_BY_FIELD_VALUE); - PARSER.declareString(AnomalyCause::setPartitionFieldName, PARTITION_FIELD_NAME); - PARSER.declareString(AnomalyCause::setPartitionFieldValue, PARTITION_FIELD_VALUE); - PARSER.declareString(AnomalyCause::setFunction, FUNCTION); - PARSER.declareString(AnomalyCause::setFunctionDescription, FUNCTION_DESCRIPTION); - 
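        // TYPICAL and ACTUAL are declared as double arrays rather than single
        // values: most detector functions produce one element, but lat_long
        // produces a [latitude, longitude] pair, which getTypicalGeoPoint()
        // and getActualGeoPoint() further down turn into a GeoPoint.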
PARSER.declareDoubleArray(AnomalyCause::setTypical, TYPICAL); - PARSER.declareDoubleArray(AnomalyCause::setActual, ACTUAL); - PARSER.declareString(AnomalyCause::setFieldName, FIELD_NAME); - PARSER.declareString(AnomalyCause::setOverFieldName, OVER_FIELD_NAME); - PARSER.declareString(AnomalyCause::setOverFieldValue, OVER_FIELD_VALUE); - PARSER.declareObjectArray(AnomalyCause::setInfluencers, Influence.PARSER, INFLUENCERS); - } - - private double probability; - private String byFieldName; - private String byFieldValue; - private String correlatedByFieldValue; - private String partitionFieldName; - private String partitionFieldValue; - private String function; - private String functionDescription; - private List typical; - private List actual; - private String fieldName; - private String overFieldName; - private String overFieldValue; - - private List influencers; - - AnomalyCause() {} - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(PROBABILITY.getPreferredName(), probability); - if (byFieldName != null) { - builder.field(BY_FIELD_NAME.getPreferredName(), byFieldName); - } - if (byFieldValue != null) { - builder.field(BY_FIELD_VALUE.getPreferredName(), byFieldValue); - } - if (correlatedByFieldValue != null) { - builder.field(CORRELATED_BY_FIELD_VALUE.getPreferredName(), correlatedByFieldValue); - } - if (partitionFieldName != null) { - builder.field(PARTITION_FIELD_NAME.getPreferredName(), partitionFieldName); - } - if (partitionFieldValue != null) { - builder.field(PARTITION_FIELD_VALUE.getPreferredName(), partitionFieldValue); - } - if (function != null) { - builder.field(FUNCTION.getPreferredName(), function); - } - if (functionDescription != null) { - builder.field(FUNCTION_DESCRIPTION.getPreferredName(), functionDescription); - } - if (typical != null) { - builder.field(TYPICAL.getPreferredName(), typical); - } - if (actual != null) { - builder.field(ACTUAL.getPreferredName(), actual); - } - if (fieldName != null) { - builder.field(FIELD_NAME.getPreferredName(), fieldName); - } - if (overFieldName != null) { - builder.field(OVER_FIELD_NAME.getPreferredName(), overFieldName); - } - if (overFieldValue != null) { - builder.field(OVER_FIELD_VALUE.getPreferredName(), overFieldValue); - } - if (influencers != null) { - builder.field(INFLUENCERS.getPreferredName(), influencers); - } - builder.endObject(); - return builder; - } - - public double getProbability() { - return probability; - } - - void setProbability(double value) { - probability = value; - } - - public String getByFieldName() { - return byFieldName; - } - - void setByFieldName(String value) { - byFieldName = value; - } - - public String getByFieldValue() { - return byFieldValue; - } - - void setByFieldValue(String value) { - byFieldValue = value; - } - - public String getCorrelatedByFieldValue() { - return correlatedByFieldValue; - } - - void setCorrelatedByFieldValue(String value) { - correlatedByFieldValue = value; - } - - public String getPartitionFieldName() { - return partitionFieldName; - } - - void setPartitionFieldName(String field) { - partitionFieldName = field; - } - - public String getPartitionFieldValue() { - return partitionFieldValue; - } - - void setPartitionFieldValue(String value) { - partitionFieldValue = value; - } - - public String getFunction() { - return function; - } - - void setFunction(String name) { - function = name; - } - - public String getFunctionDescription() { - return functionDescription; - } - 
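    // The setters in this class are package-private, presumably so that in
    // practice instances are populated only by PARSER while parsing responses;
    // callers get an effectively read-only view through the public getters.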
- void setFunctionDescription(String functionDescription) { - this.functionDescription = functionDescription; - } - - public List getTypical() { - return typical; - } - - void setTypical(List typical) { - this.typical = Collections.unmodifiableList(typical); - } - - public List getActual() { - return actual; - } - - void setActual(List actual) { - this.actual = Collections.unmodifiableList(actual); - } - - public String getFieldName() { - return fieldName; - } - - void setFieldName(String field) { - fieldName = field; - } - - public String getOverFieldName() { - return overFieldName; - } - - void setOverFieldName(String name) { - overFieldName = name; - } - - public String getOverFieldValue() { - return overFieldValue; - } - - void setOverFieldValue(String value) { - overFieldValue = value; - } - - public List getInfluencers() { - return influencers; - } - - void setInfluencers(List influencers) { - this.influencers = Collections.unmodifiableList(influencers); - } - - @Nullable - public GeoPoint getTypicalGeoPoint() { - if (DetectorFunction.LAT_LONG.getFullName().equals(function) == false || typical == null) { - return null; - } - if (typical.size() == 2) { - return new GeoPoint(typical.get(0), typical.get(1)); - } - return null; - } - - @Nullable - public GeoPoint getActualGeoPoint() { - if (DetectorFunction.LAT_LONG.getFullName().equals(function) == false || actual == null) { - return null; - } - if (actual.size() == 2) { - return new GeoPoint(actual.get(0), actual.get(1)); - } - return null; - } - - @Override - public int hashCode() { - return Objects.hash( - probability, - actual, - typical, - byFieldName, - byFieldValue, - correlatedByFieldValue, - fieldName, - function, - functionDescription, - overFieldName, - overFieldValue, - partitionFieldName, - partitionFieldValue, - influencers - ); - } - - @Override - public boolean equals(Object other) { - if (this == other) { - return true; - } - - if (other == null || getClass() != other.getClass()) { - return false; - } - - AnomalyCause that = (AnomalyCause) other; - - return this.probability == that.probability - && Objects.equals(this.typical, that.typical) - && Objects.equals(this.actual, that.actual) - && Objects.equals(this.function, that.function) - && Objects.equals(this.functionDescription, that.functionDescription) - && Objects.equals(this.fieldName, that.fieldName) - && Objects.equals(this.byFieldName, that.byFieldName) - && Objects.equals(this.byFieldValue, that.byFieldValue) - && Objects.equals(this.correlatedByFieldValue, that.correlatedByFieldValue) - && Objects.equals(this.partitionFieldName, that.partitionFieldName) - && Objects.equals(this.partitionFieldValue, that.partitionFieldValue) - && Objects.equals(this.overFieldName, that.overFieldName) - && Objects.equals(this.overFieldValue, that.overFieldValue) - && Objects.equals(this.influencers, that.influencers); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/results/AnomalyRecord.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/results/AnomalyRecord.java deleted file mode 100644 index f3a93703a0275..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/results/AnomalyRecord.java +++ /dev/null @@ -1,476 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. 
Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.job.results; - -import org.elasticsearch.client.common.TimeUtil; -import org.elasticsearch.client.ml.job.config.DetectorFunction; -import org.elasticsearch.client.ml.job.config.Job; -import org.elasticsearch.common.geo.GeoPoint; -import org.elasticsearch.core.Nullable; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ObjectParser.ValueType; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Collections; -import java.util.Date; -import java.util.List; -import java.util.Objects; - -/** - * Anomaly Record POJO. - * Uses the object wrappers Boolean and Double so null values - * can be returned if the members have not been set. - */ -public class AnomalyRecord implements ToXContentObject { - - /** - * Result type - */ - public static final String RESULT_TYPE_VALUE = "record"; - - /** - * Result fields (all detector types) - */ - public static final ParseField PROBABILITY = new ParseField("probability"); - public static final ParseField MULTI_BUCKET_IMPACT = new ParseField("multi_bucket_impact"); - public static final ParseField DETECTOR_INDEX = new ParseField("detector_index"); - public static final ParseField BY_FIELD_NAME = new ParseField("by_field_name"); - public static final ParseField BY_FIELD_VALUE = new ParseField("by_field_value"); - public static final ParseField CORRELATED_BY_FIELD_VALUE = new ParseField("correlated_by_field_value"); - public static final ParseField PARTITION_FIELD_NAME = new ParseField("partition_field_name"); - public static final ParseField PARTITION_FIELD_VALUE = new ParseField("partition_field_value"); - public static final ParseField FUNCTION = new ParseField("function"); - public static final ParseField FUNCTION_DESCRIPTION = new ParseField("function_description"); - public static final ParseField TYPICAL = new ParseField("typical"); - public static final ParseField ACTUAL = new ParseField("actual"); - public static final ParseField INFLUENCERS = new ParseField("influencers"); - public static final ParseField BUCKET_SPAN = new ParseField("bucket_span"); - - // Used for QueryPage - public static final ParseField RESULTS_FIELD = new ParseField("records"); - - /** - * Metric Results (including population metrics) - */ - public static final ParseField FIELD_NAME = new ParseField("field_name"); - - /** - * Population results - */ - public static final ParseField OVER_FIELD_NAME = new ParseField("over_field_name"); - public static final ParseField OVER_FIELD_VALUE = new ParseField("over_field_value"); - public static final ParseField CAUSES = new ParseField("causes"); - - /** - * Normalization - */ - public static final ParseField RECORD_SCORE = new ParseField("record_score"); - public static final ParseField INITIAL_RECORD_SCORE = new ParseField("initial_record_score"); - - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - RESULT_TYPE_VALUE, - true, - a -> new AnomalyRecord((String) a[0], (Date) a[1], (long) a[2]) - ); - - static { - PARSER.declareString(ConstructingObjectParser.constructorArg(), Job.ID); - PARSER.declareField( - 
ConstructingObjectParser.constructorArg(), - (p) -> TimeUtil.parseTimeField(p, Result.TIMESTAMP.getPreferredName()), - Result.TIMESTAMP, - ValueType.VALUE - ); - PARSER.declareLong(ConstructingObjectParser.constructorArg(), BUCKET_SPAN); - PARSER.declareString((anomalyRecord, s) -> {}, Result.RESULT_TYPE); - PARSER.declareDouble(AnomalyRecord::setProbability, PROBABILITY); - PARSER.declareDouble(AnomalyRecord::setMultiBucketImpact, MULTI_BUCKET_IMPACT); - PARSER.declareDouble(AnomalyRecord::setRecordScore, RECORD_SCORE); - PARSER.declareDouble(AnomalyRecord::setInitialRecordScore, INITIAL_RECORD_SCORE); - PARSER.declareInt(AnomalyRecord::setDetectorIndex, DETECTOR_INDEX); - PARSER.declareBoolean(AnomalyRecord::setInterim, Result.IS_INTERIM); - PARSER.declareString(AnomalyRecord::setByFieldName, BY_FIELD_NAME); - PARSER.declareString(AnomalyRecord::setByFieldValue, BY_FIELD_VALUE); - PARSER.declareString(AnomalyRecord::setCorrelatedByFieldValue, CORRELATED_BY_FIELD_VALUE); - PARSER.declareString(AnomalyRecord::setPartitionFieldName, PARTITION_FIELD_NAME); - PARSER.declareString(AnomalyRecord::setPartitionFieldValue, PARTITION_FIELD_VALUE); - PARSER.declareString(AnomalyRecord::setFunction, FUNCTION); - PARSER.declareString(AnomalyRecord::setFunctionDescription, FUNCTION_DESCRIPTION); - PARSER.declareDoubleArray(AnomalyRecord::setTypical, TYPICAL); - PARSER.declareDoubleArray(AnomalyRecord::setActual, ACTUAL); - PARSER.declareString(AnomalyRecord::setFieldName, FIELD_NAME); - PARSER.declareString(AnomalyRecord::setOverFieldName, OVER_FIELD_NAME); - PARSER.declareString(AnomalyRecord::setOverFieldValue, OVER_FIELD_VALUE); - PARSER.declareObjectArray(AnomalyRecord::setCauses, AnomalyCause.PARSER, CAUSES); - PARSER.declareObjectArray(AnomalyRecord::setInfluencers, Influence.PARSER, INFLUENCERS); - } - - private final String jobId; - private int detectorIndex; - private double probability; - private Double multiBucketImpact; - private String byFieldName; - private String byFieldValue; - private String correlatedByFieldValue; - private String partitionFieldName; - private String partitionFieldValue; - private String function; - private String functionDescription; - private List typical; - private List actual; - private boolean isInterim; - - private String fieldName; - - private String overFieldName; - private String overFieldValue; - private List causes; - - private double recordScore; - - private double initialRecordScore; - - private final Date timestamp; - private final long bucketSpan; - - private List influences; - - AnomalyRecord(String jobId, Date timestamp, long bucketSpan) { - this.jobId = jobId; - this.timestamp = Objects.requireNonNull(timestamp); - this.bucketSpan = bucketSpan; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(Job.ID.getPreferredName(), jobId); - builder.field(Result.RESULT_TYPE.getPreferredName(), RESULT_TYPE_VALUE); - builder.field(PROBABILITY.getPreferredName(), probability); - if (multiBucketImpact != null) { - builder.field(MULTI_BUCKET_IMPACT.getPreferredName(), multiBucketImpact); - } - builder.field(RECORD_SCORE.getPreferredName(), recordScore); - builder.field(INITIAL_RECORD_SCORE.getPreferredName(), initialRecordScore); - builder.field(BUCKET_SPAN.getPreferredName(), bucketSpan); - builder.field(DETECTOR_INDEX.getPreferredName(), detectorIndex); - builder.field(Result.IS_INTERIM.getPreferredName(), isInterim); - 
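        // timeField writes the timestamp as epoch milliseconds under "timestamp";
        // the "timestamp_string" variant is added alongside it only when the
        // builder has been switched to human-readable output.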
builder.timeField(Result.TIMESTAMP.getPreferredName(), Result.TIMESTAMP.getPreferredName() + "_string", timestamp.getTime()); - if (byFieldName != null) { - builder.field(BY_FIELD_NAME.getPreferredName(), byFieldName); - } - if (byFieldValue != null) { - builder.field(BY_FIELD_VALUE.getPreferredName(), byFieldValue); - } - if (correlatedByFieldValue != null) { - builder.field(CORRELATED_BY_FIELD_VALUE.getPreferredName(), correlatedByFieldValue); - } - if (partitionFieldName != null) { - builder.field(PARTITION_FIELD_NAME.getPreferredName(), partitionFieldName); - } - if (partitionFieldValue != null) { - builder.field(PARTITION_FIELD_VALUE.getPreferredName(), partitionFieldValue); - } - if (function != null) { - builder.field(FUNCTION.getPreferredName(), function); - } - if (functionDescription != null) { - builder.field(FUNCTION_DESCRIPTION.getPreferredName(), functionDescription); - } - if (typical != null) { - builder.field(TYPICAL.getPreferredName(), typical); - } - if (actual != null) { - builder.field(ACTUAL.getPreferredName(), actual); - } - if (fieldName != null) { - builder.field(FIELD_NAME.getPreferredName(), fieldName); - } - if (overFieldName != null) { - builder.field(OVER_FIELD_NAME.getPreferredName(), overFieldName); - } - if (overFieldValue != null) { - builder.field(OVER_FIELD_VALUE.getPreferredName(), overFieldValue); - } - if (causes != null) { - builder.field(CAUSES.getPreferredName(), causes); - } - if (influences != null) { - builder.field(INFLUENCERS.getPreferredName(), influences); - } - builder.endObject(); - return builder; - } - - public String getJobId() { - return this.jobId; - } - - public int getDetectorIndex() { - return detectorIndex; - } - - void setDetectorIndex(int detectorIndex) { - this.detectorIndex = detectorIndex; - } - - public double getRecordScore() { - return recordScore; - } - - void setRecordScore(double recordScore) { - this.recordScore = recordScore; - } - - public double getInitialRecordScore() { - return initialRecordScore; - } - - void setInitialRecordScore(double initialRecordScore) { - this.initialRecordScore = initialRecordScore; - } - - public Date getTimestamp() { - return timestamp; - } - - /** - * Bucketspan expressed in seconds - */ - public long getBucketSpan() { - return bucketSpan; - } - - public double getProbability() { - return probability; - } - - void setProbability(double value) { - probability = value; - } - - public double getMultiBucketImpact() { - return multiBucketImpact; - } - - void setMultiBucketImpact(double value) { - multiBucketImpact = value; - } - - public String getByFieldName() { - return byFieldName; - } - - void setByFieldName(String value) { - byFieldName = value; - } - - public String getByFieldValue() { - return byFieldValue; - } - - void setByFieldValue(String value) { - byFieldValue = value; - } - - public String getCorrelatedByFieldValue() { - return correlatedByFieldValue; - } - - void setCorrelatedByFieldValue(String value) { - correlatedByFieldValue = value; - } - - public String getPartitionFieldName() { - return partitionFieldName; - } - - void setPartitionFieldName(String field) { - partitionFieldName = field; - } - - public String getPartitionFieldValue() { - return partitionFieldValue; - } - - void setPartitionFieldValue(String value) { - partitionFieldValue = value; - } - - public String getFunction() { - return function; - } - - void setFunction(String name) { - function = name; - } - - public String getFunctionDescription() { - return functionDescription; - } - - void 
setFunctionDescription(String functionDescription) { - this.functionDescription = functionDescription; - } - - public List getTypical() { - return typical; - } - - void setTypical(List typical) { - this.typical = Collections.unmodifiableList(typical); - } - - public List getActual() { - return actual; - } - - void setActual(List actual) { - this.actual = Collections.unmodifiableList(actual); - } - - public boolean isInterim() { - return isInterim; - } - - void setInterim(boolean interim) { - this.isInterim = interim; - } - - public String getFieldName() { - return fieldName; - } - - void setFieldName(String field) { - fieldName = field; - } - - public String getOverFieldName() { - return overFieldName; - } - - void setOverFieldName(String name) { - overFieldName = name; - } - - public String getOverFieldValue() { - return overFieldValue; - } - - void setOverFieldValue(String value) { - overFieldValue = value; - } - - public List getCauses() { - return causes; - } - - void setCauses(List causes) { - this.causes = Collections.unmodifiableList(causes); - } - - public List getInfluencers() { - return influences; - } - - void setInfluencers(List influencers) { - this.influences = Collections.unmodifiableList(influencers); - } - - @Nullable - public GeoPoint getTypicalGeoPoint() { - if (DetectorFunction.LAT_LONG.getFullName().equals(function) == false || typical == null) { - return null; - } - if (typical.size() == 2) { - return new GeoPoint(typical.get(0), typical.get(1)); - } - return null; - } - - @Nullable - public GeoPoint getActualGeoPoint() { - if (DetectorFunction.LAT_LONG.getFullName().equals(function) == false || actual == null) { - return null; - } - if (actual.size() == 2) { - return new GeoPoint(actual.get(0), actual.get(1)); - } - return null; - } - - @Override - public int hashCode() { - return Objects.hash( - jobId, - detectorIndex, - bucketSpan, - probability, - multiBucketImpact, - recordScore, - initialRecordScore, - typical, - actual, - function, - functionDescription, - fieldName, - byFieldName, - byFieldValue, - correlatedByFieldValue, - partitionFieldName, - partitionFieldValue, - overFieldName, - overFieldValue, - timestamp, - isInterim, - causes, - influences, - jobId - ); - } - - @Override - public boolean equals(Object other) { - if (this == other) { - return true; - } - - if (other == null || getClass() != other.getClass()) { - return false; - } - - AnomalyRecord that = (AnomalyRecord) other; - - return Objects.equals(this.jobId, that.jobId) - && this.detectorIndex == that.detectorIndex - && this.bucketSpan == that.bucketSpan - && this.probability == that.probability - && Objects.equals(this.multiBucketImpact, that.multiBucketImpact) - && this.recordScore == that.recordScore - && this.initialRecordScore == that.initialRecordScore - && Objects.deepEquals(this.typical, that.typical) - && Objects.deepEquals(this.actual, that.actual) - && Objects.equals(this.function, that.function) - && Objects.equals(this.functionDescription, that.functionDescription) - && Objects.equals(this.fieldName, that.fieldName) - && Objects.equals(this.byFieldName, that.byFieldName) - && Objects.equals(this.byFieldValue, that.byFieldValue) - && Objects.equals(this.correlatedByFieldValue, that.correlatedByFieldValue) - && Objects.equals(this.partitionFieldName, that.partitionFieldName) - && Objects.equals(this.partitionFieldValue, that.partitionFieldValue) - && Objects.equals(this.overFieldName, that.overFieldName) - && Objects.equals(this.overFieldValue, that.overFieldValue) - && 
Objects.equals(this.timestamp, that.timestamp) - && Objects.equals(this.isInterim, that.isInterim) - && Objects.equals(this.causes, that.causes) - && Objects.equals(this.influences, that.influences); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/results/Bucket.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/results/Bucket.java deleted file mode 100644 index 8d74effaac390..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/results/Bucket.java +++ /dev/null @@ -1,249 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.job.results; - -import org.elasticsearch.client.common.TimeUtil; -import org.elasticsearch.client.ml.job.config.Job; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ObjectParser.ValueType; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.Collections; -import java.util.Date; -import java.util.List; -import java.util.Objects; - -/** - * Bucket Result POJO - */ -public class Bucket implements ToXContentObject { - - public static final ParseField ANOMALY_SCORE = new ParseField("anomaly_score"); - public static final ParseField INITIAL_ANOMALY_SCORE = new ParseField("initial_anomaly_score"); - public static final ParseField EVENT_COUNT = new ParseField("event_count"); - public static final ParseField RECORDS = new ParseField("records"); - public static final ParseField BUCKET_INFLUENCERS = new ParseField("bucket_influencers"); - public static final ParseField BUCKET_SPAN = new ParseField("bucket_span"); - public static final ParseField PROCESSING_TIME_MS = new ParseField("processing_time_ms"); - public static final ParseField SCHEDULED_EVENTS = new ParseField("scheduled_events"); - - // Used for QueryPage - public static final ParseField RESULTS_FIELD = new ParseField("buckets"); - - /** - * Result type - */ - public static final String RESULT_TYPE_VALUE = "bucket"; - public static final ParseField RESULT_TYPE_FIELD = new ParseField(RESULT_TYPE_VALUE); - - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - RESULT_TYPE_VALUE, - true, - a -> new Bucket((String) a[0], (Date) a[1], (long) a[2]) - ); - - static { - PARSER.declareString(ConstructingObjectParser.constructorArg(), Job.ID); - PARSER.declareField( - ConstructingObjectParser.constructorArg(), - (p) -> TimeUtil.parseTimeField(p, Result.TIMESTAMP.getPreferredName()), - Result.TIMESTAMP, - ValueType.VALUE - ); - PARSER.declareLong(ConstructingObjectParser.constructorArg(), BUCKET_SPAN); - PARSER.declareDouble(Bucket::setAnomalyScore, ANOMALY_SCORE); - PARSER.declareDouble(Bucket::setInitialAnomalyScore, INITIAL_ANOMALY_SCORE); - PARSER.declareBoolean(Bucket::setInterim, Result.IS_INTERIM); - PARSER.declareLong(Bucket::setEventCount, EVENT_COUNT); - PARSER.declareObjectArray(Bucket::setRecords, AnomalyRecord.PARSER, RECORDS); - PARSER.declareObjectArray(Bucket::setBucketInfluencers, BucketInfluencer.PARSER, 
BUCKET_INFLUENCERS); - PARSER.declareLong(Bucket::setProcessingTimeMs, PROCESSING_TIME_MS); - PARSER.declareString((bucket, s) -> {}, Result.RESULT_TYPE); - PARSER.declareStringArray(Bucket::setScheduledEvents, SCHEDULED_EVENTS); - } - - private final String jobId; - private final Date timestamp; - private final long bucketSpan; - private double anomalyScore; - private double initialAnomalyScore; - private List records = new ArrayList<>(); - private long eventCount; - private boolean isInterim; - private List bucketInfluencers = new ArrayList<>(); // Can't use emptyList as might be appended to - private long processingTimeMs; - private List scheduledEvents = Collections.emptyList(); - - Bucket(String jobId, Date timestamp, long bucketSpan) { - this.jobId = jobId; - this.timestamp = Objects.requireNonNull(timestamp); - this.bucketSpan = bucketSpan; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(Job.ID.getPreferredName(), jobId); - builder.timeField(Result.TIMESTAMP.getPreferredName(), Result.TIMESTAMP.getPreferredName() + "_string", timestamp.getTime()); - builder.field(ANOMALY_SCORE.getPreferredName(), anomalyScore); - builder.field(BUCKET_SPAN.getPreferredName(), bucketSpan); - builder.field(INITIAL_ANOMALY_SCORE.getPreferredName(), initialAnomalyScore); - if (records.isEmpty() == false) { - builder.field(RECORDS.getPreferredName(), records); - } - builder.field(EVENT_COUNT.getPreferredName(), eventCount); - builder.field(Result.IS_INTERIM.getPreferredName(), isInterim); - builder.field(BUCKET_INFLUENCERS.getPreferredName(), bucketInfluencers); - builder.field(PROCESSING_TIME_MS.getPreferredName(), processingTimeMs); - if (scheduledEvents.isEmpty() == false) { - builder.field(SCHEDULED_EVENTS.getPreferredName(), scheduledEvents); - } - builder.field(Result.RESULT_TYPE.getPreferredName(), RESULT_TYPE_VALUE); - builder.endObject(); - return builder; - } - - public String getJobId() { - return jobId; - } - - public Date getTimestamp() { - return timestamp; - } - - /** - * Bucketspan expressed in seconds - */ - public long getBucketSpan() { - return bucketSpan; - } - - public double getAnomalyScore() { - return anomalyScore; - } - - void setAnomalyScore(double anomalyScore) { - this.anomalyScore = anomalyScore; - } - - public double getInitialAnomalyScore() { - return initialAnomalyScore; - } - - void setInitialAnomalyScore(double initialAnomalyScore) { - this.initialAnomalyScore = initialAnomalyScore; - } - - /** - * Get all the anomaly records associated with this bucket. - * The records are not part of the bucket document. They will - * only be present when the bucket was retrieved and expanded - * to contain the associated records. - * - * @return the anomaly records for the bucket IF the bucket was expanded. - */ - public List getRecords() { - return records; - } - - void setRecords(List records) { - this.records = Collections.unmodifiableList(records); - } - - /** - * The number of records (events) actually processed in this bucket. 
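 * Unlike the anomaly records above, this count is part of the bucket document
 * itself (it is always serialized in toXContent), so it is populated whether
 * or not the bucket was retrieved with its records expanded.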
- */ - public long getEventCount() { - return eventCount; - } - - void setEventCount(long value) { - eventCount = value; - } - - public boolean isInterim() { - return isInterim; - } - - void setInterim(boolean interim) { - this.isInterim = interim; - } - - public long getProcessingTimeMs() { - return processingTimeMs; - } - - void setProcessingTimeMs(long timeMs) { - processingTimeMs = timeMs; - } - - public List getBucketInfluencers() { - return bucketInfluencers; - } - - void setBucketInfluencers(List bucketInfluencers) { - this.bucketInfluencers = Collections.unmodifiableList(bucketInfluencers); - } - - public List getScheduledEvents() { - return scheduledEvents; - } - - void setScheduledEvents(List scheduledEvents) { - this.scheduledEvents = Collections.unmodifiableList(scheduledEvents); - } - - @Override - public int hashCode() { - return Objects.hash( - jobId, - timestamp, - eventCount, - initialAnomalyScore, - anomalyScore, - records, - isInterim, - bucketSpan, - bucketInfluencers, - processingTimeMs, - scheduledEvents - ); - } - - /** - * Compare all the fields and embedded anomaly records (if any) - */ - @Override - public boolean equals(Object other) { - if (this == other) { - return true; - } - - if (other == null || getClass() != other.getClass()) { - return false; - } - - Bucket that = (Bucket) other; - - return Objects.equals(this.jobId, that.jobId) - && Objects.equals(this.timestamp, that.timestamp) - && (this.eventCount == that.eventCount) - && (this.bucketSpan == that.bucketSpan) - && (this.anomalyScore == that.anomalyScore) - && (this.initialAnomalyScore == that.initialAnomalyScore) - && Objects.equals(this.records, that.records) - && Objects.equals(this.isInterim, that.isInterim) - && Objects.equals(this.bucketInfluencers, that.bucketInfluencers) - && (this.processingTimeMs == that.processingTimeMs) - && Objects.equals(this.scheduledEvents, that.scheduledEvents); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/results/BucketInfluencer.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/results/BucketInfluencer.java deleted file mode 100644 index 62df14ce4e817..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/results/BucketInfluencer.java +++ /dev/null @@ -1,196 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ -package org.elasticsearch.client.ml.job.results; - -import org.elasticsearch.client.common.TimeUtil; -import org.elasticsearch.client.ml.job.config.Job; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ObjectParser.ValueType; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Date; -import java.util.Objects; - -public class BucketInfluencer implements ToXContentObject { - - /** - * Result type - */ - public static final String RESULT_TYPE_VALUE = "bucket_influencer"; - public static final ParseField RESULT_TYPE_FIELD = new ParseField(RESULT_TYPE_VALUE); - - /** - * Field names - */ - public static final ParseField INFLUENCER_FIELD_NAME = new ParseField("influencer_field_name"); - public static final ParseField INITIAL_ANOMALY_SCORE = new ParseField("initial_anomaly_score"); - public static final ParseField ANOMALY_SCORE = new ParseField("anomaly_score"); - public static final ParseField RAW_ANOMALY_SCORE = new ParseField("raw_anomaly_score"); - public static final ParseField PROBABILITY = new ParseField("probability"); - public static final ParseField BUCKET_SPAN = new ParseField("bucket_span"); - - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - RESULT_TYPE_FIELD.getPreferredName(), - true, - a -> new BucketInfluencer((String) a[0], (Date) a[1], (long) a[2]) - ); - - static { - PARSER.declareString(ConstructingObjectParser.constructorArg(), Job.ID); - PARSER.declareField( - ConstructingObjectParser.constructorArg(), - (p) -> TimeUtil.parseTimeField(p, Result.TIMESTAMP.getPreferredName()), - Result.TIMESTAMP, - ValueType.VALUE - ); - PARSER.declareLong(ConstructingObjectParser.constructorArg(), BUCKET_SPAN); - PARSER.declareString((bucketInfluencer, s) -> {}, Result.RESULT_TYPE); - PARSER.declareString(BucketInfluencer::setInfluencerFieldName, INFLUENCER_FIELD_NAME); - PARSER.declareDouble(BucketInfluencer::setInitialAnomalyScore, INITIAL_ANOMALY_SCORE); - PARSER.declareDouble(BucketInfluencer::setAnomalyScore, ANOMALY_SCORE); - PARSER.declareDouble(BucketInfluencer::setRawAnomalyScore, RAW_ANOMALY_SCORE); - PARSER.declareDouble(BucketInfluencer::setProbability, PROBABILITY); - PARSER.declareBoolean(BucketInfluencer::setIsInterim, Result.IS_INTERIM); - } - - private final String jobId; - private String influenceField; - private double initialAnomalyScore; - private double anomalyScore; - private double rawAnomalyScore; - private double probability; - private boolean isInterim; - private final Date timestamp; - private final long bucketSpan; - - BucketInfluencer(String jobId, Date timestamp, long bucketSpan) { - this.jobId = jobId; - this.timestamp = Objects.requireNonNull(timestamp); - this.bucketSpan = bucketSpan; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(Job.ID.getPreferredName(), jobId); - builder.field(Result.RESULT_TYPE.getPreferredName(), RESULT_TYPE_VALUE); - if (influenceField != null) { - builder.field(INFLUENCER_FIELD_NAME.getPreferredName(), influenceField); - } - builder.field(INITIAL_ANOMALY_SCORE.getPreferredName(), initialAnomalyScore); - builder.field(ANOMALY_SCORE.getPreferredName(), anomalyScore); - builder.field(RAW_ANOMALY_SCORE.getPreferredName(), rawAnomalyScore); - builder.field(PROBABILITY.getPreferredName(), 
probability); - builder.timeField(Result.TIMESTAMP.getPreferredName(), Result.TIMESTAMP.getPreferredName() + "_string", timestamp.getTime()); - builder.field(BUCKET_SPAN.getPreferredName(), bucketSpan); - builder.field(Result.IS_INTERIM.getPreferredName(), isInterim); - builder.endObject(); - return builder; - } - - public String getJobId() { - return jobId; - } - - public double getProbability() { - return probability; - } - - void setProbability(double probability) { - this.probability = probability; - } - - public String getInfluencerFieldName() { - return influenceField; - } - - void setInfluencerFieldName(String fieldName) { - this.influenceField = fieldName; - } - - public double getInitialAnomalyScore() { - return initialAnomalyScore; - } - - void setInitialAnomalyScore(double influenceScore) { - this.initialAnomalyScore = influenceScore; - } - - public double getAnomalyScore() { - return anomalyScore; - } - - void setAnomalyScore(double score) { - anomalyScore = score; - } - - public double getRawAnomalyScore() { - return rawAnomalyScore; - } - - void setRawAnomalyScore(double score) { - rawAnomalyScore = score; - } - - void setIsInterim(boolean isInterim) { - this.isInterim = isInterim; - } - - public boolean isInterim() { - return isInterim; - } - - public Date getTimestamp() { - return timestamp; - } - - @Override - public int hashCode() { - return Objects.hash( - influenceField, - initialAnomalyScore, - anomalyScore, - rawAnomalyScore, - probability, - isInterim, - timestamp, - jobId, - bucketSpan - ); - } - - @Override - public boolean equals(Object obj) { - if (this == obj) { - return true; - } - - if (obj == null) { - return false; - } - - if (getClass() != obj.getClass()) { - return false; - } - - BucketInfluencer other = (BucketInfluencer) obj; - - return Objects.equals(influenceField, other.influenceField) - && Double.compare(initialAnomalyScore, other.initialAnomalyScore) == 0 - && Double.compare(anomalyScore, other.anomalyScore) == 0 - && Double.compare(rawAnomalyScore, other.rawAnomalyScore) == 0 - && Double.compare(probability, other.probability) == 0 - && Objects.equals(isInterim, other.isInterim) - && Objects.equals(timestamp, other.timestamp) - && Objects.equals(jobId, other.jobId) - && bucketSpan == other.bucketSpan; - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/results/CategoryDefinition.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/results/CategoryDefinition.java deleted file mode 100644 index 4b204d7279c38..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/results/CategoryDefinition.java +++ /dev/null @@ -1,232 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ -package org.elasticsearch.client.ml.job.results; - -import org.elasticsearch.client.ml.job.config.Job; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.Collection; -import java.util.Collections; -import java.util.List; -import java.util.Objects; -import java.util.Set; -import java.util.TreeSet; - -public class CategoryDefinition implements ToXContentObject { - - public static final ParseField TYPE = new ParseField("category_definition"); - - public static final ParseField CATEGORY_ID = new ParseField("category_id"); - public static final ParseField PARTITION_FIELD_NAME = new ParseField("partition_field_name"); - public static final ParseField PARTITION_FIELD_VALUE = new ParseField("partition_field_value"); - public static final ParseField TERMS = new ParseField("terms"); - public static final ParseField REGEX = new ParseField("regex"); - public static final ParseField MAX_MATCHING_LENGTH = new ParseField("max_matching_length"); - public static final ParseField EXAMPLES = new ParseField("examples"); - public static final ParseField GROK_PATTERN = new ParseField("grok_pattern"); - public static final ParseField NUM_MATCHES = new ParseField("num_matches"); - public static final ParseField PREFERRED_TO_CATEGORIES = new ParseField("preferred_to_categories"); - - // Used for QueryPage - public static final ParseField RESULTS_FIELD = new ParseField("categories"); - - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - TYPE.getPreferredName(), - true, - a -> new CategoryDefinition((String) a[0]) - ); - - static { - PARSER.declareString(ConstructingObjectParser.constructorArg(), Job.ID); - PARSER.declareLong(CategoryDefinition::setCategoryId, CATEGORY_ID); - PARSER.declareString(CategoryDefinition::setPartitionFieldName, PARTITION_FIELD_NAME); - PARSER.declareString(CategoryDefinition::setPartitionFieldValue, PARTITION_FIELD_VALUE); - PARSER.declareString(CategoryDefinition::setTerms, TERMS); - PARSER.declareString(CategoryDefinition::setRegex, REGEX); - PARSER.declareLong(CategoryDefinition::setMaxMatchingLength, MAX_MATCHING_LENGTH); - PARSER.declareStringArray(CategoryDefinition::setExamples, EXAMPLES); - PARSER.declareString(CategoryDefinition::setGrokPattern, GROK_PATTERN); - PARSER.declareLong(CategoryDefinition::setNumMatches, NUM_MATCHES); - PARSER.declareLongArray(CategoryDefinition::setPreferredToCategories, PREFERRED_TO_CATEGORIES); - } - - private final String jobId; - private long categoryId = 0L; - private String partitionFieldName; - private String partitionFieldValue; - private String terms = ""; - private String regex = ""; - private long maxMatchingLength = 0L; - private final Set examples = new TreeSet<>(); - private String grokPattern; - private long numMatches = 0L; - private List preferredToCategories; - - CategoryDefinition(String jobId) { - this.jobId = jobId; - } - - public String getJobId() { - return jobId; - } - - public long getCategoryId() { - return categoryId; - } - - void setCategoryId(long categoryId) { - this.categoryId = categoryId; - } - - public String getPartitionFieldName() { - return partitionFieldName; - } - - public void setPartitionFieldName(String partitionFieldName) { - this.partitionFieldName = partitionFieldName; - } - - public String getPartitionFieldValue() { - return 
partitionFieldValue; - } - - public void setPartitionFieldValue(String partitionFieldValue) { - this.partitionFieldValue = partitionFieldValue; - } - - public String getTerms() { - return terms; - } - - void setTerms(String terms) { - this.terms = terms; - } - - public String getRegex() { - return regex; - } - - void setRegex(String regex) { - this.regex = regex; - } - - public long getMaxMatchingLength() { - return maxMatchingLength; - } - - void setMaxMatchingLength(long maxMatchingLength) { - this.maxMatchingLength = maxMatchingLength; - } - - public List getExamples() { - return new ArrayList<>(examples); - } - - void setExamples(Collection examples) { - this.examples.clear(); - this.examples.addAll(examples); - } - - void addExample(String example) { - examples.add(example); - } - - public String getGrokPattern() { - return grokPattern; - } - - void setGrokPattern(String grokPattern) { - this.grokPattern = grokPattern; - } - - public long getNumMatches() { - return numMatches; - } - - public void setNumMatches(long numMatches) { - this.numMatches = numMatches; - } - - public List getPreferredToCategories() { - return preferredToCategories; - } - - public void setPreferredToCategories(List preferredToCategories) { - this.preferredToCategories = Collections.unmodifiableList(preferredToCategories); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(Job.ID.getPreferredName(), jobId); - builder.field(CATEGORY_ID.getPreferredName(), categoryId); - if (partitionFieldName != null) { - builder.field(PARTITION_FIELD_NAME.getPreferredName(), partitionFieldName); - } - if (partitionFieldValue != null) { - builder.field(PARTITION_FIELD_VALUE.getPreferredName(), partitionFieldValue); - } - builder.field(TERMS.getPreferredName(), terms); - builder.field(REGEX.getPreferredName(), regex); - builder.field(MAX_MATCHING_LENGTH.getPreferredName(), maxMatchingLength); - builder.field(EXAMPLES.getPreferredName(), examples); - if (grokPattern != null) { - builder.field(GROK_PATTERN.getPreferredName(), grokPattern); - } - builder.field(NUM_MATCHES.getPreferredName(), numMatches); - if (preferredToCategories != null && (preferredToCategories.isEmpty() == false)) { - builder.field(PREFERRED_TO_CATEGORIES.getPreferredName(), preferredToCategories); - } - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object other) { - if (this == other) { - return true; - } - if (other == null || getClass() != other.getClass()) { - return false; - } - CategoryDefinition that = (CategoryDefinition) other; - return Objects.equals(this.jobId, that.jobId) - && Objects.equals(this.categoryId, that.categoryId) - && Objects.equals(this.partitionFieldName, that.partitionFieldName) - && Objects.equals(this.partitionFieldValue, that.partitionFieldValue) - && Objects.equals(this.terms, that.terms) - && Objects.equals(this.regex, that.regex) - && Objects.equals(this.maxMatchingLength, that.maxMatchingLength) - && Objects.equals(this.examples, that.examples) - && Objects.equals(this.preferredToCategories, that.preferredToCategories) - && Objects.equals(this.numMatches, that.numMatches) - && Objects.equals(this.grokPattern, that.grokPattern); - } - - @Override - public int hashCode() { - return Objects.hash( - jobId, - categoryId, - partitionFieldName, - partitionFieldValue, - terms, - regex, - maxMatchingLength, - examples, - preferredToCategories, - numMatches, - grokPattern - ); - } -} diff --git 
a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/results/Influence.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/results/Influence.java deleted file mode 100644 index 0969b5983c75e..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/results/Influence.java +++ /dev/null @@ -1,91 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.job.results; - -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Collections; -import java.util.List; -import java.util.Objects; - -/** - * Influence field name and list of influence field values/score pairs - */ -public class Influence implements ToXContentObject { - - /** - * Note all X-Content serialized field names are "influencer" not "influence" - */ - public static final ParseField INFLUENCER = new ParseField("influencer"); - public static final ParseField INFLUENCER_FIELD_NAME = new ParseField("influencer_field_name"); - public static final ParseField INFLUENCER_FIELD_VALUES = new ParseField("influencer_field_values"); - - @SuppressWarnings("unchecked") - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - INFLUENCER.getPreferredName(), - true, - a -> new Influence((String) a[0], (List) a[1]) - ); - - static { - PARSER.declareString(ConstructingObjectParser.constructorArg(), INFLUENCER_FIELD_NAME); - PARSER.declareStringArray(ConstructingObjectParser.constructorArg(), INFLUENCER_FIELD_VALUES); - } - - private String field; - private List fieldValues; - - Influence(String field, List fieldValues) { - this.field = field; - this.fieldValues = Collections.unmodifiableList(fieldValues); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(INFLUENCER_FIELD_NAME.getPreferredName(), field); - builder.field(INFLUENCER_FIELD_VALUES.getPreferredName(), fieldValues); - builder.endObject(); - return builder; - } - - public String getInfluencerFieldName() { - return field; - } - - public List getInfluencerFieldValues() { - return fieldValues; - } - - @Override - public int hashCode() { - return Objects.hash(field, fieldValues); - } - - @Override - public boolean equals(Object obj) { - if (this == obj) { - return true; - } - - if (obj == null) { - return false; - } - - if (getClass() != obj.getClass()) { - return false; - } - - Influence other = (Influence) obj; - return Objects.equals(field, other.field) && Objects.equals(fieldValues, other.fieldValues); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/results/Influencer.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/results/Influencer.java deleted file mode 100644 index 46c7516b9853a..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/results/Influencer.java +++ /dev/null @@ -1,190 +0,0 @@ -/* - * Copyright Elasticsearch B.V. 
and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.job.results; - -import org.elasticsearch.client.common.TimeUtil; -import org.elasticsearch.client.ml.job.config.Job; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ObjectParser.ValueType; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Date; -import java.util.Objects; - -public class Influencer implements ToXContentObject { - - /** - * Result type - */ - public static final String RESULT_TYPE_VALUE = "influencer"; - public static final ParseField RESULT_TYPE_FIELD = new ParseField(RESULT_TYPE_VALUE); - - /* - * Field names - */ - public static final ParseField PROBABILITY = new ParseField("probability"); - public static final ParseField BUCKET_SPAN = new ParseField("bucket_span"); - public static final ParseField INFLUENCER_FIELD_NAME = new ParseField("influencer_field_name"); - public static final ParseField INFLUENCER_FIELD_VALUE = new ParseField("influencer_field_value"); - public static final ParseField INITIAL_INFLUENCER_SCORE = new ParseField("initial_influencer_score"); - public static final ParseField INFLUENCER_SCORE = new ParseField("influencer_score"); - - // Used for QueryPage - public static final ParseField RESULTS_FIELD = new ParseField("influencers"); - - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - RESULT_TYPE_FIELD.getPreferredName(), - true, - a -> new Influencer((String) a[0], (String) a[1], (String) a[2], (Date) a[3], (long) a[4]) - ); - - static { - PARSER.declareString(ConstructingObjectParser.constructorArg(), Job.ID); - PARSER.declareString(ConstructingObjectParser.constructorArg(), INFLUENCER_FIELD_NAME); - PARSER.declareString(ConstructingObjectParser.constructorArg(), INFLUENCER_FIELD_VALUE); - PARSER.declareField( - ConstructingObjectParser.constructorArg(), - (p) -> TimeUtil.parseTimeField(p, Result.TIMESTAMP.getPreferredName()), - Result.TIMESTAMP, - ValueType.VALUE - ); - PARSER.declareLong(ConstructingObjectParser.constructorArg(), BUCKET_SPAN); - PARSER.declareString((influencer, s) -> {}, Result.RESULT_TYPE); - PARSER.declareDouble(Influencer::setProbability, PROBABILITY); - PARSER.declareDouble(Influencer::setInfluencerScore, INFLUENCER_SCORE); - PARSER.declareDouble(Influencer::setInitialInfluencerScore, INITIAL_INFLUENCER_SCORE); - PARSER.declareBoolean(Influencer::setInterim, Result.IS_INTERIM); - } - - private final String jobId; - private final Date timestamp; - private final long bucketSpan; - private String influenceField; - private String influenceValue; - private double probability; - private double initialInfluencerScore; - private double influencerScore; - private boolean isInterim; - - Influencer(String jobId, String fieldName, String fieldValue, Date timestamp, long bucketSpan) { - this.jobId = jobId; - influenceField = fieldName; - influenceValue = fieldValue; - this.timestamp = Objects.requireNonNull(timestamp); - this.bucketSpan = bucketSpan; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws 
IOException { - builder.startObject(); - builder.field(Job.ID.getPreferredName(), jobId); - builder.field(Result.RESULT_TYPE.getPreferredName(), RESULT_TYPE_VALUE); - builder.field(INFLUENCER_FIELD_NAME.getPreferredName(), influenceField); - builder.field(INFLUENCER_FIELD_VALUE.getPreferredName(), influenceValue); - builder.field(INFLUENCER_SCORE.getPreferredName(), influencerScore); - builder.field(INITIAL_INFLUENCER_SCORE.getPreferredName(), initialInfluencerScore); - builder.field(PROBABILITY.getPreferredName(), probability); - builder.field(BUCKET_SPAN.getPreferredName(), bucketSpan); - builder.field(Result.IS_INTERIM.getPreferredName(), isInterim); - builder.timeField(Result.TIMESTAMP.getPreferredName(), Result.TIMESTAMP.getPreferredName() + "_string", timestamp.getTime()); - builder.endObject(); - return builder; - } - - public String getJobId() { - return jobId; - } - - public double getProbability() { - return probability; - } - - void setProbability(double probability) { - this.probability = probability; - } - - public Date getTimestamp() { - return timestamp; - } - - public String getInfluencerFieldName() { - return influenceField; - } - - public String getInfluencerFieldValue() { - return influenceValue; - } - - public double getInitialInfluencerScore() { - return initialInfluencerScore; - } - - void setInitialInfluencerScore(double score) { - initialInfluencerScore = score; - } - - public double getInfluencerScore() { - return influencerScore; - } - - void setInfluencerScore(double score) { - influencerScore = score; - } - - public boolean isInterim() { - return isInterim; - } - - void setInterim(boolean value) { - isInterim = value; - } - - @Override - public int hashCode() { - return Objects.hash( - jobId, - timestamp, - influenceField, - influenceValue, - initialInfluencerScore, - influencerScore, - probability, - isInterim, - bucketSpan - ); - } - - @Override - public boolean equals(Object obj) { - if (this == obj) { - return true; - } - - if (obj == null) { - return false; - } - - if (getClass() != obj.getClass()) { - return false; - } - - Influencer other = (Influencer) obj; - return Objects.equals(jobId, other.jobId) - && Objects.equals(timestamp, other.timestamp) - && Objects.equals(influenceField, other.influenceField) - && Objects.equals(influenceValue, other.influenceValue) - && Double.compare(initialInfluencerScore, other.initialInfluencerScore) == 0 - && Double.compare(influencerScore, other.influencerScore) == 0 - && Double.compare(probability, other.probability) == 0 - && (isInterim == other.isInterim) - && (bucketSpan == other.bucketSpan); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/results/OverallBucket.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/results/OverallBucket.java deleted file mode 100644 index 9a6bb40682e6f..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/results/OverallBucket.java +++ /dev/null @@ -1,206 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ -package org.elasticsearch.client.ml.job.results; - -import org.elasticsearch.client.common.TimeUtil; -import org.elasticsearch.client.ml.job.config.Job; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Collections; -import java.util.Date; -import java.util.List; -import java.util.Objects; - -/** - * Overall Bucket Result POJO - */ -public class OverallBucket implements ToXContentObject { - - public static final ParseField OVERALL_SCORE = new ParseField("overall_score"); - public static final ParseField BUCKET_SPAN = new ParseField("bucket_span"); - public static final ParseField JOBS = new ParseField("jobs"); - - // Used for QueryPage - public static final ParseField RESULTS_FIELD = new ParseField("overall_buckets"); - - /** - * Result type - */ - public static final String RESULT_TYPE_VALUE = "overall_bucket"; - - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - RESULT_TYPE_VALUE, - true, - a -> new OverallBucket((Date) a[0], (long) a[1], (double) a[2], (boolean) a[3]) - ); - - static { - PARSER.declareField( - ConstructingObjectParser.constructorArg(), - (p) -> TimeUtil.parseTimeField(p, Result.TIMESTAMP.getPreferredName()), - Result.TIMESTAMP, - ObjectParser.ValueType.VALUE - ); - PARSER.declareLong(ConstructingObjectParser.constructorArg(), BUCKET_SPAN); - PARSER.declareDouble(ConstructingObjectParser.constructorArg(), OVERALL_SCORE); - PARSER.declareBoolean(ConstructingObjectParser.constructorArg(), Result.IS_INTERIM); - PARSER.declareObjectArray(OverallBucket::setJobs, JobInfo.PARSER, JOBS); - } - - private final Date timestamp; - private final long bucketSpan; - private final double overallScore; - private final boolean isInterim; - private List jobs = Collections.emptyList(); - - OverallBucket(Date timestamp, long bucketSpan, double overallScore, boolean isInterim) { - this.timestamp = Objects.requireNonNull(timestamp); - this.bucketSpan = bucketSpan; - this.overallScore = overallScore; - this.isInterim = isInterim; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.timeField(Result.TIMESTAMP.getPreferredName(), Result.TIMESTAMP.getPreferredName() + "_string", timestamp.getTime()); - builder.field(BUCKET_SPAN.getPreferredName(), bucketSpan); - builder.field(OVERALL_SCORE.getPreferredName(), overallScore); - builder.field(JOBS.getPreferredName(), jobs); - builder.field(Result.IS_INTERIM.getPreferredName(), isInterim); - builder.field(Result.RESULT_TYPE.getPreferredName(), RESULT_TYPE_VALUE); - builder.endObject(); - return builder; - } - - public Date getTimestamp() { - return timestamp; - } - - /** - * Bucketspan expressed in seconds - */ - public long getBucketSpan() { - return bucketSpan; - } - - public double getOverallScore() { - return overallScore; - } - - public List getJobs() { - return jobs; - } - - void setJobs(List jobs) { - this.jobs = Collections.unmodifiableList(jobs); - } - - public boolean isInterim() { - return isInterim; - } - - @Override - public int hashCode() { - return Objects.hash(timestamp, bucketSpan, overallScore, jobs, isInterim); - } - - /** - * Compare all the fields and embedded anomaly records (if any) - */ - @Override - public boolean equals(Object 
other) { - if (this == other) { - return true; - } - - if (other == null || getClass() != other.getClass()) { - return false; - } - - OverallBucket that = (OverallBucket) other; - - return Objects.equals(this.timestamp, that.timestamp) - && this.bucketSpan == that.bucketSpan - && this.overallScore == that.overallScore - && Objects.equals(this.jobs, that.jobs) - && this.isInterim == that.isInterim; - } - - public static class JobInfo implements ToXContentObject, Comparable { - - private static final ParseField MAX_ANOMALY_SCORE = new ParseField("max_anomaly_score"); - - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "job_info", - true, - a -> new JobInfo((String) a[0], (double) a[1]) - ); - - static { - PARSER.declareString(ConstructingObjectParser.constructorArg(), Job.ID); - PARSER.declareDouble(ConstructingObjectParser.constructorArg(), MAX_ANOMALY_SCORE); - } - - private final String jobId; - private final double maxAnomalyScore; - - JobInfo(String jobId, double maxAnomalyScore) { - this.jobId = Objects.requireNonNull(jobId); - this.maxAnomalyScore = maxAnomalyScore; - } - - public String getJobId() { - return jobId; - } - - public double getMaxAnomalyScore() { - return maxAnomalyScore; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(Job.ID.getPreferredName(), jobId); - builder.field(MAX_ANOMALY_SCORE.getPreferredName(), maxAnomalyScore); - builder.endObject(); - return builder; - } - - @Override - public int hashCode() { - return Objects.hash(jobId, maxAnomalyScore); - } - - @Override - public boolean equals(Object other) { - if (this == other) { - return true; - } - if (other == null || getClass() != other.getClass()) { - return false; - } - JobInfo that = (JobInfo) other; - return Objects.equals(this.jobId, that.jobId) && this.maxAnomalyScore == that.maxAnomalyScore; - } - - @Override - public int compareTo(JobInfo other) { - int result = this.jobId.compareTo(other.jobId); - if (result == 0) { - result = Double.compare(this.maxAnomalyScore, other.maxAnomalyScore); - } - return result; - } - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/results/Result.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/results/Result.java deleted file mode 100644 index 6f5408bb2ae0a..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/results/Result.java +++ /dev/null @@ -1,25 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.job.results; - -import org.elasticsearch.xcontent.ParseField; - -/** - * Contains common attributes for results. 
- */ -public final class Result { - - /** - * Serialisation fields - */ - public static final ParseField RESULT_TYPE = new ParseField("result_type"); - public static final ParseField TIMESTAMP = new ParseField("timestamp"); - public static final ParseField IS_INTERIM = new ParseField("is_interim"); - - private Result() {} -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/stats/ForecastStats.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/stats/ForecastStats.java deleted file mode 100644 index 796cb18f3eb2d..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/stats/ForecastStats.java +++ /dev/null @@ -1,165 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.job.stats; - -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Collections; -import java.util.HashMap; -import java.util.Map; -import java.util.Objects; - -/** - * A class to hold statistics about forecasts. - */ -public class ForecastStats implements ToXContentObject { - - public static final ParseField TOTAL = new ParseField("total"); - public static final ParseField FORECASTED_JOBS = new ParseField("forecasted_jobs"); - public static final ParseField MEMORY_BYTES = new ParseField("memory_bytes"); - public static final ParseField PROCESSING_TIME_MS = new ParseField("processing_time_ms"); - public static final ParseField RECORDS = new ParseField("records"); - public static final ParseField STATUS = new ParseField("status"); - - @SuppressWarnings("unchecked") - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "forecast_stats", - true, - (a) -> { - int i = 0; - long total = (long) a[i++]; - SimpleStats memoryStats = (SimpleStats) a[i++]; - SimpleStats recordStats = (SimpleStats) a[i++]; - SimpleStats runtimeStats = (SimpleStats) a[i++]; - Map statusCounts = (Map) a[i]; - return new ForecastStats(total, memoryStats, recordStats, runtimeStats, statusCounts); - } - ); - - static { - PARSER.declareLong(ConstructingObjectParser.constructorArg(), TOTAL); - PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(), SimpleStats.PARSER, MEMORY_BYTES); - PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(), SimpleStats.PARSER, RECORDS); - PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(), SimpleStats.PARSER, PROCESSING_TIME_MS); - PARSER.declareField(ConstructingObjectParser.optionalConstructorArg(), p -> { - Map counts = new HashMap<>(); - p.map().forEach((key, value) -> counts.put(key, ((Number) value).longValue())); - return counts; - }, STATUS, ObjectParser.ValueType.OBJECT); - } - - private final long total; - private final long forecastedJobs; - private SimpleStats memoryStats; - private SimpleStats recordStats; - private SimpleStats runtimeStats; - private Map statusCounts; - - public ForecastStats( - long total, - SimpleStats memoryStats, - 
SimpleStats recordStats, - SimpleStats runtimeStats, - Map statusCounts - ) { - this.total = total; - this.forecastedJobs = total > 0 ? 1 : 0; - if (total > 0) { - this.memoryStats = Objects.requireNonNull(memoryStats); - this.recordStats = Objects.requireNonNull(recordStats); - this.runtimeStats = Objects.requireNonNull(runtimeStats); - this.statusCounts = Collections.unmodifiableMap(statusCounts); - } - } - - /** - * The number of forecasts currently available for this model. - */ - public long getTotal() { - return total; - } - - /** - * The number of jobs that have at least one forecast. - */ - public long getForecastedJobs() { - return forecastedJobs; - } - - /** - * Statistics about the memory usage: minimum, maximum, average and total. - */ - public SimpleStats getMemoryStats() { - return memoryStats; - } - - /** - * Statistics about the number of forecast records: minimum, maximum, average and total. - */ - public SimpleStats getRecordStats() { - return recordStats; - } - - /** - * Statistics about the forecast runtime in milliseconds: minimum, maximum, average and total - */ - public SimpleStats getRuntimeStats() { - return runtimeStats; - } - - /** - * Counts per forecast status, for example: {"finished" : 2}. - */ - public Map getStatusCounts() { - return statusCounts; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(TOTAL.getPreferredName(), total); - builder.field(FORECASTED_JOBS.getPreferredName(), forecastedJobs); - - if (total > 0) { - builder.field(MEMORY_BYTES.getPreferredName(), memoryStats); - builder.field(RECORDS.getPreferredName(), recordStats); - builder.field(PROCESSING_TIME_MS.getPreferredName(), runtimeStats); - builder.field(STATUS.getPreferredName(), statusCounts); - } - return builder.endObject(); - } - - @Override - public int hashCode() { - return Objects.hash(total, forecastedJobs, memoryStats, recordStats, runtimeStats, statusCounts); - } - - @Override - public boolean equals(Object obj) { - if (this == obj) { - return true; - } - - if (obj == null || getClass() != obj.getClass()) { - return false; - } - - ForecastStats other = (ForecastStats) obj; - return Objects.equals(total, other.total) - && Objects.equals(forecastedJobs, other.forecastedJobs) - && Objects.equals(memoryStats, other.memoryStats) - && Objects.equals(recordStats, other.recordStats) - && Objects.equals(runtimeStats, other.runtimeStats) - && Objects.equals(statusCounts, other.statusCounts); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/stats/JobStats.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/stats/JobStats.java deleted file mode 100644 index abf2a278ba763..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/stats/JobStats.java +++ /dev/null @@ -1,229 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ -package org.elasticsearch.client.ml.job.stats; - -import org.elasticsearch.client.ml.NodeAttributes; -import org.elasticsearch.client.ml.job.config.Job; -import org.elasticsearch.client.ml.job.config.JobState; -import org.elasticsearch.client.ml.job.process.DataCounts; -import org.elasticsearch.client.ml.job.process.ModelSizeStats; -import org.elasticsearch.client.ml.job.process.TimingStats; -import org.elasticsearch.core.Nullable; -import org.elasticsearch.core.TimeValue; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Objects; - -/** - * Class containing the statistics for a Machine Learning job. - * - */ -public class JobStats implements ToXContentObject { - - private static final ParseField DATA_COUNTS = new ParseField("data_counts"); - private static final ParseField MODEL_SIZE_STATS = new ParseField("model_size_stats"); - private static final ParseField TIMING_STATS = new ParseField("timing_stats"); - private static final ParseField FORECASTS_STATS = new ParseField("forecasts_stats"); - private static final ParseField STATE = new ParseField("state"); - private static final ParseField NODE = new ParseField("node"); - private static final ParseField OPEN_TIME = new ParseField("open_time"); - private static final ParseField ASSIGNMENT_EXPLANATION = new ParseField("assignment_explanation"); - - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>("job_stats", true, (a) -> { - int i = 0; - String jobId = (String) a[i++]; - DataCounts dataCounts = (DataCounts) a[i++]; - JobState jobState = (JobState) a[i++]; - ModelSizeStats.Builder modelSizeStatsBuilder = (ModelSizeStats.Builder) a[i++]; - ModelSizeStats modelSizeStats = modelSizeStatsBuilder == null ? 
null : modelSizeStatsBuilder.build(); - TimingStats timingStats = (TimingStats) a[i++]; - ForecastStats forecastStats = (ForecastStats) a[i++]; - NodeAttributes node = (NodeAttributes) a[i++]; - String assignmentExplanation = (String) a[i++]; - TimeValue openTime = (TimeValue) a[i]; - return new JobStats(jobId, dataCounts, jobState, modelSizeStats, timingStats, forecastStats, node, assignmentExplanation, openTime); - }); - - static { - PARSER.declareString(ConstructingObjectParser.constructorArg(), Job.ID); - PARSER.declareObject(ConstructingObjectParser.constructorArg(), DataCounts.PARSER, DATA_COUNTS); - PARSER.declareField( - ConstructingObjectParser.constructorArg(), - (p) -> JobState.fromString(p.text()), - STATE, - ObjectParser.ValueType.VALUE - ); - PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(), ModelSizeStats.PARSER, MODEL_SIZE_STATS); - PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(), TimingStats.PARSER, TIMING_STATS); - PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(), ForecastStats.PARSER, FORECASTS_STATS); - PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(), NodeAttributes.PARSER, NODE); - PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), ASSIGNMENT_EXPLANATION); - PARSER.declareField( - ConstructingObjectParser.optionalConstructorArg(), - (p, c) -> TimeValue.parseTimeValue(p.textOrNull(), OPEN_TIME.getPreferredName()), - OPEN_TIME, - ObjectParser.ValueType.STRING_OR_NULL - ); - } - - private final String jobId; - private final DataCounts dataCounts; - private final JobState state; - private final ModelSizeStats modelSizeStats; - private final TimingStats timingStats; - private final ForecastStats forecastStats; - private final NodeAttributes node; - private final String assignmentExplanation; - private final TimeValue openTime; - - JobStats( - String jobId, - DataCounts dataCounts, - JobState state, - @Nullable ModelSizeStats modelSizeStats, - @Nullable TimingStats timingStats, - @Nullable ForecastStats forecastStats, - @Nullable NodeAttributes node, - @Nullable String assignmentExplanation, - @Nullable TimeValue openTime - ) { - this.jobId = Objects.requireNonNull(jobId); - this.dataCounts = Objects.requireNonNull(dataCounts); - this.state = Objects.requireNonNull(state); - this.modelSizeStats = modelSizeStats; - this.timingStats = timingStats; - this.forecastStats = forecastStats; - this.node = node; - this.assignmentExplanation = assignmentExplanation; - this.openTime = openTime; - } - - /** - * The jobId referencing the job for these statistics - */ - public String getJobId() { - return jobId; - } - - /** - * An object that describes the number of records processed and any related error counts - * See {@link DataCounts} - */ - public DataCounts getDataCounts() { - return dataCounts; - } - - /** - * An object that provides information about the size and contents of the model. - * See {@link ModelSizeStats} - */ - public ModelSizeStats getModelSizeStats() { - return modelSizeStats; - } - - public TimingStats getTimingStats() { - return timingStats; - } - - /** - * An object that provides statistical information about forecasts of this job. 
- * See {@link ForecastStats} - */ - public ForecastStats getForecastStats() { - return forecastStats; - } - - /** - * The status of the job - * See {@link JobState} - */ - public JobState getState() { - return state; - } - - /** - * For open jobs only, contains information about the node where the job runs - * See {@link NodeAttributes} - */ - public NodeAttributes getNode() { - return node; - } - - /** - * For open jobs only, contains messages relating to the selection of a node to run the job. - */ - public String getAssignmentExplanation() { - return assignmentExplanation; - } - - /** - * For open jobs only, the elapsed time for which the job has been open - */ - public TimeValue getOpenTime() { - return openTime; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(Job.ID.getPreferredName(), jobId); - builder.field(DATA_COUNTS.getPreferredName(), dataCounts); - builder.field(STATE.getPreferredName(), state.toString()); - if (modelSizeStats != null) { - builder.field(MODEL_SIZE_STATS.getPreferredName(), modelSizeStats); - } - if (timingStats != null) { - builder.field(TIMING_STATS.getPreferredName(), timingStats); - } - if (forecastStats != null) { - builder.field(FORECASTS_STATS.getPreferredName(), forecastStats); - } - if (node != null) { - builder.field(NODE.getPreferredName(), node); - } - if (assignmentExplanation != null) { - builder.field(ASSIGNMENT_EXPLANATION.getPreferredName(), assignmentExplanation); - } - if (openTime != null) { - builder.field(OPEN_TIME.getPreferredName(), openTime.getStringRep()); - } - return builder.endObject(); - } - - @Override - public int hashCode() { - return Objects.hash(jobId, dataCounts, modelSizeStats, timingStats, forecastStats, state, node, assignmentExplanation, openTime); - } - - @Override - public boolean equals(Object obj) { - if (this == obj) { - return true; - } - - if (obj == null || getClass() != obj.getClass()) { - return false; - } - - JobStats other = (JobStats) obj; - return Objects.equals(jobId, other.jobId) - && Objects.equals(this.dataCounts, other.dataCounts) - && Objects.equals(this.modelSizeStats, other.modelSizeStats) - && Objects.equals(this.timingStats, other.timingStats) - && Objects.equals(this.forecastStats, other.forecastStats) - && Objects.equals(this.state, other.state) - && Objects.equals(this.node, other.node) - && Objects.equals(this.assignmentExplanation, other.assignmentExplanation) - && Objects.equals(this.openTime, other.openTime); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/stats/SimpleStats.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/stats/SimpleStats.java deleted file mode 100644 index 01050d93b1a91..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/stats/SimpleStats.java +++ /dev/null @@ -1,104 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */
-package org.elasticsearch.client.ml.job.stats;
-
-import org.elasticsearch.xcontent.ConstructingObjectParser;
-import org.elasticsearch.xcontent.ParseField;
-import org.elasticsearch.xcontent.ToXContentObject;
-import org.elasticsearch.xcontent.XContentBuilder;
-
-import java.io.IOException;
-import java.util.Objects;
-
-/**
- * Helper class for min, max, avg and total statistics for a quantity
- */
-public class SimpleStats implements ToXContentObject {
-
-    public static final ParseField MIN = new ParseField("min");
-    public static final ParseField MAX = new ParseField("max");
-    public static final ParseField AVG = new ParseField("avg");
-    public static final ParseField TOTAL = new ParseField("total");
-
-    public static final ConstructingObjectParser<SimpleStats, Void> PARSER = new ConstructingObjectParser<>("simple_stats", true, (a) -> {
-        int i = 0;
-        double total = (double) a[i++];
-        double min = (double) a[i++];
-        double max = (double) a[i++];
-        double avg = (double) a[i++];
-        return new SimpleStats(total, min, max, avg);
-    });
-
-    static {
-        PARSER.declareDouble(ConstructingObjectParser.constructorArg(), TOTAL);
-        PARSER.declareDouble(ConstructingObjectParser.constructorArg(), MIN);
-        PARSER.declareDouble(ConstructingObjectParser.constructorArg(), MAX);
-        PARSER.declareDouble(ConstructingObjectParser.constructorArg(), AVG);
-    }
-
-    private final double total;
-    private final double min;
-    private final double max;
-    private final double avg;
-
-    SimpleStats(double total, double min, double max, double avg) {
-        this.total = total;
-        this.min = min;
-        this.max = max;
-        this.avg = avg;
-    }
-
-    public double getMin() {
-        return min;
-    }
-
-    public double getMax() {
-        return max;
-    }
-
-    public double getAvg() {
-        return avg;
-    }
-
-    public double getTotal() {
-        return total;
-    }
-
-    @Override
-    public int hashCode() {
-        return Objects.hash(total, min, max, avg);
-    }
-
-    @Override
-    public boolean equals(Object obj) {
-        if (this == obj) {
-            return true;
-        }
-
-        if (obj == null || getClass() != obj.getClass()) {
-            return false;
-        }
-
-        SimpleStats other = (SimpleStats) obj;
-        return Objects.equals(total, other.total)
-            && Objects.equals(min, other.min)
-            && Objects.equals(avg, other.avg)
-            && Objects.equals(max, other.max);
-    }
-
-    @Override
-    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
-        builder.startObject();
-        builder.field(MIN.getPreferredName(), min);
-        builder.field(MAX.getPreferredName(), max);
-        builder.field(AVG.getPreferredName(), avg);
-        builder.field(TOTAL.getPreferredName(), total);
-        builder.endObject();
-        return builder;
-    }
-}
diff --git a/client/rest-high-level/src/main/resources/META-INF/services/org.elasticsearch.plugins.spi.NamedXContentProvider b/client/rest-high-level/src/main/resources/META-INF/services/org.elasticsearch.plugins.spi.NamedXContentProvider
index 9426b3d1bdde7..20b144ef1c562 100644
--- a/client/rest-high-level/src/main/resources/META-INF/services/org.elasticsearch.plugins.spi.NamedXContentProvider
+++ b/client/rest-high-level/src/main/resources/META-INF/services/org.elasticsearch.plugins.spi.NamedXContentProvider
@@ -1,6 +1,2 @@
 org.elasticsearch.client.ilm.IndexLifecycleNamedXContentProvider
-org.elasticsearch.client.ml.dataframe.MlDataFrameAnalysisNamedXContentProvider
-org.elasticsearch.client.ml.dataframe.evaluation.MlEvaluationNamedXContentProvider
-org.elasticsearch.client.ml.dataframe.stats.AnalysisStatsNamedXContentProvider
-org.elasticsearch.client.ml.inference.MlInferenceNamedXContentProvider
 org.elasticsearch.client.transform.TransformNamedXContentProvider
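The hunk above edits a standard Java SPI registration file: each line names a NamedXContentProvider implementation that ServiceLoader discovers on the classpath, so deleting the four ML entries unregisters their named parsers along with the removed classes. A minimal sketch of the lookup such files drive (illustrative only, not the client's exact wiring):

    import java.util.ServiceLoader;
    import org.elasticsearch.plugins.spi.NamedXContentProvider;

    public class SpiLookupSketch {
        public static void main(String[] args) {
            // ServiceLoader reads every META-INF/services/org.elasticsearch.plugins.spi.NamedXContentProvider
            // file on the classpath and instantiates the classes listed inside it.
            for (NamedXContentProvider provider : ServiceLoader.load(NamedXContentProvider.class)) {
                System.out.println(provider.getClass().getName());
            }
        }
    }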

From ac3d0beaf00e3b9b6f95fc889b6783e627609705 Mon Sep 17 00:00:00 2001
From: Benjamin Trent
Date: Tue, 15 Feb 2022 07:06:47 -0500
Subject: [PATCH 098/167] [ML] refactoring internal tokenization logic for NLP (#83835)

This simplifies the internal logic used to pass tokenization results
around while streamlining how the request sent to the model is built.
This helps lay some of the groundwork for windowing, as collapsing
request building and token results will be required there (a single
sequence could result in a batch request).

Additionally, many of the IntelliJ warnings are addressed and the code
is modernized (e.g. by taking advantage of records).
---
 .../deployment/DeploymentManager.java         |  19 +-
 .../ml/inference/nlp/BertRequestBuilder.java  |  71 -------
 .../ml/inference/nlp/FillMaskProcessor.java   |  38 ++--
 .../ml/inference/nlp/MPNetRequestBuilder.java |  66 ------
 .../xpack/ml/inference/nlp/NerProcessor.java  |  49 ++---
 .../xpack/ml/inference/nlp/NlpTask.java       |  79 ++------
 .../inference/nlp/PassThroughProcessor.java   |  12 +-
 .../nlp/TextClassificationProcessor.java      |  10 +-
 .../inference/nlp/TextEmbeddingProcessor.java |  10 +-
 .../nlp/ZeroShotClassificationProcessor.java  |  48 +----
 .../tokenizers/BertTokenizationResult.java    | 118 ++++++++++++
 .../nlp/tokenizers/BertTokenizer.java         | 127 +++---------
 .../nlp/tokenizers/CharSeqTokenTrieNode.java  |   6 +-
 .../tokenizers/MPNetTokenizationResult.java   |  78 ++++++++
 .../nlp/tokenizers/MPNetTokenizer.java        |  44 ++---
 .../nlp/tokenizers/NlpTokenizer.java          |  12 +-
 .../nlp/tokenizers/TokenizationResult.java    | 181 +++++++++++-------
 .../nlp/tokenizers/WordPieceTokenFilter.java  |  22 +--
 .../results/PyTorchInferenceResult.java       |   8 +-
 ....java => BertTokenizationResultTests.java} |  15 +-
 .../inference/nlp/FillMaskProcessorTests.java |  24 ++-
 ...java => MPNetTokenizationResultTests.java} |  15 +-
 .../ml/inference/nlp/NerProcessorTests.java   |   8 +-
 .../nlp/TextClassificationProcessorTests.java |   2 +-
 .../ZeroShotClassificationProcessorTests.java |   2 +-
 .../nlp/tokenizers/BertTokenizerTests.java    | 122 ++++++------
 .../nlp/tokenizers/MPNetTokenizerTests.java   |  18 +-
 27 files changed, 542 insertions(+), 662 deletions(-)
 delete mode 100644 x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/BertRequestBuilder.java
 delete mode 100644 x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/MPNetRequestBuilder.java
 create mode 100644 x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/BertTokenizationResult.java
 create mode 100644 x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/MPNetTokenizationResult.java
 rename x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/{BertRequestBuilderTests.java => BertTokenizationResultTests.java} (91%)
 rename x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/{MPNetRequestBuilderTests.java => MPNetTokenizationResultTests.java} (89%)
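The record conversions the message refers to show up below in carriers such as NlpTask.Request and NerProcessor.TaggedToken, and they all follow the same shape. A reduced standalone sketch of the pattern (String fields stand in for the real types, which are assumptions of this sketch):

    // Before: a hand-written immutable carrier with explicit fields; call sites
    // read the public field directly, e.g. request.processInput.
    final class RequestBefore {
        final String tokenization;
        final String processInput;

        RequestBefore(String tokenization, String processInput) {
            this.tokenization = java.util.Objects.requireNonNull(tokenization);
            this.processInput = java.util.Objects.requireNonNull(processInput);
        }
    }

    // After: a record generates the constructor, accessors, equals/hashCode and
    // toString; call sites switch to the accessor, e.g. request.processInput().
    record RequestAfter(String tokenization, String processInput) {
        RequestAfter {
            // compact canonical constructor keeps the null checks
            java.util.Objects.requireNonNull(tokenization);
            java.util.Objects.requireNonNull(processInput);
        }
    }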
diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/deployment/DeploymentManager.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/deployment/DeploymentManager.java
index 34e7f6d0740e0..a8f744d7181e7 100644
--- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/deployment/DeploymentManager.java
+++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/deployment/DeploymentManager.java
@@ -29,6 +29,7 @@
 import org.elasticsearch.xcontent.NamedXContentRegistry;
 import org.elasticsearch.xcontent.XContentFactory;
 import org.elasticsearch.xcontent.XContentParser;
+import org.elasticsearch.xcontent.XContentParserConfiguration;
 import org.elasticsearch.xcontent.XContentType;
 import org.elasticsearch.xpack.core.ml.action.GetTrainedModelsAction;
 import org.elasticsearch.xpack.core.ml.inference.TrainedModelConfig;
@@ -201,7 +202,11 @@ Vocabulary parseVocabularyDocLeniently(SearchHit hit) throws IOException {
         try (
             InputStream stream = hit.getSourceRef().streamInput();
             XContentParser parser = XContentFactory.xContent(XContentType.JSON)
-                .createParser(xContentRegistry, LoggingDeprecationHandler.INSTANCE, stream)
+                .createParser(
+                    XContentParserConfiguration.EMPTY.withRegistry(xContentRegistry)
+                        .withDeprecationHandler(LoggingDeprecationHandler.INSTANCE),
+                    stream
+                )
         ) {
             return Vocabulary.createParser(true).apply(parser, null);
         } catch (IOException e) {
@@ -374,8 +379,8 @@ protected void doRun() throws Exception {
                 NlpConfig nlpConfig = (NlpConfig) config;
                 NlpTask.Request request = processor.getRequestBuilder(nlpConfig)
                     .buildRequest(text, requestIdStr, nlpConfig.getTokenization().getTruncate());
-                logger.debug(() -> "Inference Request " + request.processInput.utf8ToString());
-                if (request.tokenization.anyTruncated()) {
+                logger.debug(() -> "Inference Request " + request.processInput().utf8ToString());
+                if (request.tokenization().anyTruncated()) {
                     logger.debug("[{}] [{}] input truncated", modelId, requestId);
                 }
                 processContext.getResultProcessor()
@@ -385,14 +390,14 @@
                         inferenceResult -> processResult(
                             inferenceResult,
                             processContext,
-                            request.tokenization,
+                            request.tokenization(),
                             processor.getResultProcessor((NlpConfig) config),
                             this
                         ),
                         this::onFailure
                     )
                 );
-            processContext.process.get().writeInferenceRequest(request.processInput);
+            processContext.process.get().writeInferenceRequest(request.processInput());
         } catch (IOException e) {
             logger.error(new ParameterizedMessage("[{}] error writing to inference process", processContext.task.getModelId()), e);
             onFailure(ExceptionsHelper.serverError("Error writing to inference process", e));
@@ -448,8 +453,8 @@ class ProcessContext {
         private volatile Instant startTime;
         private volatile Integer inferenceThreads;
         private volatile Integer modelThreads;
-        private AtomicInteger rejectedExecutionCount = new AtomicInteger();
-        private AtomicInteger timeoutCount = new AtomicInteger();
+        private final AtomicInteger rejectedExecutionCount = new AtomicInteger();
+        private final AtomicInteger timeoutCount = new AtomicInteger();

         ProcessContext(TrainedModelDeploymentTask task, ExecutorService executorService) {
             this.task = Objects.requireNonNull(task);
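BertRequestBuilder, deleted next, wrote each batch to the process as rectangular arrays: every sequence is emitted at the length of the longest one, with the tokenizer's pad-token id filling the tail, which is what its writePaddedTokens call did while streaming JSON. A standalone sketch of that padding scheme, with made-up token ids (the class and values are illustrative, not the plugin's API):

    import java.util.Arrays;
    import java.util.List;

    public class PaddingSketch {
        static int[][] pad(List<int[]> sequences, int padTokenId) {
            // Pad every row out to the longest sequence in the batch.
            int longest = sequences.stream().mapToInt(s -> s.length).max().orElse(0);
            int[][] padded = new int[sequences.size()][];
            for (int i = 0; i < sequences.size(); i++) {
                int[] row = Arrays.copyOf(sequences.get(i), longest);
                Arrays.fill(row, sequences.get(i).length, longest, padTokenId);
                padded[i] = row;
            }
            return padded;
        }

        public static void main(String[] args) {
            // Two sequences, pad id 0: the shorter row is right-padded to length 5.
            int[][] out = pad(List.of(new int[] { 101, 7592, 102 }, new int[] { 101, 7592, 2088, 999, 102 }), 0);
            System.out.println(Arrays.deepToString(out));
        }
    }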
diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/BertRequestBuilder.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/BertRequestBuilder.java
deleted file mode 100644
index 88a6b6b8739f4..0000000000000
--- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/BertRequestBuilder.java
+++ /dev/null
@@ -1,71 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0; you may not use this file except in compliance with the Elastic License
- * 2.0.
- */
-
-package org.elasticsearch.xpack.ml.inference.nlp;
-
-import org.elasticsearch.common.bytes.BytesReference;
-import org.elasticsearch.xcontent.XContentBuilder;
-import org.elasticsearch.xcontent.XContentFactory;
-import org.elasticsearch.xpack.core.ml.inference.trainedmodel.Tokenization;
-import org.elasticsearch.xpack.ml.inference.nlp.tokenizers.NlpTokenizer;
-import org.elasticsearch.xpack.ml.inference.nlp.tokenizers.TokenizationResult;
-
-import java.io.IOException;
-import java.util.List;
-import java.util.stream.Collectors;
-
-public class BertRequestBuilder implements NlpTask.RequestBuilder {
-
-    static final String REQUEST_ID = "request_id";
-    static final String TOKENS = "tokens";
-    static final String ARG1 = "arg_1";
-    static final String ARG2 = "arg_2";
-    static final String ARG3 = "arg_3";
-
-    private final NlpTokenizer tokenizer;
-
-    public BertRequestBuilder(NlpTokenizer tokenizer) {
-        this.tokenizer = tokenizer;
-    }
-
-    @Override
-    public NlpTask.Request buildRequest(List<String> inputs, String requestId, Tokenization.Truncate truncate) throws IOException {
-        if (tokenizer.getPadTokenId().isEmpty()) {
-            throw new IllegalStateException("The input tokenizer does not have a " + tokenizer.getPadToken() + " token in its vocabulary");
-        }
-
-        TokenizationResult tokenization = tokenizer.buildTokenizationResult(
-            inputs.stream().map(s -> tokenizer.tokenize(s, truncate)).collect(Collectors.toList())
-        );
-        return buildRequest(tokenization, requestId);
-    }
-
-    @Override
-    public NlpTask.Request buildRequest(TokenizationResult tokenization, String requestId) throws IOException {
-        if (tokenizer.getPadTokenId().isEmpty()) {
-            throw new IllegalStateException("The input tokenizer does not have a " + tokenizer.getPadToken() + " token in its vocabulary");
-        }
-        return new NlpTask.Request(tokenization, jsonRequest(tokenization, tokenizer.getPadTokenId().getAsInt(), requestId));
-    }
-
-    static BytesReference jsonRequest(TokenizationResult tokenization, int padToken, String requestId) throws IOException {
-        XContentBuilder builder = XContentFactory.jsonBuilder();
-        builder.startObject();
-        builder.field(REQUEST_ID, requestId);
-
-        NlpTask.RequestBuilder.writePaddedTokens(TOKENS, tokenization, padToken, (tokens, i) -> tokens.getTokenIds()[i], builder);
-        NlpTask.RequestBuilder.writePaddedTokens(ARG1, tokenization, padToken, (tokens, i) -> 1, builder);
-        int batchSize = tokenization.getTokenizations().size();
-        NlpTask.RequestBuilder.writeNonPaddedArguments(ARG2, batchSize, tokenization.getLongestSequenceLength(), i -> 0, builder);
-        NlpTask.RequestBuilder.writeNonPaddedArguments(ARG3, batchSize, tokenization.getLongestSequenceLength(), i -> i, builder);
-        builder.endObject();
-
-        // BytesReference.bytes closes the builder
-        return BytesReference.bytes(builder);
-    }
-
-}
diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/FillMaskProcessor.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/FillMaskProcessor.java
index db79d24e35821..2ce2fc1ea471d 100644
--- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/FillMaskProcessor.java
+++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/FillMaskProcessor.java
@@ -23,20 +23,14 @@
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Optional;
+import java.util.OptionalInt;

 import static org.elasticsearch.xpack.core.ml.inference.trainedmodel.InferenceConfig.DEFAULT_RESULTS_FIELD;

-public class FillMaskProcessor implements
NlpTask.Processor { - - private final NlpTokenizer tokenizer; +public class FillMaskProcessor extends NlpTask.Processor { FillMaskProcessor(NlpTokenizer tokenizer, FillMaskConfig config) { - this.tokenizer = tokenizer; - } - - @Override - public void close() { - tokenizer.close(); + super(tokenizer); } @Override @@ -97,7 +91,7 @@ static InferenceResults processResult( int numResults, String resultsField ) { - if (tokenization.getTokenizations().isEmpty() || tokenization.getTokenizations().get(0).getTokenIds().length == 0) { + if (tokenization.isEmpty()) { throw new ElasticsearchStatusException("tokenization is empty", RestStatus.INTERNAL_SERVER_ERROR); } @@ -108,25 +102,20 @@ static InferenceResults processResult( ); } - int maskTokenIndex = -1; int maskTokenId = tokenizer.getMaskTokenId().getAsInt(); - for (int i = 0; i < tokenization.getTokenizations().get(0).getTokenIds().length; i++) { - if (tokenization.getTokenizations().get(0).getTokenIds()[i] == maskTokenId) { - maskTokenIndex = i; - break; - } - } - if (maskTokenIndex == -1) { + OptionalInt maskTokenIndex = tokenization.getTokenization(0).getTokenIndex(maskTokenId); + if (maskTokenIndex.isEmpty()) { throw new ElasticsearchStatusException( - "mask token id [{}] not found in the tokenization {}", + "mask token id [{}] not found in the tokenization", RestStatus.INTERNAL_SERVER_ERROR, - maskTokenId, - List.of(tokenization.getTokenizations().get(0).getTokenIds()) + maskTokenId ); } // TODO - process all results in the batch - double[] normalizedScores = NlpHelpers.convertToProbabilitiesBySoftMax(pyTorchResult.getInferenceResult()[0][maskTokenIndex]); + double[] normalizedScores = NlpHelpers.convertToProbabilitiesBySoftMax( + pyTorchResult.getInferenceResult()[0][maskTokenIndex.getAsInt()] + ); NlpHelpers.ScoreAndIndex[] scoreAndIndices = NlpHelpers.topK( // We need at least one to record the result @@ -142,10 +131,7 @@ static InferenceResults processResult( } return new FillMaskResults( tokenization.getFromVocab(scoreAndIndices[0].index), - tokenization.getTokenizations() - .get(0) - .getInput() - .replace(tokenizer.getMaskToken(), tokenization.getFromVocab(scoreAndIndices[0].index)), + tokenization.getTokenization(0).input().replace(tokenizer.getMaskToken(), tokenization.getFromVocab(scoreAndIndices[0].index)), results, Optional.ofNullable(resultsField).orElse(DEFAULT_RESULTS_FIELD), scoreAndIndices[0].score, diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/MPNetRequestBuilder.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/MPNetRequestBuilder.java deleted file mode 100644 index f8ea5a513aa76..0000000000000 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/MPNetRequestBuilder.java +++ /dev/null @@ -1,66 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -package org.elasticsearch.xpack.ml.inference.nlp; - -import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentFactory; -import org.elasticsearch.xpack.core.ml.inference.trainedmodel.Tokenization; -import org.elasticsearch.xpack.ml.inference.nlp.tokenizers.NlpTokenizer; -import org.elasticsearch.xpack.ml.inference.nlp.tokenizers.TokenizationResult; - -import java.io.IOException; -import java.util.List; -import java.util.stream.Collectors; - -public class MPNetRequestBuilder implements NlpTask.RequestBuilder { - - static final String REQUEST_ID = "request_id"; - static final String TOKENS = "tokens"; - static final String ARG1 = "arg_1"; - - private final NlpTokenizer tokenizer; - - public MPNetRequestBuilder(NlpTokenizer tokenizer) { - this.tokenizer = tokenizer; - } - - @Override - public NlpTask.Request buildRequest(List inputs, String requestId, Tokenization.Truncate truncate) throws IOException { - if (tokenizer.getPadTokenId().isEmpty()) { - throw new IllegalStateException("The input tokenizer does not have a " + tokenizer.getPadToken() + " token in its vocabulary"); - } - - TokenizationResult tokenization = tokenizer.buildTokenizationResult( - inputs.stream().map(s -> tokenizer.tokenize(s, truncate)).collect(Collectors.toList()) - ); - return buildRequest(tokenization, requestId); - } - - @Override - public NlpTask.Request buildRequest(TokenizationResult tokenization, String requestId) throws IOException { - if (tokenizer.getPadTokenId().isEmpty()) { - throw new IllegalStateException("The input tokenizer does not have a " + tokenizer.getPadToken() + " token in its vocabulary"); - } - return new NlpTask.Request(tokenization, jsonRequest(tokenization, tokenizer.getPadTokenId().getAsInt(), requestId)); - } - - static BytesReference jsonRequest(TokenizationResult tokenization, int padToken, String requestId) throws IOException { - XContentBuilder builder = XContentFactory.jsonBuilder(); - builder.startObject(); - builder.field(REQUEST_ID, requestId); - - NlpTask.RequestBuilder.writePaddedTokens(TOKENS, tokenization, padToken, (tokens, i) -> tokens.getTokenIds()[i], builder); - NlpTask.RequestBuilder.writePaddedTokens(ARG1, tokenization, padToken, (tokens, i) -> 1, builder); - builder.endObject(); - - // BytesReference.bytes closes the builder - return BytesReference.bytes(builder); - } - -} diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/NerProcessor.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/NerProcessor.java index 177bc387ea87a..1aa9ce8e6b0f6 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/NerProcessor.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/NerProcessor.java @@ -32,7 +32,7 @@ import static org.elasticsearch.xpack.core.ml.inference.trainedmodel.InferenceConfig.DEFAULT_RESULTS_FIELD; -public class NerProcessor implements NlpTask.Processor { +public class NerProcessor extends NlpTask.Processor { public enum Entity implements Writeable { NONE, @@ -83,20 +83,14 @@ boolean isBeginning() { private final IobTag[] iobMap; private final String resultsField; private final boolean ignoreCase; - private final NlpTokenizer tokenizer; NerProcessor(NlpTokenizer tokenizer, NerConfig config) { + super(tokenizer); validate(config.getClassificationLabels()); this.iobMap = buildIobMap(config.getClassificationLabels()); this.requestBuilder = 
tokenizer.requestBuilder();
         this.resultsField = config.getResultsField();
         this.ignoreCase = config.getTokenization().doLowerCase();
-        this.tokenizer = tokenizer;
-    }
-
-    @Override
-    public void close() {
-        tokenizer.close();
     }

     /**
@@ -188,11 +182,7 @@ static String buildAnnotatedText(String seq, List entiti
         return annotatedResultBuilder.toString();
     }

-    static class NerResultProcessor implements NlpTask.ResultProcessor {
-        private final IobTag[] iobMap;
-        private final String resultsField;
-        private final boolean ignoreCase;
-
+    record NerResultProcessor(IobTag[] iobMap, String resultsField, boolean ignoreCase) implements NlpTask.ResultProcessor {
         NerResultProcessor(IobTag[] iobMap, String resultsField, boolean ignoreCase) {
             this.iobMap = iobMap;
             this.resultsField = Optional.ofNullable(resultsField).orElse(DEFAULT_RESULTS_FIELD);
@@ -201,7 +191,7 @@ static class NerResultProcessor implements NlpTask.ResultProcessor {
         @Override
         public InferenceResults processResult(TokenizationResult tokenization, PyTorchInferenceResult pyTorchResult) {
-            if (tokenization.getTokenizations().isEmpty() || tokenization.getTokenizations().get(0).getTokenIds().length == 0) {
+            if (tokenization.isEmpty()) {
                 throw new ElasticsearchStatusException("no valid tokenization to build result", RestStatus.INTERNAL_SERVER_ERROR);
             }
             // TODO - process all results in the batch
@@ -213,18 +203,16 @@ public InferenceResults processResult(TokenizationResult tokenization, PyTorchIn
             // of maybe (1 + 0) / 2 = 0.5 while before softmax it'd be exp(10 - 5) / normalization
             // which could easily be close to 1.
             double[][] normalizedScores = NlpHelpers.convertToProbabilitiesBySoftMax(pyTorchResult.getInferenceResult()[0]);
-            List<TaggedToken> taggedTokens = tagTokens(tokenization.getTokenizations().get(0), normalizedScores, iobMap);
+            List<TaggedToken> taggedTokens = tagTokens(tokenization.getTokenization(0), normalizedScores, iobMap);

             List<NerResults.EntityGroup> entities = groupTaggedTokens(
                 taggedTokens,
-                ignoreCase
-                    ? tokenization.getTokenizations().get(0).getInput().toLowerCase(Locale.ROOT)
-                    : tokenization.getTokenizations().get(0).getInput()
+                ignoreCase ? tokenization.getTokenization(0).input().toLowerCase(Locale.ROOT) : tokenization.getTokenization(0).input()
             );

             return new NerResults(
                 resultsField,
-                buildAnnotatedText(tokenization.getTokenizations().get(0).getInput(), entities),
+                buildAnnotatedText(tokenization.getTokenization(0).input(), entities),
                 entities,
                 tokenization.anyTruncated()
             );
@@ -236,12 +224,12 @@ public InferenceResults processResult(TokenizationResult tokenization, PyTorchIn
          * in the original input replacing them with a single token that
          * gets labelled based on the average score of all its sub-tokens.
          */
-        static List<TaggedToken> tagTokens(TokenizationResult.Tokenization tokenization, double[][] scores, IobTag[] iobMap) {
+        static List<TaggedToken> tagTokens(TokenizationResult.Tokens tokenization, double[][] scores, IobTag[] iobMap) {
            List<TaggedToken> taggedTokens = new ArrayList<>();
            int startTokenIndex = 0;
            int numSpecialTokens = 0;
-            while (startTokenIndex < tokenization.getTokenIds().length) {
-                int inputMapping = tokenization.getTokenMap()[startTokenIndex];
+            while (startTokenIndex < tokenization.tokenIds().length) {
+                int inputMapping = tokenization.tokenIds()[startTokenIndex];
                 if (inputMapping < 0) {
                     // This token does not map to a token in the input (special tokens)
                     startTokenIndex++;
@@ -249,8 +237,7 @@ static List tagTokens(TokenizationResult.Tokenization tokenization,
                     continue;
                 }
                 int endTokenIndex = startTokenIndex;
-                while (endTokenIndex < tokenization.getTokenMap().length - 1
-                    && tokenization.getTokenMap()[endTokenIndex + 1] == inputMapping) {
+                while (endTokenIndex < tokenization.tokenMap().length - 1 && tokenization.tokenMap()[endTokenIndex + 1] == inputMapping) {
                     endTokenIndex++;
                 }
                 double[] avgScores = Arrays.copyOf(scores[startTokenIndex], iobMap.length);
@@ -268,7 +255,7 @@ static List tagTokens(TokenizationResult.Tokenization tokenization,
                 int maxScoreIndex = NlpHelpers.argmax(avgScores);
                 double score = avgScores[maxScoreIndex];
                 taggedTokens.add(
-                    new TaggedToken(tokenization.getTokens().get(startTokenIndex - numSpecialTokens), iobMap[maxScoreIndex], score)
+                    new TaggedToken(tokenization.tokens().get(startTokenIndex - numSpecialTokens), iobMap[maxScoreIndex], score)
                 );
                 startTokenIndex = endTokenIndex + 1;
             }
@@ -325,17 +312,7 @@ static List groupTaggedTokens(List tokens,
             return entities;
         }

-        static class TaggedToken {
-            private final DelimitedToken token;
-            private final IobTag tag;
-            private final double score;
-
-            TaggedToken(DelimitedToken token, IobTag tag, double score) {
-                this.token = token;
-                this.tag = tag;
-                this.score = score;
-            }
-
+        record TaggedToken(DelimitedToken token, IobTag tag, double score) {
             @Override
             public String toString() {
                 return new StringBuilder("{").append("token:")
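The comment in processResult above argues for averaging sub-token scores after the softmax rather than before it, so that one very confident sub-token cannot drown out an undecided one. A small worked version of that argument, with invented two-class scores for two sub-tokens of a single word:

    public class SoftmaxOrderSketch {
        static double[] softmax(double[] scores) {
            double norm = 0;
            double[] probs = new double[scores.length];
            for (double s : scores) norm += Math.exp(s);
            for (int i = 0; i < scores.length; i++) probs[i] = Math.exp(scores[i]) / norm;
            return probs;
        }

        public static void main(String[] args) {
            double[] confident = { 10, 5 }; // first sub-token strongly prefers class 0
            double[] unsure = { 0, 0 };     // second sub-token has no preference

            // Averaging probabilities keeps the unsure sub-token's uncertainty visible.
            double afterSoftmax = (softmax(confident)[0] + softmax(unsure)[0]) / 2; // ~0.75

            // Softmaxing the averaged raw scores lets the confident sub-token dominate.
            double beforeSoftmax = softmax(new double[] { (10 + 0) / 2.0, (5 + 0) / 2.0 })[0]; // ~0.92

            System.out.printf("average after softmax: %.2f, softmax of averaged scores: %.2f%n", afterSoftmax, beforeSoftmax);
        }
    }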
diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/NlpTask.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/NlpTask.java
index 4ee96e78db0a6..43fa0d8a2488d 100644
--- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/NlpTask.java
+++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/NlpTask.java
@@ -11,7 +11,6 @@
 import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.xcontent.support.XContentMapValues;
 import org.elasticsearch.core.Releasable;
-import org.elasticsearch.xcontent.XContentBuilder;
 import org.elasticsearch.xpack.core.ml.inference.TrainedModelInput;
 import org.elasticsearch.xpack.core.ml.inference.results.InferenceResults;
 import org.elasticsearch.xpack.core.ml.inference.trainedmodel.NlpConfig;
@@ -46,78 +45,37 @@ public Processor createProcessor() throws ValidationException {
     }

     public interface RequestBuilder {
-        @FunctionalInterface
-        interface IntToIntFunction {
-            int applyAsInt(int value);
-        }
-
-        @FunctionalInterface
-        interface TokenLookupFunction {
-            int apply(TokenizationResult.Tokenization tokenization, int index);
-        }
-
         Request buildRequest(List<String> inputs, String requestId, Tokenization.Truncate truncate) throws IOException;
-
-        Request buildRequest(TokenizationResult tokenizationResult, String requestId) throws IOException;
-
-        static void
writePaddedTokens( - String fieldName, - TokenizationResult tokenization, - int padToken, - TokenLookupFunction generator, - XContentBuilder builder - ) throws IOException { - builder.startArray(fieldName); - for (var inputTokens : tokenization.getTokenizations()) { - builder.startArray(); - int i = 0; - for (; i < inputTokens.getTokenIds().length; i++) { - builder.value(generator.apply(inputTokens, i)); - } - - for (; i < tokenization.getLongestSequenceLength(); i++) { - builder.value(padToken); - } - builder.endArray(); - } - builder.endArray(); - } - - static void writeNonPaddedArguments( - String fieldName, - int numTokenizations, - int longestSequenceLength, - IntToIntFunction generator, - XContentBuilder builder - ) throws IOException { - builder.startArray(fieldName); - for (int i = 0; i < numTokenizations; i++) { - builder.startArray(); - for (int j = 0; j < longestSequenceLength; j++) { - builder.value(generator.applyAsInt(j)); - } - builder.endArray(); - } - builder.endArray(); - } } public interface ResultProcessor { InferenceResults processResult(TokenizationResult tokenization, PyTorchInferenceResult pyTorchResult); } - public interface Processor extends Releasable { + public abstract static class Processor implements Releasable { + + protected final NlpTokenizer tokenizer; + + public Processor(NlpTokenizer tokenizer) { + this.tokenizer = tokenizer; + } + + @Override + public void close() { + tokenizer.close(); + } + /** * Validate the task input string. * Throws an exception if the inputs fail validation * * @param inputs Text to validate */ - void validateInputs(List inputs); + public abstract void validateInputs(List inputs); - RequestBuilder getRequestBuilder(NlpConfig config); + public abstract RequestBuilder getRequestBuilder(NlpConfig config); - ResultProcessor getResultProcessor(NlpConfig config); + public abstract ResultProcessor getResultProcessor(NlpConfig config); } public static String extractInput(TrainedModelInput input, Map doc) { @@ -133,10 +91,7 @@ public static String extractInput(TrainedModelInput input, Map d throw ExceptionsHelper.badRequestException("Input value [{}] for field [{}] must be a string", inputValue, inputField); } - public static class Request { - public final TokenizationResult tokenization; - public final BytesReference processInput; - + public record Request(TokenizationResult tokenization, BytesReference processInput) { public Request(TokenizationResult tokenization, BytesReference processInput) { this.tokenization = Objects.requireNonNull(tokenization); this.processInput = Objects.requireNonNull(processInput); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/PassThroughProcessor.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/PassThroughProcessor.java index 146967ffb04d4..f4859405d35b9 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/PassThroughProcessor.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/PassThroughProcessor.java @@ -24,21 +24,13 @@ * A NLP processor that directly returns the PyTorch result * without any post-processing */ -public class PassThroughProcessor implements NlpTask.Processor { +public class PassThroughProcessor extends NlpTask.Processor { private final NlpTask.RequestBuilder requestBuilder; - private final NlpTokenizer tokenizer; - private final String resultsField; PassThroughProcessor(NlpTokenizer tokenizer, PassThroughConfig config) { + super(tokenizer); this.requestBuilder 
diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/PassThroughProcessor.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/PassThroughProcessor.java
index 146967ffb04d4..f4859405d35b9 100644
--- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/PassThroughProcessor.java
+++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/PassThroughProcessor.java
@@ -24,21 +24,13 @@
  * A NLP processor that directly returns the PyTorch result
  * without any post-processing
  */
-public class PassThroughProcessor implements NlpTask.Processor {
+public class PassThroughProcessor extends NlpTask.Processor {
 
     private final NlpTask.RequestBuilder requestBuilder;
-    private final NlpTokenizer tokenizer;
-    private final String resultsField;
 
     PassThroughProcessor(NlpTokenizer tokenizer, PassThroughConfig config) {
+        super(tokenizer);
         this.requestBuilder = tokenizer.requestBuilder();
-        this.resultsField = config.getResultsField();
-        this.tokenizer = tokenizer;
-    }
-
-    @Override
-    public void close() {
-        tokenizer.close();
     }
 
     @Override
diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/TextClassificationProcessor.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/TextClassificationProcessor.java
index 77e994cbd06f0..a3dd5e619e5e6 100644
--- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/TextClassificationProcessor.java
+++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/TextClassificationProcessor.java
@@ -27,26 +27,20 @@
 
 import static org.elasticsearch.xpack.core.ml.inference.trainedmodel.InferenceConfig.DEFAULT_RESULTS_FIELD;
 
-public class TextClassificationProcessor implements NlpTask.Processor {
+public class TextClassificationProcessor extends NlpTask.Processor {
 
     private final NlpTask.RequestBuilder requestBuilder;
-    private final NlpTokenizer tokenizer;
     private final String[] classLabels;
     private final int numTopClasses;
 
     TextClassificationProcessor(NlpTokenizer tokenizer, TextClassificationConfig config) {
+        super(tokenizer);
         this.requestBuilder = tokenizer.requestBuilder();
         List<String> classLabels = config.getClassificationLabels();
         this.classLabels = classLabels.toArray(String[]::new);
         // negative values are a special case of asking for ALL classes. Since we require the output size to equal the classLabel size
         // This is a nice way of setting the value
         this.numTopClasses = config.getNumTopClasses() < 0 ? this.classLabels.length : config.getNumTopClasses();
-        this.tokenizer = tokenizer;
-    }
-
-    @Override
-    public void close() {
-        tokenizer.close();
     }
 
     @Override
diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/TextEmbeddingProcessor.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/TextEmbeddingProcessor.java
index b26355fa9f473..0671235176ad2 100644
--- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/TextEmbeddingProcessor.java
+++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/TextEmbeddingProcessor.java
@@ -23,19 +23,13 @@
 /**
  * A NLP processor that returns a single double[] output from the model.
  * Assumes that only one tensor is returned via inference
 **/
-public class TextEmbeddingProcessor implements NlpTask.Processor {
+public class TextEmbeddingProcessor extends NlpTask.Processor {
 
     private final NlpTask.RequestBuilder requestBuilder;
-    private final NlpTokenizer tokenizer;
 
     TextEmbeddingProcessor(NlpTokenizer tokenizer, TextEmbeddingConfig config) {
+        super(tokenizer);
         this.requestBuilder = tokenizer.requestBuilder();
-        this.tokenizer = tokenizer;
-    }
-
-    @Override
-    public void close() {
-        tokenizer.close();
     }
 
     @Override
diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/ZeroShotClassificationProcessor.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/ZeroShotClassificationProcessor.java
index 699dd0084f1b7..861506606e21f 100644
--- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/ZeroShotClassificationProcessor.java
+++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/ZeroShotClassificationProcessor.java
@@ -33,9 +33,8 @@
 
 import static org.elasticsearch.xpack.core.ml.inference.trainedmodel.InferenceConfig.DEFAULT_RESULTS_FIELD;
 
-public class ZeroShotClassificationProcessor implements NlpTask.Processor {
+public class ZeroShotClassificationProcessor extends NlpTask.Processor {
 
-    private final NlpTokenizer tokenizer;
     private final int entailmentPos;
     private final int contraPos;
     private final String[] labels;
@@ -44,7 +43,7 @@ public class ZeroShotClassificationProcessor implements NlpTask.Processor {
     private final String resultsField;
 
     ZeroShotClassificationProcessor(NlpTokenizer tokenizer, ZeroShotClassificationConfig config) {
-        this.tokenizer = tokenizer;
+        super(tokenizer);
         List<String> lowerCased = config.getClassificationLabels()
             .stream()
             .map(s -> s.toLowerCase(Locale.ROOT))
@@ -62,11 +61,6 @@ public class ZeroShotClassificationProcessor {
         this.resultsField = config.getResultsField();
     }
 
-    @Override
-    public void close() {
-        tokenizer.close();
-    }
-
     @Override
     public void validateInputs(List<String> inputs) {
         // nothing to validate
@@ -103,51 +97,25 @@ public NlpTask.ResultProcessor getResultProcessor(NlpConfig nlpConfig) {
         return new ResultProcessor(entailmentPos, contraPos, labelsValue, isMultiLabelValue, resultsFieldValue);
     }
 
-    static class RequestBuilder implements NlpTask.RequestBuilder {
-
-        private final NlpTokenizer tokenizer;
-        private final String[] labels;
-        private final String hypothesisTemplate;
-
-        RequestBuilder(NlpTokenizer tokenizer, String[] labels, String hypothesisTemplate) {
-            this.tokenizer = tokenizer;
-            this.labels = labels;
-            this.hypothesisTemplate = hypothesisTemplate;
-        }
+    record RequestBuilder(NlpTokenizer tokenizer, String[] labels, String hypothesisTemplate) implements NlpTask.RequestBuilder {
 
         @Override
         public NlpTask.Request buildRequest(List<String> inputs, String requestId, Tokenization.Truncate truncate) throws IOException {
             if (inputs.size() > 1) {
                 throw ExceptionsHelper.badRequestException("Unable to do zero-shot classification on more than one text input at a time");
             }
-            List<TokenizationResult.Tokenization> tokenizations = new ArrayList<>(labels.length);
+            List<TokenizationResult.Tokens> tokenizations = new ArrayList<>(labels.length);
             for (String label : labels) {
                 tokenizations.add(tokenizer.tokenize(inputs.get(0), LoggerMessageFormat.format(null, hypothesisTemplate, label), truncate));
             }
             TokenizationResult result = tokenizer.buildTokenizationResult(tokenizations);
-            return buildRequest(result, requestId);
-        }
-
-        @Override
-        public NlpTask.Request buildRequest(TokenizationResult tokenizationResult, String requestId) throws IOException {
-            return tokenizer.requestBuilder().buildRequest(tokenizationResult, requestId);
+            return result.buildRequest(requestId, truncate);
         }
     }
 
-    static class ResultProcessor implements NlpTask.ResultProcessor {
-        private final int entailmentPos;
-        private final int contraPos;
-        private final String[] labels;
-        private final boolean isMultiLabel;
-        private final String resultsField;
-
-        ResultProcessor(int entailmentPos, int contraPos, String[] labels, boolean isMultiLabel, String resultsField) {
-            this.entailmentPos = entailmentPos;
-            this.contraPos = contraPos;
-            this.labels = labels;
-            this.isMultiLabel = isMultiLabel;
-            this.resultsField = resultsField;
-        }
+    record ResultProcessor(int entailmentPos, int contraPos, String[] labels, boolean isMultiLabel, String resultsField)
+        implements
+            NlpTask.ResultProcessor {
 
         @Override
         public InferenceResults processResult(TokenizationResult tokenization, PyTorchInferenceResult pyTorchResult) {
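
As a hedged illustration of the zero-shot flow above (the template, labels, and input string are invented; this is not code from the patch), the record-based RequestBuilder tokenizes one premise/hypothesis pair per candidate label and then lets the TokenizationResult serialize itself:

// Illustrative sketch only; assumes an NlpTokenizer built elsewhere.
String hypothesisTemplate = "This example is {}.";       // invented template
String[] labels = new String[] { "sports", "politics" }; // invented labels
List<TokenizationResult.Tokens> tokenizations = new ArrayList<>(labels.length);
for (String label : labels) {
    // one sequence pair per label: (input text, formatted hypothesis)
    tokenizations.add(
        tokenizer.tokenize("The match went to extra time", LoggerMessageFormat.format(null, hypothesisTemplate, label), Tokenization.Truncate.NONE)
    );
}
// the result now knows how to build the process request itself
NlpTask.Request request = tokenizer.buildTokenizationResult(tokenizations).buildRequest("request1", Tokenization.Truncate.NONE);
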
diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/BertTokenizationResult.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/BertTokenizationResult.java
new file mode 100644
index 0000000000000..87429d2bcf2eb
--- /dev/null
+++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/BertTokenizationResult.java
@@ -0,0 +1,118 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.xpack.ml.inference.nlp.tokenizers;
+
+import org.elasticsearch.common.bytes.BytesReference;
+import org.elasticsearch.xcontent.XContentBuilder;
+import org.elasticsearch.xcontent.XContentFactory;
+import org.elasticsearch.xpack.core.ml.inference.trainedmodel.Tokenization;
+import org.elasticsearch.xpack.ml.inference.nlp.NlpTask;
+
+import java.io.IOException;
+import java.util.List;
+import java.util.function.Function;
+import java.util.stream.IntStream;
+import java.util.stream.Stream;
+
+public class BertTokenizationResult extends TokenizationResult {
+
+    static final String REQUEST_ID = "request_id";
+    static final String TOKENS = "tokens";
+    static final String ARG1 = "arg_1";
+    static final String ARG2 = "arg_2";
+    static final String ARG3 = "arg_3";
+
+    public BertTokenizationResult(List<String> vocab, List<Tokens> tokenizations, int padTokenId) {
+        super(vocab, tokenizations, padTokenId);
+    }
+
+    @Override
+    public NlpTask.Request buildRequest(String requestId, Tokenization.Truncate t) throws IOException {
+        XContentBuilder builder = XContentFactory.jsonBuilder();
+        builder.startObject();
+        builder.field(REQUEST_ID, requestId);
+        writePaddedTokens(TOKENS, builder);
+        writeAttentionMask(ARG1, builder);
+        writeTokenTypeIds(ARG2, builder);
+        writePositionIds(ARG3, builder);
+        builder.endObject();
+
+        // BytesReference.bytes closes the builder
+        BytesReference jsonRequest = BytesReference.bytes(builder);
+        return new NlpTask.Request(this, jsonRequest);
+    }
+
+    static class BertTokensBuilder implements TokensBuilder {
+        protected final Stream.Builder<IntStream> tokenIds;
+        protected final Stream.Builder<IntStream> tokenMap;
+        protected final boolean withSpecialTokens;
+        protected final int clsTokenId;
+        protected final int sepTokenId;
+
+        BertTokensBuilder(boolean withSpecialTokens, int clsTokenId, int sepTokenId) {
+            this.withSpecialTokens = withSpecialTokens;
+            this.clsTokenId = clsTokenId;
+            this.sepTokenId = sepTokenId;
+            this.tokenIds = Stream.builder();
+            this.tokenMap = Stream.builder();
+        }
+
+        @Override
+        public TokensBuilder addSequence(List<Integer> wordPieceTokenIds, List<Integer> tokenPositionMap) {
+            if (withSpecialTokens) {
+                tokenIds.add(IntStream.of(clsTokenId));
+                tokenMap.add(IntStream.of(SPECIAL_TOKEN_POSITION));
+            }
+            tokenIds.add(wordPieceTokenIds.stream().mapToInt(Integer::valueOf));
+            tokenMap.add(tokenPositionMap.stream().mapToInt(Integer::valueOf));
+            if (withSpecialTokens) {
+                tokenIds.add(IntStream.of(sepTokenId));
+                tokenMap.add(IntStream.of(SPECIAL_TOKEN_POSITION));
+            }
+            return this;
+        }
+
+        @Override
+        public TokensBuilder addSequencePair(
+            List<Integer> tokenId1s,
+            List<Integer> tokenMap1,
+            List<Integer> tokenId2s,
+            List<Integer> tokenMap2
+        ) {
+            if (withSpecialTokens) {
+                tokenIds.add(IntStream.of(clsTokenId));
+                tokenMap.add(IntStream.of(SPECIAL_TOKEN_POSITION));
+            }
+            tokenIds.add(tokenId1s.stream().mapToInt(Integer::valueOf));
+            tokenMap.add(tokenMap1.stream().mapToInt(Integer::valueOf));
+            int previouslyFinalMap = tokenMap1.get(tokenMap1.size() - 1);
+            if (withSpecialTokens) {
+                tokenIds.add(IntStream.of(sepTokenId));
+                tokenMap.add(IntStream.of(SPECIAL_TOKEN_POSITION));
+            }
+            tokenIds.add(tokenId2s.stream().mapToInt(Integer::valueOf));
+            tokenMap.add(tokenMap2.stream().mapToInt(i -> i + previouslyFinalMap));
+            if (withSpecialTokens) {
+                tokenIds.add(IntStream.of(sepTokenId));
+                tokenMap.add(IntStream.of(SPECIAL_TOKEN_POSITION));
+            }
+            return this;
+        }
+
+        @Override
+        public Tokens build(String input, boolean truncated, List<? extends DelimitedToken> allTokens) {
+            return new Tokens(
+                input,
+                allTokens,
+                truncated,
+                tokenIds.build().flatMapToInt(Function.identity()).toArray(),
+                tokenMap.build().flatMapToInt(Function.identity()).toArray()
+            );
+        }
+    }
+}
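
For orientation, the request body that buildRequest above emits looks roughly like this for a batch of two inputs, where the first input tokenizes to five ids and the second to three. All ids are invented and the pad token id is assumed to be 0; note the attention mask is padded with the pad token id, so the assumption matters:

// Illustrative shape only; ids invented, pad id assumed to be 0.
String exampleRequestBody = """
    {
      "request_id": "request1",
      "tokens": [[12, 0, 1, 3, 13], [12, 5, 13, 0, 0]],
      "arg_1":  [[1, 1, 1, 1, 1],   [1, 1, 1, 0, 0]],
      "arg_2":  [[0, 0, 0, 0, 0],   [0, 0, 0, 0, 0]],
      "arg_3":  [[0, 1, 2, 3, 4],   [0, 1, 2, 3, 4]]
    }""";
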
diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/BertTokenizer.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/BertTokenizer.java
index 7ada856f5dd0a..4b9b63ca57f79 100644
--- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/BertTokenizer.java
+++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/BertTokenizer.java
@@ -11,7 +11,6 @@
 import org.elasticsearch.common.util.set.Sets;
 import org.elasticsearch.xpack.core.ml.inference.trainedmodel.Tokenization;
 import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper;
-import org.elasticsearch.xpack.ml.inference.nlp.BertRequestBuilder;
 import org.elasticsearch.xpack.ml.inference.nlp.NlpTask;
 
 import java.io.IOException;
@@ -23,10 +22,7 @@
 import java.util.Set;
 import java.util.SortedMap;
 import java.util.TreeMap;
-import java.util.function.Function;
 import java.util.stream.Collectors;
-import java.util.stream.IntStream;
-import java.util.stream.Stream;
 
 /**
  * Performs basic tokenization and normalization of input text
@@ -49,17 +45,17 @@ public class BertTokenizer implements NlpTokenizer {
     private static final Set<String> NEVER_SPLIT = Set.of(MASK_TOKEN);
 
     private final WordPieceAnalyzer wordPieceAnalyzer;
-    private final List<String> originalVocab;
+    protected final List<String> originalVocab;
     // TODO Not sure this needs to be a sorted map
     private final SortedMap<String, Integer> vocab;
     protected final boolean withSpecialTokens;
     private final int maxSequenceLength;
-    private final NlpTask.RequestBuilder requestBuilder;
     private final String sepToken;
     protected final int sepTokenId;
     private final String clsToken;
     private final int clsTokenId;
     private final String padToken;
+    protected final int padTokenId;
     private final String maskToken;
     private final String unknownToken;
 
@@ -71,7 +67,6 @@ protected BertTokenizer(
         boolean doStripAccents,
         boolean withSpecialTokens,
         int maxSequenceLength,
-        Function<NlpTokenizer, NlpTask.RequestBuilder> requestBuilderFactory,
         Set<String> neverSplit
     ) {
         this(
@@ -82,7 +77,6 @@ protected BertTokenizer(
             doStripAccents,
             withSpecialTokens,
             maxSequenceLength,
-            requestBuilderFactory,
             Sets.union(neverSplit, NEVER_SPLIT),
             SEPARATOR_TOKEN,
             CLASS_TOKEN,
@@ -100,7 +94,6 @@ protected BertTokenizer(
         boolean doStripAccents,
         boolean withSpecialTokens,
         int maxSequenceLength,
-        Function<NlpTokenizer, NlpTask.RequestBuilder> requestBuilderFactory,
         Set<String> neverSplit,
         String sepToken,
         String clsToken,
@@ -120,13 +113,13 @@ protected BertTokenizer(
         this.vocab = vocab;
         this.withSpecialTokens = withSpecialTokens;
         this.maxSequenceLength = maxSequenceLength;
-        this.requestBuilder = requestBuilderFactory.apply(this);
         if (vocab.containsKey(unknownToken) == false) {
             throw ExceptionsHelper.conflictStatusException("stored vocabulary is missing required [{}] token", unknownToken);
         }
         if (vocab.containsKey(padToken) == false) {
             throw ExceptionsHelper.conflictStatusException("stored vocabulary is missing required [{}] token", padToken);
         }
+        this.padTokenId = vocab.get(padToken);
 
         if (withSpecialTokens) {
             Set<String> missingSpecialTokens = Sets.difference(Set.of(sepToken, clsToken), vocab.keySet());
@@ -188,12 +181,12 @@ public String getMaskToken() {
     }
 
     @Override
-    public TokenizationResult buildTokenizationResult(List<TokenizationResult.Tokenization> tokenizations) {
-        TokenizationResult tokenizationResult = new TokenizationResult(originalVocab);
-        for (TokenizationResult.Tokenization tokenization : tokenizations) {
-            tokenizationResult.addTokenization(tokenization);
-        }
-        return tokenizationResult;
+    public TokenizationResult buildTokenizationResult(List<TokenizationResult.Tokens> tokenizations) {
+        return new BertTokenizationResult(originalVocab, tokenizations, vocab.get(this.padToken));
+    }
+
+    TokenizationResult.TokensBuilder createTokensBuilder(int clsTokenId, int sepTokenId, boolean withSpecialTokens) {
+        return new BertTokenizationResult.BertTokensBuilder(withSpecialTokens, clsTokenId, sepTokenId);
     }
 
     /**
@@ -208,7 +201,7 @@ public TokenizationResult buildTokenizationResult(List<TokenizationResult.Token
      */
     @Override
-    public TokenizationResult.Tokenization tokenize(String seq, Tokenization.Truncate truncate) {
+    public TokenizationResult.Tokens tokenize(String seq, Tokenization.Truncate truncate) {
         var innerResult = innerTokenize(seq);
         List<WordPieceTokenFilter.WordPieceToken> wordPieceTokenIds = innerResult.tokens;
         List<Integer> tokenPositionMap = innerResult.tokenPositionMap;
@@ -229,21 +222,14 @@ public TokenizationResult.Tokenization tokenize(String seq, Tokenization.Truncat
             );
         }
     }
-        BertTokenizationBuilder bertTokenizationBuilder = bertTokenizationBuilder().addTokens(
+        return createTokensBuilder(clsTokenId, sepTokenId, withSpecialTokens).addSequence(
             wordPieceTokenIds.stream().map(WordPieceTokenFilter.WordPieceToken::getEncoding).collect(Collectors.toList()),
             tokenPositionMap
-        ).addEndTokensIfNecessary();
-        return new TokenizationResult.Tokenization(
-            seq,
-            innerResult.tokens,
-            isTruncated,
-            bertTokenizationBuilder.buildIds(),
-            bertTokenizationBuilder.buildMap()
-        );
+        ).build(seq, isTruncated, innerResult.tokens);
     }
 
     @Override
-    public TokenizationResult.Tokenization tokenize(String seq1, String seq2, Tokenization.Truncate truncate) {
+    public TokenizationResult.Tokens tokenize(String seq1, String seq2, Tokenization.Truncate truncate) {
         var innerResultSeq1 = innerTokenize(seq1);
         List<WordPieceTokenFilter.WordPieceToken> wordPieceTokenIdsSeq1 = innerResultSeq1.tokens;
         List<Integer> tokenPositionMapSeq1 = innerResultSeq1.tokenPositionMap;
@@ -302,28 +288,21 @@ public TokenizationResult.Tokenization tokenize(String seq1, String seq2, Tokeni
             );
         }
     }
-        BertTokenizationBuilder bertTokenizationBuilder = bertTokenizationBuilder().addTokens(
-            wordPieceTokenIdsSeq1.stream().map(WordPieceTokenFilter.WordPieceToken::getEncoding).collect(Collectors.toList()),
-            tokenPositionMapSeq1
-        )
-            .addTokens(
-                wordPieceTokenIdsSeq2.stream().map(WordPieceTokenFilter.WordPieceToken::getEncoding).collect(Collectors.toList()),
-                tokenPositionMapSeq2
-            )
-            .addEndTokensIfNecessary();
         List tokens = new ArrayList<>(innerResultSeq1.tokens);
         tokens.addAll(innerResultSeq2.tokens);
-        return new TokenizationResult.Tokenization(
-            seq1 + seq2,
-            tokens,
-            isTruncated,
-            bertTokenizationBuilder.buildIds(),
-            bertTokenizationBuilder.buildMap()
-        );
+        return createTokensBuilder(clsTokenId, sepTokenId, withSpecialTokens).addSequencePair(
+            wordPieceTokenIdsSeq1.stream().map(WordPieceTokenFilter.WordPieceToken::getEncoding).collect(Collectors.toList()),
+            tokenPositionMapSeq1,
+            wordPieceTokenIdsSeq2.stream().map(WordPieceTokenFilter.WordPieceToken::getEncoding).collect(Collectors.toList()),
+            tokenPositionMapSeq2
+        ).build(seq1 + seq2, isTruncated, tokens);
     }
 
-    protected BertTokenizationBuilder bertTokenizationBuilder() {
-        return new BertTokenizationBuilder();
+    @Override
+    public NlpTask.RequestBuilder requestBuilder() {
+        return (inputs, requestId, truncate) -> buildTokenizationResult(
+            inputs.stream().map(s -> tokenize(s, truncate)).collect(Collectors.toList())
+        ).buildRequest(requestId, truncate);
     }
 
     protected int getNumExtraTokensForSeqPair() {
@@ -361,11 +340,6 @@ private static class InnerTokenization {
         }
     }
 
-    @Override
-    public NlpTask.RequestBuilder requestBuilder() {
-        return requestBuilder;
-    }
-
     public int getMaxSequenceLength() {
         return maxSequenceLength;
     }
@@ -374,59 +348,16 @@ public static Builder builder(List vocab, Tokenization tokenization) {
         return new Builder(vocab, tokenization);
     }
 
-    protected class BertTokenizationBuilder {
-        Stream.Builder<IntStream> tokenIds;
-        Stream.Builder<IntStream> tokenMap;
-        int numSeq;
-
-        BertTokenizationBuilder() {
-            tokenIds = Stream.builder();
-            tokenMap = Stream.builder();
-            if (withSpecialTokens) {
-                tokenIds.add(IntStream.of(clsTokenId));
-                tokenMap.add(IntStream.of(SPECIAL_TOKEN_POSITION));
-            }
-        }
-
-        BertTokenizationBuilder addTokens(List<Integer> wordPieceTokenIds, List<Integer> tokenPositionMap) {
-            if (numSeq > 0 && withSpecialTokens) {
-                tokenIds.add(IntStream.of(sepTokenId));
-                tokenMap.add(IntStream.of(SPECIAL_TOKEN_POSITION));
-            }
-            tokenIds.add(wordPieceTokenIds.stream().mapToInt(Integer::valueOf));
-            tokenMap.add(tokenPositionMap.stream().mapToInt(Integer::valueOf));
-            numSeq++;
-            return this;
-        }
-
-        BertTokenizationBuilder addEndTokensIfNecessary() {
-            if (withSpecialTokens) {
-                tokenIds.add(IntStream.of(sepTokenId));
-                tokenMap.add(IntStream.of(SPECIAL_TOKEN_POSITION));
-            }
-            return this;
-        }
-
-        int[] buildIds() {
-            return tokenIds.build().flatMapToInt(Function.identity()).toArray();
-        }
-
-        int[] buildMap() {
-            return tokenMap.build().flatMapToInt(Function.identity()).toArray();
-        }
-    }
-
     public static class Builder {
 
         protected final List<String> originalVocab;
         protected final SortedMap<String, Integer> vocab;
-        protected boolean doLowerCase = false;
+        protected boolean doLowerCase;
         protected boolean doTokenizeCjKChars = true;
-        protected boolean withSpecialTokens = true;
+        protected boolean withSpecialTokens;
        protected int maxSequenceLength;
         protected Boolean doStripAccents = null;
         protected Set<String> neverSplit;
-        protected Function<NlpTokenizer, NlpTask.RequestBuilder> requestBuilderFactory = BertRequestBuilder::new;
 
         protected Builder(List<String> vocab, Tokenization tokenization) {
            this.originalVocab = vocab;
@@ -479,11 +410,6 @@ public Builder setWithSpecialTokens(boolean withSpecialTokens) {
             return this;
         }
 
-        public Builder setRequestBuilderFactory(Function<NlpTokenizer, NlpTask.RequestBuilder> requestBuilderFactory) {
-            this.requestBuilderFactory = requestBuilderFactory;
-            return this;
-        }
-
         public BertTokenizer build() {
             // if not set strip accents defaults to the value of doLowerCase
             if (doStripAccents == null) {
@@ -502,7 +428,6 @@ public BertTokenizer build() {
                 doStripAccents,
                 withSpecialTokens,
                 maxSequenceLength,
-                requestBuilderFactory,
                 neverSplit
             );
         }
diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/CharSeqTokenTrieNode.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/CharSeqTokenTrieNode.java
index 9253759e41232..7b5514b692285 100644
--- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/CharSeqTokenTrieNode.java
+++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/CharSeqTokenTrieNode.java
@@ -16,13 +16,11 @@
 import java.util.List;
 import java.util.Objects;
 
-public class CharSeqTokenTrieNode {
+public record CharSeqTokenTrieNode(CharArrayMap<CharSeqTokenTrieNode> children) {
 
     public static final CharSeqTokenTrieNode EMPTY = new CharSeqTokenTrieNode(new CharArrayMap<>(0, false));
 
-    private final CharArrayMap<CharSeqTokenTrieNode> children;
-
-    private CharSeqTokenTrieNode(CharArrayMap<CharSeqTokenTrieNode> children) {
+    public CharSeqTokenTrieNode(CharArrayMap<CharSeqTokenTrieNode> children) {
         this.children = Objects.requireNonNull(children);
     }
 
diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/MPNetTokenizationResult.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/MPNetTokenizationResult.java
new file mode 100644
index 0000000000000..44cd29309f648
--- /dev/null
+++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/MPNetTokenizationResult.java
@@ -0,0 +1,78 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.xpack.ml.inference.nlp.tokenizers;
+
+import org.elasticsearch.common.bytes.BytesReference;
+import org.elasticsearch.xcontent.XContentBuilder;
+import org.elasticsearch.xcontent.XContentFactory;
+import org.elasticsearch.xpack.core.ml.inference.trainedmodel.Tokenization;
+import org.elasticsearch.xpack.ml.inference.nlp.NlpTask;
+
+import java.io.IOException;
+import java.util.List;
+import java.util.stream.IntStream;
+
+public class MPNetTokenizationResult extends TokenizationResult {
+
+    static final String REQUEST_ID = "request_id";
+    static final String TOKENS = "tokens";
+    static final String ARG1 = "arg_1";
+
+    public MPNetTokenizationResult(List<String> vocab, List<Tokens> tokenizations, int padTokenId) {
+        super(vocab, tokenizations, padTokenId);
+    }
+
+    @Override
+    public NlpTask.Request buildRequest(String requestId, Tokenization.Truncate t) throws IOException {
+        XContentBuilder builder = XContentFactory.jsonBuilder();
+        builder.startObject();
+        builder.field(REQUEST_ID, requestId);
+        writePaddedTokens(TOKENS, builder);
+        writeAttentionMask(ARG1, builder);
+        builder.endObject();
+
+        // BytesReference.bytes closes the builder
+        BytesReference jsonRequest = BytesReference.bytes(builder);
+        return new NlpTask.Request(this, jsonRequest);
+    }
+
+    static class MPNetTokensBuilder extends BertTokenizationResult.BertTokensBuilder {
+
+        MPNetTokensBuilder(boolean withSpecialTokens, int clsTokenId, int sepTokenId) {
+            super(withSpecialTokens, clsTokenId, sepTokenId);
+        }
+
+        @Override
+        public TokensBuilder addSequencePair(
+            List<Integer> tokenId1s,
+            List<Integer> tokenMap1,
+            List<Integer> tokenId2s,
+            List<Integer> tokenMap2
+        ) {
+            if (withSpecialTokens) {
+                tokenIds.add(IntStream.of(clsTokenId));
+                tokenMap.add(IntStream.of(SPECIAL_TOKEN_POSITION));
+            }
+            tokenIds.add(tokenId1s.stream().mapToInt(Integer::valueOf));
+            tokenMap.add(tokenMap1.stream().mapToInt(Integer::valueOf));
+            int previouslyFinalMap = tokenMap1.get(tokenMap1.size() - 1);
+            // MPNet adds two `</s>` between sequence pairs
+            if (withSpecialTokens) {
+                tokenIds.add(IntStream.of(sepTokenId, sepTokenId));
+                tokenMap.add(IntStream.of(SPECIAL_TOKEN_POSITION, SPECIAL_TOKEN_POSITION));
+            }
+            tokenIds.add(tokenId2s.stream().mapToInt(Integer::valueOf));
+            tokenMap.add(tokenMap2.stream().mapToInt(i -> i + previouslyFinalMap));
+            if (withSpecialTokens) {
+                tokenIds.add(IntStream.of(sepTokenId));
+                tokenMap.add(IntStream.of(SPECIAL_TOKEN_POSITION));
+            }
+            return this;
+        }
+    }
+}
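
A quick sketch of the layout difference this builder encodes, with invented token ids (CLS=1, SEP=2). BERT emits a single separator between paired sequences, MPNet emits two; the second sequence's token map is still offset by the last position of the first:

// Illustrative only; ids invented. Assumes package-local access to the builders.
//   BertTokensBuilder.addSequencePair  -> [1, a1, a2, 2, b1, b2, 2]
//   MPNetTokensBuilder.addSequencePair -> [1, a1, a2, 2, 2, b1, b2, 2]
TokenizationResult.TokensBuilder builder = new MPNetTokenizationResult.MPNetTokensBuilder(true, 1, 2);
TokenizationResult.Tokens tokens = builder
    .addSequencePair(List.of(5, 6), List.of(0, 1), List.of(7), List.of(0))
    .build("ab", false, List.of());
// tokens.tokenIds() -> [1, 5, 6, 2, 2, 7, 2]
// tokens.tokenMap() -> [-1, 0, 1, -1, -1, 1, -1]
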
diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/MPNetTokenizer.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/MPNetTokenizer.java
index e2468041b8df0..5639cac1aa758 100644
--- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/MPNetTokenizer.java
+++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/MPNetTokenizer.java
@@ -8,7 +8,6 @@
 
 import org.elasticsearch.common.util.set.Sets;
 import org.elasticsearch.xpack.core.ml.inference.trainedmodel.Tokenization;
-import org.elasticsearch.xpack.ml.inference.nlp.MPNetRequestBuilder;
 import org.elasticsearch.xpack.ml.inference.nlp.NlpTask;
 
 import java.util.Collections;
@@ -16,8 +15,7 @@
 import java.util.Set;
 import java.util.SortedMap;
 import java.util.TreeMap;
-import java.util.function.Function;
-import java.util.stream.IntStream;
+import java.util.stream.Collectors;
 
 /**
  * Performs basic tokenization and normalization of input text
@@ -41,7 +39,6 @@ protected MPNetTokenizer(
         boolean doStripAccents,
         boolean withSpecialTokens,
         int maxSequenceLength,
-        Function<NlpTokenizer, NlpTask.RequestBuilder> requestBuilderFactory,
         Set<String> neverSplit
     ) {
         super(
@@ -52,7 +49,6 @@ protected MPNetTokenizer(
             doStripAccents,
             withSpecialTokens,
             maxSequenceLength,
-            requestBuilderFactory,
             Sets.union(neverSplit, NEVER_SPLIT),
             SEPARATOR_TOKEN,
             CLASS_TOKEN,
@@ -67,25 +63,20 @@ protected int getNumExtraTokensForSeqPair() {
         return 4;
     }
 
-    @Override
-    protected BertTokenizationBuilder bertTokenizationBuilder() {
-        return new MPNetTokenizationBuilder();
+    TokenizationResult.TokensBuilder createTokensBuilder(int clsTokenId, int sepTokenId, boolean withSpecialTokens) {
+        return new MPNetTokenizationResult.MPNetTokensBuilder(withSpecialTokens, clsTokenId, sepTokenId);
     }
 
-    protected class MPNetTokenizationBuilder extends BertTokenizationBuilder {
-
-        @Override
-        BertTokenizationBuilder addTokens(List<Integer> wordPieceTokenIds, List<Integer> tokenPositionMap) {
-            if (numSeq > 0 && withSpecialTokens) {
-                tokenIds.add(IntStream.of(sepTokenId, sepTokenId));
-                tokenMap.add(IntStream.of(SPECIAL_TOKEN_POSITION, SPECIAL_TOKEN_POSITION));
-            }
-            tokenIds.add(wordPieceTokenIds.stream().mapToInt(Integer::valueOf));
-            tokenMap.add(tokenPositionMap.stream().mapToInt(Integer::valueOf));
-            numSeq++;
-            return this;
-        }
+    @Override
+    public NlpTask.RequestBuilder requestBuilder() {
+        return (inputs, requestId, truncate) -> buildTokenizationResult(
+            inputs.stream().map(s -> tokenize(s, truncate)).collect(Collectors.toList())
+        ).buildRequest(requestId, truncate);
+    }
 
+    @Override
+    public TokenizationResult buildTokenizationResult(List<TokenizationResult.Tokens> tokenizations) {
+        return new MPNetTokenizationResult(originalVocab, tokenizations, getPadTokenId().orElseThrow());
     }
 
     public static Builder mpBuilder(List<String> vocab, Tokenization tokenization) {
@@ -96,13 +87,12 @@ public static class Builder {
 
         protected final List<String> originalVocab;
         protected final SortedMap<String, Integer> vocab;
-        protected boolean doLowerCase = false;
+        protected boolean doLowerCase;
         protected boolean doTokenizeCjKChars = true;
-        protected boolean withSpecialTokens = true;
+        protected boolean withSpecialTokens;
         protected int maxSequenceLength;
         protected Boolean doStripAccents = null;
         protected Set<String> neverSplit;
-        protected Function<NlpTokenizer, NlpTask.RequestBuilder> requestBuilderFactory = MPNetRequestBuilder::new;
 
         protected Builder(List<String> vocab, Tokenization tokenization) {
             this.originalVocab = vocab;
@@ -155,11 +145,6 @@ public Builder setWithSpecialTokens(boolean withSpecialTokens) {
             return this;
         }
 
-        public Builder setRequestBuilderFactory(Function<NlpTokenizer, NlpTask.RequestBuilder> requestBuilderFactory) {
-            this.requestBuilderFactory = requestBuilderFactory;
-            return this;
-        }
-
         public MPNetTokenizer build() {
             // if not set strip accents defaults to the value of doLowerCase
             if (doStripAccents == null) {
@@ -178,7 +163,6 @@ public MPNetTokenizer build() {
                 doStripAccents,
                 withSpecialTokens,
                 maxSequenceLength,
-                requestBuilderFactory,
                 neverSplit
             );
         }
diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/NlpTokenizer.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/NlpTokenizer.java
index adf303667065b..7eab8dfcf8f50 100644
--- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/NlpTokenizer.java
+++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/NlpTokenizer.java
@@ -12,8 +12,6 @@
 import org.elasticsearch.xpack.core.ml.inference.trainedmodel.MPNetTokenization;
 import org.elasticsearch.xpack.core.ml.inference.trainedmodel.Tokenization;
 import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper;
-import org.elasticsearch.xpack.ml.inference.nlp.BertRequestBuilder;
-import org.elasticsearch.xpack.ml.inference.nlp.MPNetRequestBuilder;
 import org.elasticsearch.xpack.ml.inference.nlp.NlpTask;
 import org.elasticsearch.xpack.ml.inference.nlp.Vocabulary;
 
@@ -25,11 +23,11 @@
 
 public interface NlpTokenizer extends Releasable {
 
-    TokenizationResult buildTokenizationResult(List<TokenizationResult.Tokenization> tokenizations);
+    TokenizationResult buildTokenizationResult(List<TokenizationResult.Tokens> tokenizations);
 
-    TokenizationResult.Tokenization tokenize(String seq, Tokenization.Truncate truncate);
+    TokenizationResult.Tokens tokenize(String seq, Tokenization.Truncate truncate);
 
-    TokenizationResult.Tokenization tokenize(String seq1, String seq2, Tokenization.Truncate truncate);
+    TokenizationResult.Tokens tokenize(String seq1, String seq2, Tokenization.Truncate truncate);
 
     NlpTask.RequestBuilder requestBuilder();
 
@@ -45,10 +43,10 @@ static NlpTokenizer build(Vocabulary vocabulary, Tokenization params) {
         ExceptionsHelper.requireNonNull(params, TOKENIZATION);
         ExceptionsHelper.requireNonNull(vocabulary, VOCABULARY);
         if (params instanceof BertTokenization) {
-            return BertTokenizer.builder(vocabulary.get(), params).setRequestBuilderFactory(BertRequestBuilder::new).build();
+            return BertTokenizer.builder(vocabulary.get(), params).build();
         }
         if (params instanceof MPNetTokenization) {
-            return MPNetTokenizer.mpBuilder(vocabulary.get(), params).setRequestBuilderFactory(MPNetRequestBuilder::new).build();
+            return MPNetTokenizer.mpBuilder(vocabulary.get(), params).build();
         }
         throw new IllegalArgumentException("unknown tokenization type [" + params.getName() + "]");
     }
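
A hedged sketch of the call path after this simplification; `vocabulary` is assumed to be a previously loaded Vocabulary instance, and exception handling is elided:

// Sketch only: tokenizer construction no longer wires in a request-builder factory.
try (NlpTokenizer tokenizer = NlpTokenizer.build(vocabulary, new BertTokenization(null, null, 512, null))) {
    NlpTask.Request request = tokenizer.requestBuilder()
        .buildRequest(List.of("Elasticsearch fun"), "request1", Tokenization.Truncate.NONE); // throws IOException
    // request.processInput() carries the JSON payload for the PyTorch process
}
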
diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/TokenizationResult.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/TokenizationResult.java
index c13df493d05ce..30ceb1c437a51 100644
--- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/TokenizationResult.java
+++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/TokenizationResult.java
@@ -7,105 +7,150 @@
 
 package org.elasticsearch.xpack.ml.inference.nlp.tokenizers;
 
-import java.util.ArrayList;
+import org.elasticsearch.xcontent.XContentBuilder;
+import org.elasticsearch.xpack.core.ml.inference.trainedmodel.Tokenization;
+import org.elasticsearch.xpack.ml.inference.nlp.NlpTask;
+
+import java.io.IOException;
 import java.util.List;
+import java.util.OptionalInt;
+import java.util.stream.IntStream;
 
-public class TokenizationResult {
+public abstract class TokenizationResult {
+    public static final int SPECIAL_TOKEN_POSITION = -1;
 
     private final List<String> vocab;
-    private final List<Tokenization> tokenizations = new ArrayList<>();
-    private int maxLength;
+    private final List<Tokens> tokens;
+    private final int maxLength;
+    private final int padTokenId;
 
-    public TokenizationResult(List<String> vocab) {
+    protected TokenizationResult(List<String> vocab, List<Tokens> tokenizations, int padTokenId) {
         this.vocab = vocab;
-        this.maxLength = -1;
+        this.tokens = tokenizations;
+        this.padTokenId = padTokenId;
+        int max = 0;
+        for (Tokens tokenization : tokenizations) {
+            max = Math.max(tokenization.tokenIds.length, max);
+        }
+        this.maxLength = max;
     }
 
-    public boolean anyTruncated() {
-        return tokenizations.stream().anyMatch(Tokenization::isTruncated);
+    List<Tokens> getTokens() {
+        return tokens;
     }
 
     public String getFromVocab(int tokenId) {
         return vocab.get(tokenId);
     }
 
-    public List<Tokenization> getTokenizations() {
-        return tokenizations;
+    public Tokens getTokenization(int tokenizationIndex) {
+        return tokens.get(tokenizationIndex);
     }
 
-    public void addTokenization(
-        String input,
-        boolean isTruncated,
-        List<WordPieceTokenFilter.WordPieceToken> tokens,
-        int[] tokenIds,
-        int[] tokenMap
-    ) {
-        maxLength = Math.max(maxLength, tokenIds.length);
-        tokenizations.add(new Tokenization(input, tokens, isTruncated, tokenIds, tokenMap));
+    public boolean anyTruncated() {
+        return tokens.stream().anyMatch(Tokens::truncated);
     }
 
-    public void addTokenization(Tokenization tokenization) {
-        maxLength = Math.max(maxLength, tokenization.tokenIds.length);
-        tokenizations.add(tokenization);
+    public boolean isEmpty() {
+        return this.tokens.isEmpty() || this.tokens.stream().allMatch(t -> t.tokenIds.length == 0);
     }
 
-    public int getLongestSequenceLength() {
-        return maxLength;
+    public abstract NlpTask.Request buildRequest(String requestId, Tokenization.Truncate t) throws IOException;
+
+    protected void writePaddedTokens(String fieldName, XContentBuilder builder) throws IOException {
+        builder.startArray(fieldName);
+        for (var inputTokens : tokens) {
+            builder.startArray();
+
+            // Note, cannot write the array directly as the internal builder code writes start/end array values
+            for (int t : inputTokens.tokenIds) {
+                builder.value(t);
+            }
+            for (int i = inputTokens.tokenIds.length; i < maxLength; i++) {
+                builder.value(padTokenId);
+            }
+            builder.endArray();
+        }
+        builder.endArray();
     }
 
-    public static class Tokenization {
-
-        private final String input;
-        private final List<WordPieceTokenFilter.WordPieceToken> tokens;
-        private final int[] tokenIds;
-        private final int[] tokenMap;
-        private final boolean truncated;
-
-        public Tokenization(
-            String input,
-            List<WordPieceTokenFilter.WordPieceToken> tokens,
-            boolean truncated,
-            int[] tokenIds,
-            int[] tokenMap
-        ) {
-            assert tokenIds.length == tokenMap.length;
-            this.input = input;
-            this.tokens = tokens;
-            this.tokenIds = tokenIds;
-            this.tokenMap = tokenMap;
-            this.truncated = truncated;
+    protected void writeAttentionMask(String fieldName, XContentBuilder builder) throws IOException {
+        builder.startArray(fieldName);
+        for (var inputTokens : tokens) {
+            builder.startArray();
+            // Note, cannot write the array directly as the internal builder code writes start/end array values
+            for (int ignored : inputTokens.tokenIds) {
+                builder.value(1);
+            }
+            for (int i = inputTokens.tokenIds.length; i < maxLength; i++) {
+                builder.value(padTokenId);
+            }
+            builder.endArray();
         }
+        builder.endArray();
+    }
 
-        /**
-         * The integer values of the tokens}
-         *
-         * @return A list of token Ids
-         */
-        public int[] getTokenIds() {
-            return tokenIds;
+    protected void writeTokenTypeIds(String fieldName, XContentBuilder builder) throws IOException {
+        builder.startArray(fieldName);
+        for (int i = 0; i < tokens.size(); i++) {
+            builder.startArray();
+            for (int j = 0; j < maxLength; j++) {
+                builder.value(0);
+            }
+            builder.endArray();
         }
+        builder.endArray();
+    }
 
-        /**
-         * Maps the token position to the position in the source text.
-         * Source words may be divided into more than one token so more
-         * than one token can map back to the source token
-         *
-         * @return Map of source token to
-         */
-        public int[] getTokenMap() {
-            return tokenMap;
+    protected void writePositionIds(String fieldName, XContentBuilder builder) throws IOException {
+        builder.startArray(fieldName);
+        for (int i = 0; i < tokens.size(); i++) {
+            builder.startArray();
+            for (int j = 0; j < maxLength; j++) {
+                builder.value(j);
+            }
+            builder.endArray();
         }
+        builder.endArray();
+    }
 
-        public String getInput() {
-            return input;
-        }
+    public record Tokens(String input, List<? extends DelimitedToken> tokens, boolean truncated, int[] tokenIds, int[] tokenMap) {
 
-        public List<WordPieceTokenFilter.WordPieceToken> getTokens() {
-            return tokens;
+        public Tokens {
+            assert tokenIds.length == tokenMap.length;
         }
 
-        public boolean isTruncated() {
-            return truncated;
+        public OptionalInt getTokenIndex(int token) {
+            return IntStream.range(0, tokenIds.length).filter(tokenIndex -> token == tokenIds[tokenIndex]).findFirst();
         }
     }
+
+    interface TokensBuilder {
+        /**
+         * Adds tokens to the token builder
+         * @param tokenIds Token ids without special tokens added
+         * @param tokenMap Token map without considering special tokens
+         * @return The builder object
+         */
+        TokensBuilder addSequence(List<Integer> tokenIds, List<Integer> tokenMap);
+
+        /**
+         * Adds an encoded sequence pair to the token builder
+         * @param tokenId1s Sequence 1 ids
+         * @param tokenMap1 Sequence 1 token mappings
+         * @param tokenId2s Sequence 2 ids
+         * @param tokenMap2 Sequence 2 token map
+         * @return The builder object
+         */
+        TokensBuilder addSequencePair(List<Integer> tokenId1s, List<Integer> tokenMap1, List<Integer> tokenId2s, List<Integer> tokenMap2);
+
+        /**
+         * Builds the token object
+         * @param input the original sequence input, may be a simple concatenation of a sequence pair
+         * @param truncated Was this truncated when tokenized
+         * @param allTokens All the tokens with their values and offsets
+         * @return A new Tokens object
+         */
+        Tokens build(String input, boolean truncated, List<? extends DelimitedToken> allTokens);
+    }
 }
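
The new Tokens record also centralizes the special-token lookup that fill-mask needs. A small sketch using the same invented ids the tests below use (the token list is left empty for brevity):

// Sketch: find the position of the [MASK] token id (6 here, invented) in a tokenization.
TokenizationResult.Tokens tokens = new TokenizationResult.Tokens(
    "The capital of [MASK] is Paris",
    List.of(),
    false,
    new int[] { 0, 1, 2, 6, 4, 5 },
    new int[] { 0, 1, 2, 3, 4, 5 }
);
OptionalInt maskIndex = tokens.getTokenIndex(6); // OptionalInt.of(3)
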
diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/WordPieceTokenFilter.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/WordPieceTokenFilter.java
index 2f6934d238736..eef885c5afb76 100644
--- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/WordPieceTokenFilter.java
+++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/WordPieceTokenFilter.java
@@ -19,11 +19,12 @@
 import java.util.ArrayList;
 import java.util.LinkedList;
 import java.util.List;
+import java.util.Objects;
 
 public final class WordPieceTokenFilter extends TokenFilter {
-    protected final LinkedList<WordPieceToken> tokens;
+    private final LinkedList<WordPieceToken> tokens;
     private final CharTermAttribute termAtt = addAttribute(CharTermAttribute.class);
-    protected final OffsetAttribute offsetAtt = addAttribute(OffsetAttribute.class);
+    private final OffsetAttribute offsetAtt = addAttribute(OffsetAttribute.class);
     private final PositionIncrementAttribute posIncAtt = addAttribute(PositionIncrementAttribute.class);
 
     private static final CharSequence CONTINUATION = "##";
@@ -105,15 +106,14 @@ public boolean incrementToken() throws IOException {
         if (input.incrementToken()) {
             if (neverSplit.contains(termAtt)) {
                 Integer maybeTokenized = vocabulary.get(termAtt);
-                if (maybeTokenized == null) {
-                    tokenizedValues.add(
-                        new WordPieceToken(termAtt.toString(), tokenizedUnknown, offsetAtt.startOffset(), offsetAtt.endOffset())
-                    );
-                } else {
-                    tokenizedValues.add(
-                        new WordPieceToken(termAtt.toString(), maybeTokenized, offsetAtt.startOffset(), offsetAtt.endOffset())
-                    );
-                }
+                tokenizedValues.add(
+                    new WordPieceToken(
+                        termAtt.toString(),
+                        Objects.requireNonNullElse(maybeTokenized, tokenizedUnknown),
+                        offsetAtt.startOffset(),
+                        offsetAtt.endOffset()
+                    )
+                );
                 return true;
             }
             if (termAtt.length() > maxInputCharsPerWord) {
diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/pytorch/results/PyTorchInferenceResult.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/pytorch/results/PyTorchInferenceResult.java
index 0ac37ecc633b7..75cb77fb12b05 100644
--- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/pytorch/results/PyTorchInferenceResult.java
+++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/pytorch/results/PyTorchInferenceResult.java
@@ -91,14 +91,10 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws
         builder.field(REQUEST_ID.getPreferredName(), requestId);
         if (inference != null) {
             builder.startArray(INFERENCE.getPreferredName());
-            for (int i = 0; i < inference.length; i++) {
+            for (double[][] doubles : inference) {
                 builder.startArray();
                 for (int j = 0; j < inference[0].length; j++) {
-                    builder.startArray();
-                    for (int k = 0; k < inference[0][0].length; k++) {
-                        builder.value(inference[i][j][k]);
-                    }
-                    builder.endArray();
+                    builder.value(doubles[j]);
                 }
                 builder.endArray();
             }
diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/BertRequestBuilderTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/BertTokenizationResultTests.java
similarity index 91%
rename from x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/BertRequestBuilderTests.java
rename to x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/BertTokenizationResultTests.java
index a9a3227c43971..2d01ddefa5833 100644
--- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/BertRequestBuilderTests.java
+++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/BertTokenizationResultTests.java
@@ -26,7 +26,7 @@
 import static org.hamcrest.Matchers.containsString;
 import static org.hamcrest.Matchers.hasSize;
 
-public class BertRequestBuilderTests extends ESTestCase {
+public class BertTokenizationResultTests extends ESTestCase {
 
     private BertTokenizer tokenizer;
 
@@ -40,9 +40,9 @@ public void closeIt() {
     public void testBuildRequest() throws IOException {
         tokenizer = BertTokenizer.builder(TEST_CASED_VOCAB, new BertTokenization(null, null, 512, null)).build();
 
-        BertRequestBuilder requestBuilder = new BertRequestBuilder(tokenizer);
+        var requestBuilder = tokenizer.requestBuilder();
         NlpTask.Request request = requestBuilder.buildRequest(List.of("Elasticsearch fun"), "request1", Tokenization.Truncate.NONE);
-        Map<String, Object> jsonDocAsMap = XContentHelper.convertToMap(request.processInput, true, XContentType.JSON).v2();
+        Map<String, Object> jsonDocAsMap = XContentHelper.convertToMap(request.processInput(), true, XContentType.JSON).v2();
 
         assertThat(jsonDocAsMap.keySet(), hasSize(5));
         assertEquals("request1", jsonDocAsMap.get("request_id"));
@@ -52,7 +52,6 @@ public void testBuildRequest() throws IOException {
         assertEquals(Arrays.asList(0, 1, 2, 3, 4), firstListItemFromMap("arg_3", jsonDocAsMap));
     }
 
-    @SuppressWarnings("unchecked")
     private List<Integer> firstListItemFromMap(String name, Map<String, Object> jsonDocAsMap) {
         return nthListItemFromMap(name, 0, jsonDocAsMap);
     }
@@ -65,7 +64,7 @@ public static List<Integer> nthListItemFromMap(String name, int n, Map<String,
     public void testInputTooLarge() throws IOException {
         tokenizer = BertTokenizer.builder(TEST_CASED_VOCAB, new BertTokenization(null, null, 5, null)).build();
         {
-            BertRequestBuilder requestBuilder = new BertRequestBuilder(tokenizer);
+            var requestBuilder = tokenizer.requestBuilder();
             ElasticsearchStatusException e = expectThrows(
                 ElasticsearchStatusException.class,
                 () -> requestBuilder.buildRequest(
@@ -81,7 +80,7 @@ public void testInputTooLarge() throws IOException {
             );
         }
         {
-            BertRequestBuilder requestBuilder = new BertRequestBuilder(tokenizer);
+            var requestBuilder = tokenizer.requestBuilder();
             // input will become 3 tokens + the Class and Separator token = 5 which is
             // our max sequence length
             requestBuilder.buildRequest(Collections.singletonList("Elasticsearch fun"), "request1", Tokenization.Truncate.NONE);
@@ -92,13 +91,13 @@ public void testBatchWithPadding() throws IOException {
         tokenizer = BertTokenizer.builder(TEST_CASED_VOCAB, new BertTokenization(null, null, 512, null)).build();
 
-        BertRequestBuilder requestBuilder = new BertRequestBuilder(tokenizer);
+        var requestBuilder = tokenizer.requestBuilder();
         NlpTask.Request request = requestBuilder.buildRequest(
             List.of("Elasticsearch", "my little red car", "Godzilla day"),
             "request1",
             Tokenization.Truncate.NONE
         );
-        Map<String, Object> jsonDocAsMap = XContentHelper.convertToMap(request.processInput, true, XContentType.JSON).v2();
+        Map<String, Object> jsonDocAsMap = XContentHelper.convertToMap(request.processInput(), true, XContentType.JSON).v2();
 
         assertThat(jsonDocAsMap.keySet(), hasSize(5));
         assertThat((List<List<Integer>>) jsonDocAsMap.get("tokens"), hasSize(3));
diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/FillMaskProcessorTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/FillMaskProcessorTests.java
index fe87963271c93..a4f591ec43b36 100644
--- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/FillMaskProcessorTests.java
+++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/FillMaskProcessorTests.java
@@ -14,13 +14,13 @@
 import org.elasticsearch.xpack.core.ml.inference.results.TopClassEntry;
 import org.elasticsearch.xpack.core.ml.inference.trainedmodel.FillMaskConfig;
 import org.elasticsearch.xpack.core.ml.inference.trainedmodel.VocabularyConfig;
+import org.elasticsearch.xpack.ml.inference.nlp.tokenizers.BertTokenizationResult;
 import org.elasticsearch.xpack.ml.inference.nlp.tokenizers.BertTokenizer;
 import org.elasticsearch.xpack.ml.inference.nlp.tokenizers.TokenizationResult;
 import org.elasticsearch.xpack.ml.inference.nlp.tokenizers.WordPieceTokenFilter;
 import org.elasticsearch.xpack.ml.inference.pytorch.results.PyTorchInferenceResult;
 
 import java.util.Arrays;
-import java.util.Collections;
 import java.util.List;
 import java.util.OptionalInt;
 
@@ -40,25 +40,28 @@ public void testProcessResults() {
                 { 0, 0, 0, 0, 0, 0, 0 }, // The
                 { 0, 0, 0, 0, 0, 0, 0 }, // capital
                 { 0, 0, 0, 0, 0, 0, 0 }, // of
-                { 0.01, 0.01, 0.3, 0.1, 0.01, 0.2, 1.2 }, // MASK
+                { 0.01, 0.01, 0.3, 0.01, 0.2, 1.2, 0.1 }, // MASK
                 { 0, 0, 0, 0, 0, 0, 0 }, // is
                 { 0, 0, 0, 0, 0, 0, 0 } // paris
             } };
 
         String input = "The capital of " + BertTokenizer.MASK_TOKEN + " is Paris";
-        List<String> vocab = Arrays.asList("The", "capital", "of", BertTokenizer.MASK_TOKEN, "is", "Paris", "France");
+        List<String> vocab = Arrays.asList("The", "capital", "of", "is", "Paris", "France", BertTokenizer.MASK_TOKEN);
 
         List<WordPieceTokenFilter.WordPieceToken> tokens = List.of();
         int[] tokenMap = new int[] { 0, 1, 2, 3, 4, 5 };
-        int[] tokenIds = new int[] { 0, 1, 2, 3, 4, 5 };
+        int[] tokenIds = new int[] { 0, 1, 2, 6, 4, 5 };
 
-        TokenizationResult tokenization = new TokenizationResult(vocab);
-        tokenization.addTokenization(input, false, tokens, tokenIds, tokenMap);
+        TokenizationResult tokenization = new BertTokenizationResult(
+            vocab,
+            List.of(new TokenizationResult.Tokens(input, tokens, false, tokenIds, tokenMap)),
+            0
+        );
 
         BertTokenizer tokenizer = mock(BertTokenizer.class);
         when(tokenizer.getMaskToken()).thenReturn(BertTokenizer.MASK_TOKEN);
-        when(tokenizer.getMaskTokenId()).thenReturn(OptionalInt.of(3));
+        when(tokenizer.getMaskTokenId()).thenReturn(OptionalInt.of(6));
 
         String resultsField = randomAlphaOfLength(10);
         FillMaskResults result = (FillMaskResults) FillMaskProcessor.processResult(
@@ -84,8 +87,11 @@ public void testProcessResults_GivenMissingTokens() {
         BertTokenizer tokenizer = mock(BertTokenizer.class);
         when(tokenizer.getMaskToken()).thenReturn("[MASK]");
 
-        TokenizationResult tokenization = new TokenizationResult(Collections.emptyList());
-        tokenization.addTokenization("", false, Collections.emptyList(), new int[] {}, new int[] {});
+        TokenizationResult tokenization = new BertTokenizationResult(
+            List.of(),
+            List.of(new TokenizationResult.Tokens("", List.of(), false, new int[0], new int[0])),
+            0
+        );
 
         PyTorchInferenceResult pyTorchResult = new PyTorchInferenceResult("1", new double[][][] { { {} } }, 0L, null);
         expectThrows(
diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/MPNetRequestBuilderTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/MPNetTokenizationResultTests.java
similarity index 89%
rename from x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/MPNetRequestBuilderTests.java
rename to x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/MPNetTokenizationResultTests.java
index 3779ab43262d9..0a2907d3c67f6 100644
--- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/MPNetRequestBuilderTests.java
+++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/MPNetTokenizationResultTests.java
@@ -26,7 +26,7 @@
 import static org.hamcrest.Matchers.containsString;
 import static org.hamcrest.Matchers.hasSize;
 
-public class MPNetRequestBuilderTests extends ESTestCase {
+public class MPNetTokenizationResultTests extends ESTestCase {
     private MPNetTokenizer tokenizer;
 
     @After
@@ -39,9 +39,9 @@ public void closeIt() {
     public void testBuildRequest() throws IOException {
         tokenizer = MPNetTokenizer.mpBuilder(TEST_CASED_VOCAB, new MPNetTokenization(null, null, 512, null)).build();
 
-        MPNetRequestBuilder requestBuilder = new MPNetRequestBuilder(tokenizer);
+        var requestBuilder = tokenizer.requestBuilder();
         NlpTask.Request request = requestBuilder.buildRequest(List.of("Elasticsearch fun"), "request1", Tokenization.Truncate.NONE);
-        Map<String, Object> jsonDocAsMap = XContentHelper.convertToMap(request.processInput, true, XContentType.JSON).v2();
+        Map<String, Object> jsonDocAsMap = XContentHelper.convertToMap(request.processInput(), true, XContentType.JSON).v2();
 
         assertThat(jsonDocAsMap.keySet(), hasSize(3));
         assertEquals("request1", jsonDocAsMap.get("request_id"));
@@ -49,7 +49,6 @@ public void testBuildRequest() throws IOException {
         assertEquals(Arrays.asList(1, 1, 1, 1, 1), firstListItemFromMap("arg_1", jsonDocAsMap));
     }
 
-    @SuppressWarnings("unchecked")
     private List<Integer> firstListItemFromMap(String name, Map<String, Object> jsonDocAsMap) {
         return nthListItemFromMap(name, 0, jsonDocAsMap);
     }
@@ -62,7 +61,7 @@ public static List<Integer> nthListItemFromMap(String name, int n, Map<String,
     public void testInputTooLarge() throws IOException {
         tokenizer = MPNetTokenizer.mpBuilder(TEST_CASED_VOCAB, new MPNetTokenization(null, null, 5, null)).build();
         {
-            MPNetRequestBuilder requestBuilder = new MPNetRequestBuilder(tokenizer);
+            var requestBuilder = tokenizer.requestBuilder();
             ElasticsearchStatusException e = expectThrows(
                 ElasticsearchStatusException.class,
                 () -> requestBuilder.buildRequest(
@@ -78,7 +77,7 @@ public void testInputTooLarge() throws IOException {
             );
         }
         {
-            MPNetRequestBuilder requestBuilder = new MPNetRequestBuilder(tokenizer);
+            var requestBuilder = tokenizer.requestBuilder();
             // input will become 3 tokens + the Class and Separator token = 5 which is
             // our max sequence length
             requestBuilder.buildRequest(Collections.singletonList("Elasticsearch fun"), "request1", Tokenization.Truncate.NONE);
@@ -89,13 +88,13 @@ public void testBatchWithPadding() throws IOException {
         tokenizer = MPNetTokenizer.mpBuilder(TEST_CASED_VOCAB, new MPNetTokenization(null, null, 512, null)).build();
 
-        MPNetRequestBuilder requestBuilder = new MPNetRequestBuilder(tokenizer);
+        var requestBuilder = tokenizer.requestBuilder();
         NlpTask.Request request = requestBuilder.buildRequest(
             List.of("Elasticsearch", "my little red car", "Godzilla day"),
             "request1",
             Tokenization.Truncate.NONE
         );
-        Map<String, Object> jsonDocAsMap = XContentHelper.convertToMap(request.processInput, true, XContentType.JSON).v2();
+        Map<String, Object> jsonDocAsMap = XContentHelper.convertToMap(request.processInput(), true, XContentType.JSON).v2();
 
         assertThat(jsonDocAsMap.keySet(), hasSize(3));
         assertThat((List<List<Integer>>) jsonDocAsMap.get("tokens"), hasSize(3));
diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/NerProcessorTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/NerProcessorTests.java
index c7c2e3817978c..baafecf85c30a 100644
--- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/NerProcessorTests.java
+++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/NerProcessorTests.java
@@ -182,7 +182,7 @@ public void testGroupTaggedTokens() throws IOException {
         taggedTokens.add(new NerProcessor.NerResultProcessor.TaggedToken(tokens.get(i++), NerProcessor.IobTag.O, 1.0));
         taggedTokens.add(new NerProcessor.NerResultProcessor.TaggedToken(tokens.get(i++), NerProcessor.IobTag.O, 1.0));
         taggedTokens.add(new NerProcessor.NerResultProcessor.TaggedToken(tokens.get(i++), NerProcessor.IobTag.O, 1.0));
-        taggedTokens.add(new NerProcessor.NerResultProcessor.TaggedToken(tokens.get(i++), NerProcessor.IobTag.B_ORG, 1.0));
+        taggedTokens.add(new NerProcessor.NerResultProcessor.TaggedToken(tokens.get(i), NerProcessor.IobTag.B_ORG, 1.0));
 
         List<NerResults.EntityGroup> entityGroups = NerProcessor.NerResultProcessor.groupTaggedTokens(taggedTokens, input);
         assertThat(entityGroups, hasSize(3));
@@ -218,7 +218,7 @@ public void testGroupTaggedTokens_GivenConsecutiveEntities() throws IOException
         taggedTokens.add(new NerProcessor.NerResultProcessor.TaggedToken(tokens.get(i++), NerProcessor.IobTag.O, 1.0));
         taggedTokens.add(new NerProcessor.NerResultProcessor.TaggedToken(tokens.get(i++), NerProcessor.IobTag.O, 1.0));
         taggedTokens.add(new NerProcessor.NerResultProcessor.TaggedToken(tokens.get(i++), NerProcessor.IobTag.B_PER, 1.0));
-        taggedTokens.add(new NerProcessor.NerResultProcessor.TaggedToken(tokens.get(i++), NerProcessor.IobTag.O, 1.0));
+        taggedTokens.add(new NerProcessor.NerResultProcessor.TaggedToken(tokens.get(i), NerProcessor.IobTag.O, 1.0));
 
         List<NerResults.EntityGroup> entityGroups = NerProcessor.NerResultProcessor.groupTaggedTokens(taggedTokens, input);
         assertThat(entityGroups, hasSize(3));
@@ -241,7 +241,7 @@ public void testGroupTaggedTokens_GivenConsecutiveContinuingEntities() throws IO
         taggedTokens.add(new NerProcessor.NerResultProcessor.TaggedToken(tokens.get(i++), NerProcessor.IobTag.O, 1.0));
         taggedTokens.add(new NerProcessor.NerResultProcessor.TaggedToken(tokens.get(i++), NerProcessor.IobTag.B_PER, 1.0));
         taggedTokens.add(new NerProcessor.NerResultProcessor.TaggedToken(tokens.get(i++), NerProcessor.IobTag.I_PER, 1.0));
-        taggedTokens.add(new NerProcessor.NerResultProcessor.TaggedToken(tokens.get(i++), NerProcessor.IobTag.B_ORG, 1.0));
+        taggedTokens.add(new NerProcessor.NerResultProcessor.TaggedToken(tokens.get(i), NerProcessor.IobTag.B_ORG, 1.0));
 
         List<NerResults.EntityGroup> entityGroups = NerProcessor.NerResultProcessor.groupTaggedTokens(taggedTokens, input);
         assertThat(entityGroups, hasSize(3));
@@ -272,7 +272,7 @@ public void testEntityContainsPunctuation() throws IOException {
         taggedTokens.add(new NerProcessor.NerResultProcessor.TaggedToken(tokens.get(i++), NerProcessor.IobTag.I_ORG, 1.0));
         taggedTokens.add(new NerProcessor.NerResultProcessor.TaggedToken(tokens.get(i++), NerProcessor.IobTag.I_ORG, 1.0));
         taggedTokens.add(new NerProcessor.NerResultProcessor.TaggedToken(tokens.get(i++), NerProcessor.IobTag.I_ORG, 1.0));
-        taggedTokens.add(new NerProcessor.NerResultProcessor.TaggedToken(tokens.get(i++), NerProcessor.IobTag.O, 1.0));
+        taggedTokens.add(new NerProcessor.NerResultProcessor.TaggedToken(tokens.get(i), NerProcessor.IobTag.O, 1.0));
         assertEquals(tokens.size(), taggedTokens.size());
 
         List<NerResults.EntityGroup> entityGroups = NerProcessor.NerResultProcessor.groupTaggedTokens(taggedTokens, input);
diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/TextClassificationProcessorTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/TextClassificationProcessorTests.java
index 0f1b03e4bea56..2d57e997c8f5b 100644
--- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/TextClassificationProcessorTests.java
+++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/TextClassificationProcessorTests.java
@@ -71,7 +71,7 @@ public void testBuildRequest() throws IOException {
 
         NlpTask.Request request = processor.getRequestBuilder(config)
             .buildRequest(List.of("Elasticsearch fun"), "request1", Tokenization.Truncate.NONE);
-        Map<String, Object> jsonDocAsMap = XContentHelper.convertToMap(request.processInput, true, XContentType.JSON).v2();
+        Map<String, Object> jsonDocAsMap = XContentHelper.convertToMap(request.processInput(), true, XContentType.JSON).v2();
 
         assertThat(jsonDocAsMap.keySet(), hasSize(5));
         assertEquals("request1", jsonDocAsMap.get("request_id"));
diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/ZeroShotClassificationProcessorTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/ZeroShotClassificationProcessorTests.java
index 9fd5bb8f833c2..4f5d614348967 100644
--- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/ZeroShotClassificationProcessorTests.java
+++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/ZeroShotClassificationProcessorTests.java
@@ -50,7 +50,7 @@ public void testBuildRequest() throws IOException {
             (NlpConfig) new ZeroShotClassificationConfigUpdate.Builder().setLabels(List.of("new", "stuff")).build().apply(config)
         ).buildRequest(List.of("Elasticsearch fun"), "request1", Tokenization.Truncate.NONE);
 
-        Map<String, Object> jsonDocAsMap = XContentHelper.convertToMap(request.processInput, true, XContentType.JSON).v2();
+        Map<String, Object> jsonDocAsMap = XContentHelper.convertToMap(request.processInput(), true, XContentType.JSON).v2();
 
         assertThat(jsonDocAsMap.keySet(), hasSize(5));
         assertEquals("request1", jsonDocAsMap.get("request_id"));
diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/BertTokenizerTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/BertTokenizerTests.java
index 381df2230532e..63f3af65f667d 100644
--- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/BertTokenizerTests.java
+++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/BertTokenizerTests.java
@@ -47,8 +47,8 @@ public class BertTokenizerTests extends ESTestCase {
         BertTokenizer.PAD_TOKEN
     );
 
-    private List<String> tokenStrings(List<WordPieceTokenFilter.WordPieceToken> tokens) {
-        return tokens.stream().map(WordPieceTokenFilter.WordPieceToken::toString).collect(Collectors.toList());
+    private List<String> tokenStrings(List<? extends DelimitedToken> tokens) {
+        return tokens.stream().map(DelimitedToken::toString).collect(Collectors.toList());
     }
 
     public void testTokenize() {
@@ -58,10 +58,10 @@ public class BertTokenizerTests extends ESTestCase {
                 new BertTokenization(null, false, null, Tokenization.Truncate.NONE)
             ).build()
         ) {
-            TokenizationResult.Tokenization tokenization = tokenizer.tokenize("Elasticsearch fun", Tokenization.Truncate.NONE);
-            assertThat(tokenStrings(tokenization.getTokens()), contains("Elastic", "##search", "fun"));
-            assertArrayEquals(new int[] { 0, 1, 3 }, tokenization.getTokenIds());
-            assertArrayEquals(new int[] { 0, 0, 1 }, tokenization.getTokenMap());
+            TokenizationResult.Tokens tokenization = tokenizer.tokenize("Elasticsearch fun", Tokenization.Truncate.NONE);
+            assertThat(tokenStrings(tokenization.tokens()), contains("Elastic", "##search", "fun"));
+            assertArrayEquals(new int[] { 0, 1, 3 }, tokenization.tokenIds());
+            assertArrayEquals(new int[] { 0, 0, 1 }, tokenization.tokenMap());
         }
     }
 
@@ -103,11 +103,11 @@ public void testTokenizeLargeInputTruncation() {
             ).build()
         ) {
 
-            TokenizationResult.Tokenization tokenization = tokenizer.tokenize(
+            TokenizationResult.Tokens tokenization = tokenizer.tokenize(
                 "Elasticsearch fun with Pancake and Godzilla",
                 Tokenization.Truncate.FIRST
             );
-            assertArrayEquals(new int[] { 0, 1, 3, 18, 17 }, tokenization.getTokenIds());
+            assertArrayEquals(new int[] { 0, 1, 3, 18, 17 }, tokenization.tokenIds());
         }
 
         try (
@@ -120,16 +120,16 @@ public void testTokenizeLargeInputTruncation() {
                 "Elasticsearch fun with Pancake and Godzilla",
                 Tokenization.Truncate.FIRST
             );
-            assertArrayEquals(new int[] { 12, 0, 1, 3, 13 }, tokenization.getTokenIds());
-            assertArrayEquals(new int[] { -1, 0, 0, 1, -1 }, tokenization.getTokenMap());
+            assertArrayEquals(new int[] { 12, 0, 1, 3, 13 }, tokenization.tokenIds());
+            assertArrayEquals(new int[] { -1, 0, 0, 1, -1 }, tokenization.tokenMap());
         }
     }
 
     public void testTokenizeAppendSpecialTokens() {
         try (BertTokenizer tokenizer = BertTokenizer.builder(TEST_CASED_VOCAB, Tokenization.createDefault()).build()) {
-            TokenizationResult.Tokenization tokenization = tokenizer.tokenize("Elasticsearch fun", Tokenization.Truncate.NONE);
-            assertArrayEquals(new int[] { 12, 0, 1, 3, 13 }, tokenization.getTokenIds());
-            assertArrayEquals(new int[] { -1, 0, 0, 1, -1 }, tokenization.getTokenMap());
+            TokenizationResult.Tokens tokenization = tokenizer.tokenize("Elasticsearch fun", Tokenization.Truncate.NONE);
+            assertArrayEquals(new int[] { 12, 0, 1, 3, 13 }, tokenization.tokenIds());
+            assertArrayEquals(new int[] { -1, 0, 0, 1, -1 }, tokenization.tokenMap());
         }
     }
 
@@ -143,13 +143,13 @@ public void testNeverSplitTokens() {
                 .build()
         ) {
 
-            TokenizationResult.Tokenization tokenization = tokenizer.tokenize(
+            TokenizationResult.Tokens tokenization = tokenizer.tokenize(
                 "Elasticsearch " + specialToken + " fun",
                 Tokenization.Truncate.NONE
             );
-            assertThat(tokenStrings(tokenization.getTokens()), contains("Elastic", "##search", specialToken, "fun"));
-            assertArrayEquals(new int[] { 0, 1, 15, 3 }, tokenization.getTokenIds());
-            assertArrayEquals(new int[] { 0, 0, 1, 2 }, tokenization.getTokenMap());
+            assertThat(tokenStrings(tokenization.tokens()), contains("Elastic", "##search", specialToken, "fun"));
+            assertArrayEquals(new int[] { 0, 1, 15, 3 }, tokenization.tokenIds());
+            assertArrayEquals(new int[] { 0, 0, 1, 2 }, tokenization.tokenMap());
         }
     }
 
@@ -161,13 +161,13 @@ public void testDoLowerCase() {
             ).setDoLowerCase(false).setWithSpecialTokens(false).build()
         ) {
 
-            TokenizationResult.Tokenization tokenization = tokenizer.tokenize("Elasticsearch fun", Tokenization.Truncate.NONE);
-            assertArrayEquals(new int[] { 3, 2 }, tokenization.getTokenIds());
-            assertArrayEquals(new int[] { 0, 1 }, tokenization.getTokenMap());
+            TokenizationResult.Tokens tokenization = tokenizer.tokenize("Elasticsearch fun", Tokenization.Truncate.NONE);
+            assertArrayEquals(new int[] { 3, 2 }, tokenization.tokenIds());
+            assertArrayEquals(new int[] { 0, 1 }, tokenization.tokenMap());
 
             tokenization = tokenizer.tokenize("elasticsearch fun", Tokenization.Truncate.NONE);
-            assertArrayEquals(new int[] { 0, 1, 2 }, tokenization.getTokenIds());
-            assertArrayEquals(new int[] { 0, 0, 1 }, tokenization.getTokenMap());
+            assertArrayEquals(new int[] { 0, 1, 2 }, tokenization.tokenIds());
+            assertArrayEquals(new int[] { 0, 0, 1 }, tokenization.tokenMap());
         }
 
         try (
@@ -177,9 +177,9 @@ public void testDoLowerCase() {
             ).setDoLowerCase(true).setWithSpecialTokens(false).build()
         ) {
 
-            TokenizationResult.Tokenization tokenization = tokenizer.tokenize("Elasticsearch fun", Tokenization.Truncate.NONE);
-            assertArrayEquals(new int[] { 0, 1, 2 }, tokenization.getTokenIds());
-            assertArrayEquals(new int[] { 0, 0, 1 }, tokenization.getTokenMap());
+            TokenizationResult.Tokens tokenization = tokenizer.tokenize("Elasticsearch fun", Tokenization.Truncate.NONE);
+            assertArrayEquals(new int[] { 0, 1, 2 }, tokenization.tokenIds());
+            assertArrayEquals(new int[] { 0, 0, 1 }, tokenization.tokenMap());
         }
     }
 
@@ -189,14 +189,14 @@ public void testPunctuation() {
             .setWithSpecialTokens(false)
             .build()
         ) {
-            TokenizationResult.Tokenization tokenization = tokenizer.tokenize("Elasticsearch, fun.", Tokenization.Truncate.NONE);
-            assertThat(tokenStrings(tokenization.getTokens()), contains("Elastic", "##search", ",", "fun", "."));
-            assertArrayEquals(new int[] { 0, 1, 11, 3, 10 }, tokenization.getTokenIds());
-            assertArrayEquals(new int[] { 0, 0, 1, 2, 3 }, tokenization.getTokenMap());
+            TokenizationResult.Tokens tokenization = tokenizer.tokenize("Elasticsearch, fun.", Tokenization.Truncate.NONE);
+            assertThat(tokenStrings(tokenization.tokens()), contains("Elastic", "##search", ",", "fun", "."));
+            assertArrayEquals(new int[] { 0, 1, 11, 3, 10 }, tokenization.tokenIds());
+            assertArrayEquals(new int[] { 0, 0, 1, 2, 3 }, tokenization.tokenMap());
 
             tokenization = tokenizer.tokenize("Elasticsearch, fun [MASK].", Tokenization.Truncate.NONE);
-            assertArrayEquals(new int[] { 0, 1, 11, 3, 14, 10 }, tokenization.getTokenIds());
-            assertArrayEquals(new int[] { 0, 0, 1, 2, 3, 4 }, tokenization.getTokenMap());
+            assertArrayEquals(new int[] { 0, 1, 11, 3, 14, 10 }, tokenization.tokenIds());
+            assertArrayEquals(new int[] { 0, 0, 1, 2, 3, 4 }, tokenization.tokenMap());
         }
     }
 
@@ -224,20 +224,20 @@ public void testPunctuationWithMask() {
             ).setWithSpecialTokens(true).setNeverSplit(Set.of("[MASK]")).build()
         ) {
 
-
TokenizationResult.Tokenization tokenization = tokenizer.tokenize("This is [MASK]-tastic!", Tokenization.Truncate.NONE); - assertThat(tokenStrings(tokenization.getTokens()), contains("This", "is", "[MASK]", "-", "ta", "##stic", "!")); - assertArrayEquals(new int[] { 0, 1, 2, 3, 4, 6, 7, 8, 9 }, tokenization.getTokenIds()); - assertArrayEquals(new int[] { -1, 0, 1, 2, 3, 4, 4, 5, -1 }, tokenization.getTokenMap()); + TokenizationResult.Tokens tokenization = tokenizer.tokenize("This is [MASK]-tastic!", Tokenization.Truncate.NONE); + assertThat(tokenStrings(tokenization.tokens()), contains("This", "is", "[MASK]", "-", "ta", "##stic", "!")); + assertArrayEquals(new int[] { 0, 1, 2, 3, 4, 6, 7, 8, 9 }, tokenization.tokenIds()); + assertArrayEquals(new int[] { -1, 0, 1, 2, 3, 4, 4, 5, -1 }, tokenization.tokenMap()); tokenization = tokenizer.tokenize("This is sub~[MASK]!", Tokenization.Truncate.NONE); - assertThat(tokenStrings(tokenization.getTokens()), contains("This", "is", "sub", "~", "[MASK]", "!")); - assertArrayEquals(new int[] { 0, 1, 2, 10, 5, 3, 8, 9 }, tokenization.getTokenIds()); - assertArrayEquals(new int[] { -1, 0, 1, 2, 3, 4, 5, -1 }, tokenization.getTokenMap()); + assertThat(tokenStrings(tokenization.tokens()), contains("This", "is", "sub", "~", "[MASK]", "!")); + assertArrayEquals(new int[] { 0, 1, 2, 10, 5, 3, 8, 9 }, tokenization.tokenIds()); + assertArrayEquals(new int[] { -1, 0, 1, 2, 3, 4, 5, -1 }, tokenization.tokenMap()); tokenization = tokenizer.tokenize("This is sub,[MASK].tastic!", Tokenization.Truncate.NONE); - assertThat(tokenStrings(tokenization.getTokens()), contains("This", "is", "sub", ",", "[MASK]", ".", "ta", "##stic", "!")); - assertArrayEquals(new int[] { 0, 1, 2, 10, 11, 3, 12, 6, 7, 8, 9 }, tokenization.getTokenIds()); - assertArrayEquals(new int[] { -1, 0, 1, 2, 3, 4, 5, 6, 6, 7, -1 }, tokenization.getTokenMap()); + assertThat(tokenStrings(tokenization.tokens()), contains("This", "is", "sub", ",", "[MASK]", ".", "ta", "##stic", "!")); + assertArrayEquals(new int[] { 0, 1, 2, 10, 11, 3, 12, 6, 7, 8, 9 }, tokenization.tokenIds()); + assertArrayEquals(new int[] { -1, 0, 1, 2, 3, 4, 5, 6, 6, 7, -1 }, tokenization.tokenMap()); } } @@ -257,23 +257,23 @@ public void testBatchInput() { tokenizer.tokenize("Godzilla Pancake red car day", Tokenization.Truncate.NONE) ) ); - assertThat(tr.getTokenizations(), hasSize(4)); + assertThat(tr.getTokens(), hasSize(4)); - TokenizationResult.Tokenization tokenization = tr.getTokenizations().get(0); - assertArrayEquals(new int[] { 0, 1 }, tokenization.getTokenIds()); - assertArrayEquals(new int[] { 0, 0 }, tokenization.getTokenMap()); + TokenizationResult.Tokens tokenization = tr.getTokenization(0); + assertArrayEquals(new int[] { 0, 1 }, tokenization.tokenIds()); + assertArrayEquals(new int[] { 0, 0 }, tokenization.tokenMap()); - tokenization = tr.getTokenizations().get(1); - assertArrayEquals(new int[] { 4, 5, 6, 7 }, tokenization.getTokenIds()); - assertArrayEquals(new int[] { 0, 1, 2, 3 }, tokenization.getTokenMap()); + tokenization = tr.getTokenization(1); + assertArrayEquals(new int[] { 4, 5, 6, 7 }, tokenization.tokenIds()); + assertArrayEquals(new int[] { 0, 1, 2, 3 }, tokenization.tokenMap()); - tokenization = tr.getTokenizations().get(2); - assertArrayEquals(new int[] { 8, 9, 16 }, tokenization.getTokenIds()); - assertArrayEquals(new int[] { 0, 0, 1 }, tokenization.getTokenMap()); + tokenization = tr.getTokenization(2); + assertArrayEquals(new int[] { 8, 9, 16 }, tokenization.tokenIds()); + assertArrayEquals(new int[] { 
0, 0, 1 }, tokenization.tokenMap()); - tokenization = tr.getTokenizations().get(3); - assertArrayEquals(new int[] { 8, 9, 17, 6, 7, 16 }, tokenization.getTokenIds()); - assertArrayEquals(new int[] { 0, 0, 1, 2, 3, 4 }, tokenization.getTokenMap()); + tokenization = tr.getTokenization(3); + assertArrayEquals(new int[] { 8, 9, 17, 6, 7, 16 }, tokenization.tokenIds()); + assertArrayEquals(new int[] { 0, 0, 1, 2, 3, 4 }, tokenization.tokenMap()); } } @@ -284,13 +284,13 @@ public void testMultiSeqTokenization() { .setWithSpecialTokens(true) .build() ) { - TokenizationResult.Tokenization tokenization = tokenizer.tokenize( + TokenizationResult.Tokens tokenization = tokenizer.tokenize( "Elasticsearch is fun", "Godzilla my little red car", Tokenization.Truncate.NONE ); - var tokenStream = Arrays.stream(tokenization.getTokenIds()).mapToObj(TEST_CASED_VOCAB::get).collect(Collectors.toList()); + var tokenStream = Arrays.stream(tokenization.tokenIds()).mapToObj(TEST_CASED_VOCAB::get).collect(Collectors.toList()); assertThat( tokenStream, contains( @@ -309,7 +309,7 @@ public void testMultiSeqTokenization() { BertTokenizer.SEPARATOR_TOKEN ) ); - assertArrayEquals(new int[] { 12, 0, 1, 2, 3, 13, 8, 9, 4, 5, 6, 7, 13 }, tokenization.getTokenIds()); + assertArrayEquals(new int[] { 12, 0, 1, 2, 3, 13, 8, 9, 4, 5, 6, 7, 13 }, tokenization.tokenIds()); } } @@ -321,13 +321,13 @@ public void testTokenizeLargeInputMultiSequenceTruncation() { ).build() ) { - TokenizationResult.Tokenization tokenization = tokenizer.tokenize( + TokenizationResult.Tokens tokenization = tokenizer.tokenize( "Elasticsearch is fun", "Godzilla my little red car", Tokenization.Truncate.FIRST ); - var tokenStream = Arrays.stream(tokenization.getTokenIds()).mapToObj(TEST_CASED_VOCAB::get).collect(Collectors.toList()); + var tokenStream = Arrays.stream(tokenization.tokenIds()).mapToObj(TEST_CASED_VOCAB::get).collect(Collectors.toList()); assertThat( tokenStream, contains( @@ -359,12 +359,12 @@ public void testTokenizeLargeInputMultiSequenceTruncation() { ).build() ) { - TokenizationResult.Tokenization tokenization = tokenizer.tokenize( + TokenizationResult.Tokens tokenization = tokenizer.tokenize( "Elasticsearch is fun", "Godzilla my little red car", Tokenization.Truncate.SECOND ); - var tokenStream = Arrays.stream(tokenization.getTokenIds()).mapToObj(TEST_CASED_VOCAB::get).collect(Collectors.toList()); + var tokenStream = Arrays.stream(tokenization.tokenIds()).mapToObj(TEST_CASED_VOCAB::get).collect(Collectors.toList()); assertThat( tokenStream, contains( diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/MPNetTokenizerTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/MPNetTokenizerTests.java index 8bec4be872ff0..a2b7b9b364e2c 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/MPNetTokenizerTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/MPNetTokenizerTests.java @@ -42,8 +42,8 @@ public class MPNetTokenizerTests extends ESTestCase { MPNetTokenizer.PAD_TOKEN ); - private List tokenStrings(List tokens) { - return tokens.stream().map(WordPieceTokenFilter.WordPieceToken::toString).collect(Collectors.toList()); + private List tokenStrings(List tokens) { + return tokens.stream().map(DelimitedToken::toString).collect(Collectors.toList()); } public void testTokenize() { @@ -53,10 +53,10 @@ public void testTokenize() { new MPNetTokenization(null, false, null, 
Tokenization.Truncate.NONE) ).build() ) { - TokenizationResult.Tokenization tokenization = tokenizer.tokenize("Elasticsearch fun", Tokenization.Truncate.NONE); - assertThat(tokenStrings(tokenization.getTokens()), contains("Elastic", "##search", "fun")); - assertArrayEquals(new int[] { 0, 1, 3 }, tokenization.getTokenIds()); - assertArrayEquals(new int[] { 0, 0, 1 }, tokenization.getTokenMap()); + TokenizationResult.Tokens tokenization = tokenizer.tokenize("Elasticsearch fun", Tokenization.Truncate.NONE); + assertThat(tokenStrings(tokenization.tokens()), contains("Elastic", "##search", "fun")); + assertArrayEquals(new int[] { 0, 1, 3 }, tokenization.tokenIds()); + assertArrayEquals(new int[] { 0, 0, 1 }, tokenization.tokenMap()); } } @@ -67,13 +67,13 @@ public void testMultiSeqTokenization() { new MPNetTokenization(null, false, null, Tokenization.Truncate.NONE) ).setDoLowerCase(false).setWithSpecialTokens(true).build() ) { - TokenizationResult.Tokenization tokenization = tokenizer.tokenize( + TokenizationResult.Tokens tokenization = tokenizer.tokenize( "Elasticsearch is fun", "Godzilla my little red car", Tokenization.Truncate.NONE ); - var tokenStream = Arrays.stream(tokenization.getTokenIds()).mapToObj(TEST_CASED_VOCAB::get).collect(Collectors.toList()); + var tokenStream = Arrays.stream(tokenization.tokenIds()).mapToObj(TEST_CASED_VOCAB::get).collect(Collectors.toList()); assertThat( tokenStream, contains( @@ -93,7 +93,7 @@ public void testMultiSeqTokenization() { MPNetTokenizer.SEPARATOR_TOKEN ) ); - assertArrayEquals(new int[] { 12, 0, 1, 2, 3, 13, 13, 8, 9, 4, 5, 6, 7, 13 }, tokenization.getTokenIds()); + assertArrayEquals(new int[] { 12, 0, 1, 2, 3, 13, 13, 8, 9, 4, 5, 6, 7, 13 }, tokenization.tokenIds()); } } From a86f9c5f027242065c91a7b505ac6ade9a13b933 Mon Sep 17 00:00:00 2001 From: Rene Groeschke Date: Tue, 15 Feb 2022 14:06:22 +0100 Subject: [PATCH 099/167] Remove usage of deprecated Provider#forUseAtConfigurationTime (#83932) Remove gradle api usage that has been deprecated with Gradle 7.4 --- .../conventions/VersionPropertiesLoader.java | 2 -- .../conventions/VersionPropertiesPlugin.java | 2 +- .../conventions/info/ParallelDetector.java | 2 +- ...icsearch.authenticated-testclusters.gradle | 2 -- .../src/main/groovy/elasticsearch.ide.gradle | 2 +- .../src/main/groovy/elasticsearch.run.gradle | 6 ++--- .../gradle/internal/InternalBwcGitPlugin.java | 23 ++++++++----------- .../internal/info/GlobalBuildInfoPlugin.java | 2 +- .../internal/test/RestTestBasePlugin.java | 2 +- build.gradle | 4 ++-- client/rest-high-level/build.gradle | 2 -- .../archives/integ-test-zip/build.gradle | 2 +- distribution/build.gradle | 1 - distribution/docker/build.gradle | 2 +- distribution/packages/build.gradle | 2 +- modules/ingest-geoip/build.gradle | 1 - modules/lang-painless/build.gradle | 2 +- modules/repository-azure/build.gradle | 2 +- qa/remote-clusters/build.gradle | 2 +- x-pack/plugin/core/build.gradle | 2 +- x-pack/plugin/eql/qa/correctness/build.gradle | 2 -- x-pack/plugin/ml/build.gradle | 2 +- x-pack/plugin/sql/sql-cli/build.gradle | 2 +- 23 files changed, 28 insertions(+), 43 deletions(-) diff --git a/build-conventions/src/main/java/org/elasticsearch/gradle/internal/conventions/VersionPropertiesLoader.java b/build-conventions/src/main/java/org/elasticsearch/gradle/internal/conventions/VersionPropertiesLoader.java index 1702c03f91177..510a8df411285 100644 --- a/build-conventions/src/main/java/org/elasticsearch/gradle/internal/conventions/VersionPropertiesLoader.java +++ 
b/build-conventions/src/main/java/org/elasticsearch/gradle/internal/conventions/VersionPropertiesLoader.java @@ -43,7 +43,6 @@ protected static void loadBuildSrcVersion(Properties loadedProps, ProviderFactor ); } String qualifier = providers.systemProperty("build.version_qualifier") - .forUseAtConfigurationTime() .getOrElse(""); if (qualifier.isEmpty() == false) { if (qualifier.matches("(alpha|beta|rc)\\d+") == false) { @@ -52,7 +51,6 @@ protected static void loadBuildSrcVersion(Properties loadedProps, ProviderFactor elasticsearch += "-" + qualifier; } final String buildSnapshotSystemProperty = providers.systemProperty("build.snapshot") - .forUseAtConfigurationTime() .getOrElse("true"); switch (buildSnapshotSystemProperty) { case "true": diff --git a/build-conventions/src/main/java/org/elasticsearch/gradle/internal/conventions/VersionPropertiesPlugin.java b/build-conventions/src/main/java/org/elasticsearch/gradle/internal/conventions/VersionPropertiesPlugin.java index 2d56e42e3d951..c4c664df46bc3 100644 --- a/build-conventions/src/main/java/org/elasticsearch/gradle/internal/conventions/VersionPropertiesPlugin.java +++ b/build-conventions/src/main/java/org/elasticsearch/gradle/internal/conventions/VersionPropertiesPlugin.java @@ -32,6 +32,6 @@ public void apply(Project project) { .registerIfAbsent("versions", VersionPropertiesBuildService.class, spec -> { spec.getParameters().getInfoPath().set(infoPath); }); - project.getExtensions().add("versions", serviceProvider.forUseAtConfigurationTime().get().getProperties()); + project.getExtensions().add("versions", serviceProvider.get().getProperties()); } } diff --git a/build-conventions/src/main/java/org/elasticsearch/gradle/internal/conventions/info/ParallelDetector.java b/build-conventions/src/main/java/org/elasticsearch/gradle/internal/conventions/info/ParallelDetector.java index 7359d1728b96c..53b48c0c58935 100644 --- a/build-conventions/src/main/java/org/elasticsearch/gradle/internal/conventions/info/ParallelDetector.java +++ b/build-conventions/src/main/java/org/elasticsearch/gradle/internal/conventions/info/ParallelDetector.java @@ -78,7 +78,7 @@ public static int findDefaultParallel(Project project) { } private static boolean isMac(ProviderFactory providers) { - return providers.systemProperty("os.name").forUseAtConfigurationTime().getOrElse("").startsWith("Mac"); + return providers.systemProperty("os.name").getOrElse("").startsWith("Mac"); } } diff --git a/build-tools-internal/src/main/groovy/elasticsearch.authenticated-testclusters.gradle b/build-tools-internal/src/main/groovy/elasticsearch.authenticated-testclusters.gradle index b52e6ec7f005c..102a838235cb1 100644 --- a/build-tools-internal/src/main/groovy/elasticsearch.authenticated-testclusters.gradle +++ b/build-tools-internal/src/main/groovy/elasticsearch.authenticated-testclusters.gradle @@ -10,10 +10,8 @@ import org.elasticsearch.gradle.testclusters.StandaloneRestIntegTestTask def clusterCredentials = [ username: providers.systemProperty('tests.rest.cluster.username') - .forUseAtConfigurationTime() .getOrElse('test_admin'), password: providers.systemProperty('tests.rest.cluster.password') - .forUseAtConfigurationTime() .getOrElse('x-pack-test-password') ] diff --git a/build-tools-internal/src/main/groovy/elasticsearch.ide.gradle b/build-tools-internal/src/main/groovy/elasticsearch.ide.gradle index 84b35b9a7568c..31638c9ddb1d4 100644 --- a/build-tools-internal/src/main/groovy/elasticsearch.ide.gradle +++ b/build-tools-internal/src/main/groovy/elasticsearch.ide.gradle @@ -69,7 
+69,7 @@ tasks.register('configureIdeCheckstyle') { } // Applying this stuff, particularly the idea-ext plugin, has a cost so avoid it unless we're running in the IDE -if (providers.systemProperty('idea.active').forUseAtConfigurationTime().getOrNull() == 'true') { +if (providers.systemProperty('idea.active').getOrNull() == 'true') { project.apply(plugin: org.jetbrains.gradle.ext.IdeaExtPlugin) tasks.register('configureIdeaGradleJvm') { diff --git a/build-tools-internal/src/main/groovy/elasticsearch.run.gradle b/build-tools-internal/src/main/groovy/elasticsearch.run.gradle index 4eb4cdcdc32d8..b49c302e6ab99 100644 --- a/build-tools-internal/src/main/groovy/elasticsearch.run.gradle +++ b/build-tools-internal/src/main/groovy/elasticsearch.run.gradle @@ -14,9 +14,9 @@ import org.elasticsearch.gradle.testclusters.RunTask // apply plugin: 'elasticsearch.internal-testclusters' testClusters.register("runTask") { - testDistribution = providers.systemProperty('run.distribution').orElse('default').forUseAtConfigurationTime().get() - if (providers.systemProperty('run.distribution').forUseAtConfigurationTime().getOrElse('default') == 'default') { - String licenseType = providers.systemProperty("run.license_type").forUseAtConfigurationTime().getOrElse("basic") + testDistribution = providers.systemProperty('run.distribution').orElse('default').get() + if (providers.systemProperty('run.distribution').getOrElse('default') == 'default') { + String licenseType = providers.systemProperty("run.license_type").getOrElse("basic") if (licenseType == 'trial') { setting 'xpack.ml.enabled', 'true' setting 'xpack.graph.enabled', 'true' diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalBwcGitPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalBwcGitPlugin.java index 1dce3a7092d85..eda600f09004c 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalBwcGitPlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalBwcGitPlugin.java @@ -84,25 +84,20 @@ public void apply(Project project) { String remoteRepo = remote.get(); // for testing only we can override the base remote url String remoteRepoUrl = providerFactory.systemProperty("testRemoteRepo") - .forUseAtConfigurationTime() .getOrElse("https://github.com/" + remoteRepo + "/elasticsearch.git"); addRemote.setCommandLine(asList("git", "remote", "add", remoteRepo, remoteRepoUrl)); }); TaskProvider fetchLatestTaskProvider = tasks.register("fetchLatest", LoggedExec.class, fetchLatest -> { - var gitFetchLatest = project.getProviders() - .systemProperty("tests.bwc.git_fetch_latest") - .forUseAtConfigurationTime() - .orElse("true") - .map(fetchProp -> { - if ("true".equals(fetchProp)) { - return true; - } - if ("false".equals(fetchProp)) { - return false; - } - throw new GradleException("tests.bwc.git_fetch_latest must be [true] or [false] but was [" + fetchProp + "]"); - }); + var gitFetchLatest = project.getProviders().systemProperty("tests.bwc.git_fetch_latest").orElse("true").map(fetchProp -> { + if ("true".equals(fetchProp)) { + return true; + } + if ("false".equals(fetchProp)) { + return false; + } + throw new GradleException("tests.bwc.git_fetch_latest must be [true] or [false] but was [" + fetchProp + "]"); + }); fetchLatest.onlyIf(t -> project.getGradle().getStartParameter().isOffline() == false && gitFetchLatest.get()); fetchLatest.dependsOn(addRemoteTaskProvider); 
fetchLatest.setWorkingDir(gitExtension.getCheckoutDir().get()); diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/info/GlobalBuildInfoPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/info/GlobalBuildInfoPlugin.java index f8c9e2a86261c..be44f003bf036 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/info/GlobalBuildInfoPlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/info/GlobalBuildInfoPlugin.java @@ -269,7 +269,7 @@ private File findRuntimeJavaHome() { } private String findJavaHome(String version) { - Provider javaHomeNames = providers.gradleProperty("org.gradle.java.installations.fromEnv").forUseAtConfigurationTime(); + Provider javaHomeNames = providers.gradleProperty("org.gradle.java.installations.fromEnv"); String javaHomeEnvVar = getJavaHomeEnvVarName(version); // Provide a useful error if we're looking for a Java home version that we haven't told Gradle about yet diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/RestTestBasePlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/RestTestBasePlugin.java index 1d5d0078e771a..20cf4328e4c84 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/RestTestBasePlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/RestTestBasePlugin.java @@ -78,6 +78,6 @@ public void apply(Project project) { } private String systemProperty(String propName) { - return providerFactory.systemProperty(propName).forUseAtConfigurationTime().getOrNull(); + return providerFactory.systemProperty(propName).getOrNull(); } } diff --git a/build.gradle b/build.gradle index 978d2fefee794..120fadf16b31d 100644 --- a/build.gradle +++ b/build.gradle @@ -177,8 +177,8 @@ allprojects { // injecting groovy property variables into all projects project.ext { // for ide hacks... 
- isEclipse = providers.systemProperty("eclipse.launcher").forUseAtConfigurationTime().isPresent() || // Detects gradle launched from Eclipse's IDE - providers.systemProperty("eclipse.application").forUseAtConfigurationTime().isPresent() || // Detects gradle launched from the Eclipse compiler server + isEclipse = providers.systemProperty("eclipse.launcher").isPresent() || // Detects gradle launched from Eclipse's IDE + providers.systemProperty("eclipse.application").isPresent() || // Detects gradle launched from the Eclipse compiler server gradle.startParameter.taskNames.contains('eclipse') || // Detects gradle launched from the command line to do eclipse stuff gradle.startParameter.taskNames.contains('cleanEclipse') } diff --git a/client/rest-high-level/build.gradle b/client/rest-high-level/build.gradle index 1da2f9ae57f6a..288caec3ce183 100644 --- a/client/rest-high-level/build.gradle +++ b/client/rest-high-level/build.gradle @@ -64,11 +64,9 @@ File pkiTrustCert = file("./src/test/resources/org/elasticsearch/client/security def clusterUserNameProvider = providers.systemProperty('tests.rest.cluster.username') .orElse('test_user') - .forUseAtConfigurationTime() def clusterPasswordProvider = providers.systemProperty('tests.rest.cluster.password') .orElse('test-user-password') - .forUseAtConfigurationTime() tasks.named('splitPackagesAudit').configure { // the client package should be owned by the client, but server has some classes there too diff --git a/distribution/archives/integ-test-zip/build.gradle b/distribution/archives/integ-test-zip/build.gradle index f83aaf74fc2a1..e8e2908a5606a 100644 --- a/distribution/archives/integ-test-zip/build.gradle +++ b/distribution/archives/integ-test-zip/build.gradle @@ -43,7 +43,7 @@ tasks.named("integTest").configure { * 2. *One* of these tests is incompatible with that and should be skipped * when running against an external cluster. 
*/ - if (project.providers.systemProperty("tests.rest.cluster").forUseAtConfigurationTime().isPresent()) { + if (project.providers.systemProperty("tests.rest.cluster").isPresent()) { nonInputProperties.systemProperty 'tests.logfile', testClusters.named('integTest').map(c -> c.singleNode().serverLog) } else { systemProperty 'tests.logfile', '--external--' diff --git a/distribution/build.gradle b/distribution/build.gradle index 76549a83d0b0d..8fe7cfa7e2017 100644 --- a/distribution/build.gradle +++ b/distribution/build.gradle @@ -37,7 +37,6 @@ tasks.register("generateDependenciesReport", ConcatFilesTask) { headerLine = "name,version,url,license,sourceURL" target = new File(providers.systemProperty('csv') .orElse("${project.buildDir}/reports/dependencies/es-dependencies.csv") - .forUseAtConfigurationTime() .get() ) // explicitly add our dependency on the JDK diff --git a/distribution/docker/build.gradle b/distribution/docker/build.gradle index a7b0e57210366..a2fa645a63b07 100644 --- a/distribution/docker/build.gradle +++ b/distribution/docker/build.gradle @@ -18,7 +18,7 @@ apply plugin: 'elasticsearch.rest-resources' ext.cloudflareZlibVersion = '1.2.8' -String buildId = providers.systemProperty('build.id').forUseAtConfigurationTime().getOrNull() +String buildId = providers.systemProperty('build.id').getOrNull() boolean useLocalArtifacts = buildId != null && buildId.isBlank() == false repositories { diff --git a/distribution/packages/build.gradle b/distribution/packages/build.gradle index a0c7598352e6c..86715499368f0 100644 --- a/distribution/packages/build.gradle +++ b/distribution/packages/build.gradle @@ -273,7 +273,7 @@ ospackage { signingKeyPassphrase = project.property('signing.password') signingKeyRingFile = project.hasProperty('signing.secretKeyRingFile') ? 
project.file(project.property('signing.secretKeyRingFile')) : - new File(new File(project.providers.systemProperty('user.home').orElse('.gnupg').forUseAtConfigurationTime().get()), 'secring.gpg') + new File(new File(project.providers.systemProperty('user.home').orElse('.gnupg').get()), 'secring.gpg') } // version found on oldest supported distro, centos-6 diff --git a/modules/ingest-geoip/build.gradle b/modules/ingest-geoip/build.gradle index b265a162f07e7..efe677d7ef223 100644 --- a/modules/ingest-geoip/build.gradle +++ b/modules/ingest-geoip/build.gradle @@ -35,7 +35,6 @@ restResources { } def useFixture = providers.environmentVariable("geoip_use_service") - .forUseAtConfigurationTime() .map { s -> Boolean.parseBoolean(s) == false } .getOrElse(true) diff --git a/modules/lang-painless/build.gradle b/modules/lang-painless/build.gradle index ae3a9ee4ca5d7..b5c7bbf484a58 100644 --- a/modules/lang-painless/build.gradle +++ b/modules/lang-painless/build.gradle @@ -181,7 +181,7 @@ tasks.register("generateContextApiSpec", DefaultTestClustersTask) { classpath = sourceSets.doc.runtimeClasspath systemProperty "cluster.uri", "${-> generateContextApiSpecCluster.get().singleNode().getAllHttpSocketURI().get(0)}" systemProperty "jdksrc", providers.systemProperty("jdksrc").getOrNull() - systemProperty "packageSources", providers.systemProperty("packageSources").forUseAtConfigurationTime().getOrNull() + systemProperty "packageSources", providers.systemProperty("packageSources").getOrNull() }.assertNormalExitValue() } } diff --git a/modules/repository-azure/build.gradle b/modules/repository-azure/build.gradle index ee7a0b39b0696..e0c57ea89ff8e 100644 --- a/modules/repository-azure/build.gradle +++ b/modules/repository-azure/build.gradle @@ -358,7 +358,7 @@ testClusters.matching { it.name == "yamlRestTest" }.configureEach { setting 'azure.client.integration_test.endpoint_suffix', azureAddress String firstPartOfSeed = BuildParams.testSeed.tokenize(':').get(0) - def ignoreTestSeed = providers.systemProperty('ignore.tests.seed').forUseAtConfigurationTime().isPresent() ? PropertyNormalization.IGNORE_VALUE : PropertyNormalization.DEFAULT + def ignoreTestSeed = providers.systemProperty('ignore.tests.seed').isPresent() ? 
PropertyNormalization.IGNORE_VALUE : PropertyNormalization.DEFAULT setting 'thread_pool.repository_azure.max', (Math.abs(Long.parseUnsignedLong(firstPartOfSeed, 16) % 10) + 1).toString(), ignoreTestSeed } } diff --git a/qa/remote-clusters/build.gradle b/qa/remote-clusters/build.gradle index 93e1da8c52b9d..df03b37c5a603 100644 --- a/qa/remote-clusters/build.gradle +++ b/qa/remote-clusters/build.gradle @@ -65,7 +65,7 @@ tasks.named("preProcessFixture").configure { dockerCompose { tcpPortsToIgnoreWhenWaiting = [9600, 9601] - if ('default'.equalsIgnoreCase(providers.systemProperty('tests.distribution').forUseAtConfigurationTime().getOrElse('default'))) { + if ('default'.equalsIgnoreCase(providers.systemProperty('tests.distribution').getOrElse('default'))) { useComposeFiles = ['docker-compose.yml'] } else { useComposeFiles = ['docker-compose-oss.yml'] diff --git a/x-pack/plugin/core/build.gradle b/x-pack/plugin/core/build.gradle index 768bbb936ee0f..ce2da68cfefb4 100644 --- a/x-pack/plugin/core/build.gradle +++ b/x-pack/plugin/core/build.gradle @@ -77,7 +77,7 @@ tasks.named("processResources").configure { inputs.properties(expansions) filter("tokens" : expansions, ReplaceTokens.class) } - String licenseKey = providers.systemProperty("license.key").forUseAtConfigurationTime().getOrNull() + String licenseKey = providers.systemProperty("license.key").getOrNull() if (licenseKey != null) { println "Using provided license key from ${licenseKey}" } else if (BuildParams.isSnapshotBuild()) { diff --git a/x-pack/plugin/eql/qa/correctness/build.gradle b/x-pack/plugin/eql/qa/correctness/build.gradle index 681a0acf71483..27f88dc65364e 100644 --- a/x-pack/plugin/eql/qa/correctness/build.gradle +++ b/x-pack/plugin/eql/qa/correctness/build.gradle @@ -15,13 +15,11 @@ dependencies { } File serviceAccountFile = providers.environmentVariable('eql_test_credentials_file') - .forUseAtConfigurationTime() .orElse(providers.systemProperty('eql.test.credentials.file').forUseAtConfigurationTime()) .map { s -> new File(s)} .getOrNull() Boolean preserveData = providers.systemProperty('eql.test.preserve.data') - .forUseAtConfigurationTime() .map { s -> Boolean.parseBoolean(s) } .getOrElse(false) diff --git a/x-pack/plugin/ml/build.gradle b/x-pack/plugin/ml/build.gradle index 0efcce2d23fc6..495e4c1097260 100644 --- a/x-pack/plugin/ml/build.gradle +++ b/x-pack/plugin/ml/build.gradle @@ -16,7 +16,7 @@ repositories { forRepository { ivy { name "ml-cpp" - url providers.systemProperty('build.ml_cpp.repo').forUseAtConfigurationTime().orElse('https://prelert-artifacts.s3.amazonaws.com').get() + url providers.systemProperty('build.ml_cpp.repo').orElse('https://prelert-artifacts.s3.amazonaws.com').get() metadataSources { // no repository metadata, look directly for the artifact artifact() diff --git a/x-pack/plugin/sql/sql-cli/build.gradle b/x-pack/plugin/sql/sql-cli/build.gradle index 426408950b890..46fc6cd554384 100644 --- a/x-pack/plugin/sql/sql-cli/build.gradle +++ b/x-pack/plugin/sql/sql-cli/build.gradle @@ -54,7 +54,7 @@ tasks.register("runcli") { dependsOn "shadowJar" doLast { List command = ["${BuildParams.runtimeJavaHome}/bin/java"] - if ('true'.equals(providers.systemProperty('debug').orElse('false').forUseAtConfigurationTime().get())) { + if ('true'.equals(providers.systemProperty('debug').orElse('false').get())) { command += '-agentlib:jdwp=transport=dt_socket,server=y,suspend=y,address=8000' } command += ['-jar', shadowJar.archiveFile.get().asFile.absolutePath] From efb76d8c162b1c58abb8fe706c3e4583eec01cb2 Mon Sep 17 
00:00:00 2001 From: kkewwei Date: Tue, 15 Feb 2022 22:03:39 +0800 Subject: [PATCH 100/167] Eagerly check keyword field length (#83738) If the UTF8 encoding of a keyword field value is bigger than the max length 32766, Lucene will fail the indexing request and, to roll back the changes, will mark the (possibly partially indexed) document as deleted. This results in deletes, even in an append-only workload, which in turn leads to slower merges, as these will potentially have to fall back to MergeStrategy.DOC instead of MergeStrategy.BULK. To avoid this, we do a preflight check here before indexing the document into Lucene. Closes #80865 --- docs/changelog/83738.yaml | 6 +++++ .../index/mapper/KeywordFieldMapper.java | 24 +++++++++++++++++++ .../index/mapper/KeywordFieldMapperTests.java | 13 ++++++++++ 3 files changed, 43 insertions(+) create mode 100644 docs/changelog/83738.yaml diff --git a/docs/changelog/83738.yaml b/docs/changelog/83738.yaml new file mode 100644 index 0000000000000..e4d661bf08284 --- /dev/null +++ b/docs/changelog/83738.yaml @@ -0,0 +1,6 @@ +pr: 83738 +summary: Check that the utf8 length of a keyword field is not bigger than 32766 in ES, rather than in Lucene. +area: Mapping +type: enhancement +issues: + - 80865 diff --git a/server/src/main/java/org/elasticsearch/index/mapper/KeywordFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/KeywordFieldMapper.java index dd934733d4a3c..bdc9976208d4e 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/KeywordFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/KeywordFieldMapper.java @@ -61,6 +61,7 @@ import java.io.IOException; import java.io.UncheckedIOException; import java.util.ArrayList; +import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.List; @@ -69,6 +70,8 @@ import java.util.Objects; import java.util.function.Supplier; +import static org.apache.lucene.util.ByteBlockPool.BYTE_BLOCK_SIZE; + /** * A field mapper for keywords. This mapper accepts strings and indexes them as-is. */ @@ -905,6 +908,27 @@ private void indexValue(DocumentParserContext context, String value) { // convert to utf8 only once before feeding postings/dv/stored fields final BytesRef binaryValue = new BytesRef(value); + + // If the UTF8 encoding of the field value is bigger than the max length 32766, Lucene will fail the indexing request and, to roll + // back the changes, will mark the (possibly partially indexed) document as deleted. This results in deletes, even in an append-only + // workload, which in turn leads to slower merges, as these will potentially have to fall back to MergeStrategy.DOC instead of + // MergeStrategy.BULK. To avoid this, we do a preflight check here before indexing the document into Lucene. + if (binaryValue.length > BYTE_BLOCK_SIZE - 2) { + byte[] prefix = new byte[30]; + System.arraycopy(binaryValue.bytes, binaryValue.offset, prefix, 0, 30); + String msg = "Document contains at least one immense term in field=\"" + + fieldType().name() + + "\" (whose " + + "UTF8 encoding is longer than the max length " + + (BYTE_BLOCK_SIZE - 2) + + "), all of which were " + + "skipped. Please correct the analyzer to not produce such terms. 
The prefix of the first immense " + + "term is: '" + + Arrays.toString(prefix) + + "...'"; + throw new IllegalArgumentException(msg); + } + if (fieldType.indexOptions() != IndexOptions.NONE || fieldType.stored()) { Field field = new KeywordField(fieldType().name(), binaryValue, fieldType); context.doc().add(field); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/KeywordFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/KeywordFieldMapperTests.java index b061346d0dddb..9286daa30d37d 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/KeywordFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/KeywordFieldMapperTests.java @@ -605,4 +605,17 @@ public void testDimensionInRoutingPath() throws IOException { ); mapper.documentMapper().validate(settings, false); // Doesn't throw } + + public void testKeywordFieldUtf8LongerThan32766() throws Exception { + DocumentMapper mapper = createDocumentMapper(fieldMapping(b -> b.field("type", "keyword"))); + StringBuilder stringBuilder = new StringBuilder(32768); + for (int i = 0; i < 32768; i++) { + stringBuilder.append("a"); + } + MapperParsingException e = expectThrows( + MapperParsingException.class, + () -> mapper.parse(source(b -> b.field("field", stringBuilder.toString()))) + ); + assertThat(e.getCause().getMessage(), containsString("UTF8 encoding is longer than the max length")); + } } From c98a7299ee11d52123e896585048412326b577bb Mon Sep 17 00:00:00 2001 From: David Turner Date: Tue, 15 Feb 2022 15:07:50 +0000 Subject: [PATCH 101/167] Fix LeaderCheckerTests#testFollowerBehaviour (#83897) This test computes the expected message by tracking the different kinds of failures generated by a mock transport service. This tracking counts consecutive failures so is reset on success, but it is not explicitly reset when starting the second pass. This was usually fine since we start the second pass with a long sequence of checks that do not fail which would reset the tracking. Rarely however this sequence was too short to contain any success responses, so the counters would not be reset as needed. This commit makes sure we run at least `leaderCheckRetryCount` iterations without a failure to ensure that at least one of them succeeds. 
Closes #83857 Closes #83924 --- .../cluster/coordination/LeaderCheckerTests.java | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/server/src/test/java/org/elasticsearch/cluster/coordination/LeaderCheckerTests.java b/server/src/test/java/org/elasticsearch/cluster/coordination/LeaderCheckerTests.java index 257203b75364a..f06bf27a9c806 100644 --- a/server/src/test/java/org/elasticsearch/cluster/coordination/LeaderCheckerTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/coordination/LeaderCheckerTests.java @@ -205,7 +205,9 @@ public String toString() { leaderChecker.updateLeader(leader2); { checkCount.set(0); - final long maxCheckCount = randomLongBetween(2, 1000); + // run at least leaderCheckRetryCount iterations to ensure at least one success so that we reset the counters and clear out + // anything left over from the previous run + final long maxCheckCount = randomLongBetween(leaderCheckRetryCount, 1000); logger.info("--> checking again that no failure is detected in {} checks", maxCheckCount); while (checkCount.get() < maxCheckCount) { deterministicTaskQueue.runAllRunnableTasks(); From 4e6f4b0b84f09c404ac2cf7c808ff533ee2c8048 Mon Sep 17 00:00:00 2001 From: Joe Gallo Date: Tue, 15 Feb 2022 10:20:56 -0500 Subject: [PATCH 102/167] MetadataIndexStateService cleanups (#83868) --- .../elasticsearch/cluster/ClusterState.java | 4 ++ .../cluster/block/ClusterBlocks.java | 7 ++++ .../metadata/MetadataIndexStateService.java | 42 ++++++------------- 3 files changed, 24 insertions(+), 29 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/cluster/ClusterState.java b/server/src/main/java/org/elasticsearch/cluster/ClusterState.java index 3fd0f52ddac94..68fe938e63de7 100644 --- a/server/src/main/java/org/elasticsearch/cluster/ClusterState.java +++ b/server/src/main/java/org/elasticsearch/cluster/ClusterState.java @@ -631,6 +631,10 @@ public DiscoveryNodes nodes() { return nodes; } + public Builder routingTable(RoutingTable.Builder routingTableBuilder) { + return routingTable(routingTableBuilder.build()); + } + public Builder routingTable(RoutingTable routingTable) { this.routingTable = routingTable; return this; diff --git a/server/src/main/java/org/elasticsearch/cluster/block/ClusterBlocks.java b/server/src/main/java/org/elasticsearch/cluster/block/ClusterBlocks.java index fbbf6ed2391c4..2e95915661707 100644 --- a/server/src/main/java/org/elasticsearch/cluster/block/ClusterBlocks.java +++ b/server/src/main/java/org/elasticsearch/cluster/block/ClusterBlocks.java @@ -300,6 +300,13 @@ public static Builder builder() { return new Builder(); } + /** + * Convenience method, equivalent to: {@code builder().blocks(blocks)} + */ + public static Builder builder(ClusterBlocks blocks) { + return builder().blocks(blocks); + } + public static class Builder { private final Set global = new HashSet<>(); diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataIndexStateService.java b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataIndexStateService.java index ddd449f9d265d..312bb505f9059 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataIndexStateService.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataIndexStateService.java @@ -225,10 +225,8 @@ public void clusterStateProcessed( ); } // acknowledged maybe be false but some indices may have been correctly - // closed, so - // we maintain a kind of coherency by overriding the shardsAcknowledged - // value - // (see 
ShardsAcknowledgedResponse constructor) + // closed, so we maintain a kind of coherency by overriding the + // shardsAcknowledged value (see ShardsAcknowledgedResponse constructor) boolean shardsAcked = acknowledged ? shardsAcknowledged : false; listener.onResponse( new CloseIndexResponse(acknowledged, shardsAcked, indices) @@ -271,11 +269,9 @@ static ClusterState addIndexClosedBlocks( final Map blockedIndices, final ClusterState currentState ) { - final Metadata.Builder metadata = Metadata.builder(currentState.metadata()); - final Set indicesToClose = new HashSet<>(); for (Index index : indices) { - final IndexMetadata indexMetadata = metadata.getSafe(index); + final IndexMetadata indexMetadata = currentState.metadata().getIndexSafe(index); if (indexMetadata.getState() != IndexMetadata.State.CLOSE) { indicesToClose.add(index); } else { @@ -304,8 +300,7 @@ static ClusterState addIndexClosedBlocks( ); } - final ClusterBlocks.Builder blocks = ClusterBlocks.builder().blocks(currentState.blocks()); - final RoutingTable.Builder routingTable = RoutingTable.builder(currentState.routingTable()); + final ClusterBlocks.Builder blocks = ClusterBlocks.builder(currentState.blocks()); for (Index index : indicesToClose) { ClusterBlock indexBlock = null; @@ -334,7 +329,7 @@ static ClusterState addIndexClosedBlocks( blockedIndices.keySet().stream().map(Object::toString).collect(Collectors.joining(",")) ) ); - return ClusterState.builder(currentState).blocks(blocks).metadata(metadata).routingTable(routingTable.build()).build(); + return ClusterState.builder(currentState).blocks(blocks).build(); } /** @@ -366,8 +361,7 @@ private static Tuple> addIndexBlock( return Tuple.tuple(currentState, Collections.emptyMap()); } - final ClusterBlocks.Builder blocks = ClusterBlocks.builder().blocks(currentState.blocks()); - final RoutingTable.Builder routingTable = RoutingTable.builder(currentState.routingTable()); + final ClusterBlocks.Builder blocks = ClusterBlocks.builder(currentState.blocks()); final Map blockedIndices = new HashMap<>(); for (Index index : indicesToAddBlock) { @@ -405,10 +399,7 @@ private static Tuple> addIndexBlock( block.name, blockedIndices.keySet().stream().map(Object::toString).collect(Collectors.toList()) ); - return Tuple.tuple( - ClusterState.builder(currentState).blocks(blocks).metadata(metadata).routingTable(routingTable.build()).build(), - blockedIndices - ); + return Tuple.tuple(ClusterState.builder(currentState).blocks(blocks).metadata(metadata).build(), blockedIndices); } /** @@ -793,9 +784,8 @@ static Tuple> closeRoutingTable( final Map blockedIndices, final Map verifyResult ) { - final Metadata.Builder metadata = Metadata.builder(currentState.metadata()); - final ClusterBlocks.Builder blocks = ClusterBlocks.builder().blocks(currentState.blocks()); + final ClusterBlocks.Builder blocks = ClusterBlocks.builder(currentState.blocks()); final RoutingTable.Builder routingTable = RoutingTable.builder(currentState.routingTable()); final Set closedIndices = new HashSet<>(); @@ -881,7 +871,7 @@ static Tuple> closeRoutingTable( } logger.info("completed closing of indices {}", closedIndices); return Tuple.tuple( - ClusterState.builder(currentState).blocks(blocks).metadata(metadata).routingTable(routingTable.build()).build(), + ClusterState.builder(currentState).blocks(blocks).metadata(metadata).routingTable(routingTable).build(), closingResults.values() ); } @@ -939,10 +929,7 @@ private static Tuple> finalizeBlock( final Map verifyResult, final APIBlock block ) { - - final Metadata.Builder metadata 
= Metadata.builder(currentState.metadata()); - final ClusterBlocks.Builder blocks = ClusterBlocks.builder().blocks(currentState.blocks()); - final RoutingTable.Builder routingTable = RoutingTable.builder(currentState.routingTable()); + final ClusterBlocks.Builder blocks = ClusterBlocks.builder(currentState.blocks()); final Set effectivelyBlockedIndices = new HashSet<>(); Map blockingResults = new HashMap<>(verifyResult); @@ -995,10 +982,7 @@ private static Tuple> finalizeBlock( } } logger.info("completed adding block {} to indices {}", block.name, effectivelyBlockedIndices); - return Tuple.tuple( - ClusterState.builder(currentState).blocks(blocks).metadata(metadata).routingTable(routingTable.build()).build(), - blockingResults.values() - ); + return Tuple.tuple(ClusterState.builder(currentState).blocks(blocks).build(), blockingResults.values()); } /** @@ -1129,7 +1113,7 @@ private ClusterState openIndices(final Index[] indices, final ClusterState curre }); final Metadata.Builder metadata = Metadata.builder(currentState.metadata()); - final ClusterBlocks.Builder blocks = ClusterBlocks.builder().blocks(currentState.blocks()); + final ClusterBlocks.Builder blocks = ClusterBlocks.builder(currentState.blocks()); final Version minIndexCompatibilityVersion = currentState.getNodes().getMaxNodeVersion().minimumIndexCompatibilityVersion(); for (IndexMetadata indexMetadata : indicesToOpen) { @@ -1168,7 +1152,7 @@ private ClusterState openIndices(final Index[] indices, final ClusterState curre routingTable.addAsFromCloseToOpen(updatedState.metadata().getIndexSafe(previousIndexMetadata.getIndex())); } } - return ClusterState.builder(updatedState).routingTable(routingTable.build()).build(); + return ClusterState.builder(updatedState).routingTable(routingTable).build(); } } From ff2c410c565652cc06f63097256f54042cbf00e9 Mon Sep 17 00:00:00 2001 From: David Turner Date: Tue, 15 Feb 2022 16:36:49 +0000 Subject: [PATCH 103/167] Fix GMSPersStateTests#testDataOnlyNodePersistence (#83900) This test assumes that we call `setLastAcceptedState` at least once in the loop, but very rarely this would not be the case. This fix ensures that we always persist at least one state. Closes #83759 --- .../gateway/GatewayMetaStatePersistedStateTests.java | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/server/src/test/java/org/elasticsearch/gateway/GatewayMetaStatePersistedStateTests.java b/server/src/test/java/org/elasticsearch/gateway/GatewayMetaStatePersistedStateTests.java index aa7ae1804cc2a..1c256f00e2dc1 100644 --- a/server/src/test/java/org/elasticsearch/gateway/GatewayMetaStatePersistedStateTests.java +++ b/server/src/test/java/org/elasticsearch/gateway/GatewayMetaStatePersistedStateTests.java @@ -480,9 +480,11 @@ public void testDataOnlyNodePersistence() throws Exception { // generate a series of updates and check if batching works final String indexName = randomAlphaOfLength(10); long currentTerm = state.term(); + boolean wroteState = false; final int iterations = randomIntBetween(1, 1000); for (int i = 0; i < iterations; i++) { - if (rarely()) { + final boolean mustWriteState = wroteState == false && i == iterations - 1; + if (rarely() && mustWriteState == false) { // bump term currentTerm = currentTerm + (rarely() ? 
randomIntBetween(1, 5) : 0L); persistedState.setCurrentTerm(currentTerm); @@ -496,8 +498,10 @@ public void testDataOnlyNodePersistence() throws Exception { Metadata.builder().coordinationMetadata(createCoordinationMetadata(term)).put(indexMetadata, false).build() ); persistedState.setLastAcceptedState(state); + wroteState = true; } } + assertTrue(wroteState); // must write it at least once assertEquals(currentTerm, persistedState.getCurrentTerm()); assertClusterStateEqual(state, persistedState.getLastAcceptedState()); assertBusy(() -> assertTrue(gateway.allPendingAsyncStatesWritten())); From dae889b3e5c4a31b0849e3bc440ece1d018a9050 Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Tue, 15 Feb 2022 08:56:21 -0800 Subject: [PATCH 104/167] Forbid guava group instead of artifact name (#83546) We disallow including guava on the compile classpaths, but currently this only forbids the main guava artifact. Yet there are other artifacts (eg failureaccess) provided by guava. This commit changes the dependency check to use the guava group instead of artifact name. --- .../elasticsearch.forbidden-dependencies.gradle | 11 +++++------ modules/repository-gcs/build.gradle | 2 +- 2 files changed, 6 insertions(+), 7 deletions(-) diff --git a/build-tools-internal/src/main/groovy/elasticsearch.forbidden-dependencies.gradle b/build-tools-internal/src/main/groovy/elasticsearch.forbidden-dependencies.gradle index 2c20d79fac711..e67cb7846c791 100644 --- a/build-tools-internal/src/main/groovy/elasticsearch.forbidden-dependencies.gradle +++ b/build-tools-internal/src/main/groovy/elasticsearch.forbidden-dependencies.gradle @@ -8,16 +8,15 @@ // we do not want any of these dependencies on the compilation classpath // because they could then be used within Elasticsearch -List FORBIDDEN_DEPENDENCIES = [ - 'guava' +List FORBIDDEN_DEPENDENCY_GROUPS = [ + 'com.google.guava' ] Closure checkDeps = { Configuration configuration -> configuration.resolutionStrategy.eachDependency { - String artifactName = it.target.name - if (FORBIDDEN_DEPENDENCIES.contains(artifactName)) { - throw new GradleException("Dependency '${artifactName}' on configuration '${configuration.name}' is not allowed. " + - "If it is needed as a transitive depenency, try adding it to the runtime classpath") + if (FORBIDDEN_DEPENDENCY_GROUPS.contains(it.target.group)) { + throw new GradleException("Dependency '${it.target.group}:${it.target.name}' on configuration '${configuration.name}' is not allowed. " + + "If it is needed as a transitive dependency, try adding it to the runtime classpath") } } } diff --git a/modules/repository-gcs/build.gradle b/modules/repository-gcs/build.gradle index ff6dbe983d9c4..0f81809b15a20 100644 --- a/modules/repository-gcs/build.gradle +++ b/modules/repository-gcs/build.gradle @@ -30,7 +30,7 @@ dependencies { api 'com.google.cloud:google-cloud-core:1.95.4' api 'com.google.cloud:google-cloud-core-http:1.95.4' runtimeOnly 'com.google.guava:guava:30.1.1-jre' - api 'com.google.guava:failureaccess:1.0.1' + runtimeOnly 'com.google.guava:failureaccess:1.0.1' api "commons-logging:commons-logging:${versions.commonslogging}" api "org.apache.logging.log4j:log4j-1.2-api:${versions.log4j}" api "commons-codec:commons-codec:${versions.commonscodec}" From 6eddf3d29cfd5e8ea0bb7442b7d2bbc2741e0d42 Mon Sep 17 00:00:00 2001 From: David Turner Date: Tue, 15 Feb 2022 17:08:18 +0000 Subject: [PATCH 105/167] Fix ZeroBytesReference#indexOf (#83956) This method would claim to find a zero byte even if there are no remaining bytes in the buffer. 
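As a concrete illustration (a hedged sketch, not part of the patch; `ZeroBytesReference` and its `indexOf(byte, int)` method are the test-only class and signature shown in the diff below):

    import org.elasticsearch.common.bytes.ZeroBytesReference;

    class IndexOfSketch {
        public static void main(String[] args) {
            // A reference of length 3 exposes zero bytes at indices 0..2.
            ZeroBytesReference zeros = new ZeroBytesReference(3);
            System.out.println(zeros.indexOf((byte) 0, 2)); // 2: a zero byte remains at `from`
            System.out.println(zeros.indexOf((byte) 0, 3)); // was 3 (past the end); with this fix, -1
        }
    }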
This commit fixes that. --- .../org/elasticsearch/common/bytes/ZeroBytesReference.java | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/server/src/test/java/org/elasticsearch/common/bytes/ZeroBytesReference.java b/server/src/test/java/org/elasticsearch/common/bytes/ZeroBytesReference.java index 2876220483e2c..5c06e5ed5ebee 100644 --- a/server/src/test/java/org/elasticsearch/common/bytes/ZeroBytesReference.java +++ b/server/src/test/java/org/elasticsearch/common/bytes/ZeroBytesReference.java @@ -20,12 +20,14 @@ public class ZeroBytesReference extends AbstractBytesReference { private final int length; public ZeroBytesReference(int length) { + assert 0 <= length : length; this.length = length; } @Override public int indexOf(byte marker, int from) { - if (marker == 0) { + assert 0 <= from && from <= length : from + " vs " + length; + if (marker == 0 && from < length) { return from; } else { return -1; @@ -34,6 +36,7 @@ public int indexOf(byte marker, int from) { @Override public byte get(int index) { + assert 0 <= index && index < length : index + " vs " + length; return 0; } @@ -44,6 +47,7 @@ public int length() { @Override public BytesReference slice(int from, int length) { + assert from + length <= this.length : from + " and " + length + " vs " + this.length; return new ZeroBytesReference(length); } From e117812ef8062f66fd224a8e849ce3b907462678 Mon Sep 17 00:00:00 2001 From: James Rodewig Date: Tue, 15 Feb 2022 12:19:04 -0500 Subject: [PATCH 106/167] [DOCS] Update migration APIs docs (#83901) * Adds a shared note that the migration APIs are mainly intended for internal use by Kibana's Upgrade Assistant. * Updates the feature migration API docs to use updated "migration" terminology. * Removes some references to major versions from the deprecation API docs. --- .../migration/apis/deprecation.asciidoc | 9 ++-- ...de.asciidoc => feature-migration.asciidoc} | 54 +++++++++++-------- .../apis/shared-migration-apis-tip.asciidoc | 4 ++ docs/reference/migration/migration.asciidoc | 7 ++- docs/reference/redirects.asciidoc | 5 ++ 5 files changed, 50 insertions(+), 29 deletions(-) rename docs/reference/migration/apis/{feature_upgrade.asciidoc => feature-migration.asciidoc} (68%) create mode 100644 docs/reference/migration/apis/shared-migration-apis-tip.asciidoc diff --git a/docs/reference/migration/apis/deprecation.asciidoc b/docs/reference/migration/apis/deprecation.asciidoc index e3ebd57263c2d..fd82bb3e0e6d2 100644 --- a/docs/reference/migration/apis/deprecation.asciidoc +++ b/docs/reference/migration/apis/deprecation.asciidoc @@ -5,14 +5,11 @@ Deprecation info ++++ -IMPORTANT: Use this API to check for deprecated configuration before performing -a major version upgrade. You should run it on the last minor version of the -major version you are upgrading from, as earlier minor versions may not include -all deprecations. +include::{es-repo-dir}/migration/apis/shared-migration-apis-tip.asciidoc[] The deprecation API is to be used to retrieve information about different cluster, node, and index level settings that use deprecated features that will -be removed or changed in the next major version. +be removed or changed in a future version. [[migration-api-request]] ==== {api-request-title} @@ -118,7 +115,7 @@ issue. |======= |warning | You can upgrade directly, but you are using deprecated functionality -which will not be available or behave differently in the next major version. +which will not be available or behave differently in a future version. 
|critical | You cannot upgrade without fixing this problem. |======= diff --git a/docs/reference/migration/apis/feature_upgrade.asciidoc b/docs/reference/migration/apis/feature-migration.asciidoc similarity index 68% rename from docs/reference/migration/apis/feature_upgrade.asciidoc rename to docs/reference/migration/apis/feature-migration.asciidoc index 1f1fc5b2aa239..9cd904f42e084 100644 --- a/docs/reference/migration/apis/feature_upgrade.asciidoc +++ b/docs/reference/migration/apis/feature-migration.asciidoc @@ -1,35 +1,47 @@ [role="xpack"] -[[migration-api-feature-upgrade]] -=== Feature Upgrade APIs +[[feature-migration-api]] +=== Feature migration APIs ++++ -Feature upgrade APIs +Feature migration ++++ -IMPORTANT: Use this API to check for system features that need to be upgraded before -a major version upgrade. You should run it on the last minor version of the -major version you are upgrading from. +include::{es-repo-dir}/migration/apis/shared-migration-apis-tip.asciidoc[] -The feature upgrade APIs are to be used to retrieve information about system features -that have to be upgraded before a cluster can be migrated to the next major version number, -and to trigger an automated system upgrade that might potentially involve downtime for -{es} system features. +Version upgrades sometimes require changes to how features store configuration +information and data in system indices. The feature migration APIs enable you to +see what features require changes, initiate the automatic migration process, and +check migration status. -[[feature-upgrade-api-request]] +Some functionality might be temporarily unavailable during the migration +process. + +[[feature-migration-api-request]] ==== {api-request-title} `GET /migration/system_features` -[[feature-upgrade-api-prereqs]] +`POST /migration/system_features` + +[[feature-migration-api-prereqs]] ==== {api-prereq-title} * If the {es} {security-features} are enabled, you must have the `manage` <> to use this API. -[[feature-upgrade-api-example]] +[[feature-migration-api-desc]] +==== {api-description-title} + +Submit a GET request to the `_migration/system_features` endpoint to see what +features need to be migrated and the status of any migrations that are in +progress. + +Submit a POST request to the endpoint to start the migration process. + +[[feature-migration-api-example]] ==== {api-examples-title} -To see the list of system features needing upgrades, submit a GET request to the -`_migration/system_features` endpoint: +When you submit a GET request to the `_migration/system_features` endpoint, the +response indicates the status of any features that need to be migrated. [source,console] -------------------------------------------------- @@ -120,10 +132,10 @@ Example response: -------------------------------------------------- // TESTRESPONSE[s/"minimum_index_version" : "8.0.0"/"minimum_index_version" : $body.$_path/] -This response tells us that Elasticsearch security needs its internal -indices upgraded before we can upgrade the cluster to 8.0. -To perform the required upgrade, submit a POST request to the same endpoint. +When you submit a POST request to the `_migration/system_features` endpoint to +start the migration process, the response indicates what features will be +migrated. 
[source,console] -------------------------------------------------- @@ -138,13 +150,13 @@ Example response: "accepted" : true, "features" : [ { - "feature_name" : "security" + "feature_name" : "security" <1> } ] } -------------------------------------------------- // TESTRESPONSE[skip: can't actually upgrade system indices in these tests] -This tells us that the security index is being upgraded. To check the -overall status of the upgrade, call the endpoint with GET. +<1> {es} security will be migrated before the cluster is upgraded. +Subsequent GET requests will return the status of the migration process. diff --git a/docs/reference/migration/apis/shared-migration-apis-tip.asciidoc b/docs/reference/migration/apis/shared-migration-apis-tip.asciidoc new file mode 100644 index 0000000000000..6a606ac83354c --- /dev/null +++ b/docs/reference/migration/apis/shared-migration-apis-tip.asciidoc @@ -0,0 +1,4 @@ +TIP: These APIs are designed for indirect use by {kib}'s **Upgrade Assistant**. +We strongly recommend you use the **Upgrade Assistant** to upgrade from +{prev-major-last} to {version}. For upgrade instructions, refer to +{stack-ref}/upgrading-elastic-stack.html[Upgrading to Elastic {version}]. \ No newline at end of file diff --git a/docs/reference/migration/migration.asciidoc b/docs/reference/migration/migration.asciidoc index 88c1631e30903..ffb2ca7a7859d 100644 --- a/docs/reference/migration/migration.asciidoc +++ b/docs/reference/migration/migration.asciidoc @@ -2,9 +2,12 @@ [[migration-api]] == Migration APIs -The migration APIs simplify upgrading {xpack} indices from one version to another. +The migration APIs power {kib}'s **Upgrade Assistant** feature. + +include::apis/shared-migration-apis-tip.asciidoc[] * <> +* <> include::apis/deprecation.asciidoc[] -include::apis/feature_upgrade.asciidoc[] +include::apis/feature-migration.asciidoc[] diff --git a/docs/reference/redirects.asciidoc b/docs/reference/redirects.asciidoc index 7badd5ce5dd45..c8c31ee3dd775 100644 --- a/docs/reference/redirects.asciidoc +++ b/docs/reference/redirects.asciidoc @@ -3,6 +3,11 @@ The following pages have moved or been deleted. +[role="exclude",id="migration-api-feature-upgrade"] +=== Feature upgrade APIs + +Refer to <>. + [role="exclude",id="java-clients"] === Java transport client and security From 11e1d009a4c114e117094374f1be513750140b0f Mon Sep 17 00:00:00 2001 From: Dan Roscigno Date: Tue, 15 Feb 2022 12:29:00 -0500 Subject: [PATCH 107/167] [DOCS] Update links to avoid redirects (#83944) Replaces two links that currently go to the redirect page with the updated links to reduce clicks. --- x-pack/docs/en/security/authentication/saml-guide.asciidoc | 3 ++- x-pack/docs/en/security/authentication/saml-realm.asciidoc | 2 +- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/x-pack/docs/en/security/authentication/saml-guide.asciidoc b/x-pack/docs/en/security/authentication/saml-guide.asciidoc index e4f6bc23c1543..ba19563c75f81 100644 --- a/x-pack/docs/en/security/authentication/saml-guide.asciidoc +++ b/x-pack/docs/en/security/authentication/saml-guide.asciidoc @@ -20,7 +20,8 @@ required in {kib} to activate the SAML authentication provider. NOTE: The SAML support in {kib} is designed on the expectation that it will be the primary (or sole) authentication method for users of that {kib} instance. Once you enable SAML authentication in {kib} it will affect all users who try -to login. The <> section provides more detail about how this works. +to login. 
The <> section provides more detail about how +this works. [[saml-guide-idp]] === The identity provider diff --git a/x-pack/docs/en/security/authentication/saml-realm.asciidoc b/x-pack/docs/en/security/authentication/saml-realm.asciidoc index cd91505f63d32..dba414d192081 100644 --- a/x-pack/docs/en/security/authentication/saml-realm.asciidoc +++ b/x-pack/docs/en/security/authentication/saml-realm.asciidoc @@ -17,4 +17,4 @@ chain. In order to simplify the process of configuring SAML authentication within the Elastic Stack, there is a step-by-step guide to -<>. +<>. From e3deacf54712a787d573038c101254270e097381 Mon Sep 17 00:00:00 2001 From: Tobias Stadler Date: Tue, 15 Feb 2022 18:42:17 +0100 Subject: [PATCH 108/167] [DOCS] Fix typos (#83895) --- .../painless-watcher-context-variables.asciidoc | 2 +- docs/plugins/repository.asciidoc | 2 +- docs/reference/analysis/analyzers/pattern-analyzer.asciidoc | 2 +- .../analysis/tokenfilters/predicate-tokenfilter.asciidoc | 2 +- docs/reference/cat/trainedmodel.asciidoc | 2 +- docs/reference/cluster/stats.asciidoc | 2 +- docs/reference/commands/keystore.asciidoc | 2 +- docs/reference/graph/explore.asciidoc | 2 +- docs/reference/how-to/recipes/scoring.asciidoc | 2 +- docs/reference/migration/migrate_8_0/plugin-changes.asciidoc | 2 +- docs/reference/migration/migrate_8_0/sql-jdbc-changes.asciidoc | 2 +- .../ml/anomaly-detection/ml-configuring-alerts.asciidoc | 2 +- docs/reference/ml/ml-shared.asciidoc | 2 +- docs/reference/modules/discovery/bootstrapping.asciidoc | 2 +- docs/reference/snapshot-restore/apis/put-repo-api.asciidoc | 2 +- docs/reference/sql/limitations.asciidoc | 2 +- 16 files changed, 16 insertions(+), 16 deletions(-) diff --git a/docs/painless/painless-contexts/painless-watcher-context-variables.asciidoc b/docs/painless/painless-contexts/painless-watcher-context-variables.asciidoc index 0a21ae1fd2bdc..c1ef342100143 100644 --- a/docs/painless/painless-contexts/painless-watcher-context-variables.asciidoc +++ b/docs/painless/painless-contexts/painless-watcher-context-variables.asciidoc @@ -9,7 +9,7 @@ The following variables are available in all watcher contexts. The id of the watch. `ctx['id']` (`String`, read-only):: - The server generated unique identifer for the run watch. + The server generated unique identifier for the run watch. `ctx['metadata']` (`Map`, read-only):: Metadata can be added to the top level of the watch definition. This diff --git a/docs/plugins/repository.asciidoc b/docs/plugins/repository.asciidoc index 61716e5f6a059..7355231141791 100644 --- a/docs/plugins/repository.asciidoc +++ b/docs/plugins/repository.asciidoc @@ -6,7 +6,7 @@ functionality in Elasticsearch by adding repositories backed by the cloud or by distributed file systems: [discrete] -==== Offical repository plugins +==== Official repository plugins NOTE: Support for S3, GCS and Azure repositories is now bundled in {es} by default. 
diff --git a/docs/reference/analysis/analyzers/pattern-analyzer.asciidoc b/docs/reference/analysis/analyzers/pattern-analyzer.asciidoc index 92c293795a3d2..e685c17f96865 100644 --- a/docs/reference/analysis/analyzers/pattern-analyzer.asciidoc +++ b/docs/reference/analysis/analyzers/pattern-analyzer.asciidoc @@ -366,7 +366,7 @@ The regex above is easier to understand as: [discrete] === Definition -The `pattern` anlayzer consists of: +The `pattern` analyzer consists of: Tokenizer:: * <> diff --git a/docs/reference/analysis/tokenfilters/predicate-tokenfilter.asciidoc b/docs/reference/analysis/tokenfilters/predicate-tokenfilter.asciidoc index b90350e2bbd13..9a20131a227e7 100644 --- a/docs/reference/analysis/tokenfilters/predicate-tokenfilter.asciidoc +++ b/docs/reference/analysis/tokenfilters/predicate-tokenfilter.asciidoc @@ -44,7 +44,7 @@ The filter produces the following tokens. The API response contains the position and offsets of each output token. Note the `predicate_token_filter` filter does not change the tokens' original -positions or offets. +positions or offsets. .*Response* [%collapsible] diff --git a/docs/reference/cat/trainedmodel.asciidoc b/docs/reference/cat/trainedmodel.asciidoc index a092b8a207bb2..6f305be845fc4 100644 --- a/docs/reference/cat/trainedmodel.asciidoc +++ b/docs/reference/cat/trainedmodel.asciidoc @@ -72,7 +72,7 @@ The estimated heap size to keep the trained model in memory. `id`::: (Default) -Idetifier for the trained model. +Identifier for the trained model. `ingest.count`, `ic`, `ingestCount`::: The total number of documents that are processed by the model. diff --git a/docs/reference/cluster/stats.asciidoc b/docs/reference/cluster/stats.asciidoc index b59b7e34689ec..8ea35d268134d 100644 --- a/docs/reference/cluster/stats.asciidoc +++ b/docs/reference/cluster/stats.asciidoc @@ -1096,7 +1096,7 @@ Total size of all file stores across all selected nodes. `total_in_bytes`:: (integer) -Total size, in bytes, of all file stores across all seleced nodes. +Total size, in bytes, of all file stores across all selected nodes. `free`:: (<>) diff --git a/docs/reference/commands/keystore.asciidoc b/docs/reference/commands/keystore.asciidoc index c56b7dbc6a64e..a8e542cb8e52d 100644 --- a/docs/reference/commands/keystore.asciidoc +++ b/docs/reference/commands/keystore.asciidoc @@ -218,7 +218,7 @@ password. [[show-keystore-value]] ==== Show settings in the keystore -To display the value of a setting in the keystorem use the `show` command: +To display the value of a setting in the keystore use the `show` command: [source,sh] ---------------------------------------------------------------- diff --git a/docs/reference/graph/explore.asciidoc b/docs/reference/graph/explore.asciidoc index 62d50ed23ecea..6536653ae4cd6 100644 --- a/docs/reference/graph/explore.asciidoc +++ b/docs/reference/graph/explore.asciidoc @@ -84,7 +84,7 @@ graph as vertices. For example: field::: Identifies a field in the documents of interest. include::: Identifies the terms of interest that form the starting points from which you want to spider out. You do not have to specify a seed query -if you specify an include clause. The include clause implicitly querys for +if you specify an include clause. The include clause implicitly queries for documents that contain any of the listed terms listed. In addition to specifying a simple array of strings, you can also pass objects with `term` and `boost` values to boost matches on particular terms. 
diff --git a/docs/reference/how-to/recipes/scoring.asciidoc b/docs/reference/how-to/recipes/scoring.asciidoc index 47a3622aabf1f..5c5a8977d34d4 100644 --- a/docs/reference/how-to/recipes/scoring.asciidoc +++ b/docs/reference/how-to/recipes/scoring.asciidoc @@ -192,7 +192,7 @@ While both options would return similar scores, there are trade-offs: <> provides a lot of flexibility, enabling you to combine the text relevance score with static signals as you prefer. On the other hand, the <> only -exposes a couple ways to incorporate static signails into the score. However, +exposes a couple ways to incorporate static signals into the score. However, it relies on the <> and <> fields, which index values in a special way that allows the <> to skip diff --git a/docs/reference/migration/migrate_8_0/plugin-changes.asciidoc b/docs/reference/migration/migrate_8_0/plugin-changes.asciidoc index 4955cfc189504..63a625c89026d 100644 --- a/docs/reference/migration/migrate_8_0/plugin-changes.asciidoc +++ b/docs/reference/migration/migrate_8_0/plugin-changes.asciidoc @@ -13,7 +13,7 @@ TIP: {ess-skip-section} ==== *Details* + In previous versions of {es}, in order to register a snapshot repository -backed by Amazon S3, Google Cloud Storge (GCS) or Microsoft Azure Blob +backed by Amazon S3, Google Cloud Storage (GCS) or Microsoft Azure Blob Storage, you first had to install the corresponding Elasticsearch plugin, for example `repository-s3`. These plugins are now included in {es} by default. diff --git a/docs/reference/migration/migrate_8_0/sql-jdbc-changes.asciidoc b/docs/reference/migration/migrate_8_0/sql-jdbc-changes.asciidoc index 4833f587d1896..0e14d5b4ec0af 100644 --- a/docs/reference/migration/migrate_8_0/sql-jdbc-changes.asciidoc +++ b/docs/reference/migration/migrate_8_0/sql-jdbc-changes.asciidoc @@ -12,7 +12,7 @@ *Details* + To reduce the dependency of the JDBC driver onto Elasticsearch classes, the JDBC driver returns geometry data as strings using the WKT (well-known text) format instead of classes from the `org.elasticsearch.geometry`. -Users can choose the geometry library desired to convert the string represantion into a full-blown objects +Users can choose the geometry library desired to convert the string representation into a full-blown objects either such as the `elasticsearch-geo` library (which returned the object `org.elasticsearch.geo` as before), jts or spatial4j. diff --git a/docs/reference/ml/anomaly-detection/ml-configuring-alerts.asciidoc b/docs/reference/ml/anomaly-detection/ml-configuring-alerts.asciidoc index 3844d5fcd7aed..4bbf9d800e3da 100644 --- a/docs/reference/ml/anomaly-detection/ml-configuring-alerts.asciidoc +++ b/docs/reference/ml/anomaly-detection/ml-configuring-alerts.asciidoc @@ -330,7 +330,7 @@ formatting is based on the {kib} settings. The peak number of bytes of memory ever used by the model. ==== -==== _Data delay has occured_ +==== _Data delay has occurred_ `context.message`:: A preconstructed message for the rule. diff --git a/docs/reference/ml/ml-shared.asciidoc b/docs/reference/ml/ml-shared.asciidoc index d5b0a2fcdaded..6fc55539c439c 100644 --- a/docs/reference/ml/ml-shared.asciidoc +++ b/docs/reference/ml/ml-shared.asciidoc @@ -995,7 +995,7 @@ Tokenize with special tokens. The tokens typically included in MPNet-style token end::inference-config-nlp-tokenization-mpnet-with-special-tokens[] tag::inference-config-nlp-vocabulary[] -The configuration for retreiving the vocabulary of the model. 
The vocabulary is +The configuration for retrieving the vocabulary of the model. The vocabulary is then used at inference time. This information is usually provided automatically by storing vocabulary in a known, internally managed index. end::inference-config-nlp-vocabulary[] diff --git a/docs/reference/modules/discovery/bootstrapping.asciidoc b/docs/reference/modules/discovery/bootstrapping.asciidoc index be1149d54a0b7..a313f05dbf5f4 100644 --- a/docs/reference/modules/discovery/bootstrapping.asciidoc +++ b/docs/reference/modules/discovery/bootstrapping.asciidoc @@ -75,7 +75,7 @@ configuration. If each node name is a fully-qualified domain name such as `master-a.example.com` then you must use fully-qualified domain names in the `cluster.initial_master_nodes` list too; conversely if your node names are bare hostnames (without the `.example.com` suffix) then you must use bare hostnames -in the `cluster.initial_master_nodes` list. If you use a mix of fully-qualifed +in the `cluster.initial_master_nodes` list. If you use a mix of fully-qualified and bare hostnames, or there is some other mismatch between `node.name` and `cluster.initial_master_nodes`, then the cluster will not form successfully and you will see log messages like the following. diff --git a/docs/reference/snapshot-restore/apis/put-repo-api.asciidoc b/docs/reference/snapshot-restore/apis/put-repo-api.asciidoc index 0b77795540a14..4d578b3df489d 100644 --- a/docs/reference/snapshot-restore/apis/put-repo-api.asciidoc +++ b/docs/reference/snapshot-restore/apis/put-repo-api.asciidoc @@ -91,7 +91,7 @@ Repository type. Other repository types are available through official plugins: -`hfds`:: {plugins}/repository-hdfs.html[Hadoop Distributed File System (HDFS) repository] +`hdfs`:: {plugins}/repository-hdfs.html[Hadoop Distributed File System (HDFS) repository] ==== [[put-snapshot-repo-api-settings-param]] diff --git a/docs/reference/sql/limitations.asciidoc b/docs/reference/sql/limitations.asciidoc index 383e5b1a08edd..fae4e97ff6eec 100644 --- a/docs/reference/sql/limitations.asciidoc +++ b/docs/reference/sql/limitations.asciidoc @@ -4,7 +4,7 @@ [discrete] [[large-parsing-trees]] -=== Large queries may throw `ParsingExpection` +=== Large queries may throw `ParsingException` Extremely large queries can consume too much memory during the parsing phase, in which case the {es-sql} engine will abort parsing and throw an error. In such cases, consider reducing the query to a smaller size by potentially From 2bcc03db274502f1531c6aab939aea21b4bd251f Mon Sep 17 00:00:00 2001 From: Claudio Marins Date: Tue, 15 Feb 2022 15:08:19 -0300 Subject: [PATCH 109/167] [GCE Discovery] Correcly handle large zones with 500 or more instances (#83785) Discovery gce plugin has some issues while running in large pools of vms instances. This pr attempts to solve it. 
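
The root cause is pagination: the Compute API returns instance
listings page by page, and the previous implementation issued a single
list() call, so it only ever consumed the first page and zones with
500 or more VMs silently lost the overflow. A condensed sketch of the
fix (the complete change to GceInstancesServiceImpl is in the diff
below; the privileged-access wrapper and error handling are omitted
here):

    String nextPageToken = null;
    List<Instance> zoneInstances = new ArrayList<>();
    do {
        // Ask for the next page until the API stops returning a token.
        InstanceList page = client().instances()
            .list(project, zoneId)
            .setPageToken(nextPageToken)
            .execute();
        nextPageToken = page.getNextPageToken();
        if (page.isEmpty() == false && page.getItems() != null) {
            zoneInstances.addAll(page.getItems());
        }
    } while (nextPageToken != null);
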
Closes #83783 --- docs/changelog/83785.yaml | 6 +++ .../cloud/gce/GceInstancesServiceImpl.java | 19 ++++++---- .../discovery/gce/GceDiscoveryTests.java | 13 +++++++ .../discovery/gce/GceMockUtils.java | 2 +- .../zones/europe-west1-b/instances | 37 +++++++++++++++++++ .../instances%3FpageToken=next-token | 36 ++++++++++++++++++ 6 files changed, 105 insertions(+), 8 deletions(-) create mode 100644 docs/changelog/83785.yaml create mode 100644 plugins/discovery-gce/src/test/resources/org/elasticsearch/discovery/gce/compute/v1/projects/nodeswithpagination/zones/europe-west1-b/instances create mode 100644 plugins/discovery-gce/src/test/resources/org/elasticsearch/discovery/gce/compute/v1/projects/nodeswithpagination/zones/europe-west1-b/instances%3FpageToken=next-token diff --git a/docs/changelog/83785.yaml b/docs/changelog/83785.yaml new file mode 100644 index 0000000000000..db6795c82e93e --- /dev/null +++ b/docs/changelog/83785.yaml @@ -0,0 +1,6 @@ +pr: 83785 +summary: '[GCE Discovery] Correcly handle large zones with 500 or more instances' +area: Distributed +type: bug +issues: + - 83783 diff --git a/plugins/discovery-gce/src/main/java/org/elasticsearch/cloud/gce/GceInstancesServiceImpl.java b/plugins/discovery-gce/src/main/java/org/elasticsearch/cloud/gce/GceInstancesServiceImpl.java index 55f0292285135..5667de257d867 100644 --- a/plugins/discovery-gce/src/main/java/org/elasticsearch/cloud/gce/GceInstancesServiceImpl.java +++ b/plugins/discovery-gce/src/main/java/org/elasticsearch/cloud/gce/GceInstancesServiceImpl.java @@ -69,14 +69,19 @@ public Collection instances() { try { // hack around code messiness in GCE code // TODO: get this fixed - InstanceList instanceList = Access.doPrivilegedIOException(() -> { - Compute.Instances.List list = client().instances().list(project, zoneId); - return list.execute(); + return Access.doPrivilegedIOException(() -> { + String nextPageToken = null; + List zoneInstances = new ArrayList<>(); + do { + Compute.Instances.List list = client().instances().list(project, zoneId).setPageToken(nextPageToken); + InstanceList instanceList = list.execute(); + nextPageToken = instanceList.getNextPageToken(); + if (instanceList.isEmpty() == false && instanceList.getItems() != null) { + zoneInstances.addAll(instanceList.getItems()); + } + } while (nextPageToken != null); + return zoneInstances; }); - // assist type inference - return instanceList.isEmpty() || instanceList.getItems() == null - ? 
Collections.emptyList() - : instanceList.getItems(); } catch (IOException e) { logger.warn((Supplier) () -> new ParameterizedMessage("Problem fetching instance list for zone {}", zoneId), e); logger.debug("Full exception:", e); diff --git a/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/GceDiscoveryTests.java b/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/GceDiscoveryTests.java index a32f54638f8d6..f363b0bd2bc94 100644 --- a/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/GceDiscoveryTests.java +++ b/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/GceDiscoveryTests.java @@ -272,4 +272,17 @@ public void testMetadataServerValues() { List dynamicHosts = buildDynamicNodes(mock, nodeSettings); assertThat(dynamicHosts, hasSize(1)); } + + public void testNodesWithPagination() { + Settings nodeSettings = Settings.builder() + .put(GceInstancesServiceImpl.PROJECT_SETTING.getKey(), projectName) + .put(GceInstancesServiceImpl.ZONE_SETTING.getKey(), "europe-west1-b") + .putList(GceSeedHostsProvider.TAGS_SETTING.getKey(), "elasticsearch", "dev") + .build(); + mock = new GceInstancesServiceMock(nodeSettings); + List dynamicHosts = buildDynamicNodes(mock, nodeSettings); + assertThat(dynamicHosts, hasSize(2)); + assertEquals("10.240.79.59", dynamicHosts.get(0).getAddress()); + assertEquals("10.240.79.60", dynamicHosts.get(1).getAddress()); + } } diff --git a/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/GceMockUtils.java b/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/GceMockUtils.java index 889228ac838a6..f2833fda8a0c5 100644 --- a/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/GceMockUtils.java +++ b/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/GceMockUtils.java @@ -67,7 +67,7 @@ public static String readGoogleApiJsonResponse(String url) throws IOException { private static String readJsonResponse(String url, String urlRoot) throws IOException { // We extract from the url the mock file path we want to use - String mockFileName = Strings.replace(url, urlRoot, ""); + String mockFileName = Strings.replace(url, urlRoot, "").replace("?", "%3F"); URL resource = GceMockUtils.class.getResource(mockFileName); if (resource == null) { diff --git a/plugins/discovery-gce/src/test/resources/org/elasticsearch/discovery/gce/compute/v1/projects/nodeswithpagination/zones/europe-west1-b/instances b/plugins/discovery-gce/src/test/resources/org/elasticsearch/discovery/gce/compute/v1/projects/nodeswithpagination/zones/europe-west1-b/instances new file mode 100644 index 0000000000000..e2fb8b6c21256 --- /dev/null +++ b/plugins/discovery-gce/src/test/resources/org/elasticsearch/discovery/gce/compute/v1/projects/nodeswithpagination/zones/europe-west1-b/instances @@ -0,0 +1,37 @@ +{ + "id": "dummy", + "items":[ + { + "description": "ES Node 1", + "id": "9309873766428965105", + "kind": "compute#instance", + "machineType": "n1-standard-1", + "name": "test1", + "networkInterfaces": [ + { + "accessConfigs": [ + { + "kind": "compute#accessConfig", + "name": "External NAT", + "natIP": "104.155.13.147", + "type": "ONE_TO_ONE_NAT" + } + ], + "name": "nic0", + "network": "default", + "networkIP": "10.240.79.59" + } + ], + "status": "RUNNING", + "tags": { + "fingerprint": "xA6QJb-rGtg=", + "items": [ + "elasticsearch", + "dev" + ] + }, + "zone": "europe-west1-b" + } + ], + "nextPageToken": "next-token" +} diff --git 
a/plugins/discovery-gce/src/test/resources/org/elasticsearch/discovery/gce/compute/v1/projects/nodeswithpagination/zones/europe-west1-b/instances%3FpageToken=next-token b/plugins/discovery-gce/src/test/resources/org/elasticsearch/discovery/gce/compute/v1/projects/nodeswithpagination/zones/europe-west1-b/instances%3FpageToken=next-token new file mode 100644 index 0000000000000..62bd2b2d8f4f8 --- /dev/null +++ b/plugins/discovery-gce/src/test/resources/org/elasticsearch/discovery/gce/compute/v1/projects/nodeswithpagination/zones/europe-west1-b/instances%3FpageToken=next-token @@ -0,0 +1,36 @@ +{ + "id": "dummy", + "items":[ + { + "description": "ES Node 2", + "id": "9309873766428965105", + "kind": "compute#instance", + "machineType": "n1-standard-1", + "name": "test2", + "networkInterfaces": [ + { + "accessConfigs": [ + { + "kind": "compute#accessConfig", + "name": "External NAT", + "natIP": "104.155.13.147", + "type": "ONE_TO_ONE_NAT" + } + ], + "name": "nic0", + "network": "default", + "networkIP": "10.240.79.60" + } + ], + "status": "RUNNING", + "tags": { + "fingerprint": "xA6QJb-rGtg=", + "items": [ + "elasticsearch", + "dev" + ] + }, + "zone": "europe-west1-b" + } + ] +} From d6aba55d3abf60d670398cb57c1af9ec15d9e3ae Mon Sep 17 00:00:00 2001 From: James Baiera Date: Tue, 15 Feb 2022 14:10:10 -0500 Subject: [PATCH 110/167] Simplify LocalExporter cleaner function to fix failing tests (#83812) LocalExporter must be initialized fully before it can be used in the CleanerService to clean up indices. Nothing about its local state is needed for cleaning indices, and I don't think anything about its initialization of monitoring resources is needed in order to delete old indices either. Waiting for initialization can be time consuming, and thus causes some test failures in the cleaner service. By slimming down the required state of the cleaner listener this should clear up some of the test failures surrounding it. 
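
In code terms, the readiness gate moves from the exporter's own
lifecycle state to a purely cluster-state-based check; a condensed
sketch of the before/after (the full context is in the diff below):

    // Before: bail out unless the exporter itself finished initializing.
    if (state.get() != State.RUNNING) {
        logger.debug("exporter not ready");
        return;
    }

    // After: only require a usable, metadata-writable cluster state.
    ClusterState clusterState = clusterService.state();
    if (clusterService.localNode() == null
        || clusterState == null
        || clusterState.blocks().hasGlobalBlockWithLevel(ClusterBlockLevel.METADATA_WRITE)) {
        logger.debug("exporter not ready");
        return;
    }
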
--- .../exporter/local/LocalExporter.java | 78 +++++++++---------- .../AbstractIndicesCleanerTestCase.java | 8 -- 2 files changed, 39 insertions(+), 47 deletions(-) diff --git a/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/exporter/local/LocalExporter.java b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/exporter/local/LocalExporter.java index d71c9aac6167f..02a0f6b7f0a7f 100644 --- a/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/exporter/local/LocalExporter.java +++ b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/exporter/local/LocalExporter.java @@ -598,64 +598,64 @@ private boolean canUseWatcher() { @Override public void onCleanUpIndices(TimeValue retention) { - if (state.get() != State.RUNNING) { + ClusterState clusterState = clusterService.state(); + if (clusterService.localNode() == null + || clusterState == null + || clusterState.blocks().hasGlobalBlockWithLevel(ClusterBlockLevel.METADATA_WRITE)) { logger.debug("exporter not ready"); return; } - if (clusterService.state().nodes().isLocalNodeElectedMaster()) { + if (clusterState.nodes().isLocalNodeElectedMaster()) { // Reference date time will be compared to index.creation_date settings, // that's why it must be in UTC ZonedDateTime expiration = ZonedDateTime.now(ZoneOffset.UTC).minus(retention.millis(), ChronoUnit.MILLIS); logger.debug("cleaning indices [expiration={}, retention={}]", expiration, retention); - ClusterState clusterState = clusterService.state(); - if (clusterState != null) { - final long expirationTimeMillis = expiration.toInstant().toEpochMilli(); - final long currentTimeMillis = System.currentTimeMillis(); + final long expirationTimeMillis = expiration.toInstant().toEpochMilli(); + final long currentTimeMillis = System.currentTimeMillis(); - // list of index patterns that we clean up - final String[] indexPatterns = new String[] { ".monitoring-*" }; + // list of index patterns that we clean up + final String[] indexPatterns = new String[] { ".monitoring-*" }; - // Get the names of the current monitoring indices - final Set currents = MonitoredSystem.allSystems() - .map(s -> MonitoringTemplateUtils.indexName(dateTimeFormatter, s, currentTimeMillis)) - .collect(Collectors.toSet()); + // Get the names of the current monitoring indices + final Set currents = MonitoredSystem.allSystems() + .map(s -> MonitoringTemplateUtils.indexName(dateTimeFormatter, s, currentTimeMillis)) + .collect(Collectors.toSet()); - // avoid deleting the current alerts index, but feel free to delete older ones - currents.add(MonitoringTemplateRegistry.ALERTS_INDEX_TEMPLATE_NAME); + // avoid deleting the current alerts index, but feel free to delete older ones + currents.add(MonitoringTemplateRegistry.ALERTS_INDEX_TEMPLATE_NAME); - Set indices = new HashSet<>(); - for (ObjectObjectCursor index : clusterState.getMetadata().indices()) { - String indexName = index.key; + Set indices = new HashSet<>(); + for (ObjectObjectCursor index : clusterState.getMetadata().indices()) { + String indexName = index.key; - if (Regex.simpleMatch(indexPatterns, indexName)) { - // Never delete any "current" index (e.g., today's index or the most recent version no timestamp, like alerts) - if (currents.contains(indexName)) { - continue; - } + if (Regex.simpleMatch(indexPatterns, indexName)) { + // Never delete any "current" index (e.g., today's index or the most recent version no timestamp, like alerts) + if (currents.contains(indexName)) { + continue; + } - 
long creationDate = index.value.getCreationDate(); - if (creationDate <= expirationTimeMillis) { - if (logger.isDebugEnabled()) { - logger.debug( - "detected expired index [name={}, created={}, expired={}]", - indexName, - Instant.ofEpochMilli(creationDate).atZone(ZoneOffset.UTC), - expiration - ); - } - indices.add(indexName); + long creationDate = index.value.getCreationDate(); + if (creationDate <= expirationTimeMillis) { + if (logger.isDebugEnabled()) { + logger.debug( + "detected expired index [name={}, created={}, expired={}]", + indexName, + Instant.ofEpochMilli(creationDate).atZone(ZoneOffset.UTC), + expiration + ); } + indices.add(indexName); } } + } - if (indices.isEmpty() == false) { - logger.info("cleaning up [{}] old indices", indices.size()); - deleteIndices(indices); - } else { - logger.debug("no old indices found for clean up"); - } + if (indices.isEmpty() == false) { + logger.info("cleaning up [{}] old indices", indices.size()); + deleteIndices(indices); + } else { + logger.debug("no old indices found for clean up"); } } } diff --git a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/cleaner/AbstractIndicesCleanerTestCase.java b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/cleaner/AbstractIndicesCleanerTestCase.java index a78e7baab75f4..c3fcb7525775e 100644 --- a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/cleaner/AbstractIndicesCleanerTestCase.java +++ b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/cleaner/AbstractIndicesCleanerTestCase.java @@ -14,7 +14,6 @@ import org.elasticsearch.xpack.core.monitoring.exporter.MonitoringTemplateUtils; import org.elasticsearch.xpack.monitoring.exporter.Exporter; import org.elasticsearch.xpack.monitoring.exporter.Exporters; -import org.elasticsearch.xpack.monitoring.exporter.local.LocalExporter; import org.elasticsearch.xpack.monitoring.test.MonitoringIntegTestCase; import org.junit.Before; @@ -23,7 +22,6 @@ import java.util.Locale; import static org.elasticsearch.test.ESIntegTestCase.Scope.TEST; -import static org.hamcrest.Matchers.is; @ClusterScope(scope = TEST, numDataNodes = 0, numClientNodes = 0) public abstract class AbstractIndicesCleanerTestCase extends MonitoringIntegTestCase { @@ -40,7 +38,6 @@ public void setup() { cleanerService.setGlobalRetention(TimeValue.MAX_VALUE); } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/78737") public void testNothingToDelete() throws Exception { CleanerService.Listener listener = getListener(); listener.onCleanUpIndices(days(0)); @@ -107,7 +104,6 @@ public void testIgnoreCurrentTimestampedIndex() throws Exception { assertIndicesCount(1); } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/78862") public void testDeleteIndices() throws Exception { CleanerService.Listener listener = getListener(); @@ -167,10 +163,6 @@ protected CleanerService.Listener getListener() throws Exception { Exporters exporters = internalCluster().getInstance(Exporters.class, internalCluster().getMasterName()); for (Exporter exporter : exporters.getEnabledExporters()) { if (exporter instanceof CleanerService.Listener) { - // Ensure that the exporter is initialized. 
- if (exporter instanceof LocalExporter) { - assertBusy(() -> assertThat(((LocalExporter) exporter).isExporterReady(), is(true))); - } return (CleanerService.Listener) exporter; } } From c1aba1e109aecf376b44155b43dbc7f76cd540ec Mon Sep 17 00:00:00 2001 From: James Rodewig Date: Tue, 15 Feb 2022 15:24:33 -0500 Subject: [PATCH 111/167] [DOCS] Move tip for percolate query example (#83972) Moves a tip for the percolate query to the beginning of the example. --- docs/reference/query-dsl/percolate-query.asciidoc | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/docs/reference/query-dsl/percolate-query.asciidoc b/docs/reference/query-dsl/percolate-query.asciidoc index 684b0b571f149..55f1fd5c705f9 100644 --- a/docs/reference/query-dsl/percolate-query.asciidoc +++ b/docs/reference/query-dsl/percolate-query.asciidoc @@ -10,7 +10,13 @@ contains the document that will be used as query to match with the stored queries. [discrete] -=== Sample Usage +=== Sample usage + +TIP: To provide a simple example, this documentation uses one index, +`my-index-000001`, for both the percolate queries and documents. This setup can +work well when there are just a few percolate queries registered. For heavier +usage, we recommend you store queries and documents in separate indices. For +more details, refer to <>. Create an index with two fields: @@ -118,10 +124,6 @@ The above request will yield the following response: <2> The `_percolator_document_slot` field indicates which document has matched with this query. Useful when percolating multiple document simultaneously. -TIP: To provide a simple example, this documentation uses one index `my-index-000001` for both the percolate queries and documents. -This set-up can work well when there are just a few percolate queries registered. However, with heavier usage it is recommended -to store queries and documents in separate indices. Please see <> for more details. - [discrete] ==== Parameters From 1fe2b0d866feb3e38409298a7e9168d554bee693 Mon Sep 17 00:00:00 2001 From: James Rodewig Date: Tue, 15 Feb 2022 15:56:04 -0500 Subject: [PATCH 112/167] [DOCS] Fix percolate query headings (#83988) Fixes the heading levels for the percolate query doc so the on-page TOC displays correctly. --- docs/reference/query-dsl/percolate-query.asciidoc | 13 ++----------- 1 file changed, 2 insertions(+), 11 deletions(-) diff --git a/docs/reference/query-dsl/percolate-query.asciidoc b/docs/reference/query-dsl/percolate-query.asciidoc index 55f1fd5c705f9..24b951a46ed9d 100644 --- a/docs/reference/query-dsl/percolate-query.asciidoc +++ b/docs/reference/query-dsl/percolate-query.asciidoc @@ -9,8 +9,7 @@ stored in an index. The `percolate` query itself contains the document that will be used as query to match with the stored queries. -[discrete] -=== Sample usage +==== Sample usage TIP: To provide a simple example, this documentation uses one index, `my-index-000001`, for both the percolate queries and documents. This setup can @@ -124,7 +123,6 @@ The above request will yield the following response: <2> The `_percolator_document_slot` field indicates which document has matched with this query. Useful when percolating multiple document simultaneously. -[discrete] ==== Parameters The following parameters are required when percolating a document: @@ -150,7 +148,6 @@ In that case the `document` parameter can be substituted with the following para `preference`:: Optionally, preference to be used to fetch document to percolate. 
`version`:: Optionally, the expected version of the document to be fetched. -[discrete] ==== Percolating in a filter context In case you are not interested in the score, better performance can be expected by wrapping @@ -185,7 +182,6 @@ should be wrapped in a `constant_score` query or a `bool` query's filter clause. Note that the `percolate` query never gets cached by the query cache. -[discrete] ==== Percolating multiple documents The `percolate` query can match multiple documents simultaneously with the indexed percolator queries. @@ -267,14 +263,12 @@ GET /my-index-000001/_search <1> The `_percolator_document_slot` indicates that the first, second and last documents specified in the `percolate` query are matching with this query. -[discrete] ==== Percolating an Existing Document In order to percolate a newly indexed document, the `percolate` query can be used. Based on the response from an index request, the `_id` and other meta information can be used to immediately percolate the newly added document. -[discrete] ===== Example Based on the previous example. @@ -332,14 +326,12 @@ case the search request would fail with a version conflict error. The search response returned is identical as in the previous example. -[discrete] ==== Percolate query and highlighting The `percolate` query is handled in a special way when it comes to highlighting. The queries hits are used to highlight the document that is provided in the `percolate` query. Whereas with regular highlighting the query in the search request is used to highlight the hits. -[discrete] ===== Example This example is based on the mapping of the first example. @@ -557,7 +549,6 @@ The slightly different response: <1> The highlight fields have been prefixed with the document slot they belong to, in order to know which highlight field belongs to what document. -[discrete] ==== Specifying multiple percolate queries It is possible to specify multiple `percolate` queries in a single search request: @@ -643,7 +634,6 @@ The above search request returns a response similar to this: <1> The `_percolator_document_slot_query1` percolator slot field indicates that these matched slots are from the `percolate` query with `_name` parameter set to `query1`. -[discrete] [[how-it-works]] ==== How it Works Under the Hood @@ -691,6 +681,7 @@ a different index configuration, like the number of primary shards. [[percolate-query-notes]] ==== Notes + ===== Allow expensive queries Percolate queries will not be executed if <> is set to false. From a89d4c34cbd8e5b3b3879df3aec632e7494e3c35 Mon Sep 17 00:00:00 2001 From: Igor Motov Date: Tue, 15 Feb 2022 11:48:38 -1000 Subject: [PATCH 113/167] TSDB: Add time series aggs cancellation (#83492) Adds support for low-level cancelling time-series based aggregations before they reach the reduce phase. 
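
The mechanism: AggregationPhase now builds a list of cancellation
checks (a task-cancellation check when low-level cancellation is
enabled, plus a time check when a timeout is set) and hands it to
TimeSeriesIndexSearcher, which runs the checks every 2^11 iterations
of its collection loop. A condensed sketch of the two sides (the full
change is in the diff below):

    // TimeSeriesIndexSearcher: periodically run the registered checks.
    private static final int CHECK_CANCELLED_SCORER_INTERVAL = 1 << 11;

    private void checkCancelled() {
        for (Runnable r : cancellations) {
            r.run(); // throws TaskCancelledException / TimeExceededException to abort
        }
    }

    // AggregationPhase: one of the registered checks.
    cancellationChecks.add(() -> {
        final SearchShardTask task = context.getTask();
        if (task != null) {
            task.ensureNotCancelled();
        }
    });
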
Relates to #74660 --- .../search/SearchCancellationIT.java | 135 +++++++++++++++++- .../search/aggregations/AggregationPhase.java | 35 ++++- .../timeseries/TimeSeriesIndexSearcher.java | 18 ++- .../search/query/QueryPhase.java | 2 +- .../TimeSeriesCancellationTests.java | 128 +++++++++++++++++ .../TimeSeriesIndexSearcherTests.java | 3 +- .../aggregations/AggregatorTestCase.java | 4 +- 7 files changed, 315 insertions(+), 10 deletions(-) create mode 100644 server/src/test/java/org/elasticsearch/search/aggregations/timeseries/TimeSeriesCancellationTests.java diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/SearchCancellationIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/SearchCancellationIT.java index 9a800c2656c45..465c394403bef 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/SearchCancellationIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/SearchCancellationIT.java @@ -11,6 +11,7 @@ import org.apache.logging.log4j.LogManager; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.ActionFuture; +import org.elasticsearch.action.DocWriteRequest; import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksResponse; import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse; import org.elasticsearch.action.bulk.BulkRequestBuilder; @@ -28,6 +29,8 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.TimeValue; +import org.elasticsearch.index.IndexMode; +import org.elasticsearch.index.IndexSettings; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.plugins.PluginsService; import org.elasticsearch.rest.RestStatus; @@ -36,13 +39,16 @@ import org.elasticsearch.script.ScriptType; import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.ScriptedMetricAggregationBuilder; +import org.elasticsearch.search.aggregations.timeseries.TimeSeriesAggregationBuilder; import org.elasticsearch.search.lookup.LeafStoredFieldsLookup; import org.elasticsearch.tasks.Task; import org.elasticsearch.tasks.TaskCancelledException; import org.elasticsearch.tasks.TaskInfo; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.transport.TransportService; +import org.junit.BeforeClass; +import java.time.Instant; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; @@ -55,9 +61,12 @@ import java.util.concurrent.atomic.AtomicReference; import java.util.function.Function; +import static org.elasticsearch.index.IndexSettings.TIME_SERIES_END_TIME; +import static org.elasticsearch.index.IndexSettings.TIME_SERIES_START_TIME; import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; import static org.elasticsearch.index.query.QueryBuilders.scriptQuery; import static org.elasticsearch.search.SearchCancellationIT.ScriptedBlockPlugin.SEARCH_BLOCK_SCRIPT_NAME; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertFailures; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; import static org.hamcrest.Matchers.containsString; @@ -69,6 +78,13 @@ @ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.SUITE) public class SearchCancellationIT extends ESIntegTestCase { + private static boolean lowLevelCancellation; + + 
@BeforeClass + public static void init() { + lowLevelCancellation = randomBoolean(); + } + @Override protected Collection> nodePlugins() { return Collections.singleton(ScriptedBlockPlugin.class); @@ -76,7 +92,6 @@ protected Collection> nodePlugins() { @Override protected Settings nodeSettings(int nodeOrdinal, Settings otherSettings) { - boolean lowLevelCancellation = randomBoolean(); logger.info("Using lowLevelCancellation: {}", lowLevelCancellation); return Settings.builder() .put(super.nodeSettings(nodeOrdinal, otherSettings)) @@ -227,7 +242,12 @@ public void testCancellationDuringAggregation() throws Exception { new Script(ScriptType.INLINE, "mockscript", ScriptedBlockPlugin.COMBINE_SCRIPT_NAME, Collections.emptyMap()) ) .reduceScript( - new Script(ScriptType.INLINE, "mockscript", ScriptedBlockPlugin.REDUCE_SCRIPT_NAME, Collections.emptyMap()) + new Script( + ScriptType.INLINE, + "mockscript", + ScriptedBlockPlugin.REDUCE_BLOCK_SCRIPT_NAME, + Collections.emptyMap() + ) ) ) ) @@ -238,6 +258,80 @@ public void testCancellationDuringAggregation() throws Exception { ensureSearchWasCancelled(searchResponse); } + public void testCancellationDuringTimeSeriesAggregation() throws Exception { + List plugins = initBlockFactory(); + int numberOfShards = between(2, 5); + long now = Instant.now().toEpochMilli(); + int numberOfRefreshes = between(1, 5); + int numberOfDocsPerRefresh = numberOfShards * between(1500, 2000) / numberOfRefreshes; + assertAcked( + prepareCreate("test").setSettings( + Settings.builder() + .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, numberOfShards) + .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0) + .put(IndexSettings.MODE.getKey(), IndexMode.TIME_SERIES.name()) + .put(IndexMetadata.INDEX_ROUTING_PATH.getKey(), "dim") + .put(TIME_SERIES_START_TIME.getKey(), now) + .put(TIME_SERIES_END_TIME.getKey(), now + (long) numberOfRefreshes * numberOfDocsPerRefresh + 1) + .build() + ).setMapping(""" + { + "properties": { + "@timestamp": {"type": "date", "format": "epoch_millis"}, + "dim": {"type": "keyword", "time_series_dimension": true} + } + } + """) + ); + + for (int i = 0; i < numberOfRefreshes; i++) { + // Make sure we sometimes have a few segments + BulkRequestBuilder bulkRequestBuilder = client().prepareBulk().setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); + for (int j = 0; j < numberOfDocsPerRefresh; j++) { + bulkRequestBuilder.add( + client().prepareIndex("test") + .setOpType(DocWriteRequest.OpType.CREATE) + .setSource("@timestamp", now + (long) i * numberOfDocsPerRefresh + j, "val", (double) j, "dim", String.valueOf(i)) + ); + } + assertNoFailures(bulkRequestBuilder.get()); + } + + logger.info("Executing search"); + TimeSeriesAggregationBuilder timeSeriesAggregationBuilder = new TimeSeriesAggregationBuilder("test_agg"); + ActionFuture searchResponse = client().prepareSearch("test") + .setQuery(matchAllQuery()) + .addAggregation( + timeSeriesAggregationBuilder.subAggregation( + new ScriptedMetricAggregationBuilder("sub_agg").initScript( + new Script(ScriptType.INLINE, "mockscript", ScriptedBlockPlugin.INIT_SCRIPT_NAME, Collections.emptyMap()) + ) + .mapScript( + new Script(ScriptType.INLINE, "mockscript", ScriptedBlockPlugin.MAP_BLOCK_SCRIPT_NAME, Collections.emptyMap()) + ) + .combineScript( + new Script(ScriptType.INLINE, "mockscript", ScriptedBlockPlugin.COMBINE_SCRIPT_NAME, Collections.emptyMap()) + ) + .reduceScript( + new Script(ScriptType.INLINE, "mockscript", ScriptedBlockPlugin.REDUCE_FAIL_SCRIPT_NAME, Collections.emptyMap()) + ) + ) + ) + .execute(); + 
awaitForBlock(plugins); + cancelSearch(SearchAction.NAME); + disableBlocks(plugins); + + SearchPhaseExecutionException ex = expectThrows(SearchPhaseExecutionException.class, searchResponse::actionGet); + assertThat(ExceptionsHelper.status(ex), equalTo(RestStatus.BAD_REQUEST)); + logger.info("All shards failed with", ex); + if (lowLevelCancellation) { + // Ensure that we cancelled in TimeSeriesIndexSearcher and not in reduce phase + assertThat(ExceptionsHelper.stackTrace(ex), containsString("TimeSeriesIndexSearcher")); + } + + } + public void testCancellationOfScrollSearches() throws Exception { List plugins = initBlockFactory(); @@ -414,8 +508,11 @@ public static class ScriptedBlockPlugin extends MockScriptPlugin { static final String SEARCH_BLOCK_SCRIPT_NAME = "search_block"; static final String INIT_SCRIPT_NAME = "init"; static final String MAP_SCRIPT_NAME = "map"; + static final String MAP_BLOCK_SCRIPT_NAME = "map_block"; static final String COMBINE_SCRIPT_NAME = "combine"; static final String REDUCE_SCRIPT_NAME = "reduce"; + static final String REDUCE_FAIL_SCRIPT_NAME = "reduce_fail"; + static final String REDUCE_BLOCK_SCRIPT_NAME = "reduce_block"; static final String TERM_SCRIPT_NAME = "term"; private final AtomicInteger hits = new AtomicInteger(); @@ -449,10 +546,16 @@ public Map, Object>> pluginScripts() { this::nullScript, MAP_SCRIPT_NAME, this::nullScript, + MAP_BLOCK_SCRIPT_NAME, + this::mapBlockScript, COMBINE_SCRIPT_NAME, this::nullScript, - REDUCE_SCRIPT_NAME, + REDUCE_BLOCK_SCRIPT_NAME, this::blockScript, + REDUCE_SCRIPT_NAME, + this::termScript, + REDUCE_FAIL_SCRIPT_NAME, + this::reduceFailScript, TERM_SCRIPT_NAME, this::termScript ); @@ -474,6 +577,11 @@ private Object searchBlockScript(Map params) { return true; } + private Object reduceFailScript(Map params) { + fail("Shouldn't reach reduce"); + return true; + } + private Object nullScript(Map params) { return null; } @@ -483,7 +591,9 @@ private Object blockScript(Map params) { if (runnable != null) { runnable.run(); } - LogManager.getLogger(SearchCancellationIT.class).info("Blocking in reduce"); + if (shouldBlock.get()) { + LogManager.getLogger(SearchCancellationIT.class).info("Blocking in reduce"); + } hits.incrementAndGet(); try { assertBusy(() -> assertFalse(shouldBlock.get())); @@ -493,6 +603,23 @@ private Object blockScript(Map params) { return 42; } + private Object mapBlockScript(Map params) { + final Runnable runnable = beforeExecution.get(); + if (runnable != null) { + runnable.run(); + } + if (shouldBlock.get()) { + LogManager.getLogger(SearchCancellationIT.class).info("Blocking in map"); + } + hits.incrementAndGet(); + try { + assertBusy(() -> assertFalse(shouldBlock.get())); + } catch (Exception e) { + throw new RuntimeException(e); + } + return 1; + } + private Object termScript(Map params) { return 1; } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/AggregationPhase.java b/server/src/main/java/org/elasticsearch/search/aggregations/AggregationPhase.java index ffcc971eeda7a..ce28ab0499d54 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/AggregationPhase.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/AggregationPhase.java @@ -8,11 +8,14 @@ package org.elasticsearch.search.aggregations; import org.apache.lucene.search.Collector; +import org.elasticsearch.action.search.SearchShardTask; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.search.SearchService; import 
org.elasticsearch.search.aggregations.timeseries.TimeSeriesIndexSearcher; import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.search.profile.query.CollectorResult; import org.elasticsearch.search.profile.query.InternalProfileCollector; +import org.elasticsearch.search.query.QueryPhase; import java.io.IOException; import java.util.ArrayList; @@ -40,7 +43,7 @@ public void preProcess(SearchContext context) { } if (context.aggregations().factories().context() != null && context.aggregations().factories().context().isInSortOrderExecutionRequired()) { - TimeSeriesIndexSearcher searcher = new TimeSeriesIndexSearcher(context.searcher()); + TimeSeriesIndexSearcher searcher = new TimeSeriesIndexSearcher(context.searcher(), getCancellationChecks(context)); try { searcher.search(context.rewrittenQuery(), bucketCollector); } catch (IOException e) { @@ -55,6 +58,36 @@ public void preProcess(SearchContext context) { } } + private List getCancellationChecks(SearchContext context) { + List cancellationChecks = new ArrayList<>(); + if (context.lowLevelCancellation()) { + // This searching doesn't live beyond this phase, so we don't need to remove query cancellation + cancellationChecks.add(() -> { + final SearchShardTask task = context.getTask(); + if (task != null) { + task.ensureNotCancelled(); + } + }); + } + + boolean timeoutSet = context.scrollContext() == null + && context.timeout() != null + && context.timeout().equals(SearchService.NO_TIMEOUT) == false; + + if (timeoutSet) { + final long startTime = context.getRelativeTimeInMillis(); + final long timeout = context.timeout().millis(); + final long maxTime = startTime + timeout; + cancellationChecks.add(() -> { + final long time = context.getRelativeTimeInMillis(); + if (time > maxTime) { + throw new QueryPhase.TimeExceededException(); + } + }); + } + return cancellationChecks; + } + public void execute(SearchContext context) { if (context.aggregations() == null) { context.queryResult().aggregations(null); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/timeseries/TimeSeriesIndexSearcher.java b/server/src/main/java/org/elasticsearch/search/aggregations/timeseries/TimeSeriesIndexSearcher.java index 4837a291df98f..71ccf96fd6bc2 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/timeseries/TimeSeriesIndexSearcher.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/timeseries/TimeSeriesIndexSearcher.java @@ -37,22 +37,29 @@ * TODO: Convert it to use index sort instead of hard-coded tsid and timestamp values */ public class TimeSeriesIndexSearcher { + private static final int CHECK_CANCELLED_SCORER_INTERVAL = 1 << 11; // We need to delegate to the other searcher here as opposed to extending IndexSearcher and inheriting default implementations as the // IndexSearcher would most of the time be a ContextIndexSearcher that has important logic related to e.g. document-level security. 
private final IndexSearcher searcher; + private final List cancellations; - public TimeSeriesIndexSearcher(IndexSearcher searcher) { + public TimeSeriesIndexSearcher(IndexSearcher searcher, List cancellations) { this.searcher = searcher; + this.cancellations = cancellations; } public void search(Query query, BucketCollector bucketCollector) throws IOException { + int seen = 0; query = searcher.rewrite(query); Weight weight = searcher.createWeight(query, bucketCollector.scoreMode(), 1); // Create LeafWalker for each subreader List leafWalkers = new ArrayList<>(); for (LeafReaderContext leaf : searcher.getIndexReader().leaves()) { + if (++seen % CHECK_CANCELLED_SCORER_INTERVAL == 0) { + checkCancelled(); + } LeafBucketCollector leafCollector = bucketCollector.getLeafCollector(leaf); Scorer scorer = weight.scorer(leaf); if (scorer != null) { @@ -76,6 +83,9 @@ protected boolean lessThan(LeafWalker a, LeafWalker b) { // walkers are ordered by timestamp. while (populateQueue(leafWalkers, queue)) { do { + if (++seen % CHECK_CANCELLED_SCORER_INTERVAL == 0) { + checkCancelled(); + } LeafWalker walker = queue.top(); walker.collectCurrent(); if (walker.nextDoc() == DocIdSetIterator.NO_MORE_DOCS || walker.shouldPop()) { @@ -131,6 +141,12 @@ private boolean queueAllHaveTsid(PriorityQueue queue, BytesRef tsid) return true; } + private void checkCancelled() { + for (Runnable r : cancellations) { + r.run(); + } + } + private static class LeafWalker { private final LeafCollector collector; private final Bits liveDocs; diff --git a/server/src/main/java/org/elasticsearch/search/query/QueryPhase.java b/server/src/main/java/org/elasticsearch/search/query/QueryPhase.java index 0b72df78a510f..937378719ff81 100644 --- a/server/src/main/java/org/elasticsearch/search/query/QueryPhase.java +++ b/server/src/main/java/org/elasticsearch/search/query/QueryPhase.java @@ -267,5 +267,5 @@ private static boolean canEarlyTerminate(IndexReader reader, SortAndFormats sort return true; } - static class TimeExceededException extends RuntimeException {} + public static class TimeExceededException extends RuntimeException {} } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/timeseries/TimeSeriesCancellationTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/timeseries/TimeSeriesCancellationTests.java new file mode 100644 index 0000000000000..b66db7736a7ff --- /dev/null +++ b/server/src/test/java/org/elasticsearch/search/aggregations/timeseries/TimeSeriesCancellationTests.java @@ -0,0 +1,128 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ +package org.elasticsearch.search.aggregations.timeseries; + +import org.apache.lucene.document.Document; +import org.apache.lucene.document.NumericDocValuesField; +import org.apache.lucene.document.SortedDocValuesField; +import org.apache.lucene.index.IndexReader; +import org.apache.lucene.index.IndexWriterConfig; +import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.index.RandomIndexWriter; +import org.apache.lucene.search.IndexSearcher; +import org.apache.lucene.search.MatchAllDocsQuery; +import org.apache.lucene.search.ScoreMode; +import org.apache.lucene.search.Sort; +import org.apache.lucene.search.SortField; +import org.apache.lucene.store.Directory; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.cluster.metadata.DataStream; +import org.elasticsearch.core.internal.io.IOUtils; +import org.elasticsearch.index.mapper.TimeSeriesIdFieldMapper; +import org.elasticsearch.search.aggregations.BucketCollector; +import org.elasticsearch.search.aggregations.LeafBucketCollector; +import org.elasticsearch.search.internal.ContextIndexSearcher; +import org.elasticsearch.tasks.TaskCancelledException; +import org.elasticsearch.test.ESTestCase; +import org.junit.AfterClass; +import org.junit.BeforeClass; + +import java.io.IOException; +import java.util.List; +import java.util.concurrent.atomic.AtomicInteger; + +import static org.hamcrest.Matchers.equalTo; + +public class TimeSeriesCancellationTests extends ESTestCase { + + private static Directory dir; + private static IndexReader reader; + + @BeforeClass + public static void setup() throws IOException { + dir = newDirectory(); + IndexWriterConfig iwc = newIndexWriterConfig(); + iwc.setIndexSort( + new Sort( + new SortField(TimeSeriesIdFieldMapper.NAME, SortField.Type.STRING), + new SortField(DataStream.TimestampField.FIXED_TIMESTAMP_FIELD, SortField.Type.LONG) + ) + ); + RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwc); + indexRandomDocuments(iw, randomIntBetween(2048, 4096)); + iw.flush(); + reader = iw.getReader(); + iw.close(); + } + + private static void indexRandomDocuments(RandomIndexWriter w, int numDocs) throws IOException { + for (int i = 1; i <= numDocs; ++i) { + Document doc = new Document(); + String tsid = "tsid" + randomIntBetween(0, 30); + long time = randomNonNegativeLong(); + doc.add(new SortedDocValuesField(TimeSeriesIdFieldMapper.NAME, new BytesRef(tsid))); + doc.add(new NumericDocValuesField(DataStream.TimestampField.FIXED_TIMESTAMP_FIELD, time)); + w.addDocument(doc); + } + } + + @AfterClass + public static void cleanup() throws IOException { + IOUtils.close(reader, dir); + dir = null; + reader = null; + } + + public void testLowLevelCancellationActions() throws IOException { + ContextIndexSearcher searcher = new ContextIndexSearcher( + reader, + IndexSearcher.getDefaultSimilarity(), + IndexSearcher.getDefaultQueryCache(), + IndexSearcher.getDefaultQueryCachingPolicy(), + true + ); + TimeSeriesIndexSearcher timeSeriesIndexSearcher = new TimeSeriesIndexSearcher( + searcher, + List.of(() -> { throw new TaskCancelledException("Cancel"); }) + ); + CountingBucketCollector bc = new CountingBucketCollector(); + expectThrows(TaskCancelledException.class, () -> timeSeriesIndexSearcher.search(new MatchAllDocsQuery(), bc)); + // We count every segment and every record as 1 and break on 2048th iteration counting from 0 + // so we expect to see 2048 - number_of_segments - 1 (-1 is because we check before we collect) + assertThat(bc.count.get(), equalTo(Math.max(0, 2048 - 
reader.leaves().size() - 1))); + } + + public static class CountingBucketCollector extends BucketCollector { + public AtomicInteger count = new AtomicInteger(); + + @Override + public LeafBucketCollector getLeafCollector(LeafReaderContext ctx) throws IOException { + return new LeafBucketCollector() { + @Override + public void collect(int doc, long owningBucketOrd) throws IOException { + count.incrementAndGet(); + } + }; + } + + @Override + public void preCollection() throws IOException { + + } + + @Override + public void postCollection() throws IOException { + + } + + @Override + public ScoreMode scoreMode() { + return ScoreMode.COMPLETE; + } + } +} diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/timeseries/TimeSeriesIndexSearcherTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/timeseries/TimeSeriesIndexSearcherTests.java index 670a6b1f1d31d..7bc5a2522d55b 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/timeseries/TimeSeriesIndexSearcherTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/timeseries/TimeSeriesIndexSearcherTests.java @@ -34,6 +34,7 @@ import java.io.IOException; import java.io.UncheckedIOException; +import java.util.List; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.TimeUnit; @@ -85,7 +86,7 @@ public void testCollectInOrderAcrossSegments() throws IOException, InterruptedEx IndexReader reader = DirectoryReader.open(dir); IndexSearcher searcher = new IndexSearcher(reader); - TimeSeriesIndexSearcher indexSearcher = new TimeSeriesIndexSearcher(searcher); + TimeSeriesIndexSearcher indexSearcher = new TimeSeriesIndexSearcher(searcher, List.of()); BucketCollector collector = new BucketCollector() { diff --git a/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java b/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java index 369d07ab26446..dfdfd267373b5 100644 --- a/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java @@ -578,7 +578,7 @@ private A searchAndReduce( C a = createAggregator(builder, context); a.preCollection(); if (context.isInSortOrderExecutionRequired()) { - new TimeSeriesIndexSearcher(subSearcher).search(rewritten, a); + new TimeSeriesIndexSearcher(subSearcher, List.of()).search(rewritten, a); } else { Weight weight = subSearcher.createWeight(rewritten, ScoreMode.COMPLETE, 1f); subSearcher.search(weight, a); @@ -589,7 +589,7 @@ private A searchAndReduce( } else { root.preCollection(); if (context.isInSortOrderExecutionRequired()) { - new TimeSeriesIndexSearcher(searcher).search(rewritten, MultiBucketCollector.wrap(true, List.of(root))); + new TimeSeriesIndexSearcher(searcher, List.of()).search(rewritten, MultiBucketCollector.wrap(true, List.of(root))); } else { searcher.search(rewritten, MultiBucketCollector.wrap(true, List.of(root))); } From c33da22a77e7b8bd80619395d2e9c676b95ef13f Mon Sep 17 00:00:00 2001 From: James Baiera Date: Tue, 15 Feb 2022 17:00:47 -0500 Subject: [PATCH 114/167] Update YAML Rest tests to check for product header on all responses (#83290) This PR adds assertions to YAML Rest tests to ensure that product headers are always returned in rest responses. 
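Concretely, the new assertion inspects every response for an `X-Elastic-Product: Elasticsearch` header and fails the test otherwise. A stripped-down sketch of that check (a hypothetical helper, not the framework code verbatim):

    import java.util.List;

    class ProductHeaderCheckDemo {
        // fails when the header is absent or only carries unexpected values
        static void checkProductHeader(List<String> values) {
            if (values.isEmpty()) {
                throw new AssertionError("response is missing the X-Elastic-Product header");
            }
            if (values.stream().noneMatch("Elasticsearch"::equals)) {
                throw new AssertionError("unexpected product header values: " + values);
            }
        }

        public static void main(String[] args) {
            checkProductHeader(List.of("Elasticsearch")); // passes
            checkProductHeader(List.of());                // throws
        }
    }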
Additional work has been included to fix a number of misuses of ThreadContext, mostly because of stashing listeners without their accompanying contexts. BWC Rest tests have been disabled for a few cases while the fixes are backported. --- docs/changelog/83290.yaml | 5 ++ .../AbstractAsyncBulkByScrollAction.java | 4 +- .../test/cat.snapshots/10_basic.yml | 4 ++ .../test/snapshot.clone/10_basic.yml | 3 + .../test/snapshot.create/10_basic.yml | 3 + .../test/snapshot.get/10_basic.yml | 5 +- .../20_repository_uuid.yml | 5 ++ .../test/snapshot.restore/10_basic.yml | 3 + .../test/snapshot.status/10_basic.yml | 3 + .../rest-api-spec/test/tsdb/30_snapshot.yml | 4 ++ .../restore/RestoreClusterStateListener.java | 65 +++++++++++-------- .../TransportRestoreSnapshotAction.java | 7 +- .../service/ClusterApplierService.java | 4 +- .../snapshots/SnapshotsService.java | 4 +- .../rest/yaml/ClientYamlTestResponse.java | 15 +++-- .../test/rest/yaml/section/DoSection.java | 26 ++++++++ .../rest/yaml/section/DoSectionTests.java | 1 + .../ccr/action/TransportPutFollowAction.java | 3 +- .../ccr/action/TransportUnfollowAction.java | 9 ++- .../action/TransportXPackUsageAction.java | 53 +++++++-------- .../watcher/WatcherUsageTransportAction.java | 9 ++- 21 files changed, 162 insertions(+), 73 deletions(-) create mode 100644 docs/changelog/83290.yaml diff --git a/docs/changelog/83290.yaml b/docs/changelog/83290.yaml new file mode 100644 index 0000000000000..9b3bb8ef056e5 --- /dev/null +++ b/docs/changelog/83290.yaml @@ -0,0 +1,5 @@ +pr: 83290 +summary: Update YAML Rest tests to check for product header on all responses +area: Infra/REST API +type: enhancement +issues: [] diff --git a/modules/reindex/src/main/java/org/elasticsearch/reindex/AbstractAsyncBulkByScrollAction.java b/modules/reindex/src/main/java/org/elasticsearch/reindex/AbstractAsyncBulkByScrollAction.java index bab93e56b653f..beac9ab88c78c 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/reindex/AbstractAsyncBulkByScrollAction.java +++ b/modules/reindex/src/main/java/org/elasticsearch/reindex/AbstractAsyncBulkByScrollAction.java @@ -593,7 +593,7 @@ protected void finishHim(Exception failure) { */ protected void finishHim(Exception failure, List indexingFailures, List searchFailures, boolean timedOut) { logger.debug("[{}]: finishing without any catastrophic failures", task.getId()); - scrollSource.close(() -> { + scrollSource.close(threadPool.getThreadContext().preserveContext(() -> { if (failure == null) { BulkByScrollResponse response = buildResponse( timeValueNanos(System.nanoTime() - startTime.get()), @@ -605,7 +605,7 @@ protected void finishHim(Exception failure, List indexingFailures, List } else { listener.onFailure(failure); } - }); + })); } /** diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cat.snapshots/10_basic.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cat.snapshots/10_basic.yml index f7d60671c7e88..23860cb412722 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cat.snapshots/10_basic.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cat.snapshots/10_basic.yml @@ -23,6 +23,10 @@ $/ --- "Test cat snapshots output": + - skip: + version: " - 8.1.99" + reason: "Pause BWC tests until #83290 is backported" + - do: snapshot.create_repository: repository: test_cat_snapshots_1 diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/snapshot.clone/10_basic.yml 
b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/snapshot.clone/10_basic.yml index fb289355e08fb..80e7139cd8df3 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/snapshot.clone/10_basic.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/snapshot.clone/10_basic.yml @@ -1,5 +1,8 @@ --- setup: + - skip: + version: " - 8.1.99" + reason: "Pause BWC tests until #83290 is backported" - do: snapshot.create_repository: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/snapshot.create/10_basic.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/snapshot.create/10_basic.yml index f7c522b712244..e060e7dff5bda 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/snapshot.create/10_basic.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/snapshot.create/10_basic.yml @@ -1,5 +1,8 @@ --- setup: + - skip: + version: " - 8.1.99" + reason: "Pause BWC tests until #83290 is backported" - do: snapshot.create_repository: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/snapshot.get/10_basic.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/snapshot.get/10_basic.yml index b50ece87e9f88..08753e4e732bf 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/snapshot.get/10_basic.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/snapshot.get/10_basic.yml @@ -1,5 +1,8 @@ --- setup: + - skip: + version: " - 8.1.99" + reason: "Pause BWC tests until #83290 is backported" - do: snapshot.create_repository: @@ -61,6 +64,7 @@ setup: --- "Get snapshot info when verbose is false": + - do: indices.create: index: test_index @@ -198,7 +202,6 @@ setup: - skip: version: " - 7.12.99" reason: "Introduced in 7.13.0" - - do: indices.create: index: test_index diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/snapshot.get_repository/20_repository_uuid.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/snapshot.get_repository/20_repository_uuid.yml index 0532d208d0cba..503c6cc7133de 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/snapshot.get_repository/20_repository_uuid.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/snapshot.get_repository/20_repository_uuid.yml @@ -1,4 +1,9 @@ --- +setup: + - skip: + version: " - 8.1.99" + reason: "Pause BWC tests until #83290 is backported" +--- "Get repository returns UUID": - skip: version: " - 7.12.99" diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/snapshot.restore/10_basic.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/snapshot.restore/10_basic.yml index 1ea5b542625e8..e91f38e985e43 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/snapshot.restore/10_basic.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/snapshot.restore/10_basic.yml @@ -1,5 +1,8 @@ --- setup: + - skip: + version: " - 8.1.99" + reason: "Pause BWC tests until #83290 is backported" - do: snapshot.create_repository: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/snapshot.status/10_basic.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/snapshot.status/10_basic.yml index c35f2419bdc91..2c4573ccd58b8 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/snapshot.status/10_basic.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/snapshot.status/10_basic.yml @@ -1,5 +1,8 @@ --- 
setup: + - skip: + version: " - 8.1.99" + reason: "Pause BWC tests until #83290 is backported" - do: snapshot.create_repository: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/30_snapshot.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/30_snapshot.yml index 104b383ae811f..39c6dd4345bdf 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/30_snapshot.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/30_snapshot.yml @@ -1,5 +1,9 @@ --- setup: + - skip: + version: " - 8.1.99" + reason: "Pause BWC tests until #83290 is backported" + - do: snapshot.create_repository: repository: test_repo diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/restore/RestoreClusterStateListener.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/restore/RestoreClusterStateListener.java index 2f3e92d2f55a9..c2931714e72a7 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/restore/RestoreClusterStateListener.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/restore/RestoreClusterStateListener.java @@ -16,10 +16,13 @@ import org.elasticsearch.cluster.RestoreInProgress; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.collect.ImmutableOpenMap; +import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.snapshots.RestoreInfo; import org.elasticsearch.snapshots.RestoreService; +import java.util.function.Supplier; + import static org.elasticsearch.snapshots.RestoreService.restoreInProgress; public class RestoreClusterStateListener implements ClusterStateListener { @@ -29,43 +32,48 @@ public class RestoreClusterStateListener implements ClusterStateListener { private final ClusterService clusterService; private final String uuid; private final ActionListener listener; + private final Supplier contextSupplier; private RestoreClusterStateListener( ClusterService clusterService, RestoreService.RestoreCompletionResponse response, - ActionListener listener + ActionListener listener, + Supplier contextSupplier ) { this.clusterService = clusterService; this.uuid = response.getUuid(); this.listener = listener; + this.contextSupplier = contextSupplier; } @Override public void clusterChanged(ClusterChangedEvent changedEvent) { - final RestoreInProgress.Entry prevEntry = restoreInProgress(changedEvent.previousState(), uuid); - final RestoreInProgress.Entry newEntry = restoreInProgress(changedEvent.state(), uuid); - if (prevEntry == null) { - // When there is a master failure after a restore has been started, this listener might not be registered - // on the current master and as such it might miss some intermediary cluster states due to batching. - // Clean up listener in that case and acknowledge completion of restore operation to client. 
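// The essence of the fix below: capture the caller's thread context when the
// listener is registered, and restore it around completion, because cluster
// state listeners fire on the applier thread under the system context. A
// reduced, self-contained stand-in using a ThreadLocal (this is only the
// shape of ThreadContext.newRestorableContext, not the real API):
class RestorableContextDemo {
    static final ThreadLocal<String> CTX = ThreadLocal.withInitial(() -> "system");

    static Runnable preserving(Runnable inner) {
        final String captured = CTX.get(); // captured at registration time
        return () -> {
            final String previous = CTX.get();
            CTX.set(captured);
            try {
                inner.run();
            } finally {
                CTX.set(previous); // leave the executing thread as we found it
            }
        };
    }

    public static void main(String[] args) {
        CTX.set("request-context");
        Runnable listener = preserving(() -> System.out.println("completed under: " + CTX.get()));
        CTX.set("system"); // the applier thread runs listeners in the system context
        listener.run();    // prints "completed under: request-context"
    }
}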
- clusterService.removeListener(this); - listener.onResponse(new RestoreSnapshotResponse((RestoreInfo) null)); - } else if (newEntry == null) { - clusterService.removeListener(this); - ImmutableOpenMap shards = prevEntry.shards(); - assert prevEntry.state().completed() : "expected completed snapshot state but was " + prevEntry.state(); - assert RestoreService.completed(shards) : "expected all restore entries to be completed"; - RestoreInfo ri = new RestoreInfo( - prevEntry.snapshot().getSnapshotId().getName(), - prevEntry.indices(), - shards.size(), - shards.size() - RestoreService.failedShards(shards) - ); - RestoreSnapshotResponse response = new RestoreSnapshotResponse(ri); - logger.debug("restore of [{}] completed", prevEntry.snapshot().getSnapshotId()); - listener.onResponse(response); - } else { - // restore not completed yet, wait for next cluster state update + try (ThreadContext.StoredContext stored = contextSupplier.get()) { + final RestoreInProgress.Entry prevEntry = restoreInProgress(changedEvent.previousState(), uuid); + final RestoreInProgress.Entry newEntry = restoreInProgress(changedEvent.state(), uuid); + if (prevEntry == null) { + // When there is a master failure after a restore has been started, this listener might not be registered + // on the current master and as such it might miss some intermediary cluster states due to batching. + // Clean up listener in that case and acknowledge completion of restore operation to client. + clusterService.removeListener(this); + listener.onResponse(new RestoreSnapshotResponse((RestoreInfo) null)); + } else if (newEntry == null) { + clusterService.removeListener(this); + ImmutableOpenMap shards = prevEntry.shards(); + assert prevEntry.state().completed() : "expected completed snapshot state but was " + prevEntry.state(); + assert RestoreService.completed(shards) : "expected all restore entries to be completed"; + RestoreInfo ri = new RestoreInfo( + prevEntry.snapshot().getSnapshotId().getName(), + prevEntry.indices(), + shards.size(), + shards.size() - RestoreService.failedShards(shards) + ); + RestoreSnapshotResponse response = new RestoreSnapshotResponse(ri); + logger.debug("restore of [{}] completed", prevEntry.snapshot().getSnapshotId()); + listener.onResponse(response); + } else { + // restore not completed yet, wait for next cluster state update + } } } @@ -76,8 +84,11 @@ public void clusterChanged(ClusterChangedEvent changedEvent) { public static void createAndRegisterListener( ClusterService clusterService, RestoreService.RestoreCompletionResponse response, - ActionListener listener + ActionListener listener, + ThreadContext threadContext ) { - clusterService.addListener(new RestoreClusterStateListener(clusterService, response, listener)); + clusterService.addListener( + new RestoreClusterStateListener(clusterService, response, listener, threadContext.newRestorableContext(true)) + ); } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/restore/TransportRestoreSnapshotAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/restore/TransportRestoreSnapshotAction.java index 7b247f1b14a42..73b66fa5d1bb5 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/restore/TransportRestoreSnapshotAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/restore/TransportRestoreSnapshotAction.java @@ -72,7 +72,12 @@ protected void masterOperation( ) { restoreService.restoreSnapshot(request, 
listener.delegateFailure((delegatedListener, restoreCompletionResponse) -> { if (restoreCompletionResponse.getRestoreInfo() == null && request.waitForCompletion()) { - RestoreClusterStateListener.createAndRegisterListener(clusterService, restoreCompletionResponse, delegatedListener); + RestoreClusterStateListener.createAndRegisterListener( + clusterService, + restoreCompletionResponse, + delegatedListener, + threadPool.getThreadContext() + ); } else { delegatedListener.onResponse(new RestoreSnapshotResponse(restoreCompletionResponse.getRestoreInfo())); } diff --git a/server/src/main/java/org/elasticsearch/cluster/service/ClusterApplierService.java b/server/src/main/java/org/elasticsearch/cluster/service/ClusterApplierService.java index 66d5428d5d135..122659c64422e 100644 --- a/server/src/main/java/org/elasticsearch/cluster/service/ClusterApplierService.java +++ b/server/src/main/java/org/elasticsearch/cluster/service/ClusterApplierService.java @@ -213,7 +213,7 @@ public void removeApplier(ClusterStateApplier applier) { } /** - * Add a listener for updated cluster states + * Add a listener for updated cluster states. Listeners are executed in the system thread context. */ public void addListener(ClusterStateListener listener) { clusterStateListeners.add(listener); @@ -222,7 +222,7 @@ public void addListener(ClusterStateListener listener) { /** * Removes a listener for updated cluster states. */ - public void removeListener(ClusterStateListener listener) { + public void removeListener(final ClusterStateListener listener) { clusterStateListeners.remove(listener); } diff --git a/server/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java b/server/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java index 35f88ddef3ea9..a0384b9efcb43 100644 --- a/server/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java +++ b/server/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java @@ -21,6 +21,7 @@ import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotRequest; import org.elasticsearch.action.admin.cluster.snapshots.delete.DeleteSnapshotRequest; import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.ContextPreservingActionListener; import org.elasticsearch.action.support.GroupedActionListener; import org.elasticsearch.action.support.master.TransportMasterNodeAction; import org.elasticsearch.cluster.ClusterChangedEvent; @@ -2957,7 +2958,8 @@ static Map filterDataStreamAliases( * @param listener listener */ private void addListener(Snapshot snapshot, ActionListener> listener) { - snapshotCompletionListeners.computeIfAbsent(snapshot, k -> new CopyOnWriteArrayList<>()).add(listener); + snapshotCompletionListeners.computeIfAbsent(snapshot, k -> new CopyOnWriteArrayList<>()) + .add(ContextPreservingActionListener.wrapPreservingContext(listener, threadPool.getThreadContext())); } @Override diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlTestResponse.java b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlTestResponse.java index bdd8ba9dab1df..86121fa0d7da0 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlTestResponse.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlTestResponse.java @@ -87,13 +87,20 @@ public String getReasonPhrase() { * Get a list of all of the values of all warning headers returned in the response. 
*/ public List getWarningHeaders() { - List warningHeaders = new ArrayList<>(); + return getHeaders("Warning"); + } + + /** + * Get a list of all the values of a given header returned in the response. + */ + public List getHeaders(String name) { + List headers = new ArrayList<>(); for (Header header : response.getHeaders()) { - if (header.getName().equals("Warning")) { - warningHeaders.add(header.getValue()); + if (header.getName().equalsIgnoreCase(name)) { + headers.add(header.getValue()); } } - return warningHeaders; + return headers; } /** diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/DoSection.java b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/DoSection.java index 23a7146561da9..efc53b08fad27 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/DoSection.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/DoSection.java @@ -367,6 +367,7 @@ public void execute(ClientYamlTestExecutionContext executionContext) throws IOEx final String testPath = executionContext.getClientYamlTestCandidate() != null ? executionContext.getClientYamlTestCandidate().getTestPath() : null; + checkElasticProductHeader(response.getHeaders("X-elastic-product")); checkWarningHeaders(response.getWarningHeaders(), testPath); } catch (ClientYamlTestResponseException e) { ClientYamlTestResponse restTestResponse = e.getRestTestResponse(); @@ -392,6 +393,31 @@ public void execute(ClientYamlTestExecutionContext executionContext) throws IOEx } } + void checkElasticProductHeader(final List productHeaders) { + if (productHeaders.isEmpty()) { + fail("Response is missing required X-Elastic-Product response header"); + } + boolean headerPresent = false; + final List unexpected = new ArrayList<>(); + for (String header : productHeaders) { + if (header.equals("Elasticsearch")) { + headerPresent = true; + break; + } else { + unexpected.add(header); + } + } + if (headerPresent == false) { + StringBuilder failureMessage = new StringBuilder(); + appendBadHeaders( + failureMessage, + unexpected, + "did not get expected product header [Elasticsearch], found header" + (unexpected.size() > 1 ? 
"s" : "") + ); + fail(failureMessage.toString()); + } + } + void checkWarningHeaders(final List warningHeaders) { checkWarningHeaders(warningHeaders, null); } diff --git a/test/framework/src/test/java/org/elasticsearch/test/rest/yaml/section/DoSectionTests.java b/test/framework/src/test/java/org/elasticsearch/test/rest/yaml/section/DoSectionTests.java index fdd3451012d5c..b7238588ffe36 100644 --- a/test/framework/src/test/java/org/elasticsearch/test/rest/yaml/section/DoSectionTests.java +++ b/test/framework/src/test/java/org/elasticsearch/test/rest/yaml/section/DoSectionTests.java @@ -605,6 +605,7 @@ public void testNodeSelectorByVersion() throws IOException { doSection.getApiCallSection().getNodeSelector() ) ).thenReturn(mockResponse); + when(mockResponse.getHeaders("X-elastic-product")).thenReturn(List.of("Elasticsearch")); doSection.execute(context); verify(context).callApi( "indices.get_field_mapping", diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportPutFollowAction.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportPutFollowAction.java index 1661585b5062f..e6053ce1ff818 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportPutFollowAction.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportPutFollowAction.java @@ -268,7 +268,8 @@ public void onFailure(Exception e) { assert restoreInfo.failedShards() > 0 : "Should have failed shards"; delegatedListener.onResponse(new PutFollowAction.Response(true, false, false)); } - }) + }), + threadPool.getThreadContext() ); } diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportUnfollowAction.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportUnfollowAction.java index 1f775b97ee4d0..e76154ee5f470 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportUnfollowAction.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportUnfollowAction.java @@ -16,6 +16,7 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.ContextPreservingActionListener; import org.elasticsearch.action.support.GroupedActionListener; import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.action.support.master.AcknowledgedTransportMasterNodeAction; @@ -178,10 +179,16 @@ private void removeRetentionLeaseForShard( ) { logger.trace("{} removing retention lease [{}] while unfollowing leader index", followerShardId, retentionLeaseId); final ThreadContext threadContext = threadPool.getThreadContext(); + // We're about to stash the thread context for this retention lease removal. The listener will be completed while the + // context is stashed. The context needs to be restored in the listener when it is completing or else it is simply wiped. 
+ final ActionListener preservedListener = new ContextPreservingActionListener<>( + threadContext.newRestorableContext(true), + listener + ); try (ThreadContext.StoredContext ignore = threadPool.getThreadContext().stashContext()) { // we have to execute under the system context so that if security is enabled the removal is authorized threadContext.markAsSystemContext(); - CcrRetentionLeases.asyncRemoveRetentionLease(leaderShardId, retentionLeaseId, remoteClient, listener); + CcrRetentionLeases.asyncRemoveRetentionLease(leaderShardId, retentionLeaseId, remoteClient, preservedListener); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/TransportXPackUsageAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/TransportXPackUsageAction.java index 959ffc448f548..6a9d00e62e975 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/TransportXPackUsageAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/TransportXPackUsageAction.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.core.action; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.ActionRunnable; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.master.TransportMasterNodeAction; import org.elasticsearch.client.internal.node.NodeClient; @@ -20,15 +21,9 @@ import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.XPackFeatureSet; -import org.elasticsearch.xpack.core.XPackFeatureSet.Usage; -import org.elasticsearch.xpack.core.common.IteratingActionListener; import java.util.ArrayList; -import java.util.Collections; import java.util.List; -import java.util.concurrent.atomic.AtomicInteger; -import java.util.concurrent.atomic.AtomicReferenceArray; -import java.util.function.BiConsumer; public class TransportXPackUsageAction extends TransportMasterNodeAction { @@ -66,32 +61,28 @@ protected List usageActions() { @Override protected void masterOperation(Task task, XPackUsageRequest request, ClusterState state, ActionListener listener) { - final ActionListener> usageActionListener = listener.delegateFailure( - (l, usages) -> l.onResponse(new XPackUsageResponse(usages)) - ); - final AtomicReferenceArray featureSetUsages = new AtomicReferenceArray<>(usageActions.size()); - final AtomicInteger position = new AtomicInteger(0); - final BiConsumer>> consumer = (featureUsageAction, iteratingListener) -> { - // Since we're executing the actions locally we should create a new request - // to avoid mutating the original request and setting the wrong parent task, - // since it is possible that the parent task gets cancelled and new child tasks are banned. 
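// The replacement below runs the per-feature usage actions strictly one after
// another by re-running the same task from each response callback. The same
// control flow in a self-contained sketch (names invented for illustration;
// the real code forks each step to the MANAGEMENT pool, so the recursion does
// not grow the stack):
import java.util.ArrayList;
import java.util.List;
import java.util.function.Consumer;

class SequentialActionsDemo {
    public static void main(String[] args) {
        // each "action" completes asynchronously by invoking its callback
        List<Consumer<Runnable>> actions = List.of(
            done -> { System.out.println("usage action 0"); done.run(); },
            done -> { System.out.println("usage action 1"); done.run(); }
        );
        List<String> responses = new ArrayList<>();
        new Runnable() {
            @Override
            public void run() {
                if (responses.size() < actions.size()) {
                    final int next = responses.size();
                    actions.get(next).accept(() -> {
                        responses.add("usage-" + next);
                        run(); // kick off the next action
                    });
                } else {
                    System.out.println("aggregated: " + responses);
                }
            }
        }.run();
    }
}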
- final XPackUsageRequest childRequest = new XPackUsageRequest(); - childRequest.setParentTask(request.getParentTask()); - client.executeLocally(featureUsageAction, childRequest, iteratingListener.delegateFailure((l, usageResponse) -> { - featureSetUsages.set(position.getAndIncrement(), usageResponse.getUsage()); - // the value sent back doesn't matter since our predicate keeps iterating - l.onResponse(Collections.emptyList()); - })); - }; - IteratingActionListener, XPackUsageFeatureAction> iteratingActionListener = - new IteratingActionListener<>(usageActionListener, consumer, usageActions, threadPool.getThreadContext(), (ignore) -> { - final List usageList = new ArrayList<>(featureSetUsages.length()); - for (int i = 0; i < featureSetUsages.length(); i++) { - usageList.add(featureSetUsages.get(i)); + new ActionRunnable<>(listener) { + final List responses = new ArrayList<>(usageActions.size()); + + @Override + protected void doRun() { + if (responses.size() < usageActions().size()) { + final var childRequest = new XPackUsageRequest(); + childRequest.setParentTask(request.getParentTask()); + client.executeLocally( + usageActions.get(responses.size()), + childRequest, + listener.delegateFailure((delegate, response) -> { + responses.add(response.getUsage()); + run(); // XPackUsageFeatureTransportAction always forks to MANAGEMENT so no risk of stack overflow here + }) + ); + } else { + assert responses.size() == usageActions.size() : responses.size() + " vs " + usageActions.size(); + listener.onResponse(new XPackUsageResponse(responses)); } - return usageList; - }, (ignore) -> true); - iteratingActionListener.run(); + } + }.run(); } @Override diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/WatcherUsageTransportAction.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/WatcherUsageTransportAction.java index cf4a178ba85fa..97f47e13abb7d 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/WatcherUsageTransportAction.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/WatcherUsageTransportAction.java @@ -8,6 +8,7 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.ContextPreservingActionListener; import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; @@ -75,6 +76,10 @@ protected void masterOperation( ActionListener listener ) { if (enabled) { + ActionListener preservingListener = ContextPreservingActionListener.wrapPreservingContext( + listener, + client.threadPool().getThreadContext() + ); try (ThreadContext.StoredContext ignore = client.threadPool().getThreadContext().stashWithOrigin(WATCHER_ORIGIN)) { WatcherStatsRequest statsRequest = new WatcherStatsRequest(); statsRequest.includeStats(true); @@ -91,8 +96,8 @@ protected void masterOperation( true, mergedCounters.toNestedMap() ); - listener.onResponse(new XPackUsageFeatureResponse(usage)); - }, listener::onFailure)); + preservingListener.onResponse(new XPackUsageFeatureResponse(usage)); + }, preservingListener::onFailure)); } } else { WatcherFeatureSetUsage usage = new WatcherFeatureSetUsage( From 2aab7cc8680d47880043f119724305f8392cd4ca Mon Sep 17 00:00:00 2001 From: Mark Vieira Date: Tue, 15 Feb 2022 14:40:21 -0800 Subject: [PATCH 115/167] Add CI matrix configuration for snapshot BWC versions (#83990) --- 
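The build change below factors the version-file writing into a closure shared by `.ci/bwcVersions` and the new `.ci/snapshotBwcVersions`. The emitted format is a tiny YAML list; rendered as plain Java for illustration (the real task is Groovy in build.gradle):

    import java.io.IOException;
    import java.nio.file.Files;
    import java.nio.file.Path;
    import java.util.List;

    class WriteBwcVersionsDemo {
        // mirrors the writeVersions closure: one quoted version per list entry
        static void writeVersions(Path file, List<String> versions) throws IOException {
            StringBuilder yml = new StringBuilder("BWC_VERSION:\n");
            for (String version : versions) {
                yml.append("  - \"").append(version).append("\"\n");
            }
            Files.writeString(file, yml.toString());
        }

        public static void main(String[] args) throws IOException {
            writeVersions(Path.of("snapshotBwcVersions.demo"), List.of("7.17.1", "8.0.1", "8.1.0", "8.2.0"));
        }
    }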
.ci/snapshotBwcVersions | 5 +++++ build.gradle | 31 +++++++++++++++++++------------ 2 files changed, 24 insertions(+), 12 deletions(-) create mode 100644 .ci/snapshotBwcVersions diff --git a/.ci/snapshotBwcVersions b/.ci/snapshotBwcVersions new file mode 100644 index 0000000000000..3fdb4a121405a --- /dev/null +++ b/.ci/snapshotBwcVersions @@ -0,0 +1,5 @@ +BWC_VERSION: + - "7.17.1" + - "8.0.1" + - "8.1.0" + - "8.2.0" diff --git a/build.gradle b/build.gradle index 120fadf16b31d..d62c6358e4cd4 100644 --- a/build.gradle +++ b/build.gradle @@ -68,17 +68,28 @@ ext.testArtifact = { p, String name = "test" -> } tasks.register("updateCIBwcVersions") { - doLast { - File yml = file(".ci/bwcVersions") - yml.text = "" - yml << "BWC_VERSION:\n" - BuildParams.bwcVersions.indexCompatible.each { - yml << " - \"$it\"\n" + def writeVersions = { File file, List versions -> + file.text = "" + file << "BWC_VERSION:\n" + versions.each { + file << " - \"$it\"\n" } } + doLast { + writeVersions(file(".ci/bwcVersions"), BuildParams.bwcVersions.indexCompatible) + writeVersions(file(".ci/snapshotBwcVersions"), BuildParams.bwcVersions.unreleasedIndexCompatible) + } } tasks.register("verifyVersions") { + def verifyCiYaml = { File file, List versions -> + String ciYml = file.text + versions.each { + if (ciYml.contains("\"$it\"\n") == false) { + throw new Exception("${file} is outdated, run `./gradlew updateCIBwcVersions` and check in the results") + } + } + } doLast { if (gradle.startParameter.isOffline()) { throw new GradleException("Must run in online mode to verify versions") @@ -94,12 +105,8 @@ tasks.register("verifyVersions") { .collect { Version.fromString(it) } ) } - String ciYml = file(".ci/bwcVersions").text - BuildParams.bwcVersions.indexCompatible.each { - if (ciYml.contains("\"$it\"\n") == false) { - throw new Exception(".ci/bwcVersions is outdated, run `./gradlew updateCIBwcVersions` and check in the results"); - } - } + verifyCiYaml(file(".ci/bwcVersions"), BuildParams.bwcVersions.indexCompatible) + verifyCiYaml(file(".ci/snapshotBwcVersions"), BuildParams.bwcVersions.unreleasedIndexCompatible) // Make sure backport bot config file is up to date JsonNode backportConfig = new ObjectMapper().readTree(file(".backportrc.json")) From e8b34c720da7bb25f9048ef51dd0b8f696862705 Mon Sep 17 00:00:00 2001 From: James Rodewig Date: Tue, 15 Feb 2022 18:20:22 -0500 Subject: [PATCH 116/167] [DOCS] Re-add HTTP proxy setings from #82737 (#84001) Re-adds HTTP proxy settings for the GCS repository type. These settings were added with https://github.com/elastic/elasticsearch/pull/82737. The docs were accidentally removed as part of https://github.com/elastic/elasticsearch/pull/82996. --- .../reference/snapshot-restore/repository-gcs.asciidoc | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/docs/reference/snapshot-restore/repository-gcs.asciidoc b/docs/reference/snapshot-restore/repository-gcs.asciidoc index 3a036e1487972..37dfe2add0b18 100644 --- a/docs/reference/snapshot-restore/repository-gcs.asciidoc +++ b/docs/reference/snapshot-restore/repository-gcs.asciidoc @@ -191,6 +191,16 @@ are marked as `Secure`. can be specified explicitly. For example, it can be used to switch between projects when the same credentials are usable for both the production and the development projects. +`proxy.host`:: + Host name of a proxy to connect to the Google Cloud Storage through. + +`proxy.port`:: + Port of a proxy to connect to the Google Cloud Storage through. + +`proxy.type`:: + Proxy type for the client. 
Supported values are `direct` (no proxy), + `http`, and `socks`. Defaults to `direct`. + [[repository-gcs-repository]] ==== Repository settings From 2db116a69722c71f6ef3c452e7bbf554614c1038 Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Tue, 15 Feb 2022 16:22:45 -0800 Subject: [PATCH 117/167] Make action names available in NodeClient (#83919) The actions available in NodeClient are registered on node startup and hidden from callers. However, the operator privileges feature needs to verify that all actions are classified into operator and non-operator actions. Currently the test uses reflection hacks to make the internal action objects available. This commit makes the action names available as a public method on NodeClient, so that the reflection hacks are no longer necessary. It would be nice to have a test-specific way to expose this, but the test code in question actually serves the action names up in a REST API, so, as far as the server is concerned, it is not test code that needs the action names. --- .../client/internal/node/NodeClient.java | 8 ++++++ .../actions/RestGetActionsAction.java | 25 +------------------ 2 files changed, 9 insertions(+), 24 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/client/internal/node/NodeClient.java b/server/src/main/java/org/elasticsearch/client/internal/node/NodeClient.java index 99ab7b6519a79..4c4bfb4dae799 100644 --- a/server/src/main/java/org/elasticsearch/client/internal/node/NodeClient.java +++ b/server/src/main/java/org/elasticsearch/client/internal/node/NodeClient.java @@ -26,6 +26,7 @@ import org.elasticsearch.transport.RemoteClusterService; import org.elasticsearch.transport.Transport; +import java.util.List; import java.util.Map; import java.util.function.Supplier; @@ -67,6 +68,13 @@ public void initialize( this.namedWriteableRegistry = namedWriteableRegistry; } + /** + * Return the names of all available actions registered with this client.
+ */ + public List getActionNames() { + return actions.keySet().stream().map(ActionType::name).toList(); + } + @Override public void close() { // nothing really to do diff --git a/x-pack/plugin/security/qa/operator-privileges-tests/src/main/java/org/elasticsearch/xpack/security/operator/actions/RestGetActionsAction.java b/x-pack/plugin/security/qa/operator-privileges-tests/src/main/java/org/elasticsearch/xpack/security/operator/actions/RestGetActionsAction.java index 8dd65407ce81d..76b416bc56c42 100644 --- a/x-pack/plugin/security/qa/operator-privileges-tests/src/main/java/org/elasticsearch/xpack/security/operator/actions/RestGetActionsAction.java +++ b/x-pack/plugin/security/qa/operator-privileges-tests/src/main/java/org/elasticsearch/xpack/security/operator/actions/RestGetActionsAction.java @@ -7,22 +7,13 @@ package org.elasticsearch.xpack.security.operator.actions; -import org.elasticsearch.ElasticsearchException; -import org.elasticsearch.action.ActionType; -import org.elasticsearch.action.support.TransportAction; import org.elasticsearch.client.internal.node.NodeClient; -import org.elasticsearch.core.SuppressForbidden; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.RestToXContentListener; import java.io.IOException; -import java.lang.reflect.Field; -import java.security.AccessController; -import java.security.PrivilegedAction; import java.util.List; -import java.util.Map; -import java.util.stream.Collectors; import static org.elasticsearch.rest.RestRequest.Method.GET; @@ -37,23 +28,9 @@ public String getName() { return "test_get_actions"; } - @SuppressForbidden(reason = "Use reflection for testing only") - @SuppressWarnings({ "rawtypes", "unchecked" }) @Override protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) throws IOException { - final Map actions = AccessController.doPrivileged( - (PrivilegedAction>) () -> { - try { - final Field actionsField = client.getClass().getDeclaredField("actions"); - actionsField.setAccessible(true); - return (Map) actionsField.get(client); - } catch (NoSuchFieldException | IllegalAccessException e) { - throw new ElasticsearchException(e); - } - } - ); - - final List actionNames = actions.keySet().stream().map(ActionType::name).collect(Collectors.toList()); + final List actionNames = client.getActionNames(); return channel -> new RestToXContentListener<>(channel).onResponse( (builder, params) -> builder.startObject().field("actions", actionNames).endObject() ); From a7e57dfe003efa9b284acbf9dd134d7f5f25efe0 Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Tue, 15 Feb 2022 18:13:16 -0800 Subject: [PATCH 118/167] Use latch to speedup multi feature migration test (#84007) The multi feature migration test works by having two system index features that are to be upgraded. The reindexing of the system indices of those features is done in sequence by the system index migrator, and the test has an assertBusy that waits for all the migrations to complete. Unfortunately assertBusy backs off exponentially, so it isn't great for quickly resuming the test once the underlying assertion becomes true, since the condition might change shortly after an iteration, and then have to wait for a long backoff. This commit adds a latch which will count down all the executions of the test plugins, so that the assert busy will not be run until after all the indexes have migrated. 
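The coordination boils down to a CountDownLatch that every hook decrements and that the test awaits before entering assertBusy, so the test resumes promptly once the last hook fires instead of sleeping through an exponential backoff. In a self-contained sketch (the hook bodies are stand-ins):

    import java.util.concurrent.CountDownLatch;
    import java.util.concurrent.TimeUnit;

    class LatchDemo {
        public static void main(String[] args) throws InterruptedException {
            CountDownLatch hooksCalled = new CountDownLatch(4);
            for (int i = 0; i < 4; i++) {
                final int id = i;
                new Thread(() -> {
                    System.out.println("hook " + id + " ran"); // pre/post migration hook stand-in
                    hooksCalled.countDown();
                }).start();
            }
            boolean done = hooksCalled.await(30, TimeUnit.SECONDS); // blocks only until the last hook
            System.out.println("all hooks called: " + done);
        }
    }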
closes #83953 --- .../migration/MultiFeatureMigrationIT.java | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/modules/reindex/src/internalClusterTest/java/org/elasticsearch/migration/MultiFeatureMigrationIT.java b/modules/reindex/src/internalClusterTest/java/org/elasticsearch/migration/MultiFeatureMigrationIT.java index 2fc6358f5c468..f0838dd571637 100644 --- a/modules/reindex/src/internalClusterTest/java/org/elasticsearch/migration/MultiFeatureMigrationIT.java +++ b/modules/reindex/src/internalClusterTest/java/org/elasticsearch/migration/MultiFeatureMigrationIT.java @@ -37,6 +37,8 @@ import java.util.List; import java.util.Map; import java.util.Set; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicReference; import java.util.function.BiConsumer; import java.util.function.Function; @@ -91,6 +93,8 @@ public void testMultipleFeatureMigration() throws Exception { ensureGreen(); + CountDownLatch hooksCalled = new CountDownLatch(4); + SetOnce preMigrationHookCalled = new SetOnce<>(); SetOnce postMigrationHookCalled = new SetOnce<>(); SetOnce secondPluginPreMigrationHookCalled = new SetOnce<>(); @@ -109,6 +113,7 @@ public void testMultipleFeatureMigration() throws Exception { assertThat(currentResults, nullValue()); preMigrationHookCalled.set(true); + hooksCalled.countDown(); return metadata; }); @@ -125,6 +130,7 @@ public void testMultipleFeatureMigration() throws Exception { assertThat(currentResults, nullValue()); postMigrationHookCalled.set(true); + hooksCalled.countDown(); }); SecondPlugin.preMigrationHook.set(clusterState -> { @@ -145,6 +151,7 @@ public void testMultipleFeatureMigration() throws Exception { assertThat(currentResults.getFeatureStatuses().get(FEATURE_NAME).getException(), nullValue()); secondPluginPreMigrationHookCalled.set(true); + hooksCalled.countDown(); return metadata; }); @@ -165,6 +172,7 @@ public void testMultipleFeatureMigration() throws Exception { assertThat(currentResults.getFeatureStatuses().get(FEATURE_NAME).getException(), nullValue()); secondPluginPostMigrationHookCalled.set(true); + hooksCalled.countDown(); }); PostFeatureUpgradeRequest migrationRequest = new PostFeatureUpgradeRequest(); @@ -177,6 +185,9 @@ public void testMultipleFeatureMigration() throws Exception { .collect(Collectors.toSet()); assertThat(migratingFeatures, hasItems(FEATURE_NAME, SECOND_FEATURE_NAME)); + // wait for all the plugin methods to have been called before assertBusy since that will exponentially backoff + assertThat(hooksCalled.await(30, TimeUnit.SECONDS), is(true)); + GetFeatureUpgradeStatusRequest getStatusRequest = new GetFeatureUpgradeStatusRequest(); assertBusy(() -> { GetFeatureUpgradeStatusResponse statusResponse = client().execute(GetFeatureUpgradeStatusAction.INSTANCE, getStatusRequest) From 74b5bfdb73dda73e99d03ee8ced63dfbe45ae160 Mon Sep 17 00:00:00 2001 From: Yannick Welsch Date: Wed, 16 Feb 2022 08:31:41 +0100 Subject: [PATCH 119/167] Feature usage actions for archive (#83931) Relates #81210 --- .../xcontent/monitor_cluster_stats.json | 5 ++ docs/reference/rest-api/info.asciidoc | 4 + docs/reference/rest-api/usage.asciidoc | 5 ++ .../snapshots/RestoreService.java | 17 +--- .../xpack/core/XPackClientPlugin.java | 5 +- .../elasticsearch/xpack/core/XPackField.java | 2 + .../core/action/XPackInfoFeatureAction.java | 4 +- .../core/action/XPackUsageFeatureAction.java | 4 +- .../core/archive/ArchiveFeatureSetUsage.java | 73 ++++++++++++++++ .../archive/ArchiveFeatureSetUsageTests.java 
| 39 +++++++++ .../lucene/bwc/ArchiveLicenseIntegTests.java | 23 +++++ .../bwc/ArchiveInfoTransportAction.java | 44 ++++++++++ .../bwc/ArchiveUsageTransportAction.java | 70 +++++++++++++++ .../xpack/lucene/bwc/OldLuceneVersions.java | 15 +++- .../xpack/security/operator/Constants.java | 2 + .../oldrepos/DocValueOnlyFieldsIT.java | 9 -- .../oldrepos/OldRepositoryAccessIT.java | 87 ++++++++----------- 17 files changed, 331 insertions(+), 77 deletions(-) create mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/archive/ArchiveFeatureSetUsage.java create mode 100644 x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/archive/ArchiveFeatureSetUsageTests.java create mode 100644 x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/ArchiveInfoTransportAction.java create mode 100644 x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/ArchiveUsageTransportAction.java diff --git a/benchmarks/src/main/resources/org/elasticsearch/benchmark/xcontent/monitor_cluster_stats.json b/benchmarks/src/main/resources/org/elasticsearch/benchmark/xcontent/monitor_cluster_stats.json index de460d770d249..eea13dec75ffd 100644 --- a/benchmarks/src/main/resources/org/elasticsearch/benchmark/xcontent/monitor_cluster_stats.json +++ b/benchmarks/src/main/resources/org/elasticsearch/benchmark/xcontent/monitor_cluster_stats.json @@ -1233,6 +1233,11 @@ "total" : 0, "failed" : 0 } + }, + "archive" : { + "available" : false, + "enabled" : true, + "indices_count" : 0 } } } diff --git a/docs/reference/rest-api/info.asciidoc b/docs/reference/rest-api/info.asciidoc index e4d533c8378d3..5292b6e8967cb 100644 --- a/docs/reference/rest-api/info.asciidoc +++ b/docs/reference/rest-api/info.asciidoc @@ -81,6 +81,10 @@ Example response: "available" : true, "enabled" : true }, + "archive" : { + "available" : true, + "enabled" : true + }, "enrich" : { "available" : true, "enabled" : true diff --git a/docs/reference/rest-api/usage.asciidoc b/docs/reference/rest-api/usage.asciidoc index 786a21f576423..13773b02fe417 100644 --- a/docs/reference/rest-api/usage.asciidoc +++ b/docs/reference/rest-api/usage.asciidoc @@ -395,6 +395,11 @@ GET /_xpack/usage "aggregate_metric" : { "available" : true, "enabled" : true + }, + "archive" : { + "available" : true, + "enabled" : true, + "indices_count" : 0 } } ------------------------------------------------------------ diff --git a/server/src/main/java/org/elasticsearch/snapshots/RestoreService.java b/server/src/main/java/org/elasticsearch/snapshots/RestoreService.java index d1b996978aa31..de137cde1f331 100644 --- a/server/src/main/java/org/elasticsearch/snapshots/RestoreService.java +++ b/server/src/main/java/org/elasticsearch/snapshots/RestoreService.java @@ -10,7 +10,6 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.message.ParameterizedMessage; -import org.elasticsearch.Build; import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.StepListener; @@ -978,7 +977,8 @@ static void validateSnapshotRestorable(RestoreSnapshotRequest request, Repositor + "]" ); } - if (skipVersionChecks(repository) == false && snapshotInfo.version().before(Version.CURRENT.minimumIndexCompatibilityVersion())) { + if (ALLOW_BWC_INDICES_SETTING.get(repository.settings()) == false + && snapshotInfo.version().before(Version.CURRENT.minimumIndexCompatibilityVersion())) { throw new SnapshotRestoreException( new 
Snapshot(repository.name(), snapshotInfo.snapshotId()), "the snapshot was created with Elasticsearch version [" @@ -1002,19 +1002,6 @@ static void validateSnapshotRestorable(RestoreSnapshotRequest request, Repositor Setting.Property.NodeScope ); - private static boolean skipVersionChecks(RepositoryMetadata repositoryMetadata) { - if (Build.CURRENT.isSnapshot()) { - return ALLOW_BWC_INDICES_SETTING.get(repositoryMetadata.settings()); - } else { - if (ALLOW_BWC_INDICES_SETTING.exists(repositoryMetadata.settings())) { - throw new IllegalArgumentException( - "Repository setting [" + ALLOW_BWC_INDICES_SETTING.getKey() + "] only allowed in release builds" - ); - } - return false; - } - } - public static boolean failed(SnapshotInfo snapshot, String index) { for (SnapshotShardFailure failure : snapshot.shardFailures()) { if (index.equals(failure.index())) { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackClientPlugin.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackClientPlugin.java index 8d78275266a86..4bd7ce835dcdb 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackClientPlugin.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackClientPlugin.java @@ -36,6 +36,7 @@ import org.elasticsearch.xpack.core.action.XPackUsageAction; import org.elasticsearch.xpack.core.aggregatemetric.AggregateMetricFeatureSetUsage; import org.elasticsearch.xpack.core.analytics.AnalyticsFeatureSetUsage; +import org.elasticsearch.xpack.core.archive.ArchiveFeatureSetUsage; import org.elasticsearch.xpack.core.async.DeleteAsyncResultAction; import org.elasticsearch.xpack.core.ccr.AutoFollowMetadata; import org.elasticsearch.xpack.core.datastreams.DataStreamFeatureSetUsage; @@ -552,7 +553,9 @@ public List getNamedWriteables() { // Data Streams new NamedWriteableRegistry.Entry(XPackFeatureSet.Usage.class, XPackField.DATA_STREAMS, DataStreamFeatureSetUsage::new), // Data Tiers - new NamedWriteableRegistry.Entry(XPackFeatureSet.Usage.class, XPackField.DATA_TIERS, DataTiersFeatureSetUsage::new) + new NamedWriteableRegistry.Entry(XPackFeatureSet.Usage.class, XPackField.DATA_TIERS, DataTiersFeatureSetUsage::new), + // Archive + new NamedWriteableRegistry.Entry(XPackFeatureSet.Usage.class, XPackField.ARCHIVE, ArchiveFeatureSetUsage::new) ) ); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackField.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackField.java index 59343705b9098..dbc100e62ac1e 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackField.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackField.java @@ -71,6 +71,8 @@ public final class XPackField { public static final String AGGREGATE_METRIC = "aggregate_metric"; /** Name constant for the operator privileges feature. */ public static final String OPERATOR_PRIVILEGES = "operator_privileges"; + /** Name constant for the archive feature. 
*/ + public static final String ARCHIVE = "archive"; private XPackField() {} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/XPackInfoFeatureAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/XPackInfoFeatureAction.java index c6c941ef3092d..83e835d4bb6dc 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/XPackInfoFeatureAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/XPackInfoFeatureAction.java @@ -47,6 +47,7 @@ public class XPackInfoFeatureAction extends ActionType public static final XPackInfoFeatureAction DATA_STREAMS = new XPackInfoFeatureAction(XPackField.DATA_STREAMS); public static final XPackInfoFeatureAction DATA_TIERS = new XPackInfoFeatureAction(XPackField.DATA_TIERS); public static final XPackInfoFeatureAction AGGREGATE_METRIC = new XPackInfoFeatureAction(XPackField.AGGREGATE_METRIC); + public static final XPackInfoFeatureAction ARCHIVE = new XPackInfoFeatureAction(XPackField.ARCHIVE); public static final List ALL; static { @@ -74,7 +75,8 @@ public class XPackInfoFeatureAction extends ActionType DATA_STREAMS, SEARCHABLE_SNAPSHOTS, DATA_TIERS, - AGGREGATE_METRIC + AGGREGATE_METRIC, + ARCHIVE ) ); ALL = Collections.unmodifiableList(actions); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/XPackUsageFeatureAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/XPackUsageFeatureAction.java index cd310064ffa0f..bfbac109012e5 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/XPackUsageFeatureAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/XPackUsageFeatureAction.java @@ -44,6 +44,7 @@ public class XPackUsageFeatureAction extends ActionType ALL = List.of( AGGREGATE_METRIC, @@ -66,7 +67,8 @@ public class XPackUsageFeatureAction extends ActionType { + + @Override + protected ArchiveFeatureSetUsage createTestInstance() { + boolean available = randomBoolean(); + return new ArchiveFeatureSetUsage(available, randomIntBetween(0, 100000)); + } + + @Override + protected ArchiveFeatureSetUsage mutateInstance(ArchiveFeatureSetUsage instance) throws IOException { + boolean available = instance.available(); + int numArchiveIndices = instance.getNumberOfArchiveIndices(); + switch (between(0, 1)) { + case 0 -> available = available == false; + case 1 -> numArchiveIndices = randomValueOtherThan(numArchiveIndices, () -> randomIntBetween(0, 100000)); + default -> throw new AssertionError("Illegal randomisation branch"); + } + return new ArchiveFeatureSetUsage(available, numArchiveIndices); + } + + @Override + protected Writeable.Reader instanceReader() { + return ArchiveFeatureSetUsage::new; + } + +} diff --git a/x-pack/plugin/old-lucene-versions/src/internalClusterTest/java/org/elasticsearch/xpack/lucene/bwc/ArchiveLicenseIntegTests.java b/x-pack/plugin/old-lucene-versions/src/internalClusterTest/java/org/elasticsearch/xpack/lucene/bwc/ArchiveLicenseIntegTests.java index 0c37eac048853..4d2c8113c02ba 100644 --- a/x-pack/plugin/old-lucene-versions/src/internalClusterTest/java/org/elasticsearch/xpack/lucene/bwc/ArchiveLicenseIntegTests.java +++ b/x-pack/plugin/old-lucene-versions/src/internalClusterTest/java/org/elasticsearch/xpack/lucene/bwc/ArchiveLicenseIntegTests.java @@ -31,6 +31,7 @@ import org.elasticsearch.license.PostStartTrialResponse; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.plugins.RepositoryPlugin; 
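// The serialization test above follows the usual mutate-one-field idiom:
// flip exactly one randomly chosen property so the mutated copy must compare
// unequal to the original. Reduced to a runnable sketch (the record here is a
// hypothetical stand-in for ArchiveFeatureSetUsage):
import java.util.Random;

class MutateOneFieldDemo {
    record Usage(boolean available, int archiveIndices) {}

    public static void main(String[] args) {
        Random random = new Random();
        Usage original = new Usage(random.nextBoolean(), random.nextInt(100_000));
        Usage mutated = switch (random.nextInt(2)) {
            case 0 -> new Usage(original.available() == false, original.archiveIndices());
            default -> new Usage(original.available(), original.archiveIndices() + 1);
        };
        if (mutated.equals(original)) {
            throw new AssertionError("a mutated copy must never equal the original");
        }
        System.out.println(original + " != " + mutated);
    }
}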
+import org.elasticsearch.protocol.xpack.XPackUsageRequest; import org.elasticsearch.protocol.xpack.license.DeleteLicenseRequest; import org.elasticsearch.repositories.IndexId; import org.elasticsearch.repositories.Repository; @@ -42,6 +43,9 @@ import org.elasticsearch.snapshots.mockstore.MockRepository; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.xcontent.NamedXContentRegistry; +import org.elasticsearch.xpack.core.action.XPackUsageFeatureAction; +import org.elasticsearch.xpack.core.action.XPackUsageFeatureResponse; +import org.elasticsearch.xpack.core.archive.ArchiveFeatureSetUsage; import org.junit.Before; import java.io.IOException; @@ -52,6 +56,7 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.oneOf; @ESIntegTestCase.ClusterScope(supportsDedicatedMasters = false, numClientNodes = 0, scope = ESIntegTestCase.Scope.TEST) @@ -130,6 +135,24 @@ public void createAndRestoreArchive() throws Exception { client().execute(PostStartTrialAction.INSTANCE, request).get(); } + public void testFeatureUsage() throws Exception { + XPackUsageFeatureResponse usage = client().execute(XPackUsageFeatureAction.ARCHIVE, new XPackUsageRequest()).get(); + assertThat(usage.getUsage(), instanceOf(ArchiveFeatureSetUsage.class)); + ArchiveFeatureSetUsage archiveUsage = (ArchiveFeatureSetUsage) usage.getUsage(); + assertEquals(0, archiveUsage.getNumberOfArchiveIndices()); + + final RestoreSnapshotRequest req = new RestoreSnapshotRequest(repoName, snapshotName).indices(indexName).waitForCompletion(true); + + final RestoreSnapshotResponse restoreSnapshotResponse = client().admin().cluster().restoreSnapshot(req).get(); + assertThat(restoreSnapshotResponse.getRestoreInfo().failedShards(), equalTo(0)); + ensureGreen(indexName); + + usage = client().execute(XPackUsageFeatureAction.ARCHIVE, new XPackUsageRequest()).get(); + assertThat(usage.getUsage(), instanceOf(ArchiveFeatureSetUsage.class)); + archiveUsage = (ArchiveFeatureSetUsage) usage.getUsage(); + assertEquals(1, archiveUsage.getNumberOfArchiveIndices()); + } + public void testFailRestoreOnInvalidLicense() throws Exception { assertAcked(client().execute(DeleteLicenseAction.INSTANCE, new DeleteLicenseRequest()).get()); assertAcked(client().execute(PostStartBasicAction.INSTANCE, new PostStartBasicRequest()).get()); diff --git a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/ArchiveInfoTransportAction.java b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/ArchiveInfoTransportAction.java new file mode 100644 index 0000000000000..702559a4810d8 --- /dev/null +++ b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/ArchiveInfoTransportAction.java @@ -0,0 +1,44 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.lucene.bwc; + +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.license.XPackLicenseState; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.core.XPackField; +import org.elasticsearch.xpack.core.action.XPackInfoFeatureAction; +import org.elasticsearch.xpack.core.action.XPackInfoFeatureTransportAction; + +import static org.elasticsearch.xpack.lucene.bwc.OldLuceneVersions.ARCHIVE_FEATURE; + +public class ArchiveInfoTransportAction extends XPackInfoFeatureTransportAction { + + private final XPackLicenseState licenseState; + + @Inject + public ArchiveInfoTransportAction(TransportService transportService, ActionFilters actionFilters, XPackLicenseState licenseState) { + super(XPackInfoFeatureAction.ARCHIVE.name(), transportService, actionFilters); + this.licenseState = licenseState; + } + + @Override + public String name() { + return XPackField.ARCHIVE; + } + + @Override + public boolean available() { + return ARCHIVE_FEATURE.checkWithoutTracking(licenseState); + } + + @Override + public boolean enabled() { + return true; + } +} diff --git a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/ArchiveUsageTransportAction.java b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/ArchiveUsageTransportAction.java new file mode 100644 index 0000000000000..d209db2f9ce37 --- /dev/null +++ b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/ArchiveUsageTransportAction.java @@ -0,0 +1,70 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.lucene.bwc; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.metadata.IndexMetadata; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.license.XPackLicenseState; +import org.elasticsearch.protocol.xpack.XPackUsageRequest; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.core.action.XPackUsageFeatureAction; +import org.elasticsearch.xpack.core.action.XPackUsageFeatureResponse; +import org.elasticsearch.xpack.core.action.XPackUsageFeatureTransportAction; +import org.elasticsearch.xpack.core.archive.ArchiveFeatureSetUsage; + +import static org.elasticsearch.xpack.lucene.bwc.OldLuceneVersions.ARCHIVE_FEATURE; + +public class ArchiveUsageTransportAction extends XPackUsageFeatureTransportAction { + + private final XPackLicenseState licenseState; + + @Inject + public ArchiveUsageTransportAction( + TransportService transportService, + ClusterService clusterService, + ThreadPool threadPool, + ActionFilters actionFilters, + IndexNameExpressionResolver indexNameExpressionResolver, + XPackLicenseState licenseState + ) { + super( + XPackUsageFeatureAction.ARCHIVE.name(), + transportService, + clusterService, + threadPool, + actionFilters, + indexNameExpressionResolver + ); + this.licenseState = licenseState; + } + + @Override + protected void masterOperation( + Task task, + XPackUsageRequest request, + ClusterState state, + ActionListener listener + ) { + int numArchiveIndices = 0; + for (IndexMetadata indexMetadata : state.metadata()) { + if (OldLuceneVersions.isArchiveIndex(indexMetadata.getCreationVersion())) { + numArchiveIndices++; + } + } + listener.onResponse( + new XPackUsageFeatureResponse(new ArchiveFeatureSetUsage(ARCHIVE_FEATURE.checkWithoutTracking(licenseState), numArchiveIndices)) + ); + } +} diff --git a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/OldLuceneVersions.java b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/OldLuceneVersions.java index 631de49d4fa1d..69ac9777960de 100644 --- a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/OldLuceneVersions.java +++ b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/OldLuceneVersions.java @@ -12,6 +12,8 @@ import org.apache.lucene.index.SegmentInfos; import org.apache.lucene.util.SetOnce; import org.elasticsearch.Version; +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionResponse; import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; @@ -35,6 +37,7 @@ import org.elasticsearch.license.LicenseUtils; import org.elasticsearch.license.LicensedFeature; import org.elasticsearch.license.XPackLicenseState; +import org.elasticsearch.plugins.ActionPlugin; import org.elasticsearch.plugins.ClusterPlugin; import org.elasticsearch.plugins.IndexStorePlugin; import org.elasticsearch.plugins.Plugin; @@ -45,6 +48,8 @@ import org.elasticsearch.watcher.ResourceWatcherService; import 
org.elasticsearch.xcontent.NamedXContentRegistry; import org.elasticsearch.xpack.core.XPackPlugin; +import org.elasticsearch.xpack.core.action.XPackInfoFeatureAction; +import org.elasticsearch.xpack.core.action.XPackUsageFeatureAction; import org.elasticsearch.xpack.lucene.bwc.codecs.BWCCodec; import java.io.IOException; @@ -56,7 +61,7 @@ import java.util.function.Consumer; import java.util.function.Supplier; -public class OldLuceneVersions extends Plugin implements IndexStorePlugin, ClusterPlugin, RepositoryPlugin { +public class OldLuceneVersions extends Plugin implements IndexStorePlugin, ClusterPlugin, RepositoryPlugin, ActionPlugin { public static final LicensedFeature.Momentary ARCHIVE_FEATURE = LicensedFeature.momentary( null, @@ -95,6 +100,14 @@ public Collection createComponents( return List.of(); } + @Override + public List> getActions() { + return List.of( + new ActionPlugin.ActionHandler<>(XPackUsageFeatureAction.ARCHIVE, ArchiveUsageTransportAction.class), + new ActionPlugin.ActionHandler<>(XPackInfoFeatureAction.ARCHIVE, ArchiveInfoTransportAction.class) + ); + } + // overridable by tests protected XPackLicenseState getLicenseState() { return XPackPlugin.getSharedLicenseState(); diff --git a/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java b/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java index 1cff2e41c26fb..94378f91d0ebc 100644 --- a/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java +++ b/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java @@ -274,6 +274,7 @@ public class Constants { "cluster:monitor/xpack/info", "cluster:monitor/xpack/info/aggregate_metric", "cluster:monitor/xpack/info/analytics", + "cluster:monitor/xpack/info/archive", "cluster:monitor/xpack/info/ccr", "cluster:monitor/xpack/info/data_streams", "cluster:monitor/xpack/info/data_tiers", @@ -329,6 +330,7 @@ public class Constants { "cluster:monitor/xpack/usage", "cluster:monitor/xpack/usage/aggregate_metric", "cluster:monitor/xpack/usage/analytics", + "cluster:monitor/xpack/usage/archive", "cluster:monitor/xpack/usage/ccr", "cluster:monitor/xpack/usage/data_streams", "cluster:monitor/xpack/usage/data_tiers", diff --git a/x-pack/qa/repository-old-versions/src/test/java/org/elasticsearch/oldrepos/DocValueOnlyFieldsIT.java b/x-pack/qa/repository-old-versions/src/test/java/org/elasticsearch/oldrepos/DocValueOnlyFieldsIT.java index 7df801a174e9d..ab1105d989ff1 100644 --- a/x-pack/qa/repository-old-versions/src/test/java/org/elasticsearch/oldrepos/DocValueOnlyFieldsIT.java +++ b/x-pack/qa/repository-old-versions/src/test/java/org/elasticsearch/oldrepos/DocValueOnlyFieldsIT.java @@ -12,7 +12,6 @@ import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; import org.apache.http.HttpHost; -import org.elasticsearch.Build; import org.elasticsearch.Version; import org.elasticsearch.client.Request; import org.elasticsearch.client.RestClient; @@ -65,12 +64,6 @@ protected Settings restClientSettings() { return Settings.builder().put(ThreadContext.PREFIX + ".Authorization", token).build(); } - @Override - public void test() throws IOException { - assumeTrue("feature currently only enabled in snapshot builds", Build.CURRENT.isSnapshot()); - super.test(); - } - @Override protected boolean 
skipSetupSections() { // setup in the YAML file is replaced by the method below @@ -79,8 +72,6 @@ protected boolean skipSetupSections() { @Before public void setupIndex() throws IOException { - assumeTrue("feature currently only enabled in snapshot builds", Build.CURRENT.isSnapshot()); - final boolean afterRestart = Booleans.parseBoolean(System.getProperty("tests.after_restart")); if (afterRestart) { return; diff --git a/x-pack/qa/repository-old-versions/src/test/java/org/elasticsearch/oldrepos/OldRepositoryAccessIT.java b/x-pack/qa/repository-old-versions/src/test/java/org/elasticsearch/oldrepos/OldRepositoryAccessIT.java index 6174c029c47cb..0f77bfb8ee964 100644 --- a/x-pack/qa/repository-old-versions/src/test/java/org/elasticsearch/oldrepos/OldRepositoryAccessIT.java +++ b/x-pack/qa/repository-old-versions/src/test/java/org/elasticsearch/oldrepos/OldRepositoryAccessIT.java @@ -8,7 +8,6 @@ package org.elasticsearch.oldrepos; import org.apache.http.HttpHost; -import org.elasticsearch.Build; import org.elasticsearch.Version; import org.elasticsearch.action.admin.cluster.repositories.put.PutRepositoryRequest; import org.elasticsearch.action.admin.cluster.snapshots.get.GetSnapshotsRequest; @@ -131,11 +130,9 @@ public void runTest(boolean sourceOnlyRepository) throws IOException { } private void afterRestart(String indexName) throws IOException { - if (Build.CURRENT.isSnapshot()) { - ensureGreen("restored_" + indexName); - ensureGreen("mounted_full_copy_" + indexName); - ensureGreen("mounted_shared_cache_" + indexName); - } + ensureGreen("restored_" + indexName); + ensureGreen("mounted_full_copy_" + indexName); + ensureGreen("mounted_shared_cache_" + indexName); } @SuppressWarnings("removal") @@ -207,9 +204,7 @@ private void beforeRestart( if (sourceOnlyRepository) { repoSettingsBuilder.put("delegate_type", "fs"); } - if (Build.CURRENT.isSnapshot()) { - repoSettingsBuilder.put("allow_bwc_indices", true); - } + repoSettingsBuilder.put("allow_bwc_indices", true); ElasticsearchAssertions.assertAcked( client.snapshot() .createRepository( @@ -263,48 +258,42 @@ private void beforeRestart( assertThat(snapshotStatus.getStats().getTotalSize(), greaterThan(0L)); assertThat(snapshotStatus.getStats().getTotalFileCount(), greaterThan(0)); - if (Build.CURRENT.isSnapshot()) { - // restore / mount and check whether searches work - restoreMountAndVerify( - numDocs, - expectedIds, - client, - numberOfShards, - sourceOnlyRepository, - oldVersion, - indexName, - repoName, - snapshotName - ); + // restore / mount and check whether searches work + restoreMountAndVerify( + numDocs, + expectedIds, + client, + numberOfShards, + sourceOnlyRepository, + oldVersion, + indexName, + repoName, + snapshotName + ); - // close indices - assertTrue( - client.indices().close(new CloseIndexRequest("restored_" + indexName), RequestOptions.DEFAULT).isShardsAcknowledged() - ); - assertTrue( - client.indices() - .close(new CloseIndexRequest("mounted_full_copy_" + indexName), RequestOptions.DEFAULT) - .isShardsAcknowledged() - ); - assertTrue( - client.indices() - .close(new CloseIndexRequest("mounted_shared_cache_" + indexName), RequestOptions.DEFAULT) - .isShardsAcknowledged() - ); + // close indices + assertTrue(client.indices().close(new CloseIndexRequest("restored_" + indexName), RequestOptions.DEFAULT).isShardsAcknowledged()); + assertTrue( + client.indices().close(new CloseIndexRequest("mounted_full_copy_" + indexName), RequestOptions.DEFAULT).isShardsAcknowledged() + ); + assertTrue( + client.indices() + .close(new 
CloseIndexRequest("mounted_shared_cache_" + indexName), RequestOptions.DEFAULT) + .isShardsAcknowledged() + ); - // restore / mount again - restoreMountAndVerify( - numDocs, - expectedIds, - client, - numberOfShards, - sourceOnlyRepository, - oldVersion, - indexName, - repoName, - snapshotName - ); - } + // restore / mount again + restoreMountAndVerify( + numDocs, + expectedIds, + client, + numberOfShards, + sourceOnlyRepository, + oldVersion, + indexName, + repoName, + snapshotName + ); } private String getType(Version oldVersion, String id) { From 494da68e50ea7e73079ce03a78b4c8f6d3a6d15e Mon Sep 17 00:00:00 2001 From: Ievgen Degtiarenko Date: Wed, 16 Feb 2022 10:33:47 +0100 Subject: [PATCH 120/167] Optimize spliterator for ImmutableOpenMap (#83899) We know the exact amount of nodes, so we can return a sized spliterator which allows the Stream pipeline to allocate memory more granularly. --- .../org/elasticsearch/common/collect/ImmutableOpenIntMap.java | 2 +- .../java/org/elasticsearch/common/collect/ImmutableOpenMap.java | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/common/collect/ImmutableOpenIntMap.java b/server/src/main/java/org/elasticsearch/common/collect/ImmutableOpenIntMap.java index aa9f5ee41567f..5acc18df3f8a3 100644 --- a/server/src/main/java/org/elasticsearch/common/collect/ImmutableOpenIntMap.java +++ b/server/src/main/java/org/elasticsearch/common/collect/ImmutableOpenIntMap.java @@ -241,7 +241,7 @@ public boolean remove(Object o) { } public Spliterator> spliterator() { - return Spliterators.spliteratorUnknownSize(iterator(), 0); + return Spliterators.spliterator(iterator(), size(), Spliterator.SIZED); } public void forEach(Consumer> action) { diff --git a/server/src/main/java/org/elasticsearch/common/collect/ImmutableOpenMap.java b/server/src/main/java/org/elasticsearch/common/collect/ImmutableOpenMap.java index 20427cc734638..8afef238aae50 100644 --- a/server/src/main/java/org/elasticsearch/common/collect/ImmutableOpenMap.java +++ b/server/src/main/java/org/elasticsearch/common/collect/ImmutableOpenMap.java @@ -209,7 +209,7 @@ public boolean remove(Object o) { } public Spliterator> spliterator() { - return Spliterators.spliteratorUnknownSize(iterator(), 0); + return Spliterators.spliterator(iterator(), size(), Spliterator.SIZED); } public void forEach(Consumer> action) { From d1bd822161572f7e443ef7dcebde43ba8adba299 Mon Sep 17 00:00:00 2001 From: Kevin Lacabane Date: Wed, 16 Feb 2022 10:46:29 +0100 Subject: [PATCH 121/167] [Stack Monitoring] add kibana_stats version alias to -mb template (#83930) * add kibana_stats version alias * increment version number Co-authored-by: James Baiera Co-authored-by: James Baiera --- .../plugin/core/src/main/resources/monitoring-kibana-mb.json | 4 ++++ .../xpack/monitoring/MonitoringTemplateRegistry.java | 2 +- 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/x-pack/plugin/core/src/main/resources/monitoring-kibana-mb.json b/x-pack/plugin/core/src/main/resources/monitoring-kibana-mb.json index e155f74ae0486..262e07d37c5ea 100644 --- a/x-pack/plugin/core/src/main/resources/monitoring-kibana-mb.json +++ b/x-pack/plugin/core/src/main/resources/monitoring-kibana-mb.json @@ -492,6 +492,10 @@ "uuid": { "type": "alias", "path": "service.id" + }, + "version": { + "type": "alias", + "path": "service.version" } } }, diff --git a/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/MonitoringTemplateRegistry.java 
b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/MonitoringTemplateRegistry.java index 4e2ed262bece1..c72a7ddfb9f3b 100644 --- a/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/MonitoringTemplateRegistry.java +++ b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/MonitoringTemplateRegistry.java @@ -78,7 +78,7 @@ public class MonitoringTemplateRegistry extends IndexTemplateRegistry { * writes monitoring data in ECS format as of 8.0. These templates define the ECS schema as well as alias fields for the old monitoring * mappings that point to the corresponding ECS fields. */ - public static final int STACK_MONITORING_REGISTRY_VERSION = Version.V_8_0_0.id; + public static final int STACK_MONITORING_REGISTRY_VERSION = Version.V_8_0_0.id + 1; private static final String STACK_MONITORING_REGISTRY_VERSION_VARIABLE = "xpack.stack.monitoring.template.release.version"; private static final String STACK_TEMPLATE_VERSION = "8"; private static final String STACK_TEMPLATE_VERSION_VARIABLE = "xpack.stack.monitoring.template.version"; From e6abd9fe3d26b7233255a9fa7868d0642da57a66 Mon Sep 17 00:00:00 2001 From: Luigi Dell'Aquila Date: Wed, 16 Feb 2022 11:48:49 +0100 Subject: [PATCH 122/167] QL: Add leniency option to SQL CLI (#83795) By default the query behaviour of the SQL CLI is strict (i.e. non-lenient), so queries that return multi-value fields return an error. We now add an option to allow lenient behaviour (i.e. in case of multi-value fields, return the first value). This behaviour can be enabled with the following command: lenient = true
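As a rough usage illustration (the index name, document contents and column widths are hypothetical; the responses follow the expectations of the LenientTestCase added below, with statement terminators and terminal escape codes elided):

    sql> SELECT * FROM test
    Server encountered an error [Arrays (returned by [tags]) are not supported]
    sql> lenient = true
    lenient set to true
    sql> SELECT * FROM test
         name      |     tags
    ---------------+---------------
    foo            |bar

--- docs/changelog/83795.yaml | 6 ++ .../xpack/sql/qa/multi_node/CliLenientIT.java | 11 ++++ .../xpack/sql/qa/security/CliLenientIT.java | 28 +++++++++ .../sql/qa/single_node/CliLenientIT.java | 11 ++++ .../xpack/sql/qa/cli/LenientTestCase.java | 46 ++++++++++++++ .../org/elasticsearch/xpack/sql/cli/Cli.java | 6 +- .../cli/command/AbstractServerCliCommand.java | 2 +- .../xpack/sql/cli/command/CliSession.java | 42 ++----------- .../cli/command/CliSessionConfiguration.java | 60 +++++++++++++++++++ .../cli/command/FetchSeparatorCliCommand.java | 4 +- .../sql/cli/command/FetchSizeCliCommand.java | 4 +- .../sql/cli/command/LenientCliCommand.java | 31 ++++++++++ .../cli/command/ServerQueryCliCommand.java | 6 +- .../sql/cli/command/BuiltinCommandTests.java | 33 +++++++--- .../command/ServerQueryCliCommandTests.java | 30 +++++----- .../xpack/sql/client/HttpClient.java | 6 +- .../sql/client/HttpClientRequestTests.java | 2 +- 17 files changed, 255 insertions(+), 73 deletions(-) create mode 100644 docs/changelog/83795.yaml create mode 100644 x-pack/plugin/sql/qa/server/multi-node/src/test/java/org/elasticsearch/xpack/sql/qa/multi_node/CliLenientIT.java create mode 100644 x-pack/plugin/sql/qa/server/security/src/test/java/org/elasticsearch/xpack/sql/qa/security/CliLenientIT.java create mode 100644 x-pack/plugin/sql/qa/server/single-node/src/test/java/org/elasticsearch/xpack/sql/qa/single_node/CliLenientIT.java create mode 100644 x-pack/plugin/sql/qa/server/src/main/java/org/elasticsearch/xpack/sql/qa/cli/LenientTestCase.java create mode 100644 x-pack/plugin/sql/sql-cli/src/main/java/org/elasticsearch/xpack/sql/cli/command/CliSessionConfiguration.java create mode 100644 x-pack/plugin/sql/sql-cli/src/main/java/org/elasticsearch/xpack/sql/cli/command/LenientCliCommand.java diff --git a/docs/changelog/83795.yaml b/docs/changelog/83795.yaml new file mode 100644 index 0000000000000..af5a670918a7a --- /dev/null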
+++ b/docs/changelog/83795.yaml @@ -0,0 +1,6 @@ +pr: 83795 +summary: Add leniency option to SQL CLI +area: SQL +type: enhancement +issues: + - 67436 diff --git a/x-pack/plugin/sql/qa/server/multi-node/src/test/java/org/elasticsearch/xpack/sql/qa/multi_node/CliLenientIT.java b/x-pack/plugin/sql/qa/server/multi-node/src/test/java/org/elasticsearch/xpack/sql/qa/multi_node/CliLenientIT.java new file mode 100644 index 0000000000000..fc4a04570ff67 --- /dev/null +++ b/x-pack/plugin/sql/qa/server/multi-node/src/test/java/org/elasticsearch/xpack/sql/qa/multi_node/CliLenientIT.java @@ -0,0 +1,11 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ +package org.elasticsearch.xpack.sql.qa.multi_node; + +import org.elasticsearch.xpack.sql.qa.cli.LenientTestCase; + +public class CliLenientIT extends LenientTestCase {} diff --git a/x-pack/plugin/sql/qa/server/security/src/test/java/org/elasticsearch/xpack/sql/qa/security/CliLenientIT.java b/x-pack/plugin/sql/qa/server/security/src/test/java/org/elasticsearch/xpack/sql/qa/security/CliLenientIT.java new file mode 100644 index 0000000000000..87e056baa6751 --- /dev/null +++ b/x-pack/plugin/sql/qa/server/security/src/test/java/org/elasticsearch/xpack/sql/qa/security/CliLenientIT.java @@ -0,0 +1,28 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ +package org.elasticsearch.xpack.sql.qa.security; + +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.xpack.sql.qa.cli.EmbeddedCli.SecurityConfig; +import org.elasticsearch.xpack.sql.qa.cli.LenientTestCase; + +public class CliLenientIT extends LenientTestCase { + @Override + protected Settings restClientSettings() { + return RestSqlIT.securitySettings(); + } + + @Override + protected String getProtocol() { + return RestSqlIT.SSL_ENABLED ? "https" : "http"; + } + + @Override + protected SecurityConfig securityConfig() { + return CliSecurityIT.adminSecurityConfig(); + } +} diff --git a/x-pack/plugin/sql/qa/server/single-node/src/test/java/org/elasticsearch/xpack/sql/qa/single_node/CliLenientIT.java b/x-pack/plugin/sql/qa/server/single-node/src/test/java/org/elasticsearch/xpack/sql/qa/single_node/CliLenientIT.java new file mode 100644 index 0000000000000..afcfca0a01ed2 --- /dev/null +++ b/x-pack/plugin/sql/qa/server/single-node/src/test/java/org/elasticsearch/xpack/sql/qa/single_node/CliLenientIT.java @@ -0,0 +1,11 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ +package org.elasticsearch.xpack.sql.qa.single_node; + +import org.elasticsearch.xpack.sql.qa.cli.LenientTestCase; + +public class CliLenientIT extends LenientTestCase {} diff --git a/x-pack/plugin/sql/qa/server/src/main/java/org/elasticsearch/xpack/sql/qa/cli/LenientTestCase.java b/x-pack/plugin/sql/qa/server/src/main/java/org/elasticsearch/xpack/sql/qa/cli/LenientTestCase.java new file mode 100644 index 0000000000000..76f84541e5bb9 --- /dev/null +++ b/x-pack/plugin/sql/qa/server/src/main/java/org/elasticsearch/xpack/sql/qa/cli/LenientTestCase.java @@ -0,0 +1,46 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ +package org.elasticsearch.xpack.sql.qa.cli; + +import org.elasticsearch.test.hamcrest.RegexMatcher; + +import java.io.IOException; + +import static org.hamcrest.Matchers.containsString; + +public abstract class LenientTestCase extends CliIntegrationTestCase { + + public void testLenientCommand() throws IOException { + index("test", body -> body.field("name", "foo").field("tags", new String[] { "bar", "bar" })); + assertEquals("[?1l>[?1000l[?2004llenient set to [90mtrue[0m", command("lenient = true")); + assertThat(command("SELECT * FROM test"), RegexMatcher.matches("\\s*name\\s*\\|\\s*tags\\s*")); + assertThat(readLine(), containsString("----------")); + assertThat(readLine(), RegexMatcher.matches("\\s*foo\\s*\\|\\s*bar\\s*")); + assertEquals("", readLine()); + } + + public void testDefaultNoLenient() throws IOException { + index("test", body -> body.field("name", "foo").field("tags", new String[] { "bar", "bar" })); + assertThat( + command("SELECT * FROM test"), + containsString("Server encountered an error [Arrays (returned by [tags]) are not supported]") + ); + while ("][23;31;1m][0m".equals(readLine()) == false) + ; // clean console to avoid failures on shutdown + } + + public void testExplicitNoLenient() throws IOException { + index("test", body -> body.field("name", "foo").field("tags", new String[] { "bar", "bar" })); + assertEquals("[?1l>[?1000l[?2004llenient set to [90mfalse[0m", command("lenient = false")); + assertThat( + command("SELECT * FROM test"), + containsString("Server encountered an error [Arrays (returned by [tags]) are not supported]") + ); + while ("][23;31;1m][0m".equals(readLine()) == false) + ; // clean console to avoid failures on shutdown + } +} diff --git a/x-pack/plugin/sql/sql-cli/src/main/java/org/elasticsearch/xpack/sql/cli/Cli.java b/x-pack/plugin/sql/sql-cli/src/main/java/org/elasticsearch/xpack/sql/cli/Cli.java index 8ccc079860937..97d5bcc3da927 100644 --- a/x-pack/plugin/sql/sql-cli/src/main/java/org/elasticsearch/xpack/sql/cli/Cli.java +++ b/x-pack/plugin/sql/sql-cli/src/main/java/org/elasticsearch/xpack/sql/cli/Cli.java @@ -19,6 +19,7 @@ import org.elasticsearch.xpack.sql.cli.command.CliSession; import org.elasticsearch.xpack.sql.cli.command.FetchSeparatorCliCommand; import org.elasticsearch.xpack.sql.cli.command.FetchSizeCliCommand; +import org.elasticsearch.xpack.sql.cli.command.LenientCliCommand; import org.elasticsearch.xpack.sql.cli.command.PrintLogoCommand; import org.elasticsearch.xpack.sql.cli.command.ServerInfoCliCommand; import org.elasticsearch.xpack.sql.cli.command.ServerQueryCliCommand; @@ -128,6 +129,7 @@ private void execute(String uri, boolean debug, boolean binary, String keystoreL new PrintLogoCommand(), new 
ClearScreenCliCommand(), new FetchSizeCliCommand(), + new LenientCliCommand(), new FetchSeparatorCliCommand(), new ServerInfoCliCommand(), new ServerQueryCliCommand() @@ -136,7 +138,7 @@ private void execute(String uri, boolean debug, boolean binary, String keystoreL ConnectionBuilder connectionBuilder = new ConnectionBuilder(cliTerminal); ConnectionConfiguration con = connectionBuilder.buildConnection(uri, keystoreLocation, binary); CliSession cliSession = new CliSession(new HttpClient(con)); - cliSession.setDebug(debug); + cliSession.cfg().setDebug(debug); if (checkConnection) { checkConnection(cliSession, cliTerminal, con); } @@ -150,7 +152,7 @@ private void checkConnection(CliSession cliSession, CliTerminal cliTerminal, Con try { cliSession.checkConnection(); } catch (ClientException ex) { - if (cliSession.isDebug()) { + if (cliSession.cfg().isDebug()) { cliTerminal.error("Client Exception", ex.getMessage()); cliTerminal.println(); cliTerminal.printStackTrace(ex); diff --git a/x-pack/plugin/sql/sql-cli/src/main/java/org/elasticsearch/xpack/sql/cli/command/AbstractServerCliCommand.java b/x-pack/plugin/sql/sql-cli/src/main/java/org/elasticsearch/xpack/sql/cli/command/AbstractServerCliCommand.java index a3ede76da53a7..89f8a71ca9f5c 100644 --- a/x-pack/plugin/sql/sql-cli/src/main/java/org/elasticsearch/xpack/sql/cli/command/AbstractServerCliCommand.java +++ b/x-pack/plugin/sql/sql-cli/src/main/java/org/elasticsearch/xpack/sql/cli/command/AbstractServerCliCommand.java @@ -34,7 +34,7 @@ protected void handleExceptionWhileCommunicatingWithServer(CliTerminal terminal, .param(e.getMessage() == null ? e.getClass().getName() : e.getMessage()) .error("]") .ln(); - if (cliSession.isDebug()) { + if (cliSession.cfg().isDebug()) { terminal.printStackTrace(e); } } diff --git a/x-pack/plugin/sql/sql-cli/src/main/java/org/elasticsearch/xpack/sql/cli/command/CliSession.java b/x-pack/plugin/sql/sql-cli/src/main/java/org/elasticsearch/xpack/sql/cli/command/CliSession.java index 34502aab9db3f..b48c4b84cd0cf 100644 --- a/x-pack/plugin/sql/sql-cli/src/main/java/org/elasticsearch/xpack/sql/cli/command/CliSession.java +++ b/x-pack/plugin/sql/sql-cli/src/main/java/org/elasticsearch/xpack/sql/cli/command/CliSession.java @@ -9,7 +9,6 @@ import org.elasticsearch.xpack.sql.client.ClientException; import org.elasticsearch.xpack.sql.client.ClientVersion; import org.elasticsearch.xpack.sql.client.HttpClient; -import org.elasticsearch.xpack.sql.proto.CoreProtocol; import org.elasticsearch.xpack.sql.proto.MainResponse; import org.elasticsearch.xpack.sql.proto.SqlVersion; @@ -20,52 +19,19 @@ */ public class CliSession { private final HttpClient httpClient; - private int fetchSize = CoreProtocol.FETCH_SIZE; - private String fetchSeparator = ""; - private boolean debug; - private boolean binary; + private final CliSessionConfiguration configuration; public CliSession(HttpClient httpClient) { this.httpClient = httpClient; + this.configuration = new CliSessionConfiguration(); } public HttpClient getClient() { return httpClient; } - public void setFetchSize(int fetchSize) { - if (fetchSize <= 0) { - throw new IllegalArgumentException("Must be > 0."); - } - this.fetchSize = fetchSize; - } - - public int getFetchSize() { - return fetchSize; - } - - public void setFetchSeparator(String fetchSeparator) { - this.fetchSeparator = fetchSeparator; - } - - public String getFetchSeparator() { - return fetchSeparator; - } - - public void setDebug(boolean debug) { - this.debug = debug; - } - - public boolean isDebug() { - return debug; - } 
- - public void setBinary(boolean binary) { - this.binary = binary; - } - - public boolean isBinary() { - return binary; + public CliSessionConfiguration cfg() { + return configuration; } public void checkConnection() throws ClientException { diff --git a/x-pack/plugin/sql/sql-cli/src/main/java/org/elasticsearch/xpack/sql/cli/command/CliSessionConfiguration.java b/x-pack/plugin/sql/sql-cli/src/main/java/org/elasticsearch/xpack/sql/cli/command/CliSessionConfiguration.java new file mode 100644 index 0000000000000..4507d36946bde --- /dev/null +++ b/x-pack/plugin/sql/sql-cli/src/main/java/org/elasticsearch/xpack/sql/cli/command/CliSessionConfiguration.java @@ -0,0 +1,60 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.sql.cli.command; + +import org.elasticsearch.xpack.sql.proto.CoreProtocol; + +/** + * Configuration for CLI session + */ +public class CliSessionConfiguration { + private int fetchSize; + private String fetchSeparator = ""; + private boolean debug; + private boolean lenient; + + public CliSessionConfiguration() { + this.fetchSize = CoreProtocol.FETCH_SIZE; + this.lenient = CoreProtocol.FIELD_MULTI_VALUE_LENIENCY; + } + + public void setFetchSize(int fetchSize) { + if (fetchSize <= 0) { + throw new IllegalArgumentException("Must be > 0."); + } + this.fetchSize = fetchSize; + } + + public int getFetchSize() { + return fetchSize; + } + + public void setFetchSeparator(String fetchSeparator) { + this.fetchSeparator = fetchSeparator; + } + + public String getFetchSeparator() { + return fetchSeparator; + } + + public void setDebug(boolean debug) { + this.debug = debug; + } + + public boolean isDebug() { + return debug; + } + + public boolean isLenient() { + return lenient; + } + + public void setLenient(boolean lenient) { + this.lenient = lenient; + } +} diff --git a/x-pack/plugin/sql/sql-cli/src/main/java/org/elasticsearch/xpack/sql/cli/command/FetchSeparatorCliCommand.java b/x-pack/plugin/sql/sql-cli/src/main/java/org/elasticsearch/xpack/sql/cli/command/FetchSeparatorCliCommand.java index bd07a5b9f04e2..efb6c9c054775 100644 --- a/x-pack/plugin/sql/sql-cli/src/main/java/org/elasticsearch/xpack/sql/cli/command/FetchSeparatorCliCommand.java +++ b/x-pack/plugin/sql/sql-cli/src/main/java/org/elasticsearch/xpack/sql/cli/command/FetchSeparatorCliCommand.java @@ -22,8 +22,8 @@ public FetchSeparatorCliCommand() { @Override protected boolean doHandle(CliTerminal terminal, CliSession cliSession, Matcher m, String line) { - cliSession.setFetchSeparator(m.group(1)); - terminal.line().text("fetch separator set to \"").em(cliSession.getFetchSeparator()).text("\"").end(); + cliSession.cfg().setFetchSeparator(m.group(1)); + terminal.line().text("fetch separator set to \"").em(cliSession.cfg().getFetchSeparator()).text("\"").end(); return true; } } diff --git a/x-pack/plugin/sql/sql-cli/src/main/java/org/elasticsearch/xpack/sql/cli/command/FetchSizeCliCommand.java b/x-pack/plugin/sql/sql-cli/src/main/java/org/elasticsearch/xpack/sql/cli/command/FetchSizeCliCommand.java index c4b3f1aeeb0ae..f17b3c469aa2d 100644 --- a/x-pack/plugin/sql/sql-cli/src/main/java/org/elasticsearch/xpack/sql/cli/command/FetchSizeCliCommand.java +++ b/x-pack/plugin/sql/sql-cli/src/main/java/org/elasticsearch/xpack/sql/cli/command/FetchSizeCliCommand.java @@ -23,7 +23,7 @@ public 
FetchSizeCliCommand() { @Override protected boolean doHandle(CliTerminal terminal, CliSession cliSession, Matcher m, String line) { try { - cliSession.setFetchSize(Integer.parseInt(m.group(1))); + cliSession.cfg().setFetchSize(Integer.parseInt(m.group(1))); } catch (NumberFormatException e) { terminal.line().error("Invalid fetch size [").param(m.group(1)).error("]").end(); return true; @@ -31,7 +31,7 @@ protected boolean doHandle(CliTerminal terminal, CliSession cliSession, Matcher terminal.line().error("Invalid fetch size [").param(m.group(1)).error("]. " + e.getMessage()).end(); return true; } - terminal.line().text("fetch size set to ").em(Integer.toString(cliSession.getFetchSize())).end(); + terminal.line().text("fetch size set to ").em(Integer.toString(cliSession.cfg().getFetchSize())).end(); return true; } } diff --git a/x-pack/plugin/sql/sql-cli/src/main/java/org/elasticsearch/xpack/sql/cli/command/LenientCliCommand.java b/x-pack/plugin/sql/sql-cli/src/main/java/org/elasticsearch/xpack/sql/cli/command/LenientCliCommand.java new file mode 100644 index 0000000000000..fd285a35c96e5 --- /dev/null +++ b/x-pack/plugin/sql/sql-cli/src/main/java/org/elasticsearch/xpack/sql/cli/command/LenientCliCommand.java @@ -0,0 +1,31 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ +package org.elasticsearch.xpack.sql.cli.command; + +import org.elasticsearch.xpack.sql.cli.CliTerminal; + +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +/** + * lenient command, enables/disables field multi-value leniency. + * i.e. with lenient = true, in case of array values, return the first value, with no guarantee of consistent results.
+ * + */ +public class LenientCliCommand extends AbstractCliCommand { + + public LenientCliCommand() { + super(Pattern.compile("lenient *= *(.+)", Pattern.CASE_INSENSITIVE)); + } + + @Override + protected boolean doHandle(CliTerminal terminal, CliSession cliSession, Matcher m, String line) { + cliSession.cfg().setLenient(Boolean.parseBoolean(m.group(1))); + terminal.line().text("lenient set to ").em(Boolean.toString(cliSession.cfg().isLenient())).end(); + return true; + } +} diff --git a/x-pack/plugin/sql/sql-cli/src/main/java/org/elasticsearch/xpack/sql/cli/command/ServerQueryCliCommand.java b/x-pack/plugin/sql/sql-cli/src/main/java/org/elasticsearch/xpack/sql/cli/command/ServerQueryCliCommand.java index 1d929ed7708b4..ae582837b2e9f 100644 --- a/x-pack/plugin/sql/sql-cli/src/main/java/org/elasticsearch/xpack/sql/cli/command/ServerQueryCliCommand.java +++ b/x-pack/plugin/sql/sql-cli/src/main/java/org/elasticsearch/xpack/sql/cli/command/ServerQueryCliCommand.java @@ -26,7 +26,7 @@ protected boolean doHandle(CliTerminal terminal, CliSession cliSession, String l SimpleFormatter formatter; String data; try { - response = cliClient.basicQuery(line, cliSession.getFetchSize()); + response = cliClient.basicQuery(line, cliSession.cfg().getFetchSize(), cliSession.cfg().isLenient()); formatter = new SimpleFormatter(response.columns(), response.rows(), CLI); data = formatter.formatWithHeader(response.columns(), response.rows()); while (true) { @@ -36,8 +36,8 @@ protected boolean doHandle(CliTerminal terminal, CliSession cliSession, String l terminal.flush(); return true; } - if (false == cliSession.getFetchSeparator().equals("")) { - terminal.println(cliSession.getFetchSeparator()); + if (false == cliSession.cfg().getFetchSeparator().equals("")) { + terminal.println(cliSession.cfg().getFetchSeparator()); } response = cliSession.getClient().nextPage(response.cursor()); data = formatter.formatWithoutHeader(response.rows()); diff --git a/x-pack/plugin/sql/sql-cli/src/test/java/org/elasticsearch/xpack/sql/cli/command/BuiltinCommandTests.java b/x-pack/plugin/sql/sql-cli/src/test/java/org/elasticsearch/xpack/sql/cli/command/BuiltinCommandTests.java index 6c935885662a4..0d809f940c820 100644 --- a/x-pack/plugin/sql/sql-cli/src/test/java/org/elasticsearch/xpack/sql/cli/command/BuiltinCommandTests.java +++ b/x-pack/plugin/sql/sql-cli/src/test/java/org/elasticsearch/xpack/sql/cli/command/BuiltinCommandTests.java @@ -44,20 +44,20 @@ public void testFetchSeparator() throws Exception { CliSession cliSession = new CliSession(httpClient); FetchSeparatorCliCommand cliCommand = new FetchSeparatorCliCommand(); assertFalse(cliCommand.handle(testTerminal, cliSession, "fetch")); - assertEquals("", cliSession.getFetchSeparator()); + assertEquals("", cliSession.cfg().getFetchSeparator()); assertTrue(cliCommand.handle(testTerminal, cliSession, "fetch_separator = \"foo\"")); - assertEquals("foo", cliSession.getFetchSeparator()); + assertEquals("foo", cliSession.cfg().getFetchSeparator()); assertEquals("fetch separator set to \"foo\"", testTerminal.toString()); testTerminal.clear(); assertTrue(cliCommand.handle(testTerminal, cliSession, "fetch_separator=\"bar\"")); - assertEquals("bar", cliSession.getFetchSeparator()); + assertEquals("bar", cliSession.cfg().getFetchSeparator()); assertEquals("fetch separator set to \"bar\"", testTerminal.toString()); testTerminal.clear(); assertTrue(cliCommand.handle(testTerminal, cliSession, "fetch separator=\"baz\"")); - assertEquals("baz", cliSession.getFetchSeparator()); + 
assertEquals("baz", cliSession.cfg().getFetchSeparator()); assertEquals("fetch separator set to \"baz\"", testTerminal.toString()); verifyNoMoreInteractions(httpClient); } @@ -68,21 +68,21 @@ public void testFetchSize() throws Exception { CliSession cliSession = new CliSession(httpClient); FetchSizeCliCommand cliCommand = new FetchSizeCliCommand(); assertFalse(cliCommand.handle(testTerminal, cliSession, "fetch")); - assertEquals(1000L, cliSession.getFetchSize()); + assertEquals(1000L, cliSession.cfg().getFetchSize()); assertTrue(cliCommand.handle(testTerminal, cliSession, "fetch_size = \"foo\"")); - assertEquals(1000L, cliSession.getFetchSize()); + assertEquals(1000L, cliSession.cfg().getFetchSize()); assertEquals("Invalid fetch size [\"foo\"]", testTerminal.toString()); testTerminal.clear(); assertTrue(cliCommand.handle(testTerminal, cliSession, "fetch_size = 10")); - assertEquals(10L, cliSession.getFetchSize()); + assertEquals(10L, cliSession.cfg().getFetchSize()); assertEquals("fetch size set to 10", testTerminal.toString()); testTerminal.clear(); assertTrue(cliCommand.handle(testTerminal, cliSession, "fetch_size = -10")); - assertEquals(10L, cliSession.getFetchSize()); + assertEquals(10L, cliSession.cfg().getFetchSize()); assertEquals("Invalid fetch size [-10]. Must be > 0.", testTerminal.toString()); verifyNoMoreInteractions(httpClient); } @@ -98,4 +98,21 @@ public void testPrintLogo() throws Exception { verifyNoMoreInteractions(httpClient); } + public void testLenient() { + TestTerminal testTerminal = new TestTerminal(); + HttpClient httpClient = mock(HttpClient.class); + CliSession cliSession = new CliSession(httpClient); + LenientCliCommand cliCommand = new LenientCliCommand(); + assertFalse(cliCommand.handle(testTerminal, cliSession, "lenient")); + assertEquals(false, cliSession.cfg().isLenient()); + assertTrue(cliCommand.handle(testTerminal, cliSession, "lenient = true")); + assertEquals(true, cliSession.cfg().isLenient()); + assertEquals("lenient set to true", testTerminal.toString()); + testTerminal.clear(); + assertTrue(cliCommand.handle(testTerminal, cliSession, "lenient = false")); + assertEquals(false, cliSession.cfg().isLenient()); + assertEquals("lenient set to false", testTerminal.toString()); + testTerminal.clear(); + } + } diff --git a/x-pack/plugin/sql/sql-cli/src/test/java/org/elasticsearch/xpack/sql/cli/command/ServerQueryCliCommandTests.java b/x-pack/plugin/sql/sql-cli/src/test/java/org/elasticsearch/xpack/sql/cli/command/ServerQueryCliCommandTests.java index e005e9f668ff9..bc1eb75bd9a76 100644 --- a/x-pack/plugin/sql/sql-cli/src/test/java/org/elasticsearch/xpack/sql/cli/command/ServerQueryCliCommandTests.java +++ b/x-pack/plugin/sql/sql-cli/src/test/java/org/elasticsearch/xpack/sql/cli/command/ServerQueryCliCommandTests.java @@ -32,11 +32,11 @@ public void testExceptionHandling() throws Exception { TestTerminal testTerminal = new TestTerminal(); HttpClient client = mock(HttpClient.class); CliSession cliSession = new CliSession(client); - when(client.basicQuery("blah", 1000)).thenThrow(new SQLException("test exception")); + when(client.basicQuery("blah", 1000, false)).thenThrow(new SQLException("test exception")); ServerQueryCliCommand cliCommand = new ServerQueryCliCommand(); assertTrue(cliCommand.handle(testTerminal, cliSession, "blah")); assertEquals("Bad request [test exception]\n", testTerminal.toString()); - verify(client, times(1)).basicQuery(eq("blah"), eq(1000)); + verify(client, times(1)).basicQuery(eq("blah"), eq(1000), eq(false)); 
verifyNoMoreInteractions(client); } @@ -44,8 +44,8 @@ public void testOnePageQuery() throws Exception { TestTerminal testTerminal = new TestTerminal(); HttpClient client = mock(HttpClient.class); CliSession cliSession = new CliSession(client); - cliSession.setFetchSize(10); - when(client.basicQuery("test query", 10)).thenReturn(fakeResponse("", true, "foo")); + cliSession.cfg().setFetchSize(10); + when(client.basicQuery("test query", 10, false)).thenReturn(fakeResponse("", true, "foo")); ServerQueryCliCommand cliCommand = new ServerQueryCliCommand(); assertTrue(cliCommand.handle(testTerminal, cliSession, "test query")); assertEquals(""" @@ -53,7 +53,7 @@ public void testOnePageQuery() throws Exception { --------------- foo \s """, testTerminal.toString()); - verify(client, times(1)).basicQuery(eq("test query"), eq(10)); + verify(client, times(1)).basicQuery(eq("test query"), eq(10), eq(false)); verifyNoMoreInteractions(client); } @@ -61,8 +61,8 @@ public void testThreePageQuery() throws Exception { TestTerminal testTerminal = new TestTerminal(); HttpClient client = mock(HttpClient.class); CliSession cliSession = new CliSession(client); - cliSession.setFetchSize(10); - when(client.basicQuery("test query", 10)).thenReturn(fakeResponse("my_cursor1", true, "first")); + cliSession.cfg().setFetchSize(10); + when(client.basicQuery("test query", 10, false)).thenReturn(fakeResponse("my_cursor1", true, "first")); when(client.nextPage("my_cursor1")).thenReturn(fakeResponse("my_cursor2", false, "second")); when(client.nextPage("my_cursor2")).thenReturn(fakeResponse("", false, "third")); ServerQueryCliCommand cliCommand = new ServerQueryCliCommand(); @@ -74,7 +74,7 @@ public void testThreePageQuery() throws Exception { second \s third \s """, testTerminal.toString()); - verify(client, times(1)).basicQuery(eq("test query"), eq(10)); + verify(client, times(1)).basicQuery(eq("test query"), eq(10), eq(false)); verify(client, times(2)).nextPage(any()); verifyNoMoreInteractions(client); } @@ -83,10 +83,10 @@ public void testTwoPageQueryWithSeparator() throws Exception { TestTerminal testTerminal = new TestTerminal(); HttpClient client = mock(HttpClient.class); CliSession cliSession = new CliSession(client); - cliSession.setFetchSize(15); + cliSession.cfg().setFetchSize(15); // Set a separator - cliSession.setFetchSeparator("-----"); - when(client.basicQuery("test query", 15)).thenReturn(fakeResponse("my_cursor1", true, "first")); + cliSession.cfg().setFetchSeparator("-----"); + when(client.basicQuery("test query", 15, false)).thenReturn(fakeResponse("my_cursor1", true, "first")); when(client.nextPage("my_cursor1")).thenReturn(fakeResponse("", false, "second")); ServerQueryCliCommand cliCommand = new ServerQueryCliCommand(); assertTrue(cliCommand.handle(testTerminal, cliSession, "test query")); @@ -97,7 +97,7 @@ public void testTwoPageQueryWithSeparator() throws Exception { ----- second \s """, testTerminal.toString()); - verify(client, times(1)).basicQuery(eq("test query"), eq(15)); + verify(client, times(1)).basicQuery(eq("test query"), eq(15), eq(false)); verify(client, times(1)).nextPage(any()); verifyNoMoreInteractions(client); } @@ -106,8 +106,8 @@ public void testCursorCleanupOnError() throws Exception { TestTerminal testTerminal = new TestTerminal(); HttpClient client = mock(HttpClient.class); CliSession cliSession = new CliSession(client); - cliSession.setFetchSize(15); - when(client.basicQuery("test query", 15)).thenReturn(fakeResponse("my_cursor1", true, "first")); + 
cliSession.cfg().setFetchSize(15); + when(client.basicQuery("test query", 15, false)).thenReturn(fakeResponse("my_cursor1", true, "first")); when(client.nextPage("my_cursor1")).thenThrow(new SQLException("test exception")); when(client.queryClose("my_cursor1", Mode.CLI)).thenReturn(true); ServerQueryCliCommand cliCommand = new ServerQueryCliCommand(); @@ -118,7 +118,7 @@ public void testCursorCleanupOnError() throws Exception { first \s Bad request [test exception] """, testTerminal.toString()); - verify(client, times(1)).basicQuery(eq("test query"), eq(15)); + verify(client, times(1)).basicQuery(eq("test query"), eq(15), eq(false)); verify(client, times(1)).nextPage(any()); verify(client, times(1)).queryClose(eq("my_cursor1"), eq(Mode.CLI)); verifyNoMoreInteractions(client); diff --git a/x-pack/plugin/sql/sql-client/src/main/java/org/elasticsearch/xpack/sql/client/HttpClient.java b/x-pack/plugin/sql/sql-client/src/main/java/org/elasticsearch/xpack/sql/client/HttpClient.java index 8c14a8008540c..d3784b70a00e2 100644 --- a/x-pack/plugin/sql/sql-client/src/main/java/org/elasticsearch/xpack/sql/client/HttpClient.java +++ b/x-pack/plugin/sql/sql-client/src/main/java/org/elasticsearch/xpack/sql/client/HttpClient.java @@ -61,6 +61,10 @@ public MainResponse serverInfo() throws SQLException { } public SqlQueryResponse basicQuery(String query, int fetchSize) throws SQLException { + return basicQuery(query, fetchSize, CoreProtocol.FIELD_MULTI_VALUE_LENIENCY); + } + + public SqlQueryResponse basicQuery(String query, int fetchSize, boolean fieldMultiValueLeniency) throws SQLException { // TODO allow customizing the time zone - this is what session set/reset/get should be about // method called only from CLI SqlQueryRequest sqlRequest = new SqlQueryRequest( @@ -74,7 +78,7 @@ public SqlQueryResponse basicQuery(String query, int fetchSize) throws SQLExcept Boolean.FALSE, null, new RequestInfo(Mode.CLI, ClientVersion.CURRENT), - false, + fieldMultiValueLeniency, false, cfg.binaryCommunication() ); diff --git a/x-pack/plugin/sql/sql-client/src/test/java/org/elasticsearch/xpack/sql/client/HttpClientRequestTests.java b/x-pack/plugin/sql/sql-client/src/test/java/org/elasticsearch/xpack/sql/client/HttpClientRequestTests.java index 6b4648702fb0f..6ff8fc6946856 100644 --- a/x-pack/plugin/sql/sql-client/src/test/java/org/elasticsearch/xpack/sql/client/HttpClientRequestTests.java +++ b/x-pack/plugin/sql/sql-client/src/test/java/org/elasticsearch/xpack/sql/client/HttpClientRequestTests.java @@ -106,7 +106,7 @@ private void assertBinaryRequestForCLI(XContentType xContentType) throws URISynt prepareMockResponse(); try { - httpClient.basicQuery(query, fetchSize); + httpClient.basicQuery(query, fetchSize, randomBoolean()); } catch (SQLException e) { logger.info("Ignored SQLException", e); } From d465ee1be44fcc32783e8daa6eae8b5c87f3e3ff Mon Sep 17 00:00:00 2001 From: Rene Groeschke Date: Wed, 16 Feb 2022 12:10:54 +0100 Subject: [PATCH 123/167] Replace deprecated API in artifact transforms (#84015) `ArtifactAttributes.ARTIFACT_FORMAT` has been deprecated in favor of the publicly available `ArtifactTypeDefinition.ARTIFACT_TYPE_ATTRIBUTE`
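Schematically, the migration is a one-line attribute swap per call site; a minimal hypothetical plugin (not part of this patch) sketches it against the public Gradle types used in the diffs below:

    import org.gradle.api.Plugin;
    import org.gradle.api.Project;
    import org.gradle.api.artifacts.Configuration;
    import org.gradle.api.artifacts.type.ArtifactTypeDefinition;

    // Hypothetical plugin showing the replacement attribute.
    public class ExampleAttributePlugin implements Plugin<Project> {
        @Override
        public void apply(Project project) {
            Configuration conf = project.getConfigurations().create("exampleExtracted");
            // Before: attribute(ArtifactAttributes.ARTIFACT_FORMAT, ...) - internal, deprecated.
            // After: the public attribute carrying the same artifact-type string.
            conf.getAttributes()
                .attribute(ArtifactTypeDefinition.ARTIFACT_TYPE_ATTRIBUTE, ArtifactTypeDefinition.DIRECTORY_TYPE);
        }
    }

--- .../InternalDistributionArchiveSetupPlugin.java | 7 ++++--- .../gradle/internal/JdkDownloadPlugin.java | 11 +++++------ .../gradle/internal/test/DistroTestPlugin.java | 3 +-- .../gradle/DistributionDownloadPlugin.java | 12 ++++++------ .../gradle/plugin/PluginBuildPlugin.java | 3 +-- .../gradle/test/YamlRestTestPlugin.java | 7 +++---- .../gradle/testclusters/ElasticsearchNode.java | 7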
+++---- modules/reindex/build.gradle | 6 +++--- rest-api-spec/build.gradle | 2 +- x-pack/qa/repository-old-versions/build.gradle | 6 +++--- 10 files changed, 30 insertions(+), 34 deletions(-) diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalDistributionArchiveSetupPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalDistributionArchiveSetupPlugin.java index b23fb215bcffc..53b1fec01cd8f 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalDistributionArchiveSetupPlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalDistributionArchiveSetupPlugin.java @@ -23,7 +23,6 @@ import java.io.File; import static org.elasticsearch.gradle.internal.conventions.GUtils.capitalize; -import static org.gradle.api.internal.artifacts.ArtifactAttributes.ARTIFACT_FORMAT; /** * Provides a DSL and common configurations to define different types of @@ -75,12 +74,14 @@ private void registerAndConfigureDistributionArchivesExtension(Project project) sub.getArtifacts().add(DEFAULT_CONFIGURATION_NAME, distributionArchive.getArchiveTask()); var extractedConfiguration = sub.getConfigurations().create(EXTRACTED_CONFIGURATION_NAME); extractedConfiguration.setCanBeResolved(false); - extractedConfiguration.getAttributes().attribute(ARTIFACT_FORMAT, ArtifactTypeDefinition.DIRECTORY_TYPE); + extractedConfiguration.getAttributes() + .attribute(ArtifactTypeDefinition.ARTIFACT_TYPE_ATTRIBUTE, ArtifactTypeDefinition.DIRECTORY_TYPE); sub.getArtifacts().add(EXTRACTED_CONFIGURATION_NAME, distributionArchive.getExpandedDistTask()); // The "composite" configuration is specifically used for resolving transformed artifacts in an included build var compositeConfiguration = sub.getConfigurations().create(COMPOSITE_CONFIGURATION_NAME); compositeConfiguration.setCanBeResolved(false); - compositeConfiguration.getAttributes().attribute(ARTIFACT_FORMAT, ArtifactTypeDefinition.DIRECTORY_TYPE); + compositeConfiguration.getAttributes() + .attribute(ArtifactTypeDefinition.ARTIFACT_TYPE_ATTRIBUTE, ArtifactTypeDefinition.DIRECTORY_TYPE); compositeConfiguration.getAttributes().attribute(Attribute.of("composite", Boolean.class), true); sub.getArtifacts().add(COMPOSITE_CONFIGURATION_NAME, distributionArchive.getArchiveTask()); sub.getTasks().register("extractedAssemble", task -> diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/JdkDownloadPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/JdkDownloadPlugin.java index a1008babb3987..2bc84b36c8fa1 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/JdkDownloadPlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/JdkDownloadPlugin.java @@ -19,7 +19,6 @@ import org.gradle.api.artifacts.repositories.IvyArtifactRepository; import org.gradle.api.artifacts.type.ArtifactTypeDefinition; import org.gradle.api.attributes.Attribute; -import org.gradle.api.internal.artifacts.ArtifactAttributes; import java.util.Arrays; @@ -40,10 +39,10 @@ public void apply(Project project) { project.getDependencies().getArtifactTypes().maybeCreate(ArtifactTypeDefinition.ZIP_TYPE); project.getDependencies().registerTransform(UnzipTransform.class, transformSpec -> { transformSpec.getFrom() - .attribute(ArtifactAttributes.ARTIFACT_FORMAT, ArtifactTypeDefinition.ZIP_TYPE) + .attribute(ArtifactTypeDefinition.ARTIFACT_TYPE_ATTRIBUTE, 
ArtifactTypeDefinition.ZIP_TYPE) .attribute(jdkAttribute, true); transformSpec.getTo() - .attribute(ArtifactAttributes.ARTIFACT_FORMAT, ArtifactTypeDefinition.DIRECTORY_TYPE) + .attribute(ArtifactTypeDefinition.ARTIFACT_TYPE_ATTRIBUTE, ArtifactTypeDefinition.DIRECTORY_TYPE) .attribute(jdkAttribute, true); transformSpec.parameters(parameters -> parameters.setTrimmedPrefixPattern(JDK_TRIMMED_PREFIX)); }); @@ -51,10 +50,10 @@ public void apply(Project project) { ArtifactTypeDefinition tarArtifactTypeDefinition = project.getDependencies().getArtifactTypes().maybeCreate("tar.gz"); project.getDependencies().registerTransform(SymbolicLinkPreservingUntarTransform.class, transformSpec -> { transformSpec.getFrom() - .attribute(ArtifactAttributes.ARTIFACT_FORMAT, tarArtifactTypeDefinition.getName()) + .attribute(ArtifactTypeDefinition.ARTIFACT_TYPE_ATTRIBUTE, tarArtifactTypeDefinition.getName()) .attribute(jdkAttribute, true); transformSpec.getTo() - .attribute(ArtifactAttributes.ARTIFACT_FORMAT, ArtifactTypeDefinition.DIRECTORY_TYPE) + .attribute(ArtifactTypeDefinition.ARTIFACT_TYPE_ATTRIBUTE, ArtifactTypeDefinition.DIRECTORY_TYPE) .attribute(jdkAttribute, true); transformSpec.parameters(parameters -> { parameters.setTrimmedPrefixPattern(JDK_TRIMMED_PREFIX); @@ -65,7 +64,7 @@ public void apply(Project project) { NamedDomainObjectContainer jdksContainer = project.container(Jdk.class, name -> { Configuration configuration = project.getConfigurations().create("jdk_" + name); configuration.setCanBeConsumed(false); - configuration.getAttributes().attribute(ArtifactAttributes.ARTIFACT_FORMAT, ArtifactTypeDefinition.DIRECTORY_TYPE); + configuration.getAttributes().attribute(ArtifactTypeDefinition.ARTIFACT_TYPE_ATTRIBUTE, ArtifactTypeDefinition.DIRECTORY_TYPE); configuration.getAttributes().attribute(jdkAttribute, true); Jdk jdk = new Jdk(name, configuration, project.getObjects()); configuration.defaultDependencies(dependencies -> { diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/DistroTestPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/DistroTestPlugin.java index 16e7328ea98ff..0c916951bcd1c 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/DistroTestPlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/DistroTestPlugin.java @@ -35,7 +35,6 @@ import org.gradle.api.artifacts.Configuration; import org.gradle.api.artifacts.dsl.DependencyHandler; import org.gradle.api.artifacts.type.ArtifactTypeDefinition; -import org.gradle.api.internal.artifacts.ArtifactAttributes; import org.gradle.api.plugins.JavaBasePlugin; import org.gradle.api.provider.Provider; import org.gradle.api.specs.Specs; @@ -313,7 +312,7 @@ private static Object convertPath( private static Configuration configureExamplePlugin(Project project) { Configuration examplePlugin = project.getConfigurations().create(EXAMPLE_PLUGIN_CONFIGURATION); - examplePlugin.getAttributes().attribute(ArtifactAttributes.ARTIFACT_FORMAT, ArtifactTypeDefinition.ZIP_TYPE); + examplePlugin.getAttributes().attribute(ArtifactTypeDefinition.ARTIFACT_TYPE_ATTRIBUTE, ArtifactTypeDefinition.ZIP_TYPE); DependencyHandler deps = project.getDependencies(); deps.add(EXAMPLE_PLUGIN_CONFIGURATION, deps.project(Map.of("path", ":plugins:analysis-icu", "configuration", "zip"))); return examplePlugin; diff --git a/build-tools/src/main/java/org/elasticsearch/gradle/DistributionDownloadPlugin.java 
b/build-tools/src/main/java/org/elasticsearch/gradle/DistributionDownloadPlugin.java index 9de5d161116f0..d08dc469e5ba5 100644 --- a/build-tools/src/main/java/org/elasticsearch/gradle/DistributionDownloadPlugin.java +++ b/build-tools/src/main/java/org/elasticsearch/gradle/DistributionDownloadPlugin.java @@ -18,7 +18,6 @@ import org.gradle.api.artifacts.dsl.DependencyHandler; import org.gradle.api.artifacts.repositories.IvyArtifactRepository; import org.gradle.api.artifacts.type.ArtifactTypeDefinition; -import org.gradle.api.internal.artifacts.ArtifactAttributes; import org.gradle.api.model.ObjectFactory; import org.gradle.api.provider.Property; import org.gradle.api.provider.Provider; @@ -61,14 +60,14 @@ public void setDockerAvailability(Provider dockerAvailability) { @Override public void apply(Project project) { project.getDependencies().registerTransform(UnzipTransform.class, transformSpec -> { - transformSpec.getFrom().attribute(ArtifactAttributes.ARTIFACT_FORMAT, ArtifactTypeDefinition.ZIP_TYPE); - transformSpec.getTo().attribute(ArtifactAttributes.ARTIFACT_FORMAT, ArtifactTypeDefinition.DIRECTORY_TYPE); + transformSpec.getFrom().attribute(ArtifactTypeDefinition.ARTIFACT_TYPE_ATTRIBUTE, ArtifactTypeDefinition.ZIP_TYPE); + transformSpec.getTo().attribute(ArtifactTypeDefinition.ARTIFACT_TYPE_ATTRIBUTE, ArtifactTypeDefinition.DIRECTORY_TYPE); }); ArtifactTypeDefinition tarArtifactTypeDefinition = project.getDependencies().getArtifactTypes().maybeCreate("tar.gz"); project.getDependencies().registerTransform(SymbolicLinkPreservingUntarTransform.class, transformSpec -> { - transformSpec.getFrom().attribute(ArtifactAttributes.ARTIFACT_FORMAT, tarArtifactTypeDefinition.getName()); - transformSpec.getTo().attribute(ArtifactAttributes.ARTIFACT_FORMAT, ArtifactTypeDefinition.DIRECTORY_TYPE); + transformSpec.getFrom().attribute(ArtifactTypeDefinition.ARTIFACT_TYPE_ATTRIBUTE, tarArtifactTypeDefinition.getName()); + transformSpec.getTo().attribute(ArtifactTypeDefinition.ARTIFACT_TYPE_ATTRIBUTE, ArtifactTypeDefinition.DIRECTORY_TYPE); }); setupResolutionsContainer(project); @@ -80,7 +79,8 @@ private void setupDistributionContainer(Project project, Property docke distributionsContainer = project.container(ElasticsearchDistribution.class, name -> { Configuration fileConfiguration = project.getConfigurations().create("es_distro_file_" + name); Configuration extractedConfiguration = project.getConfigurations().create(DISTRO_EXTRACTED_CONFIG_PREFIX + name); - extractedConfiguration.getAttributes().attribute(ArtifactAttributes.ARTIFACT_FORMAT, ArtifactTypeDefinition.DIRECTORY_TYPE); + extractedConfiguration.getAttributes() + .attribute(ArtifactTypeDefinition.ARTIFACT_TYPE_ATTRIBUTE, ArtifactTypeDefinition.DIRECTORY_TYPE); return new ElasticsearchDistribution( name, project.getObjects(), diff --git a/build-tools/src/main/java/org/elasticsearch/gradle/plugin/PluginBuildPlugin.java b/build-tools/src/main/java/org/elasticsearch/gradle/plugin/PluginBuildPlugin.java index 2dedd25c007f5..938f5e8c8ad25 100644 --- a/build-tools/src/main/java/org/elasticsearch/gradle/plugin/PluginBuildPlugin.java +++ b/build-tools/src/main/java/org/elasticsearch/gradle/plugin/PluginBuildPlugin.java @@ -32,7 +32,6 @@ import org.gradle.api.artifacts.Configuration; import org.gradle.api.artifacts.type.ArtifactTypeDefinition; import org.gradle.api.file.RegularFile; -import org.gradle.api.internal.artifacts.ArtifactAttributes; import org.gradle.api.plugins.BasePlugin; import org.gradle.api.plugins.JavaPlugin; import 
org.gradle.api.plugins.JavaPluginExtension; @@ -236,7 +235,7 @@ public Object doCall() { // also make the zip available as a configuration (used when depending on this project) Configuration configuration = project.getConfigurations().create("zip"); - configuration.getAttributes().attribute(ArtifactAttributes.ARTIFACT_FORMAT, ArtifactTypeDefinition.ZIP_TYPE); + configuration.getAttributes().attribute(ArtifactTypeDefinition.ARTIFACT_TYPE_ATTRIBUTE, ArtifactTypeDefinition.ZIP_TYPE); project.getArtifacts().add("zip", bundle); return bundle; diff --git a/build-tools/src/main/java/org/elasticsearch/gradle/test/YamlRestTestPlugin.java b/build-tools/src/main/java/org/elasticsearch/gradle/test/YamlRestTestPlugin.java index 59144576333f2..8e86973826830 100644 --- a/build-tools/src/main/java/org/elasticsearch/gradle/test/YamlRestTestPlugin.java +++ b/build-tools/src/main/java/org/elasticsearch/gradle/test/YamlRestTestPlugin.java @@ -24,7 +24,6 @@ import org.gradle.api.artifacts.dsl.DependencyHandler; import org.gradle.api.artifacts.type.ArtifactTypeDefinition; import org.gradle.api.attributes.Attribute; -import org.gradle.api.internal.artifacts.ArtifactAttributes; import org.gradle.api.plugins.JavaBasePlugin; import org.gradle.api.tasks.Copy; import org.gradle.api.tasks.SourceSet; @@ -53,16 +52,16 @@ public void apply(Project project) { project.getDependencies().getArtifactTypes().maybeCreate(ArtifactTypeDefinition.JAR_TYPE); project.getDependencies().registerTransform(UnzipTransform.class, transformSpec -> { transformSpec.getFrom() - .attribute(ArtifactAttributes.ARTIFACT_FORMAT, ArtifactTypeDefinition.JAR_TYPE) + .attribute(ArtifactTypeDefinition.ARTIFACT_TYPE_ATTRIBUTE, ArtifactTypeDefinition.JAR_TYPE) .attribute(restAttribute, true); transformSpec.getTo() - .attribute(ArtifactAttributes.ARTIFACT_FORMAT, ArtifactTypeDefinition.DIRECTORY_TYPE) + .attribute(ArtifactTypeDefinition.ARTIFACT_TYPE_ATTRIBUTE, ArtifactTypeDefinition.DIRECTORY_TYPE) .attribute(restAttribute, true); }); ConfigurationContainer configurations = project.getConfigurations(); Configuration restTestSpecs = configurations.create(REST_TEST_SPECS_CONFIGURATION_NAME); - restTestSpecs.getAttributes().attribute(ArtifactAttributes.ARTIFACT_FORMAT, ArtifactTypeDefinition.DIRECTORY_TYPE); + restTestSpecs.getAttributes().attribute(ArtifactTypeDefinition.ARTIFACT_TYPE_ATTRIBUTE, ArtifactTypeDefinition.DIRECTORY_TYPE); restTestSpecs.getAttributes().attribute(restAttribute, true); TaskProvider copyRestTestSpecs = project.getTasks().register("copyRestTestSpecs", Copy.class, t -> { diff --git a/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/ElasticsearchNode.java b/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/ElasticsearchNode.java index 3ef499d4ef814..a47807ae6d326 100644 --- a/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/ElasticsearchNode.java +++ b/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/ElasticsearchNode.java @@ -37,7 +37,6 @@ import org.gradle.api.file.FileSystemOperations; import org.gradle.api.file.FileTree; import org.gradle.api.file.RegularFile; -import org.gradle.api.internal.artifacts.ArtifactAttributes; import org.gradle.api.internal.file.FileOperations; import org.gradle.api.logging.Logger; import org.gradle.api.logging.Logging; @@ -343,7 +342,7 @@ public void module(Provider module) { private void registerExtractedConfig(Provider pluginProvider) { Dependency pluginDependency = this.project.getDependencies().create(project.files(pluginProvider)); 
Configuration extractedConfig = project.getConfigurations().detachedConfiguration(pluginDependency); - extractedConfig.getAttributes().attribute(ArtifactAttributes.ARTIFACT_FORMAT, ArtifactTypeDefinition.DIRECTORY_TYPE); + extractedConfig.getAttributes().attribute(ArtifactTypeDefinition.ARTIFACT_TYPE_ATTRIBUTE, ArtifactTypeDefinition.DIRECTORY_TYPE); extractedConfig.getAttributes().attribute(bundleAttribute, true); pluginAndModuleConfiguration.from(extractedConfig); } @@ -353,10 +352,10 @@ private void configureArtifactTransforms() { project.getDependencies().getArtifactTypes().maybeCreate(ArtifactTypeDefinition.ZIP_TYPE); project.getDependencies().registerTransform(UnzipTransform.class, transformSpec -> { transformSpec.getFrom() - .attribute(ArtifactAttributes.ARTIFACT_FORMAT, ArtifactTypeDefinition.ZIP_TYPE) + .attribute(ArtifactTypeDefinition.ARTIFACT_TYPE_ATTRIBUTE, ArtifactTypeDefinition.ZIP_TYPE) .attribute(bundleAttribute, true); transformSpec.getTo() - .attribute(ArtifactAttributes.ARTIFACT_FORMAT, ArtifactTypeDefinition.DIRECTORY_TYPE) + .attribute(ArtifactTypeDefinition.ARTIFACT_TYPE_ATTRIBUTE, ArtifactTypeDefinition.DIRECTORY_TYPE) .attribute(bundleAttribute, true); transformSpec.getParameters().setAsFiletreeOutput(true); }); diff --git a/modules/reindex/build.gradle b/modules/reindex/build.gradle index ac839731b7746..91eab7ca54852 100644 --- a/modules/reindex/build.gradle +++ b/modules/reindex/build.gradle @@ -111,8 +111,8 @@ if (Os.isFamily(Os.FAMILY_WINDOWS)) { * To avoid testing against too many old versions, always pick first and last version per major */ project.getDependencies().registerTransform(UnzipTransform.class, transformSpec -> { - transformSpec.getFrom().attribute(ArtifactAttributes.ARTIFACT_FORMAT, ArtifactTypeDefinition.ZIP_TYPE); - transformSpec.getTo().attribute(ArtifactAttributes.ARTIFACT_FORMAT, ArtifactTypeDefinition.DIRECTORY_TYPE); + transformSpec.getFrom().attribute(ArtifactTypeDefinition.ARTIFACT_TYPE_ATTRIBUTE, ArtifactTypeDefinition.ZIP_TYPE); + transformSpec.getTo().attribute(ArtifactTypeDefinition.ARTIFACT_TYPE_ATTRIBUTE, ArtifactTypeDefinition.DIRECTORY_TYPE); }); def versions = ['2', '1', '090'] @@ -123,7 +123,7 @@ if (Os.isFamily(Os.FAMILY_WINDOWS)) { } versions.each { version -> def oldEsDependency = configurations['es' + version] - oldEsDependency.getAttributes().attribute(ArtifactAttributes.ARTIFACT_FORMAT, ArtifactTypeDefinition.DIRECTORY_TYPE); + oldEsDependency.getAttributes().attribute(ArtifactTypeDefinition.ARTIFACT_TYPE_ATTRIBUTE, ArtifactTypeDefinition.DIRECTORY_TYPE); TaskProvider fixture = tasks.register("oldEs${version}Fixture", AntFixture) { dependsOn project.configurations.oldesFixture, jdks.legacy, oldEsDependency executable = "${BuildParams.runtimeJavaHome}/bin/java" diff --git a/rest-api-spec/build.gradle b/rest-api-spec/build.gradle index f49cde09e3eee..8a8630c9846c6 100644 --- a/rest-api-spec/build.gradle +++ b/rest-api-spec/build.gradle @@ -23,7 +23,7 @@ configurations { // easy and efficient basicRestSpecs { attributes { - attribute(org.gradle.api.internal.artifacts.ArtifactAttributes.ARTIFACT_FORMAT, ArtifactTypeDefinition.DIRECTORY_TYPE) + attribute(ArtifactTypeDefinition.ARTIFACT_TYPE_ATTRIBUTE, ArtifactTypeDefinition.DIRECTORY_TYPE) } } } diff --git a/x-pack/qa/repository-old-versions/build.gradle b/x-pack/qa/repository-old-versions/build.gradle index 2581a4e5736ce..fc2e96ec66045 100644 --- a/x-pack/qa/repository-old-versions/build.gradle +++ b/x-pack/qa/repository-old-versions/build.gradle @@ -58,8 +58,8 @@ if 
(Os.isFamily(Os.FAMILY_WINDOWS)) { * To avoid testing against too many old versions, always pick first and last version per major */ project.getDependencies().registerTransform(UnzipTransform.class, transformSpec -> { - transformSpec.getFrom().attribute(ArtifactAttributes.ARTIFACT_FORMAT, ArtifactTypeDefinition.ZIP_TYPE); - transformSpec.getTo().attribute(ArtifactAttributes.ARTIFACT_FORMAT, ArtifactTypeDefinition.DIRECTORY_TYPE); + transformSpec.getFrom().attribute(ArtifactTypeDefinition.ARTIFACT_TYPE_ATTRIBUTE, ArtifactTypeDefinition.ZIP_TYPE); + transformSpec.getTo().attribute(ArtifactTypeDefinition.ARTIFACT_TYPE_ATTRIBUTE, ArtifactTypeDefinition.DIRECTORY_TYPE); }); for (String versionString : ['5.0.0', '5.6.16', '6.0.0', '6.8.20']) { @@ -70,7 +70,7 @@ if (Os.isFamily(Os.FAMILY_WINDOWS)) { String configName = "es${versionNoDots}" def config = configurations.create(configName) - config.getAttributes().attribute(ArtifactAttributes.ARTIFACT_FORMAT, ArtifactTypeDefinition.DIRECTORY_TYPE); + config.getAttributes().attribute(ArtifactTypeDefinition.ARTIFACT_TYPE_ATTRIBUTE, ArtifactTypeDefinition.DIRECTORY_TYPE); dependencies.add(configName, artifact) String repoLocation = "${buildDir}/cluster/shared/repo/${versionNoDots}" From 15e522ec414b482a4f347092128cb22b1113cc07 Mon Sep 17 00:00:00 2001 From: David Roberts Date: Wed, 16 Feb 2022 12:41:19 +0000 Subject: [PATCH 124/167] [ML] Fix license feature test cleanup (#84020) Testing license features can cause the .ml-stats index to be created some time after the tests complete. This can cause the post-test cleanup to fail. This change fixes the problem by waiting for pending tasks in the first-stage cleanup, before the main cleanup methods run in the base classes. Fixes #83923 --- .../ml/integration/TestFeatureLicenseTrackingIT.java | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/TestFeatureLicenseTrackingIT.java b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/TestFeatureLicenseTrackingIT.java index 9651115c658ee..ac2416c899028 100644 --- a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/TestFeatureLicenseTrackingIT.java +++ b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/TestFeatureLicenseTrackingIT.java @@ -31,6 +31,7 @@ import org.elasticsearch.xpack.core.ml.job.config.JobState; import org.elasticsearch.xpack.ml.MachineLearning; import org.elasticsearch.xpack.ml.MlSingleNodeTestCase; +import org.elasticsearch.xpack.ml.support.BaseMlIntegTestCase; import org.junit.After; import java.time.ZonedDateTime; @@ -57,7 +58,7 @@ public class TestFeatureLicenseTrackingIT extends MlSingleNodeTestCase { private final Set createdPipelines = new HashSet<>(); @After - public void cleanup() { + public void cleanup() throws Exception { for (String pipeline : createdPipelines) { try { client().execute(DeletePipelineAction.INSTANCE, new DeletePipelineRequest(pipeline)).actionGet(); @@ -65,6 +66,9 @@ public void cleanup() { logger.warn(() -> new ParameterizedMessage("error cleaning up pipeline [{}]", pipeline), ex); } } + // Some of the tests have async side effects. We need to wait for these to complete before continuing + // the cleanup, otherwise unexpected indices may get created during the cleanup process.
+ BaseMlIntegTestCase.waitForPendingTasks(client()); } public void testFeatureTrackingAnomalyJob() throws Exception { @@ -125,7 +129,7 @@ public void testFeatureTrackingInferenceModelPipeline() throws Exception { .setInferenceConfig(new ClassificationConfig(3)) .setParsedDefinition( new TrainedModelDefinition.Builder().setPreProcessors( - Arrays.asList(new OneHotEncoding("other.categorical", oneHotEncoding, false)) + List.of(new OneHotEncoding("other.categorical", oneHotEncoding, false)) ).setTrainedModel(buildClassification(true)) ) .build(); From e7ca5324a0f6326b579a6f6cc9f8213957ffce43 Mon Sep 17 00:00:00 2001 From: Christoph Büscher Date: Wed, 16 Feb 2022 14:11:42 +0100 Subject: [PATCH 125/167] Move InferenceConfigUpdate under VersionedNamedWriteable (#84022) In #81809 we introduced a mechanism to check the serializability of search requests to earlier-version nodes already on the coordinating node. This requires knowledge of the version in which NamedWriteable classes were introduced, which is why we started moving classes that are used inside the search request under the VersionedNamedWriteable interface, to make sure future additions implement the method that provides the version information. This change moves InferenceConfigUpdate and its implementing classes under that sub-interface. I have used the versions in which they were first released, based on the pull requests that introduced the classes. --- .../trainedmodel/ClassificationConfigUpdate.java | 7 +++++++ .../inference/trainedmodel/EmptyConfigUpdate.java | 6 ++++++ .../trainedmodel/FillMaskConfigUpdate.java | 7 +++++++ .../trainedmodel/InferenceConfigUpdate.java | 4 ++-- .../ml/inference/trainedmodel/NerConfigUpdate.java | 7 +++++++ .../trainedmodel/PassThroughConfigUpdate.java | 7 +++++++ .../trainedmodel/RegressionConfigUpdate.java | 8 ++++++++ .../inference/trainedmodel/ResultsFieldUpdate.java | 6 ++++++ .../TextClassificationConfigUpdate.java | 13 ++++++++++--- .../trainedmodel/TextEmbeddingConfigUpdate.java | 7 +++++++ .../ZeroShotClassificationConfigUpdate.java | 7 +++++++ 11 files changed, 74 insertions(+), 5 deletions(-) diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ClassificationConfigUpdate.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ClassificationConfigUpdate.java index b295ba4122580..9a26d10702e5e 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ClassificationConfigUpdate.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ClassificationConfigUpdate.java @@ -6,6 +6,7 @@ */ package org.elasticsearch.xpack.core.ml.inference.trainedmodel; +import org.elasticsearch.Version; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xcontent.ObjectParser; @@ -121,6 +122,7 @@ public String getTopClassesResultsField() { return topClassesResultsField; } + @Override public String getResultsField() { return resultsField; } @@ -246,6 +248,11 @@ boolean isNoop(ClassificationConfig originalConfig) { && (predictionFieldType == null || predictionFieldType.equals(originalConfig.getPredictionFieldType())); } + @Override + public Version getMinimalSupportedVersion() { + return Version.V_7_8_0; + } + public static class Builder implements InferenceConfigUpdate.Builder { private Integer numTopClasses; private String topClassesResultsField; diff
--git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/EmptyConfigUpdate.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/EmptyConfigUpdate.java index 9696afe098885..58fb78d53b8b9 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/EmptyConfigUpdate.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/EmptyConfigUpdate.java @@ -65,6 +65,11 @@ public int hashCode() { return EmptyConfigUpdate.class.hashCode(); } + @Override + public Version getMinimalSupportedVersion() { + return Version.V_7_9_0; + } + public static class Builder implements InferenceConfigUpdate.Builder { @Override @@ -72,6 +77,7 @@ public Builder setResultsField(String resultsField) { return this; } + @Override public EmptyConfigUpdate build() { return new EmptyConfigUpdate(); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/FillMaskConfigUpdate.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/FillMaskConfigUpdate.java index 78274da77ab10..da0beaa6785d8 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/FillMaskConfigUpdate.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/FillMaskConfigUpdate.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.core.ml.inference.trainedmodel; +import org.elasticsearch.Version; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xcontent.ObjectParser; @@ -101,6 +102,11 @@ public String getName() { return NAME; } + @Override + public Version getMinimalSupportedVersion() { + return Version.V_8_0_0; + } + @Override public InferenceConfig apply(InferenceConfig originalConfig) { if (originalConfig instanceof FillMaskConfig == false) { @@ -191,6 +197,7 @@ public FillMaskConfigUpdate.Builder setTokenizationUpdate(TokenizationUpdate tok return this; } + @Override public FillMaskConfigUpdate build() { return new FillMaskConfigUpdate(this.numTopClasses, this.resultsField, this.tokenizationUpdate); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/InferenceConfigUpdate.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/InferenceConfigUpdate.java index b00ee7134bac9..30ecac00a3b80 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/InferenceConfigUpdate.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/InferenceConfigUpdate.java @@ -6,7 +6,7 @@ */ package org.elasticsearch.xpack.core.ml.inference.trainedmodel; -import org.elasticsearch.common.io.stream.NamedWriteable; +import org.elasticsearch.common.io.stream.VersionedNamedWriteable; import org.elasticsearch.xpack.core.ml.inference.TrainedModelConfig; import org.elasticsearch.xpack.core.ml.inference.results.WarningInferenceResults; import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; @@ -15,7 +15,7 @@ import java.util.HashSet; import java.util.Set; -public interface InferenceConfigUpdate extends NamedWriteable { +public interface InferenceConfigUpdate extends VersionedNamedWriteable { Set RESERVED_ML_FIELD_NAMES = new HashSet<>( Arrays.asList(WarningInferenceResults.WARNING.getPreferredName(), 
TrainedModelConfig.MODEL_ID.getPreferredName()) ); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/NerConfigUpdate.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/NerConfigUpdate.java index f3dbd03dcbf3b..d52463eb092c6 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/NerConfigUpdate.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/NerConfigUpdate.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.core.ml.inference.trainedmodel; +import org.elasticsearch.Version; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xcontent.ObjectParser; @@ -144,6 +145,11 @@ public int hashCode() { return Objects.hash(resultsField, tokenizationUpdate); } + @Override + public Version getMinimalSupportedVersion() { + return Version.V_8_0_0; + } + public static class Builder implements InferenceConfigUpdate.Builder { private String resultsField; private TokenizationUpdate tokenizationUpdate; @@ -159,6 +165,7 @@ public NerConfigUpdate.Builder setTokenizationUpdate(TokenizationUpdate tokeniza return this; } + @Override public NerConfigUpdate build() { return new NerConfigUpdate(resultsField, tokenizationUpdate); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/PassThroughConfigUpdate.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/PassThroughConfigUpdate.java index a317e3c6c9e89..92bfe1bf9ea79 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/PassThroughConfigUpdate.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/PassThroughConfigUpdate.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.core.ml.inference.trainedmodel; +import org.elasticsearch.Version; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xcontent.ObjectParser; @@ -146,6 +147,11 @@ public int hashCode() { return Objects.hash(resultsField, tokenizationUpdate); } + @Override + public Version getMinimalSupportedVersion() { + return Version.V_8_0_0; + } + public static class Builder implements InferenceConfigUpdate.Builder { private String resultsField; private TokenizationUpdate tokenizationUpdate; @@ -161,6 +167,7 @@ public PassThroughConfigUpdate.Builder setTokenizationUpdate(TokenizationUpdate return this; } + @Override public PassThroughConfigUpdate build() { return new PassThroughConfigUpdate(this.resultsField, tokenizationUpdate); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/RegressionConfigUpdate.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/RegressionConfigUpdate.java index 2400bc1c670a8..c655c391317fb 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/RegressionConfigUpdate.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/RegressionConfigUpdate.java @@ -6,6 +6,7 @@ */ package org.elasticsearch.xpack.core.ml.inference.trainedmodel; +import org.elasticsearch.Version; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; 
import org.elasticsearch.xcontent.ObjectParser; @@ -84,6 +85,7 @@ public Integer getNumTopFeatureImportanceValues() { return numTopFeatureImportanceValues; } + @Override public String getResultsField() { return resultsField; } @@ -109,6 +111,11 @@ public String getName() { return NAME.getPreferredName(); } + @Override + public Version getMinimalSupportedVersion() { + return Version.V_7_8_0; + } + @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); @@ -186,6 +193,7 @@ public Builder setNumTopFeatureImportanceValues(Integer numTopFeatureImportanceV return this; } + @Override public RegressionConfigUpdate build() { return new RegressionConfigUpdate(resultsField, numTopFeatureImportanceValues); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ResultsFieldUpdate.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ResultsFieldUpdate.java index 2266fb5ca3e44..0eaf101c25c8e 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ResultsFieldUpdate.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ResultsFieldUpdate.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.core.ml.inference.trainedmodel; +import org.elasticsearch.Version; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; @@ -68,6 +69,11 @@ public String getWriteableName() { return NAME; } + @Override + public Version getMinimalSupportedVersion() { + return Version.V_7_9_0; + } + @Override public void writeTo(StreamOutput out) throws IOException { out.writeString(resultsField); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextClassificationConfigUpdate.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextClassificationConfigUpdate.java index 72a0858117e61..a62f9e3a197fb 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextClassificationConfigUpdate.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextClassificationConfigUpdate.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.core.ml.inference.trainedmodel; +import org.elasticsearch.Version; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xcontent.ObjectParser; @@ -21,10 +22,10 @@ import java.util.Map; import java.util.Objects; +import static org.elasticsearch.xpack.core.ml.inference.trainedmodel.NlpConfig.CLASSIFICATION_LABELS; +import static org.elasticsearch.xpack.core.ml.inference.trainedmodel.NlpConfig.NUM_TOP_CLASSES; +import static org.elasticsearch.xpack.core.ml.inference.trainedmodel.NlpConfig.RESULTS_FIELD; import static org.elasticsearch.xpack.core.ml.inference.trainedmodel.NlpConfig.TOKENIZATION; -import static org.elasticsearch.xpack.core.ml.inference.trainedmodel.TextClassificationConfig.CLASSIFICATION_LABELS; -import static org.elasticsearch.xpack.core.ml.inference.trainedmodel.TextClassificationConfig.NUM_TOP_CLASSES; -import static org.elasticsearch.xpack.core.ml.inference.trainedmodel.TextClassificationConfig.RESULTS_FIELD; public class TextClassificationConfigUpdate extends NlpConfigUpdate 
implements NamedXContentObject { @@ -96,6 +97,11 @@ public String getName() { return NAME; } + @Override + public Version getMinimalSupportedVersion() { + return Version.V_8_0_0; + } + @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); @@ -237,6 +243,7 @@ public TextClassificationConfigUpdate.Builder setTokenizationUpdate(Tokenization return this; } + @Override public TextClassificationConfigUpdate build() { return new TextClassificationConfigUpdate( this.classificationLabels, diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextEmbeddingConfigUpdate.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextEmbeddingConfigUpdate.java index 1ca7d04fb1eee..589b71bd631d0 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextEmbeddingConfigUpdate.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextEmbeddingConfigUpdate.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.core.ml.inference.trainedmodel; +import org.elasticsearch.Version; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xcontent.ObjectParser; @@ -95,6 +96,11 @@ public String getName() { return NAME; } + @Override + public Version getMinimalSupportedVersion() { + return Version.V_8_0_0; + } + @Override public InferenceConfig apply(InferenceConfig originalConfig) { if ((resultsField == null || resultsField.equals(originalConfig.getResultsField())) && super.isNoop()) { @@ -160,6 +166,7 @@ public TextEmbeddingConfigUpdate.Builder setTokenizationUpdate(TokenizationUpdat return this; } + @Override public TextEmbeddingConfigUpdate build() { return new TextEmbeddingConfigUpdate(resultsField, tokenizationUpdate); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ZeroShotClassificationConfigUpdate.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ZeroShotClassificationConfigUpdate.java index edfc675fe6ec4..3cf9f8c8f8354 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ZeroShotClassificationConfigUpdate.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ZeroShotClassificationConfigUpdate.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.core.ml.inference.trainedmodel; +import org.elasticsearch.Version; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.core.Nullable; @@ -235,8 +236,14 @@ public Builder setTokenizationUpdate(TokenizationUpdate tokenizationUpdate) { return this; } + @Override public ZeroShotClassificationConfigUpdate build() { return new ZeroShotClassificationConfigUpdate(labels, isMultiLabel, resultsField, tokenizationUpdate); } } + + @Override + public Version getMinimalSupportedVersion() { + return Version.V_8_0_0; + } } From b44fcfbb3a8be9ece5d653118db0da14d1db96d3 Mon Sep 17 00:00:00 2001 From: Stuart Tettemer Date: Wed, 16 Feb 2022 07:31:25 -0600 Subject: [PATCH 126/167] Script: Fields API for Dense Vector (#83550) Adds the fields API for the `dense_vector` field mapper. Adds a `DenseVector` interface for the value type.
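For orientation, here is a minimal, hypothetical `script_score` request using the new API from Painless, modeled on the YAML tests added in this commit (the index name `my-index`, the field name `my_vector`, and the query vector are illustrative only, not part of this change):

```
GET my-index/_search
{
  "query": {
    "script_score": {
      "query": { "exists": { "field": "my_vector" } },
      "script": {
        "source": """
          // field(...).get() returns DenseVector.EMPTY when the document has no vector
          DenseVector dv = field('my_vector').get();
          if (dv.isEmpty()) {
            return 0;
          }
          // score by similarity to the query vector passed in as a script param
          return dv.cosineSimilarity(params.query_vector);
        """,
        "params": { "query_vector": [4, 5, 6] }
      }
    }
  }
}
```

Guarding with `isEmpty()` matters because, as described below, the empty vector throws `IllegalArgumentException` for every method other than `isEmpty()`, `size()` and `iterator()`.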
The interface is implemented by: * `KnnDenseVector` which wraps a decoded float array from `VectorValues` * `BinaryDenseVector` which lazily decodes a `BytesRef` from `BinaryDocValues` The vector operations have moved into those implementations from `BinaryDenseVectorScriptDocValues.java` and `KnnDenseVectorScriptDocValues.java`, respectively. The `DenseVector` API is: ``` float getMagnitude(); double dotProduct(float[] | List<Number>); double l1Norm(float[] | List<Number>); double l2Norm(float[] | List<Number>); float[] getVector(); int dims(); boolean isEmpty(); // does the value exist int size(); // 0 if isEmpty(), 1 otherwise Iterator iterator() ``` `dotProduct`, `l1Norm` and `l2Norm` take a `float[]` or a `List<Number>` via a delegating `default` method on the `DenseVector` interface. The `DenseVectorDocValuesField` abstract class contains two getter APIs. It is implemented by `KnnDenseVectorDocValuesField` and `BinaryDenseVectorDocValuesField`. ``` DenseVector get() DenseVector get(DenseVector defaultValue) ``` The `get()` method is included because there isn't a good default dense vector, so that API returns an empty `DenseVector` which throws an `IllegalArgumentException` for all method calls other than `isEmpty()`, `size()` and `iterator()`. The empty dense vector will always be `DenseVector.EMPTY` in case users want to use equality checks. Refs: #79105 --- docs/changelog/83550.yaml | 5 + .../60_knn_and_binary_dv_fields_api.yml | 848 ++++++++++++++++++ .../vectors/query/BinaryDenseVector.java | 141 +++ .../BinaryDenseVectorDocValuesField.java | 70 ++ .../BinaryDenseVectorScriptDocValues.java | 119 --- .../xpack/vectors/query/DenseVector.java | 227 +++++ .../query/DenseVectorDocValuesField.java | 51 ++ .../query/DenseVectorScriptDocValues.java | 90 +- .../query/DocValuesWhitelistExtension.java | 5 +- .../xpack/vectors/query/KnnDenseVector.java | 109 +++ .../query/KnnDenseVectorDocValuesField.java | 79 ++ .../query/KnnDenseVectorScriptDocValues.java | 122 --- .../xpack/vectors/query/ScoreScriptUtils.java | 34 +- .../vectors/query/VectorDVLeafFieldData.java | 34 +- ...xt => org.elasticsearch.xpack.vectors.txt} | 37 + ...BinaryDenseVectorScriptDocValuesTests.java | 66 +- .../query/DenseVectorFunctionTests.java | 74 +- .../xpack/vectors/query/DenseVectorTests.java | 84 ++ .../KnnDenseVectorScriptDocValuesTests.java | 78 +- 19 files changed, 1883 insertions(+), 390 deletions(-) create mode 100644 docs/changelog/83550.yaml create mode 100644 x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/vectors/60_knn_and_binary_dv_fields_api.yml create mode 100644 x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/query/BinaryDenseVector.java create mode 100644 x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/query/BinaryDenseVectorDocValuesField.java delete mode 100644 x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/query/BinaryDenseVectorScriptDocValues.java create mode 100644 x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/query/DenseVector.java create mode 100644 x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/query/DenseVectorDocValuesField.java create mode 100644 x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/query/KnnDenseVector.java create mode 100644 x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/query/KnnDenseVectorDocValuesField.java delete mode 100644 x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/query/KnnDenseVectorScriptDocValues.java rename 
x-pack/plugin/vectors/src/main/resources/org/elasticsearch/xpack/vectors/query/{whitelist.txt => org.elasticsearch.xpack.vectors.txt} (52%) create mode 100644 x-pack/plugin/vectors/src/test/java/org/elasticsearch/xpack/vectors/query/DenseVectorTests.java diff --git a/docs/changelog/83550.yaml b/docs/changelog/83550.yaml new file mode 100644 index 0000000000000..51ab72f642fe6 --- /dev/null +++ b/docs/changelog/83550.yaml @@ -0,0 +1,5 @@ +pr: 83550 +summary: "Script: Fields API for Dense Vector" +area: Infra/Scripting +type: enhancement +issues: [] diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/vectors/60_knn_and_binary_dv_fields_api.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/vectors/60_knn_and_binary_dv_fields_api.yml new file mode 100644 index 0000000000000..b583a25738215 --- /dev/null +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/vectors/60_knn_and_binary_dv_fields_api.yml @@ -0,0 +1,848 @@ +--- +"size and isEmpty code works for any vector, including empty": + - skip: + version: " - 8.1.99" + reason: "Fields API for dense vector added in 8.2" + + - do: + indices.create: + index: test-index + body: + mappings: + properties: + bdv: + type: dense_vector + dims: 3 + knn: + type: dense_vector + dims: 3 + index: true + similarity: l2_norm + - do: + bulk: + index: test-index + refresh: true + body: + - '{"index": {"_id": "1"}}' + - '{"bdv": [1, 1, 1], "knn": [1, 1, 1]}' + - '{"index": {"_id": "2"}}' + - '{"bdv": [1, 1, 2], "knn": [1, 1, 2]}' + - '{"index": {"_id": "3"}}' + - '{"bdv": [1, 1, 3], "knn": [1, 1, 3]}' + - '{"index": {"_id": "missing_vector"}}' + - '{}' + + - do: + search: + body: + query: + script_score: + query: { match_all: {} } + script: + source: | + def dv = field(params.field).get(); + if (dv.isEmpty()) { + return dv.size(); + } + return dv.vector[2] * dv.size() + params: + field: bdv + + - match: { hits.hits.0._id: "3" } + - match: { hits.hits.0._score: 3 } + - match: { hits.hits.1._id: "2" } + - match: { hits.hits.1._score: 2 } + - match: { hits.hits.2._id: "1" } + - match: { hits.hits.2._score: 1 } + - match: { hits.hits.3._id: "missing_vector" } + - match: { hits.hits.3._score: 0 } + + - do: + search: + body: + query: + script_score: + query: { match_all: {} } + script: + source: | + def dv = field(params.field).get(); + if (dv.isEmpty()) { + return dv.size(); + } + return dv.vector[2] * dv.size() + params: + field: knn + + - match: { hits.hits.0._id: "3" } + - match: { hits.hits.0._score: 3 } + - match: { hits.hits.1._id: "2" } + - match: { hits.hits.1._score: 2 } + - match: { hits.hits.2._id: "1" } + - match: { hits.hits.2._score: 1 } + - match: { hits.hits.3._id: "missing_vector" } + - match: { hits.hits.3._score: 0 } + +--- +"null can be used for default value": + - skip: + version: " - 8.1.99" + reason: "Fields API for dense vector added in 8.2" + + - do: + indices.create: + index: test-index + body: + mappings: + properties: + bdv: + type: dense_vector + dims: 3 + knn: + type: dense_vector + dims: 3 + index: true + similarity: l2_norm + - do: + bulk: + index: test-index + refresh: true + body: + - '{"index": {"_id": "1"}}' + - '{"bdv": [1, 1, 1], "knn": [1, 1, 1]}' + - '{"index": {"_id": "2"}}' + - '{"bdv": [1, 1, 2], "knn": [1, 1, 2]}' + - '{"index": {"_id": "3"}}' + - '{"bdv": [1, 1, 3], "knn": [1, 1, 3]}' + - '{"index": {"_id": "missing_vector"}}' + - '{}' + + - do: + search: + body: + query: + script_score: + query: { match_all: {} } + script: + source: | + DenseVector dv = 
field(params.field).get(null); + if (dv == null) { + return 1; + } + return dv.vector[2]; + params: + field: bdv + + - match: { hits.hits.0._id: "3" } + - match: { hits.hits.0._score: 3 } + - match: { hits.hits.1._id: "2" } + - match: { hits.hits.1._score: 2 } + - match: { hits.hits.2._id: "1" } + - match: { hits.hits.2._score: 1 } + - match: { hits.hits.3._id: "missing_vector" } + - match: { hits.hits.3._score: 1 } + + - do: + search: + body: + query: + script_score: + query: { match_all: {} } + script: + source: | + DenseVector dv = field(params.field).get(null); + if (dv == null) { + return 1; + } + return dv.vector[2]; + params: + field: knn + + - match: { hits.hits.0._id: "3" } + - match: { hits.hits.0._score: 3 } + - match: { hits.hits.1._id: "2" } + - match: { hits.hits.1._score: 2 } + - match: { hits.hits.2._id: "1" } + - match: { hits.hits.2._score: 1 } + - match: { hits.hits.3._id: "missing_vector" } + - match: { hits.hits.3._score: 1 } + +--- +"empty dense vector throws for vector accesses": + - skip: + version: " - 8.1.99" + reason: "Fields API for dense vector added in 8.2" + + - do: + indices.create: + index: test-index + body: + mappings: + properties: + bdv: + type: dense_vector + dims: 3 + knn: + type: dense_vector + dims: 3 + index: true + similarity: l2_norm + - do: + bulk: + index: test-index + refresh: true + body: + - '{"index": {"_id": "1"}}' + - '{"bdv": [1, 1, 1], "knn": [1, 1, 1]}' + - '{"index": {"_id": "2"}}' + - '{"bdv": [1, 1, 2], "knn": [1, 1, 2]}' + - '{"index": {"_id": "3"}}' + - '{"bdv": [1, 1, 3], "knn": [1, 1, 3]}' + - '{"index": {"_id": "missing_vector"}}' + - '{}' + + - do: + catch: bad_request + search: + body: + query: + script_score: + query: { "bool": { "must_not": { "exists": { "field": "bdv" } } } } + script: + source: | + field(params.field).get().vector[2] + params: + field: bdv + + - match: { error.failed_shards.0.reason.caused_by.type: "illegal_argument_exception" } + - match: { error.failed_shards.0.reason.caused_by.reason: "Dense vector value missing for a field, use isEmpty() to check for a missing vector value" } + + - do: + catch: bad_request + search: + body: + query: + script_score: + query: { "bool": { "must_not": { "exists": { "field": "bdv" } } } } + script: + source: | + field(params.field).get().vector[2] + params: + field: knn + + - match: { error.failed_shards.0.reason.caused_by.type: "illegal_argument_exception" } + - match: { error.failed_shards.0.reason.caused_by.reason: "Dense vector value missing for a field, use isEmpty() to check for a missing vector value" } + + - do: + search: + body: + query: + script_score: + query: { "bool": { "must_not": { "exists": { "field": "bdv" } } } } + script: + source: | + float[] q = new float[1]; + q[0] = 3; + DenseVector dv = field(params.field).get(); + float score = 0; + try { score += dv.magnitude } catch (IllegalArgumentException e) { score += 10; } + try { score += dv.dotProduct(q) } catch (IllegalArgumentException e) { score += 200; } + try { score += dv.l1Norm(q) } catch (IllegalArgumentException e) { score += 3000; } + try { score += dv.l2Norm(q) } catch (IllegalArgumentException e) { score += 40000; } + try { score += dv.vector[0] } catch (IllegalArgumentException e) { score += 500000; } + try { score += dv.dims } catch (IllegalArgumentException e) { score += 6000000; } + return score; + params: + field: bdv + + - match: { hits.hits.0._id: "missing_vector" } + - match: { hits.hits.0._score: 6543210 } + + - do: + search: + body: + query: + script_score: + query: { "bool": { 
"must_not": { "exists": { "field": "bdv" } } } } + script: + source: | + float[] q = new float[1]; + q[0] = 3; + DenseVector dv = field(params.field).get(); + float score = 0; + try { score += dv.magnitude } catch (IllegalArgumentException e) { score += 10; } + try { score += dv.dotProduct(q) } catch (IllegalArgumentException e) { score += 200; } + try { score += dv.l1Norm(q) } catch (IllegalArgumentException e) { score += 3000; } + try { score += dv.l2Norm(q) } catch (IllegalArgumentException e) { score += 40000; } + try { score += dv.cosineSimilarity(q) } catch (IllegalArgumentException e) { score += 200000; } + try { score += dv.vector[0] } catch (IllegalArgumentException e) { score += 500000; } + try { score += dv.dims } catch (IllegalArgumentException e) { score += 6000000; } + return score; + params: + field: knn + + - match: { hits.hits.0._id: "missing_vector" } + - match: { hits.hits.0._score: 6743210 } + +--- +"dot product works on dense vectors": + - skip: + version: " - 8.1.99" + reason: "Fields API for dense vector added in 8.2" + + - do: + indices.create: + index: test-index + body: + mappings: + properties: + bdv: + type: dense_vector + dims: 3 + knn: + type: dense_vector + dims: 3 + index: true + similarity: l2_norm + - do: + bulk: + index: test-index + refresh: true + body: + - '{"index": {"_id": "1"}}' + - '{"bdv": [1, 1, 1], "knn": [1, 1, 1]}' + - '{"index": {"_id": "2"}}' + - '{"bdv": [1, 1, 2], "knn": [1, 1, 2]}' + - '{"index": {"_id": "3"}}' + - '{"bdv": [1, 1, 3], "knn": [1, 1, 3]}' + - '{"index": {"_id": "missing_vector"}}' + - '{}' + + - do: + search: + body: + query: + script_score: + query: { "exists": { "field": "bdv" } } + script: + source: | + field(params.field).get().dotProduct(params.query) + params: + query: [4, 5, 6] + field: bdv + + - match: { hits.hits.0._id: "3" } + - match: { hits.hits.0._score: 27 } + - match: { hits.hits.1._id: "2" } + - match: { hits.hits.1._score: 21 } + - match: { hits.hits.2._id: "1" } + - match: { hits.hits.2._score: 15 } + + - do: + search: + body: + query: + script_score: + query: { "exists": { "field": "bdv" } } + script: + source: | + float[] query = new float[3]; + query[0] = 4; query[1] = 5; query[2] = 6; + field(params.field).get().dotProduct(query) + params: + field: bdv + + - match: { hits.hits.0._id: "3" } + - match: { hits.hits.0._score: 27 } + - match: { hits.hits.1._id: "2" } + - match: { hits.hits.1._score: 21 } + - match: { hits.hits.2._id: "1" } + - match: { hits.hits.2._score: 15 } + + - do: + search: + body: + query: + script_score: + query: { "exists": { "field": "bdv" } } + script: + source: | + field(params.field).get().dotProduct(params.query) + params: + query: [4, 5, 6] + field: knn + + - match: { hits.hits.0._id: "3" } + - match: { hits.hits.0._score: 27 } + - match: { hits.hits.1._id: "2" } + - match: { hits.hits.1._score: 21 } + - match: { hits.hits.2._id: "1" } + - match: { hits.hits.2._score: 15 } + + - do: + search: + body: + query: + script_score: + query: { "exists": { "field": "bdv" } } + script: + source: | + float[] query = new float[3]; + query[0] = 4; query[1] = 5; query[2] = 6; + field(params.field).get().dotProduct(query) + params: + field: knn + + - match: { hits.hits.0._id: "3" } + - match: { hits.hits.0._score: 27 } + - match: { hits.hits.1._id: "2" } + - match: { hits.hits.1._score: 21 } + - match: { hits.hits.2._id: "1" } + - match: { hits.hits.2._score: 15 } + +--- +"iterator over dense vector values": + - skip: + version: " - 8.1.99" + reason: "Fields API for dense vector added in 
8.2" + + - do: + indices.create: + index: test-index + body: + mappings: + properties: + bdv: + type: dense_vector + dims: 3 + knn: + type: dense_vector + dims: 3 + index: true + similarity: l2_norm + - do: + bulk: + index: test-index + refresh: true + body: + - '{"index": {"_id": "1"}}' + - '{"bdv": [1, 1, 1], "knn": [1, 1, 1]}' + - '{"index": {"_id": "2"}}' + - '{"bdv": [1, 1, 2], "knn": [1, 1, 2]}' + - '{"index": {"_id": "3"}}' + - '{"bdv": [1, 1, 3], "knn": [1, 1, 3]}' + - '{"index": {"_id": "missing_vector"}}' + - '{}' + + - do: + catch: bad_request + search: + body: + query: + script_score: + query: { match_all: {} } + script: + source: | + float sum = 0.0f; + for (def v : field(params.field)) { + sum += v; + } + return sum; + params: + field: bdv + + - match: { error.failed_shards.0.reason.caused_by.type: "unsupported_operation_exception" } + - match: { error.failed_shards.0.reason.caused_by.reason: "Cannot iterate over single valued dense_vector field, use get() instead" } + + - do: + catch: bad_request + search: + body: + query: + script_score: + query: { match_all: {} } + script: + source: | + float sum = 0.0f; + for (def v : field(params.field)) { + sum += v; + } + return sum; + params: + field: knn + + - match: { error.failed_shards.0.reason.caused_by.type: "unsupported_operation_exception" } + - match: { error.failed_shards.0.reason.caused_by.reason: "Cannot iterate over single valued dense_vector field, use get() instead"} + +--- +"l1Norm works on dense vectors": + - skip: + version: " - 8.1.99" + reason: "Fields API for dense vector added in 8.2" + + - do: + indices.create: + index: test-index + body: + mappings: + properties: + bdv: + type: dense_vector + dims: 3 + knn: + type: dense_vector + dims: 3 + index: true + similarity: l2_norm + - do: + bulk: + index: test-index + refresh: true + body: + - '{"index": {"_id": "1"}}' + - '{"bdv": [1, 1, 1], "knn": [1, 1, 1]}' + - '{"index": {"_id": "2"}}' + - '{"bdv": [1, 1, 2], "knn": [1, 1, 2]}' + - '{"index": {"_id": "3"}}' + - '{"bdv": [1, 1, 3], "knn": [1, 1, 3]}' + - '{"index": {"_id": "missing_vector"}}' + - '{}' + + - do: + search: + body: + query: + script_score: + query: { "exists": { "field": "bdv" } } + script: + source: | + field(params.field).get().l1Norm(params.query) + params: + query: [4, 5, 6] + field: bdv + + - match: { hits.hits.0._id: "1" } + - match: { hits.hits.0._score: 12 } + + - do: + search: + body: + query: + script_score: + query: { "exists": { "field": "bdv" } } + script: + source: | + float[] query = new float[3]; + query[0] = 4; query[1] = 5; query[2] = 6; + field(params.field).get().l1Norm(query) + params: + field: bdv + + - match: { hits.hits.0._id: "1" } + - match: { hits.hits.0._score: 12 } + + - do: + search: + body: + query: + script_score: + query: { "exists": { "field": "bdv" } } + script: + source: | + field(params.field).get().l1Norm(params.query) + params: + query: [4, 5, 6] + field: knn + + - match: { hits.hits.0._id: "1" } + - match: { hits.hits.0._score: 12 } + + - do: + search: + body: + query: + script_score: + query: { "exists": { "field": "bdv" } } + script: + source: | + float[] query = new float[3]; + query[0] = 4; query[1] = 5; query[2] = 6; + field(params.field).get().l1Norm(query) + params: + field: knn + + - match: { hits.hits.0._id: "1" } + - match: { hits.hits.0._score: 12 } + +--- +"l2Norm works on dense vectors": + - skip: + version: " - 8.1.99" + reason: "Fields API for dense vector added in 8.2" + + - do: + indices.create: + index: test-index + body: + mappings: + 
properties: + bdv: + type: dense_vector + dims: 3 + knn: + type: dense_vector + dims: 3 + index: true + similarity: l2_norm + - do: + bulk: + index: test-index + refresh: true + body: + - '{"index": {"_id": "1"}}' + - '{"bdv": [1, 1, 1], "knn": [1, 1, 1]}' + - '{"index": {"_id": "2"}}' + - '{"bdv": [1, 1, 2], "knn": [1, 1, 2]}' + - '{"index": {"_id": "3"}}' + - '{"bdv": [1, 1, 3], "knn": [1, 1, 3]}' + - '{"index": {"_id": "missing_vector"}}' + - '{}' + + - do: + search: + body: + query: + script_score: + query: { "exists": { "field": "bdv" } } + script: + source: | + (int) field(params.field).get().l2Norm(params.query) + params: + query: [4, 5, 6] + field: bdv + + - match: { hits.hits.0._id: "1" } + - match: { hits.hits.0._score: 7 } + - match: { hits.hits.1._id: "2" } + - match: { hits.hits.1._score: 6 } + - match: { hits.hits.2._id: "3" } + - match: { hits.hits.2._score: 5 } + + - do: + search: + body: + query: + script_score: + query: { "exists": { "field": "bdv" } } + script: + source: | + float[] query = new float[3]; + query[0] = 4; query[1] = 5; query[2] = 6; + (int) field(params.field).get().l2Norm(query) + params: + field: bdv + + - match: { hits.hits.0._id: "1" } + - match: { hits.hits.0._score: 7 } + - match: { hits.hits.1._id: "2" } + - match: { hits.hits.1._score: 6 } + - match: { hits.hits.2._id: "3" } + - match: { hits.hits.2._score: 5 } + + - do: + search: + body: + query: + script_score: + query: { "exists": { "field": "bdv" } } + script: + source: | + (int) field(params.field).get().l2Norm(params.query) + params: + query: [4, 5, 6] + field: knn + + - match: { hits.hits.0._id: "1" } + - match: { hits.hits.0._score: 7 } + - match: { hits.hits.1._id: "2" } + - match: { hits.hits.1._score: 6 } + - match: { hits.hits.2._id: "3" } + - match: { hits.hits.2._score: 5 } + + - do: + search: + body: + query: + script_score: + query: { "exists": { "field": "bdv" } } + script: + source: | + float[] query = new float[3]; + query[0] = 4; query[1] = 5; query[2] = 6; + (int) field(params.field).get().l2Norm(query) + params: + field: knn + + - match: { hits.hits.0._id: "1" } + - match: { hits.hits.0._score: 7 } + - match: { hits.hits.1._id: "2" } + - match: { hits.hits.1._score: 6 } + - match: { hits.hits.2._id: "3" } + - match: { hits.hits.2._score: 5 } + +--- +"cosineSimilarity works on dense vectors": + - skip: + version: " - 8.1.99" + reason: "Fields API for dense vector added in 8.2" + + - do: + indices.create: + index: test-index + body: + mappings: + properties: + bdv: + type: dense_vector + dims: 3 + knn: + type: dense_vector + dims: 3 + index: true + similarity: l2_norm + + - do: + bulk: + index: test-index + refresh: true + body: + - '{"index": {"_id": "1"}}' + - '{"bdv": [1, 1, 1], "knn": [1, 1, 1]}' + - '{"index": {"_id": "2"}}' + - '{"bdv": [1, 1, 2], "knn": [1, 1, 2]}' + - '{"index": {"_id": "3"}}' + - '{"bdv": [1, 1, 3], "knn": [1, 1, 3]}' + - '{"index": {"_id": "missing_vector"}}' + - '{}' + + - do: + search: + body: + query: + script_score: + query: { "exists": { "field": "bdv" } } + script: + source: | + float[] query = new float[3]; + query[0] = 4; query[1] = 5; query[2] = 6; + (int) (field(params.field).get().cosineSimilarity(query) * 100.0f) + params: + field: bdv + + - match: { hits.hits.0._id: "1" } + - match: { hits.hits.0._score: 98 } + - match: { hits.hits.1._id: "2" } + - match: { hits.hits.1._score: 97 } + - match: { hits.hits.2._id: "3" } + - match: { hits.hits.2._score: 92 } + + - do: + search: + body: + query: + script_score: + query: { "exists": { "field": 
"bdv" } } + script: + source: | + (int) (field(params.field).get().cosineSimilarity(params.query) * 100.0f) + params: + query: [4, 5, 6] + field: knn + + - match: { hits.hits.0._id: "1" } + - match: { hits.hits.0._score: 98 } + - match: { hits.hits.1._id: "2" } + - match: { hits.hits.1._score: 97 } + - match: { hits.hits.2._id: "3" } + - match: { hits.hits.2._score: 92 } + + - do: + search: + body: + query: + script_score: + query: { "exists": { "field": "bdv" } } + script: + source: | + (int) (field(params.field).get().cosineSimilarity(params.query) * 100.0f) + params: + query: [4, 5, 6] + field: bdv + + - match: { hits.hits.0._id: "1" } + - match: { hits.hits.0._score: 98 } + - match: { hits.hits.1._id: "2" } + - match: { hits.hits.1._score: 97 } + - match: { hits.hits.2._id: "3" } + - match: { hits.hits.2._score: 92 } + +--- +"query vector of wrong type errors": + - skip: + version: " - 8.0.99" + reason: "Fields API for dense vector added in 8.2" + + - do: + indices.create: + index: test-index + body: + mappings: + properties: + bdv: + type: dense_vector + dims: 3 + knn: + type: dense_vector + dims: 3 + index: true + similarity: l2_norm + - do: + bulk: + index: test-index + refresh: true + body: + - '{"index": {"_id": "1"}}' + - '{"bdv": [1, 1, 1], "knn": [1, 1, 1]}' + + - do: + catch: bad_request + search: + body: + query: + script_score: + query: { "exists": { "field": "bdv" } } + script: + source: | + (int) field(params.field).get().l2Norm(params.query) + params: + query: "one, two, three" + field: bdv + + - match: { error.failed_shards.0.reason.caused_by.type: "illegal_argument_exception" } + - match: { error.failed_shards.0.reason.caused_by.reason: "Cannot use vector [one, two, three] with class [java.lang.String] as query vector" } + + - do: + catch: bad_request + search: + body: + query: + script_score: + query: { "exists": { "field": "bdv" } } + script: + source: | + (int) field(params.field).get().l2Norm(params.query) + params: + query: "one, two, three" + field: knn + + - match: { error.failed_shards.0.reason.caused_by.type: "illegal_argument_exception" } + - match: { error.failed_shards.0.reason.caused_by.reason: "Cannot use vector [one, two, three] with class [java.lang.String] as query vector" } diff --git a/x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/query/BinaryDenseVector.java b/x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/query/BinaryDenseVector.java new file mode 100644 index 0000000000000..785016bed097a --- /dev/null +++ b/x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/query/BinaryDenseVector.java @@ -0,0 +1,141 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.vectors.query; + +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.Version; +import org.elasticsearch.xpack.vectors.mapper.VectorEncoderDecoder; + +import java.nio.ByteBuffer; +import java.util.List; + +public class BinaryDenseVector implements DenseVector { + protected final BytesRef docVector; + protected final int dims; + protected final Version indexVersion; + + protected float[] decodedDocVector; + + public BinaryDenseVector(BytesRef docVector, int dims, Version indexVersion) { + this.docVector = docVector; + this.indexVersion = indexVersion; + this.dims = dims; + } + + @Override + public float[] getVector() { + if (decodedDocVector == null) { + decodedDocVector = new float[dims]; + VectorEncoderDecoder.decodeDenseVector(docVector, decodedDocVector); + } + return decodedDocVector; + } + + @Override + public float getMagnitude() { + return VectorEncoderDecoder.getMagnitude(indexVersion, docVector); + } + + @Override + public double dotProduct(float[] queryVector) { + ByteBuffer byteBuffer = wrap(docVector); + + double dotProduct = 0; + for (float v : queryVector) { + dotProduct += byteBuffer.getFloat() * v; + } + return dotProduct; + } + + @Override + public double dotProduct(List queryVector) { + ByteBuffer byteBuffer = wrap(docVector); + + double dotProduct = 0; + for (int i = 0; i < queryVector.size(); i++) { + dotProduct += byteBuffer.getFloat() * queryVector.get(i).floatValue(); + } + return dotProduct; + } + + @Override + public double l1Norm(float[] queryVector) { + ByteBuffer byteBuffer = wrap(docVector); + + double l1norm = 0; + for (float v : queryVector) { + l1norm += Math.abs(v - byteBuffer.getFloat()); + } + return l1norm; + } + + @Override + public double l1Norm(List queryVector) { + ByteBuffer byteBuffer = wrap(docVector); + + double l1norm = 0; + for (int i = 0; i < queryVector.size(); i++) { + l1norm += Math.abs(queryVector.get(i).floatValue() - byteBuffer.getFloat()); + } + return l1norm; + } + + @Override + public double l2Norm(float[] queryVector) { + ByteBuffer byteBuffer = wrap(docVector); + double l2norm = 0; + for (float queryValue : queryVector) { + double diff = byteBuffer.getFloat() - queryValue; + l2norm += diff * diff; + } + return Math.sqrt(l2norm); + } + + @Override + public double l2Norm(List queryVector) { + ByteBuffer byteBuffer = wrap(docVector); + double l2norm = 0; + for (Number number : queryVector) { + double diff = byteBuffer.getFloat() - number.floatValue(); + l2norm += diff * diff; + } + return Math.sqrt(l2norm); + } + + @Override + public double cosineSimilarity(float[] queryVector, boolean normalizeQueryVector) { + if (normalizeQueryVector) { + return dotProduct(queryVector) / (DenseVector.getMagnitude(queryVector) * getMagnitude()); + } + return dotProduct(queryVector) / getMagnitude(); + } + + @Override + public double cosineSimilarity(List queryVector) { + return dotProduct(queryVector) / (DenseVector.getMagnitude(queryVector) * getMagnitude()); + } + + @Override + public int size() { + return 1; + } + + @Override + public boolean isEmpty() { + return false; + } + + @Override + public int getDims() { + return dims; + } + + private static ByteBuffer wrap(BytesRef dv) { + return ByteBuffer.wrap(dv.bytes, dv.offset, dv.length); + } +} diff --git a/x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/query/BinaryDenseVectorDocValuesField.java b/x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/query/BinaryDenseVectorDocValuesField.java new file mode 
100644 index 0000000000000..ad1d016132547 --- /dev/null +++ b/x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/query/BinaryDenseVectorDocValuesField.java @@ -0,0 +1,70 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.vectors.query; + +import org.apache.lucene.index.BinaryDocValues; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.Version; + +import java.io.IOException; + +public class BinaryDenseVectorDocValuesField extends DenseVectorDocValuesField { + + protected final BinaryDocValues input; + protected final Version indexVersion; + protected final int dims; + protected BytesRef value; + + public BinaryDenseVectorDocValuesField(BinaryDocValues input, String name, int dims, Version indexVersion) { + super(name); + this.input = input; + this.indexVersion = indexVersion; + this.dims = dims; + } + + @Override + public void setNextDocId(int docId) throws IOException { + if (input.advanceExact(docId)) { + value = input.binaryValue(); + } else { + value = null; + } + } + + @Override + public DenseVectorScriptDocValues getScriptDocValues() { + return new DenseVectorScriptDocValues(this, dims); + } + + @Override + public boolean isEmpty() { + return value == null; + } + + @Override + public DenseVector get() { + if (isEmpty()) { + return DenseVector.EMPTY; + } + + return new BinaryDenseVector(value, dims, indexVersion); + } + + @Override + public DenseVector get(DenseVector defaultValue) { + if (isEmpty()) { + return defaultValue; + } + return new BinaryDenseVector(value, dims, indexVersion); + } + + @Override + public DenseVector getInternal() { + return get(null); + } +} diff --git a/x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/query/BinaryDenseVectorScriptDocValues.java b/x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/query/BinaryDenseVectorScriptDocValues.java deleted file mode 100644 index 852b63500a9bf..0000000000000 --- a/x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/query/BinaryDenseVectorScriptDocValues.java +++ /dev/null @@ -1,119 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -package org.elasticsearch.xpack.vectors.query; - -import org.apache.lucene.index.BinaryDocValues; -import org.apache.lucene.util.BytesRef; -import org.elasticsearch.Version; -import org.elasticsearch.xpack.vectors.mapper.VectorEncoderDecoder; - -import java.io.IOException; -import java.nio.ByteBuffer; - -public class BinaryDenseVectorScriptDocValues extends DenseVectorScriptDocValues { - - public static class BinaryDenseVectorSupplier implements DenseVectorSupplier { - - private final BinaryDocValues in; - private BytesRef value; - - public BinaryDenseVectorSupplier(BinaryDocValues in) { - this.in = in; - } - - @Override - public void setNextDocId(int docId) throws IOException { - if (in.advanceExact(docId)) { - value = in.binaryValue(); - } else { - value = null; - } - } - - @Override - public BytesRef getInternal(int index) { - throw new UnsupportedOperationException(); - } - - public BytesRef getInternal() { - return value; - } - - @Override - public int size() { - if (value == null) { - return 0; - } else { - return 1; - } - } - } - - private final BinaryDenseVectorSupplier bdvSupplier; - private final Version indexVersion; - private final float[] vector; - - BinaryDenseVectorScriptDocValues(BinaryDenseVectorSupplier supplier, Version indexVersion, int dims) { - super(supplier, dims); - this.bdvSupplier = supplier; - this.indexVersion = indexVersion; - this.vector = new float[dims]; - } - - @Override - public int size() { - return supplier.size(); - } - - @Override - public float[] getVectorValue() { - VectorEncoderDecoder.decodeDenseVector(bdvSupplier.getInternal(), vector); - return vector; - } - - @Override - public float getMagnitude() { - return VectorEncoderDecoder.getMagnitude(indexVersion, bdvSupplier.getInternal()); - } - - @Override - public double dotProduct(float[] queryVector) { - BytesRef value = bdvSupplier.getInternal(); - ByteBuffer byteBuffer = ByteBuffer.wrap(value.bytes, value.offset, value.length); - - double dotProduct = 0; - for (float queryValue : queryVector) { - dotProduct += queryValue * byteBuffer.getFloat(); - } - return (float) dotProduct; - } - - @Override - public double l1Norm(float[] queryVector) { - BytesRef value = bdvSupplier.getInternal(); - ByteBuffer byteBuffer = ByteBuffer.wrap(value.bytes, value.offset, value.length); - - double l1norm = 0; - for (float queryValue : queryVector) { - l1norm += Math.abs(queryValue - byteBuffer.getFloat()); - } - return l1norm; - } - - @Override - public double l2Norm(float[] queryVector) { - BytesRef value = bdvSupplier.getInternal(); - ByteBuffer byteBuffer = ByteBuffer.wrap(value.bytes, value.offset, value.length); - double l2norm = 0; - for (float queryValue : queryVector) { - double diff = queryValue - byteBuffer.getFloat(); - l2norm += diff * diff; - } - return Math.sqrt(l2norm); - } -} diff --git a/x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/query/DenseVector.java b/x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/query/DenseVector.java new file mode 100644 index 0000000000000..4ffbccbd9e415 --- /dev/null +++ b/x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/query/DenseVector.java @@ -0,0 +1,227 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.vectors.query; + +import java.util.List; + +/** + * DenseVector value type for the painless. + */ +/* dotProduct, l1Norm, l2Norm, cosineSimilarity have three flavors depending on the type of the queryVector + * 1) float[], this is for the ScoreScriptUtils class bindings which have converted a List based query vector into an array + * 2) List, A painless script will typically use Lists since they are easy to pass as params and have an easy + * literal syntax. Working with Lists directly, instead of converting to a float[], trades off runtime operations against + * memory pressure. Dense Vectors may have high dimensionality, up to 2048. Allocating a float[] per doc per script API + * call is prohibitively expensive. + * 3) Object, the whitelisted method for the painless API. Calls into the float[] or List version based on the + class of the argument and checks dimensionality. + */ +public interface DenseVector { + float[] getVector(); + + float getMagnitude(); + + double dotProduct(float[] queryVector); + + double dotProduct(List queryVector); + + @SuppressWarnings("unchecked") + default double dotProduct(Object queryVector) { + if (queryVector instanceof float[] array) { + checkDimensions(getDims(), array.length); + return dotProduct(array); + + } else if (queryVector instanceof List list) { + checkDimensions(getDims(), list.size()); + return dotProduct((List) list); + } + + throw new IllegalArgumentException(badQueryVectorType(queryVector)); + } + + double l1Norm(float[] queryVector); + + double l1Norm(List queryVector); + + @SuppressWarnings("unchecked") + default double l1Norm(Object queryVector) { + if (queryVector instanceof float[] array) { + checkDimensions(getDims(), array.length); + return l1Norm(array); + + } else if (queryVector instanceof List list) { + checkDimensions(getDims(), list.size()); + return l1Norm((List) list); + } + + throw new IllegalArgumentException(badQueryVectorType(queryVector)); + } + + double l2Norm(float[] queryVector); + + double l2Norm(List queryVector); + + @SuppressWarnings("unchecked") + default double l2Norm(Object queryVector) { + if (queryVector instanceof float[] array) { + checkDimensions(getDims(), array.length); + return l2Norm(array); + + } else if (queryVector instanceof List list) { + checkDimensions(getDims(), list.size()); + return l2Norm((List) list); + } + + throw new IllegalArgumentException(badQueryVectorType(queryVector)); + } + + /** + * Get the cosine similarity with the un-normalized query vector + */ + default double cosineSimilarity(float[] queryVector) { + return cosineSimilarity(queryVector, true); + } + + /** + * Get the cosine similarity with the query vector + * @param normalizeQueryVector - normalize the query vector, does not change the contents of passed in query vector + */ + double cosineSimilarity(float[] queryVector, boolean normalizeQueryVector); + + /** + * Get the cosine similarity with the un-normalized query vector + */ + double cosineSimilarity(List queryVector); + + /** + * Get the cosine similarity with the un-normalized query vector. Handles queryVectors of type float[] and List. 
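+ * For illustration, a sketch of how a painless script reaches this overload, mirroring the REST tests in this change + * (the param names are examples, not a fixed contract): {@code field(params.field).get().cosineSimilarity(params.query)}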
+ */ + @SuppressWarnings("unchecked") + default double cosineSimilarity(Object queryVector) { + if (queryVector instanceof float[] array) { + checkDimensions(getDims(), array.length); + return cosineSimilarity(array); + + } else if (queryVector instanceof List list) { + checkDimensions(getDims(), list.size()); + return cosineSimilarity((List) list); + } + + throw new IllegalArgumentException(badQueryVectorType(queryVector)); + } + + boolean isEmpty(); + + int getDims(); + + int size(); + + static float getMagnitude(float[] vector) { + double mag = 0.0f; + for (float elem : vector) { + mag += elem * elem; + } + return (float) Math.sqrt(mag); + } + + static float getMagnitude(List vector) { + double mag = 0.0f; + for (Number number : vector) { + float elem = number.floatValue(); + mag += elem * elem; + } + return (float) Math.sqrt(mag); + } + + static void checkDimensions(int dvDims, int qvDims) { + if (dvDims != qvDims) { + throw new IllegalArgumentException( + "The query vector has a different number of dimensions [" + qvDims + "] than the document vectors [" + dvDims + "]." + ); + } + } + + private static String badQueryVectorType(Object queryVector) { + return "Cannot use vector [" + queryVector + "] with class [" + queryVector.getClass().getName() + "] as query vector"; + } + + DenseVector EMPTY = new DenseVector() { + public static final String MISSING_VECTOR_FIELD_MESSAGE = "Dense vector value missing for a field," + + " use isEmpty() to check for a missing vector value"; + + @Override + public float getMagnitude() { + throw new IllegalArgumentException(MISSING_VECTOR_FIELD_MESSAGE); + } + + @Override + public double dotProduct(float[] queryVector) { + throw new IllegalArgumentException(MISSING_VECTOR_FIELD_MESSAGE); + } + + @Override + public double dotProduct(List queryVector) { + throw new IllegalArgumentException(MISSING_VECTOR_FIELD_MESSAGE); + } + + @Override + public double l1Norm(List queryVector) { + throw new IllegalArgumentException(MISSING_VECTOR_FIELD_MESSAGE); + } + + @Override + public double l1Norm(float[] queryVector) { + throw new IllegalArgumentException(MISSING_VECTOR_FIELD_MESSAGE); + } + + @Override + public double l2Norm(List queryVector) { + throw new IllegalArgumentException(MISSING_VECTOR_FIELD_MESSAGE); + } + + @Override + public double l2Norm(float[] queryVector) { + throw new IllegalArgumentException(MISSING_VECTOR_FIELD_MESSAGE); + } + + @Override + public double cosineSimilarity(float[] queryVector) { + throw new IllegalArgumentException(MISSING_VECTOR_FIELD_MESSAGE); + } + + @Override + public double cosineSimilarity(float[] queryVector, boolean normalizeQueryVector) { + throw new IllegalArgumentException(MISSING_VECTOR_FIELD_MESSAGE); + } + + @Override + public double cosineSimilarity(List queryVector) { + throw new IllegalArgumentException(MISSING_VECTOR_FIELD_MESSAGE); + } + + @Override + public float[] getVector() { + throw new IllegalArgumentException(MISSING_VECTOR_FIELD_MESSAGE); + } + + @Override + public boolean isEmpty() { + return true; + } + + @Override + public int getDims() { + throw new IllegalArgumentException(MISSING_VECTOR_FIELD_MESSAGE); + } + + @Override + public int size() { + return 0; + } + }; +} diff --git a/x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/query/DenseVectorDocValuesField.java b/x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/query/DenseVectorDocValuesField.java new file mode 100644 index 0000000000000..dd4a00fef3af0 --- /dev/null +++ 
b/x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/query/DenseVectorDocValuesField.java @@ -0,0 +1,51 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.vectors.query; + +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.script.field.DocValuesField; + +import java.util.Iterator; + +public abstract class DenseVectorDocValuesField implements DocValuesField, DenseVectorScriptDocValues.DenseVectorSupplier { + protected final String name; + + public DenseVectorDocValuesField(String name) { + this.name = name; + } + + @Override + public String getName() { + return name; + } + + @Override + public int size() { + return isEmpty() ? 0 : 1; + } + + @Override + public BytesRef getInternal(int index) { + throw new UnsupportedOperationException(); + } + + /** + * Get the DenseVector for a document if one exists, DenseVector.EMPTY otherwise + */ + public abstract DenseVector get(); + + public abstract DenseVector get(DenseVector defaultValue); + + public abstract DenseVectorScriptDocValues getScriptDocValues(); + + // DenseVector fields are single valued, so Iterable does not make sense. + @Override + public Iterator iterator() { + throw new UnsupportedOperationException("Cannot iterate over single valued dense_vector field, use get() instead"); + } +} diff --git a/x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/query/DenseVectorScriptDocValues.java b/x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/query/DenseVectorScriptDocValues.java index 650ebca1d5ee5..43d04f5ccde7a 100644 --- a/x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/query/DenseVectorScriptDocValues.java +++ b/x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/query/DenseVectorScriptDocValues.java @@ -10,24 +10,16 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.index.fielddata.ScriptDocValues; -public abstract class DenseVectorScriptDocValues extends ScriptDocValues { - - public interface DenseVectorSupplier extends Supplier { - - @Override - default BytesRef getInternal(int index) { - throw new UnsupportedOperationException(); - } - - T getInternal(); - } +public class DenseVectorScriptDocValues extends ScriptDocValues { public static final String MISSING_VECTOR_FIELD_MESSAGE = "A document doesn't have a value for a vector field!"; private final int dims; + protected final DenseVectorSupplier dvSupplier; - public DenseVectorScriptDocValues(DenseVectorSupplier supplier, int dims) { + public DenseVectorScriptDocValues(DenseVectorSupplier supplier, int dims) { super(supplier); + this.dvSupplier = supplier; this.dims = dims; } @@ -35,60 +27,58 @@ public int dims() { return dims; } + private DenseVector getCheckedVector() { + DenseVector vector = dvSupplier.getInternal(); + if (vector == null) { + throw new IllegalArgumentException(MISSING_VECTOR_FIELD_MESSAGE); + } + return vector; + } + /** * Get dense vector's value as an array of floats */ - public abstract float[] getVectorValue(); + public float[] getVectorValue() { + return getCheckedVector().getVector(); + } /** * Get dense vector's magnitude */ - public abstract float getMagnitude(); + public float getMagnitude() { + return getCheckedVector().getMagnitude(); + } - public abstract double dotProduct(float[] 
queryVector); + public double dotProduct(float[] queryVector) { + return getCheckedVector().dotProduct(queryVector); + } - public abstract double l1Norm(float[] queryVector); + public double l1Norm(float[] queryVector) { + return getCheckedVector().l1Norm(queryVector); + } - public abstract double l2Norm(float[] queryVector); + public double l2Norm(float[] queryVector) { + return getCheckedVector().l2Norm(queryVector); + } @Override public BytesRef get(int index) { throw new UnsupportedOperationException( - "accessing a vector field's value through 'get' or 'value' is not supported!" + "Use 'vectorValue' or 'magnitude' instead!'" + "accessing a vector field's value through 'get' or 'value' is not supported, use 'vectorValue' or 'magnitude' instead." ); } - public static DenseVectorScriptDocValues empty(DenseVectorSupplier supplier, int dims) { - return new DenseVectorScriptDocValues(supplier, dims) { - @Override - public float[] getVectorValue() { - throw new IllegalArgumentException(MISSING_VECTOR_FIELD_MESSAGE); - } - - @Override - public float getMagnitude() { - throw new IllegalArgumentException(MISSING_VECTOR_FIELD_MESSAGE); - } - - @Override - public double dotProduct(float[] queryVector) { - throw new IllegalArgumentException(MISSING_VECTOR_FIELD_MESSAGE); - } - - @Override - public double l1Norm(float[] queryVector) { - throw new IllegalArgumentException(MISSING_VECTOR_FIELD_MESSAGE); - } - - @Override - public double l2Norm(float[] queryVector) { - throw new IllegalArgumentException(MISSING_VECTOR_FIELD_MESSAGE); - } - - @Override - public int size() { - return supplier.size(); - } - }; + @Override + public int size() { + return dvSupplier.getInternal() == null ? 0 : 1; + } + + public interface DenseVectorSupplier extends Supplier { + @Override + default BytesRef getInternal(int index) { + throw new UnsupportedOperationException(); + } + + DenseVector getInternal(); } } diff --git a/x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/query/DocValuesWhitelistExtension.java b/x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/query/DocValuesWhitelistExtension.java index c53d1379dc252..953044c3a5500 100644 --- a/x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/query/DocValuesWhitelistExtension.java +++ b/x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/query/DocValuesWhitelistExtension.java @@ -19,7 +19,10 @@ public class DocValuesWhitelistExtension implements PainlessExtension { - private static final Whitelist WHITELIST = WhitelistLoader.loadFromResourceFiles(DocValuesWhitelistExtension.class, "whitelist.txt"); + private static final Whitelist WHITELIST = WhitelistLoader.loadFromResourceFiles( + DocValuesWhitelistExtension.class, + "org.elasticsearch.xpack.vectors.txt" + ); @Override public Map, List> getContextWhitelists() { diff --git a/x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/query/KnnDenseVector.java b/x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/query/KnnDenseVector.java new file mode 100644 index 0000000000000..1c240892ab2bd --- /dev/null +++ b/x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/query/KnnDenseVector.java @@ -0,0 +1,109 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.vectors.query; + +import org.apache.lucene.util.VectorUtil; + +import java.util.Arrays; +import java.util.List; + +public class KnnDenseVector implements DenseVector { + protected final float[] docVector; + + public KnnDenseVector(float[] docVector) { + this.docVector = docVector; + } + + @Override + public float[] getVector() { + // we need to copy the value, since {@link VectorValues} can reuse + // the underlying array across documents + return Arrays.copyOf(docVector, docVector.length); + } + + @Override + public float getMagnitude() { + return DenseVector.getMagnitude(docVector); + } + + @Override + public double dotProduct(float[] queryVector) { + return VectorUtil.dotProduct(docVector, queryVector); + } + + @Override + public double dotProduct(List queryVector) { + double dotProduct = 0; + for (int i = 0; i < docVector.length; i++) { + dotProduct += docVector[i] * queryVector.get(i).floatValue(); + } + return dotProduct; + } + + @Override + public double l1Norm(float[] queryVector) { + double result = 0.0; + for (int i = 0; i < docVector.length; i++) { + result += Math.abs(docVector[i] - queryVector[i]); + } + return result; + } + + @Override + public double l1Norm(List queryVector) { + double result = 0.0; + for (int i = 0; i < docVector.length; i++) { + result += Math.abs(docVector[i] - queryVector.get(i).floatValue()); + } + return result; + } + + @Override + public double l2Norm(float[] queryVector) { + return Math.sqrt(VectorUtil.squareDistance(docVector, queryVector)); + } + + @Override + public double l2Norm(List queryVector) { + double l2norm = 0; + for (int i = 0; i < docVector.length; i++) { + double diff = docVector[i] - queryVector.get(i).floatValue(); + l2norm += diff * diff; + } + return Math.sqrt(l2norm); + } + + @Override + public double cosineSimilarity(float[] queryVector, boolean normalizeQueryVector) { + if (normalizeQueryVector) { + return dotProduct(queryVector) / (DenseVector.getMagnitude(queryVector) * getMagnitude()); + } + + return dotProduct(queryVector) / getMagnitude(); + } + + @Override + public double cosineSimilarity(List queryVector) { + return dotProduct(queryVector) / (DenseVector.getMagnitude(queryVector) * getMagnitude()); + } + + @Override + public boolean isEmpty() { + return false; + } + + @Override + public int getDims() { + return docVector.length; + } + + @Override + public int size() { + return 1; + } +} diff --git a/x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/query/KnnDenseVectorDocValuesField.java b/x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/query/KnnDenseVectorDocValuesField.java new file mode 100644 index 0000000000000..58b2e60a0fb80 --- /dev/null +++ b/x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/query/KnnDenseVectorDocValuesField.java @@ -0,0 +1,79 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.vectors.query; + +import org.apache.lucene.index.VectorValues; +import org.elasticsearch.core.Nullable; + +import java.io.IOException; + +import static org.apache.lucene.search.DocIdSetIterator.NO_MORE_DOCS; + +public class KnnDenseVectorDocValuesField extends DenseVectorDocValuesField { + protected VectorValues input; // null if no vectors + protected float[] vector; + protected final int dims; + + public KnnDenseVectorDocValuesField(@Nullable VectorValues input, String name, int dims) { + super(name); + this.dims = dims; + this.input = input; + } + + @Override + public void setNextDocId(int docId) throws IOException { + if (input == null) { + return; + } + int currentDoc = input.docID(); + if (currentDoc == NO_MORE_DOCS || docId < currentDoc) { + vector = null; + } else if (docId == currentDoc) { + vector = input.vectorValue(); + } else { + currentDoc = input.advance(docId); + if (currentDoc == docId) { + vector = input.vectorValue(); + } else { + vector = null; + } + } + } + + @Override + public DenseVectorScriptDocValues getScriptDocValues() { + return new DenseVectorScriptDocValues(this, dims); + } + + public boolean isEmpty() { + return vector == null; + } + + @Override + public DenseVector get() { + if (isEmpty()) { + return DenseVector.EMPTY; + } + + return new KnnDenseVector(vector); + } + + @Override + public DenseVector get(DenseVector defaultValue) { + if (isEmpty()) { + return defaultValue; + } + + return new KnnDenseVector(vector); + } + + @Override + public DenseVector getInternal() { + return get(null); + } +} diff --git a/x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/query/KnnDenseVectorScriptDocValues.java b/x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/query/KnnDenseVectorScriptDocValues.java deleted file mode 100644 index fc6f1bdb59906..0000000000000 --- a/x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/query/KnnDenseVectorScriptDocValues.java +++ /dev/null @@ -1,122 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -package org.elasticsearch.xpack.vectors.query; - -import org.apache.lucene.index.VectorValues; -import org.apache.lucene.util.BytesRef; -import org.apache.lucene.util.VectorUtil; - -import java.io.IOException; -import java.util.Arrays; - -import static org.apache.lucene.search.DocIdSetIterator.NO_MORE_DOCS; - -public class KnnDenseVectorScriptDocValues extends DenseVectorScriptDocValues { - - public static class KnnDenseVectorSupplier implements DenseVectorSupplier { - - private final VectorValues in; - private float[] vector; - - public KnnDenseVectorSupplier(VectorValues in) { - this.in = in; - } - - @Override - public void setNextDocId(int docId) throws IOException { - int currentDoc = in.docID(); - if (currentDoc == NO_MORE_DOCS || docId < currentDoc) { - vector = null; - } else if (docId == currentDoc) { - vector = in.vectorValue(); - } else { - currentDoc = in.advance(docId); - if (currentDoc == docId) { - vector = in.vectorValue(); - } else { - vector = null; - } - } - } - - @Override - public BytesRef getInternal(int index) { - throw new UnsupportedOperationException(); - } - - public float[] getInternal() { - return vector; - } - - @Override - public int size() { - if (vector == null) { - return 0; - } else { - return 1; - } - } - } - - private final KnnDenseVectorSupplier kdvSupplier; - - KnnDenseVectorScriptDocValues(KnnDenseVectorSupplier supplier, int dims) { - super(supplier, dims); - this.kdvSupplier = supplier; - } - - private float[] getVectorChecked() { - if (kdvSupplier.getInternal() == null) { - throw new IllegalArgumentException(MISSING_VECTOR_FIELD_MESSAGE); - } - return kdvSupplier.getInternal(); - } - - @Override - public float[] getVectorValue() { - float[] vector = getVectorChecked(); - // we need to copy the value, since {@link VectorValues} can reuse - // the underlying array across documents - return Arrays.copyOf(vector, vector.length); - } - - @Override - public float getMagnitude() { - float[] vector = getVectorChecked(); - double magnitude = 0.0f; - for (float elem : vector) { - magnitude += elem * elem; - } - return (float) Math.sqrt(magnitude); - } - - @Override - public double dotProduct(float[] queryVector) { - return VectorUtil.dotProduct(getVectorChecked(), queryVector); - } - - @Override - public double l1Norm(float[] queryVector) { - float[] vectorValue = getVectorChecked(); - double result = 0.0; - for (int i = 0; i < queryVector.length; i++) { - result += Math.abs(vectorValue[i] - queryVector[i]); - } - return result; - } - - @Override - public double l2Norm(float[] queryVector) { - return Math.sqrt(VectorUtil.squareDistance(getVectorValue(), queryVector)); - } - - @Override - public int size() { - return supplier.size(); - } -} diff --git a/x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/query/ScoreScriptUtils.java b/x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/query/ScoreScriptUtils.java index e97daf4c2f397..24e74e4a93958 100644 --- a/x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/query/ScoreScriptUtils.java +++ b/x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/query/ScoreScriptUtils.java @@ -18,10 +18,10 @@ public class ScoreScriptUtils { public static class DenseVectorFunction { final ScoreScript scoreScript; final float[] queryVector; - final DenseVectorScriptDocValues docValues; + final DenseVectorDocValuesField field; - public DenseVectorFunction(ScoreScript scoreScript, List queryVector, String field) { - this(scoreScript, queryVector, field, 
false); + public DenseVectorFunction(ScoreScript scoreScript, List queryVector, String fieldName) { + this(scoreScript, queryVector, fieldName, false); } /** @@ -31,19 +31,10 @@ public DenseVectorFunction(ScoreScript scoreScript, List queryVector, St * @param queryVector The query vector. * @param normalizeQuery Whether the provided query should be normalized to unit length. */ - public DenseVectorFunction(ScoreScript scoreScript, List queryVector, String field, boolean normalizeQuery) { + public DenseVectorFunction(ScoreScript scoreScript, List queryVector, String fieldName, boolean normalizeQuery) { this.scoreScript = scoreScript; - this.docValues = (DenseVectorScriptDocValues) scoreScript.getDoc().get(field); - - if (docValues.dims() != queryVector.size()) { - throw new IllegalArgumentException( - "The query vector has a different number of dimensions [" - + queryVector.size() - + "] than the document vectors [" - + docValues.dims() - + "]." - ); - } + this.field = (DenseVectorDocValuesField) scoreScript.field(fieldName); + DenseVector.checkDimensions(field.get().getDims(), queryVector.size()); this.queryVector = new float[queryVector.size()]; double queryMagnitude = 0.0; @@ -63,11 +54,11 @@ public DenseVectorFunction(ScoreScript scoreScript, List queryVector, St void setNextVector() { try { - docValues.getSupplier().setNextDocId(scoreScript._getDocId()); + field.setNextDocId(scoreScript._getDocId()); } catch (IOException e) { throw ExceptionsHelper.convertToElastic(e); } - if (docValues.size() == 0) { + if (field.isEmpty()) { throw new IllegalArgumentException("A document doesn't have a value for a vector field!"); } } @@ -82,7 +73,7 @@ public L1Norm(ScoreScript scoreScript, List queryVector, String field) { public double l1norm() { setNextVector(); - return docValues.l1Norm(queryVector); + return field.get().l1Norm(queryVector); } } @@ -95,7 +86,7 @@ public L2Norm(ScoreScript scoreScript, List queryVector, String field) { public double l2norm() { setNextVector(); - return docValues.l2Norm(queryVector); + return field.get().l2Norm(queryVector); } } @@ -108,7 +99,7 @@ public DotProduct(ScoreScript scoreScript, List queryVector, String fiel public double dotProduct() { setNextVector(); - return docValues.dotProduct(queryVector); + return field.get().dotProduct(queryVector); } } @@ -121,7 +112,8 @@ public CosineSimilarity(ScoreScript scoreScript, List queryVector, Strin public double cosineSimilarity() { setNextVector(); - return docValues.dotProduct(queryVector) / docValues.getMagnitude(); + // query vector normalized in constructor + return field.get().cosineSimilarity(queryVector, false); } } } diff --git a/x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/query/VectorDVLeafFieldData.java b/x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/query/VectorDVLeafFieldData.java index 1d8c45e9c60c2..a4789543ded43 100644 --- a/x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/query/VectorDVLeafFieldData.java +++ b/x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/query/VectorDVLeafFieldData.java @@ -15,18 +15,12 @@ import org.elasticsearch.Version; import org.elasticsearch.index.fielddata.LeafFieldData; import org.elasticsearch.index.fielddata.SortedBinaryDocValues; -import org.elasticsearch.script.field.DelegateDocValuesField; import org.elasticsearch.script.field.DocValuesField; -import org.elasticsearch.xpack.vectors.query.BinaryDenseVectorScriptDocValues.BinaryDenseVectorSupplier; -import 
org.elasticsearch.xpack.vectors.query.DenseVectorScriptDocValues.DenseVectorSupplier; -import org.elasticsearch.xpack.vectors.query.KnnDenseVectorScriptDocValues.KnnDenseVectorSupplier; import java.io.IOException; import java.util.Collection; import java.util.Collections; -import static org.elasticsearch.xpack.vectors.query.DenseVectorScriptDocValues.MISSING_VECTOR_FIELD_MESSAGE; - final class VectorDVLeafFieldData implements LeafFieldData { private final LeafReader reader; @@ -63,31 +57,15 @@ public DocValuesField getScriptField(String name) { try { if (indexed) { VectorValues values = reader.getVectorValues(field); - if (values == null || values == VectorValues.EMPTY) { - return new DelegateDocValuesField(DenseVectorScriptDocValues.empty(new DenseVectorSupplier() { - @Override - public float[] getInternal() { - throw new IllegalArgumentException(MISSING_VECTOR_FIELD_MESSAGE); - } - - @Override - public void setNextDocId(int docId) throws IOException { - // do nothing - } - - @Override - public int size() { - return 0; - } - }, dims), name); + if (values == VectorValues.EMPTY) { + // There's no way for KnnDenseVectorDocValuesField to reliably differentiate between VectorValues.EMPTY and + // values that can be iterated through. Since VectorValues.EMPTY throws on docID(), pass a null instead. + values = null; } - return new DelegateDocValuesField(new KnnDenseVectorScriptDocValues(new KnnDenseVectorSupplier(values), dims), name); + return new KnnDenseVectorDocValuesField(values, name, dims); } else { BinaryDocValues values = DocValues.getBinary(reader, field); - return new DelegateDocValuesField( - new BinaryDenseVectorScriptDocValues(new BinaryDenseVectorSupplier(values), indexVersion, dims), - name - ); + return new BinaryDenseVectorDocValuesField(values, name, dims, indexVersion); } } catch (IOException e) { throw new IllegalStateException("Cannot load doc values for vector field!", e); diff --git a/x-pack/plugin/vectors/src/main/resources/org/elasticsearch/xpack/vectors/query/whitelist.txt b/x-pack/plugin/vectors/src/main/resources/org/elasticsearch/xpack/vectors/query/org.elasticsearch.xpack.vectors.txt similarity index 52% rename from x-pack/plugin/vectors/src/main/resources/org/elasticsearch/xpack/vectors/query/whitelist.txt rename to x-pack/plugin/vectors/src/main/resources/org/elasticsearch/xpack/vectors/query/org.elasticsearch.xpack.vectors.txt index 86583d77264a2..bcf989933b04e 100644 --- a/x-pack/plugin/vectors/src/main/resources/org/elasticsearch/xpack/vectors/query/whitelist.txt +++ b/x-pack/plugin/vectors/src/main/resources/org/elasticsearch/xpack/vectors/query/org.elasticsearch.xpack.vectors.txt @@ -11,6 +11,43 @@ class org.elasticsearch.xpack.vectors.query.DenseVectorScriptDocValues { class org.elasticsearch.script.ScoreScript @no_import { } +class org.elasticsearch.xpack.vectors.query.DenseVector { + DenseVector EMPTY + float getMagnitude() + + # handle List and float[] arguments + double dotProduct(Object) + double l1Norm(Object) + double l2Norm(Object) + double cosineSimilarity(Object) + + float[] getVector() + boolean isEmpty() + int getDims() + int size() +} + +# implementation of DenseVector +class org.elasticsearch.xpack.vectors.query.BinaryDenseVector { +} + +# implementation of DenseVector +class org.elasticsearch.xpack.vectors.query.KnnDenseVector { +} + +class org.elasticsearch.xpack.vectors.query.DenseVectorDocValuesField { + DenseVector get() + DenseVector get(DenseVector) +} + +# implementation of DenseVectorDocValuesField +class 
org.elasticsearch.xpack.vectors.query.KnnDenseVectorDocValuesField { +} + +# implementation of DenseVectorDocValuesField +class org.elasticsearch.xpack.vectors.query.BinaryDenseVectorDocValuesField { +} + static_import { double l1norm(org.elasticsearch.script.ScoreScript, List, String) bound_to org.elasticsearch.xpack.vectors.query.ScoreScriptUtils$L1Norm double l2norm(org.elasticsearch.script.ScoreScript, List, String) bound_to org.elasticsearch.xpack.vectors.query.ScoreScriptUtils$L2Norm diff --git a/x-pack/plugin/vectors/src/test/java/org/elasticsearch/xpack/vectors/query/BinaryDenseVectorScriptDocValuesTests.java b/x-pack/plugin/vectors/src/test/java/org/elasticsearch/xpack/vectors/query/BinaryDenseVectorScriptDocValuesTests.java index 2761364e51505..ddd96ba9fd0a7 100644 --- a/x-pack/plugin/vectors/src/test/java/org/elasticsearch/xpack/vectors/query/BinaryDenseVectorScriptDocValuesTests.java +++ b/x-pack/plugin/vectors/src/test/java/org/elasticsearch/xpack/vectors/query/BinaryDenseVectorScriptDocValuesTests.java @@ -12,7 +12,6 @@ import org.elasticsearch.Version; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.vectors.mapper.VectorEncoderDecoder; -import org.elasticsearch.xpack.vectors.query.BinaryDenseVectorScriptDocValues.BinaryDenseVectorSupplier; import java.io.IOException; import java.nio.ByteBuffer; @@ -29,24 +28,56 @@ public void testGetVectorValueAndGetMagnitude() throws IOException { for (Version indexVersion : Arrays.asList(Version.V_7_4_0, Version.CURRENT)) { BinaryDocValues docValues = wrap(vectors, indexVersion); - BinaryDenseVectorSupplier supplier = new BinaryDenseVectorSupplier(docValues); - DenseVectorScriptDocValues scriptDocValues = new BinaryDenseVectorScriptDocValues(supplier, indexVersion, dims); + BinaryDenseVectorDocValuesField field = new BinaryDenseVectorDocValuesField(docValues, "test", dims, indexVersion); + DenseVectorScriptDocValues scriptDocValues = field.getScriptDocValues(); for (int i = 0; i < vectors.length; i++) { - supplier.setNextDocId(i); + field.setNextDocId(i); + assertEquals(1, field.size()); + assertEquals(dims, scriptDocValues.dims()); assertArrayEquals(vectors[i], scriptDocValues.getVectorValue(), 0.0001f); assertEquals(expectedMagnitudes[i], scriptDocValues.getMagnitude(), 0.0001f); } } } + public void testMetadataAndIterator() throws IOException { + int dims = 3; + Version indexVersion = Version.CURRENT; + float[][] vectors = fill(new float[randomIntBetween(1, 5)][dims]); + BinaryDocValues docValues = wrap(vectors, indexVersion); + BinaryDenseVectorDocValuesField field = new BinaryDenseVectorDocValuesField(docValues, "test", dims, indexVersion); + for (int i = 0; i < vectors.length; i++) { + field.setNextDocId(i); + DenseVector dv = field.get(); + assertEquals(1, dv.size()); + assertFalse(dv.isEmpty()); + assertEquals(dims, dv.getDims()); + UnsupportedOperationException e = expectThrows(UnsupportedOperationException.class, field::iterator); + assertEquals("Cannot iterate over single valued dense_vector field, use get() instead", e.getMessage()); + } + field.setNextDocId(vectors.length); + DenseVector dv = field.get(); + assertEquals(dv, DenseVector.EMPTY); + } + + protected float[][] fill(float[][] vectors) { + for (float[] vector : vectors) { + for (int i = 0; i < vector.length; i++) { + vector[i] = randomFloat(); + } + } + return vectors; + } + public void testMissingValues() throws IOException { int dims = 3; float[][] vectors = { { 1, 1, 1 }, { 1, 1, 2 }, { 1, 1, 3 } }; BinaryDocValues docValues = wrap(vectors, 
Version.CURRENT); - BinaryDenseVectorSupplier supplier = new BinaryDenseVectorSupplier(docValues); - DenseVectorScriptDocValues scriptDocValues = new BinaryDenseVectorScriptDocValues(supplier, Version.CURRENT, dims); + BinaryDenseVectorDocValuesField field = new BinaryDenseVectorDocValuesField(docValues, "test", dims, Version.CURRENT); + DenseVectorScriptDocValues scriptDocValues = field.getScriptDocValues(); - supplier.setNextDocId(3); + field.setNextDocId(3); + assertEquals(0, field.size()); Exception e = expectThrows(IllegalArgumentException.class, scriptDocValues::getVectorValue); assertEquals("A document doesn't have a value for a vector field!", e.getMessage()); @@ -58,12 +89,17 @@ public void testGetFunctionIsNotAccessible() throws IOException { int dims = 3; float[][] vectors = { { 1, 1, 1 }, { 1, 1, 2 }, { 1, 1, 3 } }; BinaryDocValues docValues = wrap(vectors, Version.CURRENT); - BinaryDenseVectorSupplier supplier = new BinaryDenseVectorSupplier(docValues); - DenseVectorScriptDocValues scriptDocValues = new BinaryDenseVectorScriptDocValues(supplier, Version.CURRENT, dims); + BinaryDenseVectorDocValuesField field = new BinaryDenseVectorDocValuesField(docValues, "test", dims, Version.CURRENT); + DenseVectorScriptDocValues scriptDocValues = field.getScriptDocValues(); - supplier.setNextDocId(0); + field.setNextDocId(0); Exception e = expectThrows(UnsupportedOperationException.class, () -> scriptDocValues.get(0)); - assertThat(e.getMessage(), containsString("accessing a vector field's value through 'get' or 'value' is not supported!")); + assertThat( + e.getMessage(), + containsString( + "accessing a vector field's value through 'get' or 'value' is not supported, use 'vectorValue' or 'magnitude' instead." + ) + ); } public void testSimilarityFunctions() throws IOException { @@ -73,10 +109,10 @@ public void testSimilarityFunctions() throws IOException { for (Version indexVersion : Arrays.asList(Version.V_7_4_0, Version.CURRENT)) { BinaryDocValues docValues = wrap(new float[][] { docVector }, indexVersion); - BinaryDenseVectorSupplier supplier = new BinaryDenseVectorSupplier(docValues); - DenseVectorScriptDocValues scriptDocValues = new BinaryDenseVectorScriptDocValues(supplier, Version.CURRENT, dims); + BinaryDenseVectorDocValuesField field = new BinaryDenseVectorDocValuesField(docValues, "test", dims, indexVersion); + DenseVectorScriptDocValues scriptDocValues = field.getScriptDocValues(); - supplier.setNextDocId(0); + field.setNextDocId(0); assertEquals( "dotProduct result is not equal to the expected value!", @@ -133,7 +169,7 @@ public long cost() { }; } - private static BytesRef mockEncodeDenseVector(float[] values, Version indexVersion) { + static BytesRef mockEncodeDenseVector(float[] values, Version indexVersion) { byte[] bytes = indexVersion.onOrAfter(Version.V_7_5_0) ? 
new byte[VectorEncoderDecoder.INT_BYTES * values.length + VectorEncoderDecoder.INT_BYTES] : new byte[VectorEncoderDecoder.INT_BYTES * values.length]; diff --git a/x-pack/plugin/vectors/src/test/java/org/elasticsearch/xpack/vectors/query/DenseVectorFunctionTests.java b/x-pack/plugin/vectors/src/test/java/org/elasticsearch/xpack/vectors/query/DenseVectorFunctionTests.java index 0ecd26f08c20c..d40d7e3abd663 100644 --- a/x-pack/plugin/vectors/src/test/java/org/elasticsearch/xpack/vectors/query/DenseVectorFunctionTests.java +++ b/x-pack/plugin/vectors/src/test/java/org/elasticsearch/xpack/vectors/query/DenseVectorFunctionTests.java @@ -7,18 +7,16 @@ package org.elasticsearch.xpack.vectors.query; -import org.apache.lucene.index.BinaryDocValues; import org.elasticsearch.Version; import org.elasticsearch.script.ScoreScript; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.xpack.vectors.query.BinaryDenseVectorScriptDocValues.BinaryDenseVectorSupplier; import org.elasticsearch.xpack.vectors.query.ScoreScriptUtils.CosineSimilarity; import org.elasticsearch.xpack.vectors.query.ScoreScriptUtils.DotProduct; import org.elasticsearch.xpack.vectors.query.ScoreScriptUtils.L1Norm; import org.elasticsearch.xpack.vectors.query.ScoreScriptUtils.L2Norm; +import java.io.IOException; import java.util.Arrays; -import java.util.Collections; import java.util.List; import java.util.function.Supplier; @@ -28,34 +26,72 @@ public class DenseVectorFunctionTests extends ESTestCase { - public void testVectorFunctions() { - String field = "vector"; + public void testVectorClassBindings() throws IOException { + String fieldName = "vector"; int dims = 5; float[] docVector = new float[] { 230.0f, 300.33f, -34.8988f, 15.555f, -200.0f }; List queryVector = Arrays.asList(0.5f, 111.3f, -13.0f, 14.8f, -156.0f); List invalidQueryVector = Arrays.asList(0.5, 111.3); - for (Version indexVersion : Arrays.asList(Version.V_7_4_0, Version.CURRENT)) { - BinaryDocValues docValues = BinaryDenseVectorScriptDocValuesTests.wrap(new float[][] { docVector }, indexVersion); - DenseVectorScriptDocValues scriptDocValues = new BinaryDenseVectorScriptDocValues( - new BinaryDenseVectorSupplier(docValues), - indexVersion, - dims - ); + List fields = List.of( + new BinaryDenseVectorDocValuesField( + BinaryDenseVectorScriptDocValuesTests.wrap(new float[][] { docVector }, Version.V_7_4_0), + "test", + dims, + Version.V_7_4_0 + ), + new BinaryDenseVectorDocValuesField( + BinaryDenseVectorScriptDocValuesTests.wrap(new float[][] { docVector }, Version.CURRENT), + "test", + dims, + Version.CURRENT + ), + new KnnDenseVectorDocValuesField(KnnDenseVectorScriptDocValuesTests.wrap(new float[][] { docVector }), "test", dims) + ); + for (DenseVectorDocValuesField field : fields) { + field.setNextDocId(0); ScoreScript scoreScript = mock(ScoreScript.class); - when(scoreScript.getDoc()).thenReturn(Collections.singletonMap(field, scriptDocValues)); + when(scoreScript.field("vector")).thenAnswer(mock -> field); // Test cosine similarity explicitly, as it must perform special logic on top of the doc values - CosineSimilarity function = new CosineSimilarity(scoreScript, queryVector, field); - assertEquals("cosineSimilarity result is not equal to the expected value!", 0.790, function.cosineSimilarity(), 0.001); + CosineSimilarity function = new CosineSimilarity(scoreScript, queryVector, fieldName); + float cosineSimilarityExpected = 0.790f; + assertEquals( + "cosineSimilarity result is not equal to the expected value!", + cosineSimilarityExpected, + 
function.cosineSimilarity(), + 0.001 + ); + + // Test normalization for cosineSimilarity + float[] queryVectorArray = new float[queryVector.size()]; + for (int i = 0; i < queryVectorArray.length; i++) { + queryVectorArray[i] = queryVector.get(i).floatValue(); + } + assertEquals( + "cosineSimilarity result is not equal to the expected value!", + cosineSimilarityExpected, + field.getInternal().cosineSimilarity(queryVectorArray, true), + 0.001 + ); // Check each function rejects query vectors with the wrong dimension - assertDimensionMismatch(() -> new DotProduct(scoreScript, invalidQueryVector, field)); - assertDimensionMismatch(() -> new CosineSimilarity(scoreScript, invalidQueryVector, field)); - assertDimensionMismatch(() -> new L1Norm(scoreScript, invalidQueryVector, field)); - assertDimensionMismatch(() -> new L2Norm(scoreScript, invalidQueryVector, field)); + assertDimensionMismatch(() -> new DotProduct(scoreScript, invalidQueryVector, fieldName)); + assertDimensionMismatch(() -> new CosineSimilarity(scoreScript, invalidQueryVector, fieldName)); + assertDimensionMismatch(() -> new L1Norm(scoreScript, invalidQueryVector, fieldName)); + assertDimensionMismatch(() -> new L2Norm(scoreScript, invalidQueryVector, fieldName)); + + // Check scripting infrastructure integration + DotProduct dotProduct = new DotProduct(scoreScript, queryVector, fieldName); + assertEquals(65425.6249, dotProduct.dotProduct(), 0.001); + assertEquals(485.1837, new L1Norm(scoreScript, queryVector, fieldName).l1norm(), 0.001); + assertEquals(301.3614, new L2Norm(scoreScript, queryVector, fieldName).l2norm(), 0.001); + when(scoreScript._getDocId()).thenReturn(1); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, dotProduct::dotProduct); + assertEquals("A document doesn't have a value for a vector field!", e.getMessage()); } + } private void assertDimensionMismatch(Supplier supplier) { diff --git a/x-pack/plugin/vectors/src/test/java/org/elasticsearch/xpack/vectors/query/DenseVectorTests.java b/x-pack/plugin/vectors/src/test/java/org/elasticsearch/xpack/vectors/query/DenseVectorTests.java new file mode 100644 index 0000000000000..11078e4964920 --- /dev/null +++ b/x-pack/plugin/vectors/src/test/java/org/elasticsearch/xpack/vectors/query/DenseVectorTests.java @@ -0,0 +1,84 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.vectors.query; + +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.Version; +import org.elasticsearch.test.ESTestCase; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashMap; +import java.util.List; + +import static org.hamcrest.Matchers.containsString; + +public class DenseVectorTests extends ESTestCase { + public void testBadVectorType() { + DenseVector knn = new KnnDenseVector(new float[] { 1.0f, 2.0f, 3.5f }); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> knn.dotProduct(new HashMap<>())); + assertThat(e.getMessage(), containsString("Cannot use vector [")); + assertThat(e.getMessage(), containsString("] with class [java.util.HashMap] as query vector")); + + e = expectThrows(IllegalArgumentException.class, () -> knn.l1Norm(new HashMap<>())); + assertThat(e.getMessage(), containsString("Cannot use vector [")); + assertThat(e.getMessage(), containsString("] with class [java.util.HashMap] as query vector")); + + e = expectThrows(IllegalArgumentException.class, () -> knn.l2Norm(new HashMap<>())); + assertThat(e.getMessage(), containsString("Cannot use vector [")); + assertThat(e.getMessage(), containsString("] with class [java.util.HashMap] as query vector")); + + e = expectThrows(IllegalArgumentException.class, () -> knn.cosineSimilarity(new HashMap<>())); + assertThat(e.getMessage(), containsString("Cannot use vector [")); + assertThat(e.getMessage(), containsString("] with class [java.util.HashMap] as query vector")); + } + + public void testFloatVsListQueryVector() { + int dims = randomIntBetween(1, 16); + float[] docVector = new float[dims]; + float[] arrayQV = new float[dims]; + List listQV = new ArrayList<>(dims); + for (int i = 0; i < docVector.length; i++) { + docVector[i] = randomFloat(); + float q = randomFloat(); + arrayQV[i] = q; + listQV.add(q); + } + + KnnDenseVector knn = new KnnDenseVector(docVector); + assertEquals(knn.dotProduct(arrayQV), knn.dotProduct(listQV), 0.001f); + assertEquals(knn.dotProduct((Object) listQV), knn.dotProduct((Object) arrayQV), 0.001f); + + assertEquals(knn.l1Norm(arrayQV), knn.l1Norm(listQV), 0.001f); + assertEquals(knn.l1Norm((Object) listQV), knn.l1Norm((Object) arrayQV), 0.001f); + + assertEquals(knn.l2Norm(arrayQV), knn.l2Norm(listQV), 0.001f); + assertEquals(knn.l2Norm((Object) listQV), knn.l2Norm((Object) arrayQV), 0.001f); + + assertEquals(knn.cosineSimilarity(arrayQV), knn.cosineSimilarity(listQV), 0.001f); + assertEquals(knn.cosineSimilarity((Object) listQV), knn.cosineSimilarity((Object) arrayQV), 0.001f); + + for (Version indexVersion : Arrays.asList(Version.V_7_4_0, Version.CURRENT)) { + BytesRef value = BinaryDenseVectorScriptDocValuesTests.mockEncodeDenseVector(docVector, indexVersion); + BinaryDenseVector bdv = new BinaryDenseVector(value, dims, indexVersion); + + assertEquals(bdv.dotProduct(arrayQV), bdv.dotProduct(listQV), 0.001f); + assertEquals(bdv.dotProduct((Object) listQV), bdv.dotProduct((Object) arrayQV), 0.001f); + + assertEquals(bdv.l1Norm(arrayQV), bdv.l1Norm(listQV), 0.001f); + assertEquals(bdv.l1Norm((Object) listQV), bdv.l1Norm((Object) arrayQV), 0.001f); + + assertEquals(bdv.l2Norm(arrayQV), bdv.l2Norm(listQV), 0.001f); + assertEquals(bdv.l2Norm((Object) listQV), bdv.l2Norm((Object) arrayQV), 0.001f); + + assertEquals(bdv.cosineSimilarity(arrayQV), bdv.cosineSimilarity(listQV), 0.001f); + assertEquals(bdv.cosineSimilarity((Object) listQV), bdv.cosineSimilarity((Object) arrayQV), 0.001f); + } + } + 
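+ // A minimal sketch of the dimension check: the Object overloads route through DenseVector.checkDimensions, so a + // query vector of the wrong length is rejected up front, while the typed float[]/List overloads assume matching + // dimensions. The test name is illustrative. + public void testDimensionMismatchSketch() { + DenseVector knn = new KnnDenseVector(new float[] { 1.0f, 2.0f }); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> knn.dotProduct((Object) new float[] { 1.0f })); + assertThat(e.getMessage(), containsString("different number of dimensions")); + }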
+} diff --git a/x-pack/plugin/vectors/src/test/java/org/elasticsearch/xpack/vectors/query/KnnDenseVectorScriptDocValuesTests.java b/x-pack/plugin/vectors/src/test/java/org/elasticsearch/xpack/vectors/query/KnnDenseVectorScriptDocValuesTests.java index 7005e4d7bd531..743fc2d8bb63e 100644 --- a/x-pack/plugin/vectors/src/test/java/org/elasticsearch/xpack/vectors/query/KnnDenseVectorScriptDocValuesTests.java +++ b/x-pack/plugin/vectors/src/test/java/org/elasticsearch/xpack/vectors/query/KnnDenseVectorScriptDocValuesTests.java @@ -10,7 +10,6 @@ import org.apache.lucene.index.VectorValues; import org.apache.lucene.util.BytesRef; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.xpack.vectors.query.KnnDenseVectorScriptDocValues.KnnDenseVectorSupplier; import java.io.IOException; @@ -23,22 +22,52 @@ public void testGetVectorValueAndGetMagnitude() throws IOException { float[][] vectors = { { 1, 1, 1 }, { 1, 1, 2 }, { 1, 1, 3 } }; float[] expectedMagnitudes = { 1.7320f, 2.4495f, 3.3166f }; - KnnDenseVectorSupplier supplier = new KnnDenseVectorSupplier(wrap(vectors)); - DenseVectorScriptDocValues scriptDocValues = new KnnDenseVectorScriptDocValues(supplier, dims); + DenseVectorDocValuesField field = new KnnDenseVectorDocValuesField(wrap(vectors), "test", dims); + DenseVectorScriptDocValues scriptDocValues = field.getScriptDocValues(); for (int i = 0; i < vectors.length; i++) { - supplier.setNextDocId(i); + field.setNextDocId(i); + assertEquals(1, field.size()); + assertEquals(dims, scriptDocValues.dims()); assertArrayEquals(vectors[i], scriptDocValues.getVectorValue(), 0.0001f); assertEquals(expectedMagnitudes[i], scriptDocValues.getMagnitude(), 0.0001f); } } + public void testMetadataAndIterator() throws IOException { + int dims = 3; + float[][] vectors = fill(new float[randomIntBetween(1, 5)][dims]); + KnnDenseVectorDocValuesField field = new KnnDenseVectorDocValuesField(wrap(vectors), "test", dims); + for (int i = 0; i < vectors.length; i++) { + field.setNextDocId(i); + DenseVector dv = field.get(); + assertEquals(1, dv.size()); + assertFalse(dv.isEmpty()); + assertEquals(dims, dv.getDims()); + UnsupportedOperationException e = expectThrows(UnsupportedOperationException.class, field::iterator); + assertEquals("Cannot iterate over single valued dense_vector field, use get() instead", e.getMessage()); + } + assertEquals(1, field.size()); + field.setNextDocId(vectors.length); + DenseVector dv = field.get(); + assertEquals(dv, DenseVector.EMPTY); + } + + protected float[][] fill(float[][] vectors) { + for (float[] vector : vectors) { + for (int i = 0; i < vector.length; i++) { + vector[i] = randomFloat(); + } + } + return vectors; + } + public void testMissingValues() throws IOException { int dims = 3; float[][] vectors = { { 1, 1, 1 }, { 1, 1, 2 }, { 1, 1, 3 } }; - KnnDenseVectorSupplier supplier = new KnnDenseVectorSupplier(wrap(vectors)); - DenseVectorScriptDocValues scriptDocValues = new KnnDenseVectorScriptDocValues(supplier, dims); + DenseVectorDocValuesField field = new KnnDenseVectorDocValuesField(wrap(vectors), "test", dims); + DenseVectorScriptDocValues scriptDocValues = field.getScriptDocValues(); - supplier.setNextDocId(3); + field.setNextDocId(3); Exception e = expectThrows(IllegalArgumentException.class, () -> scriptDocValues.getVectorValue()); assertEquals("A document doesn't have a value for a vector field!", e.getMessage()); @@ -49,12 +78,17 @@ public void testMissingValues() throws IOException { public void testGetFunctionIsNotAccessible() throws IOException { int 
dims = 3; float[][] vectors = { { 1, 1, 1 }, { 1, 1, 2 }, { 1, 1, 3 } }; - KnnDenseVectorSupplier supplier = new KnnDenseVectorSupplier(wrap(vectors)); - DenseVectorScriptDocValues scriptDocValues = new KnnDenseVectorScriptDocValues(supplier, dims); + DenseVectorDocValuesField field = new KnnDenseVectorDocValuesField(wrap(vectors), "test", dims); + DenseVectorScriptDocValues scriptDocValues = field.getScriptDocValues(); - supplier.setNextDocId(0); + field.setNextDocId(0); Exception e = expectThrows(UnsupportedOperationException.class, () -> scriptDocValues.get(0)); - assertThat(e.getMessage(), containsString("accessing a vector field's value through 'get' or 'value' is not supported!")); + assertThat( + e.getMessage(), + containsString( + "accessing a vector field's value through 'get' or 'value' is not supported, use 'vectorValue' or 'magnitude' instead." + ) + ); } public void testSimilarityFunctions() throws IOException { @@ -62,16 +96,30 @@ public void testSimilarityFunctions() throws IOException { float[] docVector = new float[] { 230.0f, 300.33f, -34.8988f, 15.555f, -200.0f }; float[] queryVector = new float[] { 0.5f, 111.3f, -13.0f, 14.8f, -156.0f }; - KnnDenseVectorSupplier supplier = new KnnDenseVectorSupplier(wrap(new float[][] { docVector })); - DenseVectorScriptDocValues scriptDocValues = new KnnDenseVectorScriptDocValues(supplier, dims); - supplier.setNextDocId(0); + DenseVectorDocValuesField field = new KnnDenseVectorDocValuesField(wrap(new float[][] { docVector }), "test", dims); + DenseVectorScriptDocValues scriptDocValues = field.getScriptDocValues(); + field.setNextDocId(0); assertEquals("dotProduct result is not equal to the expected value!", 65425.624, scriptDocValues.dotProduct(queryVector), 0.001); assertEquals("l1norm result is not equal to the expected value!", 485.184, scriptDocValues.l1Norm(queryVector), 0.001); assertEquals("l2norm result is not equal to the expected value!", 301.361, scriptDocValues.l2Norm(queryVector), 0.001); } - private static VectorValues wrap(float[][] vectors) { + public void testMissingVectorValues() throws IOException { + int dims = 7; + KnnDenseVectorDocValuesField emptyKnn = new KnnDenseVectorDocValuesField(null, "test", dims); + + emptyKnn.setNextDocId(0); + assertEquals(0, emptyKnn.getScriptDocValues().size()); + assertTrue(emptyKnn.getScriptDocValues().isEmpty()); + assertEquals(DenseVector.EMPTY, emptyKnn.get()); + assertNull(emptyKnn.get(null)); + assertNull(emptyKnn.getInternal()); + UnsupportedOperationException e = expectThrows(UnsupportedOperationException.class, emptyKnn::iterator); + assertEquals("Cannot iterate over single valued dense_vector field, use get() instead", e.getMessage()); + } + + static VectorValues wrap(float[][] vectors) { return new VectorValues() { int index = 0; From 01eba38ee397fdde48c124f74b9157289beebb18 Mon Sep 17 00:00:00 2001 From: Przemko Robakowski Date: Wed, 16 Feb 2022 14:37:45 +0100 Subject: [PATCH 127/167] Fix GeoIpDownloader startup during rolling upgrade (#84000) If a rolling upgrade was performed from a version prior to GeoIPv2 (<`7.14`), the geoip downloader wouldn't be started, so no new databases were downloaded. This is especially troubling in `8.x`, as we no longer ship default databases inside ES, so after the upgrade no geoip enrichment can take place until the downloader is started with a workaround (setting `ingest.geoip.downloader.enabled` to `false` and back to `true`).
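For reference, a minimal sketch of that workaround via the cluster settings API (request bodies are illustrative): PUT _cluster/settings { "persistent": { "ingest.geoip.downloader.enabled": false } } followed by PUT _cluster/settings { "persistent": { "ingest.geoip.downloader.enabled": true } }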
This is because the logic used to lower the number of requests and cluster update listeners at startup was too optimistic about the order of actions and about who can be elected master at what time. This change fixes that, cleans up the logs when there are ignorable errors, and adds debug logging on start and stop of the task to ease troubleshooting. It also adds a rolling upgrade test to make sure the fix works. --- docs/changelog/84000.yaml | 5 +++ .../ingest/geoip/GeoIpDownloader.java | 9 +++- .../geoip/GeoIpDownloaderTaskExecutor.java | 43 ++++++++++++------- x-pack/qa/rolling-upgrade/build.gradle | 7 +++ .../upgrades/GeoIpUpgradeIT.java | 33 ++++++++++++++ 5 files changed, 81 insertions(+), 16 deletions(-) create mode 100644 docs/changelog/84000.yaml create mode 100644 x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/GeoIpUpgradeIT.java diff --git a/docs/changelog/84000.yaml b/docs/changelog/84000.yaml new file mode 100644 index 0000000000000..b24d357834e21 --- /dev/null +++ b/docs/changelog/84000.yaml @@ -0,0 +1,5 @@ +pr: 84000 +summary: Fix `GeoIpDownloader` startup during rolling upgrade +area: Ingest +type: bug +issues: [] diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpDownloader.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpDownloader.java index 4d1c594ab7b7c..5ec08891981f6 100644 --- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpDownloader.java +++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpDownloader.java @@ -66,9 +66,15 @@ public class GeoIpDownloader extends AllocatedPersistentTask { Property.Dynamic, Property.NodeScope ); + + // for overriding in tests + private static final String DEFAULT_ENDPOINT = System.getProperty( + "ingest.geoip.downloader.endpoint.default", + "https://geoip.elastic.co/v1/database" + ); public static final Setting ENDPOINT_SETTING = Setting.simpleString( "ingest.geoip.downloader.endpoint", - "https://geoip.elastic.co/v1/database", + DEFAULT_ENDPOINT, Property.NodeScope ); @@ -258,6 +264,7 @@ void runDownloader() { try { updateDatabases(); } catch (Exception e) { + stats = stats.failedDownload(); logger.error("exception during geoip databases update", e); } try { diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderTaskExecutor.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderTaskExecutor.java index 16cb86953003e..9d65b17bacc5e 100644 --- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderTaskExecutor.java +++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderTaskExecutor.java @@ -12,11 +12,13 @@ import org.apache.logging.log4j.Logger; import org.elasticsearch.ResourceAlreadyExistsException; import org.elasticsearch.ResourceNotFoundException; +import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.client.internal.Client; import org.elasticsearch.client.internal.OriginSettingClient; import org.elasticsearch.cluster.ClusterChangedEvent; import org.elasticsearch.cluster.ClusterStateListener; +import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; @@ -29,6 +31,7 @@ import org.elasticsearch.persistent.PersistentTasksService; import org.elasticsearch.tasks.TaskId; import
org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.RemoteTransportException; import java.util.Map; import java.util.concurrent.atomic.AtomicReference; @@ -128,14 +131,18 @@ public void clusterChanged(ClusterChangedEvent event) { // wait for state recovered return; } - // bootstrap downloader after first cluster start + + DiscoveryNode masterNode = event.state().nodes().getMasterNode(); + if (masterNode == null || masterNode.getVersion().before(Version.V_7_14_0)) { + // wait for master to be upgraded so it understands geoip task + return; + } + clusterService.removeListener(this); - if (event.localNodeMaster()) { - if (ENABLED_SETTING.get(event.state().getMetadata().settings(), settings)) { - startTask(() -> clusterService.addListener(this)); - } else { - stopTask(() -> clusterService.addListener(this)); - } + if (ENABLED_SETTING.get(event.state().getMetadata().settings(), settings)) { + startTask(() -> clusterService.addListener(this)); + } else { + stopTask(() -> clusterService.addListener(this)); } } @@ -144,8 +151,9 @@ private void startTask(Runnable onFailure) { GEOIP_DOWNLOADER, GEOIP_DOWNLOADER, new GeoIpTaskParams(), - ActionListener.wrap(r -> {}, e -> { - if (e instanceof ResourceAlreadyExistsException == false) { + ActionListener.wrap(r -> logger.debug("Started geoip downloader task"), e -> { + Throwable t = e instanceof RemoteTransportException ? e.getCause() : e; + if (t instanceof ResourceAlreadyExistsException == false) { logger.error("failed to create geoip downloader task", e); onFailure.run(); } @@ -154,18 +162,23 @@ private void startTask(Runnable onFailure) { } private void stopTask(Runnable onFailure) { - ActionListener> listener = ActionListener.wrap(r -> {}, e -> { - if (e instanceof ResourceNotFoundException == false) { - logger.error("failed to remove geoip downloader task", e); - onFailure.run(); + ActionListener> listener = ActionListener.wrap( + r -> logger.debug("Stopped geoip downloader task"), + e -> { + Throwable t = e instanceof RemoteTransportException ? e.getCause() : e; + if (t instanceof ResourceNotFoundException == false) { + logger.error("failed to remove geoip downloader task", e); + onFailure.run(); + } } - }); + ); persistentTasksService.sendRemoveRequest( GEOIP_DOWNLOADER, ActionListener.runAfter( listener, () -> client.admin().indices().prepareDelete(DATABASES_INDEX).execute(ActionListener.wrap(rr -> {}, e -> { - if (e instanceof ResourceNotFoundException == false) { + Throwable t = e instanceof RemoteTransportException ? 
e.getCause() : e; + if (t instanceof ResourceNotFoundException == false) { logger.warn("failed to remove " + DATABASES_INDEX, e); } })) diff --git a/x-pack/qa/rolling-upgrade/build.gradle b/x-pack/qa/rolling-upgrade/build.gradle index 93a9a99ce3e3f..a6db46c9d0d10 100644 --- a/x-pack/qa/rolling-upgrade/build.gradle +++ b/x-pack/qa/rolling-upgrade/build.gradle @@ -41,6 +41,13 @@ BuildParams.bwcVersions.withWireCompatible { bwcVersion, baseName -> versions = [oldVersion, project.version] numberOfNodes = 3 + systemProperty 'ingest.geoip.downloader.enabled.default', 'true' + //we don't want to hit real service from each test + systemProperty 'ingest.geoip.downloader.endpoint.default', 'http://invalid.endpoint' + if (bwcVersion.onOrAfter('7.14.0')) { + setting 'ingest.geoip.downloader.endpoint', 'http://invalid.endpoint' + } + setting 'repositories.url.allowed_urls', 'http://snapshot.test*' setting 'path.repo', "['${buildDir}/cluster/shared/repo/${baseName}', '${searchableSnapshotRepository}']" setting 'xpack.license.self_generated.type', 'trial' diff --git a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/GeoIpUpgradeIT.java b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/GeoIpUpgradeIT.java new file mode 100644 index 0000000000000..3dedd041d6465 --- /dev/null +++ b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/GeoIpUpgradeIT.java @@ -0,0 +1,33 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.upgrades; + +import org.apache.http.util.EntityUtils; +import org.elasticsearch.client.Request; +import org.elasticsearch.client.Response; +import org.hamcrest.Matchers; + +import java.nio.charset.StandardCharsets; + +public class GeoIpUpgradeIT extends AbstractUpgradeTestCase { + + public void testGeoIpDownloader() throws Exception { + if (CLUSTER_TYPE == ClusterType.UPGRADED) { + assertBusy(() -> { + Response response = client().performRequest(new Request("GET", "_cat/tasks")); + String tasks = EntityUtils.toString(response.getEntity(), StandardCharsets.UTF_8); + assertThat(tasks, Matchers.containsString("geoip-downloader")); + }); + assertBusy(() -> { + Response response = client().performRequest(new Request("GET", "_ingest/geoip/stats")); + String tasks = EntityUtils.toString(response.getEntity(), StandardCharsets.UTF_8); + assertThat(tasks, Matchers.containsString("failed_downloads\":1")); + }); + } + } +} From 6b904d79cb18a809268cd079a04210a960750685 Mon Sep 17 00:00:00 2001 From: Artem Prigoda Date: Wed, 16 Feb 2022 15:52:20 +0100 Subject: [PATCH 128/167] [docs] Mention JDK 17 in the Contributing docs (#84018) ES 8+ requires JDK 17 to be built --- CONTRIBUTING.md | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index fc9935ca69794..287b28c5718e1 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -112,11 +112,11 @@ Contributing to the Elasticsearch codebase **Repository:** [https://github.com/elastic/elasticsearch](https://github.com/elastic/elasticsearch) -JDK 16 is required to build Elasticsearch. You must have a JDK 16 installation +JDK 17 is required to build Elasticsearch. You must have a JDK 17 installation with the environment variable `JAVA_HOME` referencing the path to Java home for -your JDK 16 installation. 
By default, tests use the same runtime as `JAVA_HOME`. +your JDK 17 installation. By default, tests use the same runtime as `JAVA_HOME`. However, since Elasticsearch supports JDK 11, the build supports compiling with -JDK 16 and testing on a JDK 11 runtime; to do this, set `RUNTIME_JAVA_HOME` +JDK 17 and testing on a JDK 11 runtime; to do this, set `RUNTIME_JAVA_HOME` pointing to the Java home of a JDK 11 installation. Note that this mechanism can be used to test against other JDKs as well, this is not only limited to JDK 11. @@ -151,9 +151,9 @@ and then run `curl` in another window like this: ### Importing the project into IntelliJ IDEA The minimum IntelliJ IDEA version required to import the Elasticsearch project is 2020.1 -Elasticsearch builds using Java 16. When importing into IntelliJ you will need +Elasticsearch builds using Java 17. When importing into IntelliJ you will need to define an appropriate SDK. The convention is that **this SDK should be named -"16"** so that the project import will detect it automatically. For more details +"17"** so that the project import will detect it automatically. For more details on defining an SDK in IntelliJ please refer to [their documentation](https://www.jetbrains.com/help/idea/sdk.html#define-sdk). SDK definitions are global, so you can add the JDK from any project, or after project import. Importing with a missing JDK will still work, IntelliJ will From ed40e1e0c2cee4926b7ffcb697175f7171cc0fc5 Mon Sep 17 00:00:00 2001 From: Benjamin Trent Date: Wed, 16 Feb 2022 11:42:41 -0500 Subject: [PATCH 129/167] [ML] fix NER token grouping when special tokens are used (#84042) Bug introduced by #83835. This switches our token tagging back to taking each token's position in the input into account when reconstituting and tagging tokens for NER.
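The root cause is that token ids and the token map encode different things, so the input-mapping check must read the token map; a minimal, self-contained sketch with hypothetical values (not taken from a real vocabulary):

```java
// tokenIds() holds vocabulary indices, which are never negative, so a
// `tokenIds()[i] < 0` check can never skip a special token. tokenMap() holds
// the index of the source word each token came from, with -1 for special
// tokens such as [CLS], [SEP] and [PAD] -- that is the array to consult.
int[] tokenIds = { 101, 2116, 2224, 3449, 102 }; // hypothetical ids for: [CLS] many use el [SEP]
int[] tokenMap = { -1, 0, 1, 2, -1 };            // -1 marks the special tokens
for (int i = 0; i < tokenIds.length; i++) {
    if (tokenMap[i] < 0) {
        continue; // special token: not part of the input text, must not be tagged
    }
    System.out.println("tag token id " + tokenIds[i] + " for input word " + tokenMap[i]);
}
```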
--- .../xpack/ml/inference/nlp/NerProcessor.java | 2 +- .../ml/inference/nlp/NerProcessorTests.java | 44 +++++++++++++++++++ 2 files changed, 45 insertions(+), 1 deletion(-) diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/NerProcessor.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/NerProcessor.java index 1aa9ce8e6b0f6..e8c7253d3c5d2 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/NerProcessor.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/NerProcessor.java @@ -229,7 +229,7 @@ static List tagTokens(TokenizationResult.Tokens tokenization, doubl int startTokenIndex = 0; int numSpecialTokens = 0; while (startTokenIndex < tokenization.tokenIds().length) { - int inputMapping = tokenization.tokenIds()[startTokenIndex]; + int inputMapping = tokenization.tokenMap()[startTokenIndex]; if (inputMapping < 0) { // This token does not map to a token in the input (special tokens) startTokenIndex++; diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/NerProcessorTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/NerProcessorTests.java index baafecf85c30a..feedcf7a7d537 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/NerProcessorTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/NerProcessorTests.java @@ -100,6 +100,50 @@ public void testProcessResults_GivenNoTokens() { assertThat(e, instanceOf(ElasticsearchStatusException.class)); } + public void testProcessResultsWithSpecialTokens() { + NerProcessor.NerResultProcessor processor = new NerProcessor.NerResultProcessor(NerProcessor.IobTag.values(), null, true); + BertTokenizer tokenizer = BertTokenizer.builder( + List.of( + "el", + "##astic", + "##search", + "many", + "use", + "in", + "london", + BertTokenizer.PAD_TOKEN, + BertTokenizer.UNKNOWN_TOKEN, + BertTokenizer.SEPARATOR_TOKEN, + BertTokenizer.CLASS_TOKEN + ), + new BertTokenization(true, true, null, Tokenization.Truncate.NONE) + ).build(); + TokenizationResult tokenization = tokenizer.buildTokenizationResult( + List.of(tokenizer.tokenize("Many use Elasticsearch in London", Tokenization.Truncate.NONE)) + ); + + double[][][] scores = { + { + { 7, 0, 0, 0, 0, 0, 0, 0, 0 }, // cls + { 7, 0, 0, 0, 0, 0, 0, 0, 0 }, // many + { 7, 0, 0, 0, 0, 0, 0, 0, 0 }, // use + { 0.01, 0.01, 0, 0.01, 0, 7, 0, 3, 0 }, // el + { 0.01, 0.01, 0, 0, 0, 0, 0, 0, 0 }, // ##astic + { 0, 0, 0, 0, 0, 0, 0, 0, 0 }, // ##search + { 0, 0, 0, 0, 0, 0, 0, 0, 0 }, // in + { 0, 0, 0, 0, 0, 0, 0, 6, 0 }, // london + { 7, 0, 0, 0, 0, 0, 0, 0, 0 } // sep + } }; + NerResults result = (NerResults) processor.processResult(tokenization, new PyTorchInferenceResult("1", scores, 1L, null)); + + assertThat(result.getAnnotatedResult(), equalTo("Many use [Elasticsearch](ORG&Elasticsearch) in [London](LOC&London)")); + assertThat(result.getEntityGroups().size(), equalTo(2)); + assertThat(result.getEntityGroups().get(0).getEntity(), equalTo("elasticsearch")); + assertThat(result.getEntityGroups().get(0).getClassName(), equalTo(NerProcessor.Entity.ORG.toString())); + assertThat(result.getEntityGroups().get(1).getEntity(), equalTo("london")); + assertThat(result.getEntityGroups().get(1).getClassName(), equalTo(NerProcessor.Entity.LOC.toString())); + } + public void testProcessResults() { NerProcessor.NerResultProcessor processor = new 
NerProcessor.NerResultProcessor(NerProcessor.IobTag.values(), null, true); TokenizationResult tokenization = tokenize( From 900a0136182745fb29b8133ae76282516f0e50bf Mon Sep 17 00:00:00 2001 From: Mark Vieira Date: Wed, 16 Feb 2022 09:07:35 -0800 Subject: [PATCH 130/167] Make intake BWC testing a matrix job for better parallelization --- .ci/jobs.t/elastic+elasticsearch+intake+multijob+bwc.yml | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/.ci/jobs.t/elastic+elasticsearch+intake+multijob+bwc.yml b/.ci/jobs.t/elastic+elasticsearch+intake+multijob+bwc.yml index ab5f17d60b933..c1d7881d787be 100644 --- a/.ci/jobs.t/elastic+elasticsearch+intake+multijob+bwc.yml +++ b/.ci/jobs.t/elastic+elasticsearch+intake+multijob+bwc.yml @@ -1,7 +1,9 @@ --- -jjbb-template: generic-gradle-unix.yml +jjbb-template: matrix-gradle-unix.yml vars: - job-name: elastic+elasticsearch+%BRANCH%+intake+multijob+bwc - job-display-name: "elastic / elasticsearch # %BRANCH% - intake bwc" - job-description: Elasticsearch %BRANCH% branch intake backwards compatibility checks. - - gradle-args: "-Dbwc.checkout.align=true -Dorg.elasticsearch.build.cache.push=true -Dignore.tests.seed -Dscan.capture-task-input-files bwcTestSnapshots" + - matrix-yaml-file: ".ci/snapshotBwcVersions" + - matrix-variable: BWC_VERSION + - gradle-args: "-Dbwc.checkout.align=true -Dorg.elasticsearch.build.cache.push=true -Dignore.tests.seed -Dscan.capture-task-input-files v$BWC_VERSION#bwcTest" From a438708b2930bec84cc310353cd117deee14e115 Mon Sep 17 00:00:00 2001 From: Mark Vieira Date: Wed, 16 Feb 2022 09:23:08 -0800 Subject: [PATCH 131/167] Rename BWC intake job so that old job is removed --- ... => elastic+elasticsearch+intake+multijob+bwc-snapshots.yml} | 2 +- .ci/jobs.t/elastic+elasticsearch+intake.yml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) rename .ci/jobs.t/{elastic+elasticsearch+intake+multijob+bwc.yml => elastic+elasticsearch+intake+multijob+bwc-snapshots.yml} (97%) diff --git a/.ci/jobs.t/elastic+elasticsearch+intake+multijob+bwc.yml b/.ci/jobs.t/elastic+elasticsearch+intake+multijob+bwc-snapshots.yml similarity index 97% rename from .ci/jobs.t/elastic+elasticsearch+intake+multijob+bwc.yml rename to .ci/jobs.t/elastic+elasticsearch+intake+multijob+bwc-snapshots.yml index c1d7881d787be..70509792a9d8c 100644 --- a/.ci/jobs.t/elastic+elasticsearch+intake+multijob+bwc.yml +++ b/.ci/jobs.t/elastic+elasticsearch+intake+multijob+bwc-snapshots.yml @@ -1,7 +1,7 @@ --- jjbb-template: matrix-gradle-unix.yml vars: - - job-name: elastic+elasticsearch+%BRANCH%+intake+multijob+bwc + - job-name: elastic+elasticsearch+%BRANCH%+intake+multijob+bwc-snapshots - job-display-name: "elastic / elasticsearch # %BRANCH% - intake bwc" - job-description: Elasticsearch %BRANCH% branch intake backwards compatibility checks. 
- matrix-yaml-file: ".ci/snapshotBwcVersions" diff --git a/.ci/jobs.t/elastic+elasticsearch+intake.yml b/.ci/jobs.t/elastic+elasticsearch+intake.yml index 6da6161c56763..645408771b70a 100644 --- a/.ci/jobs.t/elastic+elasticsearch+intake.yml +++ b/.ci/jobs.t/elastic+elasticsearch+intake.yml @@ -42,7 +42,7 @@ kill-phase-on: NEVER current-parameters: true git-revision: true - - name: elastic+elasticsearch+%BRANCH%+intake+multijob+bwc + - name: elastic+elasticsearch+%BRANCH%+intake+multijob+bwc-snapshots kill-phase-on: NEVER current-parameters: true git-revision: true From 68a149072a14d415ee55e9cd1800c19d27542480 Mon Sep 17 00:00:00 2001 From: Mark Vieira Date: Wed, 16 Feb 2022 10:56:05 -0800 Subject: [PATCH 132/167] Make pull request BWC testing a matrix job for better parallelization --- ...asticsearch+pull-request+bwc-snapshots.yml} | 18 +++++++++++++++--- 1 file changed, 15 insertions(+), 3 deletions(-) rename .ci/jobs.t/{elastic+elasticsearch+pull-request+bwc.yml => elastic+elasticsearch+pull-request+bwc-snapshots.yml} (74%) diff --git a/.ci/jobs.t/elastic+elasticsearch+pull-request+bwc.yml b/.ci/jobs.t/elastic+elasticsearch+pull-request+bwc-snapshots.yml similarity index 74% rename from .ci/jobs.t/elastic+elasticsearch+pull-request+bwc.yml rename to .ci/jobs.t/elastic+elasticsearch+pull-request+bwc-snapshots.yml index 606d906556c64..2194bd986a891 100644 --- a/.ci/jobs.t/elastic+elasticsearch+pull-request+bwc.yml +++ b/.ci/jobs.t/elastic+elasticsearch+pull-request+bwc-snapshots.yml @@ -1,9 +1,11 @@ --- - job: - name: "elastic+elasticsearch+pull-request+bwc" + name: "elastic+elasticsearch+pull-request+bwc-snapshots" display-name: "elastic / elasticsearch - pull request bwc" description: "Testing of Elasticsearch pull requests - bwc" - workspace: "/dev/shm/elastic+elasticsearch+pull-request+bwc" + project-type: matrix + node: master + child-workspace: "/dev/shm/elastic+elasticsearch+pull-request+bwc" scm: - git: refspec: "+refs/pull/${ghprbPullId}/*:refs/remotes/origin/pr/${ghprbPullId}/*" @@ -23,6 +25,16 @@ black-list-labels: - '>test-mute' - 'test-full-bwc' + axes: + - axis: + type: slave + name: nodes + values: + - "general-purpose" + - axis: + type: yaml + filename: ".ci/snapshotBwcVersions" + name: "BWC_VERSION" builders: - inject: properties-file: '.ci/java-versions.properties' @@ -34,4 +46,4 @@ JAVA16_HOME=$HOME/.java/openjdk16 - shell: | #!/usr/local/bin/runbld --redirect-stderr - $WORKSPACE/.ci/scripts/run-gradle.sh -Dignore.tests.seed bwcTestSnapshots + $WORKSPACE/.ci/scripts/run-gradle.sh -Dignore.tests.seed v$BWC_VERSION#bwcTest From 458ef91066f966cbf37e72ab41368019367004c2 Mon Sep 17 00:00:00 2001 From: Lisa Cawley Date: Wed, 16 Feb 2022 11:23:00 -0800 Subject: [PATCH 133/167] [DOCS] Move ML info and upgrade APIs (#84005) --- .../migrate_8_0/rest-api-changes.asciidoc | 2 +- .../ml/anomaly-detection/apis/index.asciidoc | 4 +--- .../{ml-apis.asciidoc => ml-ad-apis.asciidoc} | 15 ++------------- .../apis/get-ml-info.asciidoc | 0 docs/reference/ml/common/apis/index.asciidoc | 6 ++++++ .../reference/ml/common/apis/ml-apis.asciidoc | 19 +++++++++++++++++++ .../apis/set-upgrade-mode.asciidoc | 0 docs/reference/rest-api/index.asciidoc | 4 +++- 8 files changed, 32 insertions(+), 18 deletions(-) rename docs/reference/ml/anomaly-detection/apis/{ml-apis.asciidoc => ml-ad-apis.asciidoc} (91%) rename docs/reference/ml/{anomaly-detection => common}/apis/get-ml-info.asciidoc (100%) create mode 100644 docs/reference/ml/common/apis/index.asciidoc create mode 100644 
docs/reference/ml/common/apis/ml-apis.asciidoc rename docs/reference/ml/{anomaly-detection => common}/apis/set-upgrade-mode.asciidoc (100%) diff --git a/docs/reference/migration/migrate_8_0/rest-api-changes.asciidoc b/docs/reference/migration/migrate_8_0/rest-api-changes.asciidoc index dad6c988a6995..a3f7e256e7c4d 100644 --- a/docs/reference/migration/migrate_8_0/rest-api-changes.asciidoc +++ b/docs/reference/migration/migrate_8_0/rest-api-changes.asciidoc @@ -679,7 +679,7 @@ The {ml} {ref}/ml-post-data.html[post data to jobs API] is deprecated starting i and will be removed in a future major version. *Impact* + -Use {ref}/ml-apis.html#ml-api-datafeed-endpoint[{dfeeds}] instead. +Use {ref}/ml-ad-apis.html#ml-api-datafeed-endpoint[{dfeeds}] instead. ==== .The `job_id` property of the Update {dfeeds} API has been removed. diff --git a/docs/reference/ml/anomaly-detection/apis/index.asciidoc b/docs/reference/ml/anomaly-detection/apis/index.asciidoc index 4603a7cd4aa04..3f8ab5a454b0a 100644 --- a/docs/reference/ml/anomaly-detection/apis/index.asciidoc +++ b/docs/reference/ml/anomaly-detection/apis/index.asciidoc @@ -1,4 +1,4 @@ -include::ml-apis.asciidoc[leveloffset=+1] +include::ml-ad-apis.asciidoc[leveloffset=+1] //ADD include::post-calendar-event.asciidoc[leveloffset=+2] include::put-calendar-job.asciidoc[leveloffset=+2] @@ -34,7 +34,6 @@ include::get-datafeed-stats.asciidoc[leveloffset=+2] include::get-influencer.asciidoc[leveloffset=+2] include::get-job.asciidoc[leveloffset=+2] include::get-job-stats.asciidoc[leveloffset=+2] -include::get-ml-info.asciidoc[leveloffset=+2] include::get-snapshot.asciidoc[leveloffset=+2] include::get-job-model-snapshot-upgrade-stats.asciidoc[leveloffset=+2] include::get-overall-buckets.asciidoc[leveloffset=+2] @@ -52,7 +51,6 @@ include::reset-job.asciidoc[leveloffset=+2] //REVERT include::revert-snapshot.asciidoc[leveloffset=+2] //SET/START/STOP -include::set-upgrade-mode.asciidoc[leveloffset=+2] include::start-datafeed.asciidoc[leveloffset=+2] include::stop-datafeed.asciidoc[leveloffset=+2] //UPDATE diff --git a/docs/reference/ml/anomaly-detection/apis/ml-apis.asciidoc b/docs/reference/ml/anomaly-detection/apis/ml-ad-apis.asciidoc similarity index 91% rename from docs/reference/ml/anomaly-detection/apis/ml-apis.asciidoc rename to docs/reference/ml/anomaly-detection/apis/ml-ad-apis.asciidoc index d44395b66046c..856232c933432 100644 --- a/docs/reference/ml/anomaly-detection/apis/ml-apis.asciidoc +++ b/docs/reference/ml/anomaly-detection/apis/ml-ad-apis.asciidoc @@ -1,10 +1,10 @@ [role="xpack"] -[[ml-apis]] +[[ml-ad-apis]] = {ml-cap} {anomaly-detect} APIs You can use the following APIs to perform {ml} {anomaly-detect} activities. -See also <>. +See also <>, <>, <>. [discrete] [[ml-api-anomaly-job-endpoint]] @@ -70,20 +70,9 @@ See also <>. 
* <> * <> -[discrete] -[[ml-api-ml-info-endpoint]] -== Info - -* <> - [discrete] [[ml-api-delete-expired-data-endpoint]] == Delete expired data * <> -[discrete] -[[ml-set-upgrade-mode-endpoint]] -== Set upgrade mode - -* <> diff --git a/docs/reference/ml/anomaly-detection/apis/get-ml-info.asciidoc b/docs/reference/ml/common/apis/get-ml-info.asciidoc similarity index 100% rename from docs/reference/ml/anomaly-detection/apis/get-ml-info.asciidoc rename to docs/reference/ml/common/apis/get-ml-info.asciidoc diff --git a/docs/reference/ml/common/apis/index.asciidoc b/docs/reference/ml/common/apis/index.asciidoc new file mode 100644 index 0000000000000..e7dfc8d437169 --- /dev/null +++ b/docs/reference/ml/common/apis/index.asciidoc @@ -0,0 +1,6 @@ +include::ml-apis.asciidoc[leveloffset=+1] +//GET +include::get-ml-info.asciidoc[leveloffset=+2] +//SET +include::set-upgrade-mode.asciidoc[leveloffset=+2] + diff --git a/docs/reference/ml/common/apis/ml-apis.asciidoc b/docs/reference/ml/common/apis/ml-apis.asciidoc new file mode 100644 index 0000000000000..c4a24e2e6a59b --- /dev/null +++ b/docs/reference/ml/common/apis/ml-apis.asciidoc @@ -0,0 +1,19 @@ +[role="xpack"] +[[ml-apis]] += {ml-cap} APIs + +You can use the following APIs to retrieve information related to the {stack-ml-features}. + +See also <>, <>, and <>. + +[discrete] +[[ml-api-ml-info-endpoint]] +== Info + +* <> + +[discrete] +[[ml-set-upgrade-mode-endpoint]] +== Set upgrade mode + +* <> diff --git a/docs/reference/ml/anomaly-detection/apis/set-upgrade-mode.asciidoc b/docs/reference/ml/common/apis/set-upgrade-mode.asciidoc similarity index 100% rename from docs/reference/ml/anomaly-detection/apis/set-upgrade-mode.asciidoc rename to docs/reference/ml/common/apis/set-upgrade-mode.asciidoc diff --git a/docs/reference/rest-api/index.asciidoc b/docs/reference/rest-api/index.asciidoc index 532b814ad5619..04f014b75054b 100644 --- a/docs/reference/rest-api/index.asciidoc +++ b/docs/reference/rest-api/index.asciidoc @@ -31,7 +31,8 @@ not be included yet. 
* <> * <> * <> -* <> +* <> +* <> * <> * <> * <> @@ -72,6 +73,7 @@ include::{es-repo-dir}/ingest/apis/index.asciidoc[] include::info.asciidoc[] include::{es-repo-dir}/licensing/index.asciidoc[] include::{xes-repo-dir}/rest-api/logstash/index.asciidoc[] +include::{es-repo-dir}/ml/common/apis/index.asciidoc[] include::{es-repo-dir}/ml/anomaly-detection/apis/index.asciidoc[] include::{es-repo-dir}/ml/df-analytics/apis/index.asciidoc[] include::{es-repo-dir}/ml/trained-models/apis/index.asciidoc[] From dd3397b2c85ef9e23ca0b3dd70bdd6db8420e0b0 Mon Sep 17 00:00:00 2001 From: James Baiera Date: Wed, 16 Feb 2022 15:47:18 -0500 Subject: [PATCH 134/167] Unmute BWC rest tests (#84058) This PR unmutes some tests that were muted for backporting #83290 --- .../resources/rest-api-spec/test/cat.snapshots/10_basic.yml | 4 ---- .../resources/rest-api-spec/test/snapshot.clone/10_basic.yml | 3 --- .../rest-api-spec/test/snapshot.create/10_basic.yml | 3 --- .../resources/rest-api-spec/test/snapshot.get/10_basic.yml | 5 +---- .../test/snapshot.get_repository/20_repository_uuid.yml | 5 ----- .../rest-api-spec/test/snapshot.restore/10_basic.yml | 3 --- .../rest-api-spec/test/snapshot.status/10_basic.yml | 3 --- .../resources/rest-api-spec/test/tsdb/30_snapshot.yml | 4 ---- 8 files changed, 1 insertion(+), 29 deletions(-) diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cat.snapshots/10_basic.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cat.snapshots/10_basic.yml index 23860cb412722..f7d60671c7e88 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cat.snapshots/10_basic.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cat.snapshots/10_basic.yml @@ -23,10 +23,6 @@ $/ --- "Test cat snapshots output": - - skip: - version: " - 8.1.99" - reason: "Pause BWC tests until #83290 is backported" - - do: snapshot.create_repository: repository: test_cat_snapshots_1 diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/snapshot.clone/10_basic.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/snapshot.clone/10_basic.yml index 80e7139cd8df3..fb289355e08fb 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/snapshot.clone/10_basic.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/snapshot.clone/10_basic.yml @@ -1,8 +1,5 @@ --- setup: - - skip: - version: " - 8.1.99" - reason: "Pause BWC tests until #83290 is backported" - do: snapshot.create_repository: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/snapshot.create/10_basic.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/snapshot.create/10_basic.yml index e060e7dff5bda..f7c522b712244 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/snapshot.create/10_basic.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/snapshot.create/10_basic.yml @@ -1,8 +1,5 @@ --- setup: - - skip: - version: " - 8.1.99" - reason: "Pause BWC tests until #83290 is backported" - do: snapshot.create_repository: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/snapshot.get/10_basic.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/snapshot.get/10_basic.yml index 08753e4e732bf..b50ece87e9f88 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/snapshot.get/10_basic.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/snapshot.get/10_basic.yml @@ -1,8 +1,5 @@ --- setup: - - skip: - version: " 
- 8.1.99" - reason: "Pause BWC tests until #83290 is backported" - do: snapshot.create_repository: @@ -64,7 +61,6 @@ setup: --- "Get snapshot info when verbose is false": - - do: indices.create: index: test_index @@ -202,6 +198,7 @@ setup: - skip: version: " - 7.12.99" reason: "Introduced in 7.13.0" + - do: indices.create: index: test_index diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/snapshot.get_repository/20_repository_uuid.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/snapshot.get_repository/20_repository_uuid.yml index 503c6cc7133de..0532d208d0cba 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/snapshot.get_repository/20_repository_uuid.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/snapshot.get_repository/20_repository_uuid.yml @@ -1,9 +1,4 @@ --- -setup: - - skip: - version: " - 8.1.99" - reason: "Pause BWC tests until #83290 is backported" ---- "Get repository returns UUID": - skip: version: " - 7.12.99" diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/snapshot.restore/10_basic.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/snapshot.restore/10_basic.yml index e91f38e985e43..1ea5b542625e8 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/snapshot.restore/10_basic.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/snapshot.restore/10_basic.yml @@ -1,8 +1,5 @@ --- setup: - - skip: - version: " - 8.1.99" - reason: "Pause BWC tests until #83290 is backported" - do: snapshot.create_repository: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/snapshot.status/10_basic.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/snapshot.status/10_basic.yml index 2c4573ccd58b8..c35f2419bdc91 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/snapshot.status/10_basic.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/snapshot.status/10_basic.yml @@ -1,8 +1,5 @@ --- setup: - - skip: - version: " - 8.1.99" - reason: "Pause BWC tests until #83290 is backported" - do: snapshot.create_repository: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/30_snapshot.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/30_snapshot.yml index 39c6dd4345bdf..104b383ae811f 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/30_snapshot.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/30_snapshot.yml @@ -1,9 +1,5 @@ --- setup: - - skip: - version: " - 8.1.99" - reason: "Pause BWC tests until #83290 is backported" - - do: snapshot.create_repository: repository: test_repo From b001a6fe12b96717a3406ceccc6cf139bdf4686c Mon Sep 17 00:00:00 2001 From: Keith Massey Date: Wed, 16 Feb 2022 15:25:25 -0600 Subject: [PATCH 135/167] Adding a warning if node.attr.data is set (#84050) This adds a warning-level deprecation if a user has set the node.attr.data setting, since it is a sign that they are trying to create a hot/warm setup in the way that is no longer supported. 
Closes #83800 --- .../xpack/deprecation/DeprecationChecks.java | 3 ++- .../deprecation/NodeDeprecationChecks.java | 16 ++++++++++++++++ .../deprecation/NodeDeprecationChecksTests.java | 17 +++++++++++++++++ 3 files changed, 35 insertions(+), 1 deletion(-) diff --git a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/DeprecationChecks.java b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/DeprecationChecks.java index b0869c2b41eeb..28548d71932fc 100644 --- a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/DeprecationChecks.java +++ b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/DeprecationChecks.java @@ -82,7 +82,8 @@ private DeprecationChecks() {} NodeDeprecationChecks::checkScriptContextCacheExpirationSetting, NodeDeprecationChecks::checkEnforceDefaultTierPreferenceSetting, NodeDeprecationChecks::checkLifecyleStepMasterTimeoutSetting, - NodeDeprecationChecks::checkEqlEnabledSetting + NodeDeprecationChecks::checkEqlEnabledSetting, + NodeDeprecationChecks::checkNodeAttrData ); static List> INDEX_SETTINGS_CHECKS = List.of( diff --git a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/NodeDeprecationChecks.java b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/NodeDeprecationChecks.java index ab3230cd0baaf..40be68851a765 100644 --- a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/NodeDeprecationChecks.java +++ b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/NodeDeprecationChecks.java @@ -616,4 +616,20 @@ static DeprecationIssue checkEqlEnabledSetting(final Settings settings, final Pl ); } + static DeprecationIssue checkNodeAttrData(final Settings settings, final PluginsAndModules pluginsAndModules) { + String nodeAttrDataValue = settings.get("node.attr.data"); + if (nodeAttrDataValue == null) { + return null; + } + return new DeprecationIssue( + DeprecationIssue.Level.WARNING, + "Setting node.attributes.data is not recommended", + "https://ela.st/es-deprecation-7-node-attr-data-setting", + "One or more of your nodes is configured with node.attributes.data settings. This is typically used to create a " + + "hot/warm or tiered architecture, based on legacy guidelines. 
Data tiers are a recommended replacement for tiered " + "architecture clusters.", + false, + null + ); + } } diff --git a/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/NodeDeprecationChecksTests.java b/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/NodeDeprecationChecksTests.java index 3a004dc4da692..124d2f19ee62b 100644 --- a/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/NodeDeprecationChecksTests.java +++ b/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/NodeDeprecationChecksTests.java @@ -675,4 +675,21 @@ public void testEqlEnabledSetting() { ) ); } + + public void testCheckNodeAttrData() { + Settings settings = Settings.builder().put("node.attr.data", randomAlphaOfLength(randomIntBetween(4, 20))).build(); + final PluginsAndModules pluginsAndModules = new PluginsAndModules(Collections.emptyList(), Collections.emptyList()); + final List issues = getDeprecationIssues(settings, pluginsAndModules); + final DeprecationIssue expected = new DeprecationIssue( + DeprecationIssue.Level.WARNING, + "Setting node.attributes.data is not recommended", + "https://ela.st/es-deprecation-7-node-attr-data-setting", + "One or more of your nodes is configured with node.attributes.data settings. This is typically used to create a " + + "hot/warm or tiered architecture, based on legacy guidelines. Data tiers are a recommended replacement for tiered " + + "architecture clusters.", + false, + null + ); + assertThat(issues, hasItem(expected)); + } } From 476240e208a65d941f004d3597ddc3874b3d2b4f Mon Sep 17 00:00:00 2001 From: Albert Zaharovits Date: Thu, 17 Feb 2022 00:00:59 +0200 Subject: [PATCH 136/167] Security global privilege for writing profile data of applications (#83728) This PR adds a new global privilege which can be used to restrict writes for user profile data. The privilege is configurable for the names of the top-level keys in the profile data maps (`data` and `access`), which by convention are "application" names. Lastly, it adds such a privilege, for the `kibana-*` application namespace, to the `kibana_system` built-in role. E.g.: ``` { "global": { "application": { "manage": { "applications": [...] } }, "profile": { "write": { "applications": [...]
} } } } ``` Notes: * For every role there can be only one list of application names for the write profile privilege; the list supports wildcards but does not support exclusions. * There is no validation that the privilege refers to valid application names (e.g. an empty application name). --- docs/changelog/83728.yaml | 5 + .../authorization/built-in-roles.asciidoc | 9 +- .../authorization/managing-roles.asciidoc | 21 +- .../xpack/core/XPackClientPlugin.java | 5 + .../profile/UpdateProfileDataRequest.java | 4 +- .../core/security/authz/RoleDescriptor.java | 22 +- .../ConfigurableClusterPrivilege.java | 3 +- .../ConfigurableClusterPrivileges.java | 142 ++++++++- .../authz/store/ReservedRolesStore.java | 8 +- .../action/role/PutRoleRequestTests.java | 33 ++- .../ConfigurableClusterPrivilegesTests.java | 14 +- .../ManageApplicationPrivilegesTests.java | 2 +- .../WriteProfileDataPrivilegesTests.java | 270 ++++++++++++++++++ .../authz/store/ReservedRolesStoreTests.java | 87 +++++- .../user/RestGetUserPrivilegesAction.java | 8 +- .../support/SecuritySystemIndices.java | 21 ++ .../audit/logfile/LoggingAuditTrailTests.java | 16 +- .../security/authz/RoleDescriptorTests.java | 167 ++++++++++- .../RestGetUserPrivilegesActionTests.java | 14 +- .../security/audit/logfile/audited_roles.txt | 4 +- 20 files changed, 796 insertions(+), 59 deletions(-) create mode 100644 docs/changelog/83728.yaml create mode 100644 x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/WriteProfileDataPrivilegesTests.java diff --git a/docs/changelog/83728.yaml b/docs/changelog/83728.yaml new file mode 100644 index 0000000000000..c8f78a5497e1d --- /dev/null +++ b/docs/changelog/83728.yaml @@ -0,0 +1,5 @@ +pr: 83728 +summary: Security global privilege for updating profile data of applications +area: Authorization +type: enhancement +issues: [] diff --git a/x-pack/docs/en/security/authorization/built-in-roles.asciidoc b/x-pack/docs/en/security/authorization/built-in-roles.asciidoc index 4f589d50bafb4..ad317e276d960 100644 --- a/x-pack/docs/en/security/authorization/built-in-roles.asciidoc +++ b/x-pack/docs/en/security/authorization/built-in-roles.asciidoc @@ -84,8 +84,11 @@ This role does not have access to editing tools in {kib}. [[built-in-roles-kibana-system]] `kibana_system` :: Grants access necessary for the {kib} system user to read from and write to the {kib} indices, manage index templates and tokens, and check the availability of -the {es} cluster. This role grants read access to the `.monitoring-*` indices -and read and write access to the `.reporting-*` indices. For more information, +the {es} cluster. It also permits +<>, +as well as updating user profile data for the `kibana-*` namespace. +This role grants read access to the `.monitoring-*` indices and read and write +access to the `.reporting-*` indices. For more information, see {kibana-ref}/using-kibana-with-security.html[Configuring Security in {kib}]. + NOTE: This role should not be assigned to users as the granted permissions may @@ -172,7 +175,7 @@ Grants full access to cluster management and data indices. This role also grants direct read-only access to restricted indices like `.security`. A user with the `superuser` role can <> any other user in the system. + -On {ecloud}, all standard users, including those with the `superuser` role are +On {ecloud}, all standard users, including those with the `superuser` role are
+ IMPORTANT: This role can manage security and create roles with unlimited privileges. diff --git a/x-pack/docs/en/security/authorization/managing-roles.asciidoc b/x-pack/docs/en/security/authorization/managing-roles.asciidoc index 19ffea585bc44..49f068ad7bfd1 100644 --- a/x-pack/docs/en/security/authorization/managing-roles.asciidoc +++ b/x-pack/docs/en/security/authorization/managing-roles.asciidoc @@ -101,25 +101,32 @@ multiple data streams, indices, and aliases. [[roles-global-priv]] ==== Global Privileges -The following describes the structure of a global privileges entry: +The following describes the structure of the global privileges entry: [source,js] ------- { "application": { "manage": { <1> - "applications": [ ... ] <2> + "applications": [ ... ] <2> + } + }, + "profile": { + "write": { <3> + "applications": [ ... ] <4> } } } ------- // NOTCONSOLE -<1> The only supported global privilege is the ability to manage application - privileges +<1> The privilege for the ability to manage application privileges <2> The list of application names that may be managed. This list supports wildcards (e.g. `"myapp-*"`) and regular expressions (e.g. `"/app[0-9]*/"`) +<3> The privilege for the ability to write the `access` and `data` of any user profile +<4> The list of names, wildcards and regular expressions to which the write +privilege is restricted to [[roles-application-priv]] ==== Application Privileges @@ -195,7 +202,7 @@ see <>. === Role management UI You can manage users and roles easily in {kib}. To -manage roles, log in to {kib} and go to *Management / Security / Roles*. +manage roles, log in to {kib} and go to *Management / Security / Roles*. [discrete] [[roles-management-api]] @@ -203,8 +210,8 @@ manage roles, log in to {kib} and go to *Management / Security / Roles*. The _Role Management APIs_ enable you to add, update, remove and retrieve roles dynamically. When you use the APIs to manage roles in the `native` realm, the -roles are stored in an internal {es} index. For more information and examples, -see <>. +roles are stored in an internal {es} index. For more information and examples, +see <>. 
[discrete] [[roles-management-file]] diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackClientPlugin.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackClientPlugin.java index 4bd7ce835dcdb..3bdb76c15ac35 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackClientPlugin.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackClientPlugin.java @@ -445,6 +445,11 @@ public List getNamedWriteables() { ConfigurableClusterPrivileges.ManageApplicationPrivileges.WRITEABLE_NAME, ConfigurableClusterPrivileges.ManageApplicationPrivileges::createFrom ), + new NamedWriteableRegistry.Entry( + ConfigurableClusterPrivilege.class, + ConfigurableClusterPrivileges.WriteProfileDataPrivileges.WRITEABLE_NAME, + ConfigurableClusterPrivileges.WriteProfileDataPrivileges::createFrom + ), // security : role-mappings new NamedWriteableRegistry.Entry(RoleMapperExpression.class, AllExpression.NAME, AllExpression::new), new NamedWriteableRegistry.Entry(RoleMapperExpression.class, AnyExpression.NAME, AnyExpression::new), diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/profile/UpdateProfileDataRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/profile/UpdateProfileDataRequest.java index 10072f4c51a36..d007239755539 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/profile/UpdateProfileDataRequest.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/profile/UpdateProfileDataRequest.java @@ -81,7 +81,7 @@ public RefreshPolicy getRefreshPolicy() { return refreshPolicy; } - public Set applicationNames() { + public Set getApplicationNames() { final Set names = new HashSet<>(access.keySet()); names.addAll(data.keySet()); return Set.copyOf(names); @@ -90,7 +90,7 @@ public Set applicationNames() { @Override public ActionRequestValidationException validate() { ActionRequestValidationException validationException = null; - final Set applicationNames = applicationNames(); + final Set applicationNames = getApplicationNames(); if (applicationNames.isEmpty()) { validationException = addValidationError("update request is empty", validationException); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/RoleDescriptor.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/RoleDescriptor.java index efbf987637aff..63d7027cbe78f 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/RoleDescriptor.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/RoleDescriptor.java @@ -37,6 +37,7 @@ import java.util.Arrays; import java.util.Collection; import java.util.Collections; +import java.util.Comparator; import java.util.List; import java.util.Map; import java.util.Objects; @@ -110,9 +111,7 @@ public RoleDescriptor( ) { this.name = name; this.clusterPrivileges = clusterPrivileges != null ? clusterPrivileges : Strings.EMPTY_ARRAY; - this.configurableClusterPrivileges = configurableClusterPrivileges != null - ? configurableClusterPrivileges - : ConfigurableClusterPrivileges.EMPTY_ARRAY; + this.configurableClusterPrivileges = sortConfigurableClusterPrivileges(configurableClusterPrivileges); this.indicesPrivileges = indicesPrivileges != null ? indicesPrivileges : IndicesPrivileges.NONE; this.applicationPrivileges = applicationPrivileges != null ? 
applicationPrivileges : ApplicationResourcePrivileges.NONE; this.runAs = runAs != null ? runAs : Strings.EMPTY_ARRAY; @@ -669,6 +668,23 @@ private static RoleDescriptor.IndicesPrivileges parseIndex(String roleName, XCon .build(); } + private static ConfigurableClusterPrivilege[] sortConfigurableClusterPrivileges( + ConfigurableClusterPrivilege[] configurableClusterPrivileges + ) { + if (null == configurableClusterPrivileges) { + return ConfigurableClusterPrivileges.EMPTY_ARRAY; + } else if (configurableClusterPrivileges.length < 2) { + return configurableClusterPrivileges; + } else { + ConfigurableClusterPrivilege[] configurableClusterPrivilegesCopy = Arrays.copyOf( + configurableClusterPrivileges, + configurableClusterPrivileges.length + ); + Arrays.sort(configurableClusterPrivilegesCopy, Comparator.comparingInt(o -> o.getCategory().ordinal())); + return configurableClusterPrivilegesCopy; + } + } + private static void checkIfExceptFieldsIsSubsetOfGrantedFields(String roleName, String[] grantedFields, String[] deniedFields) { try { FieldPermissions.buildPermittedFieldsAutomaton(grantedFields, deniedFields); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ConfigurableClusterPrivilege.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ConfigurableClusterPrivilege.java index ad5c7c436ac74..f9722ca42f20d 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ConfigurableClusterPrivilege.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ConfigurableClusterPrivilege.java @@ -40,7 +40,8 @@ public interface ConfigurableClusterPrivilege extends NamedWriteable, ToXContent * from the categories. 
*/ enum Category { - APPLICATION(new ParseField("application")); + APPLICATION(new ParseField("application")), + PROFILE(new ParseField("profile")); public final ParseField field; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ConfigurableClusterPrivileges.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ConfigurableClusterPrivileges.java index 82f7a6062013a..8b46d97d8c843 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ConfigurableClusterPrivileges.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/ConfigurableClusterPrivileges.java @@ -18,6 +18,8 @@ import org.elasticsearch.xcontent.XContentParseException; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xpack.core.security.action.privilege.ApplicationPrivilegesRequest; +import org.elasticsearch.xpack.core.security.action.profile.UpdateProfileDataAction; +import org.elasticsearch.xpack.core.security.action.profile.UpdateProfileDataRequest; import org.elasticsearch.xpack.core.security.authz.permission.ClusterPermission; import org.elasticsearch.xpack.core.security.authz.privilege.ConfigurableClusterPrivilege.Category; import org.elasticsearch.xpack.core.security.support.StringMatcher; @@ -30,6 +32,7 @@ import java.util.Collections; import java.util.LinkedHashSet; import java.util.List; +import java.util.Map; import java.util.Set; import java.util.function.Predicate; @@ -94,13 +97,25 @@ public static List parse(XContentParser parser) th while (parser.nextToken() != XContentParser.Token.END_OBJECT) { expectedToken(parser.currentToken(), parser, XContentParser.Token.FIELD_NAME); - expectFieldName(parser, Category.APPLICATION.field); - expectedToken(parser.nextToken(), parser, XContentParser.Token.START_OBJECT); - expectedToken(parser.nextToken(), parser, XContentParser.Token.FIELD_NAME); + expectFieldName(parser, Category.APPLICATION.field, Category.PROFILE.field); + if (Category.APPLICATION.field.match(parser.currentName(), parser.getDeprecationHandler())) { + expectedToken(parser.nextToken(), parser, XContentParser.Token.START_OBJECT); + while (parser.nextToken() != XContentParser.Token.END_OBJECT) { + expectedToken(parser.currentToken(), parser, XContentParser.Token.FIELD_NAME); - expectFieldName(parser, ManageApplicationPrivileges.Fields.MANAGE); - privileges.add(ManageApplicationPrivileges.parse(parser)); - expectedToken(parser.nextToken(), parser, XContentParser.Token.END_OBJECT); + expectFieldName(parser, ManageApplicationPrivileges.Fields.MANAGE); + privileges.add(ManageApplicationPrivileges.parse(parser)); + } + } else { + assert Category.PROFILE.field.match(parser.currentName(), parser.getDeprecationHandler()); + expectedToken(parser.nextToken(), parser, XContentParser.Token.START_OBJECT); + while (parser.nextToken() != XContentParser.Token.END_OBJECT) { + expectedToken(parser.currentToken(), parser, XContentParser.Token.FIELD_NAME); + + expectFieldName(parser, WriteProfileDataPrivileges.Fields.WRITE); + privileges.add(WriteProfileDataPrivileges.parse(parser)); + } + } } return privileges; @@ -131,6 +146,114 @@ private static void expectFieldName(XContentParser parser, ParseField... fields) } } + /** + * The {@link WriteProfileDataPrivileges} privilege is a {@link ConfigurableClusterPrivilege} that grants the + * ability to write the {@code data} and {@code access} sections of any user profile. 
+ * The privilege is namespace configurable such that only specific top-level keys in the {@code data} and {@code access} + * dictionary permit writes (wildcards and regexps are supported, but exclusions are not). + */ + public static class WriteProfileDataPrivileges implements ConfigurableClusterPrivilege { + public static final String WRITEABLE_NAME = "write-profile-data-privileges"; + + private final Set applicationNames; + private final Predicate applicationPredicate; + private final Predicate requestPredicate; + + public WriteProfileDataPrivileges(Set applicationNames) { + this.applicationNames = Collections.unmodifiableSet(applicationNames); + this.applicationPredicate = StringMatcher.of(applicationNames); + this.requestPredicate = request -> { + if (request instanceof final UpdateProfileDataRequest updateProfileRequest) { + assert null == updateProfileRequest.validate(); + final Collection requestApplicationNames = updateProfileRequest.getApplicationNames(); + return requestApplicationNames.stream().allMatch(application -> applicationPredicate.test(application)); + } + return false; + }; + } + + @Override + public Category getCategory() { + return Category.PROFILE; + } + + public Collection getApplicationNames() { + return this.applicationNames; + } + + @Override + public String getWriteableName() { + return WRITEABLE_NAME; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeCollection(this.applicationNames, StreamOutput::writeString); + } + + public static WriteProfileDataPrivileges createFrom(StreamInput in) throws IOException { + final Set applications = in.readSet(StreamInput::readString); + return new WriteProfileDataPrivileges(applications); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + return builder.field(Fields.WRITE.getPreferredName(), Map.of(Fields.APPLICATIONS.getPreferredName(), applicationNames)); + } + + public static WriteProfileDataPrivileges parse(XContentParser parser) throws IOException { + expectedToken(parser.currentToken(), parser, XContentParser.Token.FIELD_NAME); + expectFieldName(parser, Fields.WRITE); + expectedToken(parser.nextToken(), parser, XContentParser.Token.START_OBJECT); + expectedToken(parser.nextToken(), parser, XContentParser.Token.FIELD_NAME); + expectFieldName(parser, Fields.APPLICATIONS); + expectedToken(parser.nextToken(), parser, XContentParser.Token.START_ARRAY); + final String[] applications = XContentUtils.readStringArray(parser, false); + expectedToken(parser.nextToken(), parser, XContentParser.Token.END_OBJECT); + return new WriteProfileDataPrivileges(new LinkedHashSet<>(Arrays.asList(applications))); + } + + @Override + public String toString() { + return "{" + + getCategory() + + ":" + + Fields.WRITE.getPreferredName() + + ":" + + Fields.APPLICATIONS.getPreferredName() + + "=" + + Strings.collectionToDelimitedString(applicationNames, ",") + + "}"; + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + final WriteProfileDataPrivileges that = (WriteProfileDataPrivileges) o; + return this.applicationNames.equals(that.applicationNames); + } + + @Override + public int hashCode() { + return applicationNames.hashCode(); + } + + @Override + public ClusterPermission.Builder buildPermission(ClusterPermission.Builder builder) { + return builder.add(this, Set.of(UpdateProfileDataAction.NAME), requestPredicate); + } + + private 
interface Fields { + ParseField WRITE = new ParseField("write"); + ParseField APPLICATIONS = new ParseField("applications"); + } + } + /** * The {@code ManageApplicationPrivileges} privilege is a {@link ConfigurableClusterPrivilege} that grants the * ability to execute actions related to the management of application privileges (Get, Put, Delete) for a subset @@ -164,7 +287,7 @@ public Category getCategory() { } public Collection getApplicationNames() { - return Collections.unmodifiableCollection(this.applicationNames); + return this.applicationNames; } @Override @@ -184,10 +307,7 @@ public static ManageApplicationPrivileges createFrom(StreamInput in) throws IOEx @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - return builder.field( - Fields.MANAGE.getPreferredName(), - Collections.singletonMap(Fields.APPLICATIONS.getPreferredName(), applicationNames) - ); + return builder.field(Fields.MANAGE.getPreferredName(), Map.of(Fields.APPLICATIONS.getPreferredName(), applicationNames)); } public static ManageApplicationPrivileges parse(XContentParser parser) throws IOException { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStore.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStore.java index 3e7fe85c132bd..e336c1ee32460 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStore.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStore.java @@ -22,10 +22,10 @@ import org.elasticsearch.xpack.core.security.action.privilege.GetBuiltinPrivilegesAction; import org.elasticsearch.xpack.core.security.action.profile.ActivateProfileAction; import org.elasticsearch.xpack.core.security.action.profile.GetProfileAction; -import org.elasticsearch.xpack.core.security.action.profile.UpdateProfileDataAction; import org.elasticsearch.xpack.core.security.authz.RoleDescriptor; import org.elasticsearch.xpack.core.security.authz.privilege.ConfigurableClusterPrivilege; import org.elasticsearch.xpack.core.security.authz.privilege.ConfigurableClusterPrivileges.ManageApplicationPrivileges; +import org.elasticsearch.xpack.core.security.authz.privilege.ConfigurableClusterPrivileges.WriteProfileDataPrivileges; import org.elasticsearch.xpack.core.security.support.MetadataUtils; import org.elasticsearch.xpack.core.security.user.KibanaSystemUser; import org.elasticsearch.xpack.core.security.user.UsernamesField; @@ -667,8 +667,6 @@ public static RoleDescriptor kibanaSystemRoleDescriptor(String name) { "delegate_pki", GetProfileAction.NAME, ActivateProfileAction.NAME, - // TODO: this cluster action will be replaced with a special privilege that grants write access to a subset of namespaces - UpdateProfileDataAction.NAME, // To facilitate ML UI functionality being controlled using Kibana security privileges "manage_ml", // The symbolic constant for this one is in SecurityActionMapper, so not accessible from X-Pack core @@ -780,7 +778,9 @@ public static RoleDescriptor kibanaSystemRoleDescriptor(String name) { .privileges("create_index", "delete_index", "read", "index") .build(), }, null, - new ConfigurableClusterPrivilege[] { new ManageApplicationPrivileges(Collections.singleton("kibana-*")) }, + new ConfigurableClusterPrivilege[] { + new ManageApplicationPrivileges(Set.of("kibana-*")), + new WriteProfileDataPrivileges(Set.of("kibana-*")) }, null, 
MetadataUtils.DEFAULT_RESERVED_METADATA, null diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/role/PutRoleRequestTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/role/PutRoleRequestTests.java index 00d5f3d9aa69f..9ed8c2e2e99ca 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/role/PutRoleRequestTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/role/PutRoleRequestTests.java @@ -21,6 +21,7 @@ import org.elasticsearch.test.VersionUtils; import org.elasticsearch.xpack.core.XPackClientPlugin; import org.elasticsearch.xpack.core.security.authz.RoleDescriptor.ApplicationResourcePrivileges; +import org.elasticsearch.xpack.core.security.authz.privilege.ConfigurableClusterPrivilege; import org.elasticsearch.xpack.core.security.authz.privilege.ConfigurableClusterPrivileges; import java.io.IOException; @@ -183,10 +184,34 @@ private PutRoleRequest buildRandomRequest() { .build(); } request.addApplicationPrivileges(applicationPrivileges); - - if (randomBoolean()) { - final String[] appNames = randomArray(1, 4, String[]::new, stringWithInitialLowercase); - request.conditionalCluster(new ConfigurableClusterPrivileges.ManageApplicationPrivileges(Sets.newHashSet(appNames))); + switch (randomIntBetween(0, 3)) { + case 0: + request.conditionalCluster(new ConfigurableClusterPrivilege[0]); + break; + case 1: + request.conditionalCluster( + new ConfigurableClusterPrivileges.ManageApplicationPrivileges( + Sets.newHashSet(randomArray(0, 3, String[]::new, stringWithInitialLowercase)) + ) + ); + break; + case 2: + request.conditionalCluster( + new ConfigurableClusterPrivileges.WriteProfileDataPrivileges( + Sets.newHashSet(randomArray(0, 3, String[]::new, stringWithInitialLowercase)) + ) + ); + break; + case 3: + request.conditionalCluster( + new ConfigurableClusterPrivileges.WriteProfileDataPrivileges( + Sets.newHashSet(randomArray(0, 3, String[]::new, stringWithInitialLowercase)) + ), + new ConfigurableClusterPrivileges.ManageApplicationPrivileges( + Sets.newHashSet(randomArray(0, 3, String[]::new, stringWithInitialLowercase)) + ) + ); + break; } request.runAs(generateRandomStringArray(4, 3, false, true)); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/ConfigurableClusterPrivilegesTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/ConfigurableClusterPrivilegesTests.java index 034a035f12cbf..9b7443080639d 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/ConfigurableClusterPrivilegesTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/ConfigurableClusterPrivilegesTests.java @@ -62,10 +62,14 @@ public void testGenerateAndParseXContent() throws Exception { } private ConfigurableClusterPrivilege[] buildSecurityPrivileges() { - return buildSecurityPrivileges(randomIntBetween(4, 7)); - } - - private ConfigurableClusterPrivilege[] buildSecurityPrivileges(int applicationNameLength) { - return new ConfigurableClusterPrivilege[] { ManageApplicationPrivilegesTests.buildPrivileges(applicationNameLength) }; + return switch (randomIntBetween(0, 3)) { + case 0 -> new ConfigurableClusterPrivilege[0]; + case 1 -> new ConfigurableClusterPrivilege[] { ManageApplicationPrivilegesTests.buildPrivileges() }; + case 2 -> new ConfigurableClusterPrivilege[] { 
WriteProfileDataPrivilegesTests.buildPrivileges() }; + case 3 -> new ConfigurableClusterPrivilege[] { + ManageApplicationPrivilegesTests.buildPrivileges(), + WriteProfileDataPrivilegesTests.buildPrivileges() }; + default -> throw new IllegalStateException("Unexpected value"); + }; } } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/ManageApplicationPrivilegesTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/ManageApplicationPrivilegesTests.java index e396460e88f79..421b76d089c40 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/ManageApplicationPrivilegesTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/ManageApplicationPrivilegesTests.java @@ -149,7 +149,7 @@ private ManageApplicationPrivileges clone(ManageApplicationPrivileges original) return new ManageApplicationPrivileges(new LinkedHashSet<>(original.getApplicationNames())); } - private ManageApplicationPrivileges buildPrivileges() { + static ManageApplicationPrivileges buildPrivileges() { return buildPrivileges(randomIntBetween(4, 7)); } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/WriteProfileDataPrivilegesTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/WriteProfileDataPrivilegesTests.java new file mode 100644 index 0000000000000..8c045e001f2b2 --- /dev/null +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/privilege/WriteProfileDataPrivilegesTests.java @@ -0,0 +1,270 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.core.security.authz.privilege; + +import org.elasticsearch.ElasticsearchParseException; +import org.elasticsearch.action.support.WriteRequest; +import org.elasticsearch.common.io.stream.BytesStreamOutput; +import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.util.set.Sets; +import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.EqualsHashCodeTestUtils; +import org.elasticsearch.transport.TransportRequest; +import org.elasticsearch.xcontent.NamedXContentRegistry; +import org.elasticsearch.xcontent.ToXContent; +import org.elasticsearch.xcontent.XContent; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentParseException; +import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.core.XPackClientPlugin; +import org.elasticsearch.xpack.core.security.action.profile.ActivateProfileAction; +import org.elasticsearch.xpack.core.security.action.profile.GetProfileAction; +import org.elasticsearch.xpack.core.security.action.profile.SearchProfilesAction; +import org.elasticsearch.xpack.core.security.action.profile.UpdateProfileDataAction; +import org.elasticsearch.xpack.core.security.action.profile.UpdateProfileDataRequest; +import org.elasticsearch.xpack.core.security.authc.Authentication; +import org.elasticsearch.xpack.core.security.authz.permission.ClusterPermission; + +import java.io.ByteArrayInputStream; +import java.io.ByteArrayOutputStream; +import java.nio.charset.StandardCharsets; +import java.util.Arrays; +import java.util.HashMap; +import java.util.LinkedHashSet; +import java.util.Map; +import java.util.Set; + +import static org.elasticsearch.xcontent.DeprecationHandler.THROW_UNSUPPORTED_OPERATION; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.notNullValue; +import static org.mockito.Mockito.mock; + +public class WriteProfileDataPrivilegesTests extends ESTestCase { + + public void testSerialization() throws Exception { + final ConfigurableClusterPrivileges.WriteProfileDataPrivileges original = buildPrivileges(); + try (BytesStreamOutput out = new BytesStreamOutput()) { + original.writeTo(out); + final NamedWriteableRegistry registry = new NamedWriteableRegistry(new XPackClientPlugin().getNamedWriteables()); + try (StreamInput in = new NamedWriteableAwareStreamInput(out.bytes().streamInput(), registry)) { + final ConfigurableClusterPrivileges.WriteProfileDataPrivileges copy = + ConfigurableClusterPrivileges.WriteProfileDataPrivileges.createFrom(in); + assertThat(copy, equalTo(original)); + assertThat(original, equalTo(copy)); + } + } + } + + public void testGenerateAndParseXContent() throws Exception { + final XContent xContent = randomFrom(XContentType.values()).xContent(); + try (ByteArrayOutputStream out = new ByteArrayOutputStream()) { + final XContentBuilder builder = new XContentBuilder(xContent, out); + + final ConfigurableClusterPrivileges.WriteProfileDataPrivileges original = buildPrivileges(); + builder.startObject(); + original.toXContent(builder, ToXContent.EMPTY_PARAMS); + builder.endObject(); + builder.flush(); + + final byte[] bytes = out.toByteArray(); + try (XContentParser parser = 
xContent.createParser(NamedXContentRegistry.EMPTY, THROW_UNSUPPORTED_OPERATION, bytes)) { + assertThat(parser.nextToken(), equalTo(XContentParser.Token.START_OBJECT)); + assertThat(parser.nextToken(), equalTo(XContentParser.Token.FIELD_NAME)); + final ConfigurableClusterPrivileges.WriteProfileDataPrivileges clone = + ConfigurableClusterPrivileges.WriteProfileDataPrivileges.parse(parser); + assertThat(parser.nextToken(), equalTo(XContentParser.Token.END_OBJECT)); + + assertThat(clone, equalTo(original)); + assertThat(original, equalTo(clone)); + } + } + } + + public void testActionAndRequestPredicate() { + final String prefix = randomAlphaOfLengthBetween(0, 3); + final String name = randomAlphaOfLengthBetween(0, 5); + String other = randomAlphaOfLengthBetween(0, 7); + if (other.startsWith(prefix) || other.equals(name)) { + other = null; + } + final ConfigurableClusterPrivileges.WriteProfileDataPrivileges writeProfileDataPrivileges = + new ConfigurableClusterPrivileges.WriteProfileDataPrivileges(Sets.newHashSet(prefix + "*", name)); + final ClusterPermission writeProfileDataPermission = writeProfileDataPrivileges.buildPermission(ClusterPermission.builder()) + .build(); + assertThat(writeProfileDataPermission, notNullValue()); + + final Authentication authentication = mock(Authentication.class); + // request application name matches privilege wildcard + UpdateProfileDataRequest updateProfileDataRequest = randomBoolean() + ? newUpdateProfileDataRequest(Set.of(prefix + randomAlphaOfLengthBetween(0, 2)), Set.of()) + : newUpdateProfileDataRequest(Set.of(), Set.of(prefix + randomAlphaOfLengthBetween(0, 2))); + assertTrue( + writeProfileDataPermission.check("cluster:admin/xpack/security/profile/put/data", updateProfileDataRequest, authentication) + ); + // request application name matches privilege name + updateProfileDataRequest = randomBoolean() + ? newUpdateProfileDataRequest(Set.of(name), Set.of()) + : newUpdateProfileDataRequest(Set.of(), Set.of(name)); + assertTrue( + writeProfileDataPermission.check("cluster:admin/xpack/security/profile/put/data", updateProfileDataRequest, authentication) + ); + // different action name + assertFalse( + writeProfileDataPermission.check( + randomFrom(ActivateProfileAction.NAME, GetProfileAction.NAME, SearchProfilesAction.NAME), + updateProfileDataRequest, + authentication + ) + ); + if (other != null) { + updateProfileDataRequest = randomBoolean() + ? newUpdateProfileDataRequest( + randomBoolean() ? Set.of(prefix + randomAlphaOfLengthBetween(0, 2), other) : Set.of(other), + Set.of() + ) + : newUpdateProfileDataRequest( + Set.of(), + randomBoolean() ? Set.of(prefix + randomAlphaOfLengthBetween(0, 2), other) : Set.of(other) + ); + assertFalse(writeProfileDataPermission.check(UpdateProfileDataAction.NAME, updateProfileDataRequest, authentication)); + updateProfileDataRequest = randomBoolean() + ? newUpdateProfileDataRequest(randomBoolean() ? Set.of(name, other) : Set.of(other), Set.of()) + : newUpdateProfileDataRequest(Set.of(), randomBoolean() ? 
Set.of(name, other) : Set.of(other)); + assertFalse(writeProfileDataPermission.check(UpdateProfileDataAction.NAME, updateProfileDataRequest, authentication)); + } + assertFalse(writeProfileDataPermission.check(UpdateProfileDataAction.NAME, mock(TransportRequest.class), authentication)); + } + + public void testParseAbnormals() throws Exception { + final String nullApplications = "{\"write\":{\"applications\":null}}"; + try ( + XContentParser parser = XContentType.JSON.xContent() + .createParser( + NamedXContentRegistry.EMPTY, + LoggingDeprecationHandler.INSTANCE, + new ByteArrayInputStream(nullApplications.getBytes(StandardCharsets.UTF_8)) + ) + ) { + parser.nextToken(); // { + parser.nextToken(); // "write" field + expectThrows(XContentParseException.class, () -> ConfigurableClusterPrivileges.WriteProfileDataPrivileges.parse(parser)); + parser.nextToken(); + } + final String emptyApplications = "{\"write\":{\"applications\":[]}}"; + try ( + XContentParser parser = XContentType.JSON.xContent() + .createParser( + NamedXContentRegistry.EMPTY, + LoggingDeprecationHandler.INSTANCE, + new ByteArrayInputStream(emptyApplications.getBytes(StandardCharsets.UTF_8)) + ) + ) { + parser.nextToken(); // { + parser.nextToken(); // "write" field + ConfigurableClusterPrivileges.WriteProfileDataPrivileges priv = ConfigurableClusterPrivileges.WriteProfileDataPrivileges.parse( + parser + ); + parser.nextToken(); + assertThat(priv.getApplicationNames().size(), is(0)); + UpdateProfileDataRequest updateProfileDataRequest = randomBoolean() + ? newUpdateProfileDataRequest(Set.of(randomAlphaOfLengthBetween(0, 2)), Set.of()) + : newUpdateProfileDataRequest(Set.of(), Set.of(randomAlphaOfLengthBetween(0, 2))); + ClusterPermission perm = priv.buildPermission(ClusterPermission.builder()).build(); + assertFalse(perm.check(UpdateProfileDataAction.NAME, updateProfileDataRequest, mock(Authentication.class))); + } + final String aNullApplication = "{\"write\":{\"applications\":[null]}}"; + try ( + XContentParser parser = XContentType.JSON.xContent() + .createParser( + NamedXContentRegistry.EMPTY, + LoggingDeprecationHandler.INSTANCE, + new ByteArrayInputStream(aNullApplication.getBytes(StandardCharsets.UTF_8)) + ) + ) { + parser.nextToken(); // { + parser.nextToken(); // "write" field + expectThrows(ElasticsearchParseException.class, () -> ConfigurableClusterPrivileges.WriteProfileDataPrivileges.parse(parser)); + parser.nextToken(); + } + final String anEmptyApplication = "{\"write\":{\"applications\":[\"\"]}}"; + try ( + XContentParser parser = XContentType.JSON.xContent() + .createParser( + NamedXContentRegistry.EMPTY, + LoggingDeprecationHandler.INSTANCE, + new ByteArrayInputStream(anEmptyApplication.getBytes(StandardCharsets.UTF_8)) + ) + ) { + parser.nextToken(); // { + parser.nextToken(); // "write" field + ConfigurableClusterPrivileges.WriteProfileDataPrivileges priv = ConfigurableClusterPrivileges.WriteProfileDataPrivileges.parse( + parser + ); + parser.nextToken(); + assertThat(priv.getApplicationNames().size(), is(1)); + assertThat(priv.getApplicationNames().stream().findFirst().get(), is("")); + UpdateProfileDataRequest updateProfileDataRequest = randomBoolean() + ? 
newUpdateProfileDataRequest(Set.of(randomAlphaOfLengthBetween(1, 2)), Set.of())
+                : newUpdateProfileDataRequest(Set.of(), Set.of(randomAlphaOfLengthBetween(1, 2)));
+            ClusterPermission perm = priv.buildPermission(ClusterPermission.builder()).build();
+            assertFalse(perm.check(UpdateProfileDataAction.NAME, updateProfileDataRequest, mock(Authentication.class)));
+            updateProfileDataRequest = randomBoolean()
+                ? newUpdateProfileDataRequest(Set.of(""), Set.of())
+                : newUpdateProfileDataRequest(Set.of(), Set.of(""));
+            perm = priv.buildPermission(ClusterPermission.builder()).build();
+            assertTrue(perm.check("cluster:admin/xpack/security/profile/put/data", updateProfileDataRequest, mock(Authentication.class)));
+        }
+    }
+
+    public void testEqualsAndHashCode() {
+        final int applicationNameLength = randomIntBetween(4, 7);
+        final ConfigurableClusterPrivileges.WriteProfileDataPrivileges privileges = buildPrivileges(applicationNameLength);
+        final EqualsHashCodeTestUtils.MutateFunction<ConfigurableClusterPrivileges.WriteProfileDataPrivileges> mutate =
+            orig -> buildPrivileges(applicationNameLength + randomIntBetween(1, 3));
+        EqualsHashCodeTestUtils.checkEqualsAndHashCode(privileges, this::clone, mutate);
+    }
+
+    private UpdateProfileDataRequest newUpdateProfileDataRequest(Set<String> accessNames, Set<String> dataNames) {
+        Map<String, Object> access = new HashMap<>();
+        for (String accessName : accessNames) {
+            access.put(accessName, mock(Object.class));
+        }
+        Map<String, Object> data = new HashMap<>();
+        for (String dataName : dataNames) {
+            data.put(dataName, mock(Object.class));
+        }
+        return new UpdateProfileDataRequest(
+            randomAlphaOfLengthBetween(4, 8),
+            access,
+            data,
+            randomLong(),
+            randomLong(),
+            randomFrom(WriteRequest.RefreshPolicy.values())
+        );
+    }
+
+    private ConfigurableClusterPrivileges.WriteProfileDataPrivileges clone(
+        ConfigurableClusterPrivileges.WriteProfileDataPrivileges original
+    ) {
+        return new ConfigurableClusterPrivileges.WriteProfileDataPrivileges(new LinkedHashSet<>(original.getApplicationNames()));
+    }
+
+    static ConfigurableClusterPrivileges.WriteProfileDataPrivileges buildPrivileges() {
+        return buildPrivileges(randomIntBetween(4, 7));
+    }
+
+    static ConfigurableClusterPrivileges.WriteProfileDataPrivileges buildPrivileges(int applicationNameLength) {
+        Set<String> applicationNames = Sets.newHashSet(Arrays.asList(generateRandomStringArray(5, applicationNameLength, false, false)));
+        return new ConfigurableClusterPrivileges.WriteProfileDataPrivileges(applicationNames);
+    }
+}
diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java
index 4b166e15f8b56..b4aef65885f00 100644
--- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java
+++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java
@@ -55,6 +55,7 @@
 import org.elasticsearch.action.main.MainAction;
 import org.elasticsearch.action.search.MultiSearchAction;
 import org.elasticsearch.action.search.SearchAction;
+import org.elasticsearch.action.support.WriteRequest;
 import org.elasticsearch.action.update.UpdateAction;
 import org.elasticsearch.cluster.metadata.AliasMetadata;
 import org.elasticsearch.cluster.metadata.IndexAbstraction;
@@ -159,8 +160,12 @@
 import org.elasticsearch.xpack.core.security.action.privilege.PutPrivilegesAction;
 import org.elasticsearch.xpack.core.security.action.privilege.PutPrivilegesRequest;
 import
org.elasticsearch.xpack.core.security.action.profile.ActivateProfileAction; +import org.elasticsearch.xpack.core.security.action.profile.ActivateProfileRequest; import org.elasticsearch.xpack.core.security.action.profile.GetProfileAction; +import org.elasticsearch.xpack.core.security.action.profile.SearchProfilesAction; +import org.elasticsearch.xpack.core.security.action.profile.SearchProfilesRequest; import org.elasticsearch.xpack.core.security.action.profile.UpdateProfileDataAction; +import org.elasticsearch.xpack.core.security.action.profile.UpdateProfileDataRequest; import org.elasticsearch.xpack.core.security.action.role.PutRoleAction; import org.elasticsearch.xpack.core.security.action.saml.SamlAuthenticateAction; import org.elasticsearch.xpack.core.security.action.saml.SamlPrepareAuthenticationAction; @@ -209,6 +214,7 @@ import java.util.Collection; import java.util.Collections; import java.util.List; +import java.util.Map; import java.util.SortedMap; import static org.elasticsearch.xpack.core.security.test.TestRestrictedIndices.RESTRICTED_INDICES_AUTOMATON; @@ -451,7 +457,79 @@ public void testKibanaSystemRole() { // User profile assertThat(kibanaRole.cluster().check(GetProfileAction.NAME, request, authentication), is(true)); assertThat(kibanaRole.cluster().check(ActivateProfileAction.NAME, request, authentication), is(true)); - assertThat(kibanaRole.cluster().check(UpdateProfileDataAction.NAME, request, authentication), is(true)); + UpdateProfileDataRequest updateProfileDataRequest = randomBoolean() + ? new UpdateProfileDataRequest( + randomAlphaOfLength(10), + Map.of("kibana-" + randomAlphaOfLengthBetween(0, 4), mock(Object.class)), + Map.of(), + randomFrom(-1L, randomLong()), + randomFrom(-1L, randomLong()), + randomFrom(WriteRequest.RefreshPolicy.values()) + ) + : new UpdateProfileDataRequest( + randomAlphaOfLength(10), + Map.of(), + Map.of("kibana-" + randomAlphaOfLengthBetween(0, 4), mock(Object.class)), + randomFrom(-1L, randomLong()), + randomFrom(-1L, randomLong()), + randomFrom(WriteRequest.RefreshPolicy.values()) + ); + assertThat(kibanaRole.cluster().check(UpdateProfileDataAction.NAME, updateProfileDataRequest, authentication), is(true)); + updateProfileDataRequest = new UpdateProfileDataRequest( + randomAlphaOfLength(10), + Map.of("kibana-" + randomAlphaOfLengthBetween(0, 4), mock(Object.class)), + Map.of("kibana-" + randomAlphaOfLengthBetween(0, 4), mock(Object.class)), + randomFrom(-1L, randomLong()), + randomFrom(-1L, randomLong()), + randomFrom(WriteRequest.RefreshPolicy.values()) + ); + assertThat(kibanaRole.cluster().check(UpdateProfileDataAction.NAME, updateProfileDataRequest, authentication), is(true)); + updateProfileDataRequest = randomBoolean() + ? new UpdateProfileDataRequest( + randomAlphaOfLength(10), + Map.of(randomAlphaOfLengthBetween(0, 6), mock(Object.class)), + Map.of(), + randomFrom(-1L, randomLong()), + randomFrom(-1L, randomLong()), + randomFrom(WriteRequest.RefreshPolicy.values()) + ) + : new UpdateProfileDataRequest( + randomAlphaOfLength(10), + Map.of(), + Map.of(randomAlphaOfLengthBetween(0, 6), mock(Object.class)), + randomFrom(-1L, randomLong()), + randomFrom(-1L, randomLong()), + randomFrom(WriteRequest.RefreshPolicy.values()) + ); + assertThat(kibanaRole.cluster().check(UpdateProfileDataAction.NAME, updateProfileDataRequest, authentication), is(false)); + updateProfileDataRequest = randomBoolean() + ? 
new UpdateProfileDataRequest( + randomAlphaOfLength(10), + Map.of( + "kibana-" + randomAlphaOfLengthBetween(0, 4), + mock(Object.class), + randomAlphaOfLengthBetween(0, 6), + mock(Object.class) + ), + Map.of("kibana-" + randomAlphaOfLengthBetween(0, 4), mock(Object.class)), + randomFrom(-1L, randomLong()), + randomFrom(-1L, randomLong()), + randomFrom(WriteRequest.RefreshPolicy.values()) + ) + : new UpdateProfileDataRequest( + randomAlphaOfLength(10), + Map.of("kibana-" + randomAlphaOfLengthBetween(0, 4), mock(Object.class)), + Map.of( + "kibana-" + randomAlphaOfLengthBetween(0, 4), + mock(Object.class), + randomAlphaOfLengthBetween(0, 6), + mock(Object.class) + ), + randomFrom(-1L, randomLong()), + randomFrom(-1L, randomLong()), + randomFrom(WriteRequest.RefreshPolicy.values()) + ); + assertThat(kibanaRole.cluster().check(UpdateProfileDataAction.NAME, updateProfileDataRequest, authentication), is(false)); // Everything else assertThat(kibanaRole.runAs().check(randomAlphaOfLengthBetween(1, 12)), is(false)); @@ -1535,6 +1613,13 @@ public void testSuperuserRole() { assertThat(superuserRole.cluster().check(PutIndexTemplateAction.NAME, request, authentication), is(true)); assertThat(superuserRole.cluster().check(DelegatePkiAuthenticationAction.NAME, request, authentication), is(true)); assertThat(superuserRole.cluster().check("internal:admin/foo", request, authentication), is(false)); + assertThat( + superuserRole.cluster().check(UpdateProfileDataAction.NAME, mock(UpdateProfileDataRequest.class), authentication), + is(true) + ); + assertThat(superuserRole.cluster().check(GetProfileAction.NAME, mock(UpdateProfileDataRequest.class), authentication), is(true)); + assertThat(superuserRole.cluster().check(SearchProfilesAction.NAME, mock(SearchProfilesRequest.class), authentication), is(true)); + assertThat(superuserRole.cluster().check(ActivateProfileAction.NAME, mock(ActivateProfileRequest.class), authentication), is(true)); final Settings indexSettings = Settings.builder().put("index.version.created", Version.CURRENT).build(); final String internalSecurityIndex = randomFrom( diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/user/RestGetUserPrivilegesAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/user/RestGetUserPrivilegesAction.java index d4b1268c4334d..726744cc7b684 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/user/RestGetUserPrivilegesAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/user/RestGetUserPrivilegesAction.java @@ -24,12 +24,10 @@ import org.elasticsearch.xpack.core.security.action.user.GetUserPrivilegesResponse; import org.elasticsearch.xpack.core.security.authz.RoleDescriptor; import org.elasticsearch.xpack.core.security.authz.privilege.ConfigurableClusterPrivilege; -import org.elasticsearch.xpack.core.security.authz.privilege.ConfigurableClusterPrivileges; import org.elasticsearch.xpack.core.security.user.User; import org.elasticsearch.xpack.security.rest.action.SecurityBaseRestHandler; import java.io.IOException; -import java.util.Collections; import java.util.List; import static org.elasticsearch.rest.RestRequest.Method.GET; @@ -82,7 +80,11 @@ public RestResponse buildResponse(GetUserPrivilegesResponse response, XContentBu builder.field(RoleDescriptor.Fields.CLUSTER.getPreferredName(), response.getClusterPrivileges()); builder.startArray(RoleDescriptor.Fields.GLOBAL.getPreferredName()); 
for (ConfigurableClusterPrivilege ccp : response.getConditionalClusterPrivileges()) { - ConfigurableClusterPrivileges.toXContent(builder, ToXContent.EMPTY_PARAMS, Collections.singleton(ccp)); + builder.startObject(); + builder.startObject(ccp.getCategory().field.getPreferredName()); + ccp.toXContent(builder, ToXContent.EMPTY_PARAMS); + builder.endObject(); + builder.endObject(); } builder.endArray(); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecuritySystemIndices.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecuritySystemIndices.java index 27d509c6daabd..b48dcd31cd54a 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecuritySystemIndices.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/support/SecuritySystemIndices.java @@ -295,6 +295,27 @@ private XContentBuilder getMainIndexMappings() { builder.endObject(); } builder.endObject(); + builder.startObject("profile"); + { + builder.field("type", "object"); + builder.startObject("properties"); + { + builder.startObject("write"); + { + builder.field("type", "object"); + builder.startObject("properties"); + { + builder.startObject("applications"); + builder.field("type", "keyword"); + builder.endObject(); + } + builder.endObject(); + } + builder.endObject(); + } + builder.endObject(); + } + builder.endObject(); } builder.endObject(); } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/audit/logfile/LoggingAuditTrailTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/audit/logfile/LoggingAuditTrailTests.java index cc16043697a55..76989a969b273 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/audit/logfile/LoggingAuditTrailTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/audit/logfile/LoggingAuditTrailTests.java @@ -135,6 +135,7 @@ import java.util.Collections; import java.util.HashMap; import java.util.HashSet; +import java.util.LinkedHashSet; import java.util.List; import java.util.Locale; import java.util.Map; @@ -538,6 +539,18 @@ public void testSecurityConfigChangeEventFormattingForRoles() throws IOException metaMap, Map.of("ignored", 2) ); + RoleDescriptor roleDescriptor5 = new RoleDescriptor( + "role_descriptor5", + new String[] { "all" }, + new RoleDescriptor.IndicesPrivileges[0], + randomFrom((RoleDescriptor.ApplicationResourcePrivileges[]) null, new RoleDescriptor.ApplicationResourcePrivileges[0]), + new ConfigurableClusterPrivilege[] { + new ConfigurableClusterPrivileges.WriteProfileDataPrivileges(new LinkedHashSet<>(Arrays.asList("", "\""))), + new ConfigurableClusterPrivileges.ManageApplicationPrivileges(Set.of("\"")) }, + new String[] { "\"[a]/" }, + Map.of(), + Map.of() + ); String keyName = randomAlphaOfLength(4); TimeValue expiration = randomFrom(new TimeValue(randomNonNegativeLong(), randomFrom(TimeUnit.values())), null); List allTestRoleDescriptors = List.of( @@ -545,7 +558,8 @@ public void testSecurityConfigChangeEventFormattingForRoles() throws IOException roleDescriptor1, roleDescriptor2, roleDescriptor3, - roleDescriptor4 + roleDescriptor4, + roleDescriptor5 ); List keyRoleDescriptors = randomSubsetOf(allTestRoleDescriptors); StringBuilder roleDescriptorsStringBuilder = new StringBuilder().append("\"role_descriptors\":["); diff --git 
a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/RoleDescriptorTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/RoleDescriptorTests.java index e7359467da442..7ad3bad40fa46 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/RoleDescriptorTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/RoleDescriptorTests.java @@ -90,6 +90,7 @@ public void testToString() { ApplicationResourcePrivileges.builder().application("my_app").privileges("read", "write").resources("*").build() }; final ConfigurableClusterPrivilege[] configurableClusterPrivileges = new ConfigurableClusterPrivilege[] { + new ConfigurableClusterPrivileges.WriteProfileDataPrivileges(new LinkedHashSet<>(Arrays.asList("app*"))), new ConfigurableClusterPrivileges.ManageApplicationPrivileges(new LinkedHashSet<>(Arrays.asList("app01", "app02"))) }; RoleDescriptor descriptor = new RoleDescriptor( @@ -107,7 +108,7 @@ public void testToString() { descriptor.toString(), is( "Role[name=test, cluster=[all,none]" - + ", global=[{APPLICATION:manage:applications=app01,app02}]" + + ", global=[{APPLICATION:manage:applications=app01,app02},{PROFILE:write:applications=app*}]" + ", indicesPrivileges=[IndicesPrivileges[indices=[i1,i2], allowRestrictedIndices=[false], privileges=[read]" + ", field_security=[grant=[body,title], except=null], query={\"match_all\": {}}],]" + ", applicationPrivileges=[ApplicationResourcePrivileges[application=my_app, privileges=[read,write], resources=[*]],]" @@ -189,7 +190,11 @@ public void testParse() throws Exception { "privileges": [ "p1", "p2" ], "allow_restricted_indices": true } - ] + ], + "global": { + "profile": { + } + } }"""; rd = RoleDescriptor.parse("test", new BytesArray(q), false, XContentType.JSON); assertEquals("test", rd.getName()); @@ -239,6 +244,8 @@ public void testParse() throws Exception { "manage": { "applications": [ "kibana", "logstash" ] } + }, + "profile": { } } }"""; @@ -259,7 +266,7 @@ public void testParse() throws Exception { assertThat(rd.getApplicationPrivileges()[1].getApplication(), equalTo("app2")); assertThat(rd.getConditionalClusterPrivileges(), Matchers.arrayWithSize(1)); - final ConfigurableClusterPrivilege conditionalPrivilege = rd.getConditionalClusterPrivileges()[0]; + ConfigurableClusterPrivilege conditionalPrivilege = rd.getConditionalClusterPrivileges()[0]; assertThat(conditionalPrivilege.getCategory(), equalTo(ConfigurableClusterPrivilege.Category.APPLICATION)); assertThat(conditionalPrivilege, instanceOf(ConfigurableClusterPrivileges.ManageApplicationPrivileges.class)); assertThat( @@ -267,6 +274,45 @@ public void testParse() throws Exception { containsInAnyOrder("kibana", "logstash") ); + q = """ + { + "cluster": [ "manage" ], + "global": { + "profile": { + "write": { + "applications": [ "", "kibana-*" ] + } + }, + "application": { + "manage": { + "applications": [ "apm*", "kibana-1" ] + } + } + } + }"""; + rd = RoleDescriptor.parse("testUpdateProfile", new BytesArray(q), false, XContentType.JSON); + assertThat(rd.getName(), is("testUpdateProfile")); + assertThat(rd.getClusterPrivileges(), arrayContaining("manage")); + assertThat(rd.getIndicesPrivileges(), Matchers.emptyArray()); + assertThat(rd.getRunAs(), Matchers.emptyArray()); + assertThat(rd.getApplicationPrivileges(), Matchers.emptyArray()); + assertThat(rd.getConditionalClusterPrivileges(), Matchers.arrayWithSize(2)); + + conditionalPrivilege = 
rd.getConditionalClusterPrivileges()[0]; + assertThat(conditionalPrivilege.getCategory(), equalTo(ConfigurableClusterPrivilege.Category.APPLICATION)); + assertThat(conditionalPrivilege, instanceOf(ConfigurableClusterPrivileges.ManageApplicationPrivileges.class)); + assertThat( + ((ConfigurableClusterPrivileges.ManageApplicationPrivileges) conditionalPrivilege).getApplicationNames(), + containsInAnyOrder("apm*", "kibana-1") + ); + conditionalPrivilege = rd.getConditionalClusterPrivileges()[1]; + assertThat(conditionalPrivilege.getCategory(), equalTo(ConfigurableClusterPrivilege.Category.PROFILE)); + assertThat(conditionalPrivilege, instanceOf(ConfigurableClusterPrivileges.WriteProfileDataPrivileges.class)); + assertThat( + ((ConfigurableClusterPrivileges.WriteProfileDataPrivileges) conditionalPrivilege).getApplicationNames(), + containsInAnyOrder("", "kibana-*") + ); + q = """ {"applications": [{"application": "myapp", "resources": ["*"], "privileges": ["login" ]}] }"""; rd = RoleDescriptor.parse("test", new BytesArray(q), false, XContentType.JSON); @@ -444,6 +490,88 @@ public void testParseIndicesPrivilegesFailsWhenExceptFieldsAreNotSubsetOfGranted assertThat(epe, TestMatchers.throwableWithMessage(containsString("f3"))); } + public void testGlobalPrivilegesOrdering() throws IOException { + final String roleName = randomAlphaOfLengthBetween(3, 30); + final String[] applicationNames = generateRandomStringArray(3, randomIntBetween(0, 3), false, true); + final String[] profileNames = generateRandomStringArray(3, randomIntBetween(0, 3), false, true); + ConfigurableClusterPrivilege[] configurableClusterPrivileges = new ConfigurableClusterPrivilege[] { + new ConfigurableClusterPrivileges.WriteProfileDataPrivileges(Sets.newHashSet(profileNames)), + new ConfigurableClusterPrivileges.ManageApplicationPrivileges(Sets.newHashSet(applicationNames)) }; + RoleDescriptor role1 = new RoleDescriptor( + roleName, + new String[0], + new RoleDescriptor.IndicesPrivileges[0], + new RoleDescriptor.ApplicationResourcePrivileges[0], + configurableClusterPrivileges, + new String[0], + Map.of(), + Map.of() + ); + // swap + var temp = configurableClusterPrivileges[0]; + configurableClusterPrivileges[0] = configurableClusterPrivileges[1]; + configurableClusterPrivileges[1] = temp; + RoleDescriptor role2 = new RoleDescriptor( + roleName, + new String[0], + new RoleDescriptor.IndicesPrivileges[0], + new RoleDescriptor.ApplicationResourcePrivileges[0], + configurableClusterPrivileges, + new String[0], + Map.of(), + Map.of() + ); + assertThat(role2, is(role1)); + StringBuilder applicationNamesString = new StringBuilder(); + for (int i = 0; i < applicationNames.length; i++) { + if (i > 0) { + applicationNamesString.append(", "); + } + applicationNamesString.append("\"" + applicationNames[i] + "\""); + } + StringBuilder profileNamesString = new StringBuilder(); + for (int i = 0; i < profileNames.length; i++) { + if (i > 0) { + profileNamesString.append(", "); + } + profileNamesString.append("\"" + profileNames[i] + "\""); + } + String json = """ + { + "global": { + "profile": { + "write": { + "applications": [ %s ] + } + }, + "application": { + "manage": { + "applications": [ %s ] + } + } + } + }""".formatted(profileNamesString, applicationNamesString); + RoleDescriptor role3 = RoleDescriptor.parse(roleName, new BytesArray(json), false, XContentType.JSON); + assertThat(role3, is(role1)); + json = """ + { + "global": { + "application": { + "manage": { + "applications": [ %s ] + } + }, + "profile": { + "write": { + 
"applications": [ %s ] + } + } + } + }""".formatted(applicationNamesString, profileNamesString); + RoleDescriptor role4 = RoleDescriptor.parse(roleName, new BytesArray(json), false, XContentType.JSON); + assertThat(role4, is(role1)); + } + public void testIsEmpty() { assertTrue(new RoleDescriptor(randomAlphaOfLengthBetween(1, 10), null, null, null, null, null, null, null).isEmpty()); @@ -483,7 +611,9 @@ public void testIsEmpty() { booleans.get(3) ? new ConfigurableClusterPrivilege[0] : new ConfigurableClusterPrivilege[] { - new ConfigurableClusterPrivileges.ManageApplicationPrivileges(Collections.singleton("foo")) }, + randomBoolean() + ? new ConfigurableClusterPrivileges.ManageApplicationPrivileges(Collections.singleton("foo")) + : new ConfigurableClusterPrivileges.WriteProfileDataPrivileges(Collections.singleton("bar")) }, booleans.get(4) ? new String[0] : new String[] { "foo" }, booleans.get(5) ? new HashMap<>() : Collections.singletonMap("foo", "bar"), Collections.singletonMap("foo", "bar") @@ -536,15 +666,32 @@ private RoleDescriptor randomRoleDescriptor() { } applicationPrivileges[i] = builder.build(); } - final ConfigurableClusterPrivilege[] configurableClusterPrivileges; - if (randomBoolean()) { - configurableClusterPrivileges = new ConfigurableClusterPrivilege[] { + final ConfigurableClusterPrivilege[] configurableClusterPrivileges = switch (randomIntBetween(0, 4)) { + case 0 -> new ConfigurableClusterPrivilege[0]; + case 1 -> new ConfigurableClusterPrivilege[] { new ConfigurableClusterPrivileges.ManageApplicationPrivileges( Sets.newHashSet(generateRandomStringArray(3, randomIntBetween(4, 12), false, false)) ) }; - } else { - configurableClusterPrivileges = new ConfigurableClusterPrivilege[0]; - } + case 2 -> new ConfigurableClusterPrivilege[] { + new ConfigurableClusterPrivileges.WriteProfileDataPrivileges( + Sets.newHashSet(generateRandomStringArray(3, randomIntBetween(4, 12), false, false)) + ) }; + case 3 -> new ConfigurableClusterPrivilege[] { + new ConfigurableClusterPrivileges.WriteProfileDataPrivileges( + Sets.newHashSet(generateRandomStringArray(3, randomIntBetween(4, 12), false, false)) + ), + new ConfigurableClusterPrivileges.ManageApplicationPrivileges( + Sets.newHashSet(generateRandomStringArray(3, randomIntBetween(4, 12), false, false)) + ) }; + case 4 -> new ConfigurableClusterPrivilege[] { + new ConfigurableClusterPrivileges.ManageApplicationPrivileges( + Sets.newHashSet(generateRandomStringArray(3, randomIntBetween(4, 12), false, false)) + ), + new ConfigurableClusterPrivileges.WriteProfileDataPrivileges( + Sets.newHashSet(generateRandomStringArray(3, randomIntBetween(4, 12), false, false)) + ) }; + default -> throw new IllegalStateException("Unexpected value"); + }; final Map metadata = new HashMap<>(); while (randomBoolean()) { String key = randomAlphaOfLengthBetween(4, 12); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/user/RestGetUserPrivilegesActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/user/RestGetUserPrivilegesActionTests.java index 6e05692085275..58176f0918423 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/user/RestGetUserPrivilegesActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/action/user/RestGetUserPrivilegesActionTests.java @@ -68,8 +68,11 @@ public void testSecurityDisabled() throws Exception { public void testBuildResponse() throws Exception { final 
RestGetUserPrivilegesAction.RestListener listener = new RestGetUserPrivilegesAction.RestListener(null); final Set cluster = new LinkedHashSet<>(Arrays.asList("monitor", "manage_ml", "manage_watcher")); - final Set conditionalCluster = Collections.singleton( - new ConfigurableClusterPrivileges.ManageApplicationPrivileges(new LinkedHashSet<>(Arrays.asList("app01", "app02"))) + final Set conditionalCluster = new LinkedHashSet<>( + Arrays.asList( + new ConfigurableClusterPrivileges.WriteProfileDataPrivileges(new LinkedHashSet<>(Arrays.asList("app*"))), + new ConfigurableClusterPrivileges.ManageApplicationPrivileges(new LinkedHashSet<>(Arrays.asList("app01", "app02"))) + ) ); final Set index = new LinkedHashSet<>( Arrays.asList( @@ -114,6 +117,13 @@ public void testBuildResponse() throws Exception { { "cluster": [ "monitor", "manage_ml", "manage_watcher" ], "global": [ + { + "profile": { + "write": { + "applications": [ "app*" ] + } + } + }, { "application": { "manage": { diff --git a/x-pack/plugin/security/src/test/resources/org/elasticsearch/xpack/security/audit/logfile/audited_roles.txt b/x-pack/plugin/security/src/test/resources/org/elasticsearch/xpack/security/audit/logfile/audited_roles.txt index 2f2d0a167bcfc..7b5e24c97d65a 100644 --- a/x-pack/plugin/security/src/test/resources/org/elasticsearch/xpack/security/audit/logfile/audited_roles.txt +++ b/x-pack/plugin/security/src/test/resources/org/elasticsearch/xpack/security/audit/logfile/audited_roles.txt @@ -7,4 +7,6 @@ role_descriptor2 role_descriptor3 {"cluster":[],"indices":[],"applications":[{"application":"maps","privileges":["{","}","\n","\\","\""],"resources":["raster:*"]},{"application":"maps","privileges":["*:*"],"resources":["noooooo!!\n\n\f\\\\r","{"]}],"run_as":["jack","nich*","//\""],"metadata":{"some meta":42}} role_descriptor4 -{"cluster":["manage_ml","grant_api_key","manage_rollup"],"global":{"application":{"manage":{"applications":["a+b+|b+a+"]}}},"indices":[{"names":["/. ? + * | { } [ ] ( ) \" \\/","*"],"privileges":["read","read_cross_cluster"],"field_security":{"grant":["almost","all*"],"except":["denied*"]}}],"applications":[],"run_as":["//+a+\"[a]/"],"metadata":{"?list":["e1","e2","*"],"some other meta":{"r":"t"}}} \ No newline at end of file +{"cluster":["manage_ml","grant_api_key","manage_rollup"],"global":{"application":{"manage":{"applications":["a+b+|b+a+"]}},"profile":{}},"indices":[{"names":["/. ? + * | { } [ ] ( ) \" \\/","*"],"privileges":["read","read_cross_cluster"],"field_security":{"grant":["almost","all*"],"except":["denied*"]}}],"applications":[],"run_as":["//+a+\"[a]/"],"metadata":{"?list":["e1","e2","*"],"some other meta":{"r":"t"}}} +role_descriptor5 +{"cluster":["all"],"global":{"application":{"manage":{"applications":["\""]}},"profile":{"write":{"applications":["","\""]}}},"indices":[],"applications":[],"run_as":["\"[a]/"]} From f9f6ec9ed31fc938f8dcd98edf8e94f5fe91b787 Mon Sep 17 00:00:00 2001 From: Tim Vernum Date: Thu, 17 Feb 2022 10:46:06 +1100 Subject: [PATCH 137/167] Remove all "user" related methods from HLRC (#84011) Removes the following methods from the SecurityClient component of the High Level Rest Client - putUser - deleteUser - changePassword - authenticate As part of this change, I renamed the SecurityClientTestHelper class to TestSecurityClient and made it a real object rather than a set of utility methods. This was needed because different tests need different RequestOptions objects, but passing it into every method made it cumbersome. 
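For illustration, the refactoring has roughly the following before/after shape. This is a
hypothetical sketch, not the actual class bodies: only the SecurityClientTestHelper and
TestSecurityClient names come from this commit, and setUserPassword is an invented stand-in
for the real helper methods (the JSON escaping is also deliberately naive).

    import java.io.IOException;

    import org.elasticsearch.client.Request;
    import org.elasticsearch.client.RequestOptions;
    import org.elasticsearch.client.RestClient;

    // before: static helpers, so RequestOptions must be threaded through every call site
    final class SecurityClientTestHelper {
        static void setUserPassword(RestClient client, RequestOptions options, String user, String password)
            throws IOException {
            final Request request = new Request("PUT", "/_security/user/" + user + "/_password");
            request.setOptions(options);
            request.setJsonEntity("{\"password\":\"" + password + "\"}");
            client.performRequest(request);
        }
    }

    // after: a real object; the options are supplied once, at construction time
    class TestSecurityClient {
        private final RestClient client;
        private final RequestOptions options;

        TestSecurityClient(RestClient client, RequestOptions options) {
            this.client = client;
            this.options = options;
        }

        void setUserPassword(String user, String password) throws IOException {
            final Request request = new Request("PUT", "/_security/user/" + user + "/_password");
            // every request issued through this client reuses the stored options
            request.setOptions(options);
            request.setJsonEntity("{\"password\":\"" + password + "\"}");
            client.performRequest(request);
        }
    }
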
The code is clearer if we use a field in the test client itself. Relates: #83423 --- .../elasticsearch/client/SecurityClient.java | 84 -------- .../client/SecurityRequestConverters.java | 39 ---- .../security/ChangePasswordRequest.java | 63 ------ .../client/security/DeleteUserRequest.java | 60 ------ .../client/security/DeleteUserResponse.java | 42 ---- .../client/security/PutUserRequest.java | 192 ------------------ .../client/security/PutUserResponse.java | 62 ------ .../elasticsearch/test/XContentTestUtils.java | 10 +- .../xpack/idp/IdpRestTestCase.java | 59 ++++-- .../idp/WildcardServiceProviderRestIT.java | 8 +- .../SecurityOnTrialLicenseRestTestCase.java | 24 +-- .../authc/SecurityRealmSmokeTestCase.java | 28 ++- .../MultipleIndicesPermissionsTests.java | 21 +- .../security/authc/ApiKeyIntegTests.java | 65 +++--- .../security/authc/TokenAuthIntegTests.java | 42 ++-- .../esnative/ReservedRealmIntegTests.java | 19 +- .../pki/PkiAuthDelegationIntegTests.java | 144 ++++++++----- .../authz/SnapshotUserRoleIntegTests.java | 11 +- .../test/SecurityClientTestHelper.java | 24 --- .../test/SecurityIntegTestCase.java | 13 ++ .../test/TestSecurityClient.java | 119 +++++++++++ .../example/role/CustomRolesProviderIT.java | 24 ++- 22 files changed, 391 insertions(+), 762 deletions(-) delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/security/ChangePasswordRequest.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/security/DeleteUserRequest.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/security/DeleteUserResponse.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/security/PutUserRequest.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/security/PutUserResponse.java delete mode 100644 x-pack/plugin/security/src/test/java/org/elasticsearch/test/SecurityClientTestHelper.java create mode 100644 x-pack/plugin/security/src/test/java/org/elasticsearch/test/TestSecurityClient.java diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/SecurityClient.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/SecurityClient.java index 3ae8cd027f92a..008fe9206ab8f 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/SecurityClient.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/SecurityClient.java @@ -8,9 +8,6 @@ package org.elasticsearch.client; -import org.elasticsearch.client.security.AuthenticateRequest; -import org.elasticsearch.client.security.AuthenticateResponse; -import org.elasticsearch.client.security.ChangePasswordRequest; import org.elasticsearch.client.security.ClearRealmCacheRequest; import org.elasticsearch.client.security.ClearRealmCacheResponse; import org.elasticsearch.client.security.CreateTokenRequest; @@ -21,8 +18,6 @@ import org.elasticsearch.client.security.DeleteRoleMappingResponse; import org.elasticsearch.client.security.DeleteRoleRequest; import org.elasticsearch.client.security.DeleteRoleResponse; -import org.elasticsearch.client.security.DeleteUserRequest; -import org.elasticsearch.client.security.DeleteUserResponse; import org.elasticsearch.client.security.GetApiKeyRequest; import org.elasticsearch.client.security.GetApiKeyResponse; import org.elasticsearch.client.security.GetRolesRequest; @@ -37,8 +32,6 @@ import org.elasticsearch.client.security.PutRoleMappingResponse; import org.elasticsearch.client.security.PutRoleRequest; 
import org.elasticsearch.client.security.PutRoleResponse; -import org.elasticsearch.client.security.PutUserRequest; -import org.elasticsearch.client.security.PutUserResponse; import java.io.IOException; @@ -64,45 +57,6 @@ public final class SecurityClient { this.restHighLevelClient = restHighLevelClient; } - /** - * Create/update a user in the native realm synchronously. - * See - * the docs for more. - * - * @param request the request with the user's information - * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return the response from the put user call - * @throws IOException in case there is a problem sending the request or parsing back the response - */ - public PutUserResponse putUser(PutUserRequest request, RequestOptions options) throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - request, - SecurityRequestConverters::putUser, - options, - PutUserResponse::fromXContent, - emptySet() - ); - } - - /** - * Removes user from the native realm synchronously. - * See - * the docs for more. - * @param request the request with the user to delete - * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return the response from the delete user call - * @throws IOException in case there is a problem sending the request or parsing back the response - */ - public DeleteUserResponse deleteUser(DeleteUserRequest request, RequestOptions options) throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - request, - SecurityRequestConverters::deleteUser, - options, - DeleteUserResponse::fromXContent, - singleton(404) - ); - } - /** * Create/Update a role mapping. * See @@ -122,24 +76,6 @@ public PutRoleMappingResponse putRoleMapping(final PutRoleMappingRequest request ); } - /** - * Authenticate the current user and return all the information about the authenticated user. - * See - * the docs for more. - * - * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return the responsee from the authenticate user call - */ - public AuthenticateResponse authenticate(RequestOptions options) throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - AuthenticateRequest.INSTANCE, - AuthenticateRequest::getRequest, - options, - AuthenticateResponse::fromXContent, - emptySet() - ); - } - /** * Clears the cache in one or more realms. * See @@ -160,26 +96,6 @@ public ClearRealmCacheResponse clearRealmCache(ClearRealmCacheRequest request, R ); } - /** - * Change the password of a user of a native realm or built-in user synchronously. - * See - * the docs for more. - * - * @param request the request with the user's new password - * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return {@code true} if the request succeeded (the new password was set) - * @throws IOException in case there is a problem sending the request or parsing back the response - */ - public boolean changePassword(ChangePasswordRequest request, RequestOptions options) throws IOException { - return restHighLevelClient.performRequest( - request, - SecurityRequestConverters::changePassword, - options, - RestHighLevelClient::convertExistsResponse, - emptySet() - ); - } - /** * Delete a role mapping. 
* See diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/SecurityRequestConverters.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/SecurityRequestConverters.java index d1d0e5fea692d..fe3988d79cf7c 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/SecurityRequestConverters.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/SecurityRequestConverters.java @@ -12,13 +12,11 @@ import org.apache.http.client.methods.HttpGet; import org.apache.http.client.methods.HttpPost; import org.apache.http.client.methods.HttpPut; -import org.elasticsearch.client.security.ChangePasswordRequest; import org.elasticsearch.client.security.ClearRealmCacheRequest; import org.elasticsearch.client.security.CreateTokenRequest; import org.elasticsearch.client.security.DelegatePkiAuthenticationRequest; import org.elasticsearch.client.security.DeleteRoleMappingRequest; import org.elasticsearch.client.security.DeleteRoleRequest; -import org.elasticsearch.client.security.DeleteUserRequest; import org.elasticsearch.client.security.GetApiKeyRequest; import org.elasticsearch.client.security.GetRolesRequest; import org.elasticsearch.client.security.InvalidateApiKeyRequest; @@ -26,7 +24,6 @@ import org.elasticsearch.client.security.PutPrivilegesRequest; import org.elasticsearch.client.security.PutRoleMappingRequest; import org.elasticsearch.client.security.PutRoleRequest; -import org.elasticsearch.client.security.PutUserRequest; import org.elasticsearch.common.Strings; import java.io.IOException; @@ -38,42 +35,6 @@ final class SecurityRequestConverters { private SecurityRequestConverters() {} - static Request changePassword(ChangePasswordRequest changePasswordRequest) throws IOException { - String endpoint = new RequestConverters.EndpointBuilder().addPathPartAsIs("_security/user") - .addPathPart(changePasswordRequest.getUsername()) - .addPathPartAsIs("_password") - .build(); - Request request = new Request(HttpPost.METHOD_NAME, endpoint); - request.setEntity(createEntity(changePasswordRequest, REQUEST_BODY_CONTENT_TYPE)); - RequestConverters.Params params = new RequestConverters.Params(); - params.withRefreshPolicy(changePasswordRequest.getRefreshPolicy()); - request.addParameters(params.asMap()); - return request; - } - - static Request putUser(PutUserRequest putUserRequest) throws IOException { - String endpoint = new RequestConverters.EndpointBuilder().addPathPartAsIs("_security/user") - .addPathPart(putUserRequest.getUser().getUsername()) - .build(); - Request request = new Request(HttpPut.METHOD_NAME, endpoint); - request.setEntity(createEntity(putUserRequest, REQUEST_BODY_CONTENT_TYPE)); - RequestConverters.Params params = new RequestConverters.Params(); - params.withRefreshPolicy(putUserRequest.getRefreshPolicy()); - request.addParameters(params.asMap()); - return request; - } - - static Request deleteUser(DeleteUserRequest deleteUserRequest) { - String endpoint = new RequestConverters.EndpointBuilder().addPathPartAsIs("_security", "user") - .addPathPart(deleteUserRequest.getName()) - .build(); - Request request = new Request(HttpDelete.METHOD_NAME, endpoint); - RequestConverters.Params params = new RequestConverters.Params(); - params.withRefreshPolicy(deleteUserRequest.getRefreshPolicy()); - request.addParameters(params.asMap()); - return request; - } - static Request putRoleMapping(final PutRoleMappingRequest putRoleMappingRequest) throws IOException { final String endpoint = new 
RequestConverters.EndpointBuilder().addPathPartAsIs("_security/role_mapping") .addPathPart(putRoleMappingRequest.getName()) diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/security/ChangePasswordRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/security/ChangePasswordRequest.java deleted file mode 100644 index ae13a77abc456..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/security/ChangePasswordRequest.java +++ /dev/null @@ -1,63 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ - -package org.elasticsearch.client.security; - -import org.elasticsearch.client.Validatable; -import org.elasticsearch.core.CharArrays; -import org.elasticsearch.core.Nullable; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Arrays; -import java.util.Objects; - -/** - * Request object to change the password of a user of a native realm or a built-in user. - */ -public final class ChangePasswordRequest implements Validatable, ToXContentObject { - - private final String username; - private final char[] password; - private final RefreshPolicy refreshPolicy; - - /** - * @param username The username of the user whose password should be changed or null for the current user. - * @param password The new password. The password array is not cleared by the {@link ChangePasswordRequest} object so the - * calling code must clear it after receiving the response. - * @param refreshPolicy The refresh policy for the request. - */ - public ChangePasswordRequest(@Nullable String username, char[] password, RefreshPolicy refreshPolicy) { - this.username = username; - this.password = Objects.requireNonNull(password, "password is required"); - this.refreshPolicy = refreshPolicy == null ? RefreshPolicy.getDefault() : refreshPolicy; - } - - public String getUsername() { - return username; - } - - public char[] getPassword() { - return password; - } - - public RefreshPolicy getRefreshPolicy() { - return refreshPolicy; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - byte[] charBytes = CharArrays.toUtf8Bytes(password); - try { - return builder.startObject().field("password").utf8Value(charBytes, 0, charBytes.length).endObject(); - } finally { - Arrays.fill(charBytes, (byte) 0); - } - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/security/DeleteUserRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/security/DeleteUserRequest.java deleted file mode 100644 index cb2e3ea74582e..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/security/DeleteUserRequest.java +++ /dev/null @@ -1,60 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ - -package org.elasticsearch.client.security; - -import org.elasticsearch.client.Validatable; - -import java.util.Objects; - -/** - * A request to delete a user from the native realm. - */ -public final class DeleteUserRequest implements Validatable { - - private final String name; - private final RefreshPolicy refreshPolicy; - - public DeleteUserRequest(String name) { - this(name, RefreshPolicy.IMMEDIATE); - } - - public DeleteUserRequest(String name, RefreshPolicy refreshPolicy) { - this.name = Objects.requireNonNull(name, "user name is required"); - this.refreshPolicy = Objects.requireNonNull(refreshPolicy, "refresh policy is required"); - } - - public String getName() { - return name; - } - - public RefreshPolicy getRefreshPolicy() { - return refreshPolicy; - } - - @Override - public int hashCode() { - return Objects.hash(name, refreshPolicy); - } - - @Override - public boolean equals(Object obj) { - if (this == obj) { - return true; - } - if (obj == null) { - return false; - } - if (getClass() != obj.getClass()) { - return false; - } - final DeleteUserRequest other = (DeleteUserRequest) obj; - - return (refreshPolicy == other.refreshPolicy) && Objects.equals(name, other.name); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/security/DeleteUserResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/security/DeleteUserResponse.java deleted file mode 100644 index 8de14c31e3d40..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/security/DeleteUserResponse.java +++ /dev/null @@ -1,42 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ - -package org.elasticsearch.client.security; - -import org.elasticsearch.client.core.AcknowledgedResponse; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; - -/** - * Response for a user being deleted from the native realm - */ -public final class DeleteUserResponse extends AcknowledgedResponse { - - private static final String PARSE_FIELD_NAME = "found"; - - private static final ConstructingObjectParser PARSER = AcknowledgedResponse.generateParser( - "delete_user_response", - DeleteUserResponse::new, - PARSE_FIELD_NAME - ); - - public DeleteUserResponse(boolean acknowledged) { - super(acknowledged); - } - - public static DeleteUserResponse fromXContent(final XContentParser parser) throws IOException { - return PARSER.parse(parser, null); - } - - @Override - protected String getFieldName() { - return PARSE_FIELD_NAME; - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/security/PutUserRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/security/PutUserRequest.java deleted file mode 100644 index 28ffa7bb10da6..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/security/PutUserRequest.java +++ /dev/null @@ -1,192 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. 
Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ - -package org.elasticsearch.client.security; - -import org.elasticsearch.client.Validatable; -import org.elasticsearch.client.ValidationException; -import org.elasticsearch.client.security.user.User; -import org.elasticsearch.core.CharArrays; -import org.elasticsearch.core.Nullable; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Arrays; -import java.util.Objects; -import java.util.Optional; - -/** - * Request object to create or update a user in the native realm. - */ -public final class PutUserRequest implements Validatable, ToXContentObject { - - private final User user; - private final @Nullable char[] password; - private final @Nullable char[] passwordHash; - private final boolean enabled; - private final RefreshPolicy refreshPolicy; - - /** - * Create or update a user in the native realm, with the user's new or updated password specified in plaintext. - * @param user the user to be created or updated - * @param password the password of the user. The password array is not modified by this class. - * It is the responsibility of the caller to clear the password after receiving - * a response. - * @param enabled true if the user is enabled and allowed to access elasticsearch - * @param refreshPolicy the refresh policy for the request. - */ - public static PutUserRequest withPassword(User user, char[] password, boolean enabled, RefreshPolicy refreshPolicy) { - return new PutUserRequest(user, password, null, enabled, refreshPolicy); - } - - /** - * Create or update a user in the native realm, with the user's new or updated password specified as a cryptographic hash. - * @param user the user to be created or updated - * @param passwordHash the hash of the password of the user. It must be in the correct format for the password hashing algorithm in - * use on this elasticsearch cluster. The array is not modified by this class. - * It is the responsibility of the caller to clear the hash after receiving a response. - * @param enabled true if the user is enabled and allowed to access elasticsearch - * @param refreshPolicy the refresh policy for the request. - */ - public static PutUserRequest withPasswordHash(User user, char[] passwordHash, boolean enabled, RefreshPolicy refreshPolicy) { - return new PutUserRequest(user, null, passwordHash, enabled, refreshPolicy); - } - - /** - * Update an existing user in the native realm without modifying their password. - * @param user the user to be created or updated - * @param enabled true if the user is enabled and allowed to access elasticsearch - * @param refreshPolicy the refresh policy for the request. - */ - public static PutUserRequest updateUser(User user, boolean enabled, RefreshPolicy refreshPolicy) { - return new PutUserRequest(user, null, null, enabled, refreshPolicy); - } - - /** - * Creates a new request that is used to create or update a user in the native realm. - * - * @param user the user to be created or updated - * @param password the password of the user. The password array is not modified by this class. - * It is the responsibility of the caller to clear the password after receiving - * a response. 
- * @param enabled true if the user is enabled and allowed to access elasticsearch - * @param refreshPolicy the refresh policy for the request. - * @deprecated Use {@link #withPassword(User, char[], boolean, RefreshPolicy)} or - * {@link #updateUser(User, boolean, RefreshPolicy)} instead. - */ - @Deprecated - public PutUserRequest(User user, @Nullable char[] password, boolean enabled, @Nullable RefreshPolicy refreshPolicy) { - this(user, password, null, enabled, refreshPolicy); - } - - /** - * Creates a new request that is used to create or update a user in the native realm. - * @param user the user to be created or updated - * @param password the password of the user. The password array is not modified by this class. - * It is the responsibility of the caller to clear the password after receiving - * a response. - * @param passwordHash the hash of the password. Only one of "password" or "passwordHash" may be populated. - * The other parameter must be {@code null}. - * @param enabled true if the user is enabled and allowed to access elasticsearch - * @param refreshPolicy the refresh policy for the request. - */ - private PutUserRequest( - User user, - @Nullable char[] password, - @Nullable char[] passwordHash, - boolean enabled, - RefreshPolicy refreshPolicy - ) { - this.user = Objects.requireNonNull(user, "user is required, cannot be null"); - if (password != null && passwordHash != null) { - throw new IllegalArgumentException("cannot specify both password and passwordHash"); - } - this.password = password; - this.passwordHash = passwordHash; - this.enabled = enabled; - this.refreshPolicy = refreshPolicy == null ? RefreshPolicy.getDefault() : refreshPolicy; - } - - public User getUser() { - return user; - } - - public @Nullable char[] getPassword() { - return password; - } - - public boolean isEnabled() { - return enabled; - } - - public RefreshPolicy getRefreshPolicy() { - return refreshPolicy; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - final PutUserRequest that = (PutUserRequest) o; - return Objects.equals(user, that.user) - && Arrays.equals(password, that.password) - && Arrays.equals(passwordHash, that.passwordHash) - && enabled == that.enabled - && refreshPolicy == that.refreshPolicy; - } - - @Override - public int hashCode() { - int result = Objects.hash(user, enabled, refreshPolicy); - result = 31 * result + Arrays.hashCode(password); - result = 31 * result + Arrays.hashCode(passwordHash); - return result; - } - - @Override - public Optional validate() { - if (user.getMetadata() != null && user.getMetadata().keySet().stream().anyMatch(s -> s.startsWith("_"))) { - ValidationException validationException = new ValidationException(); - validationException.addValidationError("user metadata keys may not start with [_]"); - return Optional.of(validationException); - } - return Optional.empty(); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field("username", user.getUsername()); - if (password != null) { - charField(builder, "password", password); - } - if (passwordHash != null) { - charField(builder, "password_hash", passwordHash); - } - builder.field("roles", user.getRoles()); - if (user.getFullName() != null) { - builder.field("full_name", user.getFullName()); - } - if (user.getEmail() != null) { - builder.field("email", user.getEmail()); - } - builder.field("metadata", 
user.getMetadata()); - builder.field("enabled", enabled); - return builder.endObject(); - } - - private void charField(XContentBuilder builder, String fieldName, char[] chars) throws IOException { - byte[] charBytes = CharArrays.toUtf8Bytes(chars); - try { - builder.field(fieldName).utf8Value(charBytes, 0, charBytes.length); - } finally { - Arrays.fill(charBytes, (byte) 0); - } - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/security/PutUserResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/security/PutUserResponse.java deleted file mode 100644 index b03de98390158..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/security/PutUserResponse.java +++ /dev/null @@ -1,62 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ - -package org.elasticsearch.client.security; - -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Objects; - -import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; - -/** - * Response when adding a user to the native realm. Returns a - * single boolean field for whether the user was created or updated. - */ -public final class PutUserResponse { - - private final boolean created; - - public PutUserResponse(boolean created) { - this.created = created; - } - - public boolean isCreated() { - return created; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - PutUserResponse that = (PutUserResponse) o; - return created == that.created; - } - - @Override - public int hashCode() { - return Objects.hash(created); - } - - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "put_user_response", - true, - args -> new PutUserResponse((boolean) args[0]) - ); - - static { - PARSER.declareBoolean(constructorArg(), new ParseField("created")); - } - - public static PutUserResponse fromXContent(XContentParser parser) throws IOException { - return PARSER.parse(parser, null); - } -} diff --git a/test/framework/src/main/java/org/elasticsearch/test/XContentTestUtils.java b/test/framework/src/main/java/org/elasticsearch/test/XContentTestUtils.java index ce99a6e2c92ec..371d7302a61f7 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/XContentTestUtils.java +++ b/test/framework/src/main/java/org/elasticsearch/test/XContentTestUtils.java @@ -43,9 +43,13 @@ private XContentTestUtils() { public static Map convertToMap(ToXContent part) throws IOException { XContentBuilder builder = XContentFactory.jsonBuilder(); - builder.startObject(); - part.toXContent(builder, EMPTY_PARAMS); - builder.endObject(); + if (part.isFragment()) { + builder.startObject(); + part.toXContent(builder, EMPTY_PARAMS); + builder.endObject(); + } else { + part.toXContent(builder, EMPTY_PARAMS); + } return XContentHelper.convertToMap(BytesReference.bytes(builder), false, builder.contentType()).v2(); } diff --git 
a/x-pack/plugin/identity-provider/qa/idp-rest-tests/src/javaRestTest/java/org/elasticsearch/xpack/idp/IdpRestTestCase.java b/x-pack/plugin/identity-provider/qa/idp-rest-tests/src/javaRestTest/java/org/elasticsearch/xpack/idp/IdpRestTestCase.java index 568a68181f1e2..5e4e4afb1a60b 100644 --- a/x-pack/plugin/identity-provider/qa/idp-rest-tests/src/javaRestTest/java/org/elasticsearch/xpack/idp/IdpRestTestCase.java +++ b/x-pack/plugin/identity-provider/qa/idp-rest-tests/src/javaRestTest/java/org/elasticsearch/xpack/idp/IdpRestTestCase.java @@ -6,18 +6,17 @@ */ package org.elasticsearch.xpack.idp; +import org.apache.http.client.methods.HttpDelete; +import org.apache.http.client.methods.HttpPost; +import org.apache.http.client.methods.HttpPut; import org.elasticsearch.client.Request; import org.elasticsearch.client.RequestOptions; import org.elasticsearch.client.Response; import org.elasticsearch.client.RestHighLevelClient; -import org.elasticsearch.client.security.ChangePasswordRequest; import org.elasticsearch.client.security.DeleteRoleRequest; -import org.elasticsearch.client.security.DeleteUserRequest; import org.elasticsearch.client.security.PutPrivilegesRequest; import org.elasticsearch.client.security.PutRoleRequest; -import org.elasticsearch.client.security.PutUserRequest; import org.elasticsearch.client.security.RefreshPolicy; -import org.elasticsearch.client.security.user.User; import org.elasticsearch.client.security.user.privileges.ApplicationPrivilege; import org.elasticsearch.client.security.user.privileges.ApplicationResourcePrivileges; import org.elasticsearch.client.security.user.privileges.IndicesPrivileges; @@ -29,6 +28,7 @@ import org.elasticsearch.test.rest.ESRestTestCase; import org.elasticsearch.xcontent.ObjectPath; import org.elasticsearch.xcontent.json.JsonXContent; +import org.elasticsearch.xpack.core.security.user.User; import org.elasticsearch.xpack.idp.saml.sp.SamlServiceProviderIndex; import java.io.IOException; @@ -67,18 +67,40 @@ private RestHighLevelClient getHighLevelAdminClient() { return highLevelAdminClient; } - protected User createUser(String username, SecureString password, String... 
roles) throws IOException { - final RestHighLevelClient client = getHighLevelAdminClient(); - final User user = new User(username, List.of(roles), Map.of(), username + " in " + getTestName(), username + "@test.example.com"); - final PutUserRequest request = PutUserRequest.withPassword(user, password.getChars(), true, RefreshPolicy.IMMEDIATE); - client.security().putUser(request, RequestOptions.DEFAULT); + protected User createUser(String username, SecureString password, String role) throws IOException { + final User user = new User( + username, + new String[] { role }, + username + " in " + getTestName(), + username + "@test.example.com", + Map.of(), + true + ); + final String endpoint = "/_security/user/" + username; + final Request request = new Request(HttpPut.METHOD_NAME, endpoint); + final String body = """ + { + "username": "%s", + "full_name": "%s", + "email": "%s", + "password": "%s", + "roles": [ "%s" ] + } + """.formatted(user.principal(), user.fullName(), user.email(), password.toString(), role); + request.setJsonEntity(body); + request.addParameters(Map.of("refresh", "true")); + request.setOptions(RequestOptions.DEFAULT); + adminClient().performRequest(request); + return user; } protected void deleteUser(String username) throws IOException { - final RestHighLevelClient client = getHighLevelAdminClient(); - final DeleteUserRequest request = new DeleteUserRequest(username, RefreshPolicy.WAIT_UNTIL); - client.security().deleteUser(request, RequestOptions.DEFAULT); + final String endpoint = "/_security/user/" + username; + final Request request = new Request(HttpDelete.METHOD_NAME, endpoint); + request.addParameters(Map.of("refresh", "true")); + request.setOptions(RequestOptions.DEFAULT); + adminClient().performRequest(request); } protected void createRole( @@ -114,9 +136,16 @@ protected void createApplicationPrivileges(String applicationName, Map body) throws IOException { diff --git a/x-pack/plugin/identity-provider/qa/idp-rest-tests/src/javaRestTest/java/org/elasticsearch/xpack/idp/WildcardServiceProviderRestIT.java b/x-pack/plugin/identity-provider/qa/idp-rest-tests/src/javaRestTest/java/org/elasticsearch/xpack/idp/WildcardServiceProviderRestIT.java index a7d8c9c798a08..0a7d628a7cc3c 100644 --- a/x-pack/plugin/identity-provider/qa/idp-rest-tests/src/javaRestTest/java/org/elasticsearch/xpack/idp/WildcardServiceProviderRestIT.java +++ b/x-pack/plugin/identity-provider/qa/idp-rest-tests/src/javaRestTest/java/org/elasticsearch/xpack/idp/WildcardServiceProviderRestIT.java @@ -8,13 +8,13 @@ import org.elasticsearch.client.Request; import org.elasticsearch.client.Response; -import org.elasticsearch.client.security.user.User; import org.elasticsearch.client.security.user.privileges.ApplicationResourcePrivileges; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.core.security.authc.support.UsernamePasswordToken; +import org.elasticsearch.xpack.core.security.user.User; import org.junit.Before; import java.io.IOException; @@ -78,9 +78,9 @@ public void testInitSingleSignOnToWildcardServiceProvider() throws Exception { assertThat(samlResponse, containsString("FriendlyName=\"" + attr + "\"")); } - assertThat(samlResponse, containsString(user.getUsername())); - assertThat(samlResponse, containsString(user.getEmail())); - assertThat(samlResponse, containsString(user.getFullName())); + 
assertThat(samlResponse, containsString(user.principal())); + assertThat(samlResponse, containsString(user.email())); + assertThat(samlResponse, containsString(user.fullName())); assertThat(samlResponse, containsString(">admin<")); deleteUser(username); diff --git a/x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/SecurityOnTrialLicenseRestTestCase.java b/x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/SecurityOnTrialLicenseRestTestCase.java index 710edf9fe8a95..4ea421c5fc454 100644 --- a/x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/SecurityOnTrialLicenseRestTestCase.java +++ b/x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/SecurityOnTrialLicenseRestTestCase.java @@ -12,21 +12,19 @@ import org.elasticsearch.client.security.CreateTokenRequest; import org.elasticsearch.client.security.CreateTokenResponse; import org.elasticsearch.client.security.DeleteRoleRequest; -import org.elasticsearch.client.security.DeleteUserRequest; import org.elasticsearch.client.security.GetApiKeyRequest; import org.elasticsearch.client.security.GetApiKeyResponse; import org.elasticsearch.client.security.InvalidateApiKeyRequest; import org.elasticsearch.client.security.PutRoleRequest; -import org.elasticsearch.client.security.PutUserRequest; -import org.elasticsearch.client.security.RefreshPolicy; import org.elasticsearch.client.security.support.ApiKey; -import org.elasticsearch.client.security.user.User; import org.elasticsearch.client.security.user.privileges.Role; import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.core.Tuple; +import org.elasticsearch.test.TestSecurityClient; import org.elasticsearch.test.rest.ESRestTestCase; +import org.elasticsearch.xpack.core.security.user.User; import org.hamcrest.Matchers; import java.io.IOException; @@ -36,6 +34,7 @@ @SuppressWarnings("removal") public abstract class SecurityOnTrialLicenseRestTestCase extends ESRestTestCase { private RestHighLevelClient highLevelAdminClient; + private TestSecurityClient securityClient; @Override protected Settings restAdminSettings() { @@ -49,13 +48,15 @@ protected Settings restClientSettings() { return Settings.builder().put(ThreadContext.PREFIX + ".Authorization", token).build(); } + protected TestSecurityClient getSecurityClient() { + if (securityClient == null) { + securityClient = new TestSecurityClient(adminClient()); + } + return securityClient; + } + protected void createUser(String username, SecureString password, List roles) throws IOException { - final RestHighLevelClient client = getHighLevelAdminClient(); - client.security() - .putUser( - PutUserRequest.withPassword(new User(username, roles), password.getChars(), true, RefreshPolicy.WAIT_UNTIL), - RequestOptions.DEFAULT - ); + getSecurityClient().putUser(new User(username, roles.toArray(String[]::new)), password); } protected void createRole(String name, Collection clusterPrivileges) throws IOException { @@ -75,8 +76,7 @@ protected Tuple createOAuthToken(String username, SecureString p } protected void deleteUser(String username) throws IOException { - final RestHighLevelClient client = getHighLevelAdminClient(); - client.security().deleteUser(new DeleteUserRequest(username), RequestOptions.DEFAULT); + getSecurityClient().deleteUser(username); } 
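
The hunks above and below all converge on the same pattern: test helpers stop going through the deprecated RestHighLevelClient security() API and instead call the _security user endpoints directly over the low-level RestClient, with refresh=true so the change is immediately visible. A minimal sketch of that pattern follows, assuming only a RestClient handle; the class name, method names, and the single-role/no-escaping simplifications are illustrative and are not the TestSecurityClient introduced later in this patch.

    import java.io.IOException;
    import java.util.Map;

    import org.elasticsearch.client.Request;
    import org.elasticsearch.client.RestClient;

    // Illustrative sketch of user CRUD against the _security endpoints,
    // mirroring what the migrated test helpers in these hunks do.
    final class UserEndpointSketch {
        private final RestClient client;

        UserEndpointSketch(RestClient client) {
            this.client = client;
        }

        // PUT /_security/user/{name} with a JSON body; refresh=true makes the
        // new user visible to the next request. (Real code would escape the
        // JSON values; omitted here for brevity.)
        void putUser(String username, String password, String role) throws IOException {
            final Request request = new Request("PUT", "/_security/user/" + username);
            request.setJsonEntity("""
                { "password": "%s", "roles": [ "%s" ] }
                """.formatted(password, role));
            request.addParameters(Map.of("refresh", "true"));
            client.performRequest(request);
        }

        // DELETE /_security/user/{name}, again refreshing immediately.
        void deleteUser(String username) throws IOException {
            final Request request = new Request("DELETE", "/_security/user/" + username);
            request.addParameters(Map.of("refresh", "true"));
            client.performRequest(request);
        }
    }
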
protected void deleteRole(String name) throws IOException { diff --git a/x-pack/plugin/security/qa/smoke-test-all-realms/src/javaRestTest/java/org/elasticsearch/xpack/security/authc/SecurityRealmSmokeTestCase.java b/x-pack/plugin/security/qa/smoke-test-all-realms/src/javaRestTest/java/org/elasticsearch/xpack/security/authc/SecurityRealmSmokeTestCase.java index bdccd10db561c..4562418c66533 100644 --- a/x-pack/plugin/security/qa/smoke-test-all-realms/src/javaRestTest/java/org/elasticsearch/xpack/security/authc/SecurityRealmSmokeTestCase.java +++ b/x-pack/plugin/security/qa/smoke-test-all-realms/src/javaRestTest/java/org/elasticsearch/xpack/security/authc/SecurityRealmSmokeTestCase.java @@ -11,21 +11,19 @@ import org.elasticsearch.client.RequestOptions; import org.elasticsearch.client.Response; import org.elasticsearch.client.RestHighLevelClient; -import org.elasticsearch.client.security.ChangePasswordRequest; import org.elasticsearch.client.security.DeleteRoleRequest; -import org.elasticsearch.client.security.DeleteUserRequest; import org.elasticsearch.client.security.PutRoleRequest; -import org.elasticsearch.client.security.PutUserRequest; import org.elasticsearch.client.security.RefreshPolicy; -import org.elasticsearch.client.security.user.User; import org.elasticsearch.client.security.user.privileges.Role; import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.core.PathUtils; +import org.elasticsearch.test.TestSecurityClient; import org.elasticsearch.test.rest.ESRestTestCase; import org.elasticsearch.xpack.core.security.authc.Authentication.AuthenticationType; +import org.elasticsearch.xpack.core.security.user.User; import org.junit.BeforeClass; import java.io.FileNotFoundException; @@ -48,6 +46,7 @@ public abstract class SecurityRealmSmokeTestCase extends ESRestTestCase { private static Path httpCAPath; private RestHighLevelClient highLevelAdminClient; + private TestSecurityClient securityClient; @BeforeClass public static void findHttpCertificateAuthority() throws Exception { @@ -111,18 +110,11 @@ protected void assertNoApiKeyInfo(Map authenticateResponse, Auth } protected void createUser(String username, SecureString password, List roles) throws IOException { - final RestHighLevelClient client = getHighLevelAdminClient(); - client.security() - .putUser( - PutUserRequest.withPassword(new User(username, roles), password.getChars(), true, RefreshPolicy.WAIT_UNTIL), - RequestOptions.DEFAULT - ); + getSecurityClient().putUser(new User(username, roles.toArray(String[]::new)), password); } protected void changePassword(String username, SecureString password) throws IOException { - final RestHighLevelClient client = getHighLevelAdminClient(); - client.security() - .changePassword(new ChangePasswordRequest(username, password.getChars(), RefreshPolicy.WAIT_UNTIL), RequestOptions.DEFAULT); + getSecurityClient().changePassword(username, password); } protected void createRole(String name, Collection clusterPrivileges) throws IOException { @@ -132,8 +124,7 @@ protected void createRole(String name, Collection clusterPrivileges) thr } protected void deleteUser(String username) throws IOException { - final RestHighLevelClient client = getHighLevelAdminClient(); - client.security().deleteUser(new DeleteUserRequest(username), RequestOptions.DEFAULT); + getSecurityClient().deleteUser(username); } protected void deleteRole(String 
name) throws IOException { @@ -148,4 +139,11 @@ private RestHighLevelClient getHighLevelAdminClient() { } return highLevelAdminClient; } + + protected TestSecurityClient getSecurityClient() { + if (securityClient == null) { + securityClient = new TestSecurityClient(adminClient()); + } + return securityClient; + } } diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/MultipleIndicesPermissionsTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/MultipleIndicesPermissionsTests.java index 717173154d496..7e5d4f3bddaff 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/MultipleIndicesPermissionsTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/MultipleIndicesPermissionsTests.java @@ -21,25 +21,22 @@ import org.elasticsearch.client.RequestOptions; import org.elasticsearch.client.Response; import org.elasticsearch.client.internal.Client; -import org.elasticsearch.client.security.PutUserRequest; -import org.elasticsearch.client.security.RefreshPolicy; -import org.elasticsearch.client.security.user.User; import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.test.SecurityIntegTestCase; import org.elasticsearch.test.SecuritySettingsSource; import org.elasticsearch.test.SecuritySettingsSourceField; +import org.elasticsearch.test.TestSecurityClient; import org.elasticsearch.xpack.core.security.authc.support.UsernamePasswordToken; +import org.elasticsearch.xpack.core.security.user.User; import org.junit.After; import org.junit.Before; import java.util.Collections; -import java.util.List; import static org.elasticsearch.action.support.WriteRequest.RefreshPolicy.IMMEDIATE; import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; -import static org.elasticsearch.test.SecuritySettingsSource.SECURITY_REQUEST_OPTIONS; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; @@ -58,16 +55,10 @@ public class MultipleIndicesPermissionsTests extends SecurityIntegTestCase { @Before public void waitForSecurityIndexWritable() throws Exception { // adds a dummy user to the native realm to force .security index creation - new TestRestHighLevelClient().security() - .putUser( - PutUserRequest.withPassword( - new User("dummy_user", List.of("missing_role")), - "password".toCharArray(), - true, - RefreshPolicy.IMMEDIATE - ), - SECURITY_REQUEST_OPTIONS - ); + new TestSecurityClient(getRestClient(), SecuritySettingsSource.SECURITY_REQUEST_OPTIONS).putUser( + new User("dummy_user", "missing_role"), + SecuritySettingsSourceField.TEST_PASSWORD_SECURE_STRING + ); assertSecurityIndexActive(); } diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/ApiKeyIntegTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/ApiKeyIntegTests.java index 2ba3035e4993e..5e1c432750333 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/ApiKeyIntegTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/ApiKeyIntegTests.java @@ -8,7 +8,6 @@ 
package org.elasticsearch.xpack.security.authc; import org.elasticsearch.ElasticsearchSecurityException; -import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.admin.cluster.node.info.NodeInfo; @@ -24,10 +23,9 @@ import org.elasticsearch.client.RequestOptions; import org.elasticsearch.client.ResponseException; import org.elasticsearch.client.RestClient; -import org.elasticsearch.client.RestHighLevelClient; import org.elasticsearch.client.internal.Client; -import org.elasticsearch.client.security.AuthenticateResponse; import org.elasticsearch.common.Strings; +import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; @@ -37,6 +35,8 @@ import org.elasticsearch.rest.RestStatus; import org.elasticsearch.test.SecurityIntegTestCase; import org.elasticsearch.test.SecuritySettingsSource; +import org.elasticsearch.test.TestSecurityClient; +import org.elasticsearch.test.rest.yaml.ObjectPath; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.core.XPackSettings; import org.elasticsearch.xpack.core.security.action.ClearSecurityCacheAction; @@ -55,7 +55,9 @@ import org.elasticsearch.xpack.core.security.action.user.PutUserAction; import org.elasticsearch.xpack.core.security.action.user.PutUserRequest; import org.elasticsearch.xpack.core.security.action.user.PutUserResponse; +import org.elasticsearch.xpack.core.security.authc.Authentication; import org.elasticsearch.xpack.core.security.authz.RoleDescriptor; +import org.elasticsearch.xpack.core.security.user.User; import org.elasticsearch.xpack.security.transport.filter.IPFilter; import org.junit.After; import org.junit.Before; @@ -70,6 +72,7 @@ import java.util.Collections; import java.util.HashMap; import java.util.List; +import java.util.Locale; import java.util.Map; import java.util.Set; import java.util.concurrent.CountDownLatch; @@ -206,20 +209,18 @@ public void testCreateApiKey() throws Exception { assertThat(simple.getId(), not(containsString(new String(simple.getKey().getChars())))); assertNull(simple.getExpiration()); - // use the first ApiKey for authorized action - final String base64ApiKeyKeyValue = Base64.getEncoder() - .encodeToString((response.getId() + ":" + response.getKey().toString()).getBytes(StandardCharsets.UTF_8)); // Assert that we can authenticate with the API KEY - final RestHighLevelClient restClient = new TestRestHighLevelClient(); - AuthenticateResponse authResponse = restClient.security() - .authenticate(RequestOptions.DEFAULT.toBuilder().addHeader("Authorization", "ApiKey " + base64ApiKeyKeyValue).build()); - assertThat(authResponse.getUser().getUsername(), equalTo(ES_TEST_ROOT_USER)); - assertThat(authResponse.getAuthenticationType(), equalTo("api_key")); + final Map authResponse = authenticateWithApiKey(response.getId(), response.getKey()); + assertThat(authResponse.get(User.Fields.USERNAME.getPreferredName()), equalTo(ES_TEST_ROOT_USER)); // use the first ApiKey for an unauthorized action + final Map authorizationHeaders = Collections.singletonMap( + "Authorization", + "ApiKey " + getBase64EncodedApiKeyValue(response.getId(), response.getKey()) + ); ElasticsearchSecurityException e = expectThrows( ElasticsearchSecurityException.class, - () -> 
client().filterWithHeader(Collections.singletonMap("Authorization", "ApiKey " + base64ApiKeyKeyValue)) + () -> client().filterWithHeader(authorizationHeaders) .admin() .cluster() .prepareUpdateSettings() @@ -373,15 +374,12 @@ public void testInvalidateApiKeyWillClearApiKeyCache() throws IOException, Execu } // Authentication with the first key should fail - final String base64ApiKeyKeyValue = Base64.getEncoder() - .encodeToString((apiKey1.v1() + ":" + apiKey1.v2()).getBytes(StandardCharsets.UTF_8)); - ElasticsearchStatusException e = expectThrows( - ElasticsearchStatusException.class, - () -> new TestRestHighLevelClient().security() - .authenticate(RequestOptions.DEFAULT.toBuilder().addHeader("Authorization", "ApiKey " + base64ApiKeyKeyValue).build()) + ResponseException e = expectThrows( + ResponseException.class, + () -> authenticateWithApiKey(apiKey1.v1(), new SecureString(apiKey1.v2().toCharArray())) ); assertThat(e.getMessage(), containsString("security_exception")); - assertThat(e.status(), is(RestStatus.UNAUTHORIZED)); + assertThat(e.getResponse().getStatusLine().getStatusCode(), is(RestStatus.UNAUTHORIZED.getStatus())); } private void verifyInvalidateResponse( @@ -1400,14 +1398,33 @@ private Tuple createApiKeyAndAuthenticateWithIt() throws IOExcep .setMetadata(ApiKeyTests.randomMetadata()) .get(); final String docId = createApiKeyResponse.getId(); - final String base64ApiKeyKeyValue = Base64.getEncoder() - .encodeToString((docId + ":" + createApiKeyResponse.getKey().toString()).getBytes(StandardCharsets.UTF_8)); - AuthenticateResponse authResponse = new TestRestHighLevelClient().security() - .authenticate(RequestOptions.DEFAULT.toBuilder().addHeader("Authorization", "ApiKey " + base64ApiKeyKeyValue).build()); - assertEquals("api_key", authResponse.getAuthenticationType()); + authenticateWithApiKey(docId, createApiKeyResponse.getKey()); return Tuple.tuple(docId, createApiKeyResponse.getKey().toString()); } + private Map authenticateWithApiKey(String id, SecureString key) throws IOException { + final RequestOptions requestOptions = RequestOptions.DEFAULT.toBuilder() + .addHeader("Authorization", "ApiKey " + getBase64EncodedApiKeyValue(id, key)) + .build(); + final TestSecurityClient securityClient = getSecurityClient(requestOptions); + final Map response = securityClient.authenticate(); + + final String authenticationTypeString = String.valueOf(response.get(User.Fields.AUTHENTICATION_TYPE.getPreferredName())); + final Authentication.AuthenticationType authenticationType = Authentication.AuthenticationType.valueOf( + authenticationTypeString.toUpperCase(Locale.ROOT) + ); + assertThat(authenticationType, is(Authentication.AuthenticationType.API_KEY)); + + assertThat(ObjectPath.evaluate(response, "api_key.id"), is(id)); + + return response; + } + + private String getBase64EncodedApiKeyValue(String id, SecureString key) { + final String base64ApiKeyKeyValue = Base64.getEncoder().encodeToString((id + ":" + key).getBytes(StandardCharsets.UTF_8)); + return base64ApiKeyKeyValue; + } + private void assertApiKeyNotCreated(Client client, String keyName) throws ExecutionException, InterruptedException { new RefreshRequestBuilder(client, RefreshAction.INSTANCE).setIndices(SECURITY_MAIN_ALIAS).execute().get(); assertEquals( diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/TokenAuthIntegTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/TokenAuthIntegTests.java index 
5a6c01a0a99b7..97bec81cf5ef7 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/TokenAuthIntegTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/TokenAuthIntegTests.java @@ -19,8 +19,8 @@ import org.elasticsearch.action.update.UpdateRequest; import org.elasticsearch.action.update.UpdateResponse; import org.elasticsearch.client.RequestOptions; +import org.elasticsearch.client.ResponseException; import org.elasticsearch.client.RestHighLevelClient; -import org.elasticsearch.client.security.AuthenticateResponse; import org.elasticsearch.client.security.CreateTokenRequest; import org.elasticsearch.client.security.CreateTokenResponse; import org.elasticsearch.client.security.InvalidateTokenRequest; @@ -34,10 +34,12 @@ import org.elasticsearch.test.SecurityIntegTestCase; import org.elasticsearch.test.SecuritySettingsSource; import org.elasticsearch.test.SecuritySettingsSourceField; +import org.elasticsearch.test.TestSecurityClient; import org.elasticsearch.xpack.core.XPackSettings; import org.elasticsearch.xpack.core.security.authc.TokenMetadata; import org.elasticsearch.xpack.core.security.authc.support.UsernamePasswordToken; import org.elasticsearch.xpack.core.security.index.RestrictedIndicesNames; +import org.elasticsearch.xpack.core.security.user.User; import org.junit.After; import org.junit.Before; @@ -58,6 +60,7 @@ import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasEntry; import static org.hamcrest.Matchers.hasItem; @SuppressWarnings("removal") @@ -749,10 +752,10 @@ public void testCreateThenRefreshAsDifferentUser() throws IOException { assertNotEquals(refreshResponse.getAccessToken(), createTokenResponse.getAccessToken()); assertNotEquals(refreshResponse.getRefreshToken(), createTokenResponse.getRefreshToken()); - AuthenticateResponse response = restClient.security().authenticate(superuserOptions); + final Map authenticateResponse = getSecurityClient(superuserOptions).authenticate(); - assertEquals(SecuritySettingsSource.ES_TEST_ROOT_USER, response.getUser().getUsername()); - assertEquals("realm", response.getAuthenticationType()); + assertThat(authenticateResponse, hasEntry(User.Fields.USERNAME.getPreferredName(), SecuritySettingsSource.ES_TEST_ROOT_USER)); + assertThat(authenticateResponse, hasEntry(User.Fields.AUTHENTICATION_TYPE.getPreferredName(), "realm")); assertAuthenticateWithToken(createTokenResponse.getAccessToken(), SecuritySettingsSource.TEST_USER_NAME); assertAuthenticateWithToken(refreshResponse.getAccessToken(), SecuritySettingsSource.TEST_USER_NAME); @@ -838,31 +841,28 @@ private String generateInvalidShortAccessToken(Version version) throws Exception } private void assertAuthenticateWithToken(String accessToken, String expectedUser) throws IOException { - final RestHighLevelClient restClient = new TestRestHighLevelClient(); - AuthenticateResponse authResponse = restClient.security() - .authenticate(RequestOptions.DEFAULT.toBuilder().addHeader("Authorization", "Bearer " + accessToken).build()); - assertThat(authResponse.getUser().getUsername(), equalTo(expectedUser)); - assertThat(authResponse.getAuthenticationType(), equalTo("token")); + final TestSecurityClient securityClient = getSecurityClient(accessToken); + final Map authResponse = securityClient.authenticate(); + assertThat(authResponse, 
hasEntry(User.Fields.USERNAME.getPreferredName(), expectedUser)); + assertThat(authResponse, hasEntry(User.Fields.AUTHENTICATION_TYPE.getPreferredName(), "token")); } private void assertUnauthorizedToken(String accessToken) { - final RestHighLevelClient restClient = new TestRestHighLevelClient(); - ElasticsearchStatusException e = expectThrows( - ElasticsearchStatusException.class, - () -> restClient.security() - .authenticate(RequestOptions.DEFAULT.toBuilder().addHeader("Authorization", "Bearer " + accessToken).build()) - ); - assertThat(e.status(), equalTo(RestStatus.UNAUTHORIZED)); + final TestSecurityClient securityClient = getSecurityClient(accessToken); + ResponseException e = expectThrows(ResponseException.class, securityClient::authenticate); + assertThat(e.getResponse().getStatusLine().getStatusCode(), equalTo(RestStatus.UNAUTHORIZED.getStatus())); + } + + private TestSecurityClient getSecurityClient(String accessToken) { + return getSecurityClient(RequestOptions.DEFAULT.toBuilder().addHeader("Authorization", "Bearer " + accessToken).build()); } private RestStatus getAuthenticationResponseCode(String accessToken) throws IOException { - final RestHighLevelClient restClient = new TestRestHighLevelClient(); try { - restClient.security() - .authenticate(RequestOptions.DEFAULT.toBuilder().addHeader("Authorization", "Bearer " + accessToken).build()); + getSecurityClient(accessToken).authenticate(); return RestStatus.OK; - } catch (ElasticsearchStatusException esse) { - return esse.status(); + } catch (ResponseException esse) { + return RestStatus.fromCode(esse.getResponse().getStatusLine().getStatusCode()); } } } diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/esnative/ReservedRealmIntegTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/esnative/ReservedRealmIntegTests.java index 63cca22082a37..f73153813877e 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/esnative/ReservedRealmIntegTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/esnative/ReservedRealmIntegTests.java @@ -9,12 +9,9 @@ import org.elasticsearch.ElasticsearchSecurityException; import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; import org.elasticsearch.client.RestHighLevelClient; -import org.elasticsearch.client.security.ChangePasswordRequest; -import org.elasticsearch.client.security.RefreshPolicy; import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.test.NativeRealmIntegTestCase; -import org.elasticsearch.test.SecurityClientTestHelper; import org.elasticsearch.xpack.core.security.authc.support.Hasher; import org.elasticsearch.xpack.core.security.user.APMSystemUser; import org.elasticsearch.xpack.core.security.user.BeatsSystemUser; @@ -30,7 +27,6 @@ import java.util.List; import static java.util.Collections.singletonMap; -import static org.elasticsearch.test.SecuritySettingsSource.SECURITY_REQUEST_OPTIONS; import static org.elasticsearch.xpack.core.security.authc.support.UsernamePasswordToken.basicAuthHeaderValue; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.is; @@ -86,7 +82,6 @@ public void testAuthenticate() { * the reserved realm. 
*/ public void testAuthenticateAfterEnablingUser() throws IOException { - final RestHighLevelClient restClient = new TestRestHighLevelClient(); final List usernames = Arrays.asList( ElasticUser.NAME, KibanaUser.NAME, @@ -97,7 +92,7 @@ public void testAuthenticateAfterEnablingUser() throws IOException { RemoteMonitoringUser.NAME ); for (String username : usernames) { - SecurityClientTestHelper.setUserEnabled(getRestClient(), username, true); + getSecurityClient().setUserEnabled(username, true); ClusterHealthResponse response = client().filterWithHeader( singletonMap("Authorization", basicAuthHeaderValue(username, getReservedPassword())) @@ -126,13 +121,7 @@ public void testChangingPassword() throws IOException { assertThat(response.getClusterName(), is(cluster().getClusterName())); } - final RestHighLevelClient restClient = new TestRestHighLevelClient(); - final boolean changed = restClient.security() - .changePassword( - new ChangePasswordRequest(username, Arrays.copyOf(newPassword, newPassword.length), RefreshPolicy.IMMEDIATE), - SECURITY_REQUEST_OPTIONS - ); - assertTrue(changed); + getSecurityClient().changePassword(username, new SecureString(Arrays.copyOf(newPassword, newPassword.length))); ElasticsearchSecurityException elasticsearchSecurityException = expectThrows( ElasticsearchSecurityException.class, @@ -159,7 +148,7 @@ public void testDisablingUser() throws Exception { assertThat(response.getClusterName(), is(cluster().getClusterName())); // disable user - SecurityClientTestHelper.setUserEnabled(getRestClient(), ElasticUser.NAME, false); + getSecurityClient().setUserEnabled(ElasticUser.NAME, false); ElasticsearchSecurityException elasticsearchSecurityException = expectThrows( ElasticsearchSecurityException.class, () -> client().filterWithHeader(singletonMap("Authorization", basicAuthHeaderValue(ElasticUser.NAME, getReservedPassword()))) @@ -171,7 +160,7 @@ public void testDisablingUser() throws Exception { assertThat(elasticsearchSecurityException.getMessage(), containsString("authenticate")); // enable - SecurityClientTestHelper.setUserEnabled(getRestClient(), ElasticUser.NAME, true); + getSecurityClient().setUserEnabled(ElasticUser.NAME, true); response = client().filterWithHeader(singletonMap("Authorization", basicAuthHeaderValue(ElasticUser.NAME, getReservedPassword()))) .admin() .cluster() diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/pki/PkiAuthDelegationIntegTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/pki/PkiAuthDelegationIntegTests.java index f0749679888e5..0a94a43b92895 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/pki/PkiAuthDelegationIntegTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/pki/PkiAuthDelegationIntegTests.java @@ -9,10 +9,9 @@ import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.client.RequestOptions; +import org.elasticsearch.client.ResponseException; import org.elasticsearch.client.RestHighLevelClient; import org.elasticsearch.client.ValidationException; -import org.elasticsearch.client.security.AuthenticateResponse; -import org.elasticsearch.client.security.AuthenticateResponse.RealmInfo; import org.elasticsearch.client.security.DelegatePkiAuthenticationRequest; import org.elasticsearch.client.security.DelegatePkiAuthenticationResponse; import 
org.elasticsearch.client.security.DeleteRoleMappingRequest; @@ -21,15 +20,20 @@ import org.elasticsearch.client.security.PutRoleMappingRequest; import org.elasticsearch.client.security.RefreshPolicy; import org.elasticsearch.client.security.support.expressiondsl.fields.FieldRoleMapperExpression; -import org.elasticsearch.client.security.user.User; import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.rest.RestStatus; import org.elasticsearch.test.SecurityIntegTestCase; import org.elasticsearch.test.SecuritySettingsSource; import org.elasticsearch.test.SecuritySettingsSourceField; +import org.elasticsearch.test.TestSecurityClient; +import org.elasticsearch.test.rest.ESRestTestCase; +import org.elasticsearch.xcontent.ObjectPath; +import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xpack.core.XPackSettings; import org.elasticsearch.xpack.core.security.action.realm.ClearRealmCacheRequestBuilder; import org.elasticsearch.xpack.core.security.authc.support.Hasher; +import org.elasticsearch.xpack.core.security.user.User.Fields; import org.junit.Before; import java.io.InputStream; @@ -39,10 +43,14 @@ import java.security.cert.X509Certificate; import java.util.Arrays; import java.util.Collections; +import java.util.List; +import java.util.Map; import static org.elasticsearch.xpack.core.security.authc.support.UsernamePasswordToken.basicAuthHeaderValue; import static org.hamcrest.Matchers.containsInAnyOrder; -import static org.hamcrest.Matchers.emptyCollectionOf; +import static org.hamcrest.Matchers.empty; +import static org.hamcrest.Matchers.hasEntry; +import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.startsWith; @@ -179,15 +187,16 @@ public void testDelegateThenAuthenticate() throws Exception { // authenticate optionsBuilder = RequestOptions.DEFAULT.toBuilder(); optionsBuilder.addHeader("Authorization", "Bearer " + token); - AuthenticateResponse resp = restClient.security().authenticate(optionsBuilder.build()); - User user = resp.getUser(); - assertThat(user, is(notNullValue())); - assertThat(user.getUsername(), is("Elasticsearch Test Client")); - RealmInfo authnRealm = resp.getAuthenticationRealm(); - assertThat(authnRealm, is(notNullValue())); - assertThat(authnRealm.getName(), is("pki3")); - assertThat(authnRealm.getType(), is("pki")); - assertThat(resp.getAuthenticationType(), is("token")); + + final TestSecurityClient securityClient = getSecurityClient(optionsBuilder.build()); + final Map authenticateResponse = securityClient.authenticate(); + assertThat(authenticateResponse, hasEntry(Fields.USERNAME.getPreferredName(), "Elasticsearch Test Client")); + + Map realm = assertMap(authenticateResponse, Fields.AUTHENTICATION_REALM); + assertThat(realm, hasEntry(Fields.REALM_NAME.getPreferredName(), "pki3")); + assertThat(realm, hasEntry(Fields.REALM_TYPE.getPreferredName(), "pki")); + + assertThat(authenticateResponse, hasEntry(Fields.AUTHENTICATION_TYPE.getPreferredName(), "token")); } } } @@ -220,23 +229,25 @@ public void testTokenInvalidate() throws Exception { // authenticate optionsBuilder = RequestOptions.DEFAULT.toBuilder(); optionsBuilder.addHeader("Authorization", "Bearer " + token); - AuthenticateResponse resp = restClient.security().authenticate(optionsBuilder.build()); - User user = resp.getUser(); - assertThat(user, is(notNullValue())); - assertThat(user.getUsername(), 
is("Elasticsearch Test Client")); - assertThat(user.getMetadata().get("pki_dn"), is(notNullValue())); - assertThat(user.getMetadata().get("pki_dn"), is("O=org, OU=Elasticsearch, CN=Elasticsearch Test Client")); - assertThat(user.getMetadata().get("pki_delegated_by_user"), is(notNullValue())); - assertThat(user.getMetadata().get("pki_delegated_by_user"), is(delegateeUsername)); - assertThat(user.getMetadata().get("pki_delegated_by_realm"), is(notNullValue())); - assertThat(user.getMetadata().get("pki_delegated_by_realm"), is("file")); + final TestSecurityClient securityClient = getSecurityClient(optionsBuilder.build()); + final Map authenticateResponse = securityClient.authenticate(); + assertThat(authenticateResponse, hasEntry(Fields.USERNAME.getPreferredName(), "Elasticsearch Test Client")); + + final Map metadata = assertMap(authenticateResponse, Fields.METADATA); + assertThat(metadata, hasEntry("pki_dn", "O=org, OU=Elasticsearch, CN=Elasticsearch Test Client")); + assertThat(metadata, hasEntry("pki_delegated_by_user", delegateeUsername)); + assertThat(metadata, hasEntry("pki_delegated_by_realm", "file")); + // no roles because no role mappings - assertThat(user.getRoles(), is(emptyCollectionOf(String.class))); - RealmInfo authnRealm = resp.getAuthenticationRealm(); - assertThat(authnRealm, is(notNullValue())); - assertThat(authnRealm.getName(), is("pki3")); - assertThat(authnRealm.getType(), is("pki")); - assertThat(resp.getAuthenticationType(), is("token")); + List roles = assertList(authenticateResponse, Fields.ROLES); + assertThat(roles, empty()); + + Map realm = assertMap(authenticateResponse, Fields.AUTHENTICATION_REALM); + assertThat(realm, hasEntry(Fields.REALM_NAME.getPreferredName(), "pki3")); + assertThat(realm, hasEntry(Fields.REALM_TYPE.getPreferredName(), "pki")); + + assertThat(authenticateResponse, hasEntry(Fields.AUTHENTICATION_TYPE.getPreferredName(), "token")); + // invalidate InvalidateTokenRequest invalidateRequest = InvalidateTokenRequest.accessToken(token); optionsBuilder = RequestOptions.DEFAULT.toBuilder(); @@ -248,12 +259,19 @@ public void testTokenInvalidate() throws Exception { assertThat(invalidateResponse.getInvalidatedTokens(), is(1)); assertThat(invalidateResponse.getErrorsCount(), is(0)); // failed authenticate - ElasticsearchStatusException e1 = expectThrows( - ElasticsearchStatusException.class, - () -> restClient.security() - .authenticate(RequestOptions.DEFAULT.toBuilder().addHeader("Authorization", "Bearer " + token).build()) + ResponseException ex = expectThrows( + ResponseException.class, + () -> new TestSecurityClient( + getRestClient(), + RequestOptions.DEFAULT.toBuilder().addHeader("Authorization", "Bearer " + token).build() + ).authenticate() ); - assertThat(e1.getMessage(), is("Elasticsearch exception [type=security_exception, reason=token expired]")); + + assertThat(ex.getResponse().getStatusLine().getStatusCode(), is(RestStatus.UNAUTHORIZED.getStatus())); + + final Map response = ESRestTestCase.entityAsMap(ex.getResponse()); + assertThat(ObjectPath.eval("error.type", response), is("security_exception")); + assertThat(ObjectPath.eval("error.reason", response), is("token expired")); } } @@ -336,26 +354,27 @@ public void testDelegatePkiWithRoleMapping() throws Exception { DelegatePkiAuthenticationResponse delegatePkiResponse = restClient.security() .delegatePkiAuthentication(delegatePkiRequest, testUserOptions); // authenticate - AuthenticateResponse resp = restClient.security() - .authenticate( - 
RequestOptions.DEFAULT.toBuilder().addHeader("Authorization", "Bearer " + delegatePkiResponse.getAccessToken()).build() - ); - User user = resp.getUser(); - assertThat(user, is(notNullValue())); - assertThat(user.getUsername(), is("Elasticsearch Test Client")); - assertThat(user.getMetadata().get("pki_dn"), is(notNullValue())); - assertThat(user.getMetadata().get("pki_dn"), is("O=org, OU=Elasticsearch, CN=Elasticsearch Test Client")); - assertThat(user.getMetadata().get("pki_delegated_by_user"), is(notNullValue())); - assertThat(user.getMetadata().get("pki_delegated_by_user"), is("test_user")); - assertThat(user.getMetadata().get("pki_delegated_by_realm"), is(notNullValue())); - assertThat(user.getMetadata().get("pki_delegated_by_realm"), is("file")); + TestSecurityClient securityClient = getSecurityClient( + RequestOptions.DEFAULT.toBuilder().addHeader("Authorization", "Bearer " + delegatePkiResponse.getAccessToken()).build() + ); + final Map authenticateResponse = securityClient.authenticate(); + assertThat(authenticateResponse, hasEntry(Fields.USERNAME.getPreferredName(), "Elasticsearch Test Client")); + + final Map metadata = assertMap(authenticateResponse, Fields.METADATA); + assertThat(metadata, hasEntry("pki_dn", "O=org, OU=Elasticsearch, CN=Elasticsearch Test Client")); + assertThat(metadata, hasEntry("pki_delegated_by_user", "test_user")); + assertThat(metadata, hasEntry("pki_delegated_by_realm", "file")); + // assert roles - assertThat(user.getRoles(), containsInAnyOrder("role_by_delegated_user", "role_by_delegated_realm")); - RealmInfo authnRealm = resp.getAuthenticationRealm(); - assertThat(authnRealm, is(notNullValue())); - assertThat(authnRealm.getName(), is("pki3")); - assertThat(authnRealm.getType(), is("pki")); - assertThat(resp.getAuthenticationType(), is("token")); + List roles = assertList(authenticateResponse, Fields.ROLES); + assertThat(roles, containsInAnyOrder("role_by_delegated_user", "role_by_delegated_realm")); + + Map realm = assertMap(authenticateResponse, Fields.AUTHENTICATION_REALM); + assertThat(realm, hasEntry(Fields.REALM_NAME.getPreferredName(), "pki3")); + assertThat(realm, hasEntry(Fields.REALM_TYPE.getPreferredName(), "pki")); + + assertThat(authenticateResponse, hasEntry(Fields.AUTHENTICATION_TYPE.getPreferredName(), "token")); + // delete role mappings for delegated PKI restClient.security() .deleteRoleMapping(new DeleteRoleMappingRequest("role_by_delegated_user", RefreshPolicy.IMMEDIATE), testUserOptions); @@ -364,6 +383,13 @@ public void testDelegatePkiWithRoleMapping() throws Exception { } } + private Object evaluate(Map map, ParseField... 
fields) { + for (int i = 0; i < fields.length - 1; i++) { + map = assertMap(map, fields[i]); + } + return map.get(fields[fields.length - 1]); + } + public void testIncorrectCertChain() throws Exception { X509Certificate clientCertificate = readCertForPkiDelegation("testClient.crt"); X509Certificate intermediateCA = readCertForPkiDelegation("testIntermediateCA.crt"); @@ -417,4 +443,16 @@ private X509Certificate readCertForPkiDelegation(String certName) throws Excepti } } + @SuppressWarnings("unchecked") + private Map assertMap(Map map, ParseField field) { + final Object val = map.get(field.getPreferredName()); + assertThat("Field " + field + " of " + map, val, instanceOf(Map.class)); + return (Map) val; + } + + private List assertList(Map map, ParseField field) { + final Object val = map.get(field.getPreferredName()); + assertThat("Field " + field + " of " + map, val, instanceOf(List.class)); + return (List) val; + } } diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authz/SnapshotUserRoleIntegTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authz/SnapshotUserRoleIntegTests.java index 5fb12ce51946c..b924e0757dc40 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authz/SnapshotUserRoleIntegTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authz/SnapshotUserRoleIntegTests.java @@ -17,10 +17,7 @@ import org.elasticsearch.client.internal.Client; import org.elasticsearch.client.security.DeleteRoleRequest; import org.elasticsearch.client.security.PutRoleRequest; -import org.elasticsearch.client.security.PutUserRequest; -import org.elasticsearch.client.security.PutUserResponse; import org.elasticsearch.client.security.RefreshPolicy; -import org.elasticsearch.client.security.user.User; import org.elasticsearch.client.security.user.privileges.Role; import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.settings.Settings; @@ -31,7 +28,6 @@ import java.io.IOException; import java.util.Arrays; import java.util.Collections; -import java.util.List; import java.util.Locale; import static org.elasticsearch.test.SecuritySettingsSource.SECURITY_REQUEST_OPTIONS; @@ -72,12 +68,7 @@ public void setupClusterBeforeSnapshot() throws IOException { final char[] password = new char[] { 'p', 'a', 's', 's', 'w', 'o', 'r', 'd' }; final String snapshotUserToken = basicAuthHeaderValue(user, new SecureString(password)); client = client().filterWithHeader(Collections.singletonMap("Authorization", snapshotUserToken)); - PutUserResponse response = new TestRestHighLevelClient().security() - .putUser( - PutUserRequest.withPassword(new User(user, List.of("snapshot_user")), password, true, RefreshPolicy.IMMEDIATE), - SECURITY_REQUEST_OPTIONS - ); - assertTrue(response.isCreated()); + getSecurityClient().putUser(new org.elasticsearch.xpack.core.security.user.User(user, "snapshot_user"), new SecureString(password)); ensureGreen(INTERNAL_SECURITY_MAIN_INDEX_7); } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/test/SecurityClientTestHelper.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/test/SecurityClientTestHelper.java deleted file mode 100644 index 5aa0df18e80d1..0000000000000 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/test/SecurityClientTestHelper.java +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Copyright Elasticsearch B.V. 
and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -package org.elasticsearch.test; - -import org.apache.http.client.methods.HttpPut; -import org.elasticsearch.client.Request; -import org.elasticsearch.client.RestClient; - -import java.io.IOException; - -public class SecurityClientTestHelper { - - public static void setUserEnabled(RestClient client, String username, boolean enabled) throws IOException { - final String endpoint = "/_security/user/" + username + "/" + (enabled ? "_enable" : "_disable"); - final Request request = new Request(HttpPut.METHOD_NAME, endpoint); - request.setOptions(SecuritySettingsSource.SECURITY_REQUEST_OPTIONS); - client.performRequest(request); - } -} diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/test/SecurityIntegTestCase.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/test/SecurityIntegTestCase.java index 466b85a839ffe..3b327240069a0 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/test/SecurityIntegTestCase.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/test/SecurityIntegTestCase.java @@ -17,6 +17,7 @@ import org.elasticsearch.action.admin.indices.get.GetIndexRequest; import org.elasticsearch.action.admin.indices.get.GetIndexResponse; import org.elasticsearch.action.support.IndicesOptions; +import org.elasticsearch.client.RequestOptions; import org.elasticsearch.client.RestHighLevelClient; import org.elasticsearch.client.internal.Client; import org.elasticsearch.client.internal.node.NodeClient; @@ -75,6 +76,7 @@ public abstract class SecurityIntegTestCase extends ESIntegTestCase { * to how {@link ESIntegTestCase#nodeSettings(int, Settings)} works. */ private static CustomSecuritySettingsSource customSecuritySettingsSource = null; + private TestSecurityClient securityClient; @BeforeClass public static void generateBootstrapPassword() { @@ -469,4 +471,15 @@ public TestRestHighLevelClient() { super(getRestClient(), client -> {}, List.of()); } } + + protected TestSecurityClient getSecurityClient(RequestOptions requestOptions) { + return new TestSecurityClient(getRestClient(), requestOptions); + } + + protected TestSecurityClient getSecurityClient() { + if (securityClient == null) { + securityClient = getSecurityClient(SecuritySettingsSource.SECURITY_REQUEST_OPTIONS); + } + return securityClient; + } } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/test/TestSecurityClient.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/test/TestSecurityClient.java new file mode 100644 index 0000000000000..2530c4d1675d4 --- /dev/null +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/test/TestSecurityClient.java @@ -0,0 +1,119 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */
+
+package org.elasticsearch.test;
+
+import org.apache.http.client.methods.HttpDelete;
+import org.apache.http.client.methods.HttpGet;
+import org.apache.http.client.methods.HttpPost;
+import org.apache.http.client.methods.HttpPut;
+import org.elasticsearch.client.Request;
+import org.elasticsearch.client.RequestOptions;
+import org.elasticsearch.client.Response;
+import org.elasticsearch.client.RestClient;
+import org.elasticsearch.common.bytes.BytesReference;
+import org.elasticsearch.common.settings.SecureString;
+import org.elasticsearch.xcontent.XContentBuilder;
+import org.elasticsearch.xcontent.XContentFactory;
+import org.elasticsearch.xpack.core.security.user.User;
+
+import java.io.IOException;
+import java.util.Map;
+
+import static org.elasticsearch.test.rest.ESRestTestCase.entityAsMap;
+
+public class TestSecurityClient {
+
+    private final RestClient client;
+    private final RequestOptions options;
+
+    public TestSecurityClient(RestClient client) {
+        this(client, RequestOptions.DEFAULT);
+    }
+
+    public TestSecurityClient(RestClient client, RequestOptions options) {
+        this.client = client;
+        this.options = options;
+    }
+
+    /**
+     * Uses the REST API to retrieve the currently authenticated user.
+     * @see User.Fields
+     * @see org.elasticsearch.xpack.security.rest.action.RestAuthenticateAction
+     */
+    public Map<String, Object> authenticate() throws IOException {
+        final String endpoint = "/_security/_authenticate";
+        final Request request = new Request(HttpGet.METHOD_NAME, endpoint);
+        return entityAsMap(execute(request));
+    }
+
+    /**
+     * Uses the REST API to create a new user in the native realm.
+     * @see org.elasticsearch.xpack.security.rest.action.user.RestPutUserAction
+     */
+    public void putUser(User user, SecureString password) throws IOException {
+        final String endpoint = "/_security/user/" + user.principal();
+        final Request request = new Request(HttpPut.METHOD_NAME, endpoint);
+        final Map<String, Object> map = XContentTestUtils.convertToMap(user);
+        if (password != null) {
+            map.put("password", password.toString());
+        }
+        final String body = toJson(map);
+        request.setJsonEntity(body);
+        request.addParameters(Map.of("refresh", "true"));
+        execute(request);
+    }
+
+    /**
+     * Uses the REST API to delete a user from the native realm.
+     * @see org.elasticsearch.xpack.security.rest.action.user.RestDeleteUserAction
+     */
+    public void deleteUser(String username) throws IOException {
+        final String endpoint = "/_security/user/" + username;
+        final Request request = new Request(HttpDelete.METHOD_NAME, endpoint);
+        request.addParameters(Map.of("refresh", "true"));
+        execute(request);
+    }
+
+    /**
+     * Uses the REST API to change the password of a user in the native/reserved realms.
+     * @see org.elasticsearch.xpack.security.rest.action.user.RestChangePasswordAction
+     */
+    public void changePassword(String username, SecureString password) throws IOException {
+        final String endpoint = "/_security/user/" + username + "/_password";
+        final Request request = new Request(HttpPost.METHOD_NAME, endpoint);
+        final String body = """
+            {
+                "password": "%s"
+            }
+            """.formatted(password.toString());
+        request.setJsonEntity(body);
+        execute(request);
+    }
+
+    /**
+     * Uses the REST API to enable or disable a user in the native/reserved realm.
+     * @see org.elasticsearch.xpack.security.rest.action.user.RestSetEnabledAction
+     */
+    public void setUserEnabled(String username, boolean enabled) throws IOException {
+        final String endpoint = "/_security/user/" + username + "/" + (enabled ?
"_enable" : "_disable"); + final Request request = new Request(HttpPut.METHOD_NAME, endpoint); + execute(request); + } + + private static String toJson(Map map) throws IOException { + final XContentBuilder builder = XContentFactory.jsonBuilder().map(map); + final BytesReference bytes = BytesReference.bytes(builder); + return bytes.utf8ToString(); + } + + private Response execute(Request request) throws IOException { + request.setOptions(options); + return this.client.performRequest(request); + } + +} diff --git a/x-pack/qa/security-example-spi-extension/src/javaRestTest/java/org/elasticsearch/example/role/CustomRolesProviderIT.java b/x-pack/qa/security-example-spi-extension/src/javaRestTest/java/org/elasticsearch/example/role/CustomRolesProviderIT.java index 8b997decdf5cf..963e7e5341ee5 100644 --- a/x-pack/qa/security-example-spi-extension/src/javaRestTest/java/org/elasticsearch/example/role/CustomRolesProviderIT.java +++ b/x-pack/qa/security-example-spi-extension/src/javaRestTest/java/org/elasticsearch/example/role/CustomRolesProviderIT.java @@ -6,14 +6,12 @@ */ package org.elasticsearch.example.role; +import org.apache.http.client.methods.HttpPut; import org.elasticsearch.client.Request; import org.elasticsearch.client.RequestOptions; import org.elasticsearch.client.Response; import org.elasticsearch.client.ResponseException; import org.elasticsearch.client.RestHighLevelClient; -import org.elasticsearch.client.security.PutUserRequest; -import org.elasticsearch.client.security.RefreshPolicy; -import org.elasticsearch.client.security.user.User; import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; @@ -24,7 +22,7 @@ import java.io.IOException; import java.util.Collections; -import java.util.List; +import java.util.Map; import static org.elasticsearch.example.role.CustomInMemoryRolesProvider.INDEX; import static org.elasticsearch.example.role.CustomInMemoryRolesProvider.ROLE_A; @@ -58,11 +56,19 @@ protected Settings restClientSettings() { } public void setupTestUser(String role) throws IOException { - new TestRestHighLevelClient().security() - .putUser( - PutUserRequest.withPassword(new User(TEST_USER, List.of(role)), TEST_PWD.toCharArray(), true, RefreshPolicy.IMMEDIATE), - RequestOptions.DEFAULT - ); + final String endpoint = "/_security/user/" + TEST_USER; + Request request = new Request(HttpPut.METHOD_NAME, endpoint); + final String body = """ + { + "username": "%s", + "password": "%s", + "roles": [ "%s" ] + } + """.formatted(TEST_USER, TEST_PWD, role); + request.setJsonEntity(body); + request.addParameters(Map.of("refresh", "true")); + request.setOptions(RequestOptions.DEFAULT); + adminClient().performRequest(request); } public void testAuthorizedCustomRoleSucceeds() throws Exception { From 7d094c3e5a1a3b4b3dd122122654f6384bdf7325 Mon Sep 17 00:00:00 2001 From: Gordon Brown Date: Wed, 16 Feb 2022 18:04:00 -0700 Subject: [PATCH 138/167] Correct documentation regarding how to restore no `feature_states` (#83814) This commit corrects the snapshot creation and restoration docs to describe the usage of `"none"` to restore no feature states. Previously, they incorrectly stated that using an empty array would accomplish this, but specifying an empty array results in the default behavior (rather than preventing feature state snapshot/restoration). 
---
 .../snapshot-restore/apis/create-snapshot-api.asciidoc   | 5 +++--
 .../snapshot-restore/apis/restore-snapshot-api.asciidoc  | 6 ++++--
 2 files changed, 7 insertions(+), 4 deletions(-)

diff --git a/docs/reference/snapshot-restore/apis/create-snapshot-api.asciidoc b/docs/reference/snapshot-restore/apis/create-snapshot-api.asciidoc
index 7a134363b3c1b..3242fd2ee80f0 100644
--- a/docs/reference/snapshot-restore/apis/create-snapshot-api.asciidoc
+++ b/docs/reference/snapshot-restore/apis/create-snapshot-api.asciidoc
@@ -132,8 +132,9 @@ If `include_global_state` is `true`, the snapshot includes all feature states
 by default. If `include_global_state` is `false`, the snapshot includes no
 feature states by default.
 +
-To exclude all feature states, regardless of the `include_global_state` value,
-specify an empty array (`[]`) or `none`.
+Note that specifying an empty array will result in the default behavior. To
+exclude all feature states, regardless of the `include_global_state` value,
+specify an array with only the value `none` (`["none"]`).
 
 `metadata`::
 (Optional, object)
diff --git a/docs/reference/snapshot-restore/apis/restore-snapshot-api.asciidoc b/docs/reference/snapshot-restore/apis/restore-snapshot-api.asciidoc
index e86434ac7a260..5c1b38e779880 100644
--- a/docs/reference/snapshot-restore/apis/restore-snapshot-api.asciidoc
+++ b/docs/reference/snapshot-restore/apis/restore-snapshot-api.asciidoc
@@ -182,8 +182,10 @@ state then the restore request will fail.
 +
 If `include_global_state` is `true`, the request restores all feature states in
 the snapshot by default. If `include_global_state` is `false`, the request
-restores no feature states by default. To restore no feature states, regardless
-of the `include_global_state` value, specify an empty array (`[]`).
+restores no feature states by default. Note that specifying an empty array
+will result in the default behavior. To restore no feature states, regardless
+of the `include_global_state` value, specify an array containing only the value
+`none` (`["none"]`).
 
 [[restore-snapshot-api-index-settings]]
 `index_settings`::

From fb65f95757dde90223a713c4a51da17c58c92751 Mon Sep 17 00:00:00 2001
From: Yang Wang
Date: Thu, 17 Feb 2022 15:52:43 +1100
Subject: [PATCH 139/167] Improve BWC for persisted authentication headers
 (#83913)

Authentication headers are persisted as part of a task definition,
including ML jobs, CCR following, etc. The persistence process stores
them into either an index or the cluster state. In both cases, the
headers are retrieved from ThreadContext as a string, which is the
serialised form of the Authentication object. This string is always
serialised with the node's version.

The problem is: in a mixed cluster, a task can be created on a newer
node and persisted into an index, but then needs to be loaded by an
older node. The older node does not understand the newer format of the
serialised Authentication object and hence errors out on reading it.

This PR adds additional logic in places where the headers are
persisted. It compares the Authentication version with minNodeVersion
and rewrites it if the minNodeVersion is older. Since we already filter
security headers in places where headers are persisted, the new logic
is hooked into the same places and is essentially another enhancement
to how security headers are handled for persisted tasks.
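The gist of the rewrite, condensed from the ClientHelper change in this
diff (authenticationReader, authenticationHeaderKey and minNodeVersion
are the names used there):

    // If the header was serialised by a node newer than the oldest node
    // in the cluster, re-serialise it at that older version so that any
    // node can read it back.
    final Authentication authentication = authenticationReader.apply(authenticationHeaderKey);
    if (authentication != null && authentication.getVersion().after(minNodeVersion)) {
        return authentication.maybeRewriteForOlderVersion(minNodeVersion).encode();
    }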
Resolves: #83567 --- docs/changelog/83913.yaml | 6 ++ .../TransportSubmitAsyncSearchAction.java | 7 +- .../xpack/ccr/CcrLicenseChecker.java | 4 +- .../ccr/action/AutoFollowCoordinator.java | 2 +- .../ccr/action/ShardFollowTasksExecutor.java | 7 +- .../TransportPutAutoFollowPatternAction.java | 5 +- .../ccr/action/TransportPutFollowAction.java | 6 +- .../action/TransportResumeFollowAction.java | 5 +- .../xpack/core/ClientHelper.java | 93 +++++++++++++++++++ .../core/ml/datafeed/DatafeedUpdate.java | 8 +- .../xpack/core/ClientHelperTests.java | 93 +++++++++++++++++++ .../core/ml/datafeed/DatafeedUpdateTests.java | 34 +++++-- .../action/TransportPutLifecycleAction.java | 2 +- .../TransportPutSnapshotLifecycleAction.java | 2 +- .../integration/DataFrameAnalyticsCRUDIT.java | 3 +- .../DataFrameAnalyticsConfigProviderIT.java | 14 ++- .../integration/DatafeedConfigProviderIT.java | 28 ++++-- .../ml/integration/MlAutoUpdateServiceIT.java | 3 +- .../xpack/ml/MachineLearning.java | 5 +- ...nsportExplainDataFrameAnalyticsAction.java | 4 +- ...nsportPreviewDataFrameAnalyticsAction.java | 10 +- .../TransportPreviewDatafeedAction.java | 6 +- .../persistence/DatafeedConfigProvider.java | 13 ++- .../DataFrameAnalyticsConfigProvider.java | 20 +++- .../ql/async/AsyncTaskManagementService.java | 5 +- .../action/TransportPutRollupJobAction.java | 8 +- .../TransportPreviewTransformAction.java | 2 +- .../action/TransportPutTransformAction.java | 5 +- .../TransportUpdateTransformAction.java | 5 +- .../actions/TransportExecuteWatchAction.java | 9 +- .../actions/TransportPutWatchAction.java | 11 ++- .../actions/TransportPutWatchActionTests.java | 28 +++++- 32 files changed, 386 insertions(+), 67 deletions(-) create mode 100644 docs/changelog/83913.yaml diff --git a/docs/changelog/83913.yaml b/docs/changelog/83913.yaml new file mode 100644 index 0000000000000..65bf661793383 --- /dev/null +++ b/docs/changelog/83913.yaml @@ -0,0 +1,6 @@ +pr: 83913 +summary: Improve BWC for persisted authentication headers +area: Authentication +type: enhancement +issues: + - 83567 diff --git a/x-pack/plugin/async-search/src/main/java/org/elasticsearch/xpack/search/TransportSubmitAsyncSearchAction.java b/x-pack/plugin/async-search/src/main/java/org/elasticsearch/xpack/search/TransportSubmitAsyncSearchAction.java index b7d6240be0dc2..1c9e40f4cfda1 100644 --- a/x-pack/plugin/async-search/src/main/java/org/elasticsearch/xpack/search/TransportSubmitAsyncSearchAction.java +++ b/x-pack/plugin/async-search/src/main/java/org/elasticsearch/xpack/search/TransportSubmitAsyncSearchAction.java @@ -43,6 +43,7 @@ import static org.elasticsearch.xpack.core.ClientHelper.ASYNC_SEARCH_ORIGIN; public class TransportSubmitAsyncSearchAction extends HandledTransportAction { + private final ClusterService clusterService; private final NodeClient nodeClient; private final BiFunction, SearchRequest, AggregationReduceContext> requestToAggReduceContextBuilder; private final TransportSearchAction searchAction; @@ -62,6 +63,7 @@ public TransportSubmitAsyncSearchAction( BigArrays bigArrays ) { super(SubmitAsyncSearchAction.NAME, transportService, actionFilters, SubmitAsyncSearchRequest::new); + this.clusterService = clusterService; this.nodeClient = nodeClient; this.requestToAggReduceContextBuilder = (task, request) -> searchService.aggReduceContextBuilder(task, request).forFinalReduction(); this.searchAction = searchAction; @@ -144,7 +146,10 @@ public void onFailure(Exception exc) { private SearchRequest createSearchRequest(SubmitAsyncSearchRequest request, Task 
submitTask, TimeValue keepAlive) { String docID = UUIDs.randomBase64UUID(); - Map originHeaders = ClientHelper.filterSecurityHeaders(nodeClient.threadPool().getThreadContext().getHeaders()); + Map originHeaders = ClientHelper.getPersistableSafeSecurityHeaders( + nodeClient.threadPool().getThreadContext(), + clusterService.state() + ); SearchRequest searchRequest = new SearchRequest(request.getSearchRequest()) { @Override public AsyncSearchTask createTask(long id, String type, String action, TaskId parentTaskId, Map taskHeaders) { diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/CcrLicenseChecker.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/CcrLicenseChecker.java index 9448a8898b4e6..c9af16f1d4542 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/CcrLicenseChecker.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/CcrLicenseChecker.java @@ -393,11 +393,11 @@ User getUser(final Client remoteClient) { return securityContext.getUser(); } - public static Client wrapClient(Client client, Map headers) { + public static Client wrapClient(Client client, Map headers, ClusterState clusterState) { if (headers.isEmpty()) { return client; } else { - Map filteredHeaders = ClientHelper.filterSecurityHeaders(headers); + Map filteredHeaders = ClientHelper.getPersistableSafeSecurityHeaders(headers, clusterState); if (filteredHeaders.isEmpty()) { return client; } diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/AutoFollowCoordinator.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/AutoFollowCoordinator.java index 29a267f6fd78a..7c85c7afb72f0 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/AutoFollowCoordinator.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/AutoFollowCoordinator.java @@ -304,7 +304,7 @@ void createAndFollow( Runnable successHandler, Consumer failureHandler ) { - Client followerClient = CcrLicenseChecker.wrapClient(client, headers); + Client followerClient = CcrLicenseChecker.wrapClient(client, headers, clusterService.state()); followerClient.execute( PutFollowAction.INSTANCE, request, diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardFollowTasksExecutor.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardFollowTasksExecutor.java index 2fcc177798b0e..fe977216b4c0c 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardFollowTasksExecutor.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/ShardFollowTasksExecutor.java @@ -150,7 +150,7 @@ protected AllocatedPersistentTask createTask( Map headers ) { ShardFollowTask params = taskInProgress.getParams(); - Client followerClient = wrapClient(client, params.getHeaders()); + Client followerClient = wrapClient(client, params.getHeaders(), clusterService.state()); BiConsumer scheduler = (delay, command) -> threadPool.scheduleUnlessShuttingDown( delay, Ccr.CCR_THREAD_POOL_NAME, @@ -562,7 +562,8 @@ private String getLeaderShardHistoryUUID(ShardFollowTask params) { } private Client remoteClient(ShardFollowTask params) { - return wrapClient(client.getRemoteClusterClient(params.getRemoteCluster()), params.getHeaders()); + // TODO: do we need minNodeVersion here since it is for remote cluster + return wrapClient(client.getRemoteClusterClient(params.getRemoteCluster()), params.getHeaders(), clusterService.state()); } interface 
FollowerStatsInfoHandler { @@ -571,7 +572,7 @@ interface FollowerStatsInfoHandler { @Override protected void nodeOperation(final AllocatedPersistentTask task, final ShardFollowTask params, final PersistentTaskState state) { - Client followerClient = wrapClient(client, params.getHeaders()); + Client followerClient = wrapClient(client, params.getHeaders(), clusterService.state()); ShardFollowNodeTask shardFollowNodeTask = (ShardFollowNodeTask) task; logger.info("{} Starting to track leader shard {}", params.getFollowShardId(), params.getLeaderShardId()); diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportPutAutoFollowPatternAction.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportPutAutoFollowPatternAction.java index b4fbe29d2a45e..fa636eeb5d5c8 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportPutAutoFollowPatternAction.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportPutAutoFollowPatternAction.java @@ -95,7 +95,10 @@ protected void masterOperation( return; } final Client remoteClient = client.getRemoteClusterClient(request.getRemoteCluster()); - final Map filteredHeaders = ClientHelper.filterSecurityHeaders(threadPool.getThreadContext().getHeaders()); + final Map filteredHeaders = ClientHelper.getPersistableSafeSecurityHeaders( + threadPool.getThreadContext(), + clusterService.state() + ); Consumer consumer = remoteClusterState -> { String[] indices = request.getLeaderIndexPatterns().toArray(new String[0]); diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportPutFollowAction.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportPutFollowAction.java index e6053ce1ff818..d37dd4b237873 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportPutFollowAction.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportPutFollowAction.java @@ -197,7 +197,11 @@ private void createFollowerIndex( .masterNodeTimeout(request.masterNodeTimeout()) .indexSettings(overrideSettings); - final Client clientWithHeaders = CcrLicenseChecker.wrapClient(this.client, threadPool.getThreadContext().getHeaders()); + final Client clientWithHeaders = CcrLicenseChecker.wrapClient( + this.client, + threadPool.getThreadContext().getHeaders(), + clusterService.state() + ); threadPool.executor(ThreadPool.Names.SNAPSHOT).execute(new AbstractRunnable() { @Override diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportResumeFollowAction.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportResumeFollowAction.java index f1367a29b31fc..bd89f833a4ad9 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportResumeFollowAction.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportResumeFollowAction.java @@ -179,7 +179,10 @@ void start( validate(request, leaderIndexMetadata, followIndexMetadata, leaderIndexHistoryUUIDs, mapperService); final int numShards = followIndexMetadata.getNumberOfShards(); final ResponseHandler handler = new ResponseHandler(numShards, listener); - Map filteredHeaders = ClientHelper.filterSecurityHeaders(threadPool.getThreadContext().getHeaders()); + Map filteredHeaders = ClientHelper.getPersistableSafeSecurityHeaders( + threadPool.getThreadContext(), + clusterService.state() + ); for (int shardId = 0; shardId < 
numShards; shardId++) { String taskId = followIndexMetadata.getIndexUUID() + "-" + shardId; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ClientHelper.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ClientHelper.java index 08e68ed94097f..d1a1791998bcf 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ClientHelper.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ClientHelper.java @@ -6,6 +6,7 @@ */ package org.elasticsearch.xpack.core; +import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionRequestBuilder; @@ -14,12 +15,19 @@ import org.elasticsearch.action.support.ContextPreservingActionListener; import org.elasticsearch.client.internal.Client; import org.elasticsearch.client.internal.OriginSettingClient; +import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.common.util.set.Sets; +import org.elasticsearch.core.CheckedFunction; +import org.elasticsearch.xpack.core.security.authc.Authentication; import org.elasticsearch.xpack.core.security.authc.AuthenticationField; import org.elasticsearch.xpack.core.security.authc.AuthenticationServiceField; +import org.elasticsearch.xpack.core.security.authc.support.AuthenticationContextSerializer; import org.elasticsearch.xpack.core.security.authc.support.SecondaryAuthentication; +import java.io.IOException; +import java.io.UncheckedIOException; +import java.util.HashMap; import java.util.Map; import java.util.Objects; import java.util.Set; @@ -77,6 +85,89 @@ public static Map filterSecurityHeaders(Map head } } + /** + * In addition to {@link #filterSecurityHeaders}, also check the version of Authentication objects + * and rewrite them using minNodeVersion so that they are safe to be persisted as index data + * and loaded by all nodes in the cluster. + */ + public static Map getPersistableSafeSecurityHeaders(ThreadContext threadContext, ClusterState clusterState) { + return maybeRewriteAuthenticationHeadersForVersion( + filterSecurityHeaders(threadContext.getHeaders()), + key -> new AuthenticationContextSerializer(key).readFromContext(threadContext), + clusterState.nodes().getMinNodeVersion() + ); + } + + /** + * Similar to {@link #getPersistableSafeSecurityHeaders(ThreadContext, ClusterState)}, + * but works on a Map of headers instead of ThreadContext. + */ + public static Map getPersistableSafeSecurityHeaders(Map headers, ClusterState clusterState) { + final CheckedFunction authenticationReader = key -> { + final String authHeader = headers.get(key); + return authHeader == null ? 
null : AuthenticationContextSerializer.decode(authHeader);
+        };
+        return maybeRewriteAuthenticationHeadersForVersion(
+            filterSecurityHeaders(headers),
+            authenticationReader,
+            clusterState.nodes().getMinNodeVersion()
+        );
+    }
+
+    private static Map<String, String> maybeRewriteAuthenticationHeadersForVersion(
+        Map<String, String> filteredHeaders,
+        CheckedFunction<String, Authentication, IOException> authenticationReader,
+        Version minNodeVersion
+    ) {
+        Map<String, String> newHeaders = null;
+
+        final String authHeader = maybeRewriteSingleAuthenticationHeaderForVersion(
+            authenticationReader,
+            AuthenticationField.AUTHENTICATION_KEY,
+            minNodeVersion
+        );
+        if (authHeader != null) {
+            newHeaders = new HashMap<>();
+            newHeaders.put(AuthenticationField.AUTHENTICATION_KEY, authHeader);
+        }
+
+        final String secondaryHeader = maybeRewriteSingleAuthenticationHeaderForVersion(
+            authenticationReader,
+            SecondaryAuthentication.THREAD_CTX_KEY,
+            minNodeVersion
+        );
+        if (secondaryHeader != null) {
+            if (newHeaders == null) {
+                newHeaders = new HashMap<>();
+            }
+            newHeaders.put(SecondaryAuthentication.THREAD_CTX_KEY, secondaryHeader);
+        }
+
+        if (newHeaders != null) {
+            final HashMap<String, String> mutableHeaders = new HashMap<>(filteredHeaders);
+            mutableHeaders.putAll(newHeaders);
+            return Map.copyOf(mutableHeaders);
+        } else {
+            return filteredHeaders;
+        }
+    }
+
+    private static String maybeRewriteSingleAuthenticationHeaderForVersion(
+        CheckedFunction<String, Authentication, IOException> authenticationReader,
+        String authenticationHeaderKey,
+        Version minNodeVersion
+    ) {
+        try {
+            final Authentication authentication = authenticationReader.apply(authenticationHeaderKey);
+            if (authentication != null && authentication.getVersion().after(minNodeVersion)) {
+                return authentication.maybeRewriteForOlderVersion(minNodeVersion).encode();
+            }
+        } catch (IOException e) {
+            throw new UncheckedIOException("failed to read authentication with key [" + authenticationHeaderKey + "]", e);
+        }
+        return null;
+    }
+
     /**
      * .
      * @deprecated use ThreadContext.ACTION_ORIGIN_TRANSIENT_NAME
@@ -167,6 +258,7 @@ public static <T extends ActionResponse> T executeWithHeaders(
         Client client,
         Supplier<T> supplier
     ) {
+        // No need to rewrite authentication header because it will be handled by Security Interceptor
         Map<String, String> filteredHeaders = filterSecurityHeaders(headers);
 
         // no security headers, we will have to use the xpack internal user for
@@ -206,6 +298,7 @@ public static <Request extends ActionRequest, Response extends ActionResponse> void executeWithHeadersAsync(
         Request request,
         ActionListener<Response> listener
     ) {
+        // No need to rewrite authentication header because it will be handled by Security Interceptor
         final Map<String, String> filteredHeaders = filterSecurityHeaders(headers);
         final ThreadContext threadContext = client.threadPool().getThreadContext();
         // No headers (e.g.
security not installed/in use) so execute as origin diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedUpdate.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedUpdate.java index 2541a2c5c5cad..69fc5edf111bb 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedUpdate.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedUpdate.java @@ -8,6 +8,7 @@ import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.support.IndicesOptions; +import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -22,6 +23,7 @@ import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xpack.core.ClientHelper; import org.elasticsearch.xpack.core.ml.job.config.Job; import org.elasticsearch.xpack.core.ml.job.messages.Messages; import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; @@ -38,8 +40,6 @@ import java.util.Map; import java.util.Objects; -import static org.elasticsearch.xpack.core.ClientHelper.filterSecurityHeaders; - /** * A datafeed update contains partial properties to update a {@link DatafeedConfig}. * The main difference between this class and {@link DatafeedConfig} is that here all @@ -334,7 +334,7 @@ public IndicesOptions getIndicesOptions() { * Applies the update to the given {@link DatafeedConfig} * @return a new {@link DatafeedConfig} that contains the update */ - public DatafeedConfig apply(DatafeedConfig datafeedConfig, Map headers) { + public DatafeedConfig apply(DatafeedConfig datafeedConfig, Map headers, ClusterState clusterState) { if (id.equals(datafeedConfig.getId()) == false) { throw new IllegalArgumentException("Cannot apply update to datafeedConfig with different id"); } @@ -384,7 +384,7 @@ public DatafeedConfig apply(DatafeedConfig datafeedConfig, Map h builder.setRuntimeMappings(runtimeMappings); } if (headers.isEmpty() == false) { - builder.setHeaders(filterSecurityHeaders(headers)); + builder.setHeaders(ClientHelper.getPersistableSafeSecurityHeaders(headers, clusterState)); } return builder.build(); } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ClientHelperTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ClientHelperTests.java index c272eaeb9172b..bea688e9784de 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ClientHelperTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ClientHelperTests.java @@ -6,6 +6,7 @@ */ package org.elasticsearch.xpack.core; +import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.cluster.health.ClusterHealthAction; import org.elasticsearch.action.admin.cluster.health.ClusterHealthRequest; @@ -16,16 +17,24 @@ import org.elasticsearch.action.search.ShardSearchFailure; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.client.internal.Client; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.common.collect.MapBuilder; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.Maps; import 
org.elasticsearch.common.util.concurrent.ThreadContext;
 import org.elasticsearch.search.internal.InternalSearchResponse;
 import org.elasticsearch.test.ESTestCase;
+import org.elasticsearch.test.VersionUtils;
 import org.elasticsearch.threadpool.ThreadPool;
+import org.elasticsearch.xpack.core.security.authc.Authentication;
 import org.elasticsearch.xpack.core.security.authc.AuthenticationField;
 import org.elasticsearch.xpack.core.security.authc.AuthenticationServiceField;
+import org.elasticsearch.xpack.core.security.authc.support.AuthenticationContextSerializer;
+import org.elasticsearch.xpack.core.security.authc.support.SecondaryAuthentication;
+import org.elasticsearch.xpack.core.security.user.User;
 
+import java.io.IOException;
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.Map;
@@ -37,6 +46,7 @@
 import static org.hamcrest.Matchers.anEmptyMap;
 import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.hasEntry;
+import static org.hamcrest.Matchers.hasKey;
 import static org.hamcrest.Matchers.hasSize;
 import static org.hamcrest.Matchers.is;
 import static org.hamcrest.Matchers.not;
@@ -372,4 +382,87 @@ public void testFilterSecurityHeaders() {
             expectThrows(NullPointerException.class, () -> ClientHelper.filterSecurityHeaders(null));
         }
     }
+
+    public void testGetPersistableSafeSecurityHeaders() throws IOException {
+        final ClusterState clusterState = mock(ClusterState.class);
+        final DiscoveryNodes discoveryNodes = mock(DiscoveryNodes.class);
+        when(clusterState.nodes()).thenReturn(discoveryNodes);
+        when(discoveryNodes.getMinNodeVersion()).thenReturn(VersionUtils.randomPreviousCompatibleVersion(random(), Version.CURRENT));
+        // No security header
+        ThreadContext threadContext = new ThreadContext(Settings.EMPTY);
+        final String nonSecurityHeaderKey = "not-a-security-header";
+        if (randomBoolean()) {
+            threadContext.putHeader(nonSecurityHeaderKey, randomAlphaOfLength(8));
+        }
+        assertThat(ClientHelper.getPersistableSafeSecurityHeaders(threadContext, clusterState), anEmptyMap());
+
+        final boolean hasRunAsHeader = randomBoolean();
+        if (hasRunAsHeader) {
+            threadContext.putHeader(AuthenticationServiceField.RUN_AS_USER_HEADER, "run_as_header");
+        }
+
+        final Authentication authentication = Authentication.newRealmAuthentication(
+            new User(randomAlphaOfLength(8)),
+            new Authentication.RealmRef("name", "type", "node")
+        );
+
+        final boolean hasAuthHeader = randomBoolean();
+        // There may be a secondary header
+        final boolean hasSecondaryAuthHeader = randomFrom(hasAuthHeader == false, true);
+        if (hasAuthHeader) {
+            new AuthenticationContextSerializer().writeToContext(authentication, threadContext);
+        }
+        if (hasSecondaryAuthHeader) {
+            new AuthenticationContextSerializer(SecondaryAuthentication.THREAD_CTX_KEY).writeToContext(authentication, threadContext);
+        }
+
+        // No rewriting for current version
+        when(discoveryNodes.getMinNodeVersion()).thenReturn(Version.CURRENT);
+        final Map<String, String> headers1;
+        if (randomBoolean()) {
+            headers1 = ClientHelper.getPersistableSafeSecurityHeaders(threadContext, clusterState);
+        } else {
+            headers1 = ClientHelper.getPersistableSafeSecurityHeaders(threadContext.getHeaders(), clusterState);
+        }
+        assertThat(headers1, not(hasKey(nonSecurityHeaderKey)));
+        if (hasAuthHeader) {
+            assertThat(headers1, hasKey(AuthenticationField.AUTHENTICATION_KEY));
+            assertThat(
+                headers1.get(AuthenticationField.AUTHENTICATION_KEY),
+                equalTo(threadContext.getHeader(AuthenticationField.AUTHENTICATION_KEY))
+            );
+        }
+        if (hasSecondaryAuthHeader) {
+ assertThat(headers1, hasKey(SecondaryAuthentication.THREAD_CTX_KEY)); + assertThat( + headers1.get(SecondaryAuthentication.THREAD_CTX_KEY), + equalTo(threadContext.getHeader(SecondaryAuthentication.THREAD_CTX_KEY)) + ); + } + + // Rewritten for older version + final Version previousVersion = VersionUtils.randomPreviousCompatibleVersion(random(), Version.CURRENT); + when(discoveryNodes.getMinNodeVersion()).thenReturn(previousVersion); + final Map headers2; + if (randomBoolean()) { + headers2 = ClientHelper.getPersistableSafeSecurityHeaders(threadContext, clusterState); + } else { + headers2 = ClientHelper.getPersistableSafeSecurityHeaders(threadContext.getHeaders(), clusterState); + } + assertThat(headers2, not(hasKey(nonSecurityHeaderKey))); + if (hasAuthHeader) { + final Authentication rewrittenAuth = AuthenticationContextSerializer.decode( + headers2.get(AuthenticationField.AUTHENTICATION_KEY) + ); + assertThat(rewrittenAuth.getVersion(), equalTo(previousVersion)); + assertThat(rewrittenAuth.getUser(), equalTo(authentication.getUser())); + } + if (hasSecondaryAuthHeader) { + final Authentication rewrittenSecondaryAuth = AuthenticationContextSerializer.decode( + headers2.get(SecondaryAuthentication.THREAD_CTX_KEY) + ); + assertThat(rewrittenSecondaryAuth.getVersion(), equalTo(previousVersion)); + assertThat(rewrittenSecondaryAuth.getUser(), equalTo(authentication.getUser())); + } + } } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedUpdateTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedUpdateTests.java index 8c820737fda9c..ecb67833eacd6 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedUpdateTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedUpdateTests.java @@ -10,6 +10,8 @@ import org.elasticsearch.Version; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.support.IndicesOptions; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; @@ -47,6 +49,7 @@ import org.elasticsearch.xpack.core.ml.job.config.JobTests; import org.elasticsearch.xpack.core.ml.utils.QueryProvider; import org.elasticsearch.xpack.core.ml.utils.XContentObjectTransformer; +import org.junit.Before; import java.io.IOException; import java.time.ZoneOffset; @@ -63,9 +66,21 @@ import static org.hamcrest.Matchers.hasKey; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.not; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; public class DatafeedUpdateTests extends AbstractSerializingTestCase { + private ClusterState clusterState; + + @Before + public void init() { + clusterState = mock(ClusterState.class); + final DiscoveryNodes discoveryNodes = mock(DiscoveryNodes.class); + when(clusterState.nodes()).thenReturn(discoveryNodes); + when(discoveryNodes.getMinNodeVersion()).thenReturn(Version.CURRENT); + } + @Override protected DatafeedUpdate createTestInstance() { return createRandomized(DatafeedConfigTests.randomValidDatafeedId()); @@ -210,20 +225,20 @@ public void testMultipleDefinedAggParse() throws IOException { public void testApply_failBecauseTargetDatafeedHasDifferentId() { DatafeedConfig datafeed 
= DatafeedConfigTests.createRandomizedDatafeedConfig("foo"); - expectThrows(IllegalArgumentException.class, () -> createRandomized(datafeed.getId() + "_2").apply(datafeed, null)); + expectThrows(IllegalArgumentException.class, () -> createRandomized(datafeed.getId() + "_2").apply(datafeed, null, clusterState)); } public void testApply_failBecauseJobIdChanged() { DatafeedConfig datafeed = DatafeedConfigTests.createRandomizedDatafeedConfig("foo"); DatafeedUpdate datafeedUpdateWithUnchangedJobId = new DatafeedUpdate.Builder(datafeed.getId()).setJobId("foo").build(); - DatafeedConfig updatedDatafeed = datafeedUpdateWithUnchangedJobId.apply(datafeed, Collections.emptyMap()); + DatafeedConfig updatedDatafeed = datafeedUpdateWithUnchangedJobId.apply(datafeed, Collections.emptyMap(), clusterState); assertThat(updatedDatafeed, equalTo(datafeed)); DatafeedUpdate datafeedUpdateWithChangedJobId = new DatafeedUpdate.Builder(datafeed.getId()).setJobId("bar").build(); ElasticsearchStatusException ex = expectThrows( ElasticsearchStatusException.class, - () -> datafeedUpdateWithChangedJobId.apply(datafeed, Collections.emptyMap()) + () -> datafeedUpdateWithChangedJobId.apply(datafeed, Collections.emptyMap(), clusterState) ); assertThat(ex.status(), equalTo(RestStatus.BAD_REQUEST)); assertThat(ex.getMessage(), equalTo(DatafeedUpdate.ERROR_MESSAGE_ON_JOB_ID_UPDATE)); @@ -231,7 +246,8 @@ public void testApply_failBecauseJobIdChanged() { public void testApply_givenEmptyUpdate() { DatafeedConfig datafeed = DatafeedConfigTests.createRandomizedDatafeedConfig("foo"); - DatafeedConfig updatedDatafeed = new DatafeedUpdate.Builder(datafeed.getId()).build().apply(datafeed, Collections.emptyMap()); + DatafeedConfig updatedDatafeed = new DatafeedUpdate.Builder(datafeed.getId()).build() + .apply(datafeed, Collections.emptyMap(), clusterState); assertThat(datafeed, equalTo(updatedDatafeed)); } @@ -242,7 +258,7 @@ public void testApply_givenPartialUpdate() { DatafeedUpdate.Builder updated = new DatafeedUpdate.Builder(datafeed.getId()); updated.setScrollSize(datafeed.getScrollSize() + 1); - DatafeedConfig updatedDatafeed = update.build().apply(datafeed, Collections.emptyMap()); + DatafeedConfig updatedDatafeed = update.build().apply(datafeed, Collections.emptyMap(), clusterState); DatafeedConfig.Builder expectedDatafeed = new DatafeedConfig.Builder(datafeed); expectedDatafeed.setScrollSize(datafeed.getScrollSize() + 1); @@ -270,7 +286,7 @@ public void testApply_givenFullUpdateNoAggregations() { field.put("updated_runtime_field_foo", settings); update.setRuntimeMappings(field); - DatafeedConfig updatedDatafeed = update.build().apply(datafeed, Collections.emptyMap()); + DatafeedConfig updatedDatafeed = update.build().apply(datafeed, Collections.emptyMap(), clusterState); assertThat(updatedDatafeed.getJobId(), equalTo("foo-feed")); assertThat(updatedDatafeed.getIndices(), equalTo(Collections.singletonList("i_2"))); @@ -303,7 +319,7 @@ public void testApply_givenAggregations() throws IOException { ); update.setAggregations(aggProvider); - DatafeedConfig updatedDatafeed = update.build().apply(datafeed, Collections.emptyMap()); + DatafeedConfig updatedDatafeed = update.build().apply(datafeed, Collections.emptyMap(), clusterState); assertThat(updatedDatafeed.getIndices(), equalTo(Collections.singletonList("i_1"))); assertThat(updatedDatafeed.getParsedAggregations(xContentRegistry()), equalTo(aggProvider.getParsedAggs())); @@ -314,7 +330,7 @@ public void testApply_givenIndicesOptions() { DatafeedConfig datafeed = 
DatafeedConfigTests.createRandomizedDatafeedConfig("foo"); DatafeedConfig updatedDatafeed = new DatafeedUpdate.Builder(datafeed.getId()).setIndicesOptions( IndicesOptions.LENIENT_EXPAND_OPEN_HIDDEN - ).build().apply(datafeed, Collections.emptyMap()); + ).build().apply(datafeed, Collections.emptyMap(), clusterState); assertThat(datafeed.getIndicesOptions(), is(not(equalTo(updatedDatafeed.getIndicesOptions())))); assertThat(updatedDatafeed.getIndicesOptions(), equalTo(IndicesOptions.LENIENT_EXPAND_OPEN_HIDDEN)); } @@ -332,7 +348,7 @@ public void testApply_GivenRandomUpdates_AssertImmutability() { update = createRandomized(datafeed.getId(), datafeed); } - DatafeedConfig updatedDatafeed = update.apply(datafeed, Collections.emptyMap()); + DatafeedConfig updatedDatafeed = update.apply(datafeed, Collections.emptyMap(), clusterState); assertThat("update was " + update, datafeed, not(equalTo(updatedDatafeed))); } diff --git a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/action/TransportPutLifecycleAction.java b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/action/TransportPutLifecycleAction.java index a2d9e01ae1111..9d4076e2d4f95 100644 --- a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/action/TransportPutLifecycleAction.java +++ b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/action/TransportPutLifecycleAction.java @@ -96,7 +96,7 @@ protected void masterOperation(Task task, Request request, ClusterState state, A // REST layer and the Transport layer here must be accessed within this thread and not in the // cluster state thread in the ClusterStateUpdateTask below since that thread does not share the // same context, and therefore does not have access to the appropriate security headers. - Map filteredHeaders = ClientHelper.filterSecurityHeaders(threadPool.getThreadContext().getHeaders()); + Map filteredHeaders = ClientHelper.getPersistableSafeSecurityHeaders(threadPool.getThreadContext(), state); LifecyclePolicy.validatePolicyName(request.getPolicy().getName()); diff --git a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/slm/action/TransportPutSnapshotLifecycleAction.java b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/slm/action/TransportPutSnapshotLifecycleAction.java index 745c715de3d14..a2955fe118d91 100644 --- a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/slm/action/TransportPutSnapshotLifecycleAction.java +++ b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/slm/action/TransportPutSnapshotLifecycleAction.java @@ -80,7 +80,7 @@ protected void masterOperation( // REST layer and the Transport layer here must be accessed within this thread and not in the // cluster state thread in the ClusterStateUpdateTask below since that thread does not share the // same context, and therefore does not have access to the appropriate security headers. 
- final Map filteredHeaders = ClientHelper.filterSecurityHeaders(threadPool.getThreadContext().getHeaders()); + final Map filteredHeaders = ClientHelper.getPersistableSafeSecurityHeaders(threadPool.getThreadContext(), state); LifecyclePolicy.validatePolicyName(request.getLifecycleId()); clusterService.submitStateUpdateTask( "put-snapshot-lifecycle-" + request.getLifecycleId(), diff --git a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/DataFrameAnalyticsCRUDIT.java b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/DataFrameAnalyticsCRUDIT.java index aa1a2f0f35fdf..2f065bf681aaf 100644 --- a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/DataFrameAnalyticsCRUDIT.java +++ b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/DataFrameAnalyticsCRUDIT.java @@ -40,7 +40,8 @@ public void createComponents() throws Exception { configProvider = new DataFrameAnalyticsConfigProvider( client(), xContentRegistry(), - new DataFrameAnalyticsAuditor(client(), getInstanceFromNode(ClusterService.class)) + new DataFrameAnalyticsAuditor(client(), getInstanceFromNode(ClusterService.class)), + getInstanceFromNode(ClusterService.class) ); waitForMlTemplates(); } diff --git a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/DataFrameAnalyticsConfigProviderIT.java b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/DataFrameAnalyticsConfigProviderIT.java index 0248ec0a8df8a..34e04b586dbf2 100644 --- a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/DataFrameAnalyticsConfigProviderIT.java +++ b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/DataFrameAnalyticsConfigProviderIT.java @@ -25,6 +25,8 @@ import org.elasticsearch.xpack.core.ml.dataframe.DataFrameAnalyticsConfigUpdate; import org.elasticsearch.xpack.core.ml.dataframe.DataFrameAnalyticsState; import org.elasticsearch.xpack.core.ml.dataframe.DataFrameAnalyticsTaskState; +import org.elasticsearch.xpack.core.security.authc.Authentication; +import org.elasticsearch.xpack.core.security.user.User; import org.elasticsearch.xpack.ml.MlSingleNodeTestCase; import org.elasticsearch.xpack.ml.dataframe.persistence.DataFrameAnalyticsConfigProvider; import org.elasticsearch.xpack.ml.notifications.DataFrameAnalyticsAuditor; @@ -46,14 +48,20 @@ public class DataFrameAnalyticsConfigProviderIT extends MlSingleNodeTestCase { private static final TimeValue TIMEOUT = TimeValue.timeValueSeconds(5); private DataFrameAnalyticsConfigProvider configProvider; + private String dummyAuthenticationHeader; @Before public void createComponents() throws Exception { configProvider = new DataFrameAnalyticsConfigProvider( client(), xContentRegistry(), - new DataFrameAnalyticsAuditor(client(), getInstanceFromNode(ClusterService.class)) + new DataFrameAnalyticsAuditor(client(), getInstanceFromNode(ClusterService.class)), + getInstanceFromNode(ClusterService.class) ); + dummyAuthenticationHeader = Authentication.newRealmAuthentication( + new User("dummy"), + new Authentication.RealmRef("name", "type", "node") + ).encode(); waitForMlTemplates(); } @@ -97,7 +105,7 @@ public void testPutAndGet() throws InterruptedException { public void testPutAndGet_WithSecurityHeaders() throws InterruptedException { String configId = "config-id"; DataFrameAnalyticsConfig config = 
DataFrameAnalyticsConfigTests.createRandom(configId); - Map securityHeaders = Collections.singletonMap("_xpack_security_authentication", "dummy"); + Map securityHeaders = Collections.singletonMap("_xpack_security_authentication", dummyAuthenticationHeader); { // Put the config and verify the response AtomicReference configHolder = new AtomicReference<>(); AtomicReference exceptionHolder = new AtomicReference<>(); @@ -275,7 +283,7 @@ public void testUpdate() throws Exception { ); } { // Update that applies security headers - Map securityHeaders = Collections.singletonMap("_xpack_security_authentication", "dummy"); + Map securityHeaders = Collections.singletonMap("_xpack_security_authentication", dummyAuthenticationHeader); AtomicReference updatedConfigHolder = new AtomicReference<>(); AtomicReference exceptionHolder = new AtomicReference<>(); diff --git a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/DatafeedConfigProviderIT.java b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/DatafeedConfigProviderIT.java index 6e5c77e073657..1202d61b674ab 100644 --- a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/DatafeedConfigProviderIT.java +++ b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/DatafeedConfigProviderIT.java @@ -12,6 +12,7 @@ import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.delete.DeleteResponse; import org.elasticsearch.action.index.IndexResponse; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.persistent.PersistentTasksCustomMetadata; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.xpack.core.ClientHelper; @@ -20,6 +21,10 @@ import org.elasticsearch.xpack.core.ml.action.StartDatafeedAction; import org.elasticsearch.xpack.core.ml.datafeed.DatafeedConfig; import org.elasticsearch.xpack.core.ml.datafeed.DatafeedUpdate; +import org.elasticsearch.xpack.core.security.authc.Authentication; +import org.elasticsearch.xpack.core.security.authc.AuthenticationField; +import org.elasticsearch.xpack.core.security.authc.support.SecondaryAuthentication; +import org.elasticsearch.xpack.core.security.user.User; import org.elasticsearch.xpack.ml.MlSingleNodeTestCase; import org.elasticsearch.xpack.ml.datafeed.persistence.DatafeedConfigProvider; import org.hamcrest.core.IsInstanceOf; @@ -45,6 +50,7 @@ import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasEntry; import static org.hamcrest.Matchers.hasKey; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.instanceOf; @@ -53,11 +59,16 @@ public class DatafeedConfigProviderIT extends MlSingleNodeTestCase { private DatafeedConfigProvider datafeedConfigProvider; + private String dummyAuthenticationHeader; @Before public void createComponents() throws Exception { - datafeedConfigProvider = new DatafeedConfigProvider(client(), xContentRegistry()); + datafeedConfigProvider = new DatafeedConfigProvider(client(), xContentRegistry(), getInstanceFromNode(ClusterService.class)); waitForMlTemplates(); + dummyAuthenticationHeader = Authentication.newRealmAuthentication( + new User("dummy"), + new Authentication.RealmRef("name", "type", "node") + ).encode(); } public void testCrud() throws InterruptedException { @@ -94,10 +105,7 @@ public void testCrud() throws InterruptedException 
{ DatafeedUpdate.Builder update = new DatafeedUpdate.Builder(datafeedId); List updateIndices = Collections.singletonList("a-different-index"); update.setIndices(updateIndices); - Map updateHeaders = new HashMap<>(); - // Only security headers are updated, grab the first one - String securityHeader = ClientHelper.SECURITY_HEADER_FILTERS.iterator().next(); - updateHeaders.put(securityHeader, "CHANGED"); + Map updateHeaders = createSecurityHeader(); AtomicReference configHolder = new AtomicReference<>(); blockingCall( @@ -113,7 +121,7 @@ public void testCrud() throws InterruptedException { ); assertNull(exceptionHolder.get()); assertThat(configHolder.get().getIndices(), equalTo(updateIndices)); - assertThat(configHolder.get().getHeaders().get(securityHeader), equalTo("CHANGED")); + updateHeaders.forEach((key, value) -> assertThat(configHolder.get().getHeaders(), hasEntry(key, value))); // Read the updated config configBuilderHolder.set(null); @@ -124,7 +132,7 @@ public void testCrud() throws InterruptedException { ); assertNull(exceptionHolder.get()); assertThat(configBuilderHolder.get().build().getIndices(), equalTo(updateIndices)); - assertThat(configBuilderHolder.get().build().getHeaders().get(securityHeader), equalTo("CHANGED")); + updateHeaders.forEach((key, value) -> assertThat(configHolder.get().getHeaders(), hasEntry(key, value))); // Delete AtomicReference deleteResponseHolder = new AtomicReference<>(); @@ -498,7 +506,11 @@ private Map createSecurityHeader() { Map headers = new HashMap<>(); // Only security headers are updated, grab the first one String securityHeader = ClientHelper.SECURITY_HEADER_FILTERS.iterator().next(); - headers.put(securityHeader, "SECURITY_"); + if (Set.of(AuthenticationField.AUTHENTICATION_KEY, SecondaryAuthentication.THREAD_CTX_KEY).contains(securityHeader)) { + headers.put(securityHeader, dummyAuthenticationHeader); + } else { + headers.put(securityHeader, "SECURITY_"); + } return headers; } diff --git a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/MlAutoUpdateServiceIT.java b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/MlAutoUpdateServiceIT.java index 317740efe4850..a0f72d738642e 100644 --- a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/MlAutoUpdateServiceIT.java +++ b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/MlAutoUpdateServiceIT.java @@ -15,6 +15,7 @@ import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodeRole; import org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.indices.TestIndexNameExpressionResolver; import org.elasticsearch.xcontent.XContentType; @@ -41,7 +42,7 @@ public class MlAutoUpdateServiceIT extends MlSingleNodeTestCase { @Before public void createComponents() throws Exception { - datafeedConfigProvider = new DatafeedConfigProvider(client(), xContentRegistry()); + datafeedConfigProvider = new DatafeedConfigProvider(client(), xContentRegistry(), getInstanceFromNode(ClusterService.class)); waitForMlTemplates(); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java index b775591d152ab..d0138f49a8591 100644 --- 
a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java @@ -786,7 +786,7 @@ public Collection createComponents( anomalyDetectionAuditor ); JobConfigProvider jobConfigProvider = new JobConfigProvider(client, xContentRegistry); - DatafeedConfigProvider datafeedConfigProvider = new DatafeedConfigProvider(client, xContentRegistry); + DatafeedConfigProvider datafeedConfigProvider = new DatafeedConfigProvider(client, xContentRegistry, clusterService); this.datafeedConfigProvider.set(datafeedConfigProvider); UpdateJobProcessNotifier notifier = new UpdateJobProcessNotifier(client, clusterService, threadPool); JobManager jobManager = new JobManager( @@ -969,7 +969,8 @@ public Collection createComponents( DataFrameAnalyticsConfigProvider dataFrameAnalyticsConfigProvider = new DataFrameAnalyticsConfigProvider( client, xContentRegistry, - dataFrameAnalyticsAuditor + dataFrameAnalyticsAuditor, + clusterService ); assert client instanceof NodeClient; DataFrameAnalyticsManager dataFrameAnalyticsManager = new DataFrameAnalyticsManager( diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportExplainDataFrameAnalyticsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportExplainDataFrameAnalyticsAction.java index 777b0c2c9ad3c..e4dfd64fd8dea 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportExplainDataFrameAnalyticsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportExplainDataFrameAnalyticsAction.java @@ -27,6 +27,7 @@ import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.core.ClientHelper; import org.elasticsearch.xpack.core.XPackField; import org.elasticsearch.xpack.core.XPackSettings; import org.elasticsearch.xpack.core.ml.MachineLearningField; @@ -48,7 +49,6 @@ import java.util.Objects; import java.util.Optional; -import static org.elasticsearch.xpack.core.ClientHelper.filterSecurityHeaders; import static org.elasticsearch.xpack.ml.utils.SecondaryAuthorizationUtils.useSecondaryAuthIfAvailable; /** @@ -134,7 +134,7 @@ private void explain( // Set the auth headers (preferring the secondary headers) to the caller's. // Regardless if the config was previously stored or not. 
DataFrameAnalyticsConfig config = new DataFrameAnalyticsConfig.Builder(request.getConfig()).setHeaders( - filterSecurityHeaders(threadPool.getThreadContext().getHeaders()) + ClientHelper.getPersistableSafeSecurityHeaders(threadPool.getThreadContext(), clusterService.state()) ).build(); extractedFieldsDetectorFactory.createFromSource( config, diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPreviewDataFrameAnalyticsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPreviewDataFrameAnalyticsAction.java index 6634d6c1a4425..80cd62b4831e4 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPreviewDataFrameAnalyticsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPreviewDataFrameAnalyticsAction.java @@ -11,6 +11,7 @@ import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.client.internal.ParentTaskAssigningClient; import org.elasticsearch.client.internal.node.NodeClient; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.license.LicenseUtils; @@ -18,6 +19,7 @@ import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.core.ClientHelper; import org.elasticsearch.xpack.core.XPackField; import org.elasticsearch.xpack.core.XPackSettings; import org.elasticsearch.xpack.core.ml.MachineLearningField; @@ -37,7 +39,6 @@ import java.util.stream.Collectors; import java.util.stream.IntStream; -import static org.elasticsearch.xpack.core.ClientHelper.filterSecurityHeaders; import static org.elasticsearch.xpack.ml.utils.SecondaryAuthorizationUtils.useSecondaryAuthIfAvailable; /** @@ -50,6 +51,7 @@ public class TransportPreviewDataFrameAnalyticsAction extends HandledTransportAc private final SecurityContext securityContext; private final ThreadPool threadPool; private final Settings settings; + private final ClusterService clusterService; @Inject public TransportPreviewDataFrameAnalyticsAction( @@ -58,7 +60,8 @@ public TransportPreviewDataFrameAnalyticsAction( NodeClient client, XPackLicenseState licenseState, Settings settings, - ThreadPool threadPool + ThreadPool threadPool, + ClusterService clusterService ) { super(PreviewDataFrameAnalyticsAction.NAME, transportService, actionFilters, Request::new); this.client = Objects.requireNonNull(client); @@ -68,6 +71,7 @@ public TransportPreviewDataFrameAnalyticsAction( this.securityContext = XPackSettings.SECURITY_ENABLED.get(settings) ? new SecurityContext(settings, threadPool.getThreadContext()) : null; + this.clusterService = clusterService; } private static Map mergeRow(DataFrameDataExtractor.Row row, List fieldNames) { @@ -87,7 +91,7 @@ protected void doExecute(Task task, Request request, ActionListener li // Set the auth headers (preferring the secondary headers) to the caller's. // Regardless if the config was previously stored or not. 
DataFrameAnalyticsConfig config = new DataFrameAnalyticsConfig.Builder(request.getConfig()).setHeaders( - filterSecurityHeaders(threadPool.getThreadContext().getHeaders()) + ClientHelper.getPersistableSafeSecurityHeaders(threadPool.getThreadContext(), clusterService.state()) ).build(); preview(task, config, listener); }); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPreviewDatafeedAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPreviewDatafeedAction.java index 197e3e9aa3027..987ef5523218a 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPreviewDatafeedAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPreviewDatafeedAction.java @@ -24,6 +24,7 @@ import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xcontent.NamedXContentRegistry; +import org.elasticsearch.xpack.core.ClientHelper; import org.elasticsearch.xpack.core.XPackSettings; import org.elasticsearch.xpack.core.ml.action.PreviewDatafeedAction; import org.elasticsearch.xpack.core.ml.datafeed.ChunkingConfig; @@ -48,7 +49,6 @@ import static org.elasticsearch.xpack.core.ClientHelper.ML_ORIGIN; import static org.elasticsearch.xpack.core.ClientHelper.executeWithHeadersAsync; -import static org.elasticsearch.xpack.core.ClientHelper.filterSecurityHeaders; import static org.elasticsearch.xpack.ml.utils.SecondaryAuthorizationUtils.useSecondaryAuthIfAvailable; public class TransportPreviewDatafeedAction extends HandledTransportAction { @@ -115,7 +115,9 @@ private void previewDatafeed( ) { DatafeedConfig.Builder previewDatafeedBuilder = buildPreviewDatafeed(datafeedConfig); useSecondaryAuthIfAvailable(securityContext, () -> { - previewDatafeedBuilder.setHeaders(filterSecurityHeaders(threadPool.getThreadContext().getHeaders())); + previewDatafeedBuilder.setHeaders( + ClientHelper.getPersistableSafeSecurityHeaders(threadPool.getThreadContext(), clusterService.state()) + ); // NB: this is using the client from the transport layer, NOT the internal client. // This is important because it means the datafeed search will fail if the user // requesting the preview doesn't have permission to search the relevant indices. 
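Across the ML, rollup, transform, QL, and watcher changes above and below, the pattern is the same: ClientHelper.filterSecurityHeaders(threadContext.getHeaders()), which merely copied the allow-listed security headers out of the thread context, is replaced by ClientHelper.getPersistableSafeSecurityHeaders(threadContext, clusterService.state()), which is also given the cluster state; the mocked DiscoveryNodes.getMinNodeVersion() in the watcher test further down suggests the new method additionally considers the minimum node version before persisting encoded authentication headers. Below is a minimal sketch of just the plain filtering step; it is illustrative, not the real ClientHelper code, and every name except _xpack_security_authentication (which appears in the tests in this patch) is a hypothetical stand-in.

import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;

public class SecurityHeaderFilterSketch {
    // Stand-in for ClientHelper.SECURITY_HEADER_FILTERS. The first entry is taken
    // from the tests in this patch; the second is a hypothetical placeholder.
    private static final Set<String> SECURITY_HEADER_FILTERS = Set.of(
        "_xpack_security_authentication",
        "_hypothetical_secondary_authc_header"
    );

    // Keep only the allow-listed security headers before persisting them in a config.
    static Map<String, String> filterSecurityHeaders(Map<String, String> headers) {
        return headers.entrySet()
            .stream()
            .filter(entry -> SECURITY_HEADER_FILTERS.contains(entry.getKey()))
            .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));
    }

    public static void main(String[] args) {
        Map<String, String> threadContextHeaders = Map.of(
            "_xpack_security_authentication", "<encoded authentication>",
            "X-Opaque-Id", "tracing-id" // not a security header, so it is dropped
        );
        // Prints only the authentication entry.
        System.out.println(filterSecurityHeaders(threadContextHeaders));
    }
}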
diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/persistence/DatafeedConfigProvider.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/persistence/DatafeedConfigProvider.java index 0f7bc46d9a7d0..1925e1975e827 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/persistence/DatafeedConfigProvider.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/persistence/DatafeedConfigProvider.java @@ -26,6 +26,7 @@ import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.action.support.WriteRequest; import org.elasticsearch.client.internal.Client; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.regex.Regex; @@ -46,6 +47,7 @@ import org.elasticsearch.xcontent.XContentFactory; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.core.ClientHelper; import org.elasticsearch.xpack.core.action.util.ExpandedIdsMatcher; import org.elasticsearch.xpack.core.ml.MlConfigIndex; import org.elasticsearch.xpack.core.ml.MlTasks; @@ -71,7 +73,6 @@ import static org.elasticsearch.xpack.core.ClientHelper.ML_ORIGIN; import static org.elasticsearch.xpack.core.ClientHelper.executeAsyncWithOrigin; -import static org.elasticsearch.xpack.core.ClientHelper.filterSecurityHeaders; /** * This class implements CRUD operation for the @@ -87,12 +88,14 @@ public class DatafeedConfigProvider { private static final Logger logger = LogManager.getLogger(DatafeedConfigProvider.class); private final Client client; private final NamedXContentRegistry xContentRegistry; + private final ClusterService clusterService; public static final Map TO_XCONTENT_PARAMS = Map.of(ToXContentParams.FOR_INTERNAL_STORAGE, "true"); - public DatafeedConfigProvider(Client client, NamedXContentRegistry xContentRegistry) { + public DatafeedConfigProvider(Client client, NamedXContentRegistry xContentRegistry, ClusterService clusterService) { this.client = client; this.xContentRegistry = xContentRegistry; + this.clusterService = clusterService; } /** @@ -107,7 +110,9 @@ public void putDatafeedConfig(DatafeedConfig config, Map headers if (headers.isEmpty() == false) { // Filter any values in headers that aren't security fields - config = new DatafeedConfig.Builder(config).setHeaders(filterSecurityHeaders(headers)).build(); + config = new DatafeedConfig.Builder(config).setHeaders( + ClientHelper.getPersistableSafeSecurityHeaders(headers, clusterService.state()) + ).build(); } final String datafeedId = config.getId(); @@ -299,7 +304,7 @@ public void onResponse(GetResponse getResponse) { DatafeedConfig updatedConfig; try { - updatedConfig = update.apply(configBuilder.build(), headers); + updatedConfig = update.apply(configBuilder.build(), headers, clusterService.state()); } catch (Exception e) { delegate.onFailure(e); return; diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/persistence/DataFrameAnalyticsConfigProvider.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/persistence/DataFrameAnalyticsConfigProvider.java index 78f9885351f2c..1ab486cb061d0 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/persistence/DataFrameAnalyticsConfigProvider.java +++ 
b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/persistence/DataFrameAnalyticsConfigProvider.java @@ -24,6 +24,7 @@ import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; import org.elasticsearch.core.Nullable; @@ -41,6 +42,7 @@ import org.elasticsearch.xcontent.XContentFactory; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.core.ClientHelper; import org.elasticsearch.xpack.core.action.util.PageParams; import org.elasticsearch.xpack.core.ml.MlConfigIndex; import org.elasticsearch.xpack.core.ml.MlTasks; @@ -66,7 +68,6 @@ import static org.elasticsearch.xpack.core.ClientHelper.ML_ORIGIN; import static org.elasticsearch.xpack.core.ClientHelper.executeAsyncWithOrigin; -import static org.elasticsearch.xpack.core.ClientHelper.filterSecurityHeaders; public class DataFrameAnalyticsConfigProvider { @@ -79,11 +80,18 @@ public class DataFrameAnalyticsConfigProvider { private final Client client; private final NamedXContentRegistry xContentRegistry; private final DataFrameAnalyticsAuditor auditor; + private final ClusterService clusterService; - public DataFrameAnalyticsConfigProvider(Client client, NamedXContentRegistry xContentRegistry, DataFrameAnalyticsAuditor auditor) { + public DataFrameAnalyticsConfigProvider( + Client client, + NamedXContentRegistry xContentRegistry, + DataFrameAnalyticsAuditor auditor, + ClusterService clusterService + ) { this.client = Objects.requireNonNull(client); this.xContentRegistry = xContentRegistry; this.auditor = Objects.requireNonNull(auditor); + this.clusterService = clusterService; } /** @@ -113,7 +121,11 @@ public void put( } private DataFrameAnalyticsConfig prepareConfigForIndex(DataFrameAnalyticsConfig config, Map headers) { - return headers.isEmpty() ? config : new DataFrameAnalyticsConfig.Builder(config).setHeaders(filterSecurityHeaders(headers)).build(); + return headers.isEmpty() + ? 
config + : new DataFrameAnalyticsConfig.Builder(config).setHeaders( + ClientHelper.getPersistableSafeSecurityHeaders(headers, clusterService.state()) + ).build(); } private void exists(String jobId, ActionListener listener) { @@ -183,7 +195,7 @@ public void update( // Merge the original config with the given update object DataFrameAnalyticsConfig.Builder updatedConfigBuilder = update.mergeWithConfig(originalConfig); if (headers.isEmpty() == false) { - updatedConfigBuilder.setHeaders(filterSecurityHeaders(headers)); + updatedConfigBuilder.setHeaders(ClientHelper.getPersistableSafeSecurityHeaders(headers, clusterService.state())); } DataFrameAnalyticsConfig updatedConfig = updatedConfigBuilder.build(); diff --git a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/async/AsyncTaskManagementService.java b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/async/AsyncTaskManagementService.java index 4120c15f5cec7..d1607a30dabe9 100644 --- a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/async/AsyncTaskManagementService.java +++ b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/async/AsyncTaskManagementService.java @@ -108,7 +108,10 @@ public TaskId getParentTask() { @Override public Task createTask(long id, String type, String actionName, TaskId parentTaskId, Map headers) { - Map originHeaders = ClientHelper.filterSecurityHeaders(threadPool.getThreadContext().getHeaders()); + Map originHeaders = ClientHelper.getPersistableSafeSecurityHeaders( + threadPool.getThreadContext(), + clusterService.state() + ); return operation.createTask( request, id, diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportPutRollupJobAction.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportPutRollupJobAction.java index b9de612646088..a26d13450b705 100644 --- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportPutRollupJobAction.java +++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportPutRollupJobAction.java @@ -90,6 +90,7 @@ public TransportPutRollupJobAction( ); this.persistentTasksService = persistentTasksService; this.client = client; + } @Override @@ -138,9 +139,12 @@ static void checkForDeprecatedTZ(PutRollupJobAction.Request request) { } } - private static RollupJob createRollupJob(RollupJobConfig config, ThreadPool threadPool) { + private RollupJob createRollupJob(RollupJobConfig config, ThreadPool threadPool) { // ensure we only filter for the allowed headers - Map filteredHeaders = ClientHelper.filterSecurityHeaders(threadPool.getThreadContext().getHeaders()); + Map filteredHeaders = ClientHelper.getPersistableSafeSecurityHeaders( + threadPool.getThreadContext(), + clusterService.state() + ); return new RollupJob(config, filteredHeaders); } diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportPreviewTransformAction.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportPreviewTransformAction.java index 5d460e1c6f8b1..ea294a6cd65ca 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportPreviewTransformAction.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportPreviewTransformAction.java @@ -267,7 +267,7 @@ private void getPreview( mappings.set(deducedMappings); function.preview( client, - 
ClientHelper.filterSecurityHeaders(threadPool.getThreadContext().getHeaders()), + ClientHelper.getPersistableSafeSecurityHeaders(threadPool.getThreadContext(), clusterService.state()), source, deducedMappings, NUMBER_OF_PREVIEW_BUCKETS, diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportPutTransformAction.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportPutTransformAction.java index 6b400c718c6b3..2f79d118a48a5 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportPutTransformAction.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportPutTransformAction.java @@ -94,7 +94,10 @@ protected void masterOperation(Task task, Request request, ClusterState clusterS XPackPlugin.checkReadyForXPackCustomMetadata(clusterState); // set headers to run transform as calling user - Map filteredHeaders = ClientHelper.filterSecurityHeaders(threadPool.getThreadContext().getHeaders()); + Map filteredHeaders = ClientHelper.getPersistableSafeSecurityHeaders( + threadPool.getThreadContext(), + clusterService.state() + ); TransformConfig config = request.getConfig().setHeaders(filteredHeaders).setCreateTime(Instant.now()).setVersion(Version.CURRENT); diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportUpdateTransformAction.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportUpdateTransformAction.java index c5d4bd518538a..a18ede68d06a3 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportUpdateTransformAction.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportUpdateTransformAction.java @@ -119,7 +119,10 @@ protected void doExecute(Task task, Request request, ActionListener li } // set headers to run transform as calling user - Map filteredHeaders = ClientHelper.filterSecurityHeaders(threadPool.getThreadContext().getHeaders()); + Map filteredHeaders = ClientHelper.getPersistableSafeSecurityHeaders( + threadPool.getThreadContext(), + clusterService.state() + ); TransformConfigUpdate update = request.getUpdate(); update.setHeaders(filteredHeaders); diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/TransportExecuteWatchAction.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/TransportExecuteWatchAction.java index 0e71ee68c878b..94e1edcace189 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/TransportExecuteWatchAction.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/TransportExecuteWatchAction.java @@ -16,6 +16,7 @@ import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.routing.Preference; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.util.concurrent.AbstractRunnable; @@ -69,6 +70,7 @@ public class TransportExecuteWatchAction extends WatcherTransportAction filteredHeaders = ClientHelper.filterSecurityHeaders(threadPool.getThreadContext().getHeaders()); + Map filteredHeaders = ClientHelper.getPersistableSafeSecurityHeaders( + 
threadPool.getThreadContext(), + clusterService.state() + ); watch.status().setHeaders(filteredHeaders); try (XContentBuilder builder = jsonBuilder()) { diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/transport/actions/TransportPutWatchActionTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/transport/actions/TransportPutWatchActionTests.java index 770ca44137701..d3a16585ac9ac 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/transport/actions/TransportPutWatchActionTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/transport/actions/TransportPutWatchActionTests.java @@ -6,11 +6,15 @@ */ package org.elasticsearch.xpack.watcher.transport.actions; +import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.client.internal.Client; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.index.Index; @@ -21,6 +25,10 @@ import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.ClientHelper; +import org.elasticsearch.xpack.core.security.authc.Authentication; +import org.elasticsearch.xpack.core.security.authc.AuthenticationField; +import org.elasticsearch.xpack.core.security.authc.support.SecondaryAuthentication; +import org.elasticsearch.xpack.core.security.user.User; import org.elasticsearch.xpack.core.watcher.watch.ClockMock; import org.elasticsearch.xpack.core.watcher.watch.Watch; import org.elasticsearch.xpack.watcher.ClockHolder; @@ -31,6 +39,7 @@ import java.util.Collections; import java.util.Map; +import java.util.Set; import static org.hamcrest.Matchers.hasKey; import static org.hamcrest.Matchers.hasSize; @@ -75,6 +84,13 @@ public void setupAction() throws Exception { return null; }).when(client).execute(any(), any(), any()); + final ClusterService clusterService = mock(ClusterService.class); + final ClusterState clusterState = mock(ClusterState.class); + final DiscoveryNodes discoveryNodes = mock(DiscoveryNodes.class); + when(clusterService.state()).thenReturn(clusterState); + when(clusterState.nodes()).thenReturn(discoveryNodes); + when(discoveryNodes.getMinNodeVersion()).thenReturn(Version.CURRENT); + action = new TransportPutWatchAction( transportService, threadPool, @@ -82,7 +98,8 @@ public void setupAction() throws Exception { new ClockHolder(new ClockMock()), TestUtils.newTestLicenseState(), parser, - client + client, + clusterService ); } @@ -90,7 +107,14 @@ public void setupAction() throws Exception { public void testHeadersAreFilteredWhenPuttingWatches() throws Exception { // set up threadcontext with some arbitrary info String headerName = randomFrom(ClientHelper.SECURITY_HEADER_FILTERS); - threadContext.putHeader(headerName, randomAlphaOfLength(10)); + if (Set.of(AuthenticationField.AUTHENTICATION_KEY, SecondaryAuthentication.THREAD_CTX_KEY).contains(headerName)) { + threadContext.putHeader( + headerName, + Authentication.newRealmAuthentication(new User("dummy"), new Authentication.RealmRef("name", "type", "node")).encode() + ); 
+ } else { + threadContext.putHeader(headerName, randomAlphaOfLength(10)); + } threadContext.putHeader(randomAlphaOfLength(10), "doesntmatter"); PutWatchRequest putWatchRequest = new PutWatchRequest(); From 238cd20e88421024b2484d716eab3d85ce47d5e5 Mon Sep 17 00:00:00 2001 From: Ioannis Kakavas Date: Thu, 17 Feb 2022 07:14:18 +0200 Subject: [PATCH 140/167] Adjust timeout for responses from SMB fixture (#84037) We have recently seen a number of failures in ActiveDirectorySessionFactoryTests where we fail to get a response from the Samba server we use within the default time frame of 5 sec. The fixture appears to be up and running, so it may simply be too slow to respond within those 5 sec. This commit bumps the timeout to 15s. --- .../security/authc/ldap/ActiveDirectorySessionFactoryTests.java | 1 + 1 file changed, 1 insertion(+) diff --git a/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/ActiveDirectorySessionFactoryTests.java b/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/ActiveDirectorySessionFactoryTests.java index 43eeb69ed3fdf..c6644a976d08a 100644 --- a/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/ActiveDirectorySessionFactoryTests.java +++ b/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/ActiveDirectorySessionFactoryTests.java @@ -107,6 +107,7 @@ private RealmConfig configureRealm(String name, String type, Settings settings) .normalizePrefix("xpack.security.authc.realms." + type + "." + name + ".") .put(globalSettings) .put(getFullSettingKey(identifier, RealmSettings.ORDER_SETTING), 0) + .put(getFullSettingKey(identifier, SessionFactorySettings.TIMEOUT_RESPONSE_SETTING), "15s") .build(); final Environment env = TestEnvironment.newEnvironment(mergedSettings); this.sslService = new SSLService(env); From ed6627f80432e83eab8109bd551c00e4a532f925 Mon Sep 17 00:00:00 2001 From: Ignacio Vera Date: Thu, 17 Feb 2022 07:00:01 +0100 Subject: [PATCH 141/167] Fix GeoHexAggregationBuilderTests (#84049) Adjust some of the random values that could fall out of bounds.
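The two-character fix below is easy to misread, so the failure mode is worth spelling out: the grid aggregation's size and shardSize must be positive (which is what the move from a lower bound of 0 to 1 implies), so a random draw from [0, 256 * 256] occasionally produced an illegal 0 and failed the test during validation instead of exercising the builder. A self-contained sketch of the corrected randomization follows; randomIntBetween is re-implemented here only so the snippet compiles on its own, it is not the ESTestCase helper.

import java.util.concurrent.ThreadLocalRandom;

public class RandomBoundsSketch {
    // Minimal stand-in for ESTestCase.randomIntBetween (both bounds inclusive).
    static int randomIntBetween(int minInclusive, int maxInclusive) {
        return ThreadLocalRandom.current().nextInt(minInclusive, maxInclusive + 1);
    }

    public static void main(String[] args) {
        // The old bound, randomIntBetween(0, 256 * 256), could yield 0, which a
        // positive-only setter rejects. The new lower bound keeps the value legal.
        int size = randomIntBetween(1, 256 * 256);
        System.out.println("size=" + size);
    }
}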
--- .../bucket/geogrid/GeoHexAggregationBuilderTests.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/search/aggregations/bucket/geogrid/GeoHexAggregationBuilderTests.java b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/search/aggregations/bucket/geogrid/GeoHexAggregationBuilderTests.java index dbe960087d91d..f72f2d2b61f70 100644 --- a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/search/aggregations/bucket/geogrid/GeoHexAggregationBuilderTests.java +++ b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/search/aggregations/bucket/geogrid/GeoHexAggregationBuilderTests.java @@ -46,10 +46,10 @@ protected GeoHexGridAggregationBuilder createTestInstance() { geoHexGridAggregationBuilder.precision(randomIntBetween(0, H3.MAX_H3_RES)); } if (randomBoolean()) { - geoHexGridAggregationBuilder.size(randomIntBetween(0, 256 * 256)); + geoHexGridAggregationBuilder.size(randomIntBetween(1, 256 * 256)); } if (randomBoolean()) { - geoHexGridAggregationBuilder.shardSize(randomIntBetween(0, 256 * 256)); + geoHexGridAggregationBuilder.shardSize(randomIntBetween(1, 256 * 256)); } if (randomBoolean()) { geoHexGridAggregationBuilder.setGeoBoundingBox(GeoTestUtils.randomBBox()); From 3e1b6612ac259e9d9c5bc1a50d1a5068cdc02377 Mon Sep 17 00:00:00 2001 From: Ignacio Vera Date: Thu, 17 Feb 2022 08:12:52 +0100 Subject: [PATCH 142/167] GeometryNormalizer should not fail if it cannot compute signed area (#84051) This commit removes the exception and assumes that when the area is zero, the polygon has the right orientation. For invalid polygons, the exception will instead be thrown at indexing time. --- docs/changelog/84051.yaml | 6 ++++++ .../legacygeo/builders/PolygonBuilder.java | 9 ++++----- .../legacygeo/ShapeBuilderTests.java | 2 +- .../legacygeo/builders/PolygonBuilderTests.java | 2 +- .../common/geo/GeoPolygonDecomposer.java | 10 +++------- .../common/geo/GeometryIndexerTests.java | 14 ++++++++++++++ .../common/geo/GeometryNormalizerTests.java | 15 +++++++++++++++ .../GeoShapeWithDocValuesFieldMapperTests.java | 6 +++--- 8 files changed, 47 insertions(+), 17 deletions(-) create mode 100644 docs/changelog/84051.yaml diff --git a/docs/changelog/84051.yaml b/docs/changelog/84051.yaml new file mode 100644 index 0000000000000..726252127bccc --- /dev/null +++ b/docs/changelog/84051.yaml @@ -0,0 +1,6 @@ +pr: 84051 +summary: '`GeometryNormalizer` should not fail if it cannot compute signed area' +area: Geo +type: bug +issues: + - 83946 diff --git a/modules/legacy-geo/src/main/java/org/elasticsearch/legacygeo/builders/PolygonBuilder.java b/modules/legacy-geo/src/main/java/org/elasticsearch/legacygeo/builders/PolygonBuilder.java index 314d63d2792e5..b71094f51cc20 100644 --- a/modules/legacy-geo/src/main/java/org/elasticsearch/legacygeo/builders/PolygonBuilder.java +++ b/modules/legacy-geo/src/main/java/org/elasticsearch/legacygeo/builders/PolygonBuilder.java @@ -721,11 +721,10 @@ private static Edge[] ring( minX = Math.min(minX, points[i].x); maxX = Math.max(maxX, points[i].x); } - if (signedArea == 0) { - // Points are collinear or self-intersection - throw new InvalidShapeException("Cannot determine orientation: signed area equal to 0"); - } - boolean orientation = signedArea < 0; + + // if the polygon is tiny, the computed area can result in zero. In that case + // we assume orientation is correct + boolean orientation = signedArea == 0 ? 
handedness != false : signedArea < 0; // OGC requires shell as ccw (Right-Handedness) and holes as cw (Left-Handedness) // since GeoJSON doesn't specify (and doesn't need to) GEO core will assume OGC standards diff --git a/modules/legacy-geo/src/test/java/org/elasticsearch/legacygeo/ShapeBuilderTests.java b/modules/legacy-geo/src/test/java/org/elasticsearch/legacygeo/ShapeBuilderTests.java index d70dc7981296b..0603533a8503f 100644 --- a/modules/legacy-geo/src/test/java/org/elasticsearch/legacygeo/ShapeBuilderTests.java +++ b/modules/legacy-geo/src/test/java/org/elasticsearch/legacygeo/ShapeBuilderTests.java @@ -225,7 +225,7 @@ public void testPolygonSelfIntersection() { new CoordinatesBuilder().coordinate(-40.0, 50.0).coordinate(40.0, 50.0).coordinate(-40.0, -50.0).coordinate(40.0, -50.0).close() ); Exception e = expectThrows(InvalidShapeException.class, () -> newPolygon.buildS4J()); - assertThat(e.getMessage(), containsString("Cannot determine orientation: signed area equal to 0")); + assertThat(e.getMessage(), containsString("Self-intersection at or near point (0.0, 0.0, NaN)")); } /** note: only supported by S4J at the moment */ diff --git a/modules/legacy-geo/src/test/java/org/elasticsearch/legacygeo/builders/PolygonBuilderTests.java b/modules/legacy-geo/src/test/java/org/elasticsearch/legacygeo/builders/PolygonBuilderTests.java index 35b98df7a397d..ba82ad37425be 100644 --- a/modules/legacy-geo/src/test/java/org/elasticsearch/legacygeo/builders/PolygonBuilderTests.java +++ b/modules/legacy-geo/src/test/java/org/elasticsearch/legacygeo/builders/PolygonBuilderTests.java @@ -167,7 +167,7 @@ public void testPolygonWithUndefinedOrientationDueToCollinearPoints() { new CoordinatesBuilder().coordinate(0.0, 0.0).coordinate(1.0, 1.0).coordinate(-1.0, -1.0).close() ); InvalidShapeException e = expectThrows(InvalidShapeException.class, pb::buildS4J); - assertEquals("Cannot determine orientation: signed area equal to 0", e.getMessage()); + assertEquals("Self-intersection at or near point (-1.0, -1.0, NaN)", e.getMessage()); } public void testCrossingDateline() { diff --git a/server/src/main/java/org/elasticsearch/common/geo/GeoPolygonDecomposer.java b/server/src/main/java/org/elasticsearch/common/geo/GeoPolygonDecomposer.java index 44c0b2297f55e..9f80619b7e80d 100644 --- a/server/src/main/java/org/elasticsearch/common/geo/GeoPolygonDecomposer.java +++ b/server/src/main/java/org/elasticsearch/common/geo/GeoPolygonDecomposer.java @@ -213,13 +213,9 @@ private static Edge[] ring( minX = Math.min(minX, points[i].getX()); maxX = Math.max(maxX, points[i].getX()); } - if (signedArea == 0) { - // Points are collinear or self-intersection - throw new IllegalArgumentException( - "Cannot determine orientation: signed area equal to 0." + " Points are collinear or polygon self-intersects." - ); - } - boolean orientation = signedArea < 0; + // if the polygon is tiny, the computed area can result in zero. In that case + // we assume orientation is correct + boolean orientation = signedArea == 0 ? 
handedness != false : signedArea < 0; // OGC requires shell as ccw (Right-Handedness) and holes as cw (Left-Handedness) // since GeoJSON doesn't specify (and doesn't need to) GEO core will assume OGC standards diff --git a/server/src/test/java/org/elasticsearch/common/geo/GeometryIndexerTests.java b/server/src/test/java/org/elasticsearch/common/geo/GeometryIndexerTests.java index aa67871541e83..c9765880d24cb 100644 --- a/server/src/test/java/org/elasticsearch/common/geo/GeometryIndexerTests.java +++ b/server/src/test/java/org/elasticsearch/common/geo/GeometryIndexerTests.java @@ -198,6 +198,11 @@ public void testPolygonOrientation() throws IOException, ParseException { expected("POLYGON ((180 29, 180 38, 180 56, 180 53, 178 47, 177 23, 180 29))"), actual("POLYGON ((180 38, 180.0 56, 180.0 53, 178 47, 177 23, 180 29, 180 36, 180 37, 180 38))", randomBoolean()) ); + + assertEquals( + expected("POLYGON ((-135 85, 135 85, 45 85, -45 85, -135 85))"), + actual("POLYGON ((-45 85, -135 85, 135 85, 45 85, -45 85))", randomBoolean()) + ); } public void testInvalidSelfCrossingPolygon() { @@ -218,6 +223,15 @@ public void testCrossingDateline() { assertTrue(geometry instanceof MultiPolygon); } + public void testPolygonAllCollinearPoints() { + Polygon polygon = new Polygon(new LinearRing(new double[] { 0, 1, -1, 0 }, new double[] { 0, 1, -1, 0 })); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> indexer.indexShape(polygon)); + assertEquals( + "Unable to Tessellate shape [[1.0, 1.0] [-1.0, -1.0] [0.0, 0.0] [1.0, 1.0] ]. Possible malformed shape detected.", + e.getMessage() + ); + } + private XContentBuilder polygon(Boolean orientation, double... val) throws IOException { XContentBuilder pointGeoJson = XContentFactory.jsonBuilder().startObject(); { diff --git a/server/src/test/java/org/elasticsearch/common/geo/GeometryNormalizerTests.java b/server/src/test/java/org/elasticsearch/common/geo/GeometryNormalizerTests.java index e8080e92de6aa..2a9d8f30dee0f 100644 --- a/server/src/test/java/org/elasticsearch/common/geo/GeometryNormalizerTests.java +++ b/server/src/test/java/org/elasticsearch/common/geo/GeometryNormalizerTests.java @@ -375,6 +375,21 @@ public void testPolygon() { polygon = new Polygon(new LinearRing(new double[] { 170, 190, 190, 170, 170 }, new double[] { -10, -10, 10, 10, -10 })); assertEquals(indexed, GeometryNormalizer.apply(Orientation.CCW, polygon)); assertEquals(true, GeometryNormalizer.needsNormalize(Orientation.CCW, polygon)); + + polygon = new Polygon( + new LinearRing( + new double[] { -107.88180702965093, -107.88179936541891, -107.88180701456989, -107.88180702965093 }, + new double[] { 37.289285907909985, 37.289278246132682, 37.289285918063491, 37.289285907909985 } + ) + ); + indexed = new Polygon( + new LinearRing( + new double[] { -107.88179936541891, -107.88180701456989, -107.88180702965093, -107.88179936541891 }, + new double[] { 37.289278246132682, 37.289285918063491, 37.289285907909985, 37.289278246132682 } + ) + ); + assertEquals(indexed, GeometryNormalizer.apply(Orientation.CCW, polygon)); + } public void testMultiPolygon() { diff --git a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/GeoShapeWithDocValuesFieldMapperTests.java b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/GeoShapeWithDocValuesFieldMapperTests.java index 22f4d9cc07eec..51f314e82fda7 100644 --- 
a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/GeoShapeWithDocValuesFieldMapperTests.java +++ b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/GeoShapeWithDocValuesFieldMapperTests.java @@ -245,8 +245,8 @@ public void testIgnoreMalformedValues() throws IOException { .startObject() .field( "field", - "POLYGON ((18.9401790919516 -33.9681188869036, 18.9401790919516 -33.9681188869037, 18.9401790919517 " + "-33.9681188869037, 18.9401790919517 -33.9681188869036, 18.9401790919516 -33.9681188869036))" + "POLYGON ((18.9401790919516 -33.9681188869036, 18.9401790919516 -33.9681188869036, 18.9401790919517 " + "-33.9681188869036, 18.9401790919517 -33.9681188869036, 18.9401790919516 -33.9681188869036))" ) .endObject() ); @@ -254,7 +254,7 @@ public void testIgnoreMalformedValues() throws IOException { ParsedDocument document = ignoreMapper.parse(sourceToParse); assertThat(document.docs().get(0).getFields("field").length, equalTo(0)); MapperParsingException exception = expectThrows(MapperParsingException.class, () -> failMapper.parse(sourceToParse)); - assertThat(exception.getCause().getMessage(), containsString("Cannot determine orientation")); + assertThat(exception.getCause().getMessage(), containsString("at least 4 polygon points required")); } } From aeed4ebe822c2948871dd4faf900f1abfaa34b56 Mon Sep 17 00:00:00 2001 From: Ievgen Degtiarenko Date: Thu, 17 Feb 2022 08:38:51 +0100 Subject: [PATCH 143/167] fix testRestoreLocalHistoryFromTranslogOnPromotion (#84027) This test was failing in rare cases when there are only a few operations and maxSeqNoOfUpdatesOrDeletes is greater than maxSeqNoOfUpdatesOrDeletesBeforeRollback. --- .../index/shard/IndexShardTests.java | 22 ++++++++----------- 1 file changed, 9 insertions(+), 13 deletions(-) diff --git a/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java b/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java index 7835f7014818d..640da6b5799ab 100644 --- a/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java +++ b/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java @@ -1279,7 +1279,7 @@ public void testClosedIndicesSkipSyncGlobalCheckpoint() throws Exception { public void testRestoreLocalHistoryFromTranslogOnPromotion() throws IOException, InterruptedException { final IndexShard indexShard = newStartedShard(false); - final int operations = 1024 - scaledRandomIntBetween(0, 1024); + final int operations = randomBoolean() ? 
scaledRandomIntBetween(0, 1024) : 1024 - scaledRandomIntBetween(0, 1024); indexOnReplicaWithGaps(indexShard, operations, Math.toIntExact(SequenceNumbers.NO_OPS_PERFORMED)); final long maxSeqNo = indexShard.seqNoStats().getMaxSeqNo(); @@ -1287,6 +1287,7 @@ public void testRestoreLocalHistoryFromTranslogOnPromotion() throws IOException, indexShard.updateGlobalCheckpointOnReplica(globalCheckpointOnReplica, "test"); final long globalCheckpoint = randomLongBetween(UNASSIGNED_SEQ_NO, indexShard.getLocalCheckpoint()); + final long maxSeqNoOfUpdatesOrDeletes = randomLongBetween(SequenceNumbers.NO_OPS_PERFORMED, maxSeqNo); final long maxSeqNoOfUpdatesOrDeletesBeforeRollback = indexShard.getMaxSeqNoOfUpdatesOrDeletes(); final Set docsBeforeRollback = getShardDocUIDs(indexShard); final CountDownLatch latch = new CountDownLatch(1); @@ -1294,8 +1295,8 @@ public void testRestoreLocalHistoryFromTranslogOnPromotion() throws IOException, indexShard, indexShard.getPendingPrimaryTerm() + 1, globalCheckpoint, - randomLongBetween(SequenceNumbers.NO_OPS_PERFORMED, maxSeqNo), - new ActionListener() { + maxSeqNoOfUpdatesOrDeletes, + new ActionListener<>() { @Override public void onResponse(Releasable releasable) { releasable.close(); @@ -1312,11 +1313,10 @@ public void onFailure(Exception e) { latch.await(); long globalCheckpointOnPromotedReplica = Math.max(globalCheckpointOnReplica, globalCheckpoint); - if (globalCheckpointOnPromotedReplica < maxSeqNo) { - assertThat(indexShard.getMaxSeqNoOfUpdatesOrDeletes(), equalTo(maxSeqNo)); - } else { - assertThat(indexShard.getMaxSeqNoOfUpdatesOrDeletes(), equalTo(maxSeqNoOfUpdatesOrDeletesBeforeRollback)); - } + long expectedMaxSeqNoOfUpdatesOrDeletes = globalCheckpointOnPromotedReplica < maxSeqNo + ? maxSeqNo + : Math.max(maxSeqNoOfUpdatesOrDeletesBeforeRollback, maxSeqNoOfUpdatesOrDeletes); + assertThat(indexShard.getMaxSeqNoOfUpdatesOrDeletes(), equalTo(expectedMaxSeqNoOfUpdatesOrDeletes)); final ShardRouting newRouting = indexShard.routingEntry().moveActiveReplicaToPrimary(); final CountDownLatch resyncLatch = new CountDownLatch(1); indexShard.updateShardState( @@ -1331,11 +1331,7 @@ public void onFailure(Exception e) { assertThat(indexShard.getLocalCheckpoint(), equalTo(maxSeqNo)); assertThat(indexShard.seqNoStats().getMaxSeqNo(), equalTo(maxSeqNo)); assertThat(getShardDocUIDs(indexShard), equalTo(docsBeforeRollback)); - if (globalCheckpointOnPromotedReplica < maxSeqNo) { - assertThat(indexShard.getMaxSeqNoOfUpdatesOrDeletes(), equalTo(maxSeqNo)); - } else { - assertThat(indexShard.getMaxSeqNoOfUpdatesOrDeletes(), equalTo(maxSeqNoOfUpdatesOrDeletesBeforeRollback)); - } + assertThat(indexShard.getMaxSeqNoOfUpdatesOrDeletes(), equalTo(expectedMaxSeqNoOfUpdatesOrDeletes)); closeShard(indexShard, false); } From 6121477ac520dbe1167c0beadf04d97d5f391d5f Mon Sep 17 00:00:00 2001 From: David Turner Date: Thu, 17 Feb 2022 07:43:14 +0000 Subject: [PATCH 144/167] Preserve context in ResultDeduplicator (#84038) Today the `ResultDeduplicator` may complete a collection of listeners in contexts different from the ones in which they were submitted. This commit makes sure that the context is preserved in the listener. 
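Before this fix, a listener registered with the ResultDeduplicator could be completed under the thread context of whichever duplicate request actually ran the work, rather than under the context it was submitted with; the diff below addresses this by wrapping each listener in ContextPreservingActionListener.wrapPreservingContext. The following sketch illustrates the idea only, with a plain ThreadLocal standing in for Elasticsearch's ThreadContext; it is not the actual implementation.

import java.util.function.Consumer;

public class ContextPreservingSketch {
    // Stand-in for the per-request thread context.
    static final ThreadLocal<String> CONTEXT = new ThreadLocal<>();

    // Wrap a callback so it always runs under the context captured at wrap time,
    // restoring whatever context was active afterwards.
    static Consumer<String> preservingContext(Consumer<String> delegate) {
        final String captured = CONTEXT.get();
        return response -> {
            String previous = CONTEXT.get();
            CONTEXT.set(captured);
            try {
                delegate.accept(response);
            } finally {
                CONTEXT.set(previous);
            }
        };
    }

    public static void main(String[] args) {
        CONTEXT.set("request-A");
        Consumer<String> wrapped = preservingContext(
            r -> System.out.println(r + " handled under context " + CONTEXT.get())
        );
        CONTEXT.set("request-B"); // a different request is now in flight
        wrapped.accept("response"); // still prints "... under context request-A"
    }
}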
--- docs/changelog/84038.yaml | 6 ++++ .../action/ResultDeduplicator.java | 10 +++++- .../action/shard/ShardStateAction.java | 3 +- .../blobstore/BlobStoreRepository.java | 3 +- .../snapshots/SnapshotShardsService.java | 4 +-- .../tasks/TaskCancellationService.java | 3 +- .../elasticsearch/tasks/TaskManagerTests.java | 13 +++++-- .../transport/ResultDeduplicatorTests.java | 35 ++++++++++++------- 8 files changed, 56 insertions(+), 21 deletions(-) create mode 100644 docs/changelog/84038.yaml diff --git a/docs/changelog/84038.yaml b/docs/changelog/84038.yaml new file mode 100644 index 0000000000000..c4f07f6d3aefa --- /dev/null +++ b/docs/changelog/84038.yaml @@ -0,0 +1,6 @@ +pr: 84038 +summary: Preserve context in `ResultDeduplicator` +area: Infra/Core +type: bug +issues: + - 84036 diff --git a/server/src/main/java/org/elasticsearch/action/ResultDeduplicator.java b/server/src/main/java/org/elasticsearch/action/ResultDeduplicator.java index 8f3e7ee60b242..b63eeaf64e505 100644 --- a/server/src/main/java/org/elasticsearch/action/ResultDeduplicator.java +++ b/server/src/main/java/org/elasticsearch/action/ResultDeduplicator.java @@ -8,7 +8,9 @@ package org.elasticsearch.action; +import org.elasticsearch.action.support.ContextPreservingActionListener; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; +import org.elasticsearch.common.util.concurrent.ThreadContext; import java.util.ArrayList; import java.util.List; @@ -22,8 +24,13 @@ */ public final class ResultDeduplicator { + private final ThreadContext threadContext; private final ConcurrentMap requests = ConcurrentCollections.newConcurrentMap(); + public ResultDeduplicator(ThreadContext threadContext) { + this.threadContext = threadContext; + } + /** * Ensures a given request not executed multiple times when another equal request is already in-flight. 
* If the request is not yet known to the deduplicator it will invoke the passed callback with an {@link ActionListener} @@ -35,7 +42,8 @@ public final class ResultDeduplicator { * @param callback Callback to be invoked with request and completion listener the first time the request is added to the deduplicator */ public void executeOnce(T request, ActionListener listener, BiConsumer> callback) { - ActionListener completionListener = requests.computeIfAbsent(request, CompositeListener::new).addListener(listener); + ActionListener completionListener = requests.computeIfAbsent(request, CompositeListener::new) + .addListener(ContextPreservingActionListener.wrapPreservingContext(listener, threadContext)); if (completionListener != null) { callback.accept(request, completionListener); } diff --git a/server/src/main/java/org/elasticsearch/cluster/action/shard/ShardStateAction.java b/server/src/main/java/org/elasticsearch/cluster/action/shard/ShardStateAction.java index 3449423c15f1f..6fe9ded74eaf0 100644 --- a/server/src/main/java/org/elasticsearch/cluster/action/shard/ShardStateAction.java +++ b/server/src/main/java/org/elasticsearch/cluster/action/shard/ShardStateAction.java @@ -81,7 +81,7 @@ public class ShardStateAction { private final ThreadPool threadPool; // we deduplicate these shard state requests in order to avoid sending duplicate failed/started shard requests for a shard - private final ResultDeduplicator remoteShardStateUpdateDeduplicator = new ResultDeduplicator<>(); + private final ResultDeduplicator remoteShardStateUpdateDeduplicator; @Inject public ShardStateAction( @@ -94,6 +94,7 @@ public ShardStateAction( this.transportService = transportService; this.clusterService = clusterService; this.threadPool = threadPool; + this.remoteShardStateUpdateDeduplicator = new ResultDeduplicator<>(threadPool.getThreadContext()); transportService.registerRequestHandler( SHARD_STARTED_ACTION_NAME, diff --git a/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java b/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java index 28e1897a0272d..b80a0124bc5d9 100644 --- a/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java +++ b/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java @@ -403,6 +403,7 @@ protected BlobStoreRepository( this.namedXContentRegistry = namedXContentRegistry; this.basePath = basePath; this.maxSnapshotCount = MAX_SNAPSHOTS_SETTING.get(metadata.settings()); + this.repoDataDeduplicator = new ResultDeduplicator<>(threadPool.getThreadContext()); } @Override @@ -1866,7 +1867,7 @@ public void clusterStateProcessed(ClusterState oldState, ClusterState newState) * {@link #bestEffortConsistency} must be {@code false}, in which case we can assume that the {@link RepositoryData} loaded is * unique for a given value of {@link #metadata} at any point in time. */ - private final ResultDeduplicator repoDataDeduplicator = new ResultDeduplicator<>(); + private final ResultDeduplicator repoDataDeduplicator; private void doGetRepositoryData(ActionListener listener) { // Retry loading RepositoryData in a loop in case we run into concurrent modifications of the repository. 
diff --git a/server/src/main/java/org/elasticsearch/snapshots/SnapshotShardsService.java b/server/src/main/java/org/elasticsearch/snapshots/SnapshotShardsService.java index 646df885cb48c..4223a4239c3a3 100644 --- a/server/src/main/java/org/elasticsearch/snapshots/SnapshotShardsService.java +++ b/server/src/main/java/org/elasticsearch/snapshots/SnapshotShardsService.java @@ -82,8 +82,7 @@ public class SnapshotShardsService extends AbstractLifecycleComponent implements private final Map> shardSnapshots = new HashMap<>(); // A map of snapshots to the shardIds that we already reported to the master as failed - private final ResultDeduplicator remoteFailedRequestDeduplicator = - new ResultDeduplicator<>(); + private final ResultDeduplicator remoteFailedRequestDeduplicator; public SnapshotShardsService( Settings settings, @@ -97,6 +96,7 @@ public SnapshotShardsService( this.transportService = transportService; this.clusterService = clusterService; this.threadPool = transportService.getThreadPool(); + this.remoteFailedRequestDeduplicator = new ResultDeduplicator<>(threadPool.getThreadContext()); if (DiscoveryNode.canContainData(settings)) { // this is only useful on the nodes that can hold data clusterService.addListener(this); diff --git a/server/src/main/java/org/elasticsearch/tasks/TaskCancellationService.java b/server/src/main/java/org/elasticsearch/tasks/TaskCancellationService.java index bd6078ec558e5..cd5bbd56a315a 100644 --- a/server/src/main/java/org/elasticsearch/tasks/TaskCancellationService.java +++ b/server/src/main/java/org/elasticsearch/tasks/TaskCancellationService.java @@ -44,11 +44,12 @@ public class TaskCancellationService { private static final Logger logger = LogManager.getLogger(TaskCancellationService.class); private final TransportService transportService; private final TaskManager taskManager; - private final ResultDeduplicator deduplicator = new ResultDeduplicator<>(); + private final ResultDeduplicator deduplicator; public TaskCancellationService(TransportService transportService) { this.transportService = transportService; this.taskManager = transportService.getTaskManager(); + this.deduplicator = new ResultDeduplicator<>(transportService.getThreadPool().getThreadContext()); transportService.registerRequestHandler( BAN_PARENT_ACTION_NAME, ThreadPool.Names.SAME, diff --git a/server/src/test/java/org/elasticsearch/tasks/TaskManagerTests.java b/server/src/test/java/org/elasticsearch/tasks/TaskManagerTests.java index 9e8fc5c8983a6..6e40e9434141e 100644 --- a/server/src/test/java/org/elasticsearch/tasks/TaskManagerTests.java +++ b/server/src/test/java/org/elasticsearch/tasks/TaskManagerTests.java @@ -46,6 +46,7 @@ import static org.hamcrest.Matchers.everyItem; import static org.hamcrest.Matchers.in; import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; public class TaskManagerTests extends ESTestCase { private ThreadPool threadPool; @@ -76,7 +77,9 @@ public void testResultsServiceRetryTotalTime() { public void testTrackingChannelTask() throws Exception { final TaskManager taskManager = new TaskManager(Settings.EMPTY, threadPool, Set.of()); Set cancelledTasks = ConcurrentCollections.newConcurrentSet(); - taskManager.setTaskCancellationService(new TaskCancellationService(mock(TransportService.class)) { + final var transportServiceMock = mock(TransportService.class); + when(transportServiceMock.getThreadPool()).thenReturn(threadPool); + taskManager.setTaskCancellationService(new TaskCancellationService(transportServiceMock) { @Override void 
cancelTaskAndDescendants(CancellableTask task, String reason, boolean waitForCompletion, ActionListener listener) { assertThat(reason, equalTo("channel was closed")); @@ -124,7 +127,9 @@ void cancelTaskAndDescendants(CancellableTask task, String reason, boolean waitF public void testTrackingTaskAndCloseChannelConcurrently() throws Exception { final TaskManager taskManager = new TaskManager(Settings.EMPTY, threadPool, Set.of()); Set cancelledTasks = ConcurrentCollections.newConcurrentSet(); - taskManager.setTaskCancellationService(new TaskCancellationService(mock(TransportService.class)) { + final var transportServiceMock = mock(TransportService.class); + when(transportServiceMock.getThreadPool()).thenReturn(threadPool); + taskManager.setTaskCancellationService(new TaskCancellationService(transportServiceMock) { @Override void cancelTaskAndDescendants(CancellableTask task, String reason, boolean waitForCompletion, ActionListener listener) { assertTrue("task [" + task + "] was cancelled already", cancelledTasks.add(task)); @@ -180,7 +185,9 @@ void cancelTaskAndDescendants(CancellableTask task, String reason, boolean waitF public void testRemoveBansOnChannelDisconnects() throws Exception { final TaskManager taskManager = new TaskManager(Settings.EMPTY, threadPool, Set.of()); - taskManager.setTaskCancellationService(new TaskCancellationService(mock(TransportService.class)) { + final var transportServiceMock = mock(TransportService.class); + when(transportServiceMock.getThreadPool()).thenReturn(threadPool); + taskManager.setTaskCancellationService(new TaskCancellationService(transportServiceMock) { @Override void cancelTaskAndDescendants(CancellableTask task, String reason, boolean waitForCompletion, ActionListener listener) {} }); diff --git a/server/src/test/java/org/elasticsearch/transport/ResultDeduplicatorTests.java b/server/src/test/java/org/elasticsearch/transport/ResultDeduplicatorTests.java index 2bdfa3cc7865c..2d9fa940d5d5a 100644 --- a/server/src/test/java/org/elasticsearch/transport/ResultDeduplicatorTests.java +++ b/server/src/test/java/org/elasticsearch/transport/ResultDeduplicatorTests.java @@ -10,6 +10,8 @@ import org.apache.lucene.util.SetOnce; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ResultDeduplicator; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.tasks.TaskId; import org.elasticsearch.test.ESTestCase; @@ -29,8 +31,11 @@ public void testRequestDeduplication() throws Exception { @Override public void setParentTask(final TaskId taskId) {} }; - final ResultDeduplicator deduplicator = new ResultDeduplicator<>(); + final ThreadContext threadContext = new ThreadContext(Settings.EMPTY); + final ResultDeduplicator deduplicator = new ResultDeduplicator<>(threadContext); final SetOnce> listenerHolder = new SetOnce<>(); + final var headerName = "thread-context-header"; + final var headerGenerator = new AtomicInteger(); int iterationsPerThread = scaledRandomIntBetween(100, 1000); Thread[] threads = new Thread[between(1, 4)]; Phaser barrier = new Phaser(threads.length + 1); @@ -38,18 +43,24 @@ public void setParentTask(final TaskId taskId) {} threads[i] = new Thread(() -> { barrier.arriveAndAwaitAdvance(); for (int n = 0; n < iterationsPerThread; n++) { - deduplicator.executeOnce(request, new ActionListener() { - @Override - public void onResponse(Void aVoid) { - successCount.incrementAndGet(); - } + final var headerValue = 
Integer.toString(headerGenerator.incrementAndGet()); + try (var ignored = threadContext.stashContext()) { + threadContext.putHeader(headerName, headerValue); + deduplicator.executeOnce(request, new ActionListener<>() { + @Override + public void onResponse(Void aVoid) { + assertThat(threadContext.getHeader(headerName), equalTo(headerValue)); + successCount.incrementAndGet(); + } - @Override - public void onFailure(Exception e) { - assertThat(e, sameInstance(failure)); - failureCount.incrementAndGet(); - } - }, (req, reqListener) -> listenerHolder.set(reqListener)); + @Override + public void onFailure(Exception e) { + assertThat(threadContext.getHeader(headerName), equalTo(headerValue)); + assertThat(e, sameInstance(failure)); + failureCount.incrementAndGet(); + } + }, (req, reqListener) -> listenerHolder.set(reqListener)); + } } }); threads[i].start(); From 48e562ad9eec6df0cc1d6da1fd8ecb1302da7421 Mon Sep 17 00:00:00 2001 From: Hendrik Muhs Date: Thu, 17 Feb 2022 09:38:48 +0100 Subject: [PATCH 145/167] [Transform] Improve robustness of checkpointing (#80984) This commit rewrites checkpointing as internal actions, reducing several sub-calls to a single call per data node that holds at least one primary shard of the indices of interest. Robustness: the current checkpointing sends a request to every shard - primary and replica - and collects the results. If one request fails, even for a replica, checkpointing fails. See #75780 for details. Performance: the current checkpointing is wasteful: it uses the get index and get index stats APIs, which issue far more calls and execute far more code, producing results we are not interested in. Number of requests before and after: before: 1 + #shards * #indices * (#replicas + 1) after: #data_nodes_holding_gt1_shard For example, 5 indices with 3 shards and 1 replica each previously required 1 + 3 * 5 * 2 = 31 requests, while the new implementation needs at most one request per data node that holds a primary shard. Fixes #75780 --- .../authz/privilege/IndexPrivilege.java | 4 +- .../transform/action/GetCheckpointAction.java | 156 ++++++++++ .../action/GetCheckpointNodeAction.java | 151 +++++++++ .../GetCheckpointActionRequestTests.java | 67 ++++ .../GetCheckpointActionResponseTests.java | 44 +++ .../GetCheckpointNodeActionRequestTests.java | 75 +++++ .../GetCheckpointNodeActionResponseTests.java | 44 +++ .../xpack/security/operator/Constants.java | 2 + .../test/multi_cluster/80_transform.yml | 7 +- .../test/remote_cluster/80_transform.yml | 59 +++- .../TransformCheckpointServiceNodeTests.java | 54 ++-- .../checkpoint/TransformGetCheckpointIT.java | 97 ++++++ .../TransformGetCheckpointTests.java | 291 ++++++++++++++++++ .../xpack/transform/Transform.java | 10 +- .../action/TransportGetCheckpointAction.java | 225 ++++++++++++++ .../TransportGetCheckpointNodeAction.java | 63 ++++ .../checkpoint/DefaultCheckpointProvider.java | 78 ++++- .../TimeBasedCheckpointProviderTests.java | 25 +- 18 files changed, 1401 insertions(+), 51 deletions(-) create mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/GetCheckpointAction.java create mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/GetCheckpointNodeAction.java create mode 100644 x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/action/GetCheckpointActionRequestTests.java create mode 100644 x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/action/GetCheckpointActionResponseTests.java create mode 100644 x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/action/GetCheckpointNodeActionRequestTests.java create mode 100644
x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/action/GetCheckpointNodeActionResponseTests.java create mode 100644 x-pack/plugin/transform/src/internalClusterTest/java/org/elasticsearch/xpack/transform/checkpoint/TransformGetCheckpointIT.java create mode 100644 x-pack/plugin/transform/src/internalClusterTest/java/org/elasticsearch/xpack/transform/checkpoint/TransformGetCheckpointTests.java create mode 100644 x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportGetCheckpointAction.java create mode 100644 x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportGetCheckpointNodeAction.java diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/IndexPrivilege.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/IndexPrivilege.java index 0f79f7b9310ae..24589c3525f00 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/IndexPrivilege.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/IndexPrivilege.java @@ -35,6 +35,7 @@ import org.elasticsearch.xpack.core.ilm.action.ExplainLifecycleAction; import org.elasticsearch.xpack.core.rollup.action.GetRollupIndexCapsAction; import org.elasticsearch.xpack.core.security.support.Automatons; +import org.elasticsearch.xpack.core.transform.action.GetCheckpointAction; import java.util.Arrays; import java.util.Collection; @@ -99,7 +100,8 @@ public final class IndexPrivilege extends Privilege { GetDataStreamAction.NAME, ResolveIndexAction.NAME, FieldCapabilitiesAction.NAME + "*", - GetRollupIndexCapsAction.NAME + "*" + GetRollupIndexCapsAction.NAME + "*", + GetCheckpointAction.NAME + "*" // transform internal action ); private static final Automaton MANAGE_FOLLOW_INDEX_AUTOMATON = patterns( PutFollowAction.NAME, diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/GetCheckpointAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/GetCheckpointAction.java new file mode 100644 index 0000000000000..168853fa9bf70 --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/GetCheckpointAction.java @@ -0,0 +1,156 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.core.transform.action; + +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.action.ActionType; +import org.elasticsearch.action.IndicesRequest; +import org.elasticsearch.action.support.IndicesOptions; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; + +import java.io.IOException; +import java.util.Arrays; +import java.util.Collections; +import java.util.Map; +import java.util.Map.Entry; +import java.util.Objects; + +/** + * Transform internal API (no REST layer) to retrieve index checkpoints. 
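+ *
+ * A hedged, illustrative sketch of consuming the response (the index name and values below are
+ * invented for this example, not taken from the change itself). The response maps each resolved
+ * index to an array with one global checkpoint per shard; -1 marks a shard that has not yet seen
+ * any operations:
+ *
+ * <pre>
+ * // one map entry per resolved index, one array slot per shard
+ * long[] perShard = response.getCheckpoints().get("my-index"); // e.g. { 17, 4, -1 } for three shards
+ * long sum = java.util.Arrays.stream(perShard).sum(); // e.g. compare sums across two checkpoints
+ * </pre>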
+ */ +public class GetCheckpointAction extends ActionType { + + public static final GetCheckpointAction INSTANCE = new GetCheckpointAction(); + + // note: this is an index action and requires `view_index_metadata` + public static final String NAME = "indices:internal/transform/checkpoint"; + + private GetCheckpointAction() { + super(NAME, GetCheckpointAction.Response::new); + } + + public static class Request extends ActionRequest implements IndicesRequest.Replaceable { + + private String[] indices; + private final IndicesOptions indicesOptions; + + public Request(StreamInput in) throws IOException { + super(in); + indices = in.readStringArray(); + indicesOptions = IndicesOptions.readIndicesOptions(in); + } + + public Request(String[] indices, IndicesOptions indicesOptions) { + this.indices = indices != null ? indices : Strings.EMPTY_ARRAY; + this.indicesOptions = indicesOptions; + } + + @Override + public ActionRequestValidationException validate() { + return null; + } + + @Override + public String[] indices() { + return indices; + } + + @Override + public IndicesOptions indicesOptions() { + return indicesOptions; + } + + @Override + public boolean equals(Object obj) { + if (obj == this) { + return true; + } + if (obj == null || obj.getClass() != getClass()) { + return false; + } + Request that = (Request) obj; + + return Arrays.equals(indices, that.indices) && Objects.equals(indicesOptions, that.indicesOptions); + } + + @Override + public int hashCode() { + return Objects.hash(Arrays.hashCode(indices), indicesOptions); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeStringArray(indices); + indicesOptions.writeIndicesOptions(out); + } + + @Override + public IndicesRequest indices(String... 
indices) { + this.indices = indices; + return this; + } + + // this action does not allow remote indices, but they have to be resolved upfront, see {@link DefaultCheckpointProvider} + @Override + public boolean allowsRemoteIndices() { + return false; + } + } + + public static class Response extends ActionResponse { + + private final Map checkpoints; + + public Response(Map checkpoints) { + this.checkpoints = checkpoints; + } + + public Response(StreamInput in) throws IOException { + this.checkpoints = in.readOrderedMap(StreamInput::readString, StreamInput::readLongArray); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeMap(getCheckpoints(), StreamOutput::writeString, StreamOutput::writeLongArray); + } + + public Map getCheckpoints() { + return Collections.unmodifiableMap(checkpoints); + } + + @Override + public boolean equals(Object obj) { + if (obj == this) { + return true; + } + if (obj == null || obj.getClass() != getClass()) { + return false; + } + Response that = (Response) obj; + + return this.checkpoints.size() == that.checkpoints.size() + && this.checkpoints.entrySet().stream().allMatch(e -> Arrays.equals(e.getValue(), that.checkpoints.get(e.getKey()))); + } + + @Override + public int hashCode() { + int hash = 1; + + for (Entry e : checkpoints.entrySet()) { + hash = 31 * hash + Objects.hash(e.getKey(), Arrays.hashCode(e.getValue())); + } + + return hash; + } + } +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/GetCheckpointNodeAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/GetCheckpointNodeAction.java new file mode 100644 index 0000000000000..341cc0a9cec0b --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/action/GetCheckpointNodeAction.java @@ -0,0 +1,151 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.core.transform.action; + +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.action.ActionType; +import org.elasticsearch.action.IndicesRequest; +import org.elasticsearch.action.OriginalIndices; +import org.elasticsearch.action.support.IndicesOptions; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.index.shard.ShardId; + +import java.io.IOException; +import java.util.Arrays; +import java.util.Collections; +import java.util.Map; +import java.util.Map.Entry; +import java.util.Objects; +import java.util.Set; + +public class GetCheckpointNodeAction extends ActionType { + + public static final GetCheckpointNodeAction INSTANCE = new GetCheckpointNodeAction(); + + // note: this is an index action and requires `view_index_metadata` + public static final String NAME = GetCheckpointAction.NAME + "[n]"; + + private GetCheckpointNodeAction() { + super(NAME, GetCheckpointNodeAction.Response::new); + } + + public static class Response extends ActionResponse { + private final Map checkpoints; + + public Response(Map checkpoints) { + this.checkpoints = checkpoints; + } + + public Response(StreamInput in) throws IOException { + this.checkpoints = in.readOrderedMap(StreamInput::readString, StreamInput::readLongArray); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeMap(getCheckpoints(), StreamOutput::writeString, StreamOutput::writeLongArray); + } + + public Map getCheckpoints() { + return checkpoints; + } + + @Override + public boolean equals(Object obj) { + if (obj == this) { + return true; + } + if (obj == null || obj.getClass() != getClass()) { + return false; + } + Response that = (Response) obj; + + return this.checkpoints.size() == that.checkpoints.size() + && this.checkpoints.entrySet().stream().allMatch(e -> Arrays.equals(e.getValue(), that.checkpoints.get(e.getKey()))); + } + + @Override + public int hashCode() { + int hash = 1; + + for (Entry e : checkpoints.entrySet()) { + hash = 31 * hash + Objects.hash(e.getKey(), Arrays.hashCode(e.getValue())); + } + + return hash; + } + } + + public static class Request extends ActionRequest implements IndicesRequest { + + private final Set shards; + private final OriginalIndices originalIndices; + + public Request(Set shards, OriginalIndices originalIndices) { + this.shards = shards; + this.originalIndices = originalIndices; + } + + public Request(StreamInput in) throws IOException { + super(in); + this.shards = Collections.unmodifiableSet(in.readSet(ShardId::new)); + this.originalIndices = OriginalIndices.readOriginalIndices(in); + } + + @Override + public ActionRequestValidationException validate() { + return null; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeCollection(shards); + OriginalIndices.writeOriginalIndices(originalIndices, out); + } + + public Set getShards() { + return shards; + } + + public OriginalIndices getOriginalIndices() { + return originalIndices; + } + + @Override + public boolean equals(Object obj) { + if (obj == this) { + return true; + } + if (obj == null || obj.getClass() != getClass()) { + return false; + } + Request that = (Request) obj; + + return Objects.equals(shards, that.shards) && Objects.equals(originalIndices, that.originalIndices); + } + + @Override 
+ public int hashCode() { + return Objects.hash(shards, originalIndices); + } + + @Override + public String[] indices() { + return originalIndices.indices(); + } + + @Override + public IndicesOptions indicesOptions() { + return originalIndices.indicesOptions(); + } + + } +} diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/action/GetCheckpointActionRequestTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/action/GetCheckpointActionRequestTests.java new file mode 100644 index 0000000000000..48704068e3d8f --- /dev/null +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/action/GetCheckpointActionRequestTests.java @@ -0,0 +1,67 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.core.transform.action; + +import org.elasticsearch.action.search.SearchRequest; +import org.elasticsearch.action.support.IndicesOptions; +import org.elasticsearch.common.io.stream.Writeable.Reader; +import org.elasticsearch.test.AbstractWireSerializingTestCase; +import org.elasticsearch.xpack.core.transform.action.GetCheckpointAction.Request; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.Locale; + +public class GetCheckpointActionRequestTests extends AbstractWireSerializingTestCase { + + @Override + protected Request createTestInstance() { + return new Request( + randomBoolean() ? null : generateRandomStringArray(10, 10, false, false), + IndicesOptions.fromParameters( + randomFrom(IndicesOptions.WildcardStates.values()).name().toLowerCase(Locale.ROOT), + Boolean.toString(randomBoolean()), + Boolean.toString(randomBoolean()), + Boolean.toString(randomBoolean()), + SearchRequest.DEFAULT_INDICES_OPTIONS + ) + ); + } + + @Override + protected Reader instanceReader() { + return Request::new; + } + + @Override + protected Request mutateInstance(Request instance) throws IOException { + List indices = instance.indices() != null ? new ArrayList<>(Arrays.asList(instance.indices())) : new ArrayList<>(); + IndicesOptions indicesOptions = instance.indicesOptions(); + + switch (between(0, 1)) { + case 0: + indices.add(randomAlphaOfLengthBetween(1, 20)); + break; + case 1: + indicesOptions = IndicesOptions.fromParameters( + randomFrom(IndicesOptions.WildcardStates.values()).name().toLowerCase(Locale.ROOT), + Boolean.toString(instance.indicesOptions().ignoreUnavailable() == false), + Boolean.toString(instance.indicesOptions().allowNoIndices() == false), + Boolean.toString(instance.indicesOptions().ignoreThrottled() == false), + SearchRequest.DEFAULT_INDICES_OPTIONS + ); + break; + default: + throw new AssertionError("Illegal randomization branch"); + } + + return new Request(indices.toArray(new String[0]), indicesOptions); + } +} diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/action/GetCheckpointActionResponseTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/action/GetCheckpointActionResponseTests.java new file mode 100644 index 0000000000000..fdb7a59a4792e --- /dev/null +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/action/GetCheckpointActionResponseTests.java @@ -0,0 +1,44 @@ +/* + * Copyright Elasticsearch B.V. 
and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.core.transform.action; + +import org.elasticsearch.common.io.stream.Writeable.Reader; +import org.elasticsearch.test.AbstractWireSerializingTestCase; +import org.elasticsearch.xpack.core.transform.action.GetCheckpointAction.Response; + +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.TreeMap; + +public class GetCheckpointActionResponseTests extends AbstractWireSerializingTestCase { + + public static Response randomCheckpointResponse() { + Map checkpointsByIndex = new TreeMap<>(); + int indices = randomIntBetween(1, 10); + for (int i = 0; i < indices; ++i) { + List checkpoints = new ArrayList<>(); + int shards = randomIntBetween(1, 20); + for (int j = 0; j < shards; ++j) { + checkpoints.add(randomLongBetween(0, 1_000_000)); + } + checkpointsByIndex.put(randomAlphaOfLengthBetween(1, 10), checkpoints.stream().mapToLong(l -> l).toArray()); + } + return new Response(checkpointsByIndex); + } + + @Override + protected Reader instanceReader() { + return Response::new; + } + + @Override + protected Response createTestInstance() { + return randomCheckpointResponse(); + } +} diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/action/GetCheckpointNodeActionRequestTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/action/GetCheckpointNodeActionRequestTests.java new file mode 100644 index 0000000000000..fd3573f1acae8 --- /dev/null +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/action/GetCheckpointNodeActionRequestTests.java @@ -0,0 +1,75 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.core.transform.action; + +import org.elasticsearch.action.OriginalIndices; +import org.elasticsearch.action.support.IndicesOptions; +import org.elasticsearch.common.io.stream.Writeable.Reader; +import org.elasticsearch.index.shard.ShardId; +import org.elasticsearch.test.AbstractWireSerializingTestCase; +import org.elasticsearch.xpack.core.transform.action.GetCheckpointNodeAction.Request; + +import java.io.IOException; +import java.util.HashSet; +import java.util.Set; + +public class GetCheckpointNodeActionRequestTests extends AbstractWireSerializingTestCase<Request> { + + @Override + protected Reader<Request> instanceReader() { + return Request::new; + } + + @Override + protected Request createTestInstance() { + Set<ShardId> shards = new HashSet<>(); + OriginalIndices originalIndices = randomOriginalIndices(randomIntBetween(0, 20)); + int numberOfRandomShardIds = randomInt(10); + + for (int i = 0; i < numberOfRandomShardIds; ++i) { + shards.add(new ShardId(randomAlphaOfLength(4) + i, randomAlphaOfLength(4), randomInt(5))); + } + + return new Request(shards, originalIndices); + } + + @Override + protected Request mutateInstance(Request instance) throws IOException { + + switch (between(0, 1)) { + case 0 -> { + Set<ShardId> shards = new HashSet<>(instance.getShards()); + if (randomBoolean() && shards.size() > 0) { + ShardId firstShard = shards.iterator().next(); + shards.remove(firstShard); + if (randomBoolean()) { + shards.add(new ShardId(randomAlphaOfLength(8), randomAlphaOfLength(4), randomInt(5))); + } + } else { + shards.add(new ShardId(randomAlphaOfLength(8), randomAlphaOfLength(4), randomInt(5))); + } + return new Request(shards, instance.getOriginalIndices()); + } + case 1 -> { + OriginalIndices originalIndices = randomOriginalIndices(instance.indices().length + 1); + return new Request(instance.getShards(), originalIndices); + } + default -> throw new IllegalStateException("The test should mutate exactly one parameter"); + } + } + + private OriginalIndices randomOriginalIndices(int numIndices) { + String[] randomIndices = new String[numIndices]; + for (int i = 0; i < numIndices; i++) { + randomIndices[i] = randomAlphaOfLengthBetween(5, 10); + } + IndicesOptions indicesOptions = randomBoolean() ? IndicesOptions.strictExpand() : IndicesOptions.lenientExpandOpen(); + return new OriginalIndices(randomIndices, indicesOptions); + } + +} diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/action/GetCheckpointNodeActionResponseTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/action/GetCheckpointNodeActionResponseTests.java new file mode 100644 index 0000000000000..f189a4f0faae2 --- /dev/null +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/action/GetCheckpointNodeActionResponseTests.java @@ -0,0 +1,44 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0.
+ */ + +package org.elasticsearch.xpack.core.transform.action; + +import org.elasticsearch.common.io.stream.Writeable.Reader; +import org.elasticsearch.test.AbstractWireSerializingTestCase; +import org.elasticsearch.xpack.core.transform.action.GetCheckpointNodeAction.Response; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +public class GetCheckpointNodeActionResponseTests extends AbstractWireSerializingTestCase { + + public static Response randomNodeCheckpointResponse() { + Map checkpointsByIndex = new HashMap<>(); + int indices = randomIntBetween(1, 10); + for (int i = 0; i < indices; ++i) { + List checkpoints = new ArrayList<>(); + int shards = randomIntBetween(1, 20); + for (int j = 0; j < shards; ++j) { + checkpoints.add(randomLongBetween(0, 1_000_000)); + } + checkpointsByIndex.put(randomAlphaOfLengthBetween(1, 10), checkpoints.stream().mapToLong(l -> l).toArray()); + } + return new Response(checkpointsByIndex); + } + + @Override + protected Reader instanceReader() { + return Response::new; + } + + @Override + protected Response createTestInstance() { + return randomNodeCheckpointResponse(); + } +} diff --git a/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java b/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java index 94378f91d0ebc..c3e8ba51bc369 100644 --- a/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java +++ b/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java @@ -95,6 +95,8 @@ public class Constants { "cluster:admin/transform/update", "cluster:admin/transform/upgrade", "cluster:admin/transform/validate", + "indices:internal/transform/checkpoint", + "indices:internal/transform/checkpoint[n]", // "cluster:admin/voting_config/add_exclusions", // "cluster:admin/voting_config/clear_exclusions", "cluster:admin/xpack/ccr/auto_follow_pattern/activate", diff --git a/x-pack/plugin/transform/qa/multi-cluster-tests-with-security/src/test/resources/rest-api-spec/test/multi_cluster/80_transform.yml b/x-pack/plugin/transform/qa/multi-cluster-tests-with-security/src/test/resources/rest-api-spec/test/multi_cluster/80_transform.yml index cafa1fc384d06..4ded3005fb4ef 100644 --- a/x-pack/plugin/transform/qa/multi-cluster-tests-with-security/src/test/resources/rest-api-spec/test/multi_cluster/80_transform.yml +++ b/x-pack/plugin/transform/qa/multi-cluster-tests-with-security/src/test/resources/rest-api-spec/test/multi_cluster/80_transform.yml @@ -137,7 +137,7 @@ teardown: transform_id: "simple-remote-transform" body: > { - "source": { "index": ["my_remote_cluster:remote_test_index", "my_remote_cluster:remote_test_index_2"] } + "source": { "index": ["my_remote_cluster:remote_test_index*"] } } - do: headers: { Authorization: "Basic am9lOnRyYW5zZm9ybS1wYXNzd29yZA==" } @@ -146,7 +146,8 @@ teardown: - match: { count: 1 } - match: { transforms.0.id: "simple-remote-transform" } - match: { transforms.0.state: "stopped" } - # we added test_index_2, which has 2 more docs: + # the source now includes test_index_2, which has 2 more docs + # note that test_index_3 fits the wildcard pattern, but is not authorized, this test should not return a count of 4 as this would mean broken access control - match: { 
transforms.0.checkpointing.operations_behind: 2 } - do: @@ -155,7 +156,7 @@ teardown: transform_id: "simple-remote-transform" - do: - catch: /Cannot preview transform \[simple-remote-transform\] because user bob lacks all the required permissions for indices. \[my_remote_cluster:remote_test_index, my_remote_cluster:remote_test_index_2, simple-remote-transform\]/ + catch: /Cannot preview transform \[simple-remote-transform\] because user bob lacks all the required permissions for indices. \[my_remote_cluster:remote_test_index\*, simple-remote-transform\]/ headers: { Authorization: "Basic Ym9iOnRyYW5zZm9ybS1wYXNzd29yZA==" } # This is bob transform.preview_transform: transform_id: "simple-remote-transform" diff --git a/x-pack/plugin/transform/qa/multi-cluster-tests-with-security/src/test/resources/rest-api-spec/test/remote_cluster/80_transform.yml b/x-pack/plugin/transform/qa/multi-cluster-tests-with-security/src/test/resources/rest-api-spec/test/remote_cluster/80_transform.yml index c550f148f956b..83f94a213303a 100644 --- a/x-pack/plugin/transform/qa/multi-cluster-tests-with-security/src/test/resources/rest-api-spec/test/remote_cluster/80_transform.yml +++ b/x-pack/plugin/transform/qa/multi-cluster-tests-with-security/src/test/resources/rest-api-spec/test/remote_cluster/80_transform.yml @@ -22,7 +22,7 @@ setup: "cluster": [], "indices": [ { - "names": ["remote_test_index*"], + "names": ["remote_test_index", "remote_test_index_2"], "privileges": ["read", "view_index_metadata"] } ] @@ -139,3 +139,60 @@ teardown: - length: { aggregations.user.buckets: 2 } - match: { aggregations.user.buckets.0.key: "d" } - match: { aggregations.user.buckets.0.doc_count: 1 } + + # create a 3rd index, but for this index joe has no privileges + - do: + indices.create: + index: remote_test_index_3 + body: + settings: + index: + number_of_shards: 3 + number_of_replicas: 0 + aliases: + test_alias: {} + mappings: + properties: + time: + type: date + user: + type: keyword + stars: + type: integer + coolness: + type: integer + + - do: + bulk: + refresh: true + body: + - '{"index": {"_index": "remote_test_index_3"}}' + - '{"user": "z", "stars": 2, "date" : "2018-11-29T12:12:12.123456789Z"}' + - '{"index": {"_index": "remote_test_index_3"}}' + - '{"user": "x", "stars": 1, "date" : "2018-11-29T12:14:12.123456789Z"}' + - do: + search: + rest_total_hits_as_int: true + index: remote_test_index_3 + body: + aggs: + user: + terms: + field: user + + - match: { _shards.total: 3 } + - match: { hits.total: 2 } + - length: { aggregations.user.buckets: 2 } + + # search should fail for joe + - do: + catch: /action \[indices:data/read/search\] is unauthorized for user \[joe\] .*/ + headers: { Authorization: "Basic am9lOnRyYW5zZm9ybS1wYXNzd29yZA==" } + search: + rest_total_hits_as_int: true + index: remote_test_index_3 + body: + aggs: + user: + terms: + field: user diff --git a/x-pack/plugin/transform/src/internalClusterTest/java/org/elasticsearch/xpack/transform/checkpoint/TransformCheckpointServiceNodeTests.java b/x-pack/plugin/transform/src/internalClusterTest/java/org/elasticsearch/xpack/transform/checkpoint/TransformCheckpointServiceNodeTests.java index 7c4746989c888..1e3d89b565e0a 100644 --- a/x-pack/plugin/transform/src/internalClusterTest/java/org/elasticsearch/xpack/transform/checkpoint/TransformCheckpointServiceNodeTests.java +++ b/x-pack/plugin/transform/src/internalClusterTest/java/org/elasticsearch/xpack/transform/checkpoint/TransformCheckpointServiceNodeTests.java @@ -44,6 +44,8 @@ import 
org.elasticsearch.indices.TestIndexNameExpressionResolver; import org.elasticsearch.search.suggest.completion.CompletionStats; import org.elasticsearch.test.client.NoOpClient; +import org.elasticsearch.transport.ActionNotFoundTransportException; +import org.elasticsearch.xpack.core.transform.action.GetCheckpointAction; import org.elasticsearch.xpack.core.transform.transforms.TransformCheckpoint; import org.elasticsearch.xpack.core.transform.transforms.TransformCheckpointStats; import org.elasticsearch.xpack.core.transform.transforms.TransformCheckpointingInfo; @@ -64,11 +66,9 @@ import java.time.Instant; import java.util.ArrayList; import java.util.Collections; -import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Map.Entry; -import java.util.Set; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; @@ -85,22 +85,24 @@ public class TransformCheckpointServiceNodeTests extends TransformSingleNodeTest private class MockClientForCheckpointing extends NoOpClient { - private volatile ShardStats[] shardStats; + private final boolean supportTransformCheckpointApi; + private volatile Map checkpoints; private volatile String[] indices; - MockClientForCheckpointing(String testName) { + /** + * Mock client for checkpointing + * + * @param testName name of the test, used for naming the threadpool + * @param supportTransformCheckpointApi whether to mock the checkpoint API, if false throws action not found + */ + MockClientForCheckpointing(String testName, boolean supportTransformCheckpointApi) { super(testName); + this.supportTransformCheckpointApi = supportTransformCheckpointApi; } - void setShardStats(ShardStats[] shardStats) { - this.shardStats = shardStats; - - Set indicesSet = new HashSet<>(); - for (ShardStats s : shardStats) { - indicesSet.add(s.getShardRouting().getIndexName()); - } - - this.indices = indicesSet.toArray(new String[0]); + void setCheckpoints(Map checkpoints) { + this.checkpoints = checkpoints; + this.indices = checkpoints.keySet().toArray(new String[0]); } @SuppressWarnings("unchecked") @@ -111,6 +113,18 @@ protected void ActionListener listener ) { + if (request instanceof GetCheckpointAction.Request) { + // throw action not found if checkpoint API is not supported, transform should fallback to legacy checkpointing + if (supportTransformCheckpointApi == false) { + listener.onFailure(new ActionNotFoundTransportException(GetCheckpointAction.NAME)); + return; + } + + final GetCheckpointAction.Response getCheckpointResponse = new GetCheckpointAction.Response(checkpoints); + listener.onResponse((Response) getCheckpointResponse); + return; + } + if (request instanceof GetIndexRequest) { // for this test we only need the indices assert (indices != null); @@ -118,11 +132,13 @@ protected void listener.onResponse((Response) indexResponse); return; - } else if (request instanceof IndicesStatsRequest) { + } + + if (request instanceof IndicesStatsRequest) { // IndicesStatsResponse is package private, therefore using a mock final IndicesStatsResponse indicesStatsResponse = mock(IndicesStatsResponse.class); - when(indicesStatsResponse.getShards()).thenReturn(shardStats); + when(indicesStatsResponse.getShards()).thenReturn(createShardStats(checkpoints)); when(indicesStatsResponse.getFailedShards()).thenReturn(0); listener.onResponse((Response) indicesStatsResponse); @@ -137,7 +153,7 @@ protected void public void createComponents() { // it's not possible to run it as @BeforeClass as clients aren't initialized if 
(mockClientForCheckpointing == null) { - mockClientForCheckpointing = new MockClientForCheckpointing("TransformCheckpointServiceNodeTests"); + mockClientForCheckpointing = new MockClientForCheckpointing("TransformCheckpointServiceNodeTests", randomBoolean()); } ClusterService clusterService = mock(ClusterService.class); transformsConfigManager = new IndexBasedTransformConfigManager( @@ -270,7 +286,7 @@ public void testGetCheckpointStats() throws InterruptedException { assertAsync(listener -> transformsConfigManager.putTransformCheckpoint(checkpoint2, listener), true, null, null); - mockClientForCheckpointing.setShardStats(createShardStats(createCheckPointMap(transformId, 20, 20, 20))); + mockClientForCheckpointing.setCheckpoints(createCheckPointMap(transformId, 20, 20, 20)); TransformCheckpointingInfo checkpointInfo = new TransformCheckpointingInfo( new TransformCheckpointStats(1, null, null, timestamp, 0L), new TransformCheckpointStats(2, position, progress, timestamp + 100L, 0L), @@ -286,7 +302,7 @@ public void testGetCheckpointStats() throws InterruptedException { null ); - mockClientForCheckpointing.setShardStats(createShardStats(createCheckPointMap(transformId, 10, 50, 33))); + mockClientForCheckpointing.setCheckpoints(createCheckPointMap(transformId, 10, 50, 33)); checkpointInfo = new TransformCheckpointingInfo( new TransformCheckpointStats(1, null, null, timestamp, 0L), new TransformCheckpointStats(2, position, progress, timestamp + 100L, 0L), @@ -302,7 +318,7 @@ public void testGetCheckpointStats() throws InterruptedException { ); // same as current - mockClientForCheckpointing.setShardStats(createShardStats(createCheckPointMap(transformId, 10, 10, 10))); + mockClientForCheckpointing.setCheckpoints(createCheckPointMap(transformId, 10, 10, 10)); checkpointInfo = new TransformCheckpointingInfo( new TransformCheckpointStats(1, null, null, timestamp, 0L), new TransformCheckpointStats(2, position, progress, timestamp + 100L, 0L), diff --git a/x-pack/plugin/transform/src/internalClusterTest/java/org/elasticsearch/xpack/transform/checkpoint/TransformGetCheckpointIT.java b/x-pack/plugin/transform/src/internalClusterTest/java/org/elasticsearch/xpack/transform/checkpoint/TransformGetCheckpointIT.java new file mode 100644 index 0000000000000..2130a15760acb --- /dev/null +++ b/x-pack/plugin/transform/src/internalClusterTest/java/org/elasticsearch/xpack/transform/checkpoint/TransformGetCheckpointIT.java @@ -0,0 +1,97 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.transform.checkpoint; + +import org.apache.commons.lang3.ArrayUtils; +import org.elasticsearch.action.admin.indices.refresh.RefreshRequest; +import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse; +import org.elasticsearch.action.support.IndicesOptions; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.core.transform.action.GetCheckpointAction; +import org.elasticsearch.xpack.transform.TransformSingleNodeTestCase; + +import java.util.Arrays; +import java.util.Comparator; +import java.util.stream.Collectors; + +/** + * Test suite for checkpointing using transform getcheckpoint API + */ +public class TransformGetCheckpointIT extends TransformSingleNodeTestCase { + + public void testGetCheckpoint() throws Exception { + final String indexNamePrefix = "test_index-"; + final int shards = randomIntBetween(1, 5); + final int indices = randomIntBetween(1, 5); + + for (int i = 0; i < indices; ++i) { + client().admin() + .indices() + .prepareCreate(indexNamePrefix + i) + .setSettings(Settings.builder().put("index.number_of_shards", shards).put("index.number_of_replicas", 1)) + .get(); + } + + final GetCheckpointAction.Request request = new GetCheckpointAction.Request( + new String[] { indexNamePrefix + "*" }, + IndicesOptions.LENIENT_EXPAND_OPEN + ); + + final GetCheckpointAction.Response response = client().execute(GetCheckpointAction.INSTANCE, request).get(); + assertEquals(indices, response.getCheckpoints().size()); + + // empty indices should report -1 as sequence id + assertFalse( + response.getCheckpoints().entrySet().stream().anyMatch(entry -> Arrays.stream(entry.getValue()).anyMatch(l -> l != -1L)) + ); + + final int docsToCreatePerShard = randomIntBetween(0, 10); + for (int d = 0; d < docsToCreatePerShard; ++d) { + for (int i = 0; i < indices; ++i) { + for (int j = 0; j < shards; ++j) { + client().prepareIndex(indexNamePrefix + i).setSource("{" + "\"field\":" + j + "}", XContentType.JSON).get(); + } + } + } + + client().admin().indices().refresh(new RefreshRequest(indexNamePrefix + "*")); + + final GetCheckpointAction.Response response2 = client().execute(GetCheckpointAction.INSTANCE, request).get(); + assertEquals(indices, response2.getCheckpoints().size()); + + // check the sum, counting starts with 0, so we have to take docsToCreatePerShard - 1 + long checkpointSum = response2.getCheckpoints().values().stream().map(l -> Arrays.stream(l).sum()).mapToLong(Long::valueOf).sum(); + assertEquals( + "Expected " + + (docsToCreatePerShard - 1) * shards * indices + + " as sum of " + + response2.getCheckpoints() + .entrySet() + .stream() + .map(e -> e.getKey() + ": {" + Strings.arrayToCommaDelimitedString(ArrayUtils.toObject(e.getValue())) + "}") + .collect(Collectors.joining(",")), + (docsToCreatePerShard - 1) * shards * indices, + checkpointSum + ); + + final IndicesStatsResponse statsResponse = client().admin().indices().prepareStats(indexNamePrefix + "*").get(); + + assertEquals( + "Checkpoint API and indices stats don't match", + Arrays.stream(statsResponse.getShards()) + .filter(i -> i.getShardRouting().primary()) + .sorted(Comparator.comparingInt(value -> value.getShardRouting().id())) + .mapToLong(s -> s.getSeqNoStats().getGlobalCheckpoint()) + .sum(), + checkpointSum + ); + } + +} diff --git 
a/x-pack/plugin/transform/src/internalClusterTest/java/org/elasticsearch/xpack/transform/checkpoint/TransformGetCheckpointTests.java b/x-pack/plugin/transform/src/internalClusterTest/java/org/elasticsearch/xpack/transform/checkpoint/TransformGetCheckpointTests.java new file mode 100644 index 0000000000000..2b3d403e74c18 --- /dev/null +++ b/x-pack/plugin/transform/src/internalClusterTest/java/org/elasticsearch/xpack/transform/checkpoint/TransformGetCheckpointTests.java @@ -0,0 +1,291 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.transform.checkpoint; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.IndicesRequest; +import org.elasticsearch.action.LatchedActionListener; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.ActionTestUtils; +import org.elasticsearch.action.support.IndicesOptions; +import org.elasticsearch.action.support.replication.ClusterStateCreationUtils; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.metadata.IndexMetadata; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.index.Index; +import org.elasticsearch.index.IndexService; +import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.seqno.SeqNoStats; +import org.elasticsearch.index.shard.IndexShard; +import org.elasticsearch.indices.EmptySystemIndices; +import org.elasticsearch.indices.IndicesService; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.tasks.TaskId; +import org.elasticsearch.test.ESSingleNodeTestCase; +import org.elasticsearch.test.transport.MockTransport; +import org.elasticsearch.threadpool.TestThreadPool; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportException; +import org.elasticsearch.transport.TransportRequest; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.core.transform.TransformField; +import org.elasticsearch.xpack.core.transform.action.GetCheckpointAction; +import org.elasticsearch.xpack.core.transform.action.GetCheckpointAction.Request; +import org.elasticsearch.xpack.core.transform.action.GetCheckpointAction.Response; +import org.elasticsearch.xpack.core.transform.action.GetCheckpointNodeAction; +import org.elasticsearch.xpack.transform.action.TransportGetCheckpointAction; +import org.elasticsearch.xpack.transform.action.TransportGetCheckpointNodeAction; +import org.junit.After; +import org.junit.Before; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; +import java.util.Map; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.function.Consumer; + +import static java.util.Collections.emptySet; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +public class TransformGetCheckpointTests extends ESSingleNodeTestCase { + + private 
TransportService transportService; + private ClusterService clusterService; + private IndicesService indicesService; + private ThreadPool threadPool; + private IndexNameExpressionResolver indexNameExpressionResolver; + private MockTransport mockTransport; + private Task transformTask; + private final String indexNamePattern = "test_index-"; + private String[] testIndices; + private int numberOfNodes; + private int numberOfIndices; + private int numberOfShards; + + private TestTransportGetCheckpointAction getCheckpointAction; + private TestTransportGetCheckpointNodeAction getCheckpointNodeAction; + private ClusterState clusterStateWithIndex; + + @Override + @Before + public void setUp() throws Exception { + super.setUp(); + numberOfNodes = randomIntBetween(1, 10); + numberOfIndices = randomIntBetween(1, 10); + // create at least as many shards as nodes, so every node has at least 1 shard + numberOfShards = randomIntBetween(numberOfNodes, numberOfNodes * 3); + threadPool = new TestThreadPool("GetCheckpointActionTests"); + indexNameExpressionResolver = new MockResolver(); + clusterService = getInstanceFromNode(ClusterService.class); + indicesService = getInstanceFromNode(IndicesService.class); + mockTransport = new MockTransport() { + @Override + protected void onSendRequest(long requestId, String action, TransportRequest request, DiscoveryNode node) { + if (action.equals(GetCheckpointNodeAction.NAME)) { + getCheckpointNodeAction.execute( + null, + (GetCheckpointNodeAction.Request) request, + ActionListener.wrap(r -> { this.handleResponse(requestId, r); }, e -> { + this.handleError(requestId, new TransportException(e.getMessage(), e)); + + }) + ); + } + } + }; + + transportService = mockTransport.createTransportService( + clusterService.getSettings(), + threadPool, + TransportService.NOOP_TRANSPORT_INTERCEPTOR, + boundAddress -> clusterService.localNode(), + null, + emptySet() + ); + transportService.start(); + transportService.acceptIncomingRequests(); + + List testIndicesList = new ArrayList<>(); + for (int i = 0; i < numberOfIndices; ++i) { + testIndicesList.add(indexNamePattern + i); + } + testIndices = testIndicesList.toArray(new String[0]); + clusterStateWithIndex = ClusterStateCreationUtils.state(numberOfNodes, testIndices, numberOfShards); + + transformTask = new Task( + 1L, + "persistent", + "action", + TransformField.PERSISTENT_TASK_DESCRIPTION_PREFIX + "the_id", + TaskId.EMPTY_TASK_ID, + Collections.emptyMap() + ); + getCheckpointAction = new TestTransportGetCheckpointAction(); + getCheckpointNodeAction = new TestTransportGetCheckpointNodeAction(); + } + + @Override + @After + public void tearDown() throws Exception { + ThreadPool.terminate(threadPool, 30, TimeUnit.SECONDS); + threadPool = null; + super.tearDown(); + } + + public void testEmptyCheckpoint() throws InterruptedException { + GetCheckpointAction.Request request = new GetCheckpointAction.Request(Strings.EMPTY_ARRAY, IndicesOptions.LENIENT_EXPAND_OPEN); + assertCheckpointAction(request, response -> { + assertNotNull(response.getCheckpoints()); + Map checkpoints = response.getCheckpoints(); + assertTrue(checkpoints.isEmpty()); + + }); + } + + public void testSingleIndexRequest() throws InterruptedException { + GetCheckpointAction.Request request = new GetCheckpointAction.Request( + new String[] { indexNamePattern + "0" }, + IndicesOptions.LENIENT_EXPAND_OPEN + ); + + assertCheckpointAction(request, response -> { + assertNotNull(response.getCheckpoints()); + Map checkpoints = response.getCheckpoints(); + assertEquals(1, 
checkpoints.size()); + assertTrue(checkpoints.containsKey(indexNamePattern + "0")); + for (int i = 0; i < numberOfShards; ++i) { + assertEquals(42 + i, checkpoints.get(indexNamePattern + "0")[i]); + } + assertEquals(numberOfNodes, getCheckpointNodeAction.getCalls()); + + }); + } + + public void testMultiIndexRequest() throws InterruptedException { + GetCheckpointAction.Request request = new GetCheckpointAction.Request(testIndices, IndicesOptions.LENIENT_EXPAND_OPEN); + assertCheckpointAction(request, response -> { + assertNotNull(response.getCheckpoints()); + Map checkpoints = response.getCheckpoints(); + assertEquals(testIndices.length, checkpoints.size()); + for (int i = 0; i < this.numberOfIndices; ++i) { + assertTrue(checkpoints.containsKey(indexNamePattern + i)); + for (int j = 0; j < numberOfShards; ++j) { + assertEquals(42 + i + j, checkpoints.get(indexNamePattern + i)[j]); + } + } + assertEquals(numberOfNodes, getCheckpointNodeAction.getCalls()); + }); + } + + class TestTransportGetCheckpointAction extends TransportGetCheckpointAction { + + TestTransportGetCheckpointAction() { + super(transportService, new ActionFilters(emptySet()), indicesService, clusterService, indexNameExpressionResolver); + } + + @Override + protected void doExecute(Task task, Request request, ActionListener listener) { + resolveIndicesAndGetCheckpoint(task, request, listener, clusterStateWithIndex); + } + + } + + class TestTransportGetCheckpointNodeAction extends TransportGetCheckpointNodeAction { + + private final IndicesService mockIndicesService; + private int calls; + + TestTransportGetCheckpointNodeAction() { + super(transportService, new ActionFilters(emptySet()), indicesService); + calls = 0; + mockIndicesService = mock(IndicesService.class); + for (int i = 0; i < numberOfIndices; ++i) { + IndexService mockIndexService = mock(IndexService.class); + IndexMetadata indexMeta = clusterStateWithIndex.metadata().index(indexNamePattern + i); + + IndexSettings mockIndexSettings = new IndexSettings(indexMeta, clusterService.getSettings()); + when(mockIndexService.getIndexSettings()).thenReturn(mockIndexSettings); + for (int j = 0; j < numberOfShards; ++j) { + IndexShard mockIndexShard = mock(IndexShard.class); + when(mockIndexService.getShard(j)).thenReturn(mockIndexShard); + SeqNoStats seqNoStats = new SeqNoStats(42 + i + j, 42 + i + j, 42 + i + j); + when(mockIndexShard.seqNoStats()).thenReturn(seqNoStats); + } + + when(mockIndicesService.indexServiceSafe(indexMeta.getIndex())).thenReturn(mockIndexService); + } + } + + @Override + protected void doExecute( + Task task, + GetCheckpointNodeAction.Request request, + ActionListener listener + ) { + ++calls; + getGlobalCheckpoints(mockIndicesService, request.getShards(), listener); + } + + public int getCalls() { + return calls; + } + } + + static class MockResolver extends IndexNameExpressionResolver { + MockResolver() { + super(new ThreadContext(Settings.EMPTY), EmptySystemIndices.INSTANCE); + } + + @Override + public String[] concreteIndexNames(ClusterState state, IndicesRequest request) { + return request.indices(); + } + + @Override + public String[] concreteIndexNames( + ClusterState state, + IndicesOptions options, + boolean includeDataStreams, + String... 
indexExpressions + ) { + return indexExpressions; + } + + @Override + public Index[] concreteIndices(ClusterState state, IndicesRequest request) { + Index[] out = new Index[request.indices().length]; + for (int x = 0; x < out.length; x++) { + out[x] = new Index(request.indices()[x], "_na_"); + } + return out; + } + } + + private void assertCheckpointAction(GetCheckpointAction.Request request, Consumer furtherTests) + throws InterruptedException { + CountDownLatch latch = new CountDownLatch(1); + AtomicBoolean listenerCalled = new AtomicBoolean(false); + + LatchedActionListener listener = new LatchedActionListener<>(ActionListener.wrap(r -> { + assertTrue("listener called more than once", listenerCalled.compareAndSet(false, true)); + furtherTests.accept(r); + }, e -> { fail("got unexpected exception: " + e); }), latch); + + ActionTestUtils.execute(getCheckpointAction, transformTask, request, listener); + assertTrue("timed out after 20s", latch.await(20, TimeUnit.SECONDS)); + } +} diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/Transform.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/Transform.java index 1cde900c3b54a..3c9b3f3596026 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/Transform.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/Transform.java @@ -60,6 +60,8 @@ import org.elasticsearch.xpack.core.transform.TransformMessages; import org.elasticsearch.xpack.core.transform.TransformNamedXContentProvider; import org.elasticsearch.xpack.core.transform.action.DeleteTransformAction; +import org.elasticsearch.xpack.core.transform.action.GetCheckpointAction; +import org.elasticsearch.xpack.core.transform.action.GetCheckpointNodeAction; import org.elasticsearch.xpack.core.transform.action.GetTransformAction; import org.elasticsearch.xpack.core.transform.action.GetTransformStatsAction; import org.elasticsearch.xpack.core.transform.action.PreviewTransformAction; @@ -72,6 +74,8 @@ import org.elasticsearch.xpack.core.transform.action.UpgradeTransformsAction; import org.elasticsearch.xpack.core.transform.action.ValidateTransformAction; import org.elasticsearch.xpack.transform.action.TransportDeleteTransformAction; +import org.elasticsearch.xpack.transform.action.TransportGetCheckpointAction; +import org.elasticsearch.xpack.transform.action.TransportGetCheckpointNodeAction; import org.elasticsearch.xpack.transform.action.TransportGetTransformAction; import org.elasticsearch.xpack.transform.action.TransportGetTransformStatsAction; import org.elasticsearch.xpack.transform.action.TransportPreviewTransformAction; @@ -188,10 +192,14 @@ public List getRestHandlers( new ActionHandler<>(PreviewTransformAction.INSTANCE, TransportPreviewTransformAction.class), new ActionHandler<>(UpdateTransformAction.INSTANCE, TransportUpdateTransformAction.class), new ActionHandler<>(SetResetModeAction.INSTANCE, TransportSetTransformResetModeAction.class), - new ActionHandler<>(ValidateTransformAction.INSTANCE, TransportValidateTransformAction.class), new ActionHandler<>(UpgradeTransformsAction.INSTANCE, TransportUpgradeTransformsAction.class), new ActionHandler<>(ResetTransformAction.INSTANCE, TransportResetTransformAction.class), + // internal, no rest endpoint + new ActionHandler<>(ValidateTransformAction.INSTANCE, TransportValidateTransformAction.class), + new ActionHandler<>(GetCheckpointAction.INSTANCE, TransportGetCheckpointAction.class), + new 
ActionHandler<>(GetCheckpointNodeAction.INSTANCE, TransportGetCheckpointNodeAction.class), + // usage and info new ActionHandler<>(XPackUsageFeatureAction.TRANSFORM, TransformUsageTransportAction.class), new ActionHandler<>(XPackInfoFeatureAction.TRANSFORM, TransformInfoTransportAction.class) diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportGetCheckpointAction.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportGetCheckpointAction.java new file mode 100644 index 0000000000000..0397c38ec90e5 --- /dev/null +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportGetCheckpointAction.java @@ -0,0 +1,225 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ +package org.elasticsearch.xpack.transform.action; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.elasticsearch.Version; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.NoShardAvailableActionException; +import org.elasticsearch.action.OriginalIndices; +import org.elasticsearch.action.UnavailableShardsException; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.GroupedActionListener; +import org.elasticsearch.action.support.HandledTransportAction; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.cluster.routing.ShardRouting; +import org.elasticsearch.cluster.routing.ShardsIterator; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.index.shard.ShardId; +import org.elasticsearch.indices.IndicesService; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.transport.ActionNotFoundTransportException; +import org.elasticsearch.transport.TransportException; +import org.elasticsearch.transport.TransportRequestOptions; +import org.elasticsearch.transport.TransportResponseHandler; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.core.transform.action.GetCheckpointAction; +import org.elasticsearch.xpack.core.transform.action.GetCheckpointAction.Request; +import org.elasticsearch.xpack.core.transform.action.GetCheckpointAction.Response; +import org.elasticsearch.xpack.core.transform.action.GetCheckpointNodeAction; + +import java.io.IOException; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Map; +import java.util.Map.Entry; +import java.util.Set; +import java.util.TreeMap; + +public class TransportGetCheckpointAction extends HandledTransportAction { + + private static final Logger logger = LogManager.getLogger(TransportGetCheckpointAction.class); + private final ClusterService clusterService; + private final IndicesService indicesService; + private final TransportService transportService; + private final IndexNameExpressionResolver indexNameExpressionResolver; + + @Inject + public TransportGetCheckpointAction( + final TransportService 
transportService,
+        final ActionFilters actionFilters,
+        final IndicesService indicesService,
+        final ClusterService clusterService,
+        final IndexNameExpressionResolver indexNameExpressionResolver
+    ) {
+        super(GetCheckpointAction.NAME, transportService, actionFilters, Request::new);
+        this.transportService = transportService;
+        this.indicesService = indicesService;
+        this.clusterService = clusterService;
+        this.indexNameExpressionResolver = indexNameExpressionResolver;
+    }
+
+    @Override
+    protected void doExecute(Task task, Request request, ActionListener<Response> listener) {
+        final ClusterState state = clusterService.state();
+        resolveIndicesAndGetCheckpoint(task, request, listener, state);
+    }
+
+    protected void resolveIndicesAndGetCheckpoint(Task task, Request request, ActionListener<Response> listener, final ClusterState state) {
+        // note: when security is turned on, the indices are already resolved
+        // TODO: do a quick check and only resolve if necessary??
+        String[] concreteIndices = this.indexNameExpressionResolver.concreteIndexNames(state, request);
+
+        Map<String, Set<ShardId>> nodesAndShards = resolveIndicesToPrimaryShards(state, concreteIndices);
+
+        if (nodesAndShards.size() == 0) {
+            listener.onResponse(new Response(Collections.emptyMap()));
+            return;
+        }
+
+        new AsyncGetCheckpointsFromNodesAction(state, task, nodesAndShards, new OriginalIndices(request), listener).start();
+    }
+
+    private Map<String, Set<ShardId>> resolveIndicesToPrimaryShards(ClusterState state, String[] concreteIndices) {
+        if (concreteIndices.length == 0) {
+            return Collections.emptyMap();
+        }
+
+        final DiscoveryNodes nodes = state.nodes();
+        Map<String, Set<ShardId>> nodesAndShards = new HashMap<>();
+
+        ShardsIterator shardsIt = state.routingTable().allShards(concreteIndices);
+        for (ShardRouting shard : shardsIt) {
+            // only take primary shards, which should be exactly 1, this isn't strictly necessary
+            // and we should consider taking any shard copy, but then we need another way to de-dup
+            if (shard.primary() == false) {
+                continue;
+            }
+            if (shard.assignedToNode() && nodes.get(shard.currentNodeId()) != null) {
+                // special case: a node that holds the shard is on an old version
+                if (nodes.get(shard.currentNodeId()).getVersion().before(Version.V_8_2_0)) {
+                    throw new ActionNotFoundTransportException(GetCheckpointNodeAction.NAME);
+                }
+
+                String nodeId = shard.currentNodeId();
+                nodesAndShards.computeIfAbsent(nodeId, k -> new HashSet<>()).add(shard.shardId());
+            } else {
+                throw new NoShardAvailableActionException(shard.shardId(), " no primary shards available for shard [" + shard + "]");
+            }
+        }
+        return nodesAndShards;
+    }
+
+    protected class AsyncGetCheckpointsFromNodesAction {
+        private final Task task;
+        private final ActionListener<Response> listener;
+        private final Map<String, Set<ShardId>> nodesAndShards;
+        private final OriginalIndices originalIndices;
+        private final DiscoveryNodes nodes;
+        private final String localNodeId;
+
+        protected AsyncGetCheckpointsFromNodesAction(
+            ClusterState clusterState,
+            Task task,
+            Map<String, Set<ShardId>> nodesAndShards,
+            OriginalIndices originalIndices,
+            ActionListener<Response> listener
+        ) {
+            this.task = task;
+            this.listener = listener;
+            this.nodesAndShards = nodesAndShards;
+            this.originalIndices = originalIndices;
+            this.nodes = clusterState.nodes();
+            this.localNodeId = clusterService.localNode().getId();
+        }
+
+        public void start() {
+            GroupedActionListener<GetCheckpointNodeAction.Response> groupedListener = new GroupedActionListener<>(
+                ActionListener.wrap(responses -> {
+                    // the final list should be ordered by key
+                    Map<String, long[]> checkpointsByIndexReduced = new TreeMap<>();
+
+                    // merge the node responses
+                    for
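+                    // keep the maximum global checkpoint seen for each shard position across all node responses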
(GetCheckpointNodeAction.Response response : responses) { + response.getCheckpoints().forEach((index, checkpoint) -> { + if (checkpointsByIndexReduced.containsKey(index)) { + long[] shardCheckpoints = checkpointsByIndexReduced.get(index); + for (int i = 0; i < checkpoint.length; ++i) { + shardCheckpoints[i] = Math.max(shardCheckpoints[i], checkpoint[i]); + } + } else { + checkpointsByIndexReduced.put(index, checkpoint); + } + }); + } + + listener.onResponse(new Response(checkpointsByIndexReduced)); + }, listener::onFailure), + nodesAndShards.size() + ); + + for (Entry> oneNodeAndItsShards : nodesAndShards.entrySet()) { + if (localNodeId.equals(oneNodeAndItsShards.getKey())) { + TransportGetCheckpointNodeAction.getGlobalCheckpoints(indicesService, oneNodeAndItsShards.getValue(), groupedListener); + continue; + } + + GetCheckpointNodeAction.Request nodeCheckpointsRequest = new GetCheckpointNodeAction.Request( + oneNodeAndItsShards.getValue(), + originalIndices + ); + DiscoveryNode node = nodes.get(oneNodeAndItsShards.getKey()); + + // paranoia: this should not be possible using the same cluster state + if (node == null) { + listener.onFailure( + new UnavailableShardsException( + oneNodeAndItsShards.getValue().iterator().next(), + "Node not found for [{}] shards", + oneNodeAndItsShards.getValue().size() + ) + ); + return; + } + + logger.trace("get checkpoints from node {}", node); + transportService.sendChildRequest( + node, + GetCheckpointNodeAction.NAME, + nodeCheckpointsRequest, + task, + TransportRequestOptions.EMPTY, + new TransportResponseHandler() { + + @Override + public GetCheckpointNodeAction.Response read(StreamInput in) throws IOException { + return new GetCheckpointNodeAction.Response(in); + } + + @Override + public void handleResponse(GetCheckpointNodeAction.Response response) { + groupedListener.onResponse(response); + } + + @Override + public void handleException(TransportException exp) { + groupedListener.onFailure(exp); + } + + } + ); + } + } + } +} diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportGetCheckpointNodeAction.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportGetCheckpointNodeAction.java new file mode 100644 index 0000000000000..b257ed80acf14 --- /dev/null +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportGetCheckpointNodeAction.java @@ -0,0 +1,63 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */
+package org.elasticsearch.xpack.transform.action;
+
+import org.elasticsearch.action.ActionListener;
+import org.elasticsearch.action.support.ActionFilters;
+import org.elasticsearch.action.support.HandledTransportAction;
+import org.elasticsearch.common.inject.Inject;
+import org.elasticsearch.index.IndexService;
+import org.elasticsearch.index.seqno.SequenceNumbers;
+import org.elasticsearch.index.shard.IndexShard;
+import org.elasticsearch.index.shard.ShardId;
+import org.elasticsearch.indices.IndicesService;
+import org.elasticsearch.tasks.Task;
+import org.elasticsearch.transport.TransportService;
+import org.elasticsearch.xpack.core.transform.action.GetCheckpointNodeAction;
+import org.elasticsearch.xpack.core.transform.action.GetCheckpointNodeAction.Request;
+import org.elasticsearch.xpack.core.transform.action.GetCheckpointNodeAction.Response;
+
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Set;
+
+public class TransportGetCheckpointNodeAction extends HandledTransportAction<Request, Response> {
+
+    private final IndicesService indicesService;
+
+    @Inject
+    public TransportGetCheckpointNodeAction(
+        final TransportService transportService,
+        final ActionFilters actionFilters,
+        final IndicesService indicesService
+    ) {
+        super(GetCheckpointNodeAction.NAME, transportService, actionFilters, Request::new);
+        this.indicesService = indicesService;
+    }
+
+    @Override
+    protected void doExecute(Task task, Request request, ActionListener<Response> listener) {
+        getGlobalCheckpoints(indicesService, request.getShards(), listener);
+    }
+
+    protected static void getGlobalCheckpoints(IndicesService indicesService, Set<ShardId> shards, ActionListener<Response> listener) {
+        Map<String, long[]> checkpointsByIndexOfThisNode = new HashMap<>();
+        for (ShardId shardId : shards) {
+            final IndexService indexService = indicesService.indexServiceSafe(shardId.getIndex());
+            final IndexShard indexShard = indexService.getShard(shardId.id());
+
+            checkpointsByIndexOfThisNode.computeIfAbsent(shardId.getIndexName(), k -> {
+                long[] seqNumbers = new long[indexService.getIndexSettings().getNumberOfShards()];
+                Arrays.fill(seqNumbers, SequenceNumbers.UNASSIGNED_SEQ_NO);
+                return seqNumbers;
+            });
+            checkpointsByIndexOfThisNode.get(shardId.getIndexName())[shardId.getId()] = indexShard.seqNoStats().getGlobalCheckpoint();
+        }
+        listener.onResponse(new Response(checkpointsByIndexOfThisNode));
+    }
+}
diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/checkpoint/DefaultCheckpointProvider.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/checkpoint/DefaultCheckpointProvider.java
index 801fb1f7064de..0ed005f4f92ed 100644
--- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/checkpoint/DefaultCheckpointProvider.java
+++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/checkpoint/DefaultCheckpointProvider.java
@@ -11,6 +11,7 @@
 import org.apache.logging.log4j.Logger;
 import org.apache.logging.log4j.message.ParameterizedMessage;
 import org.apache.logging.log4j.util.Supplier;
+import org.elasticsearch.ExceptionsHelper;
 import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.action.admin.indices.get.GetIndexAction;
 import org.elasticsearch.action.admin.indices.get.GetIndexRequest;
@@ -21,8 +22,10 @@
 import org.elasticsearch.action.support.IndicesOptions;
 import org.elasticsearch.client.internal.Client;
 import org.elasticsearch.common.util.set.Sets;
+import org.elasticsearch.transport.ActionNotFoundTransportException;
 import org.elasticsearch.transport.RemoteClusterService;
 import org.elasticsearch.xpack.core.ClientHelper;
+import org.elasticsearch.xpack.core.transform.action.GetCheckpointAction;
 import org.elasticsearch.xpack.core.transform.transforms.TransformCheckpoint;
 import org.elasticsearch.xpack.core.transform.transforms.TransformCheckpointingInfo;
 import org.elasticsearch.xpack.core.transform.transforms.TransformCheckpointingInfo.TransformCheckpointingInfoBuilder;
@@ -59,6 +62,9 @@ class DefaultCheckpointProvider implements CheckpointProvider {
     protected final TransformAuditor transformAuditor;
     protected final TransformConfig transformConfig;
 
+    // set of clusters that do not support 8.2+ checkpoint actions
+    private final Set<String> fallbackToBWC = new HashSet<>();
+
     DefaultCheckpointProvider(
         final Clock clock,
         final Client client,
@@ -130,7 +136,7 @@ protected void getIndexCheckpoints(ActionListener<Map<String, long[]>> listener)
                     remoteClient,
                     transformConfig.getHeaders(),
                     remoteIndex.getValue().toArray(new String[0]),
-                    remoteIndex.getKey() + RemoteClusterService.REMOTE_CLUSTER_INDEX_SEPARATOR,
+                    remoteIndex.getKey(),
                     groupedListener
                 );
             }
@@ -139,11 +145,69 @@ protected void getIndexCheckpoints(ActionListener<Map<String, long[]>> listener)
         }
     }
 
-    private static void getCheckpointsFromOneCluster(
+    private void getCheckpointsFromOneCluster(
+        Client client,
+        Map<String, String> headers,
+        String[] indices,
+        String cluster,
+        ActionListener<Map<String, long[]>> listener
+    ) {
+        if (fallbackToBWC.contains(cluster)) {
+            getCheckpointsFromOneClusterBWC(client, headers, indices, cluster, listener);
+        } else {
+            getCheckpointsFromOneClusterV2(client, headers, indices, cluster, ActionListener.wrap(response -> {
+                logger.debug(
+                    "[{}] Successfully retrieved checkpoints from cluster [{}] using transform checkpoint API",
+                    transformConfig.getId(),
+                    cluster
+                );
+                listener.onResponse(response);
+            }, e -> {
+                Throwable unwrappedException = ExceptionsHelper.unwrapCause(e);
+                if (unwrappedException instanceof ActionNotFoundTransportException) {
+                    // this is an implementation detail, so not necessary to audit or warn, but only report as debug
+                    logger.debug(
+                        "[{}] Cluster [{}] does not support transform checkpoint API, falling back to legacy checkpointing",
+                        transformConfig.getId(),
+                        cluster
+                    );
+
+                    fallbackToBWC.add(cluster);
+                    getCheckpointsFromOneClusterBWC(client, headers, indices, cluster, listener);
+                } else {
+                    listener.onFailure(e);
+                }
+            }));
+        }
+    }
+
+    private static void getCheckpointsFromOneClusterV2(
+        Client client,
+        Map<String, String> headers,
+        String[] indices,
+        String cluster,
+        ActionListener<Map<String, long[]>> listener
+    ) {
+        GetCheckpointAction.Request getCheckpointRequest = new GetCheckpointAction.Request(indices, IndicesOptions.LENIENT_EXPAND_OPEN);
+
+        ClientHelper.executeWithHeadersAsync(
+            headers,
+            ClientHelper.TRANSFORM_ORIGIN,
+            client,
+            GetCheckpointAction.INSTANCE,
+            getCheckpointRequest,
+            ActionListener.wrap(checkpointResponse -> listener.onResponse(checkpointResponse.getCheckpoints()), listener::onFailure)
+        );
+    }
+
+    /**
+     * BWC fallback for nodes/clusters older than 8.2
+     */
+    private static void getCheckpointsFromOneClusterBWC(
         Client client,
         Map<String, String> headers,
         String[] indices,
-        String prefix,
+        String cluster,
         ActionListener<Map<String, long[]>> listener
     ) {
         // 1st get index to see the indexes the user has access to
@@ -189,14 +253,14 @@ private static void getCheckpointsFromOneCluster(
                     );
                     return;
                 }
-                listener.onResponse(extractIndexCheckPoints(response.getShards(), userIndices, prefix));
+
listener.onResponse(extractIndexCheckPoints(response.getShards(), userIndices, cluster)); }, e -> listener.onFailure(new CheckpointException("Failed to create checkpoint", e))) ); }, e -> listener.onFailure(new CheckpointException("Failed to create checkpoint", e))) ); } - static Map extractIndexCheckPoints(ShardStats[] shards, Set userIndices, String prefix) { + static Map extractIndexCheckPoints(ShardStats[] shards, Set userIndices, String cluster) { Map> checkpointsByIndex = new TreeMap<>(); for (ShardStats shard : shards) { @@ -205,7 +269,9 @@ static Map extractIndexCheckPoints(ShardStats[] shards, Set checkpoints = checkpointsByIndex.get(fullIndexName); diff --git a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/checkpoint/TimeBasedCheckpointProviderTests.java b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/checkpoint/TimeBasedCheckpointProviderTests.java index 4e30dd38141fb..902a69b885079 100644 --- a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/checkpoint/TimeBasedCheckpointProviderTests.java +++ b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/checkpoint/TimeBasedCheckpointProviderTests.java @@ -12,18 +12,12 @@ import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.LatchedActionListener; -import org.elasticsearch.action.admin.indices.get.GetIndexAction; -import org.elasticsearch.action.admin.indices.get.GetIndexResponse; -import org.elasticsearch.action.admin.indices.stats.IndicesStatsAction; -import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse; -import org.elasticsearch.action.admin.indices.stats.ShardStats; import org.elasticsearch.action.search.SearchAction; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.search.SearchResponseSections; import org.elasticsearch.action.search.ShardSearchFailure; import org.elasticsearch.client.internal.Client; -import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; @@ -36,6 +30,7 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.VersionUtils; import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.xpack.core.transform.action.GetCheckpointAction; import org.elasticsearch.xpack.core.transform.transforms.SettingsConfig; import org.elasticsearch.xpack.core.transform.transforms.TimeSyncConfig; import org.elasticsearch.xpack.core.transform.transforms.TransformCheckpoint; @@ -53,6 +48,7 @@ import org.mockito.stubbing.Answer; import java.time.Clock; +import java.util.Collections; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; import java.util.function.Supplier; @@ -62,8 +58,8 @@ import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.nullValue; +import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.eq; -import static org.mockito.Mockito.any; import static org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.verify; @@ -255,19 +251,8 @@ private void testCreateNextCheckpoint( TransformCheckpoint lastCheckpoint, TransformCheckpoint expectedNextCheckpoint ) throws InterruptedException { - 
GetIndexResponse getIndexResponse = new GetIndexResponse( - new String[] { "some-index" }, - ImmutableOpenMap.of(), - ImmutableOpenMap.of(), - ImmutableOpenMap.of(), - ImmutableOpenMap.of(), - ImmutableOpenMap.of() - ); - doAnswer(withResponse(getIndexResponse)).when(client).execute(eq(GetIndexAction.INSTANCE), any(), any()); - IndicesStatsResponse indicesStatsResponse = mock(IndicesStatsResponse.class); - when(indicesStatsResponse.getShards()).thenReturn(new ShardStats[0]); - when(indicesStatsResponse.getFailedShards()).thenReturn(0); - doAnswer(withResponse(indicesStatsResponse)).when(client).execute(eq(IndicesStatsAction.INSTANCE), any(), any()); + GetCheckpointAction.Response checkpointResponse = new GetCheckpointAction.Response(Collections.emptyMap()); + doAnswer(withResponse(checkpointResponse)).when(client).execute(eq(GetCheckpointAction.INSTANCE), any(), any()); TransformConfig transformConfig = newTransformConfigWithDateHistogram( transformId, From bf00ab381ed206401cc695df7e1525e6d2f89311 Mon Sep 17 00:00:00 2001 From: David Roberts Date: Thu, 17 Feb 2022 09:19:14 +0000 Subject: [PATCH 146/167] [ML] Add ML memory stats API (#83802) Adds an API that can be used to find out how much memory ML is permitted to use and is currently using on each node, both within the JVM heap, and natively, outside of the JVM. --- docs/changelog/83802.yaml | 5 + .../ml/common/apis/get-ml-memory.asciidoc | 310 +++++++++++++++ docs/reference/ml/common/apis/index.asciidoc | 1 + .../reference/ml/common/apis/ml-apis.asciidoc | 18 +- .../api/ml.get_memory_stats.json | 45 +++ .../xpack/core/ml/action/MlMemoryAction.java | 361 ++++++++++++++++++ .../action/TrainedModelCacheInfoAction.java | 160 ++++++++ .../xpack/core/ml/action/CacheInfoTests.java | 40 ++ .../ml/action/MlMemoryActionRequestTests.java | 24 ++ .../action/MlMemoryActionResponseTests.java | 55 +++ .../core/ml/action/MlMemoryStatsTests.java | 52 +++ .../TrainedModelCacheInfoRequestTests.java | 38 ++ .../TrainedModelCacheInfoResponseTests.java | 56 +++ .../smoketest/MlWithSecurityUserRoleIT.java | 1 - .../xpack/ml/integration/MlMemoryIT.java | 200 ++++++++++ ...NativeDataFrameAnalyticsIntegTestCase.java | 2 +- .../xpack/ml/MachineLearning.java | 8 + .../ml/action/TransportMlMemoryAction.java | 231 +++++++++++ .../TransportTrainedModelCacheInfoAction.java | 115 ++++++ .../MlAutoscalingDeciderService.java | 2 +- .../loadingservice/ModelLoadingService.java | 14 + .../xpack/ml/job/JobNodeSelector.java | 2 +- .../elasticsearch/xpack/ml/job/NodeLoad.java | 135 +++++-- .../xpack/ml/job/NodeLoadDetector.java | 13 +- .../xpack/ml/process/MlMemoryTracker.java | 10 + .../xpack/ml/rest/RestMlMemoryAction.java | 51 +++ .../MlAutoscalingDeciderServiceTests.java | 41 +- .../xpack/security/operator/Constants.java | 4 +- .../test/ml/get_memory_stats.yml | 110 ++++++ 29 files changed, 2043 insertions(+), 61 deletions(-) create mode 100644 docs/changelog/83802.yaml create mode 100644 docs/reference/ml/common/apis/get-ml-memory.asciidoc create mode 100644 rest-api-spec/src/main/resources/rest-api-spec/api/ml.get_memory_stats.json create mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/MlMemoryAction.java create mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/TrainedModelCacheInfoAction.java create mode 100644 x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/CacheInfoTests.java create mode 100644 
x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/MlMemoryActionRequestTests.java create mode 100644 x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/MlMemoryActionResponseTests.java create mode 100644 x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/MlMemoryStatsTests.java create mode 100644 x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/TrainedModelCacheInfoRequestTests.java create mode 100644 x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/TrainedModelCacheInfoResponseTests.java create mode 100644 x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/MlMemoryIT.java create mode 100644 x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportMlMemoryAction.java create mode 100644 x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportTrainedModelCacheInfoAction.java create mode 100644 x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/RestMlMemoryAction.java create mode 100644 x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/get_memory_stats.yml diff --git a/docs/changelog/83802.yaml b/docs/changelog/83802.yaml new file mode 100644 index 0000000000000..c93e0a1c98db6 --- /dev/null +++ b/docs/changelog/83802.yaml @@ -0,0 +1,5 @@ +pr: 83802 +summary: Add ML memory stats API +area: Machine Learning +type: enhancement +issues: [] diff --git a/docs/reference/ml/common/apis/get-ml-memory.asciidoc b/docs/reference/ml/common/apis/get-ml-memory.asciidoc new file mode 100644 index 0000000000000..2dd79a969aca0 --- /dev/null +++ b/docs/reference/ml/common/apis/get-ml-memory.asciidoc @@ -0,0 +1,310 @@ +[role="xpack"] +[[get-ml-memory]] += Get machine learning memory stats API + +[subs="attributes"] +++++ +Get {ml} memory stats +++++ + +Returns information on how {ml} is using memory. + +[[get-ml-memory-request]] +== {api-request-title} + +`GET _ml/memory/_stats` + +`GET _ml/memory//_stats` + +[[get-ml-memory-prereqs]] +== {api-prereq-title} + +Requires the `monitor_ml` cluster privilege. This privilege is included in the +`machine_learning_user` built-in role. + +[[get-ml-memory-desc]] +== {api-description-title} + +Get information about how {ml} jobs and trained models are using memory, on each +node, both within the JVM heap, and natively, outside of the JVM. + +[[get-ml-memory-path-params]] +== {api-path-parms-title} + +``:: + (Optional, string) The names of particular nodes in the cluster to target. + For example, `nodeId1,nodeId2` or `ml:true`. For node selection options, + see <>. + +[[get-ml-memory-query-parms]] +== {api-query-parms-title} + +`human`:: + Specify this query parameter to include the fields with units in the response. + Otherwise only the `_in_bytes` sizes are returned in the response. + +include::{es-repo-dir}/rest-api/common-parms.asciidoc[tag=timeoutparms] + +[role="child_attributes"] +[[get-ml-memory-response-body]] +== {api-response-body-title} + +`_nodes`:: +(object) +Contains statistics about the number of nodes selected by the request. ++ +.Properties of `_nodes` +[%collapsible%open] +==== +`failed`:: +(integer) +Number of nodes that rejected the request or failed to respond. If this value +is not `0`, a reason for the rejection or failure is included in the response. + +`successful`:: +(integer) +Number of nodes that responded successfully to the request. + +`total`:: +(integer) +Total number of nodes selected by the request. 
+====
+
+`cluster_name`::
+(string)
+Name of the cluster. Based on the <<cluster-name>> setting.
+
+`nodes`::
+(object)
+Contains statistics for the nodes selected by the request.
++
+.Properties of `nodes`
+[%collapsible%open]
+====
+`<node_id>`::
+(object)
+Contains statistics for the node.
++
+.Properties of `<node_id>`
+[%collapsible%open]
+=====
+`attributes`::
+(object)
+include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=node-attributes]
+
+`ephemeral_id`::
+(string)
+include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=node-ephemeral-id]
+
+`jvm`::
+(object)
+Contains Java Virtual Machine (JVM) statistics for the node.
++
+.Properties of `jvm`
+[%collapsible%open]
+======
+`heap_max`::
+(<<byte-units,byte value>>)
+Maximum amount of memory available for use by the heap.
+
+`heap_max_in_bytes`::
+(integer)
+Maximum amount of memory, in bytes, available for use by the heap.
+
+`java_inference`::
+(<<byte-units,byte value>>)
+Amount of Java heap currently being used for caching inference models.
+
+`java_inference_in_bytes`::
+(integer)
+Amount of Java heap, in bytes, currently being used for caching inference models.
+
+`java_inference_max`::
+(<<byte-units,byte value>>)
+Maximum amount of Java heap to be used for caching inference models.
+
+`java_inference_max_in_bytes`::
+(integer)
+Maximum amount of Java heap, in bytes, to be used for caching inference models.
+======
+
+`mem`::
+(object)
+Contains statistics about memory usage for the node.
++
+.Properties of `mem`
+[%collapsible%open]
+======
+`adjusted_total`::
+(<<byte-units,byte value>>)
+If the amount of physical memory has been overridden using the `es.total_memory_bytes`
+system property then this reports the overridden value. Otherwise it reports the same
+value as `total`.
+
+`adjusted_total_in_bytes`::
+(integer)
+If the amount of physical memory has been overridden using the `es.total_memory_bytes`
+system property then this reports the overridden value in bytes. Otherwise it reports
+the same value as `total_in_bytes`.
+
+`ml`::
+(object)
+Contains statistics about {ml} use of native memory on the node.
++
+.Properties of `ml`
+[%collapsible%open]
+=======
+`anomaly_detectors`::
+(<<byte-units,byte value>>)
+Amount of native memory set aside for {anomaly-jobs}.
+
+`anomaly_detectors_in_bytes`::
+(integer)
+Amount of native memory, in bytes, set aside for {anomaly-jobs}.
+
+`data_frame_analytics`::
+(<<byte-units,byte value>>)
+Amount of native memory set aside for {dfanalytics-jobs}.
+
+`data_frame_analytics_in_bytes`::
+(integer)
+Amount of native memory, in bytes, set aside for {dfanalytics-jobs}.
+
+`max`::
+(<<byte-units,byte value>>)
+Maximum amount of native memory (separate to the JVM heap) that may be used by {ml}
+native processes.
+
+`max_in_bytes`::
+(integer)
+Maximum amount of native memory (separate to the JVM heap), in bytes, that may be
+used by {ml} native processes.
+
+`native_code_overhead`::
+(<<byte-units,byte value>>)
+Amount of native memory set aside for loading {ml} native code shared libraries.
+
+`native_code_overhead_in_bytes`::
+(integer)
+Amount of native memory, in bytes, set aside for loading {ml} native code shared libraries.
+
+`native_inference`::
+(<<byte-units,byte value>>)
+Amount of native memory set aside for trained models that have a PyTorch `model_type`.
+
+`native_inference_in_bytes`::
+(integer)
+Amount of native memory, in bytes, set aside for trained models that have a PyTorch `model_type`.
+=======
+
+`total`::
+(<<byte-units,byte value>>)
+Total amount of physical memory.
+
+`total_in_bytes`::
+(integer)
+Total amount of physical memory in bytes.
+
+======
+
+`name`::
+(string)
+Human-readable identifier for the node. Based on the <<node-name>> setting.
+
+`roles`::
+(array of strings)
+Roles assigned to the node. See <<modules-node>>.
+ +`transport_address`:: +(string) +include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=node-transport-address] + +===== +==== + +[[get-ml-memory-example]] +== {api-examples-title} + +[source,console] +-------------------------------------------------- +GET _ml/memory/_stats?human +-------------------------------------------------- +// TEST[setup:node] + +This is a possible response: + +[source,console-result] +---- +{ + "_nodes": { + "total": 1, + "successful": 1, + "failed": 0 + }, + "cluster_name": "my_cluster", + "nodes": { + "pQHNt5rXTTWNvUgOrdynKg": { + "name": "node-0", + "ephemeral_id": "ITZ6WGZnSqqeT_unfit2SQ", + "transport_address": "127.0.0.1:9300", + "attributes": { + "ml.machine_memory": "68719476736", + "ml.max_jvm_size": "536870912" + }, + "roles": [ + "data", + "data_cold", + "data_content", + "data_frozen", + "data_hot", + "data_warm", + "ingest", + "master", + "ml", + "remote_cluster_client", + "transform" + ], + "mem": { + "total": "64gb", + "total_in_bytes": 68719476736, + "adjusted_total": "64gb", + "adjusted_total_in_bytes": 68719476736, + "ml": { + "max": "19.1gb", + "max_in_bytes": 20615843020, + "native_code_overhead": "0b", + "native_code_overhead_in_bytes": 0, + "anomaly_detectors": "0b", + "anomaly_detectors_in_bytes": 0, + "data_frame_analytics": "0b", + "data_frame_analytics_in_bytes": 0, + "native_inference": "0b", + "native_inference_in_bytes": 0 + } + }, + "jvm": { + "heap_max": "512mb", + "heap_max_in_bytes": 536870912, + "java_inference_max": "204.7mb", + "java_inference_max_in_bytes": 214748364, + "java_inference": "0b", + "java_inference_in_bytes": 0 + } + } + } +} +---- +// TESTRESPONSE[s/"cluster_name": "my_cluster"/"cluster_name": $body.cluster_name/] +// TESTRESPONSE[s/"pQHNt5rXTTWNvUgOrdynKg"/\$node_name/] +// TESTRESPONSE[s/"ephemeral_id": "ITZ6WGZnSqqeT_unfit2SQ"/"ephemeral_id": "$body.$_path"/] +// TESTRESPONSE[s/"transport_address": "127.0.0.1:9300"/"transport_address": "$body.$_path"/] +// TESTRESPONSE[s/"attributes": \{[^\}]*\}/"attributes": $body.$_path/] +// TESTRESPONSE[s/"total": "64gb"/"total": "$body.$_path"/] +// TESTRESPONSE[s/"total_in_bytes": 68719476736/"total_in_bytes": $body.$_path/] +// TESTRESPONSE[s/"adjusted_total": "64gb"/"adjusted_total": "$body.$_path"/] +// TESTRESPONSE[s/"adjusted_total_in_bytes": 68719476736/"adjusted_total_in_bytes": $body.$_path/] +// TESTRESPONSE[s/"max": "19.1gb"/"max": "$body.$_path"/] +// TESTRESPONSE[s/"max_in_bytes": 20615843020/"max_in_bytes": $body.$_path/] +// TESTRESPONSE[s/"heap_max": "512mb"/"heap_max": "$body.$_path"/] +// TESTRESPONSE[s/"heap_max_in_bytes": 536870912/"heap_max_in_bytes": $body.$_path/] +// TESTRESPONSE[s/"java_inference_max": "204.7mb"/"java_inference_max": "$body.$_path"/] +// TESTRESPONSE[s/"java_inference_max_in_bytes": 214748364/"java_inference_max_in_bytes": $body.$_path/] diff --git a/docs/reference/ml/common/apis/index.asciidoc b/docs/reference/ml/common/apis/index.asciidoc index e7dfc8d437169..84ebd0adffb7f 100644 --- a/docs/reference/ml/common/apis/index.asciidoc +++ b/docs/reference/ml/common/apis/index.asciidoc @@ -1,6 +1,7 @@ include::ml-apis.asciidoc[leveloffset=+1] //GET include::get-ml-info.asciidoc[leveloffset=+2] +include::get-ml-memory.asciidoc[leveloffset=+2] //SET include::set-upgrade-mode.asciidoc[leveloffset=+2] diff --git a/docs/reference/ml/common/apis/ml-apis.asciidoc b/docs/reference/ml/common/apis/ml-apis.asciidoc index c4a24e2e6a59b..c4349f3eb7366 100644 --- a/docs/reference/ml/common/apis/ml-apis.asciidoc +++ 
b/docs/reference/ml/common/apis/ml-apis.asciidoc @@ -2,18 +2,14 @@ [[ml-apis]] = {ml-cap} APIs -You can use the following APIs to retrieve information related to the {stack-ml-features}. +You can use the following APIs to retrieve information related to the +{stack-ml-features}: -See also <>, <>, and <>. - -[discrete] -[[ml-api-ml-info-endpoint]] -== Info - -* <> +* <> +* <> -[discrete] -[[ml-set-upgrade-mode-endpoint]] -== Set upgrade mode +The following API is useful when you upgrade: * <> + +See also <>, <>, and <>. diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/ml.get_memory_stats.json b/rest-api-spec/src/main/resources/rest-api-spec/api/ml.get_memory_stats.json new file mode 100644 index 0000000000000..272f2264292d3 --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/ml.get_memory_stats.json @@ -0,0 +1,45 @@ +{ + "ml.get_memory_stats":{ + "documentation":{ + "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/get-ml-memory.html", + "description":"Returns information on how ML is using memory." + }, + "stability":"stable", + "visibility":"public", + "headers":{ + "accept": [ "application/json"] + }, + "url":{ + "paths":[ + { + "path":"/_ml/memory/_stats", + "methods":[ + "GET" + ] + }, + { + "path":"/_ml/memory/{node_id}/_stats", + "methods":[ + "GET" + ], + "parts":{ + "node_id":{ + "type":"string", + "description":"Specifies the node or nodes to retrieve stats for." + } + } + } + ] + }, + "params":{ + "master_timeout":{ + "type":"time", + "description":"Explicit operation timeout for connection to master node" + }, + "timeout":{ + "type":"time", + "description":"Explicit operation timeout" + } + } + } +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/MlMemoryAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/MlMemoryAction.java new file mode 100644 index 0000000000000..ce7ad1bea8e17 --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/MlMemoryAction.java @@ -0,0 +1,361 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.core.ml.action; + +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.ActionType; +import org.elasticsearch.action.FailedNodeException; +import org.elasticsearch.action.support.master.AcknowledgedRequest; +import org.elasticsearch.action.support.nodes.BaseNodeResponse; +import org.elasticsearch.action.support.nodes.BaseNodesResponse; +import org.elasticsearch.cluster.ClusterName; +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.cluster.node.DiscoveryNodeRole; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.xcontent.ToXContent; +import org.elasticsearch.xcontent.ToXContentFragment; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; + +import java.io.IOException; +import java.util.List; +import java.util.Map; +import java.util.Objects; + +public class MlMemoryAction extends ActionType { + + public static final MlMemoryAction INSTANCE = new MlMemoryAction(); + public static final String NAME = "cluster:monitor/xpack/ml/memory/stats/get"; + + static final String MEM = "mem"; + static final String TOTAL = "total"; + static final String TOTAL_IN_BYTES = "total_in_bytes"; + static final String ADJUSTED_TOTAL = "adjusted_total"; + static final String ADJUSTED_TOTAL_IN_BYTES = "adjusted_total_in_bytes"; + static final String ML = "ml"; + static final String MAX = "max"; + static final String MAX_IN_BYTES = "max_in_bytes"; + static final String NATIVE_CODE_OVERHEAD = "native_code_overhead"; + static final String NATIVE_CODE_OVERHEAD_IN_BYTES = "native_code_overhead_in_bytes"; + static final String ANOMALY_DETECTORS = "anomaly_detectors"; + static final String ANOMALY_DETECTORS_IN_BYTES = "anomaly_detectors_in_bytes"; + static final String DATA_FRAME_ANALYTICS = "data_frame_analytics"; + static final String DATA_FRAME_ANALYTICS_IN_BYTES = "data_frame_analytics_in_bytes"; + static final String NATIVE_INFERENCE = "native_inference"; + static final String NATIVE_INFERENCE_IN_BYTES = "native_inference_in_bytes"; + static final String JVM = "jvm"; + static final String HEAP_MAX = "heap_max"; + static final String HEAP_MAX_IN_BYTES = "heap_max_in_bytes"; + static final String JAVA_INFERENCE_MAX = "java_inference_max"; + static final String JAVA_INFERENCE_MAX_IN_BYTES = "java_inference_max_in_bytes"; + static final String JAVA_INFERENCE = "java_inference"; + static final String JAVA_INFERENCE_IN_BYTES = "java_inference_in_bytes"; + + private MlMemoryAction() { + super(NAME, Response::new); + } + + public static class Request extends AcknowledgedRequest { + + private final String nodeId; + + public Request(String nodeId) { + this.nodeId = ExceptionsHelper.requireNonNull(nodeId, "nodeId"); + } + + public Request(StreamInput in) throws IOException { + super(in); + nodeId = in.readString(); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeString(nodeId); + } + + public String getNodeId() { + return nodeId; + } + + @Override + public ActionRequestValidationException validate() { + return null; + } + + @Override + public int hashCode() { + return Objects.hash(nodeId); + } + + @Override + public boolean equals(Object obj) { + if (obj == 
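+            // standard equals contract: reject null and any differing class before comparing fields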
null) { + return false; + } + if (getClass() != obj.getClass()) { + return false; + } + Request other = (Request) obj; + return Objects.equals(nodeId, other.nodeId); + } + } + + public static class Response extends BaseNodesResponse implements ToXContentFragment { + + public static class MlMemoryStats extends BaseNodeResponse implements ToXContent, Writeable { + + private final ByteSizeValue memTotal; + private final ByteSizeValue memAdjustedTotal; + private final ByteSizeValue mlMax; + private final ByteSizeValue mlNativeCodeOverhead; + private final ByteSizeValue mlAnomalyDetectors; + private final ByteSizeValue mlDataFrameAnalytics; + private final ByteSizeValue mlNativeInference; + private final ByteSizeValue jvmHeapMax; + private final ByteSizeValue jvmInferenceMax; + private final ByteSizeValue jvmInference; + + public MlMemoryStats( + DiscoveryNode node, + ByteSizeValue memTotal, + ByteSizeValue memAdjustedTotal, + ByteSizeValue mlMax, + ByteSizeValue mlNativeCodeOverhead, + ByteSizeValue mlAnomalyDetectors, + ByteSizeValue mlDataFrameAnalytics, + ByteSizeValue mlNativeInference, + ByteSizeValue jvmHeapMax, + ByteSizeValue jvmInferenceMax, + ByteSizeValue jvmInference + ) { + super(node); + this.memTotal = Objects.requireNonNull(memTotal); + this.memAdjustedTotal = Objects.requireNonNull(memAdjustedTotal); + this.mlMax = Objects.requireNonNull(mlMax); + this.mlNativeCodeOverhead = Objects.requireNonNull(mlNativeCodeOverhead); + this.mlAnomalyDetectors = Objects.requireNonNull(mlAnomalyDetectors); + this.mlDataFrameAnalytics = Objects.requireNonNull(mlDataFrameAnalytics); + this.mlNativeInference = Objects.requireNonNull(mlNativeInference); + this.jvmHeapMax = Objects.requireNonNull(jvmHeapMax); + this.jvmInferenceMax = Objects.requireNonNull(jvmInferenceMax); + this.jvmInference = Objects.requireNonNull(jvmInference); + } + + public MlMemoryStats(StreamInput in) throws IOException { + super(in); + memTotal = new ByteSizeValue(in); + memAdjustedTotal = new ByteSizeValue(in); + mlMax = new ByteSizeValue(in); + mlNativeCodeOverhead = new ByteSizeValue(in); + mlAnomalyDetectors = new ByteSizeValue(in); + mlDataFrameAnalytics = new ByteSizeValue(in); + mlNativeInference = new ByteSizeValue(in); + jvmHeapMax = new ByteSizeValue(in); + jvmInferenceMax = new ByteSizeValue(in); + jvmInference = new ByteSizeValue(in); + } + + public ByteSizeValue getMemTotal() { + return memTotal; + } + + public ByteSizeValue getMemAdjustedTotal() { + return memAdjustedTotal; + } + + public ByteSizeValue getMlMax() { + return mlMax; + } + + public ByteSizeValue getMlNativeCodeOverhead() { + return mlNativeCodeOverhead; + } + + public ByteSizeValue getMlAnomalyDetectors() { + return mlAnomalyDetectors; + } + + public ByteSizeValue getMlDataFrameAnalytics() { + return mlDataFrameAnalytics; + } + + public ByteSizeValue getMlNativeInference() { + return mlNativeInference; + } + + public ByteSizeValue getJvmHeapMax() { + return jvmHeapMax; + } + + public ByteSizeValue getJvmInferenceMax() { + return jvmInferenceMax; + } + + public ByteSizeValue getJvmInference() { + return jvmInference; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + DiscoveryNode node = getNode(); + builder.startObject(node.getId()); + builder.field("name", node.getName()); + builder.field("ephemeral_id", node.getEphemeralId()); + builder.field("transport_address", node.getAddress().toString()); + + builder.startObject("attributes"); + for (Map.Entry entry : 
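+                // expose only the ml.* node attributes; other attributes are not relevant to memory accounting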
node.getAttributes().entrySet()) { + if (entry.getKey().startsWith("ml.")) { + builder.field(entry.getKey(), entry.getValue()); + } + } + builder.endObject(); + + builder.startArray("roles"); + for (DiscoveryNodeRole role : node.getRoles()) { + builder.value(role.roleName()); + } + builder.endArray(); + + builder.startObject(MEM); + + builder.humanReadableField(TOTAL_IN_BYTES, TOTAL, memTotal); + builder.humanReadableField(ADJUSTED_TOTAL_IN_BYTES, ADJUSTED_TOTAL, memAdjustedTotal); + + builder.startObject(ML); + builder.humanReadableField(MAX_IN_BYTES, MAX, mlMax); + builder.humanReadableField(NATIVE_CODE_OVERHEAD_IN_BYTES, NATIVE_CODE_OVERHEAD, mlNativeCodeOverhead); + builder.humanReadableField(ANOMALY_DETECTORS_IN_BYTES, ANOMALY_DETECTORS, mlAnomalyDetectors); + builder.humanReadableField(DATA_FRAME_ANALYTICS_IN_BYTES, DATA_FRAME_ANALYTICS, mlDataFrameAnalytics); + builder.humanReadableField(NATIVE_INFERENCE_IN_BYTES, NATIVE_INFERENCE, mlNativeInference); + builder.endObject(); + + builder.endObject(); // end mem + + builder.startObject(JVM); + builder.humanReadableField(HEAP_MAX_IN_BYTES, HEAP_MAX, jvmHeapMax); + builder.humanReadableField(JAVA_INFERENCE_MAX_IN_BYTES, JAVA_INFERENCE_MAX, jvmInferenceMax); + builder.humanReadableField(JAVA_INFERENCE_IN_BYTES, JAVA_INFERENCE, jvmInference); + builder.endObject(); + + builder.endObject(); // end node + return builder; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + memTotal.writeTo(out); + memAdjustedTotal.writeTo(out); + mlMax.writeTo(out); + mlNativeCodeOverhead.writeTo(out); + mlAnomalyDetectors.writeTo(out); + mlDataFrameAnalytics.writeTo(out); + mlNativeInference.writeTo(out); + jvmHeapMax.writeTo(out); + jvmInferenceMax.writeTo(out); + jvmInference.writeTo(out); + } + + @Override + public int hashCode() { + return Objects.hash( + getNode(), + memTotal, + memAdjustedTotal, + mlMax, + mlNativeCodeOverhead, + mlAnomalyDetectors, + mlDataFrameAnalytics, + mlNativeInference, + jvmHeapMax, + jvmInferenceMax, + jvmInference + ); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + MlMemoryStats that = (MlMemoryStats) o; + return Objects.equals(getNode(), that.getNode()) + && Objects.equals(memTotal, that.memTotal) + && Objects.equals(memAdjustedTotal, that.memAdjustedTotal) + && Objects.equals(mlMax, that.mlMax) + && Objects.equals(mlNativeCodeOverhead, that.mlNativeCodeOverhead) + && Objects.equals(mlAnomalyDetectors, that.mlAnomalyDetectors) + && Objects.equals(mlDataFrameAnalytics, that.mlDataFrameAnalytics) + && Objects.equals(mlNativeInference, that.mlNativeInference) + && Objects.equals(jvmHeapMax, that.jvmHeapMax) + && Objects.equals(jvmInferenceMax, that.jvmInferenceMax) + && Objects.equals(jvmInference, that.jvmInference); + } + + @Override + public String toString() { + return Strings.toString(this); + } + } + + public Response(StreamInput in) throws IOException { + super(in); + } + + public Response(ClusterName clusterName, List nodes, List failures) { + super(clusterName, nodes, failures); + } + + @Override + protected List readNodesFrom(StreamInput in) throws IOException { + return in.readList(MlMemoryStats::new); + } + + @Override + protected void writeNodesTo(StreamOutput out, List nodes) throws IOException { + out.writeList(nodes); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject("nodes"); + 
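+            // each per-node object is keyed by its node id (see MlMemoryStats#toXContent above)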
for (MlMemoryStats mlMemoryStats : getNodes()) { + mlMemoryStats.toXContent(builder, params); + } + builder.endObject(); + return builder; + } + + @Override + public int hashCode() { + return Objects.hash(getNodes()); + } + + @Override + public boolean equals(Object obj) { + if (obj == null) { + return false; + } + if (getClass() != obj.getClass()) { + return false; + } + Response other = (Response) obj; + return Objects.equals(getNodes(), other.getNodes()); + } + + @Override + public final String toString() { + return Strings.toString(this); + } + } +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/TrainedModelCacheInfoAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/TrainedModelCacheInfoAction.java new file mode 100644 index 0000000000000..2632fbdad21f5 --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/TrainedModelCacheInfoAction.java @@ -0,0 +1,160 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.core.ml.action; + +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.ActionType; +import org.elasticsearch.action.FailedNodeException; +import org.elasticsearch.action.support.nodes.BaseNodeResponse; +import org.elasticsearch.action.support.nodes.BaseNodesRequest; +import org.elasticsearch.action.support.nodes.BaseNodesResponse; +import org.elasticsearch.cluster.ClusterName; +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.unit.ByteSizeValue; + +import java.io.IOException; +import java.util.Arrays; +import java.util.List; +import java.util.Objects; + +public class TrainedModelCacheInfoAction extends ActionType { + + public static final TrainedModelCacheInfoAction INSTANCE = new TrainedModelCacheInfoAction(); + public static final String NAME = "cluster:internal/xpack/ml/trained_models/cache/info"; + + private TrainedModelCacheInfoAction() { + super(NAME, Response::new); + } + + public static class Request extends BaseNodesRequest { + + public Request(DiscoveryNode... 
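+        // the caller resolves the target nodes up front and passes the concrete set in here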
concreteNodes) { + super(concreteNodes); + } + + public Request(StreamInput in) throws IOException { + super(in); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + } + + @Override + public ActionRequestValidationException validate() { + return null; + } + + @Override + public int hashCode() { + return Arrays.hashCode(concreteNodes()); + } + + @Override + public boolean equals(Object obj) { + if (obj == null) { + return false; + } + if (getClass() != obj.getClass()) { + return false; + } + Request other = (Request) obj; + return Arrays.deepEquals(concreteNodes(), other.concreteNodes()); + } + } + + public static class Response extends BaseNodesResponse { + + public static class CacheInfo extends BaseNodeResponse implements Writeable { + + private final ByteSizeValue jvmInferenceMax; + private final ByteSizeValue jvmInference; + + public CacheInfo(DiscoveryNode node, ByteSizeValue jvmInferenceMax, ByteSizeValue jvmInference) { + super(node); + this.jvmInferenceMax = Objects.requireNonNull(jvmInferenceMax); + this.jvmInference = Objects.requireNonNull(jvmInference); + } + + public CacheInfo(StreamInput in) throws IOException { + super(in); + jvmInferenceMax = new ByteSizeValue(in); + jvmInference = new ByteSizeValue(in); + } + + public ByteSizeValue getJvmInferenceMax() { + return jvmInferenceMax; + } + + public ByteSizeValue getJvmInference() { + return jvmInference; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + jvmInferenceMax.writeTo(out); + jvmInference.writeTo(out); + } + + @Override + public int hashCode() { + return Objects.hash(getNode(), jvmInferenceMax, jvmInference); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CacheInfo cacheInfo = (CacheInfo) o; + return Objects.equals(getNode(), cacheInfo.getNode()) + && Objects.equals(jvmInferenceMax, cacheInfo.jvmInferenceMax) + && Objects.equals(jvmInference, cacheInfo.jvmInference); + } + } + + public Response(StreamInput in) throws IOException { + super(in); + } + + public Response(ClusterName clusterName, List nodes, List failures) { + super(clusterName, nodes, failures); + } + + @Override + protected List readNodesFrom(StreamInput in) throws IOException { + return in.readList(CacheInfo::new); + } + + @Override + protected void writeNodesTo(StreamOutput out, List nodes) throws IOException { + out.writeList(nodes); + } + + @Override + public int hashCode() { + return Objects.hash(getNodes()); + } + + @Override + public boolean equals(Object obj) { + if (obj == null) { + return false; + } + if (getClass() != obj.getClass()) { + return false; + } + Response other = (Response) obj; + return Objects.equals(getNodes(), other.getNodes()); + } + } +} diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/CacheInfoTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/CacheInfoTests.java new file mode 100644 index 0000000000000..eb8c65c884d88 --- /dev/null +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/CacheInfoTests.java @@ -0,0 +1,40 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.core.ml.action; + +import org.elasticsearch.Version; +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.transport.TransportAddress; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.test.AbstractWireSerializingTestCase; +import org.elasticsearch.xpack.core.ml.action.TrainedModelCacheInfoAction.Response.CacheInfo; + +import java.net.InetAddress; + +public class CacheInfoTests extends AbstractWireSerializingTestCase { + + @Override + protected Writeable.Reader instanceReader() { + return CacheInfo::new; + } + + @Override + protected CacheInfo createTestInstance() { + DiscoveryNode node = new DiscoveryNode( + randomAlphaOfLength(20), + new TransportAddress(InetAddress.getLoopbackAddress(), randomIntBetween(1024, 65535)), + Version.CURRENT + ); + return createTestInstance(node); + } + + static CacheInfo createTestInstance(DiscoveryNode node) { + return new CacheInfo(node, ByteSizeValue.ofMb(randomLongBetween(1000, 30000)), ByteSizeValue.ofMb(randomLongBetween(0, 1000))); + } +} diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/MlMemoryActionRequestTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/MlMemoryActionRequestTests.java new file mode 100644 index 0000000000000..eb4716bc05178 --- /dev/null +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/MlMemoryActionRequestTests.java @@ -0,0 +1,24 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.core.ml.action; + +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.test.AbstractWireSerializingTestCase; + +public class MlMemoryActionRequestTests extends AbstractWireSerializingTestCase { + + @Override + protected Writeable.Reader instanceReader() { + return MlMemoryAction.Request::new; + } + + @Override + protected MlMemoryAction.Request createTestInstance() { + return new MlMemoryAction.Request(randomAlphaOfLength(20)); + } +} diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/MlMemoryActionResponseTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/MlMemoryActionResponseTests.java new file mode 100644 index 0000000000000..2875ab7c80208 --- /dev/null +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/MlMemoryActionResponseTests.java @@ -0,0 +1,55 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.core.ml.action; + +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.Version; +import org.elasticsearch.action.FailedNodeException; +import org.elasticsearch.cluster.ClusterName; +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.transport.TransportAddress; +import org.elasticsearch.test.AbstractWireSerializingTestCase; + +import java.net.InetAddress; +import java.util.ArrayList; +import java.util.List; + +public class MlMemoryActionResponseTests extends AbstractWireSerializingTestCase { + + @Override + protected Writeable.Reader instanceReader() { + return MlMemoryAction.Response::new; + } + + @Override + protected MlMemoryAction.Response createTestInstance() { + int numNodes = randomIntBetween(1, 20); + List nodes = new ArrayList<>(numNodes); + for (int i = 0; i < numNodes; ++i) { + DiscoveryNode node = new DiscoveryNode( + randomAlphaOfLength(20), + new TransportAddress(InetAddress.getLoopbackAddress(), 9200 + i), + Version.CURRENT + ); + nodes.add(MlMemoryStatsTests.createTestInstance(node)); + } + int numFailures = randomIntBetween(0, 5); + List failures = (numFailures > 0) ? new ArrayList<>(numFailures) : List.of(); + for (int i = 0; i < numFailures; ++i) { + failures.add( + new FailedNodeException( + randomAlphaOfLength(20), + randomAlphaOfLength(50), + new ElasticsearchException(randomAlphaOfLength(30)) + ) + ); + } + return new MlMemoryAction.Response(ClusterName.DEFAULT, nodes, failures); + } +} diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/MlMemoryStatsTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/MlMemoryStatsTests.java new file mode 100644 index 0000000000000..d5093e7f4adcc --- /dev/null +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/MlMemoryStatsTests.java @@ -0,0 +1,52 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.core.ml.action; + +import org.elasticsearch.Version; +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.transport.TransportAddress; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.test.AbstractWireSerializingTestCase; +import org.elasticsearch.xpack.core.ml.action.MlMemoryAction.Response.MlMemoryStats; + +import java.net.InetAddress; + +public class MlMemoryStatsTests extends AbstractWireSerializingTestCase { + + @Override + protected Writeable.Reader instanceReader() { + return MlMemoryStats::new; + } + + @Override + protected MlMemoryStats createTestInstance() { + DiscoveryNode node = new DiscoveryNode( + randomAlphaOfLength(20), + new TransportAddress(InetAddress.getLoopbackAddress(), randomIntBetween(1024, 65535)), + Version.CURRENT + ); + return createTestInstance(node); + } + + static MlMemoryStats createTestInstance(DiscoveryNode node) { + return new MlMemoryStats( + node, + ByteSizeValue.ofGb(randomLongBetween(1, 64)), + ByteSizeValue.ofGb(randomLongBetween(1, 64)), + ByteSizeValue.ofGb(randomLongBetween(0, 48)), + ByteSizeValue.ofMb(randomLongBetween(0, 20000)), + ByteSizeValue.ofMb(randomLongBetween(0, 20000)), + ByteSizeValue.ofMb(randomLongBetween(0, 20000)), + ByteSizeValue.ofKb(randomLongBetween(0, 30000)), + ByteSizeValue.ofGb(randomLongBetween(0, 32)), + ByteSizeValue.ofGb(randomLongBetween(0, 16)), + ByteSizeValue.ofMb(randomLongBetween(0, 10000)) + ); + } +} diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/TrainedModelCacheInfoRequestTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/TrainedModelCacheInfoRequestTests.java new file mode 100644 index 0000000000000..e879dc62a4c6a --- /dev/null +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/TrainedModelCacheInfoRequestTests.java @@ -0,0 +1,38 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */
+
+package org.elasticsearch.xpack.core.ml.action;
+
+import org.elasticsearch.Version;
+import org.elasticsearch.cluster.node.DiscoveryNode;
+import org.elasticsearch.common.io.stream.Writeable;
+import org.elasticsearch.common.transport.TransportAddress;
+import org.elasticsearch.test.AbstractWireSerializingTestCase;
+
+import java.net.InetAddress;
+
+public class TrainedModelCacheInfoRequestTests extends AbstractWireSerializingTestCase<TrainedModelCacheInfoAction.Request> {
+
+    @Override
+    protected Writeable.Reader<TrainedModelCacheInfoAction.Request> instanceReader() {
+        return TrainedModelCacheInfoAction.Request::new;
+    }
+
+    @Override
+    protected TrainedModelCacheInfoAction.Request createTestInstance() {
+        int numNodes = randomIntBetween(1, 20);
+        DiscoveryNode[] nodes = new DiscoveryNode[numNodes];
+        for (int i = 0; i < numNodes; ++i) {
+            nodes[i] = new DiscoveryNode(
+                randomAlphaOfLength(20),
+                new TransportAddress(InetAddress.getLoopbackAddress(), 9200 + i),
+                Version.CURRENT
+            );
+        }
+        return new TrainedModelCacheInfoAction.Request(nodes);
+    }
+}
diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/TrainedModelCacheInfoResponseTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/TrainedModelCacheInfoResponseTests.java
new file mode 100644
index 0000000000000..2964a47e9c3f0
--- /dev/null
+++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/TrainedModelCacheInfoResponseTests.java
@@ -0,0 +1,56 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.xpack.core.ml.action;
+
+import org.elasticsearch.ElasticsearchException;
+import org.elasticsearch.Version;
+import org.elasticsearch.action.FailedNodeException;
+import org.elasticsearch.cluster.ClusterName;
+import org.elasticsearch.cluster.node.DiscoveryNode;
+import org.elasticsearch.common.io.stream.Writeable;
+import org.elasticsearch.common.transport.TransportAddress;
+import org.elasticsearch.test.AbstractWireSerializingTestCase;
+import org.elasticsearch.xpack.core.ml.action.TrainedModelCacheInfoAction.Response.CacheInfo;
+
+import java.net.InetAddress;
+import java.util.ArrayList;
+import java.util.List;
+
+public class TrainedModelCacheInfoResponseTests extends AbstractWireSerializingTestCase<TrainedModelCacheInfoAction.Response> {
+
+    @Override
+    protected Writeable.Reader<TrainedModelCacheInfoAction.Response> instanceReader() {
+        return TrainedModelCacheInfoAction.Response::new;
+    }
+
+    @Override
+    protected TrainedModelCacheInfoAction.Response createTestInstance() {
+        int numNodes = randomIntBetween(1, 20);
+        List<CacheInfo> nodes = new ArrayList<>(numNodes);
+        for (int i = 0; i < numNodes; ++i) {
+            DiscoveryNode node = new DiscoveryNode(
+                randomAlphaOfLength(20),
+                new TransportAddress(InetAddress.getLoopbackAddress(), 9200 + i),
+                Version.CURRENT
+            );
+            nodes.add(CacheInfoTests.createTestInstance(node));
+        }
+        int numFailures = randomIntBetween(0, 5);
+        List<FailedNodeException> failures = (numFailures > 0) ?
new ArrayList<>(numFailures) : List.of(); + for (int i = 0; i < numFailures; ++i) { + failures.add( + new FailedNodeException( + randomAlphaOfLength(20), + randomAlphaOfLength(50), + new ElasticsearchException(randomAlphaOfLength(30)) + ) + ); + } + return new TrainedModelCacheInfoAction.Response(ClusterName.DEFAULT, nodes, failures); + } +} diff --git a/x-pack/plugin/ml/qa/ml-with-security/src/yamlRestTest/java/org/elasticsearch/smoketest/MlWithSecurityUserRoleIT.java b/x-pack/plugin/ml/qa/ml-with-security/src/yamlRestTest/java/org/elasticsearch/smoketest/MlWithSecurityUserRoleIT.java index 4f874713606ff..0f594e47c4a76 100644 --- a/x-pack/plugin/ml/qa/ml-with-security/src/yamlRestTest/java/org/elasticsearch/smoketest/MlWithSecurityUserRoleIT.java +++ b/x-pack/plugin/ml/qa/ml-with-security/src/yamlRestTest/java/org/elasticsearch/smoketest/MlWithSecurityUserRoleIT.java @@ -28,7 +28,6 @@ public class MlWithSecurityUserRoleIT extends MlWithSecurityIT { */ private static final List ALLOWED_ACTION_PATTERNS = Arrays.asList( Pattern.compile("ml\\.get_.*"), - Pattern.compile("ml\\.find_file_structure"), Pattern.compile("ml\\.evaluate_data_frame") ); diff --git a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/MlMemoryIT.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/MlMemoryIT.java new file mode 100644 index 0000000000000..14c6e7a770ed3 --- /dev/null +++ b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/MlMemoryIT.java @@ -0,0 +1,200 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.ml.integration; + +import org.elasticsearch.cluster.node.DiscoveryNodeRole; +import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.xpack.core.ml.action.GetJobsStatsAction; +import org.elasticsearch.xpack.core.ml.action.MlMemoryAction; +import org.elasticsearch.xpack.core.ml.action.MlMemoryAction.Response.MlMemoryStats; +import org.elasticsearch.xpack.core.ml.action.NodeAcknowledgedResponse; +import org.elasticsearch.xpack.core.ml.action.OpenJobAction; +import org.elasticsearch.xpack.core.ml.action.PutJobAction; +import org.elasticsearch.xpack.core.ml.action.PutTrainedModelAction; +import org.elasticsearch.xpack.core.ml.action.PutTrainedModelDefinitionPartAction; +import org.elasticsearch.xpack.core.ml.action.PutTrainedModelVocabularyAction; +import org.elasticsearch.xpack.core.ml.action.StartTrainedModelDeploymentAction; +import org.elasticsearch.xpack.core.ml.dataframe.DataFrameAnalyticsConfig; +import org.elasticsearch.xpack.core.ml.dataframe.analyses.Classification; +import org.elasticsearch.xpack.core.ml.inference.TrainedModelConfig; +import org.elasticsearch.xpack.core.ml.inference.TrainedModelType; +import org.elasticsearch.xpack.core.ml.inference.allocation.AllocationStatus; +import org.elasticsearch.xpack.core.ml.inference.trainedmodel.BertTokenization; +import org.elasticsearch.xpack.core.ml.inference.trainedmodel.PassThroughConfig; +import org.elasticsearch.xpack.core.ml.inference.trainedmodel.Tokenization; +import org.elasticsearch.xpack.core.ml.job.config.Job; +import org.elasticsearch.xpack.core.ml.job.config.JobState; +import org.elasticsearch.xpack.ml.inference.nlp.tokenizers.BertTokenizer; +import org.elasticsearch.xpack.ml.support.BaseMlIntegTestCase; +import org.junit.After; + +import java.util.Base64; +import java.util.List; + +import static org.elasticsearch.xpack.ml.integration.ClassificationIT.KEYWORD_FIELD; +import static org.elasticsearch.xpack.ml.integration.PyTorchModelIT.BASE_64_ENCODED_MODEL; +import static org.elasticsearch.xpack.ml.integration.PyTorchModelIT.RAW_MODEL_SIZE; +import static org.hamcrest.Matchers.empty; +import static org.hamcrest.Matchers.emptyString; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThan; +import static org.hamcrest.Matchers.greaterThanOrEqualTo; +import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.lessThanOrEqualTo; +import static org.hamcrest.Matchers.not; + +public class MlMemoryIT extends MlNativeDataFrameAnalyticsIntegTestCase { + + @After + public void cleanUpAfterTest() { + cleanUp(); + } + + public void testMemoryStats() throws Exception { + + deployTrainedModel(); + openAnomalyDetectionJob(); + String dfaJobId = "dfa"; + startDataFrameAnalyticsJob(dfaJobId); + + MlMemoryAction.Response response = client().execute(MlMemoryAction.INSTANCE, new MlMemoryAction.Request("_all")).actionGet(); + + assertThat(response.failures(), empty()); + + List statsList = response.getNodes(); + // There are 4 nodes: 3 in the external cluster plus the test harness + assertThat(statsList, hasSize(4)); + + int mlNodes = 0; + int nodesWithPytorchModel = 0; + int nodesWithAnomalyJob = 0; + int nodesWithDfaJob = 0; + + for (MlMemoryStats stats : statsList) { + assertThat(stats.getMemTotal().getBytes(), greaterThan(0L)); + assertThat(stats.getMemAdjustedTotal().getBytes(), greaterThan(0L)); + 
assertThat(stats.getMemAdjustedTotal().getBytes(), lessThanOrEqualTo(stats.getMemTotal().getBytes())); + boolean isMlNode = stats.getNode().getRoles().contains(DiscoveryNodeRole.ML_ROLE); + boolean hasPyTorchModel = (stats.getMlNativeInference().getBytes() > 0); + boolean hasAnomalyJob = (stats.getMlAnomalyDetectors().getBytes() > 0); + boolean hasDfaJob = (stats.getMlDataFrameAnalytics().getBytes() > 0); + if (isMlNode) { + ++mlNodes; + assertThat(stats.getMlMax().getBytes(), greaterThan(0L)); + if (hasPyTorchModel) { + ++nodesWithPytorchModel; + } + if (hasAnomalyJob) { + ++nodesWithAnomalyJob; + } + if (hasDfaJob) { + ++nodesWithDfaJob; + } + } else { + assertThat(stats.getMlMax().getBytes(), equalTo(0L)); + assertThat(stats.getMlAnomalyDetectors().getBytes(), equalTo(0L)); + assertThat(stats.getMlDataFrameAnalytics().getBytes(), equalTo(0L)); + assertThat(stats.getMlNativeInference().getBytes(), equalTo(0L)); + assertThat(stats.getMlNativeCodeOverhead().getBytes(), equalTo(0L)); + } + if (hasAnomalyJob || hasDfaJob || hasPyTorchModel) { + assertThat(stats.getMlNativeCodeOverhead().getBytes(), greaterThan(0L)); + } else { + assertThat(stats.getMlNativeCodeOverhead().getBytes(), equalTo(0L)); + } + assertThat(stats.getJvmHeapMax().getBytes(), greaterThan(0L)); + assertThat(stats.getJvmInferenceMax().getBytes(), greaterThan(0L)); + // This next one has to be >= 0 rather than 0 because the cache is invalidated + // lazily after models are no longer in use, and previous tests could have + // caused a model to be cached. + assertThat(stats.getJvmInference().getBytes(), greaterThanOrEqualTo(0L)); + } + assertThat(mlNodes, is(2)); + assertThat(nodesWithPytorchModel, equalTo(mlNodes)); + assertThat(nodesWithAnomalyJob, is(1)); + // It's possible that the DFA job could have finished before the stats call was made + assumeFalse( + "Data frame analytics job finished really quickly, so cannot assert DFA memory stats", + getProgress(dfaJobId).stream().allMatch(phaseProgress -> phaseProgress.getProgressPercent() == 100) + ); + assertThat(nodesWithDfaJob, is(1)); + } + + private void openAnomalyDetectionJob() throws Exception { + Job.Builder job = BaseMlIntegTestCase.createFareQuoteJob("ad", ByteSizeValue.ofMb(20)); + client().execute(PutJobAction.INSTANCE, new PutJobAction.Request(job)).actionGet(); + client().execute(OpenJobAction.INSTANCE, new OpenJobAction.Request(job.getId())).actionGet(); + assertBusy(() -> { + GetJobsStatsAction.Response response = client().execute( + GetJobsStatsAction.INSTANCE, + new GetJobsStatsAction.Request(job.getId()) + ).actionGet(); + assertEquals(JobState.OPENED, response.getResponse().results().get(0).getState()); + }); + } + + private void startDataFrameAnalyticsJob(String jobId) throws Exception { + String sourceIndex = "source"; + String destIndex = "dest"; + ClassificationIT.createIndex(sourceIndex, false); + ClassificationIT.indexData(sourceIndex, 350, 0, KEYWORD_FIELD); + + DataFrameAnalyticsConfig config = buildAnalytics(jobId, sourceIndex, destIndex, null, new Classification(KEYWORD_FIELD)); + putAnalytics(config); + + NodeAcknowledgedResponse response = startAnalytics(jobId); + assertThat(response.getNode(), not(emptyString())); + + waitUntilSomeProgressHasBeenMadeForPhase(jobId, "loading_data"); + } + + private void deployTrainedModel() { + String modelId = "pytorch"; + client().execute( + PutTrainedModelAction.INSTANCE, + new PutTrainedModelAction.Request( + TrainedModelConfig.builder() + .setModelType(TrainedModelType.PYTORCH) + .setInferenceConfig( + new 
PassThroughConfig(null, new BertTokenization(null, false, null, Tokenization.Truncate.NONE), null) + ) + .setModelId(modelId) + .build(), + false + ) + ).actionGet(); + client().execute( + PutTrainedModelDefinitionPartAction.INSTANCE, + new PutTrainedModelDefinitionPartAction.Request( + modelId, + new BytesArray(Base64.getDecoder().decode(BASE_64_ENCODED_MODEL)), + 0, + RAW_MODEL_SIZE, + 1 + ) + ).actionGet(); + client().execute( + PutTrainedModelVocabularyAction.INSTANCE, + new PutTrainedModelVocabularyAction.Request( + modelId, + List.of("these", "are", "my", "words", BertTokenizer.UNKNOWN_TOKEN, BertTokenizer.PAD_TOKEN) + ) + ).actionGet(); + client().execute( + StartTrainedModelDeploymentAction.INSTANCE, + new StartTrainedModelDeploymentAction.Request(modelId).setWaitForState(AllocationStatus.State.STARTED) + ).actionGet(); + } + + @Override + boolean supportsInference() { + return true; + } +} diff --git a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/MlNativeDataFrameAnalyticsIntegTestCase.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/MlNativeDataFrameAnalyticsIntegTestCase.java index c57e41537f722..90fb4e81b7364 100644 --- a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/MlNativeDataFrameAnalyticsIntegTestCase.java +++ b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/MlNativeDataFrameAnalyticsIntegTestCase.java @@ -205,7 +205,7 @@ protected void assertProgressComplete(String id) { abstract boolean supportsInference(); - private List getProgress(String id) { + protected List getProgress(String id) { GetDataFrameAnalyticsStatsAction.Response.Stats stats = getAnalyticsStats(id); assertThat(stats.getId(), equalTo(id)); List progress = stats.getProgress(); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java index d0138f49a8591..527c395e6e3df 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java @@ -136,6 +136,7 @@ import org.elasticsearch.xpack.core.ml.action.IsolateDatafeedAction; import org.elasticsearch.xpack.core.ml.action.KillProcessAction; import org.elasticsearch.xpack.core.ml.action.MlInfoAction; +import org.elasticsearch.xpack.core.ml.action.MlMemoryAction; import org.elasticsearch.xpack.core.ml.action.OpenJobAction; import org.elasticsearch.xpack.core.ml.action.PersistJobAction; import org.elasticsearch.xpack.core.ml.action.PostCalendarEventsAction; @@ -161,6 +162,7 @@ import org.elasticsearch.xpack.core.ml.action.StopDataFrameAnalyticsAction; import org.elasticsearch.xpack.core.ml.action.StopDatafeedAction; import org.elasticsearch.xpack.core.ml.action.StopTrainedModelDeploymentAction; +import org.elasticsearch.xpack.core.ml.action.TrainedModelCacheInfoAction; import org.elasticsearch.xpack.core.ml.action.UpdateCalendarJobAction; import org.elasticsearch.xpack.core.ml.action.UpdateDataFrameAnalyticsAction; import org.elasticsearch.xpack.core.ml.action.UpdateDatafeedAction; @@ -231,6 +233,7 @@ import org.elasticsearch.xpack.ml.action.TransportIsolateDatafeedAction; import org.elasticsearch.xpack.ml.action.TransportKillProcessAction; import org.elasticsearch.xpack.ml.action.TransportMlInfoAction; 
+import org.elasticsearch.xpack.ml.action.TransportMlMemoryAction; import org.elasticsearch.xpack.ml.action.TransportOpenJobAction; import org.elasticsearch.xpack.ml.action.TransportPersistJobAction; import org.elasticsearch.xpack.ml.action.TransportPostCalendarEventsAction; @@ -256,6 +259,7 @@ import org.elasticsearch.xpack.ml.action.TransportStopDataFrameAnalyticsAction; import org.elasticsearch.xpack.ml.action.TransportStopDatafeedAction; import org.elasticsearch.xpack.ml.action.TransportStopTrainedModelDeploymentAction; +import org.elasticsearch.xpack.ml.action.TransportTrainedModelCacheInfoAction; import org.elasticsearch.xpack.ml.action.TransportUpdateCalendarJobAction; import org.elasticsearch.xpack.ml.action.TransportUpdateDataFrameAnalyticsAction; import org.elasticsearch.xpack.ml.action.TransportUpdateDatafeedAction; @@ -342,6 +346,7 @@ import org.elasticsearch.xpack.ml.process.NativeStorageProvider; import org.elasticsearch.xpack.ml.rest.RestDeleteExpiredDataAction; import org.elasticsearch.xpack.ml.rest.RestMlInfoAction; +import org.elasticsearch.xpack.ml.rest.RestMlMemoryAction; import org.elasticsearch.xpack.ml.rest.RestSetUpgradeModeAction; import org.elasticsearch.xpack.ml.rest.calendar.RestDeleteCalendarAction; import org.elasticsearch.xpack.ml.rest.calendar.RestDeleteCalendarEventAction; @@ -1137,6 +1142,7 @@ public List getRestHandlers( new RestGetJobsAction(), new RestGetJobStatsAction(), new RestMlInfoAction(), + new RestMlMemoryAction(), new RestPutJobAction(), new RestPostJobUpdateAction(), new RestDeleteJobAction(), @@ -1223,6 +1229,7 @@ public List getRestHandlers( new ActionHandler<>(GetJobsAction.INSTANCE, TransportGetJobsAction.class), new ActionHandler<>(GetJobsStatsAction.INSTANCE, TransportGetJobsStatsAction.class), new ActionHandler<>(MlInfoAction.INSTANCE, TransportMlInfoAction.class), + new ActionHandler<>(MlMemoryAction.INSTANCE, TransportMlMemoryAction.class), new ActionHandler<>(PutJobAction.INSTANCE, TransportPutJobAction.class), new ActionHandler<>(UpdateJobAction.INSTANCE, TransportUpdateJobAction.class), new ActionHandler<>(DeleteJobAction.INSTANCE, TransportDeleteJobAction.class), @@ -1281,6 +1288,7 @@ public List getRestHandlers( new ActionHandler<>(EvaluateDataFrameAction.INSTANCE, TransportEvaluateDataFrameAction.class), new ActionHandler<>(ExplainDataFrameAnalyticsAction.INSTANCE, TransportExplainDataFrameAnalyticsAction.class), new ActionHandler<>(InternalInferModelAction.INSTANCE, TransportInternalInferModelAction.class), + new ActionHandler<>(TrainedModelCacheInfoAction.INSTANCE, TransportTrainedModelCacheInfoAction.class), new ActionHandler<>(GetTrainedModelsAction.INSTANCE, TransportGetTrainedModelsAction.class), new ActionHandler<>(DeleteTrainedModelAction.INSTANCE, TransportDeleteTrainedModelAction.class), new ActionHandler<>(GetTrainedModelsStatsAction.INSTANCE, TransportGetTrainedModelsStatsAction.class), diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportMlMemoryAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportMlMemoryAction.java new file mode 100644 index 0000000000000..08c6361c09408 --- /dev/null +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportMlMemoryAction.java @@ -0,0 +1,231 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.ml.action; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.FailedNodeException; +import org.elasticsearch.action.admin.cluster.node.stats.NodeStats; +import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsResponse; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.master.TransportMasterNodeAction; +import org.elasticsearch.client.internal.Client; +import org.elasticsearch.client.internal.OriginSettingClient; +import org.elasticsearch.client.internal.ParentTaskAssigningClient; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.block.ClusterBlockException; +import org.elasticsearch.cluster.block.ClusterBlockLevel; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.cluster.node.DiscoveryNodeRole; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.settings.ClusterSettings; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.monitor.os.OsStats; +import org.elasticsearch.persistent.PersistentTasksCustomMetadata; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.core.ml.action.MlMemoryAction; +import org.elasticsearch.xpack.core.ml.action.MlMemoryAction.Response.MlMemoryStats; +import org.elasticsearch.xpack.core.ml.action.TrainedModelCacheInfoAction; +import org.elasticsearch.xpack.core.ml.action.TrainedModelCacheInfoAction.Response.CacheInfo; +import org.elasticsearch.xpack.ml.job.NodeLoad; +import org.elasticsearch.xpack.ml.job.NodeLoadDetector; +import org.elasticsearch.xpack.ml.process.MlMemoryTracker; + +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.Optional; + +import static org.elasticsearch.xpack.core.ClientHelper.ML_ORIGIN; +import static org.elasticsearch.xpack.ml.MachineLearning.MAX_MACHINE_MEMORY_PERCENT; +import static org.elasticsearch.xpack.ml.MachineLearning.MAX_OPEN_JOBS_PER_NODE; +import static org.elasticsearch.xpack.ml.MachineLearning.USE_AUTO_MACHINE_MEMORY_PERCENT; + +public class TransportMlMemoryAction extends TransportMasterNodeAction { + + private final Client client; + private final MlMemoryTracker memoryTracker; + + @Inject + public TransportMlMemoryAction( + TransportService transportService, + ClusterService clusterService, + ThreadPool threadPool, + ActionFilters actionFilters, + IndexNameExpressionResolver indexNameExpressionResolver, + Client client, + MlMemoryTracker memoryTracker + ) { + super( + MlMemoryAction.NAME, + transportService, + clusterService, + threadPool, + actionFilters, + MlMemoryAction.Request::new, + indexNameExpressionResolver, + MlMemoryAction.Response::new, + ThreadPool.Names.SAME + ); + this.client = new OriginSettingClient(client, ML_ORIGIN); + this.memoryTracker = memoryTracker; + } + + @Override + protected void masterOperation( + Task task, + MlMemoryAction.Request request, + ClusterState state, + ActionListener listener + ) throws Exception { + + ClusterSettings clusterSettings = clusterService.getClusterSettings(); + + // Resolve the node specification to some concrete 
nodes + String[] nodeIds = state.nodes().resolveNodes(request.getNodeId()); + + ParentTaskAssigningClient parentTaskClient = new ParentTaskAssigningClient(client, task.getParentTaskId()); + + ActionListener nodeStatsListener = ActionListener.wrap(nodesStatsResponse -> { + TrainedModelCacheInfoAction.Request trainedModelCacheInfoRequest = new TrainedModelCacheInfoAction.Request( + nodesStatsResponse.getNodes().stream().map(NodeStats::getNode).toArray(DiscoveryNode[]::new) + ).timeout(request.timeout()); + + parentTaskClient.execute( + TrainedModelCacheInfoAction.INSTANCE, + trainedModelCacheInfoRequest, + ActionListener.wrap( + trainedModelCacheInfoResponse -> handleResponses( + state, + clusterSettings, + nodesStatsResponse, + trainedModelCacheInfoResponse, + listener + ), + listener::onFailure + ) + ); + }, listener::onFailure); + + // Next get node stats related to the OS and JVM + ActionListener memoryTrackerRefreshListener = ActionListener.wrap( + r -> parentTaskClient.admin() + .cluster() + .prepareNodesStats(nodeIds) + .clear() + .setOs(true) + .setJvm(true) + .setTimeout(request.timeout()) + .execute(nodeStatsListener), + listener::onFailure + ); + + // If the memory tracker has never been refreshed, do that first + if (memoryTracker.isEverRefreshed()) { + memoryTrackerRefreshListener.onResponse(null); + } else { + memoryTracker.refresh(state.getMetadata().custom(PersistentTasksCustomMetadata.TYPE), memoryTrackerRefreshListener); + } + } + + void handleResponses( + ClusterState state, + ClusterSettings clusterSettings, + NodesStatsResponse nodesStatsResponse, + TrainedModelCacheInfoAction.Response trainedModelCacheInfoResponse, + ActionListener listener + ) { + List nodeResponses = new ArrayList<>(nodesStatsResponse.getNodes().size()); + + int maxOpenJobsPerNode = clusterSettings.get(MAX_OPEN_JOBS_PER_NODE); + int maxMachineMemoryPercent = clusterSettings.get(MAX_MACHINE_MEMORY_PERCENT); + boolean useAutoMachineMemoryPercent = clusterSettings.get(USE_AUTO_MACHINE_MEMORY_PERCENT); + NodeLoadDetector nodeLoadDetector = new NodeLoadDetector(memoryTracker); + Map cacheInfoByNode = trainedModelCacheInfoResponse.getNodesMap(); + List failures = new ArrayList<>(nodesStatsResponse.failures()); + + for (NodeStats nodeStats : nodesStatsResponse.getNodes()) { + DiscoveryNode node = nodeStats.getNode(); + String nodeId = node.getId(); + // We only provide a response if both requests we issued to all nodes returned. + // The loop is iterating successes of the node stats call with failures already + // accumulated. This check adds failures of the trained model cache call that + // happened on nodes where the node stats call succeeded. 
+ Optional trainedModelCacheInfoFailure = trainedModelCacheInfoResponse.failures() + .stream() + .filter(e -> nodeId.equals(e.nodeId())) + .findFirst(); + if (trainedModelCacheInfoFailure.isPresent()) { + failures.add(trainedModelCacheInfoFailure.get()); + continue; + } + OsStats.Mem mem = nodeStats.getOs().getMem(); + ByteSizeValue mlMax; + ByteSizeValue mlNativeCodeOverhead; + ByteSizeValue mlAnomalyDetectors; + ByteSizeValue mlDataFrameAnalytics; + ByteSizeValue mlNativeInference; + if (node.getRoles().contains(DiscoveryNodeRole.ML_ROLE)) { + NodeLoad nodeLoad = nodeLoadDetector.detectNodeLoad( + state, + node, + maxOpenJobsPerNode, + maxMachineMemoryPercent, + useAutoMachineMemoryPercent + ); + mlMax = ByteSizeValue.ofBytes(nodeLoad.getMaxMlMemory()); + mlNativeCodeOverhead = ByteSizeValue.ofBytes(nodeLoad.getAssignedNativeCodeOverheadMemory()); + mlAnomalyDetectors = ByteSizeValue.ofBytes(nodeLoad.getAssignedAnomalyDetectorMemory()); + mlDataFrameAnalytics = ByteSizeValue.ofBytes(nodeLoad.getAssignedDataFrameAnalyticsMemory()); + mlNativeInference = ByteSizeValue.ofBytes(nodeLoad.getAssignedNativeInferenceMemory()); + } else { + mlMax = ByteSizeValue.ZERO; + mlNativeCodeOverhead = ByteSizeValue.ZERO; + mlAnomalyDetectors = ByteSizeValue.ZERO; + mlDataFrameAnalytics = ByteSizeValue.ZERO; + mlNativeInference = ByteSizeValue.ZERO; + } + ByteSizeValue jvmHeapMax = nodeStats.getJvm().getMem().getHeapMax(); + ByteSizeValue jvmInferenceMax; + ByteSizeValue jvmInference; + CacheInfo cacheInfoForNode = cacheInfoByNode.get(nodeId); + if (cacheInfoForNode != null) { + jvmInferenceMax = cacheInfoForNode.getJvmInferenceMax(); + jvmInference = cacheInfoForNode.getJvmInference(); + } else { + jvmInferenceMax = ByteSizeValue.ZERO; + jvmInference = ByteSizeValue.ZERO; + } + nodeResponses.add( + new MlMemoryStats( + node, + mem.getTotal(), + mem.getAdjustedTotal(), + mlMax, + mlNativeCodeOverhead, + mlAnomalyDetectors, + mlDataFrameAnalytics, + mlNativeInference, + jvmHeapMax, + jvmInferenceMax, + jvmInference + ) + ); + } + + listener.onResponse(new MlMemoryAction.Response(state.getClusterName(), nodeResponses, failures)); + } + + @Override + protected ClusterBlockException checkBlock(MlMemoryAction.Request request, ClusterState state) { + return state.blocks().globalBlockedException(ClusterBlockLevel.METADATA_READ); + } +} diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportTrainedModelCacheInfoAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportTrainedModelCacheInfoAction.java new file mode 100644 index 0000000000000..832fb72010bcf --- /dev/null +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportTrainedModelCacheInfoAction.java @@ -0,0 +1,115 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.ml.action; + +import org.elasticsearch.action.FailedNodeException; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.nodes.TransportNodesAction; +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.tasks.CancellableTask; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.tasks.TaskId; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportRequest; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.core.ml.action.TrainedModelCacheInfoAction; +import org.elasticsearch.xpack.core.ml.action.TrainedModelCacheInfoAction.Response.CacheInfo; +import org.elasticsearch.xpack.ml.inference.loadingservice.ModelLoadingService; + +import java.io.IOException; +import java.util.List; +import java.util.Map; + +public class TransportTrainedModelCacheInfoAction extends TransportNodesAction< + TrainedModelCacheInfoAction.Request, + TrainedModelCacheInfoAction.Response, + TransportTrainedModelCacheInfoAction.NodeModelCacheInfoRequest, + CacheInfo> { + + private final ModelLoadingService modelLoadingService; + + @Inject + public TransportTrainedModelCacheInfoAction( + ThreadPool threadPool, + ClusterService clusterService, + TransportService transportService, + ActionFilters actionFilters, + ModelLoadingService modelLoadingService + ) { + super( + TrainedModelCacheInfoAction.NAME, + threadPool, + clusterService, + transportService, + actionFilters, + TrainedModelCacheInfoAction.Request::new, + NodeModelCacheInfoRequest::new, + ThreadPool.Names.MANAGEMENT, + CacheInfo.class + ); + this.modelLoadingService = modelLoadingService; + } + + @Override + protected TrainedModelCacheInfoAction.Response newResponse( + TrainedModelCacheInfoAction.Request request, + List responses, + List failures + ) { + return new TrainedModelCacheInfoAction.Response(clusterService.getClusterName(), responses, failures); + } + + @Override + protected NodeModelCacheInfoRequest newNodeRequest(TrainedModelCacheInfoAction.Request request) { + return new NodeModelCacheInfoRequest(request); + } + + @Override + protected CacheInfo newNodeResponse(StreamInput in, DiscoveryNode node) throws IOException { + return new CacheInfo(in); + } + + @Override + protected CacheInfo nodeOperation(NodeModelCacheInfoRequest nodeModelCacheInfoRequest, Task task) { + assert task instanceof CancellableTask; + return new CacheInfo( + transportService.getLocalNode(), + modelLoadingService.getMaxCacheSize(), + modelLoadingService.getCurrentCacheSize() + ); + } + + public static class NodeModelCacheInfoRequest extends TransportRequest { + + TrainedModelCacheInfoAction.Request request; + + public NodeModelCacheInfoRequest(StreamInput in) throws IOException { + super(in); + request = new TrainedModelCacheInfoAction.Request(in); + } + + NodeModelCacheInfoRequest(TrainedModelCacheInfoAction.Request request) { + this.request = request; + } + + @Override + public Task createTask(long id, String type, String action, TaskId parentTaskId, Map headers) { + return new CancellableTask(id, type, action, "", parentTaskId, headers); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + request.writeTo(out); + } + } +} diff 
--git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/autoscaling/MlAutoscalingDeciderService.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/autoscaling/MlAutoscalingDeciderService.java index 34db2e7d039d8..c225e08639a05 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/autoscaling/MlAutoscalingDeciderService.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/autoscaling/MlAutoscalingDeciderService.java @@ -214,7 +214,7 @@ static Optional>> determineUnassignab if (nodeLoad.getFreeMemory() >= requiredMemory) { assignmentIter.remove(); // Remove and add to the priority queue to make sure the biggest node with availability is first - mostFreeMemoryFirst.add(mostFreeMemoryFirst.poll().incNumAssignedJobs().incAssignedJobMemory(requiredMemory)); + mostFreeMemoryFirst.add(mostFreeMemoryFirst.poll().incNumAssignedJobs().incAssignedAnomalyDetectorMemory(requiredMemory)); } } List adjustedLoads = mostFreeMemoryFirst.stream().map(NodeLoad.Builder::build).collect(Collectors.toList()); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/loadingservice/ModelLoadingService.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/loadingservice/ModelLoadingService.java index 7dcdac32e4e68..920e01e6ba972 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/loadingservice/ModelLoadingService.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/loadingservice/ModelLoadingService.java @@ -181,6 +181,20 @@ boolean isModelCached(String modelId) { return localModelCache.get(modelAliasToId.getOrDefault(modelId, modelId)) != null; } + public ByteSizeValue getMaxCacheSize() { + return maxCacheSize; + } + + /** + * This method is intended for use in telemetry, not making decisions about what will fit in the cache. + * The value returned could immediately be out-of-date if cache changes are in progress. It is good + * enough for external reporting of vaguely up-to-date status, but not for anything related to immediate + * additions to the cache. + */ + public ByteSizeValue getCurrentCacheSize() { + return ByteSizeValue.ofBytes(localModelCache.weight()); + } + /** * Load the model for use by an ingest pipeline. The model will not be cached if there is no * ingest pipeline referencing it i.e. it is used in simulate mode diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/JobNodeSelector.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/JobNodeSelector.java index 1c7d34f83b06b..c5b1d0cb30c82 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/JobNodeSelector.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/JobNodeSelector.java @@ -217,7 +217,7 @@ public PersistentTasksCustomMetadata.Assignment selectNode( reason = createReason( jobId, nodeNameAndMlAttributes(node), - "This node is not providing accurate information to determine is load by memory." + "This node is not providing accurate information to determine its load by memory." 
); logger.trace(reason); reasons.put(node.getName(), reason); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/NodeLoad.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/NodeLoad.java index 143df0be852f6..15fafd3b5f220 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/NodeLoad.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/NodeLoad.java @@ -9,8 +9,8 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import org.apache.logging.log4j.message.ParameterizedMessage; import org.elasticsearch.core.Nullable; +import org.elasticsearch.xpack.core.ml.MlTasks; import org.elasticsearch.xpack.ml.process.MlMemoryTracker; import java.util.Objects; @@ -25,7 +25,10 @@ public class NodeLoad { private final boolean useMemory; private final String error; private final long numAssignedJobs; - private final long assignedJobMemory; + private final long assignedNativeCodeOverheadMemory; + private final long assignedAnomalyDetectorMemory; + private final long assignedDataFrameAnalyticsMemory; + private final long assignedNativeInferenceMemory; private final long numAllocatingJobs; NodeLoad( @@ -35,7 +38,10 @@ public class NodeLoad { boolean useMemory, String error, long numAssignedJobs, - long assignedJobMemory, + long assignedNativeCodeOverheadMemory, + long assignedAnomalyDetectorMemory, + long assignedDataFrameAnalyticsMemory, + long assignedNativeInferenceMemory, long numAllocatingJobs ) { this.maxMemory = maxMemory; @@ -44,7 +50,10 @@ public class NodeLoad { this.useMemory = useMemory; this.error = error; this.numAssignedJobs = numAssignedJobs; - this.assignedJobMemory = assignedJobMemory; + this.assignedNativeCodeOverheadMemory = assignedNativeCodeOverheadMemory; + this.assignedAnomalyDetectorMemory = assignedAnomalyDetectorMemory; + this.assignedDataFrameAnalyticsMemory = assignedDataFrameAnalyticsMemory; + this.assignedNativeInferenceMemory = assignedNativeInferenceMemory; this.numAllocatingJobs = numAllocatingJobs; } @@ -56,10 +65,39 @@ public long getNumAssignedJobs() { } /** - * @return The total memory in bytes used by the assigned jobs. + * @return The total memory in bytes used by all assigned jobs. */ public long getAssignedJobMemory() { - return assignedJobMemory; + return assignedNativeCodeOverheadMemory + assignedAnomalyDetectorMemory + assignedDataFrameAnalyticsMemory + + assignedNativeInferenceMemory; + } + + /** + * @return The native code overhead, if any, for native processes on this node. + */ + public long getAssignedNativeCodeOverheadMemory() { + return assignedNativeCodeOverheadMemory; + } + + /** + * @return The total memory in bytes used by the assigned anomaly detectors. + */ + public long getAssignedAnomalyDetectorMemory() { + return assignedAnomalyDetectorMemory; + } + + /** + * @return The total memory in bytes used by the assigned data frame analytics jobs. + */ + public long getAssignedDataFrameAnalyticsMemory() { + return assignedDataFrameAnalyticsMemory; + } + + /** + * @return The total memory in bytes used by the assigned native inference processes. 
+ */ + public long getAssignedNativeInferenceMemory() { + return assignedNativeInferenceMemory; } /** @@ -94,7 +132,7 @@ public String getNodeId() { * @return The available memory on the node */ public long getFreeMemory() { - return Math.max(maxMemory - assignedJobMemory, 0L); + return Math.max(maxMemory - getAssignedJobMemory(), 0L); } /** @@ -128,7 +166,10 @@ public boolean equals(Object o) { && maxJobs == nodeLoad.maxJobs && useMemory == nodeLoad.useMemory && numAssignedJobs == nodeLoad.numAssignedJobs - && assignedJobMemory == nodeLoad.assignedJobMemory + && assignedNativeCodeOverheadMemory == nodeLoad.assignedNativeCodeOverheadMemory + && assignedAnomalyDetectorMemory == nodeLoad.assignedAnomalyDetectorMemory + && assignedDataFrameAnalyticsMemory == nodeLoad.assignedDataFrameAnalyticsMemory + && assignedNativeInferenceMemory == nodeLoad.assignedNativeInferenceMemory && numAllocatingJobs == nodeLoad.numAllocatingJobs && Objects.equals(nodeId, nodeLoad.nodeId) && Objects.equals(error, nodeLoad.error); @@ -136,7 +177,19 @@ public boolean equals(Object o) { @Override public int hashCode() { - return Objects.hash(maxMemory, maxJobs, nodeId, useMemory, error, numAssignedJobs, assignedJobMemory, numAllocatingJobs); + return Objects.hash( + maxMemory, + maxJobs, + nodeId, + useMemory, + error, + numAssignedJobs, + assignedNativeCodeOverheadMemory, + assignedAnomalyDetectorMemory, + assignedDataFrameAnalyticsMemory, + assignedNativeInferenceMemory, + numAllocatingJobs + ); } public static Builder builder(String nodeId) { @@ -154,7 +207,10 @@ public static class Builder { private boolean useMemory; private String error; private long numAssignedJobs; - private long assignedJobMemory; + private long assignedNativeCodeOverheadMemory; + private long assignedAnomalyDetectorMemory; + private long assignedDataFrameAnalyticsMemory; + private long assignedNativeInferenceMemory; private long numAllocatingJobs; public Builder(NodeLoad nodeLoad) { @@ -164,7 +220,10 @@ public Builder(NodeLoad nodeLoad) { this.useMemory = nodeLoad.useMemory; this.error = nodeLoad.error; this.numAssignedJobs = nodeLoad.numAssignedJobs; - this.assignedJobMemory = nodeLoad.assignedJobMemory; + this.assignedNativeCodeOverheadMemory = nodeLoad.assignedNativeCodeOverheadMemory; + this.assignedAnomalyDetectorMemory = nodeLoad.assignedAnomalyDetectorMemory; + this.assignedDataFrameAnalyticsMemory = nodeLoad.assignedDataFrameAnalyticsMemory; + this.assignedNativeInferenceMemory = nodeLoad.assignedNativeInferenceMemory; this.numAllocatingJobs = nodeLoad.numAllocatingJobs; } @@ -173,7 +232,7 @@ public Builder(String nodeId) { } public long getFreeMemory() { - return Math.max(maxMemory - assignedJobMemory, 0L); + return Math.max(maxMemory - assignedAnomalyDetectorMemory, 0L); } public int remainingJobs() { @@ -217,8 +276,23 @@ public Builder incNumAssignedJobs() { return this; } - public Builder incAssignedJobMemory(long assignedJobMemory) { - this.assignedJobMemory += assignedJobMemory; + public Builder incAssignedNativeCodeOverheadMemory(long assignedNativeCodeOverheadMemory) { + this.assignedNativeCodeOverheadMemory += assignedNativeCodeOverheadMemory; + return this; + } + + public Builder incAssignedAnomalyDetectorMemory(long assignedAnomalyDetectorMemory) { + this.assignedAnomalyDetectorMemory += assignedAnomalyDetectorMemory; + return this; + } + + public Builder incAssignedDataFrameAnalyticsMemory(long assignedDataFrameAnalyticsMemory) { + this.assignedDataFrameAnalyticsMemory += assignedDataFrameAnalyticsMemory; + return this; 
+ } + + public Builder incAssignedNativeInferenceMemory(long assignedNativeInferenceMemory) { + this.assignedNativeInferenceMemory += assignedNativeInferenceMemory; return this; } @@ -235,19 +309,36 @@ void addTask(String taskName, String taskId, boolean isAllocating, MlMemoryTrack Long jobMemoryRequirement = memoryTracker.getJobMemoryRequirement(taskName, taskId); if (jobMemoryRequirement == null) { useMemory = false; - logger.debug( - () -> new ParameterizedMessage( - "[{}] memory requirement was not available. Calculating load by number of assigned jobs.", - taskId - ) - ); + logger.debug("[{}] task memory requirement was not available.", taskId); } else { - assignedJobMemory += jobMemoryRequirement; + switch (taskName) { + case MlTasks.JOB_TASK_NAME, MlTasks.JOB_SNAPSHOT_UPGRADE_TASK_NAME -> assignedAnomalyDetectorMemory += + jobMemoryRequirement; + case MlTasks.DATA_FRAME_ANALYTICS_TASK_NAME -> assignedDataFrameAnalyticsMemory += jobMemoryRequirement; + default -> { + assert false : "ML memory-requiring task name not handled: " + taskName; + // If this ever happens in production then this is better than nothing, but + // hopefully the assertion will mean we pick up any omission in testing + assignedAnomalyDetectorMemory += jobMemoryRequirement; + } + } } } public NodeLoad build() { - return new NodeLoad(maxMemory, maxJobs, nodeId, useMemory, error, numAssignedJobs, assignedJobMemory, numAllocatingJobs); + return new NodeLoad( + maxMemory, + maxJobs, + nodeId, + useMemory, + error, + numAssignedJobs, + assignedNativeCodeOverheadMemory, + assignedAnomalyDetectorMemory, + assignedDataFrameAnalyticsMemory, + assignedNativeInferenceMemory, + numAllocatingJobs + ); } } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/NodeLoadDetector.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/NodeLoadDetector.java index 2a8ef46d8e5c9..eed33e77a8e2d 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/NodeLoadDetector.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/NodeLoadDetector.java @@ -88,6 +88,11 @@ public NodeLoad detectNodeLoad( } updateLoadGivenTasks(nodeLoad, persistentTasks); updateLoadGivenModelAllocations(nodeLoad, allocationMetadata); + // if any processes are running then the native code will be loaded, but shared between all processes, + // so increase the total memory usage to account for this + if (nodeLoad.getNumAssignedJobs() > 0) { + nodeLoad.incAssignedNativeCodeOverheadMemory(MachineLearning.NATIVE_EXECUTABLE_CODE_OVERHEAD.getBytes()); + } return nodeLoad.build(); } @@ -105,12 +110,6 @@ private void updateLoadGivenTasks(NodeLoad.Builder nodeLoad, PersistentTasksCust nodeLoad.addTask(task.getTaskName(), taskParams.getMlId(), state.isAllocating(), mlMemoryTracker); } } - - // if any jobs are running then the native code will be loaded, but shared between all jobs, - // so increase the total memory usage of the assigned jobs to account for this - if (nodeLoad.getNumAssignedJobs() > 0) { - nodeLoad.incAssignedJobMemory(MachineLearning.NATIVE_EXECUTABLE_CODE_OVERHEAD.getBytes()); - } } } @@ -122,7 +121,7 @@ private void updateLoadGivenModelAllocations(NodeLoad.Builder nodeLoad, TrainedM .orElse(RoutingState.STOPPED) .consumesMemory()) { nodeLoad.incNumAssignedJobs(); - nodeLoad.incAssignedJobMemory(allocation.getTaskParams().estimateMemoryUsageBytes()); + nodeLoad.incAssignedNativeInferenceMemory(allocation.getTaskParams().estimateMemoryUsageBytes()); } } } diff --git 
a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/process/MlMemoryTracker.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/process/MlMemoryTracker.java index 701abf89ae816..666ed4ce29487 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/process/MlMemoryTracker.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/process/MlMemoryTracker.java @@ -210,6 +210,16 @@ public boolean isRecentlyRefreshed(Duration customDuration) { && localLastUpdateTime.plus(RECENT_UPDATE_THRESHOLD).plus(customDuration).isAfter(Instant.now()); } + /** + * Has the information in this object ever been refreshed? + * This method is intended for use when it's not essential to + * have the most up-to-date information, but having some + * information is necessary, for example in telemetry. + */ + public boolean isEverRefreshed() { + return isMaster && lastUpdateTime != null; + } + /** * Get the memory requirement for an anomaly detector job. * This method only works on the master node. diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/RestMlMemoryAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/RestMlMemoryAction.java new file mode 100644 index 0000000000000..0cc76216936e5 --- /dev/null +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/RestMlMemoryAction.java @@ -0,0 +1,51 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.ml.rest; + +import org.elasticsearch.client.internal.node.NodeClient; +import org.elasticsearch.cluster.metadata.Metadata; +import org.elasticsearch.common.Strings; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.action.RestActions.NodesResponseRestListener; +import org.elasticsearch.xpack.core.ml.action.MlMemoryAction; + +import java.io.IOException; +import java.util.List; + +import static org.elasticsearch.rest.RestRequest.Method.GET; +import static org.elasticsearch.xpack.ml.MachineLearning.BASE_PATH; + +public class RestMlMemoryAction extends BaseRestHandler { + + public static final String NODE_ID = "nodeId"; + public static final String MASTER_TIMEOUT = "master_timeout"; + public static final String TIMEOUT = "timeout"; + + @Override + public List routes() { + return List.of(new Route(GET, BASE_PATH + "memory/{" + NODE_ID + "}/_stats"), new Route(GET, BASE_PATH + "memory/_stats")); + } + + @Override + public String getName() { + return "ml_memory_action"; + } + + @Override + protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException { + String nodeId = restRequest.param(NODE_ID); + if (Strings.isNullOrEmpty(nodeId)) { + nodeId = Metadata.ALL; + } + MlMemoryAction.Request request = new MlMemoryAction.Request(nodeId); + request.masterNodeTimeout(restRequest.paramAsTime(MASTER_TIMEOUT, request.masterNodeTimeout())); + request.timeout(restRequest.paramAsTime(TIMEOUT, request.timeout())); + return channel -> client.execute(MlMemoryAction.INSTANCE, request, new NodesResponseRestListener<>(channel)); + } +} diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/autoscaling/MlAutoscalingDeciderServiceTests.java 
b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/autoscaling/MlAutoscalingDeciderServiceTests.java index 716efb04b6c40..82f7dd71cdec9 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/autoscaling/MlAutoscalingDeciderServiceTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/autoscaling/MlAutoscalingDeciderServiceTests.java @@ -121,7 +121,7 @@ public void setup() { nodeLoadDetector = mock(NodeLoadDetector.class); when(nodeLoadDetector.getMlMemoryTracker()).thenReturn(mlMemoryTracker); when(nodeLoadDetector.detectNodeLoad(any(), any(), anyInt(), anyInt(), anyBoolean())).thenReturn( - NodeLoad.builder("any").setUseMemory(true).incAssignedJobMemory(ByteSizeValue.ofGb(1).getBytes()).build() + NodeLoad.builder("any").setUseMemory(true).incAssignedAnomalyDetectorMemory(ByteSizeValue.ofGb(1).getBytes()).build() ); clusterService = mock(ClusterService.class); settings = Settings.EMPTY; @@ -148,7 +148,7 @@ public void testScalingEdgeCase() { NodeLoad.builder("any") .setMaxMemory(432013312) .setUseMemory(true) - .incAssignedJobMemory( + .incAssignedAnomalyDetectorMemory( (long) (168.7 * 1024 + 0.5) + (long) (1.4 * 1024 * 1024 + 0.5) + ByteSizeValue.ofMb(256).getBytes() + Job.PROCESS_MEMORY_OVERHEAD.getBytes() * 3 ) @@ -188,7 +188,7 @@ public void testScalingEdgeCase() { NodeLoad.builder("any") .setMaxMemory(bytesForML) .setUseMemory(true) - .incAssignedJobMemory( + .incAssignedAnomalyDetectorMemory( (long) (168.7 * 1024 + 0.5) + (long) (1.4 * 1024 * 1024 + 0.5) + ByteSizeValue.ofMb(256).getBytes() + ByteSizeValue .ofMb(128) .getBytes() + Job.PROCESS_MEMORY_OVERHEAD.getBytes() * 4 @@ -232,8 +232,8 @@ public void testScaleStability() { while (forScaleUp.getFreeMemory() > Job.PROCESS_MEMORY_OVERHEAD.getBytes()) { long jobSize = randomLongBetween(Job.PROCESS_MEMORY_OVERHEAD.getBytes(), forScaleUp.getFreeMemory()); maxJob = Math.max(jobSize, maxJob); - forScaleUp.incNumAssignedJobs().incAssignedJobMemory(jobSize); - forScaleDown.incNumAssignedJobs().incAssignedJobMemory(jobSize); + forScaleUp.incNumAssignedJobs().incAssignedAnomalyDetectorMemory(jobSize); + forScaleDown.incNumAssignedJobs().incAssignedAnomalyDetectorMemory(jobSize); } // Create jobs for scale up NodeLoad nodeLoadForScaleUp = forScaleUp.build(); @@ -244,7 +244,7 @@ public void testScaleStability() { break; } maxJob = Math.max(jobSize, maxJob); - forScaleDown.incNumAssignedJobs().incAssignedJobMemory(jobSize); + forScaleDown.incNumAssignedJobs().incAssignedAnomalyDetectorMemory(jobSize); String waitingJob = randomAlphaOfLength(10); when(mlMemoryTracker.getAnomalyDetectorJobMemoryRequirement(eq(waitingJob))).thenReturn(jobSize); waitingJobs.add(waitingJob); @@ -345,7 +345,7 @@ public void testScaleUp_withWaitingJobsAndAutoMemoryAndNoRoomInNodes() { NodeLoad.builder("any") .setMaxMemory(ByteSizeValue.ofGb(1).getBytes()) .setUseMemory(true) - .incAssignedJobMemory(ByteSizeValue.ofGb(1).getBytes()) + .incAssignedAnomalyDetectorMemory(ByteSizeValue.ofGb(1).getBytes()) .build() ); MlScalingReason.Builder reasonBuilder = new MlScalingReason.Builder().setPassedConfiguration(Settings.EMPTY) @@ -460,7 +460,7 @@ public void testScaleUp_withWaitingSnapshotUpgradesAndAutoMemoryAndNoRoomInNodes NodeLoad.builder("any") .setMaxMemory(ByteSizeValue.ofGb(1).getBytes()) .setUseMemory(true) - .incAssignedJobMemory(ByteSizeValue.ofGb(1).getBytes()) + .incAssignedAnomalyDetectorMemory(ByteSizeValue.ofGb(1).getBytes()) .build() ); MlScalingReason.Builder reasonBuilder = new 
MlScalingReason.Builder().setPassedConfiguration(Settings.EMPTY) @@ -570,7 +570,7 @@ public void testScaleUp_withWaitingJobsAndRoomInNodes() { .setUseMemory(true) .setMaxJobs(10) .incNumAssignedJobs() - .incAssignedJobMemory(ByteSizeValue.ofMb(230).getBytes()) + .incAssignedAnomalyDetectorMemory(ByteSizeValue.ofMb(230).getBytes()) .build(), NodeLoad.builder("not_filled").setMaxMemory(ByteSizeValue.ofMb(230).getBytes()).setMaxJobs(10).setUseMemory(true).build() ); @@ -634,7 +634,7 @@ public void testScaleUp_withWaitingJobsAndNoRoomInNodes() { NodeLoad.builder("any") .setMaxMemory(ByteSizeValue.ofGb(1).getBytes()) .setUseMemory(true) - .incAssignedJobMemory(ByteSizeValue.ofGb(1).getBytes()) + .incAssignedAnomalyDetectorMemory(ByteSizeValue.ofGb(1).getBytes()) .build() ); MlAutoscalingDeciderService service = buildService(); @@ -701,7 +701,7 @@ public void testScaleUp_withWaitingJobs_WithFutureCapacity() { NodeLoad.builder("any") .setMaxMemory(ByteSizeValue.ofGb(1).getBytes()) .setUseMemory(true) - .incAssignedJobMemory(ByteSizeValue.ofGb(1).getBytes()) + .incAssignedAnomalyDetectorMemory(ByteSizeValue.ofGb(1).getBytes()) .build() ); MlAutoscalingDeciderService service = buildService(); @@ -763,7 +763,7 @@ public void testScaleUp_withWaitingModelAndAutoMemoryAndNoRoomInNodes() { NodeLoad.builder("any") .setMaxMemory(ByteSizeValue.ofGb(1).getBytes()) .setUseMemory(true) - .incAssignedJobMemory(ByteSizeValue.ofGb(1).getBytes()) + .incAssignedAnomalyDetectorMemory(ByteSizeValue.ofGb(1).getBytes()) .build() ); MlScalingReason.Builder reasonBuilder = new MlScalingReason.Builder().setPassedConfiguration(Settings.EMPTY) @@ -809,7 +809,7 @@ public void testScaleUp_withWaitingModelsAndRoomInNodes() { .setUseMemory(true) .setMaxJobs(10) .incNumAssignedJobs() - .incAssignedJobMemory(ByteSizeValue.ofMb(230).getBytes()) + .incAssignedAnomalyDetectorMemory(ByteSizeValue.ofMb(230).getBytes()) .build(), NodeLoad.builder("not_filled").setMaxMemory(ByteSizeValue.ofMb(230).getBytes()).setMaxJobs(10).setUseMemory(true).build() ); @@ -848,9 +848,18 @@ public void testScaleUp_withWaitingModelsAndRoomInNodes() { public void testScaleDown() { List nodeLoads = Arrays.asList( - NodeLoad.builder("foo").setMaxMemory(DEFAULT_NODE_SIZE).incAssignedJobMemory(ByteSizeValue.ofGb(1).getBytes()).build(), - NodeLoad.builder("bar").setMaxMemory(DEFAULT_NODE_SIZE).incAssignedJobMemory(ByteSizeValue.ofGb(1).getBytes()).build(), - NodeLoad.builder("baz").setMaxMemory(DEFAULT_NODE_SIZE).incAssignedJobMemory(ByteSizeValue.ofGb(1).getBytes()).build() + NodeLoad.builder("foo") + .setMaxMemory(DEFAULT_NODE_SIZE) + .incAssignedAnomalyDetectorMemory(ByteSizeValue.ofGb(1).getBytes()) + .build(), + NodeLoad.builder("bar") + .setMaxMemory(DEFAULT_NODE_SIZE) + .incAssignedAnomalyDetectorMemory(ByteSizeValue.ofGb(1).getBytes()) + .build(), + NodeLoad.builder("baz") + .setMaxMemory(DEFAULT_NODE_SIZE) + .incAssignedAnomalyDetectorMemory(ByteSizeValue.ofGb(1).getBytes()) + .build() ); MlAutoscalingDeciderService service = buildService(); diff --git a/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java b/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java index c3e8ba51bc369..d535c395baf09 100644 --- a/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java +++ 
b/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java @@ -245,6 +245,8 @@ public class Constants { "cluster:internal/xpack/ml/model_allocation/delete", "cluster:internal/xpack/ml/model_allocation/update", "cluster:internal/xpack/ml/reset_mode", + "cluster:internal/xpack/ml/trained_models/cache/info", + "cluster:internal/xpack/ml/trained_models/deployments/stats/get", "cluster:internal/xpack/transform/reset_mode", "cluster:monitor/allocation/explain", "cluster:monitor/async_search/status", @@ -317,8 +319,8 @@ public class Constants { "cluster:monitor/xpack/ml/job/results/overall_buckets/get", "cluster:monitor/xpack/ml/job/results/records/get", "cluster:monitor/xpack/ml/job/stats/get", + "cluster:monitor/xpack/ml/memory/stats/get", "cluster:monitor/xpack/ml/trained_models/deployment/infer", - "cluster:internal/xpack/ml/trained_models/deployments/stats/get", "cluster:monitor/xpack/repositories_metering/clear_metering_archive", "cluster:monitor/xpack/repositories_metering/get_metrics", "cluster:monitor/xpack/rollup/get", diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/get_memory_stats.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/get_memory_stats.yml new file mode 100644 index 0000000000000..84f5d13885a7d --- /dev/null +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/get_memory_stats.yml @@ -0,0 +1,110 @@ +# The three tests call the API in different ways but all get the +# same result as they run against a single node cluster +--- +"ML memory all nodes": + - skip: + features: [arbitrary_key] + - do: + ml.get_memory_stats: {} + - set: + nodes._arbitrary_key_: node_id + + - do: + ml.get_memory_stats: {} + + - match: { _nodes.total: 1 } + - match: { _nodes.successful: 1 } + - match: { _nodes.failed: 0 } + - is_true: cluster_name + - is_true: nodes.$node_id.name + - is_true: nodes.$node_id.ephemeral_id + - is_true: nodes.$node_id.transport_address + - is_true: nodes.$node_id.attributes + - is_true: nodes.$node_id.roles + - gt: { nodes.$node_id.mem.total_in_bytes: 0 } + - gt: { nodes.$node_id.mem.adjusted_total_in_bytes: 0 } + - gt: { nodes.$node_id.mem.ml.max_in_bytes: 0 } + - match: { nodes.$node_id.mem.ml.native_code_overhead_in_bytes: 0 } + - match: { nodes.$node_id.mem.ml.anomaly_detectors_in_bytes: 0 } + - match: { nodes.$node_id.mem.ml.data_frame_analytics_in_bytes: 0 } + - match: { nodes.$node_id.mem.ml.native_inference_in_bytes: 0 } + - gt: { nodes.$node_id.jvm.heap_max_in_bytes: 0 } + - gt: { nodes.$node_id.jvm.java_inference_max_in_bytes: 0 } + # This next one has to be >= 0 rather than 0 because the cache is invalidated + # lazily after models are no longer in use, and previous tests could have + # caused a model to be cached + - gte: { nodes.$node_id.jvm.java_inference_in_bytes: 0 } + +--- +"ML memory for ML nodes": + - skip: + features: [arbitrary_key] + - do: + ml.get_memory_stats: {} + - set: + nodes._arbitrary_key_: node_id + + - do: + ml.get_memory_stats: + node_id: "ml:true" + master_timeout: "1m" + + - match: { _nodes.total: 1 } + - match: { _nodes.successful: 1 } + - match: { _nodes.failed: 0 } + - is_true: cluster_name + - is_true: nodes.$node_id.name + - is_true: nodes.$node_id.ephemeral_id + - is_true: nodes.$node_id.transport_address + - is_true: nodes.$node_id.attributes + - is_true: nodes.$node_id.roles + - gt: { nodes.$node_id.mem.total_in_bytes: 0 } + - gt: { nodes.$node_id.mem.adjusted_total_in_bytes: 0 } + - gt: { 
nodes.$node_id.mem.ml.max_in_bytes: 0 }
+ - match: { nodes.$node_id.mem.ml.native_code_overhead_in_bytes: 0 }
+ - match: { nodes.$node_id.mem.ml.anomaly_detectors_in_bytes: 0 }
+ - match: { nodes.$node_id.mem.ml.data_frame_analytics_in_bytes: 0 }
+ - match: { nodes.$node_id.mem.ml.native_inference_in_bytes: 0 }
+ - gt: { nodes.$node_id.jvm.heap_max_in_bytes: 0 }
+ - gt: { nodes.$node_id.jvm.java_inference_max_in_bytes: 0 }
+ # This next one has to be >= 0 rather than 0 because the cache is invalidated
+ # lazily after models are no longer in use, and previous tests could have
+ # caused a model to be cached
+ - gte: { nodes.$node_id.jvm.java_inference_in_bytes: 0 }
+
+---
+"ML memory for specific node":
+ - skip:
+ features: [arbitrary_key]
+ - do:
+ ml.get_memory_stats: {}
+ - set:
+ nodes._arbitrary_key_: node_id
+
+ - do:
+ ml.get_memory_stats:
+ node_id: $node_id
+ timeout: "29s"
+
+ - match: { _nodes.total: 1 }
+ - match: { _nodes.successful: 1 }
+ - match: { _nodes.failed: 0 }
+ - is_true: cluster_name
+ - is_true: nodes.$node_id.name
+ - is_true: nodes.$node_id.ephemeral_id
+ - is_true: nodes.$node_id.transport_address
+ - is_true: nodes.$node_id.attributes
+ - is_true: nodes.$node_id.roles
+ - gt: { nodes.$node_id.mem.total_in_bytes: 0 }
+ - gt: { nodes.$node_id.mem.adjusted_total_in_bytes: 0 }
+ - gt: { nodes.$node_id.mem.ml.max_in_bytes: 0 }
+ - match: { nodes.$node_id.mem.ml.native_code_overhead_in_bytes: 0 }
+ - match: { nodes.$node_id.mem.ml.anomaly_detectors_in_bytes: 0 }
+ - match: { nodes.$node_id.mem.ml.data_frame_analytics_in_bytes: 0 }
+ - match: { nodes.$node_id.mem.ml.native_inference_in_bytes: 0 }
+ - gt: { nodes.$node_id.jvm.heap_max_in_bytes: 0 }
+ - gt: { nodes.$node_id.jvm.java_inference_max_in_bytes: 0 }
+ # This next one has to be >= 0 rather than 0 because the cache is invalidated
+ # lazily after models are no longer in use, and previous tests could have
+ # caused a model to be cached
+ - gte: { nodes.$node_id.jvm.java_inference_in_bytes: 0 }

From 35c9258e46c4865efc0efeeec1beaadeee9469ec Mon Sep 17 00:00:00 2001
From: Luca Cavanna
Date: Thu, 17 Feb 2022 11:00:25 +0100
Subject: [PATCH 147/167] Refactor FilterXContentParser and DelegatingXContentParser (#83457)

We have two implementations of XContentParser that delegate all of their
methods to an inner parser: one takes the delegate as a constructor argument
(FilterXContentParser), while the other obtains it dynamically by overriding
the delegate method (DelegatingXContentParser). Effectively the two classes do
exactly the same thing; the only difference is how the delegate parser is
provided. Although they are currently two separate implementations, one can
inherit from the other. With this change we make FilterXContentParser take
over the role of the previous DelegatingXContentParser, which allows
subclasses to override the delegate method, and we introduce a new
FilterXContentParserWrapper that takes a fixed delegate as a constructor
argument. Additionally, XContentSubParser is rewritten to extend
FilterXContentParserWrapper.
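For context, the resulting hierarchy can be sketched as follows. This is an
illustration only: Parser is a hypothetical stand-in for the XContentParser
interface, reduced here to a single delegated method.

// Hypothetical stand-in for XContentParser, reduced to one method.
interface Parser {
    String currentName();
}

// Dynamic delegation: subclasses decide, on every call, which parser to use.
abstract class FilterParser implements Parser {
    protected abstract Parser delegate();

    @Override
    public String currentName() {
        return delegate().currentName();
    }
}

// Fixed delegation: the delegate is supplied once, at construction time.
class FilterParserWrapper extends FilterParser {
    private final Parser delegate;

    FilterParserWrapper(Parser delegate) {
        this.delegate = delegate;
    }

    @Override
    protected final Parser delegate() {
        return delegate;
    }
}

The wrapper variant covers the common case with less boilerplate, while the
abstract base remains available for parsers whose delegate changes over time.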
--- .../xcontent/DelegatingXContentParser.java | 244 ------------------ .../xcontent/DotExpandingXContentParser.java | 16 +- .../xcontent/FilterXContentParser.java | 98 ++++--- .../xcontent/FilterXContentParserWrapper.java | 25 ++ .../xcontent/XContentSubParser.java | 217 +--------------- .../index/mapper/CompletionFieldMapper.java | 6 +- .../index/mapper/DocumentParserContext.java | 8 +- .../index/mapper/GeoPointFieldMapper.java | 4 +- .../support/AbstractFilteringTestCase.java | 4 +- .../xcontent/WatcherXContentParser.java | 4 +- 10 files changed, 98 insertions(+), 528 deletions(-) delete mode 100644 libs/x-content/src/main/java/org/elasticsearch/xcontent/DelegatingXContentParser.java create mode 100644 libs/x-content/src/main/java/org/elasticsearch/xcontent/FilterXContentParserWrapper.java diff --git a/libs/x-content/src/main/java/org/elasticsearch/xcontent/DelegatingXContentParser.java b/libs/x-content/src/main/java/org/elasticsearch/xcontent/DelegatingXContentParser.java deleted file mode 100644 index 1a87920947db1..0000000000000 --- a/libs/x-content/src/main/java/org/elasticsearch/xcontent/DelegatingXContentParser.java +++ /dev/null @@ -1,244 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ - -package org.elasticsearch.xcontent; - -import org.elasticsearch.core.CheckedFunction; -import org.elasticsearch.core.RestApiVersion; - -import java.io.IOException; -import java.nio.CharBuffer; -import java.util.List; -import java.util.Map; -import java.util.function.Supplier; - -public abstract class DelegatingXContentParser implements XContentParser { - - protected abstract XContentParser delegate(); - - @Override - public XContentType contentType() { - return delegate().contentType(); - } - - @Override - public void allowDuplicateKeys(boolean allowDuplicateKeys) { - delegate().allowDuplicateKeys(allowDuplicateKeys); - } - - @Override - public Token nextToken() throws IOException { - return delegate().nextToken(); - } - - @Override - public void skipChildren() throws IOException { - delegate().skipChildren(); - } - - @Override - public Token currentToken() { - return delegate().currentToken(); - } - - @Override - public String currentName() throws IOException { - return delegate().currentName(); - } - - @Override - public Map map() throws IOException { - return delegate().map(); - } - - @Override - public Map mapOrdered() throws IOException { - return delegate().mapOrdered(); - } - - @Override - public Map mapStrings() throws IOException { - return delegate().mapStrings(); - } - - @Override - public Map map(Supplier> mapFactory, CheckedFunction mapValueParser) - throws IOException { - return delegate().map(mapFactory, mapValueParser); - } - - @Override - public List list() throws IOException { - return delegate().list(); - } - - @Override - public List listOrderedMap() throws IOException { - return delegate().listOrderedMap(); - } - - @Override - public String text() throws IOException { - return delegate().text(); - } - - @Override - public String textOrNull() throws IOException { - return delegate().textOrNull(); - } - - @Override - public CharBuffer charBufferOrNull() throws IOException { - return delegate().charBufferOrNull(); - } - - @Override - public CharBuffer 
charBuffer() throws IOException { - return delegate().charBuffer(); - } - - @Override - public Object objectText() throws IOException { - return delegate().objectText(); - } - - @Override - public Object objectBytes() throws IOException { - return delegate().objectBytes(); - } - - @Override - public boolean hasTextCharacters() { - return delegate().hasTextCharacters(); - } - - @Override - public char[] textCharacters() throws IOException { - return delegate().textCharacters(); - } - - @Override - public int textLength() throws IOException { - return delegate().textLength(); - } - - @Override - public int textOffset() throws IOException { - return delegate().textOffset(); - } - - @Override - public Number numberValue() throws IOException { - return delegate().numberValue(); - } - - @Override - public NumberType numberType() throws IOException { - return delegate().numberType(); - } - - @Override - public short shortValue(boolean coerce) throws IOException { - return delegate().shortValue(coerce); - } - - @Override - public int intValue(boolean coerce) throws IOException { - return delegate().intValue(coerce); - } - - @Override - public long longValue(boolean coerce) throws IOException { - return delegate().longValue(coerce); - } - - @Override - public float floatValue(boolean coerce) throws IOException { - return delegate().floatValue(coerce); - } - - @Override - public double doubleValue(boolean coerce) throws IOException { - return delegate().doubleValue(coerce); - } - - @Override - public short shortValue() throws IOException { - return delegate().shortValue(); - } - - @Override - public int intValue() throws IOException { - return delegate().intValue(); - } - - @Override - public long longValue() throws IOException { - return delegate().longValue(); - } - - @Override - public float floatValue() throws IOException { - return delegate().floatValue(); - } - - @Override - public double doubleValue() throws IOException { - return delegate().doubleValue(); - } - - @Override - public boolean isBooleanValue() throws IOException { - return delegate().isBooleanValue(); - } - - @Override - public boolean booleanValue() throws IOException { - return delegate().booleanValue(); - } - - @Override - public byte[] binaryValue() throws IOException { - return delegate().binaryValue(); - } - - @Override - public XContentLocation getTokenLocation() { - return delegate().getTokenLocation(); - } - - @Override - public T namedObject(Class categoryClass, String name, Object context) throws IOException { - return delegate().namedObject(categoryClass, name, context); - } - - @Override - public NamedXContentRegistry getXContentRegistry() { - return delegate().getXContentRegistry(); - } - - @Override - public boolean isClosed() { - return delegate().isClosed(); - } - - @Override - public RestApiVersion getRestApiVersion() { - return delegate().getRestApiVersion(); - } - - @Override - public DeprecationHandler getDeprecationHandler() { - return delegate().getDeprecationHandler(); - } - - @Override - public void close() throws IOException { - delegate().close(); - } -} diff --git a/libs/x-content/src/main/java/org/elasticsearch/xcontent/DotExpandingXContentParser.java b/libs/x-content/src/main/java/org/elasticsearch/xcontent/DotExpandingXContentParser.java index f352143979806..cc0f8a13da37e 100644 --- a/libs/x-content/src/main/java/org/elasticsearch/xcontent/DotExpandingXContentParser.java +++ b/libs/x-content/src/main/java/org/elasticsearch/xcontent/DotExpandingXContentParser.java @@ -17,9 +17,9 @@ * * A fieldname 
named {@code "foo.bar.baz":...} will be parsed instead as {@code 'foo':{'bar':{'baz':...}}}
 */
-public class DotExpandingXContentParser extends FilterXContentParser {
+public class DotExpandingXContentParser extends FilterXContentParserWrapper {
- private static class WrappingParser extends DelegatingXContentParser {
+ private static final class WrappingParser extends FilterXContentParser {
 final Deque parsers = new ArrayDeque<>();
@@ -135,7 +135,7 @@ public Token nextToken() throws IOException {
 assert expandedTokens < subPaths.length * 2;
 if (expandedTokens == subPaths.length * 2 - 1) {
 state = State.PARSING_ORIGINAL_CONTENT;
- Token token = in.currentToken();
+ Token token = delegate().currentToken();
 if (token == Token.START_OBJECT || token == Token.START_ARRAY) {
 innerLevel++;
 }
@@ -170,7 +170,7 @@ public Token currentToken() {
 return switch (state) {
 case EXPANDING_START_OBJECT -> expandedTokens % 2 == 1 ? Token.START_OBJECT : Token.FIELD_NAME;
 case ENDING_EXPANDED_OBJECT -> Token.END_OBJECT;
- case PARSING_ORIGINAL_CONTENT -> in.currentToken();
+ case PARSING_ORIGINAL_CONTENT -> delegate().currentToken();
 };
 }
@@ -181,14 +181,14 @@ public String currentName() throws IOException {
 // whenever we are parsing some inner object/array we can easily delegate to the inner parser
 // e.g. field.with.dots: { obj:{ parsing here } }
 if (innerLevel > 0) {
- return in.currentName();
+ return delegate().currentName();
 }
 Token token = currentToken();
 // if we are parsing the outer object/array, only at the start object/array we need to return
 // e.g. dots instead of field.with.dots otherwise we can simply delegate to the inner parser
 // which will do the right thing
 if (innerLevel == 0 && token != Token.START_OBJECT && token != Token.START_ARRAY) {
- return in.currentName();
+ return delegate().currentName();
 }
 // note that innerLevel can be -1 if there are no inner object/array e.g. field.with.dots: value
 // as well as while there is and we are parsing their END_OBJECT or END_ARRAY
@@ -199,7 +199,7 @@ public String currentName() throws IOException {
 @Override
 public void skipChildren() throws IOException {
 if (state == State.EXPANDING_START_OBJECT) {
- in.skipChildren();
+ delegate().skipChildren();
 state = State.ENDING_EXPANDED_OBJECT;
 }
 if (state == State.PARSING_ORIGINAL_CONTENT) {
@@ -231,7 +231,7 @@ public boolean booleanValue() throws IOException {
 return super.booleanValue();
 }
- private static class SingletonValueXContentParser extends FilterXContentParser {
+ private static class SingletonValueXContentParser extends FilterXContentParserWrapper {
 protected SingletonValueXContentParser(XContentParser in) {
 super(in);
diff --git a/libs/x-content/src/main/java/org/elasticsearch/xcontent/FilterXContentParser.java b/libs/x-content/src/main/java/org/elasticsearch/xcontent/FilterXContentParser.java
index 877c3daeff636..62c93ba91ef45 100644
--- a/libs/x-content/src/main/java/org/elasticsearch/xcontent/FilterXContentParser.java
+++ b/libs/x-content/src/main/java/org/elasticsearch/xcontent/FilterXContentParser.java
@@ -18,234 +18,232 @@ import java.util.function.Supplier;
 /**
- * Filters an existing XContentParser by using a delegate
+ * Delegates every method to the parser returned by the {@link #delegate()} method.
+ * To be extended directly when the delegate parser may change dynamically.
+ * Extend {@link FilterXContentParserWrapper} instead when the delegate is fixed and can be provided at construction time.
*/ public abstract class FilterXContentParser implements XContentParser { - protected final XContentParser in; - - protected FilterXContentParser(XContentParser in) { - this.in = in; - } + protected abstract XContentParser delegate(); @Override public XContentType contentType() { - return in.contentType(); + return delegate().contentType(); } @Override public void allowDuplicateKeys(boolean allowDuplicateKeys) { - in.allowDuplicateKeys(allowDuplicateKeys); + delegate().allowDuplicateKeys(allowDuplicateKeys); } @Override public Token nextToken() throws IOException { - return in.nextToken(); + return delegate().nextToken(); } @Override public void skipChildren() throws IOException { - in.skipChildren(); + delegate().skipChildren(); } @Override public Token currentToken() { - return in.currentToken(); + return delegate().currentToken(); } @Override public String currentName() throws IOException { - return in.currentName(); + return delegate().currentName(); } @Override public Map map() throws IOException { - return in.map(); + return delegate().map(); } @Override public Map mapOrdered() throws IOException { - return in.mapOrdered(); + return delegate().mapOrdered(); } @Override public Map mapStrings() throws IOException { - return in.mapStrings(); + return delegate().mapStrings(); } @Override public Map map(Supplier> mapFactory, CheckedFunction mapValueParser) throws IOException { - return in.map(mapFactory, mapValueParser); + return delegate().map(mapFactory, mapValueParser); } @Override public List list() throws IOException { - return in.list(); + return delegate().list(); } @Override public List listOrderedMap() throws IOException { - return in.listOrderedMap(); + return delegate().listOrderedMap(); } @Override public String text() throws IOException { - return in.text(); + return delegate().text(); } @Override public String textOrNull() throws IOException { - return in.textOrNull(); + return delegate().textOrNull(); } @Override public CharBuffer charBufferOrNull() throws IOException { - return in.charBufferOrNull(); + return delegate().charBufferOrNull(); } @Override public CharBuffer charBuffer() throws IOException { - return in.charBuffer(); + return delegate().charBuffer(); } @Override public Object objectText() throws IOException { - return in.objectText(); + return delegate().objectText(); } @Override public Object objectBytes() throws IOException { - return in.objectBytes(); + return delegate().objectBytes(); } @Override public boolean hasTextCharacters() { - return in.hasTextCharacters(); + return delegate().hasTextCharacters(); } @Override public char[] textCharacters() throws IOException { - return in.textCharacters(); + return delegate().textCharacters(); } @Override public int textLength() throws IOException { - return in.textLength(); + return delegate().textLength(); } @Override public int textOffset() throws IOException { - return in.textOffset(); + return delegate().textOffset(); } @Override public Number numberValue() throws IOException { - return in.numberValue(); + return delegate().numberValue(); } @Override public NumberType numberType() throws IOException { - return in.numberType(); + return delegate().numberType(); } @Override public short shortValue(boolean coerce) throws IOException { - return in.shortValue(coerce); + return delegate().shortValue(coerce); } @Override public int intValue(boolean coerce) throws IOException { - return in.intValue(coerce); + return delegate().intValue(coerce); } @Override public long longValue(boolean coerce) throws IOException { - 
return in.longValue(coerce); + return delegate().longValue(coerce); } @Override public float floatValue(boolean coerce) throws IOException { - return in.floatValue(coerce); + return delegate().floatValue(coerce); } @Override public double doubleValue(boolean coerce) throws IOException { - return in.doubleValue(coerce); + return delegate().doubleValue(coerce); } @Override public short shortValue() throws IOException { - return in.shortValue(); + return delegate().shortValue(); } @Override public int intValue() throws IOException { - return in.intValue(); + return delegate().intValue(); } @Override public long longValue() throws IOException { - return in.longValue(); + return delegate().longValue(); } @Override public float floatValue() throws IOException { - return in.floatValue(); + return delegate().floatValue(); } @Override public double doubleValue() throws IOException { - return in.doubleValue(); + return delegate().doubleValue(); } @Override public boolean isBooleanValue() throws IOException { - return in.isBooleanValue(); + return delegate().isBooleanValue(); } @Override public boolean booleanValue() throws IOException { - return in.booleanValue(); + return delegate().booleanValue(); } @Override public byte[] binaryValue() throws IOException { - return in.binaryValue(); + return delegate().binaryValue(); } @Override public XContentLocation getTokenLocation() { - return in.getTokenLocation(); + return delegate().getTokenLocation(); } @Override public T namedObject(Class categoryClass, String name, Object context) throws IOException { - return in.namedObject(categoryClass, name, context); + return delegate().namedObject(categoryClass, name, context); } @Override public NamedXContentRegistry getXContentRegistry() { - return in.getXContentRegistry(); + return delegate().getXContentRegistry(); } @Override public boolean isClosed() { - return in.isClosed(); + return delegate().isClosed(); } @Override public void close() throws IOException { - in.close(); + delegate().close(); } @Override public RestApiVersion getRestApiVersion() { - return in.getRestApiVersion(); + return delegate().getRestApiVersion(); } @Override public DeprecationHandler getDeprecationHandler() { - return in.getDeprecationHandler(); + return delegate().getDeprecationHandler(); } } diff --git a/libs/x-content/src/main/java/org/elasticsearch/xcontent/FilterXContentParserWrapper.java b/libs/x-content/src/main/java/org/elasticsearch/xcontent/FilterXContentParserWrapper.java new file mode 100644 index 0000000000000..8436d073155e9 --- /dev/null +++ b/libs/x-content/src/main/java/org/elasticsearch/xcontent/FilterXContentParserWrapper.java @@ -0,0 +1,25 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.xcontent; + +/** + * Wraps the provided {@link XContentParser} and delegates to it. 
+ */ +public class FilterXContentParserWrapper extends FilterXContentParser { + private final XContentParser delegate; + + public FilterXContentParserWrapper(XContentParser delegate) { + this.delegate = delegate; + } + + @Override + protected final XContentParser delegate() { + return delegate; + } +} diff --git a/libs/x-content/src/main/java/org/elasticsearch/xcontent/XContentSubParser.java b/libs/x-content/src/main/java/org/elasticsearch/xcontent/XContentSubParser.java index 23285167cc750..851263d18d5d7 100644 --- a/libs/x-content/src/main/java/org/elasticsearch/xcontent/XContentSubParser.java +++ b/libs/x-content/src/main/java/org/elasticsearch/xcontent/XContentSubParser.java @@ -8,14 +8,7 @@ package org.elasticsearch.xcontent; -import org.elasticsearch.core.CheckedFunction; -import org.elasticsearch.core.RestApiVersion; - import java.io.IOException; -import java.nio.CharBuffer; -import java.util.List; -import java.util.Map; -import java.util.function.Supplier; /** * Wrapper for a XContentParser that makes a single object/array look like a complete document. @@ -24,34 +17,23 @@ * as skipping to the end of the object in case of a parsing error. The wrapper is intended to be * used for parsing objects that should be ignored if they are malformed. */ -public class XContentSubParser implements XContentParser { +public class XContentSubParser extends FilterXContentParserWrapper { - private final XContentParser parser; private int level; private boolean closed; public XContentSubParser(XContentParser parser) { - this.parser = parser; + super(parser); if (parser.currentToken() != Token.START_OBJECT && parser.currentToken() != Token.START_ARRAY) { throw new IllegalStateException("The sub parser has to be created on the start of an object or array"); } level = 1; } - @Override - public XContentType contentType() { - return parser.contentType(); - } - - @Override - public void allowDuplicateKeys(boolean allowDuplicateKeys) { - parser.allowDuplicateKeys(allowDuplicateKeys); - } - @Override public Token nextToken() throws IOException { if (level > 0) { - Token token = parser.nextToken(); + Token token = super.nextToken(); if (token == Token.START_OBJECT || token == Token.START_ARRAY) { level++; } else if (token == Token.END_OBJECT || token == Token.END_ARRAY) { @@ -65,7 +47,7 @@ public Token nextToken() throws IOException { @Override public void skipChildren() throws IOException { - Token token = parser.currentToken(); + Token token = currentToken(); if (token != Token.START_OBJECT && token != Token.START_ARRAY) { // skip if not starting on an object or an array return; @@ -78,202 +60,11 @@ public void skipChildren() throws IOException { } } - @Override - public Token currentToken() { - return parser.currentToken(); - } - - @Override - public String currentName() throws IOException { - return parser.currentName(); - } - - @Override - public Map map() throws IOException { - return parser.map(); - } - - @Override - public Map mapOrdered() throws IOException { - return parser.mapOrdered(); - } - - @Override - public Map mapStrings() throws IOException { - return parser.mapStrings(); - } - - @Override - public Map map(Supplier> mapFactory, CheckedFunction mapValueParser) - throws IOException { - return parser.map(mapFactory, mapValueParser); - } - - @Override - public List list() throws IOException { - return parser.list(); - } - - @Override - public List listOrderedMap() throws IOException { - return parser.listOrderedMap(); - } - - @Override - public String text() throws IOException { - return 
parser.text(); - } - - @Override - public String textOrNull() throws IOException { - return parser.textOrNull(); - } - - @Override - public CharBuffer charBufferOrNull() throws IOException { - return parser.charBufferOrNull(); - } - - @Override - public CharBuffer charBuffer() throws IOException { - return parser.charBuffer(); - } - - @Override - public Object objectText() throws IOException { - return parser.objectText(); - } - - @Override - public Object objectBytes() throws IOException { - return parser.objectBytes(); - } - - @Override - public boolean hasTextCharacters() { - return parser.hasTextCharacters(); - } - - @Override - public char[] textCharacters() throws IOException { - return parser.textCharacters(); - } - - @Override - public int textLength() throws IOException { - return parser.textLength(); - } - - @Override - public int textOffset() throws IOException { - return parser.textOffset(); - } - - @Override - public Number numberValue() throws IOException { - return parser.numberValue(); - } - - @Override - public NumberType numberType() throws IOException { - return parser.numberType(); - } - - @Override - public short shortValue(boolean coerce) throws IOException { - return parser.shortValue(coerce); - } - - @Override - public int intValue(boolean coerce) throws IOException { - return parser.intValue(coerce); - } - - @Override - public long longValue(boolean coerce) throws IOException { - return parser.longValue(coerce); - } - - @Override - public float floatValue(boolean coerce) throws IOException { - return parser.floatValue(coerce); - } - - @Override - public double doubleValue(boolean coerce) throws IOException { - return parser.doubleValue(); - } - - @Override - public short shortValue() throws IOException { - return parser.shortValue(); - } - - @Override - public int intValue() throws IOException { - return parser.intValue(); - } - - @Override - public long longValue() throws IOException { - return parser.longValue(); - } - - @Override - public float floatValue() throws IOException { - return parser.floatValue(); - } - - @Override - public double doubleValue() throws IOException { - return parser.doubleValue(); - } - - @Override - public boolean isBooleanValue() throws IOException { - return parser.isBooleanValue(); - } - - @Override - public boolean booleanValue() throws IOException { - return parser.booleanValue(); - } - - @Override - public byte[] binaryValue() throws IOException { - return parser.binaryValue(); - } - - @Override - public XContentLocation getTokenLocation() { - return parser.getTokenLocation(); - } - - @Override - public T namedObject(Class categoryClass, String name, Object context) throws IOException { - return parser.namedObject(categoryClass, name, context); - } - - @Override - public NamedXContentRegistry getXContentRegistry() { - return parser.getXContentRegistry(); - } - @Override public boolean isClosed() { return closed; } - @Override - public RestApiVersion getRestApiVersion() { - return parser.getRestApiVersion(); - } - - @Override - public DeprecationHandler getDeprecationHandler() { - return parser.getDeprecationHandler(); - } - @Override public void close() throws IOException { if (closed == false) { diff --git a/server/src/main/java/org/elasticsearch/index/mapper/CompletionFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/CompletionFieldMapper.java index c685d438dfd5e..d3c5d3193adc1 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/CompletionFieldMapper.java +++ 
b/server/src/main/java/org/elasticsearch/index/mapper/CompletionFieldMapper.java @@ -30,8 +30,8 @@ import org.elasticsearch.search.suggest.completion.CompletionSuggester; import org.elasticsearch.search.suggest.completion.context.ContextMapping; import org.elasticsearch.search.suggest.completion.context.ContextMappings; -import org.elasticsearch.xcontent.DelegatingXContentParser; import org.elasticsearch.xcontent.DeprecationHandler; +import org.elasticsearch.xcontent.FilterXContentParser; import org.elasticsearch.xcontent.NamedXContentRegistry; import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.XContentLocation; @@ -610,8 +610,8 @@ public void doValidate(MappingLookup mappers) { * consumer supports the object structure. */ // This parser changes behaviour depending on which methods are called by consumers, which is extremely delicate. This kind of works for - // our internal mappers, but what about mappers from plugins - static class MultiFieldParser extends DelegatingXContentParser { + // our internal mappers, but what about mappers from plugins? + static class MultiFieldParser extends FilterXContentParser { private final String textValue; private final String fieldName; private final XContentLocation locationOffset; diff --git a/server/src/main/java/org/elasticsearch/index/mapper/DocumentParserContext.java b/server/src/main/java/org/elasticsearch/index/mapper/DocumentParserContext.java index 07aa1b0f23e2c..9f234598f5e76 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/DocumentParserContext.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/DocumentParserContext.java @@ -13,7 +13,7 @@ import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.analysis.IndexAnalyzers; import org.elasticsearch.xcontent.DotExpandingXContentParser; -import org.elasticsearch.xcontent.FilterXContentParser; +import org.elasticsearch.xcontent.FilterXContentParserWrapper; import org.elasticsearch.xcontent.XContentParser; import java.io.IOException; @@ -380,7 +380,7 @@ public final DynamicTemplate findDynamicTemplate(String fieldName, DynamicTempla // XContentParser that wraps an existing parser positioned on a value, // and a field name, and returns a stream that looks like { 'field' : 'value' } - private static class CopyToParser extends FilterXContentParser { + private static class CopyToParser extends FilterXContentParserWrapper { enum State { FIELD, @@ -400,7 +400,7 @@ enum State { public Token nextToken() throws IOException { if (state == State.FIELD) { state = State.VALUE; - return in.currentToken(); + return delegate().currentToken(); } return Token.END_OBJECT; } @@ -410,7 +410,7 @@ public Token currentToken() { if (state == State.FIELD) { return Token.FIELD_NAME; } - return in.currentToken(); + return delegate().currentToken(); } @Override diff --git a/server/src/main/java/org/elasticsearch/index/mapper/GeoPointFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/GeoPointFieldMapper.java index cff2cbdc25359..2a9bde2cca1c7 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/GeoPointFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/GeoPointFieldMapper.java @@ -42,7 +42,7 @@ import org.elasticsearch.search.lookup.FieldValues; import org.elasticsearch.search.lookup.SearchLookup; import org.elasticsearch.search.runtime.GeoPointScriptFieldDistanceFeatureQuery; -import org.elasticsearch.xcontent.FilterXContentParser; +import org.elasticsearch.xcontent.FilterXContentParserWrapper; 
import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentParser; @@ -225,7 +225,7 @@ protected void index(DocumentParserContext context, GeoPoint geometry) throws IO * in the incoming document. We rely on the fact that consumers are only ever call {@link XContentParser#textOrNull()} and never * advance tokens, which is explicitly disallowed by this parser. */ - static class GeoHashMultiFieldParser extends FilterXContentParser { + static class GeoHashMultiFieldParser extends FilterXContentParserWrapper { private final String value; GeoHashMultiFieldParser(XContentParser innerParser, String value) { diff --git a/test/framework/src/main/java/org/elasticsearch/common/xcontent/support/AbstractFilteringTestCase.java b/test/framework/src/main/java/org/elasticsearch/common/xcontent/support/AbstractFilteringTestCase.java index 5d01eb6031134..4a3b3d27c75ab 100644 --- a/test/framework/src/main/java/org/elasticsearch/common/xcontent/support/AbstractFilteringTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/common/xcontent/support/AbstractFilteringTestCase.java @@ -13,7 +13,7 @@ import org.elasticsearch.core.CheckedFunction; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xcontent.DeprecationHandler; -import org.elasticsearch.xcontent.FilterXContentParser; +import org.elasticsearch.xcontent.FilterXContentParserWrapper; import org.elasticsearch.xcontent.NamedXContentRegistry; import org.elasticsearch.xcontent.XContent; import org.elasticsearch.xcontent.XContentBuilder; @@ -55,7 +55,7 @@ protected static Builder builderFor(String file) { .createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, stream) ) { // copyCurrentStructure does not property handle filters when it is passed a json parser. So we hide it. - return builder.copyCurrentStructure(new FilterXContentParser(parser) { + return builder.copyCurrentStructure(new FilterXContentParserWrapper(parser) { }); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/support/xcontent/WatcherXContentParser.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/support/xcontent/WatcherXContentParser.java index 8fc38e22eb969..96fa4de6c0d9b 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/support/xcontent/WatcherXContentParser.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/support/xcontent/WatcherXContentParser.java @@ -8,7 +8,7 @@ import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.core.Nullable; -import org.elasticsearch.xcontent.FilterXContentParser; +import org.elasticsearch.xcontent.FilterXContentParserWrapper; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xpack.core.watcher.common.secret.Secret; import org.elasticsearch.xpack.core.watcher.crypto.CryptoService; @@ -26,7 +26,7 @@ * {@link Secret}s are encrypted values that are stored in memory and are decrypted * on demand when needed. 
*/ -public class WatcherXContentParser extends FilterXContentParser { +public class WatcherXContentParser extends FilterXContentParserWrapper { public static final String REDACTED_PASSWORD = "::es_redacted::"; From e27145030cfa4c6b269549f61f776cbe71520d45 Mon Sep 17 00:00:00 2001 From: David Turner Date: Thu, 17 Feb 2022 10:08:38 +0000 Subject: [PATCH 148/167] Preserve context in snapshotDeletionListeners (#84089) Closes #84036 --- docs/changelog/84089.yaml | 6 ++++++ .../java/org/elasticsearch/snapshots/SnapshotsService.java | 3 ++- 2 files changed, 8 insertions(+), 1 deletion(-) create mode 100644 docs/changelog/84089.yaml diff --git a/docs/changelog/84089.yaml b/docs/changelog/84089.yaml new file mode 100644 index 0000000000000..5404d331ebd3c --- /dev/null +++ b/docs/changelog/84089.yaml @@ -0,0 +1,6 @@ +pr: 84089 +summary: Preserve context in `snapshotDeletionListeners` +area: Snapshot/Restore +type: bug +issues: + - 84036 diff --git a/server/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java b/server/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java index a0384b9efcb43..70da66185f370 100644 --- a/server/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java +++ b/server/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java @@ -2289,7 +2289,8 @@ private static boolean isWritingToRepository(SnapshotsInProgress.Entry entry) { } private void addDeleteListener(String deleteUUID, ActionListener listener) { - snapshotDeletionListeners.computeIfAbsent(deleteUUID, k -> new CopyOnWriteArrayList<>()).add(listener); + snapshotDeletionListeners.computeIfAbsent(deleteUUID, k -> new CopyOnWriteArrayList<>()) + .add(ContextPreservingActionListener.wrapPreservingContext(listener, threadPool.getThreadContext())); } /** From 1b7b2a1a7a9d4ac0c30bd56264c1f09c60ec074f Mon Sep 17 00:00:00 2001 From: David Turner Date: Thu, 17 Feb 2022 10:24:38 +0000 Subject: [PATCH 149/167] Use static empty store files metadata (#84034) In a large cluster we expect most nodes not to have a copy of most shards, but today during replica shard allocation we create a new (and nontrivial) object for each node that has no copy of a shard. With this commit we check at deserialization time whether the response is empty and, if so, avoid the unnecessary instantiation. 
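In a simplified sketch, the pattern is to detect the empty case while reading
and hand back a shared immutable singleton. Metadata below is a hypothetical
stand-in type for illustration, not the actual Store code.

import java.io.DataInput;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;

final class Metadata {
    // Shared immutable singleton for the common "no copy of this shard" case.
    static final Metadata EMPTY = new Metadata(Map.of());

    private final Map<String, String> files;

    private Metadata(Map<String, String> files) {
        this.files = files;
    }

    static Metadata readFrom(DataInput in) throws IOException {
        int size = in.readInt();
        if (size == 0) {
            return EMPTY; // avoid allocating a fresh empty instance per node
        }
        Map<String, String> files = new HashMap<>(size);
        for (int i = 0; i < size; i++) {
            files.put(in.readUTF(), in.readUTF());
        }
        return new Metadata(Map.copyOf(files));
    }
}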
Relates #77466 --- docs/changelog/84034.yaml | 5 ++ .../org/elasticsearch/index/store/Store.java | 44 ++++++--------- .../recovery/RecoveryCleanFilesRequest.java | 2 +- .../recovery/StartRecoveryRequest.java | 2 +- .../TransportNodesListShardStoreMetadata.java | 55 ++++++++++--------- .../gateway/ReplicaShardAllocatorTests.java | 1 - .../elasticsearch/index/store/StoreTests.java | 51 ++++++++++++++--- .../PutCcrRestoreSessionAction.java | 2 +- 8 files changed, 96 insertions(+), 66 deletions(-) create mode 100644 docs/changelog/84034.yaml diff --git a/docs/changelog/84034.yaml b/docs/changelog/84034.yaml new file mode 100644 index 0000000000000..30065232231ef --- /dev/null +++ b/docs/changelog/84034.yaml @@ -0,0 +1,5 @@ +pr: 84034 +summary: Use static empty store files metadata +area: Allocation +type: enhancement +issues: [] diff --git a/server/src/main/java/org/elasticsearch/index/store/Store.java b/server/src/main/java/org/elasticsearch/index/store/Store.java index 5aabb13e957e4..28b6452acc5ed 100644 --- a/server/src/main/java/org/elasticsearch/index/store/Store.java +++ b/server/src/main/java/org/elasticsearch/index/store/Store.java @@ -52,6 +52,7 @@ import org.elasticsearch.common.lucene.store.InputStreamIndexInput; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Setting.Property; +import org.elasticsearch.common.util.Maps; import org.elasticsearch.core.AbstractRefCounted; import org.elasticsearch.core.Nullable; import org.elasticsearch.core.RefCounted; @@ -759,25 +760,17 @@ public String toString() { */ public static final class MetadataSnapshot implements Iterable, Writeable { private final Map metadata; - - public static final MetadataSnapshot EMPTY = new MetadataSnapshot(); - private final Map commitUserData; - private final long numDocs; + public static final MetadataSnapshot EMPTY = new MetadataSnapshot(emptyMap(), emptyMap(), 0L); + public MetadataSnapshot(Map metadata, Map commitUserData, long numDocs) { this.metadata = metadata; this.commitUserData = commitUserData; this.numDocs = numDocs; } - MetadataSnapshot() { - metadata = emptyMap(); - commitUserData = emptyMap(); - numDocs = 0; - } - MetadataSnapshot(IndexCommit commit, Directory directory, Logger logger) throws IOException { LoadedMetadata loadedMetadata = loadMetadata(commit, directory, logger); metadata = loadedMetadata.fileMetadata; @@ -786,26 +779,21 @@ public MetadataSnapshot(Map metadata, Map metadata = new HashMap<>(); - for (int i = 0; i < size; i++) { - StoreFileMetadata meta = new StoreFileMetadata(in); - metadata.put(meta.name(), meta); - } - Map commitUserData = new HashMap<>(); - int num = in.readVInt(); - for (int i = num; i > 0; i--) { - commitUserData.put(in.readString(), in.readString()); + public static MetadataSnapshot readFrom(StreamInput in) throws IOException { + final int metadataSize = in.readVInt(); + final Map metadata = metadataSize == 0 ? 
emptyMap() : Maps.newMapWithExpectedSize(metadataSize); + for (int i = 0; i < metadataSize; i++) { + final var storeFileMetadata = new StoreFileMetadata(in); + metadata.put(storeFileMetadata.name(), storeFileMetadata); } + final var commitUserData = in.readMap(StreamInput::readString, StreamInput::readString); + final var numDocs = in.readLong(); - this.metadata = unmodifiableMap(metadata); - this.commitUserData = unmodifiableMap(commitUserData); - this.numDocs = in.readLong(); - assert metadata.isEmpty() || numSegmentFiles() == 1 : "numSegmentFiles: " + numSegmentFiles(); + if (metadataSize == 0 && commitUserData.size() == 0 && numDocs == 0) { + return MetadataSnapshot.EMPTY; + } else { + return new MetadataSnapshot(metadata, commitUserData, numDocs); + } } @Override diff --git a/server/src/main/java/org/elasticsearch/indices/recovery/RecoveryCleanFilesRequest.java b/server/src/main/java/org/elasticsearch/indices/recovery/RecoveryCleanFilesRequest.java index 2e6ba419752f3..d5997938e715b 100644 --- a/server/src/main/java/org/elasticsearch/indices/recovery/RecoveryCleanFilesRequest.java +++ b/server/src/main/java/org/elasticsearch/indices/recovery/RecoveryCleanFilesRequest.java @@ -43,7 +43,7 @@ public RecoveryCleanFilesRequest( super(in); recoveryId = in.readLong(); shardId = new ShardId(in); - snapshotFiles = new Store.MetadataSnapshot(in); + snapshotFiles = Store.MetadataSnapshot.readFrom(in); totalTranslogOps = in.readVInt(); globalCheckpoint = in.readZLong(); } diff --git a/server/src/main/java/org/elasticsearch/indices/recovery/StartRecoveryRequest.java b/server/src/main/java/org/elasticsearch/indices/recovery/StartRecoveryRequest.java index fd6d287f5f5fa..a19388ca26126 100644 --- a/server/src/main/java/org/elasticsearch/indices/recovery/StartRecoveryRequest.java +++ b/server/src/main/java/org/elasticsearch/indices/recovery/StartRecoveryRequest.java @@ -40,7 +40,7 @@ public StartRecoveryRequest(StreamInput in) throws IOException { targetAllocationId = in.readString(); sourceNode = new DiscoveryNode(in); targetNode = new DiscoveryNode(in); - metadataSnapshot = new Store.MetadataSnapshot(in); + metadataSnapshot = Store.MetadataSnapshot.readFrom(in); primaryRelocation = in.readBoolean(); startingSeqNo = in.readLong(); if (in.getVersion().onOrAfter(RecoverySettings.SNAPSHOT_FILE_DOWNLOAD_THROTTLING_SUPPORTED_VERSION)) { diff --git a/server/src/main/java/org/elasticsearch/indices/store/TransportNodesListShardStoreMetadata.java b/server/src/main/java/org/elasticsearch/indices/store/TransportNodesListShardStoreMetadata.java index d28249eefc716..016841af9601d 100644 --- a/server/src/main/java/org/elasticsearch/indices/store/TransportNodesListShardStoreMetadata.java +++ b/server/src/main/java/org/elasticsearch/indices/store/TransportNodesListShardStoreMetadata.java @@ -10,6 +10,7 @@ import org.apache.logging.log4j.message.ParameterizedMessage; import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.Version; import org.elasticsearch.action.ActionType; import org.elasticsearch.action.FailedNodeException; import org.elasticsearch.action.support.ActionFilters; @@ -45,12 +46,13 @@ import org.elasticsearch.transport.TransportService; import java.io.IOException; -import java.util.Collections; import java.util.Iterator; import java.util.List; import java.util.Objects; import java.util.concurrent.TimeUnit; +import static java.util.Collections.emptyList; + public class TransportNodesListShardStoreMetadata extends TransportNodesAction< TransportNodesListShardStoreMetadata.Request, 
TransportNodesListShardStoreMetadata.NodesStoreFilesMetadata, @@ -132,7 +134,6 @@ private StoreFilesMetadata listStoreMetadata(NodeRequest request) throws IOExcep if (indexShard != null) { try { final StoreFilesMetadata storeFilesMetadata = new StoreFilesMetadata( - shardId, indexShard.snapshotStoreMetadata(), indexShard.getPeerRecoveryRetentionLeases() ); @@ -140,10 +141,10 @@ private StoreFilesMetadata listStoreMetadata(NodeRequest request) throws IOExcep return storeFilesMetadata; } catch (org.apache.lucene.index.IndexNotFoundException e) { logger.trace(new ParameterizedMessage("[{}] node is missing index, responding with empty", shardId), e); - return new StoreFilesMetadata(shardId, Store.MetadataSnapshot.EMPTY, Collections.emptyList()); + return StoreFilesMetadata.EMPTY; } catch (IOException e) { logger.warn(new ParameterizedMessage("[{}] can't read metadata from store, responding with empty", shardId), e); - return new StoreFilesMetadata(shardId, Store.MetadataSnapshot.EMPTY, Collections.emptyList()); + return StoreFilesMetadata.EMPTY; } } } @@ -166,7 +167,7 @@ private StoreFilesMetadata listStoreMetadata(NodeRequest request) throws IOExcep } final ShardPath shardPath = ShardPath.loadShardPath(logger, nodeEnv, shardId, customDataPath); if (shardPath == null) { - return new StoreFilesMetadata(shardId, Store.MetadataSnapshot.EMPTY, Collections.emptyList()); + return StoreFilesMetadata.EMPTY; } // note that this may fail if it can't get access to the shard lock. Since we check above there is an active shard, this means: // 1) a shard is being constructed, which means the master will not use a copy of this replica @@ -180,7 +181,7 @@ private StoreFilesMetadata listStoreMetadata(NodeRequest request) throws IOExcep ); // We use peer recovery retention leases from the primary for allocating replicas. We should always have retention leases when // we refresh shard info after the primary has started. Hence, we can ignore retention leases if there is no active shard. 
- return new StoreFilesMetadata(shardId, metadataSnapshot, Collections.emptyList()); + return new StoreFilesMetadata(metadataSnapshot, emptyList()); } finally { TimeValue took = new TimeValue(System.nanoTime() - startTimeNS, TimeUnit.NANOSECONDS); if (exists) { @@ -192,37 +193,43 @@ private StoreFilesMetadata listStoreMetadata(NodeRequest request) throws IOExcep } public static class StoreFilesMetadata implements Iterable, Writeable { - private final ShardId shardId; private final Store.MetadataSnapshot metadataSnapshot; private final List peerRecoveryRetentionLeases; - public StoreFilesMetadata( - ShardId shardId, - Store.MetadataSnapshot metadataSnapshot, - List peerRecoveryRetentionLeases - ) { - this.shardId = shardId; + private static final ShardId FAKE_SHARD_ID = new ShardId("_na_", "_na_", 0); + public static final StoreFilesMetadata EMPTY = new StoreFilesMetadata(Store.MetadataSnapshot.EMPTY, emptyList()); + + public StoreFilesMetadata(Store.MetadataSnapshot metadataSnapshot, List peerRecoveryRetentionLeases) { this.metadataSnapshot = metadataSnapshot; this.peerRecoveryRetentionLeases = peerRecoveryRetentionLeases; } - public StoreFilesMetadata(StreamInput in) throws IOException { - this.shardId = new ShardId(in); - this.metadataSnapshot = new Store.MetadataSnapshot(in); - this.peerRecoveryRetentionLeases = in.readList(RetentionLease::new); + public static StoreFilesMetadata readFrom(StreamInput in) throws IOException { + if (in.getVersion().before(Version.V_8_2_0)) { + new ShardId(in); + } + final var metadataSnapshot = Store.MetadataSnapshot.readFrom(in); + final var peerRecoveryRetentionLeases = in.readList(RetentionLease::new); + if (metadataSnapshot == Store.MetadataSnapshot.EMPTY && peerRecoveryRetentionLeases.isEmpty()) { + return EMPTY; + } else { + return new StoreFilesMetadata(metadataSnapshot, peerRecoveryRetentionLeases); + } } @Override public void writeTo(StreamOutput out) throws IOException { - shardId.writeTo(out); + if (out.getVersion().before(Version.V_8_2_0)) { + // no compatible version cares about the shard ID, we can just make one up + FAKE_SHARD_ID.writeTo(out); + + // NB only checked this for versions back to 7.17.0, we are assuming that we don't use this with earlier versions: + assert out.getVersion().onOrAfter(Version.V_7_17_0) : out.getVersion(); + } metadataSnapshot.writeTo(out); out.writeList(peerRecoveryRetentionLeases); } - public ShardId shardId() { - return this.shardId; - } - public boolean isEmpty() { return metadataSnapshot.size() == 0; } @@ -267,8 +274,6 @@ public String syncId() { @Override public String toString() { return "StoreFilesMetadata{" - + ", shardId=" - + shardId + ", metadataSnapshot{size=" + metadataSnapshot.size() + ", syncId=" @@ -385,7 +390,7 @@ public static class NodeStoreFilesMetadata extends BaseNodeResponse { public NodeStoreFilesMetadata(StreamInput in, DiscoveryNode node) throws IOException { super(in, node); - storeFilesMetadata = new StoreFilesMetadata(in); + storeFilesMetadata = StoreFilesMetadata.readFrom(in); } public NodeStoreFilesMetadata(DiscoveryNode node, StoreFilesMetadata storeFilesMetadata) { diff --git a/server/src/test/java/org/elasticsearch/gateway/ReplicaShardAllocatorTests.java b/server/src/test/java/org/elasticsearch/gateway/ReplicaShardAllocatorTests.java index d386e080606a7..758da73def503 100644 --- a/server/src/test/java/org/elasticsearch/gateway/ReplicaShardAllocatorTests.java +++ b/server/src/test/java/org/elasticsearch/gateway/ReplicaShardAllocatorTests.java @@ -684,7 +684,6 @@ TestAllocator 
addData( data.put( node, new TransportNodesListShardStoreMetadata.StoreFilesMetadata( - shardId, new Store.MetadataSnapshot(unmodifiableMap(filesAsMap), unmodifiableMap(commitData), randomInt()), peerRecoveryRetentionLeases ) diff --git a/server/src/test/java/org/elasticsearch/index/store/StoreTests.java b/server/src/test/java/org/elasticsearch/index/store/StoreTests.java index f5a5d6ed4c009..efc1b846492b8 100644 --- a/server/src/test/java/org/elasticsearch/index/store/StoreTests.java +++ b/server/src/test/java/org/elasticsearch/index/store/StoreTests.java @@ -80,7 +80,10 @@ import java.util.concurrent.atomic.AtomicInteger; import java.util.function.LongUnaryOperator; +import static java.util.Collections.emptyList; +import static java.util.Collections.emptyMap; import static java.util.Collections.unmodifiableMap; +import static org.elasticsearch.test.VersionUtils.randomCompatibleVersion; import static org.elasticsearch.test.VersionUtils.randomVersion; import static org.hamcrest.Matchers.anyOf; import static org.hamcrest.Matchers.containsString; @@ -93,6 +96,7 @@ import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.notNullValue; +import static org.hamcrest.Matchers.sameInstance; public class StoreTests extends ESTestCase { @@ -918,7 +922,7 @@ public void testMetadataSnapshotStreaming() throws Exception { ByteArrayInputStream inBuffer = new ByteArrayInputStream(outBuffer.toByteArray()); InputStreamStreamInput in = new InputStreamStreamInput(inBuffer); in.setVersion(targetNodeVersion); - Store.MetadataSnapshot inMetadataSnapshot = new Store.MetadataSnapshot(in); + Store.MetadataSnapshot inMetadataSnapshot = Store.MetadataSnapshot.readFrom(in); Map origEntries = new HashMap<>(); origEntries.putAll(outMetadataSnapshot.asMap()); for (Map.Entry entry : inMetadataSnapshot.asMap().entrySet()) { @@ -928,6 +932,21 @@ public void testMetadataSnapshotStreaming() throws Exception { assertThat(inMetadataSnapshot.getCommitUserData(), equalTo(outMetadataSnapshot.getCommitUserData())); } + public void testEmptyMetadataSnapshotStreaming() throws Exception { + var outMetadataSnapshot = randomBoolean() ? 
Store.MetadataSnapshot.EMPTY : new Store.MetadataSnapshot(emptyMap(), emptyMap(), 0L); + var targetNodeVersion = randomCompatibleVersion(random(), org.elasticsearch.Version.CURRENT); + + var outBuffer = new ByteArrayOutputStream(); + var out = new OutputStreamStreamOutput(outBuffer); + out.setVersion(targetNodeVersion); + outMetadataSnapshot.writeTo(out); + + var inBuffer = new ByteArrayInputStream(outBuffer.toByteArray()); + var in = new InputStreamStreamInput(inBuffer); + in.setVersion(targetNodeVersion); + assertThat(Store.MetadataSnapshot.readFrom(in), sameInstance(Store.MetadataSnapshot.EMPTY)); + } + protected Store.MetadataSnapshot createMetadataSnapshot() { StoreFileMetadata storeFileMetadata1 = new StoreFileMetadata("segments", 1, "666", MIN_SUPPORTED_LUCENE_VERSION.toString()); StoreFileMetadata storeFileMetadata2 = new StoreFileMetadata("no_segments", 1, "666", MIN_SUPPORTED_LUCENE_VERSION.toString()); @@ -978,21 +997,16 @@ public void testStreamStoreFilesMetadata() throws Exception { ); } TransportNodesListShardStoreMetadata.StoreFilesMetadata outStoreFileMetadata = - new TransportNodesListShardStoreMetadata.StoreFilesMetadata( - new ShardId("test", "_na_", 0), - metadataSnapshot, - peerRecoveryRetentionLeases - ); + new TransportNodesListShardStoreMetadata.StoreFilesMetadata(metadataSnapshot, peerRecoveryRetentionLeases); ByteArrayOutputStream outBuffer = new ByteArrayOutputStream(); OutputStreamStreamOutput out = new OutputStreamStreamOutput(outBuffer); - org.elasticsearch.Version targetNodeVersion = randomVersion(random()); + org.elasticsearch.Version targetNodeVersion = randomCompatibleVersion(random(), org.elasticsearch.Version.CURRENT); out.setVersion(targetNodeVersion); outStoreFileMetadata.writeTo(out); ByteArrayInputStream inBuffer = new ByteArrayInputStream(outBuffer.toByteArray()); InputStreamStreamInput in = new InputStreamStreamInput(inBuffer); in.setVersion(targetNodeVersion); - TransportNodesListShardStoreMetadata.StoreFilesMetadata inStoreFileMetadata = - new TransportNodesListShardStoreMetadata.StoreFilesMetadata(in); + var inStoreFileMetadata = TransportNodesListShardStoreMetadata.StoreFilesMetadata.readFrom(in); Iterator outFiles = outStoreFileMetadata.iterator(); for (StoreFileMetadata inFile : inStoreFileMetadata) { assertThat(inFile.name(), equalTo(outFiles.next().name())); @@ -1001,6 +1015,25 @@ public void testStreamStoreFilesMetadata() throws Exception { assertThat(outStoreFileMetadata.peerRecoveryRetentionLeases(), equalTo(peerRecoveryRetentionLeases)); } + public void testStreamEmptyStoreFilesMetadata() throws Exception { + var outStoreFileMetadata = randomBoolean() + ? 
TransportNodesListShardStoreMetadata.StoreFilesMetadata.EMPTY + : new TransportNodesListShardStoreMetadata.StoreFilesMetadata(Store.MetadataSnapshot.EMPTY, emptyList()); + var outBuffer = new ByteArrayOutputStream(); + var out = new OutputStreamStreamOutput(outBuffer); + var targetNodeVersion = randomCompatibleVersion(random(), org.elasticsearch.Version.CURRENT); + out.setVersion(targetNodeVersion); + outStoreFileMetadata.writeTo(out); + + var inBuffer = new ByteArrayInputStream(outBuffer.toByteArray()); + var in = new InputStreamStreamInput(inBuffer); + in.setVersion(targetNodeVersion); + assertThat( + TransportNodesListShardStoreMetadata.StoreFilesMetadata.readFrom(in), + sameInstance(TransportNodesListShardStoreMetadata.StoreFilesMetadata.EMPTY) + ); + } + public void testMarkCorruptedOnTruncatedSegmentsFile() throws IOException { IndexWriterConfig iwc = newIndexWriterConfig(); final ShardId shardId = new ShardId("index", "_na_", 1); diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/repositories/PutCcrRestoreSessionAction.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/repositories/PutCcrRestoreSessionAction.java index eb8038755d782..d116106b044ae 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/repositories/PutCcrRestoreSessionAction.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/repositories/PutCcrRestoreSessionAction.java @@ -114,7 +114,7 @@ public static class PutCcrRestoreSessionResponse extends ActionResponse { PutCcrRestoreSessionResponse(StreamInput in) throws IOException { super(in); node = new DiscoveryNode(in); - storeFileMetadata = new Store.MetadataSnapshot(in); + storeFileMetadata = Store.MetadataSnapshot.readFrom(in); mappingVersion = in.readVLong(); } From 3d2617136c8d8e6880d486204f6e393c6a9b9e17 Mon Sep 17 00:00:00 2001 From: David Turner Date: Thu, 17 Feb 2022 11:32:43 +0000 Subject: [PATCH 150/167] Avoid null threadContext in ResultDeduplicator (#84093) In #84038 we added a dependency on having a valid `threadContext` in a repository, but some tests use mocking and may end up with a `null` here. This seems not to be a problem in recent branches but causes failures in 8.0. With this commit we ensure that we always have a valid `threadContext` to avoid any problems. 
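On the test side, the fix amounts to backing the mocked ThreadPool with a
real ThreadContext, roughly as in the following sketch (assuming Mockito, as
used by the affected tests):

import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.concurrent.ThreadContext;
import org.elasticsearch.threadpool.ThreadPool;

import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

// Inside a test setup method: give the mock a real ThreadContext so that
// code calling threadPool.getThreadContext() never observes null.
ThreadContext threadContext = new ThreadContext(Settings.EMPTY);
ThreadPool threadPool = mock(ThreadPool.class);
when(threadPool.getThreadContext()).thenReturn(threadContext);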
--- .../java/org/elasticsearch/action/ResultDeduplicator.java | 1 + .../repositories/RepositoriesServiceTests.java | 3 +++ .../repositories/blobstore/BlobStoreTestUtil.java | 4 ++++ .../repositories/encrypted/EncryptedRepositoryTests.java | 6 +++++- 4 files changed, 13 insertions(+), 1 deletion(-) diff --git a/server/src/main/java/org/elasticsearch/action/ResultDeduplicator.java b/server/src/main/java/org/elasticsearch/action/ResultDeduplicator.java index b63eeaf64e505..2a9887d8a30d7 100644 --- a/server/src/main/java/org/elasticsearch/action/ResultDeduplicator.java +++ b/server/src/main/java/org/elasticsearch/action/ResultDeduplicator.java @@ -28,6 +28,7 @@ public final class ResultDeduplicator { private final ConcurrentMap requests = ConcurrentCollections.newConcurrentMap(); public ResultDeduplicator(ThreadContext threadContext) { + assert threadContext != null; this.threadContext = threadContext; } diff --git a/server/src/test/java/org/elasticsearch/repositories/RepositoriesServiceTests.java b/server/src/test/java/org/elasticsearch/repositories/RepositoriesServiceTests.java index 04c8a08462896..d4ec767e5817c 100644 --- a/server/src/test/java/org/elasticsearch/repositories/RepositoriesServiceTests.java +++ b/server/src/test/java/org/elasticsearch/repositories/RepositoriesServiceTests.java @@ -31,6 +31,7 @@ import org.elasticsearch.common.component.LifecycleListener; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.MockBigArrays; +import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.snapshots.IndexShardSnapshotStatus; import org.elasticsearch.index.store.Store; @@ -65,7 +66,9 @@ public class RepositoriesServiceTests extends ESTestCase { @Override public void setUp() throws Exception { super.setUp(); + ThreadContext threadContext = new ThreadContext(Settings.EMPTY); ThreadPool threadPool = mock(ThreadPool.class); + when(threadPool.getThreadContext()).thenReturn(threadContext); final TransportService transportService = new TransportService( Settings.EMPTY, mock(Transport.class), diff --git a/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreTestUtil.java b/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreTestUtil.java index 47e12cd4934d4..c61bd0c2799ac 100644 --- a/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreTestUtil.java +++ b/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreTestUtil.java @@ -29,6 +29,8 @@ import org.elasticsearch.common.blobstore.BlobContainer; import org.elasticsearch.common.blobstore.BlobMetadata; import org.elasticsearch.common.blobstore.BlobPath; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.snapshots.blobstore.BlobStoreIndexShardSnapshots; @@ -417,7 +419,9 @@ public static ClusterService mockClusterService(RepositoryMetadata metadata) { } private static ClusterService mockClusterService(ClusterState initialState) { + final ThreadContext threadContext = new ThreadContext(Settings.EMPTY); final ThreadPool threadPool = mock(ThreadPool.class); + when(threadPool.getThreadContext()).thenReturn(threadContext); when(threadPool.executor(ThreadPool.Names.SNAPSHOT)).thenReturn(new SameThreadExecutorService()); 
when(threadPool.generic()).thenReturn(new SameThreadExecutorService()); when(threadPool.info(ThreadPool.Names.SNAPSHOT)).thenReturn( diff --git a/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/EncryptedRepositoryTests.java b/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/EncryptedRepositoryTests.java index cbdf72b1b9cc9..ee1056977e906 100644 --- a/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/EncryptedRepositoryTests.java +++ b/x-pack/plugin/repository-encrypted/src/test/java/org/elasticsearch/repositories/encrypted/EncryptedRepositoryTests.java @@ -17,6 +17,7 @@ import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.core.Tuple; import org.elasticsearch.indices.recovery.RecoverySettings; import org.elasticsearch.license.XPackLicenseState; @@ -75,7 +76,10 @@ public void setUpMocks() throws Exception { Settings.EMPTY ); ClusterApplierService clusterApplierService = mock(ClusterApplierService.class); - when(clusterApplierService.threadPool()).thenReturn(mock(ThreadPool.class)); + final var threadContext = new ThreadContext(Settings.EMPTY); + final var threadPool = mock(ThreadPool.class); + when(threadPool.getThreadContext()).thenReturn(threadContext); + when(clusterApplierService.threadPool()).thenReturn(threadPool); ClusterService clusterService = mock(ClusterService.class); when(clusterService.getClusterApplierService()).thenReturn(clusterApplierService); this.encryptedRepository = new EncryptedRepository( From 4e55efc09d46ef11f99de7347b325840fa5f0c93 Mon Sep 17 00:00:00 2001 From: Elasticsearch addict Date: Thu, 17 Feb 2022 17:14:12 +0530 Subject: [PATCH 151/167] Update Lucene analysis base url (#84094) Moving to Lucene 9 slightly changed the base url for the analysis components which this PR fixes. --- docs/reference/analysis.asciidoc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/reference/analysis.asciidoc b/docs/reference/analysis.asciidoc index 3db1a89bbe847..72ab42d22b911 100644 --- a/docs/reference/analysis.asciidoc +++ b/docs/reference/analysis.asciidoc @@ -1,7 +1,7 @@ [[analysis]] = Text analysis -:lucene-analysis-docs: https://lucene.apache.org/core/{lucene_version_path}/analyzers-common/org/apache/lucene/analysis +:lucene-analysis-docs: https://lucene.apache.org/core/{lucene_version_path}/analysis/common/org/apache/lucene/analysis :lucene-gh-main-link: https://github.com/apache/lucene/blob/main/lucene :lucene-stop-word-link: {lucene-gh-main-link}/analysis/common/src/resources/org/apache/lucene/analysis From 3a903ca17cad1b620df83b541d3e679570711a22 Mon Sep 17 00:00:00 2001 From: Yang Wang Date: Thu, 17 Feb 2022 23:15:23 +1100 Subject: [PATCH 152/167] Revert "Clean up for superuser role name references (#83627)" (#84096) This reverts commit a9cdbf42c6140e7c0e2ac49d8ccd25683741084d. The role name change does not play well with API key creation. 
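For context, a hedged sketch of why the role name matters here (the constants appear in the diff below; the comments paraphrase the motivation above, given that an API key's effective privileges are capped by the roles of the user that created it):

    // Internal user that owns the enrollment-token API keys:
    public static final String XPACK_SECURITY_NAME = "_xpack_security";
    // Reverted to the reserved "superuser" role: keys created by this user are
    // limited by its roles, and the renamed "_xpack_security" role did not play
    // well with API key creation.
    public static final String XPACK_SECURITY_ROLE = "superuser";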
--- .../elasticsearch/xpack/core/security/user/UsernamesField.java | 2 +- .../security/enrollment/InternalEnrollmentTokenGenerator.java | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/user/UsernamesField.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/user/UsernamesField.java index 9ba7c01eb69e2..c5e17656658e6 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/user/UsernamesField.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/user/UsernamesField.java @@ -15,7 +15,7 @@ public final class UsernamesField { public static final String SYSTEM_NAME = "_system"; public static final String SYSTEM_ROLE = "_system"; public static final String XPACK_SECURITY_NAME = "_xpack_security"; - public static final String XPACK_SECURITY_ROLE = "_xpack_security"; + public static final String XPACK_SECURITY_ROLE = "superuser"; public static final String XPACK_NAME = "_xpack"; public static final String XPACK_ROLE = "_xpack"; public static final String LOGSTASH_NAME = "logstash_system"; diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/enrollment/InternalEnrollmentTokenGenerator.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/enrollment/InternalEnrollmentTokenGenerator.java index 455fd0c83ea79..53f0eacb88b06 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/enrollment/InternalEnrollmentTokenGenerator.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/enrollment/InternalEnrollmentTokenGenerator.java @@ -51,7 +51,7 @@ public class InternalEnrollmentTokenGenerator extends BaseEnrollmentTokenGenerat public InternalEnrollmentTokenGenerator(Environment environment, SSLService sslService, Client client) { this.environment = environment; this.sslService = sslService; - // enrollment tokens API keys will be owned by the "_xpack_security" system user + // enrollment tokens API keys will be owned by the "_xpack_security" system user ("superuser" role) this.client = new OriginSettingClient(client, SECURITY_ORIGIN); } From 5b1f162db8adcfa31be6e508b6b241bf41c185e0 Mon Sep 17 00:00:00 2001 From: Seth Michael Larson Date: Thu, 17 Feb 2022 06:59:50 -0600 Subject: [PATCH 153/167] Add API specs for OpenID Connect APIs --- .../api/security.oidc_authenticate.json | 28 +++++++++++++++++++ .../api/security.oidc_logout.json | 28 +++++++++++++++++++ .../security.oidc_prepare_authentication.json | 28 +++++++++++++++++++ 3 files changed, 84 insertions(+) create mode 100644 rest-api-spec/src/main/resources/rest-api-spec/api/security.oidc_authenticate.json create mode 100644 rest-api-spec/src/main/resources/rest-api-spec/api/security.oidc_logout.json create mode 100644 rest-api-spec/src/main/resources/rest-api-spec/api/security.oidc_prepare_authentication.json diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/security.oidc_authenticate.json b/rest-api-spec/src/main/resources/rest-api-spec/api/security.oidc_authenticate.json new file mode 100644 index 0000000000000..969f5e350705f --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/security.oidc_authenticate.json @@ -0,0 +1,28 @@ +{ + "security.oidc_authenticate":{ + "documentation":{ + "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-oidc-authenticate.html", + "description":"Exchanges an OpenID Connection 
authentication response message for an Elasticsearch access token and refresh token pair" + }, + "stability":"stable", + "visibility":"public", + "headers":{ + "accept": [ "application/json"], + "content_type": ["application/json"] + }, + "url":{ + "paths":[ + { + "path":"/_security/oidc/authenticate", + "methods":[ + "POST" + ] + } + ] + }, + "body":{ + "description":"The OpenID Connect response to authenticate", + "required":true + } + } +} diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/security.oidc_logout.json b/rest-api-spec/src/main/resources/rest-api-spec/api/security.oidc_logout.json new file mode 100644 index 0000000000000..83f16e5c4de6c --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/security.oidc_logout.json @@ -0,0 +1,28 @@ +{ + "security.oidc_logout":{ + "documentation":{ + "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-oidc-logout.html", + "description":"Invalidates a refresh token and access token that was generated from the OpenID Connect Authenticate API" + }, + "stability":"stable", + "visibility":"public", + "headers":{ + "accept": [ "application/json"], + "content_type": ["application/json"] + }, + "url":{ + "paths":[ + { + "path":"/_security/oidc/logout", + "methods":[ + "POST" + ] + } + ] + }, + "body":{ + "description":"Access token and refresh token to invalidate", + "required":true + } + } +} diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/security.oidc_prepare_authentication.json b/rest-api-spec/src/main/resources/rest-api-spec/api/security.oidc_prepare_authentication.json new file mode 100644 index 0000000000000..fed4897037435 --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/security.oidc_prepare_authentication.json @@ -0,0 +1,28 @@ +{ + "security.oidc_prepare_authentication":{ + "documentation":{ + "url":"https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-oidc-prepare-authentication.html", + "description":"Creates an OAuth 2.0 authentication request as a URL string" + }, + "stability":"stable", + "visibility":"public", + "headers":{ + "accept": [ "application/json"], + "content_type": ["application/json"] + }, + "url":{ + "paths":[ + { + "path":"/_security/oidc/prepare", + "methods":[ + "POST" + ] + } + ] + }, + "body":{ + "description":"The OpenID Connect authentication realm configuration", + "required":true + } + } +} From f976414cf79ca17302b5bce624cb63e138840d8f Mon Sep 17 00:00:00 2001 From: Artem Prigoda Date: Thu, 17 Feb 2022 14:14:02 +0100 Subject: [PATCH 154/167] Make Metadata extend AbstractCollection (#83791) --- .../org/elasticsearch/cluster/metadata/Metadata.java | 8 +++++++- .../elasticsearch/rest/action/cat/RestIndicesAction.java | 9 ++++----- .../existence/FrozenExistenceDeciderService.java | 5 +++-- .../autoscaling/shards/FrozenShardsDeciderService.java | 3 +-- .../autoscaling/storage/FrozenStorageDeciderService.java | 3 +-- .../upgrade/SearchableSnapshotIndexMetadataUpgrader.java | 7 ++++--- .../SearchableSnapshotIndexMetadataUpgraderTests.java | 4 +--- 7 files changed, 21 insertions(+), 18 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/Metadata.java b/server/src/main/java/org/elasticsearch/cluster/metadata/Metadata.java index 975d68bdea9c8..2531ee686ae50 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/Metadata.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/Metadata.java @@ -54,6 +54,7 @@ import 
org.elasticsearch.xcontent.XContentParser; import java.io.IOException; +import java.util.AbstractCollection; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; @@ -86,7 +87,7 @@ * The details of how this is persisted are covered in {@link org.elasticsearch.gateway.PersistedClusterStateService}. *
</p>
    */ -public class Metadata implements Iterable, Diffable, ToXContentFragment { +public class Metadata extends AbstractCollection implements Diffable, ToXContentFragment { private static final Logger logger = LogManager.getLogger(Metadata.class); @@ -898,6 +899,11 @@ public Iterator iterator() { return indices.valuesIt(); } + @Override + public int size() { + return indices.size(); + } + public static boolean isGlobalStateEquals(Metadata metadata1, Metadata metadata2) { if (metadata1.coordinationMetadata.equals(metadata2.coordinationMetadata) == false) { return false; diff --git a/server/src/main/java/org/elasticsearch/rest/action/cat/RestIndicesAction.java b/server/src/main/java/org/elasticsearch/rest/action/cat/RestIndicesAction.java index 204aa3b6f002f..e6a85f30b76aa 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/cat/RestIndicesAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/cat/RestIndicesAction.java @@ -49,7 +49,6 @@ import java.util.Set; import java.util.function.Function; import java.util.stream.Collectors; -import java.util.stream.StreamSupport; import static java.util.Arrays.asList; import static org.elasticsearch.action.support.master.MasterNodeRequest.DEFAULT_MASTER_NODE_TIMEOUT; @@ -229,10 +228,10 @@ public void onResponse(final Collection responses) { .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); ClusterStateResponse stateResponse = extractResponse(responses, ClusterStateResponse.class); - Map indicesStates = StreamSupport.stream( - stateResponse.getState().getMetadata().spliterator(), - false - ).collect(Collectors.toMap(indexMetadata -> indexMetadata.getIndex().getName(), Function.identity())); + Map indicesStates = stateResponse.getState() + .getMetadata() + .stream() + .collect(Collectors.toMap(indexMetadata -> indexMetadata.getIndex().getName(), Function.identity())); ClusterHealthResponse healthResponse = extractResponse(responses, ClusterHealthResponse.class); Map indicesHealths = healthResponse.getIndices(); diff --git a/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/existence/FrozenExistenceDeciderService.java b/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/existence/FrozenExistenceDeciderService.java index dd534dfd40915..6b1035c9b7b23 100644 --- a/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/existence/FrozenExistenceDeciderService.java +++ b/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/existence/FrozenExistenceDeciderService.java @@ -25,7 +25,6 @@ import java.util.List; import java.util.Objects; import java.util.stream.Collectors; -import java.util.stream.StreamSupport; /** * This decider looks at all indices and ensures a minimum capacity is available if any indices are in the frozen ILM phase, since that @@ -45,7 +44,9 @@ public String name() { @Override public AutoscalingDeciderResult scale(Settings configuration, AutoscalingDeciderContext context) { - List indicesNeedingFrozen = StreamSupport.stream(context.state().metadata().spliterator(), false) + List indicesNeedingFrozen = context.state() + .metadata() + .stream() .filter(this::needsTier) .map(imd -> imd.getIndex().getName()) .limit(10) diff --git a/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/shards/FrozenShardsDeciderService.java b/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/shards/FrozenShardsDeciderService.java index 9d405593bb4bb..18044de7b5954 100644 --- 
a/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/shards/FrozenShardsDeciderService.java +++ b/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/shards/FrozenShardsDeciderService.java @@ -25,7 +25,6 @@ import java.io.IOException; import java.util.List; import java.util.Objects; -import java.util.stream.StreamSupport; /** * This decider enforces that on a 64GB memory node (31GB heap) we can max have 2000 shards. We arrive at 2000 because our current limit is @@ -58,7 +57,7 @@ public AutoscalingDeciderResult scale(Settings configuration, AutoscalingDecider } static int countFrozenShards(Metadata metadata) { - return StreamSupport.stream(metadata.spliterator(), false) + return metadata.stream() .filter(imd -> FrozenUtils.isFrozenIndex(imd.getSettings())) .mapToInt(IndexMetadata::getTotalNumberOfShards) .sum(); diff --git a/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/storage/FrozenStorageDeciderService.java b/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/storage/FrozenStorageDeciderService.java index dbaa812c5eafc..5c96e9029c530 100644 --- a/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/storage/FrozenStorageDeciderService.java +++ b/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/storage/FrozenStorageDeciderService.java @@ -26,7 +26,6 @@ import java.io.IOException; import java.util.List; import java.util.Objects; -import java.util.stream.StreamSupport; public class FrozenStorageDeciderService implements AutoscalingDeciderService { public static final String NAME = "frozen_storage"; @@ -42,7 +41,7 @@ public String name() { @Override public AutoscalingDeciderResult scale(Settings configuration, AutoscalingDeciderContext context) { Metadata metadata = context.state().metadata(); - long dataSetSize = StreamSupport.stream(metadata.spliterator(), false) + long dataSetSize = metadata.stream() .filter(imd -> FrozenUtils.isFrozenIndex(imd.getSettings())) .mapToLong(imd -> estimateSize(imd, context.info())) .sum(); diff --git a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/upgrade/SearchableSnapshotIndexMetadataUpgrader.java b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/upgrade/SearchableSnapshotIndexMetadataUpgrader.java index 29a05f4607f98..41810789e0646 100644 --- a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/upgrade/SearchableSnapshotIndexMetadataUpgrader.java +++ b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/upgrade/SearchableSnapshotIndexMetadataUpgrader.java @@ -24,7 +24,6 @@ import java.util.concurrent.Executor; import java.util.concurrent.atomic.AtomicBoolean; -import java.util.stream.StreamSupport; /** * This class upgrades frozen indices to apply the index.shard_limit.group=frozen setting after all nodes have been upgraded to 7.13+ @@ -91,7 +90,8 @@ public void onFailure(Exception e) { } static boolean needsUpgrade(ClusterState state) { - return StreamSupport.stream(state.metadata().spliterator(), false) + return state.metadata() + .stream() .filter( imd -> imd.getCompatibilityVersion().onOrAfter(Version.V_7_12_0) && imd.getCompatibilityVersion().before(Version.V_8_0_0) ) @@ -105,7 +105,8 @@ static ClusterState upgradeIndices(ClusterState currentState) { return currentState; } Metadata.Builder builder = 
Metadata.builder(currentState.metadata()); - StreamSupport.stream(currentState.metadata().spliterator(), false) + currentState.metadata() + .stream() .filter( imd -> imd.getCompatibilityVersion().onOrAfter(Version.V_7_12_0) && imd.getCompatibilityVersion().before(Version.V_8_0_0) ) diff --git a/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/upgrade/SearchableSnapshotIndexMetadataUpgraderTests.java b/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/upgrade/SearchableSnapshotIndexMetadataUpgraderTests.java index 6571f46cb1fd3..5430d039c3eb8 100644 --- a/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/upgrade/SearchableSnapshotIndexMetadataUpgraderTests.java +++ b/x-pack/plugin/searchable-snapshots/src/test/java/org/elasticsearch/xpack/searchablesnapshots/upgrade/SearchableSnapshotIndexMetadataUpgraderTests.java @@ -19,8 +19,6 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.VersionUtils; -import java.util.stream.StreamSupport; - import static org.elasticsearch.snapshots.SearchableSnapshotsSettings.SEARCHABLE_SNAPSHOT_STORE_TYPE; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; @@ -71,7 +69,7 @@ public void testUpgradeIndices() { assertThat(upgradedState, not(sameInstance(originalState))); assertThat(upgradedState.metadata().indices().size(), equalTo(originalState.metadata().indices().size())); - assertTrue(StreamSupport.stream(upgradedState.metadata().spliterator(), false).anyMatch(upgraded -> { + assertTrue(upgradedState.metadata().stream().anyMatch(upgraded -> { IndexMetadata original = originalState.metadata().index(upgraded.getIndex()); assertThat(original, notNullValue()); if (upgraded.isPartialSearchableSnapshot() == false From f9a64b2e8685a95fe27b89daefbe898668298b13 Mon Sep 17 00:00:00 2001 From: James Rodewig Date: Thu, 17 Feb 2022 08:24:06 -0500 Subject: [PATCH 155/167] [DOCS] Fix `ignore_unavailable` parameter definition (#84071) The current `ignore_unavailable` definition is a bit misleading. The parameter primarily determines if a request that targets a missing or closed index returns an error. --- docs/reference/eql/eql-search-api.asciidoc | 4 ++-- docs/reference/rest-api/common-parms.asciidoc | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/docs/reference/eql/eql-search-api.asciidoc b/docs/reference/eql/eql-search-api.asciidoc index 29c43271f851d..93084cad2f9e3 100644 --- a/docs/reference/eql/eql-search-api.asciidoc +++ b/docs/reference/eql/eql-search-api.asciidoc @@ -109,8 +109,8 @@ Comma-separated list of filters for the API response. See <>. `ignore_unavailable`:: -(Optional, Boolean) If `true`, missing or closed indices are not included in the -response. Defaults to `true`. +(Optional, Boolean) If `false`, the request returns an error if it targets a +missing or closed index. Defaults to `true`. `keep_alive`:: + diff --git a/docs/reference/rest-api/common-parms.asciidoc b/docs/reference/rest-api/common-parms.asciidoc index 9e01f7d5c6343..c750abb7c3ba1 100644 --- a/docs/reference/rest-api/common-parms.asciidoc +++ b/docs/reference/rest-api/common-parms.asciidoc @@ -398,8 +398,8 @@ end::ignore_throttled[] tag::index-ignore-unavailable[] `ignore_unavailable`:: -(Optional, Boolean) If `true`, missing or closed indices are not included in the -response. Defaults to `false`. 
+(Optional, Boolean) If `false`, the request returns an error if it targets a +missing or closed index. Defaults to `false`. end::index-ignore-unavailable[] tag::include-defaults[] From ae7defa9f9578f6872a3a8cdd33ffd39331ef881 Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Thu, 17 Feb 2022 15:16:26 +0100 Subject: [PATCH 156/167] Allow regular data streams to be migrated to tsdb data streams. (#83843) A regular data stream can be migrated to a tsdb data stream if in template that created the data stream, the `index_mode` field is set to `time_series` and the data stream's `index_mode` property is either not specified or set to `standard`. Then on the next rollover the data stream is migrated to be a tsdb data stream. When that happens the data stream's `index_mode` property is set to `time_series` and the new backing index's `index.mode` index setting is also set to `time_series`. Closes #83520 --- .../datastreams/TsdbDataStreamRestIT.java | 131 +++++++++++++ .../DataStreamIndexSettingsProvider.java | 12 +- ...etadataDataStreamRolloverServiceTests.java | 183 +++++++++++++++++- .../rollover/MetadataRolloverService.java | 4 +- .../cluster/metadata/DataStream.java | 49 +++-- .../cluster/metadata/DataStreamTests.java | 61 +++++- .../metadata/DataStreamTestHelper.java | 4 + .../ReactiveStorageDeciderService.java | 2 +- 8 files changed, 425 insertions(+), 21 deletions(-) diff --git a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/TsdbDataStreamRestIT.java b/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/TsdbDataStreamRestIT.java index 14ea7ddc3793d..b3a1629176770 100644 --- a/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/TsdbDataStreamRestIT.java +++ b/modules/data-streams/src/javaRestTest/java/org/elasticsearch/datastreams/TsdbDataStreamRestIT.java @@ -8,6 +8,7 @@ package org.elasticsearch.datastreams; import org.elasticsearch.client.Request; +import org.elasticsearch.client.ResponseException; import org.elasticsearch.common.time.DateFormatter; import org.elasticsearch.common.time.FormatNames; import org.elasticsearch.test.rest.ESRestTestCase; @@ -15,16 +16,19 @@ import java.io.IOException; import java.time.Instant; +import java.time.temporal.ChronoUnit; import java.util.Map; import static org.elasticsearch.cluster.metadata.DataStreamTestHelper.backingIndexEqualTo; import static org.hamcrest.Matchers.aMapWithSize; import static org.hamcrest.Matchers.contains; +import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.notNullValue; +import static org.hamcrest.Matchers.nullValue; public class TsdbDataStreamRestIT extends ESRestTestCase { @@ -84,6 +88,57 @@ public class TsdbDataStreamRestIT extends ESRestTestCase { } }"""; + private static final String NON_TSDB_TEMPLATE = """ + { + "index_patterns": ["k8s*"], + "template": { + "settings":{ + "index": { + "number_of_replicas": 0, + "number_of_shards": 2 + } + }, + "mappings":{ + "properties": { + "@timestamp" : { + "type": "date" + }, + "metricset": { + "type": "keyword" + }, + "k8s": { + "properties": { + "pod": { + "properties": { + "uid": { + "type": "keyword" + }, + "name": { + "type": "keyword" + }, + "ip": { + "type": "ip" + }, + "network": { + "properties": { + "tx": { + "type": "long" + }, + "rx": { + "type": "long" + } + } + } + } + } + } + } + } + } + 
}, + "data_stream": {} + }"""; + private static final String DOC = """ { "@timestamp": "$time", @@ -235,6 +290,82 @@ public void testSubsequentRollovers() throws Exception { } } + public void testMigrateRegularDataStreamToTsdbDataStream() throws Exception { + // Create a non tsdb template + var putComposableIndexTemplateRequest = new Request("POST", "/_index_template/1"); + putComposableIndexTemplateRequest.setJsonEntity(NON_TSDB_TEMPLATE); + assertOK(client().performRequest(putComposableIndexTemplateRequest)); + + // Index a few docs and sometimes rollover + int numRollovers = 4; + int numDocs = 32; + var currentTime = Instant.now(); + var currentMinus30Days = currentTime.minus(30, ChronoUnit.DAYS); + for (int i = 0; i < numRollovers; i++) { + for (int j = 0; j < numDocs; j++) { + var indexRequest = new Request("POST", "/k8s/_doc"); + var time = Instant.ofEpochMilli(randomLongBetween(currentMinus30Days.toEpochMilli(), currentTime.toEpochMilli())); + indexRequest.setJsonEntity(DOC.replace("$time", formatInstant(time))); + var response = client().performRequest(indexRequest); + assertOK(response); + var responseBody = entityAsMap(response); + // i rollovers and +1 offset: + assertThat((String) responseBody.get("_index"), backingIndexEqualTo("k8s", i + 1)); + } + var rolloverRequest = new Request("POST", "/k8s/_rollover"); + var rolloverResponse = client().performRequest(rolloverRequest); + assertOK(rolloverResponse); + var rolloverResponseBody = entityAsMap(rolloverResponse); + assertThat(rolloverResponseBody.get("rolled_over"), is(true)); + } + + var getDataStreamsRequest = new Request("GET", "/_data_stream"); + var getDataStreamResponse = client().performRequest(getDataStreamsRequest); + assertOK(getDataStreamResponse); + var dataStreams = entityAsMap(getDataStreamResponse); + assertThat(ObjectPath.evaluate(dataStreams, "data_streams.0.name"), equalTo("k8s")); + assertThat(ObjectPath.evaluate(dataStreams, "data_streams.0.generation"), equalTo(5)); + for (int i = 0; i < 5; i++) { + String backingIndex = ObjectPath.evaluate(dataStreams, "data_streams.0.indices." 
+ i + ".index_name"); + assertThat(backingIndex, backingIndexEqualTo("k8s", i + 1)); + var indices = getIndex(backingIndex); + var escapedBackingIndex = backingIndex.replace(".", "\\."); + assertThat(ObjectPath.evaluate(indices, escapedBackingIndex + ".data_stream"), equalTo("k8s")); + assertThat(ObjectPath.evaluate(indices, escapedBackingIndex + ".settings.index.mode"), nullValue()); + assertThat(ObjectPath.evaluate(indices, escapedBackingIndex + ".settings.index.time_series.start_time"), nullValue()); + assertThat(ObjectPath.evaluate(indices, escapedBackingIndex + ".settings.index.time_series.end_time"), nullValue()); + } + + // Update template + putComposableIndexTemplateRequest = new Request("POST", "/_index_template/1"); + putComposableIndexTemplateRequest.setJsonEntity(TEMPLATE); + assertOK(client().performRequest(putComposableIndexTemplateRequest)); + + var rolloverRequest = new Request("POST", "/k8s/_rollover"); + var rolloverResponse = client().performRequest(rolloverRequest); + assertOK(rolloverResponse); + var rolloverResponseBody = entityAsMap(rolloverResponse); + assertThat(rolloverResponseBody.get("rolled_over"), is(true)); + var newIndex = (String) rolloverResponseBody.get("new_index"); + assertThat(newIndex, backingIndexEqualTo("k8s", 6)); + + // Ingest documents that will land in the new tsdb backing index: + for (int i = 0; i < numDocs; i++) { + var indexRequest = new Request("POST", "/k8s/_doc"); + indexRequest.setJsonEntity(DOC.replace("$time", formatInstant(currentTime))); + var response = client().performRequest(indexRequest); + assertOK(response); + var responseBody = entityAsMap(response); + assertThat((String) responseBody.get("_index"), backingIndexEqualTo("k8s", 6)); + } + + // Fail if documents target older non tsdb backing index: + var indexRequest = new Request("POST", "/k8s/_doc"); + indexRequest.setJsonEntity(DOC.replace("$time", formatInstant(currentMinus30Days))); + var e = expectThrows(ResponseException.class, () -> client().performRequest(indexRequest)); + assertThat(e.getMessage(), containsString("is outside of ranges of currently writable indices")); + } + private static Map getIndex(String indexName) throws IOException { var getIndexRequest = new Request("GET", "/" + indexName + "?human"); var response = client().performRequest(getIndexRequest); diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamIndexSettingsProvider.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamIndexSettingsProvider.java index 41126f6d9f0df..531dcc1ac9a19 100644 --- a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamIndexSettingsProvider.java +++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamIndexSettingsProvider.java @@ -36,8 +36,16 @@ public Settings getAdditionalIndexSettings( ) { if (dataStreamName != null) { DataStream dataStream = metadata.dataStreams().get(dataStreamName); + // First backing index is created and then data stream is rolled over (in a single cluster state update). 
+ // So at this point we can't check index_mode==time_series, + // so checking that index_mode==null|standard and templateIndexMode == TIME_SERIES + boolean migrating = dataStream != null + && (dataStream.getIndexMode() == null || dataStream.getIndexMode() == IndexMode.STANDARD) + && templateIndexMode == IndexMode.TIME_SERIES; IndexMode indexMode; - if (dataStream != null) { + if (migrating) { + indexMode = IndexMode.TIME_SERIES; + } else if (dataStream != null) { indexMode = dataStream.getIndexMode(); } else { indexMode = templateIndexMode; @@ -50,7 +58,7 @@ public Settings getAdditionalIndexSettings( TimeValue lookAheadTime = IndexSettings.LOOK_AHEAD_TIME.get(allSettings); final Instant start; final Instant end; - if (dataStream == null) { + if (dataStream == null || migrating) { start = resolvedAt.minusMillis(lookAheadTime.getMillis()); end = resolvedAt.plusMillis(lookAheadTime.getMillis()); } else { diff --git a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/MetadataDataStreamRolloverServiceTests.java b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/MetadataDataStreamRolloverServiceTests.java index 84c897f4717be..d7d2652481534 100644 --- a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/MetadataDataStreamRolloverServiceTests.java +++ b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/MetadataDataStreamRolloverServiceTests.java @@ -44,6 +44,7 @@ import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.lessThanOrEqualTo; +import static org.hamcrest.Matchers.notNullValue; public class MetadataDataStreamRolloverServiceTests extends ESTestCase { @@ -63,7 +64,7 @@ public void testRolloverClusterStateForDataStream() throws Exception { IndexMode.TIME_SERIES ); ComposableIndexTemplate template = new ComposableIndexTemplate.Builder().indexPatterns(List.of(dataStream.getName() + "*")) - .template(new Template(Settings.builder().put("index.mode", "time_series").build(), null, null)) + .template(new Template(Settings.builder().put("index.routing_path", "uid").build(), null, null)) .dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate(false, false, IndexMode.TIME_SERIES)) .build(); Metadata.Builder builder = Metadata.builder(); @@ -75,6 +76,7 @@ public void testRolloverClusterStateForDataStream() throws Exception { .put("index.hidden", true) .put(SETTING_INDEX_UUID, dataStream.getWriteIndex().getUUID()) .put("index.mode", "time_series") + .put("index.routing_path", "uid") .put("index.time_series.start_time", FORMATTER.format(now.minus(4, ChronoUnit.HOURS))) .put("index.time_series.end_time", FORMATTER.format(now.minus(2, ChronoUnit.HOURS))) ) @@ -144,4 +146,183 @@ public void testRolloverClusterStateForDataStream() throws Exception { } } + public void testRolloverAndMigrateDataStream() throws Exception { + Instant now = Instant.now().truncatedTo(ChronoUnit.MILLIS); + String dataStreamName = "logs-my-app"; + IndexMode dsIndexMode = randomBoolean() ? 
null : IndexMode.STANDARD; + final DataStream dataStream = new DataStream( + dataStreamName, + new DataStream.TimestampField("@timestamp"), + List.of(new Index(DataStream.getDefaultBackingIndexName(dataStreamName, 1, now.toEpochMilli()), "uuid")), + 1, + null, + false, + false, + false, + false, + dsIndexMode + ); + ComposableIndexTemplate template = new ComposableIndexTemplate.Builder().indexPatterns(List.of(dataStream.getName() + "*")) + .template(new Template(Settings.builder().put("index.routing_path", "uid").build(), null, null)) + .dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate(false, false, IndexMode.TIME_SERIES)) + .build(); + Metadata.Builder builder = Metadata.builder(); + builder.put("template", template); + Settings.Builder indexSettings = ESTestCase.settings(Version.CURRENT) + .put("index.hidden", true) + .put(SETTING_INDEX_UUID, dataStream.getWriteIndex().getUUID()); + if (dsIndexMode != null) { + indexSettings.put("index.mode", dsIndexMode.getName()); + } + builder.put( + IndexMetadata.builder(dataStream.getWriteIndex().getName()).settings(indexSettings).numberOfShards(1).numberOfReplicas(0) + ); + builder.put(dataStream); + final ClusterState clusterState = ClusterState.builder(new ClusterName("test")).metadata(builder).build(); + + ThreadPool testThreadPool = new TestThreadPool(getTestName()); + try { + MetadataRolloverService rolloverService = DataStreamTestHelper.getMetadataRolloverService( + dataStream, + testThreadPool, + Set.of(new DataStreamIndexSettingsProvider()), + xContentRegistry() + ); + MaxDocsCondition condition = new MaxDocsCondition(randomNonNegativeLong()); + List> metConditions = Collections.singletonList(condition); + CreateIndexRequest createIndexRequest = new CreateIndexRequest("_na_"); + + MetadataRolloverService.RolloverResult rolloverResult = rolloverService.rolloverClusterState( + clusterState, + dataStream.getName(), + null, + createIndexRequest, + metConditions, + now, + randomBoolean(), + false + ); + + String sourceIndexName = DataStream.getDefaultBackingIndexName(dataStream.getName(), dataStream.getGeneration()); + String newIndexName = DataStream.getDefaultBackingIndexName(dataStream.getName(), dataStream.getGeneration() + 1); + assertEquals(sourceIndexName, rolloverResult.sourceIndexName()); + assertEquals(newIndexName, rolloverResult.rolloverIndexName()); + Metadata rolloverMetadata = rolloverResult.clusterState().metadata(); + assertEquals(dataStream.getIndices().size() + 1, rolloverMetadata.indices().size()); + + // Assert data stream's index_mode has been changed to time_series. 
+ assertThat(rolloverMetadata.dataStreams().get(dataStreamName), notNullValue()); + assertThat(rolloverMetadata.dataStreams().get(dataStreamName).getIndexMode(), equalTo(IndexMode.TIME_SERIES)); + + // Nothing changed for the original backing index: + IndexMetadata im = rolloverMetadata.index(rolloverMetadata.dataStreams().get(dataStreamName).getIndices().get(0)); + assertThat(IndexSettings.MODE.get(im.getSettings()), equalTo(IndexMode.STANDARD)); + assertThat(IndexSettings.TIME_SERIES_START_TIME.exists(im.getSettings()), is(false)); + assertThat(IndexSettings.TIME_SERIES_END_TIME.exists(im.getSettings()), is(false)); + // New backing index is a tsdb index: + im = rolloverMetadata.index(rolloverMetadata.dataStreams().get(dataStreamName).getIndices().get(1)); + assertThat(IndexSettings.MODE.get(im.getSettings()), equalTo(IndexMode.TIME_SERIES)); + Instant startTime = IndexSettings.TIME_SERIES_START_TIME.get(im.getSettings()); + Instant endTime = IndexSettings.TIME_SERIES_END_TIME.get(im.getSettings()); + assertThat(startTime.isBefore(endTime), is(true)); + assertThat(startTime, equalTo(now.minus(2, ChronoUnit.HOURS))); + assertThat(endTime, equalTo(now.plus(2, ChronoUnit.HOURS))); + } finally { + testThreadPool.shutdown(); + } + } + + public void testChangingIndexModeFromTimeSeriesToSomethingElseNoEffectOnExistingDataStreams() throws Exception { + Instant now = Instant.now().truncatedTo(ChronoUnit.MILLIS); + String dataStreamName = "logs-my-app"; + final DataStream dataStream = new DataStream( + dataStreamName, + new DataStream.TimestampField("@timestamp"), + List.of(new Index(DataStream.getDefaultBackingIndexName(dataStreamName, 1, now.toEpochMilli()), "uuid")), + 1, + null, + false, + false, + false, + false, + IndexMode.TIME_SERIES + ); + ComposableIndexTemplate template = new ComposableIndexTemplate.Builder().indexPatterns(List.of(dataStream.getName() + "*")) + .template(new Template(Settings.builder().put("index.routing_path", "uid").build(), null, null)) + .dataStreamTemplate(new ComposableIndexTemplate.DataStreamTemplate(false, false, randomBoolean() ? 
IndexMode.STANDARD : null)) + .build(); + Metadata.Builder builder = Metadata.builder(); + builder.put("template", template); + builder.put( + IndexMetadata.builder(dataStream.getWriteIndex().getName()) + .settings( + ESTestCase.settings(Version.CURRENT) + .put("index.hidden", true) + .put(SETTING_INDEX_UUID, dataStream.getWriteIndex().getUUID()) + .put("index.mode", "time_series") + .put("index.routing_path", "uid") + .put("index.time_series.start_time", FORMATTER.format(now.minus(4, ChronoUnit.HOURS))) + .put("index.time_series.end_time", FORMATTER.format(now.minus(2, ChronoUnit.HOURS))) + ) + .numberOfShards(1) + .numberOfReplicas(0) + ); + builder.put(dataStream); + final ClusterState clusterState = ClusterState.builder(new ClusterName("test")).metadata(builder).build(); + + ThreadPool testThreadPool = new TestThreadPool(getTestName()); + try { + MetadataRolloverService rolloverService = DataStreamTestHelper.getMetadataRolloverService( + dataStream, + testThreadPool, + Set.of(new DataStreamIndexSettingsProvider()), + xContentRegistry() + ); + MaxDocsCondition condition = new MaxDocsCondition(randomNonNegativeLong()); + List> metConditions = Collections.singletonList(condition); + CreateIndexRequest createIndexRequest = new CreateIndexRequest("_na_"); + + MetadataRolloverService.RolloverResult rolloverResult = rolloverService.rolloverClusterState( + clusterState, + dataStream.getName(), + null, + createIndexRequest, + metConditions, + now, + randomBoolean(), + false + ); + + String sourceIndexName = DataStream.getDefaultBackingIndexName(dataStream.getName(), dataStream.getGeneration()); + String newIndexName = DataStream.getDefaultBackingIndexName(dataStream.getName(), dataStream.getGeneration() + 1); + assertEquals(sourceIndexName, rolloverResult.sourceIndexName()); + assertEquals(newIndexName, rolloverResult.rolloverIndexName()); + Metadata rolloverMetadata = rolloverResult.clusterState().metadata(); + assertEquals(dataStream.getIndices().size() + 1, rolloverMetadata.indices().size()); + + // Assert data stream's index_mode remains time_series. 
+ assertThat(rolloverMetadata.dataStreams().get(dataStreamName), notNullValue()); + assertThat(rolloverMetadata.dataStreams().get(dataStreamName).getIndexMode(), equalTo(IndexMode.TIME_SERIES)); + + // Nothing changed for the original tsdb backing index: + IndexMetadata im = rolloverMetadata.index(rolloverMetadata.dataStreams().get(dataStreamName).getIndices().get(0)); + assertThat(IndexSettings.MODE.exists(im.getSettings()), is(true)); + Instant startTime = IndexSettings.TIME_SERIES_START_TIME.get(im.getSettings()); + Instant endTime = IndexSettings.TIME_SERIES_END_TIME.get(im.getSettings()); + assertThat(startTime.isBefore(endTime), is(true)); + assertThat(startTime, equalTo(now.minus(4, ChronoUnit.HOURS))); + assertThat(endTime, equalTo(now.minus(2, ChronoUnit.HOURS))); + // New backing index is also a tsdb index: + im = rolloverMetadata.index(rolloverMetadata.dataStreams().get(dataStreamName).getIndices().get(1)); + assertThat(IndexSettings.MODE.get(im.getSettings()), equalTo(IndexMode.TIME_SERIES)); + startTime = IndexSettings.TIME_SERIES_START_TIME.get(im.getSettings()); + endTime = IndexSettings.TIME_SERIES_END_TIME.get(im.getSettings()); + assertThat(startTime.isBefore(endTime), is(true)); + assertThat(startTime, equalTo(now.minus(2, ChronoUnit.HOURS))); + assertThat(endTime, equalTo(now.plus(2, ChronoUnit.HOURS))); + } finally { + testThreadPool.shutdown(); + } + } + } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/MetadataRolloverService.java b/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/MetadataRolloverService.java index a0b0f7341ce10..cdea744b6fec1 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/MetadataRolloverService.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/MetadataRolloverService.java @@ -278,7 +278,9 @@ private RolloverResult rolloverDataStream( currentState, createIndexClusterStateRequest, silent, - (builder, indexMetadata) -> builder.put(ds.rollover(indexMetadata.getIndex(), newGeneration)) + (builder, indexMetadata) -> builder.put( + ds.rollover(indexMetadata.getIndex(), newGeneration, templateV2.getDataStreamTemplate().getIndexMode()) + ) ); RolloverInfo rolloverInfo = new RolloverInfo(dataStreamName, metConditions, threadPool.absoluteTimeInMillis()); diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/DataStream.java b/server/src/main/java/org/elasticsearch/cluster/metadata/DataStream.java index 3e1ef209c3ea6..f16812a1570ef 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/DataStream.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/DataStream.java @@ -171,8 +171,14 @@ public Index selectTimeSeriesWriteIndex(Instant timestamp, Metadata metadata) { Index index = indices.get(i); IndexMetadata im = metadata.index(index); - // TODO: make start and end time fields in IndexMetadata class. + // TODO: make index_mode, start and end time fields in IndexMetadata class. // (this to avoid the overhead that occurs when reading a setting) + if (IndexSettings.MODE.get(im.getSettings()) != IndexMode.TIME_SERIES) { + // Not a tsdb backing index, so skip. + // (This can happen is this is a migrated tsdb data stream) + continue; + } + Instant start = IndexSettings.TIME_SERIES_START_TIME.get(im.getSettings()); Instant end = IndexSettings.TIME_SERIES_END_TIME.get(im.getSettings()); // Check should be in sync with DataStreamTimestampFieldMapper#validateTimestamp(...) 
method @@ -192,12 +198,19 @@ public Index selectTimeSeriesWriteIndex(Instant timestamp, Metadata metadata) { public void validate(Function imSupplier) { if (indexMode == IndexMode.TIME_SERIES) { // Get a sorted overview of each backing index with there start and end time range: - var startAndEndTimes = indices.stream().map(index -> imSupplier.apply(index.getName())).map(im -> { - Instant start = IndexSettings.TIME_SERIES_START_TIME.get(im.getSettings()); - Instant end = IndexSettings.TIME_SERIES_END_TIME.get(im.getSettings()); - assert end.isAfter(start); // This is also validated by TIME_SERIES_END_TIME setting. - return new Tuple<>(im.getIndex().getName(), new Tuple<>(start, end)); - }) + var startAndEndTimes = indices.stream() + .map(index -> imSupplier.apply(index.getName())) + .filter( + // Migrated tsdb data streams have non tsdb backing indices: + im -> IndexSettings.TIME_SERIES_START_TIME.exists(im.getSettings()) + && IndexSettings.TIME_SERIES_END_TIME.exists(im.getSettings()) + ) + .map(im -> { + Instant start = IndexSettings.TIME_SERIES_START_TIME.get(im.getSettings()); + Instant end = IndexSettings.TIME_SERIES_END_TIME.get(im.getSettings()); + assert end.isAfter(start); // This is also validated by TIME_SERIES_END_TIME setting. + return new Tuple<>(im.getIndex().getName(), new Tuple<>(start, end)); + }) .sorted(Comparator.comparing(entry -> entry.v2().v1())) // Sort by start time .collect(Collectors.toList()); @@ -265,21 +278,29 @@ public IndexMode getIndexMode() { * Performs a rollover on a {@code DataStream} instance and returns a new instance containing * the updated list of backing indices and incremented generation. * - * @param writeIndex new write index - * @param generation new generation + * @param writeIndex new write index + * @param generation new generation + * @param indexModeFromTemplate the index mode as is defined in the template that created this data stream * * @return new {@code DataStream} instance with the rollover operation applied */ - public DataStream rollover(Index writeIndex, long generation) { + public DataStream rollover(Index writeIndex, long generation, IndexMode indexModeFromTemplate) { ensureNotReplicated(); - return unsafeRollover(writeIndex, generation); + return unsafeRollover(writeIndex, generation, indexModeFromTemplate); } /** - * Like {@link #rollover(Index, long)}, but does no validation, use with care only. + * Like {@link #rollover(Index, long, IndexMode)}, but does no validation, use with care only. */ - public DataStream unsafeRollover(Index writeIndex, long generation) { + public DataStream unsafeRollover(Index writeIndex, long generation, IndexMode indexModeFromTemplate) { + IndexMode indexMode = this.indexMode; + // This allows for migrating a data stream to be a tsdb data stream: + // (only if index_mode=null|standard then allow it to be set to time_series) + if ((indexMode == null || indexMode == IndexMode.STANDARD) && indexModeFromTemplate == IndexMode.TIME_SERIES) { + indexMode = IndexMode.TIME_SERIES; + } + List backingIndices = new ArrayList<>(indices); backingIndices.add(writeIndex); return new DataStream( @@ -298,7 +319,7 @@ public DataStream unsafeRollover(Index writeIndex, long generation) { /** * Performs a dummy rollover on a {@code DataStream} instance and returns the tuple of the next write index name and next generation - * that this {@code DataStream} should roll over to using {@link #rollover(Index, long)}. + * that this {@code DataStream} should roll over to using {@link #rollover(Index, long, IndexMode)}. 
* * @param clusterMetadata Cluster metadata * diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/DataStreamTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/DataStreamTests.java index 472d69b19c619..63b73cbaa77cd 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/DataStreamTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/DataStreamTests.java @@ -14,6 +14,7 @@ import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.core.Tuple; import org.elasticsearch.index.Index; +import org.elasticsearch.index.IndexMode; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.mapper.DateFieldMapper; import org.elasticsearch.test.AbstractSerializingTestCase; @@ -60,7 +61,7 @@ protected DataStream createTestInstance() { public void testRollover() { DataStream ds = DataStreamTestHelper.randomInstance().promoteDataStream(); Tuple newCoordinates = ds.nextWriteIndexAndGeneration(Metadata.EMPTY_METADATA); - final DataStream rolledDs = ds.rollover(new Index(newCoordinates.v1(), UUIDs.randomBase64UUID()), newCoordinates.v2()); + final DataStream rolledDs = ds.rollover(new Index(newCoordinates.v1(), UUIDs.randomBase64UUID()), newCoordinates.v2(), null); assertThat(rolledDs.getName(), equalTo(ds.getName())); assertThat(rolledDs.getTimeStampField(), equalTo(ds.getTimeStampField())); assertThat(rolledDs.getGeneration(), equalTo(ds.getGeneration() + 1)); @@ -86,13 +87,69 @@ public void testRolloverWithConflictingBackingIndexName() { } final Tuple newCoordinates = ds.nextWriteIndexAndGeneration(builder.build()); - final DataStream rolledDs = ds.rollover(new Index(newCoordinates.v1(), UUIDs.randomBase64UUID()), newCoordinates.v2()); + final DataStream rolledDs = ds.rollover(new Index(newCoordinates.v1(), UUIDs.randomBase64UUID()), newCoordinates.v2(), null); assertThat(rolledDs.getName(), equalTo(ds.getName())); assertThat(rolledDs.getTimeStampField(), equalTo(ds.getTimeStampField())); assertThat(rolledDs.getGeneration(), equalTo(ds.getGeneration() + numConflictingIndices + 1)); assertThat(rolledDs.getIndices().size(), equalTo(ds.getIndices().size() + 1)); assertTrue(rolledDs.getIndices().containsAll(ds.getIndices())); assertTrue(rolledDs.getIndices().contains(rolledDs.getWriteIndex())); + assertThat(rolledDs.getIndexMode(), equalTo(ds.getIndexMode())); + } + + public void testRolloverIndexMode() { + IndexMode indexMode = randomBoolean() ? 
IndexMode.STANDARD : null; + DataStream ds = DataStreamTestHelper.randomInstance().promoteDataStream(); + // Unsure index_mode=null + ds = new DataStream( + ds.getName(), + ds.getTimeStampField(), + ds.getIndices(), + ds.getGeneration(), + ds.getMetadata(), + ds.isHidden(), + ds.isReplicated(), + ds.isSystem(), + ds.isAllowCustomRouting(), + indexMode + ); + var newCoordinates = ds.nextWriteIndexAndGeneration(Metadata.EMPTY_METADATA); + + var rolledDs = ds.rollover(new Index(newCoordinates.v1(), UUIDs.randomBase64UUID()), newCoordinates.v2(), IndexMode.TIME_SERIES); + assertThat(rolledDs.getName(), equalTo(ds.getName())); + assertThat(rolledDs.getTimeStampField(), equalTo(ds.getTimeStampField())); + assertThat(rolledDs.getGeneration(), equalTo(ds.getGeneration() + 1)); + assertThat(rolledDs.getIndices().size(), equalTo(ds.getIndices().size() + 1)); + assertTrue(rolledDs.getIndices().containsAll(ds.getIndices())); + assertTrue(rolledDs.getIndices().contains(rolledDs.getWriteIndex())); + assertThat(rolledDs.getIndexMode(), equalTo(IndexMode.TIME_SERIES)); + } + + public void testRolloverIndexMode_keepIndexMode() { + DataStream ds = DataStreamTestHelper.randomInstance().promoteDataStream(); + ds = new DataStream( + ds.getName(), + ds.getTimeStampField(), + ds.getIndices(), + ds.getGeneration(), + ds.getMetadata(), + ds.isHidden(), + ds.isReplicated(), + ds.isSystem(), + ds.isAllowCustomRouting(), + IndexMode.TIME_SERIES + ); + var newCoordinates = ds.nextWriteIndexAndGeneration(Metadata.EMPTY_METADATA); + + IndexMode indexMode = randomBoolean() ? IndexMode.STANDARD : null; + var rolledDs = ds.rollover(new Index(newCoordinates.v1(), UUIDs.randomBase64UUID()), newCoordinates.v2(), indexMode); + assertThat(rolledDs.getName(), equalTo(ds.getName())); + assertThat(rolledDs.getTimeStampField(), equalTo(ds.getTimeStampField())); + assertThat(rolledDs.getGeneration(), equalTo(ds.getGeneration() + 1)); + assertThat(rolledDs.getIndices().size(), equalTo(ds.getIndices().size() + 1)); + assertTrue(rolledDs.getIndices().containsAll(ds.getIndices())); + assertTrue(rolledDs.getIndices().contains(rolledDs.getWriteIndex())); + assertThat(rolledDs.getIndexMode(), equalTo(IndexMode.TIME_SERIES)); } public void testRemoveBackingIndex() { diff --git a/test/framework/src/main/java/org/elasticsearch/cluster/metadata/DataStreamTestHelper.java b/test/framework/src/main/java/org/elasticsearch/cluster/metadata/DataStreamTestHelper.java index 584a2a130afac..3e81b71292390 100644 --- a/test/framework/src/main/java/org/elasticsearch/cluster/metadata/DataStreamTestHelper.java +++ b/test/framework/src/main/java/org/elasticsearch/cluster/metadata/DataStreamTestHelper.java @@ -289,6 +289,10 @@ public static ClusterState getClusterStateWithDataStreams( boolean replicated ) { Metadata.Builder builder = Metadata.builder(); + builder.put( + "template_1", + new ComposableIndexTemplate(List.of("*"), null, null, null, null, null, new ComposableIndexTemplate.DataStreamTemplate()) + ); List allIndices = new ArrayList<>(); for (Tuple dsTuple : dataStreams) { diff --git a/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/storage/ReactiveStorageDeciderService.java b/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/storage/ReactiveStorageDeciderService.java index 9a10b5a98952c..1989fece08243 100644 --- a/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/storage/ReactiveStorageDeciderService.java +++ 
b/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/storage/ReactiveStorageDeciderService.java @@ -558,7 +558,7 @@ private SingleForecast forecast(Metadata metadata, IndexAbstraction.DataStream s for (int i = 0; i < numberNewIndices; ++i) { final String uuid = UUIDs.randomBase64UUID(); final Tuple rolledDataStreamInfo = dataStream.unsafeNextWriteIndexAndGeneration(state.metadata()); - dataStream = dataStream.unsafeRollover(new Index(rolledDataStreamInfo.v1(), uuid), rolledDataStreamInfo.v2()); + dataStream = dataStream.unsafeRollover(new Index(rolledDataStreamInfo.v1(), uuid), rolledDataStreamInfo.v2(), null); // this unintentionally copies the in-sync allocation ids too. This has the fortunate effect of these indices // not being regarded new by the disk threshold decider, thereby respecting the low watermark threshold even for primaries. From 8ff13361276416cc1d6de43e047902ce227bc6aa Mon Sep 17 00:00:00 2001 From: James Rodewig Date: Thu, 17 Feb 2022 09:20:17 -0500 Subject: [PATCH 157/167] [DOCS] Remove note about partial response from Bulk API docs (#84053) The bulk API response with a `200 OK` HTTP status always returns an entry for each action in the request. Partial responses aren't applicable. --- docs/reference/docs/bulk.asciidoc | 8 -------- docs/reference/docs/data-replication.asciidoc | 1 - 2 files changed, 9 deletions(-) diff --git a/docs/reference/docs/bulk.asciidoc b/docs/reference/docs/bulk.asciidoc index 8a06f641d0dd2..0b8ecdb3bd9f7 100644 --- a/docs/reference/docs/bulk.asciidoc +++ b/docs/reference/docs/bulk.asciidoc @@ -351,14 +351,6 @@ The bulk API's response contains the individual results of each operation in the request, returned in the order submitted. The success or failure of an individual operation does not affect other operations in the request. -[[bulk-partial-responses]] -.Partial responses -**** -To ensure fast responses, the bulk API will respond with partial results if one -or more shards fail. See <> for more -information. -**** - `took`:: (integer) How long, in milliseconds, it took to process the bulk request. diff --git a/docs/reference/docs/data-replication.asciidoc b/docs/reference/docs/data-replication.asciidoc index d9cc3c6ab0224..9503b6b6bb29d 100644 --- a/docs/reference/docs/data-replication.asciidoc +++ b/docs/reference/docs/data-replication.asciidoc @@ -126,7 +126,6 @@ respond with partial results if one or more shards fail: * <> * <> -* <> * <> Responses containing partial results still provide a `200 OK` HTTP status code. From c84c7d484e40925c82f0c3bca3a387cf2ff35125 Mon Sep 17 00:00:00 2001 From: weizijun Date: Thu, 17 Feb 2022 22:39:57 +0800 Subject: [PATCH 158/167] TSDB: Reject the nested object fields that are configured time_series_dimension (#83920) At the moment we really don't know what configuring a `time_series_dimension` should *do* when there are nested documents. So, for now, we're going to disable it. One day when someone has a good idea of how it should work we can build that. But for now we don't want to guess wrong and then lock us into some annoying behavior that no one needs but we have to support for backwards compatibility reasons. 
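For illustration, a condensed sketch of the mapping shape that is now rejected, written in the style of the NestedObjectMapperTests change further below; the field name "dim" and the error message mirror the YAML test in this patch.

    Exception e = expectThrows(IllegalArgumentException.class, () -> createDocumentMapper(mapping(b -> {
        b.startObject("nested");
        {
            b.field("type", "nested");
            b.startObject("properties");
            {
                // A dimension field under a nested mapper now fails mapping validation.
                b.startObject("dim").field("type", "keyword").field("time_series_dimension", true).endObject();
            }
            b.endObject();
        }
        b.endObject();
    })));
    assertThat(e.getMessage(), containsString("time_series_dimension can't be configured in nested field [nested.dim]"));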
Closes: #83915 --- docs/changelog/83920.yaml | 5 ++ ...dimension_and_metric_in_non_tsdb_index.yml | 18 ++----- .../rest-api-spec/test/tsdb/20_mapping.yml | 33 ++++++++++++- .../index/mapper/IpFieldMapper.java | 9 ++++ .../index/mapper/KeywordFieldMapper.java | 9 ++++ .../index/mapper/NumberFieldMapper.java | 9 ++++ .../index/mapper/NestedObjectMapperTests.java | 49 +++++++++++++++++++ .../unsignedlong/UnsignedLongFieldMapper.java | 9 ++++ 8 files changed, 126 insertions(+), 15 deletions(-) create mode 100644 docs/changelog/83920.yaml diff --git a/docs/changelog/83920.yaml b/docs/changelog/83920.yaml new file mode 100644 index 0000000000000..95bfc2536fc84 --- /dev/null +++ b/docs/changelog/83920.yaml @@ -0,0 +1,5 @@ +pr: 83920 +summary: "TSDB: Reject the nested object fields that are configured time_series_dimension" +area: TSDB +type: enhancement +issues: [] diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/05_dimension_and_metric_in_non_tsdb_index.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/05_dimension_and_metric_in_non_tsdb_index.yml index 04b2df3359068..4b6a376637617 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/05_dimension_and_metric_in_non_tsdb_index.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/05_dimension_and_metric_in_non_tsdb_index.yml @@ -216,12 +216,13 @@ no _tsid in standard indices: - is_false: fields._tsid # _tsid metadata field must not exist in non-time-series indices --- -nested dimensions: +no nested dimensions: - skip: - version: all - reason: Awaits fix https://github.com/elastic/elasticsearch/issues/83915 + version: " - 8.1.99" + reason: introduced in 8.2.0 - do: + catch: /time_series_dimension can't be configured in nested field \[nested.dim\]/ indices.create: index: test body: @@ -235,14 +236,3 @@ nested dimensions: dim: type: keyword time_series_dimension: true - - - do: - index: - index: test - refresh: true - body: - "@timestamp": "2021-04-28T18:35:24.467Z" - nested: - - dim: foo - - dim: bar - - dim: baz diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/20_mapping.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/20_mapping.yml index 3774526653b03..59f480e42c98a 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/20_mapping.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/20_mapping.yml @@ -312,7 +312,7 @@ nested dimensions: reason: message changed in 8.2.0 - do: - catch: /cannot have nested fields when index is in \[index.mode=time_series\]/ + catch: /time_series_dimension can't be configured in nested field \[nested.dim\]/ indices.create: index: test body: @@ -333,3 +333,34 @@ nested dimensions: dim: type: keyword time_series_dimension: true + +--- +nested fields: + - skip: + version: " - 8.1.99" + reason: message changed in 8.2.0 + + - do: + catch: /cannot have nested fields when index is in \[index.mode=time_series\]/ + indices.create: + index: test + body: + settings: + index: + mode: time_series + routing_path: [dim] + time_series: + start_time: 2021-04-28T00:00:00Z + end_time: 2021-04-29T00:00:00Z + mappings: + properties: + "@timestamp": + type: date + dim: + type: keyword + time_series_dimension: true + nested: + type: nested + properties: + foo: + type: keyword diff --git a/server/src/main/java/org/elasticsearch/index/mapper/IpFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/IpFieldMapper.java index 
901f0bda6801f..ab1b580d7a5be 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/IpFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/IpFieldMapper.java @@ -520,4 +520,13 @@ protected void indexScriptValues( public FieldMapper.Builder getMergeBuilder() { return new Builder(simpleName(), scriptCompiler, ignoreMalformedByDefault, indexCreatedVersion).dimension(dimension).init(this); } + + @Override + public void doValidate(MappingLookup lookup) { + if (dimension && null != lookup.nestedLookup().getNestedParent(name())) { + throw new IllegalArgumentException( + TimeSeriesParams.TIME_SERIES_DIMENSION_PARAM + " can't be configured in nested field [" + name() + "]" + ); + } + } } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/KeywordFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/KeywordFieldMapper.java index bdc9976208d4e..b3835364a9e36 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/KeywordFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/KeywordFieldMapper.java @@ -979,4 +979,13 @@ protected String contentType() { public FieldMapper.Builder getMergeBuilder() { return new Builder(simpleName(), indexAnalyzers, scriptCompiler).dimension(dimension).init(this); } + + @Override + public void doValidate(MappingLookup lookup) { + if (dimension && null != lookup.nestedLookup().getNestedParent(name())) { + throw new IllegalArgumentException( + TimeSeriesParams.TIME_SERIES_DIMENSION_PARAM + " can't be configured in nested field [" + name() + "]" + ); + } + } } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapper.java index 6abfd25f194b0..bc6e9fb1f1ef6 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapper.java @@ -1501,4 +1501,13 @@ public FieldMapper.Builder getMergeBuilder() { .metric(metricType) .init(this); } + + @Override + public void doValidate(MappingLookup lookup) { + if (dimension && null != lookup.nestedLookup().getNestedParent(name())) { + throw new IllegalArgumentException( + TimeSeriesParams.TIME_SERIES_DIMENSION_PARAM + " can't be configured in nested field [" + name() + "]" + ); + } + } } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/NestedObjectMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/NestedObjectMapperTests.java index 5355ecb676323..2d5d12dd599b0 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/NestedObjectMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/NestedObjectMapperTests.java @@ -25,6 +25,7 @@ import java.io.UncheckedIOException; import java.util.Collection; import java.util.HashSet; +import java.util.List; import java.util.function.Function; import static org.hamcrest.Matchers.containsString; @@ -1380,4 +1381,52 @@ public void testFieldNamesIncludeInRoot() throws Exception { assertThat(doc.docs().get(4).get("_field_names"), nullValue()); } + public void testNoDimensionNestedFields() { + { + Exception e = expectThrows(IllegalArgumentException.class, () -> createDocumentMapper(mapping(b -> { + b.startObject("nested"); + { + b.field("type", "nested"); + b.startObject("properties"); + { + b.startObject("foo") + .field("type", randomFrom(List.of("keyword", "ip", "long", "short", "integer", "byte"))) + .field("time_series_dimension", true) + .endObject(); + } + 
b.endObject(); + } + b.endObject(); + }))); + assertThat(e.getMessage(), containsString("time_series_dimension can't be configured in nested field [nested.foo]")); + } + + { + Exception e = expectThrows(IllegalArgumentException.class, () -> createDocumentMapper(mapping(b -> { + b.startObject("nested"); + { + b.field("type", "nested"); + b.startObject("properties"); + { + b.startObject("other").field("type", "keyword").endObject(); + b.startObject("object").field("type", "object"); + { + b.startObject("properties"); + { + b.startObject("foo") + .field("type", randomFrom(List.of("keyword", "ip", "long", "short", "integer", "byte"))) + .field("time_series_dimension", true) + .endObject(); + } + b.endObject(); + } + b.endObject(); + } + b.endObject(); + } + b.endObject(); + }))); + assertThat(e.getMessage(), containsString("time_series_dimension can't be configured in nested field [nested.object.foo]")); + } + } } diff --git a/x-pack/plugin/mapper-unsigned-long/src/main/java/org/elasticsearch/xpack/unsignedlong/UnsignedLongFieldMapper.java b/x-pack/plugin/mapper-unsigned-long/src/main/java/org/elasticsearch/xpack/unsignedlong/UnsignedLongFieldMapper.java index 2300644128733..153e3f63dc71a 100644 --- a/x-pack/plugin/mapper-unsigned-long/src/main/java/org/elasticsearch/xpack/unsignedlong/UnsignedLongFieldMapper.java +++ b/x-pack/plugin/mapper-unsigned-long/src/main/java/org/elasticsearch/xpack/unsignedlong/UnsignedLongFieldMapper.java @@ -29,6 +29,7 @@ import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MapperBuilderContext; import org.elasticsearch.index.mapper.MapperParsingException; +import org.elasticsearch.index.mapper.MappingLookup; import org.elasticsearch.index.mapper.SimpleMappedFieldType; import org.elasticsearch.index.mapper.SourceValueFetcher; import org.elasticsearch.index.mapper.TextSearchInfo; @@ -648,4 +649,12 @@ protected static long sortableSignedLongToUnsigned(long value) { return value ^ MASK_2_63; } + @Override + public void doValidate(MappingLookup lookup) { + if (dimension && null != lookup.nestedLookup().getNestedParent(name())) { + throw new IllegalArgumentException( + TimeSeriesParams.TIME_SERIES_DIMENSION_PARAM + " can't be configured in nested field [" + name() + "]" + ); + } + } } From acf9968f9d397d755d5ad9fcf815cca23bbeda61 Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Thu, 17 Feb 2022 09:45:25 -0500 Subject: [PATCH 159/167] Shrink join queries in slow log (#83914) This removes the defaults from the slow log for the remaining queries in the `parent-join` module. So it should be easier to read the slow log when it contains these queries. 
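The mechanics follow the pattern used for the other queries in this series:
only serialize a parameter when it differs from its default. The hunk bodies
are truncated below, so the exact lines are an assumption; the sketch relies on
the builders' existing `DEFAULT_IGNORE_UNMAPPED` and `IGNORE_UNMAPPED_FIELD`
constants:

    // emit ignore_unmapped only when it was explicitly set to a non-default value,
    // so default-valued has_parent/parent_id queries render compactly in the slow log
    if (ignoreUnmapped != DEFAULT_IGNORE_UNMAPPED) {
        builder.field(IGNORE_UNMAPPED_FIELD.getPreferredName(), ignoreUnmapped);
    }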
Relates to #76515
---
 .../join/query/HasParentQueryBuilder.java     | 14 +++++--
 .../join/query/ParentIdQueryBuilder.java      |  8 ++--
 .../query/HasParentQueryBuilderTests.java     | 40 ++++++++++++++++++-
 .../join/query/ParentIdQueryBuilderTests.java | 21 +++++++++-
 4 files changed, 73 insertions(+), 10 deletions(-)

diff --git a/modules/parent-join/src/main/java/org/elasticsearch/join/query/HasParentQueryBuilder.java b/modules/parent-join/src/main/java/org/elasticsearch/join/query/HasParentQueryBuilder.java
index c4b6ad3a043da..64c3e627df75f 100644
--- a/modules/parent-join/src/main/java/org/elasticsearch/join/query/HasParentQueryBuilder.java
+++ b/modules/parent-join/src/main/java/org/elasticsearch/join/query/HasParentQueryBuilder.java
@@ -48,6 +48,8 @@ public class HasParentQueryBuilder extends AbstractQueryBuilder<HasParentQueryBuilder> {

Date: Thu, 17 Feb 2022 10:19:47 -0500
Subject: [PATCH 160/167] Group field caps response by index mapping hash (#83494)

This commit uses the index mapping hash to share field-caps responses across
indices with the same index mapping, reducing memory usage and the size of
transport messages.

Closes #78665
Closes #82879
---
 docs/changelog/83494.yaml                     |   7 ++
 .../fieldcaps/FieldCapabilitiesFetcher.java   |  31 ++++-
 .../FieldCapabilitiesIndexResponse.java       | 109 +++++++++++++++--
 .../FieldCapabilitiesNodeResponse.java        |   4 +-
 .../fieldcaps/FieldCapabilitiesResponse.java  |   4 +-
 .../TransportFieldCapabilitiesAction.java     |  25 +++-
 .../FieldCapabilitiesFilterTests.java         |  77 ++++++------
 .../FieldCapabilitiesIndexResponseTests.java  | 114 ++++++++++++++++++
 .../FieldCapabilitiesResponseTests.java       |   2 +-
 9 files changed, 302 insertions(+), 71 deletions(-)
 create mode 100644 docs/changelog/83494.yaml
 create mode 100644 server/src/test/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesIndexResponseTests.java

diff --git a/docs/changelog/83494.yaml b/docs/changelog/83494.yaml
new file mode 100644
index 0000000000000..e933ad666860e
--- /dev/null
+++ b/docs/changelog/83494.yaml
@@ -0,0 +1,7 @@
+pr: 83494
+summary: Group field caps response by index mapping hash
+area: Search
+type: enhancement
+issues:
+ - 78665
+ - 82879

diff --git a/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesFetcher.java b/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesFetcher.java
index b2815dbb05a09..2926b277ba52e 100644
--- a/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesFetcher.java
+++ b/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesFetcher.java
@@ -8,6 +8,7 @@
 
 package org.elasticsearch.action.fieldcaps;
 
+import org.elasticsearch.cluster.metadata.MappingMetadata;
 import org.elasticsearch.index.IndexService;
 import org.elasticsearch.index.engine.Engine;
 import org.elasticsearch.index.mapper.MappedFieldType;
@@ -37,6 +38,7 @@
  */
 class FieldCapabilitiesFetcher {
     private final IndicesService indicesService;
+    private final Map<String, Map<String, IndexFieldCapabilities>> indexMappingHashToResponses = new HashMap<>();
 
     FieldCapabilitiesFetcher(IndicesService indicesService) {
         this.indicesService = indicesService;
     }
@@ -65,17 +67,34 @@ FieldCapabilitiesIndexResponse fetch(
             );
 
             if (canMatchShard(shardId, indexFilter, nowInMillis, searchExecutionContext) == false) {
-                return new FieldCapabilitiesIndexResponse(shardId.getIndexName(), Collections.emptyMap(), false);
+                return new FieldCapabilitiesIndexResponse(shardId.getIndexName(), null, Collections.emptyMap(), false);
             }
 
-            Predicate<String> fieldPredicate = indicesService.getFieldFilter().apply(shardId.getIndexName());
+            final
MappingMetadata mapping = indexService.getMetadata().mapping(); + final String indexMappingHash = mapping != null ? mapping.getSha256() : null; + if (indexMappingHash != null) { + final Map existing = indexMappingHashToResponses.get(indexMappingHash); + if (existing != null) { + return new FieldCapabilitiesIndexResponse(shardId.getIndexName(), indexMappingHash, existing, true); + } + } - return retrieveFieldCaps(shardId.getIndexName(), searchExecutionContext, fieldPatterns, filters, fieldTypes, fieldPredicate); + Predicate fieldPredicate = indicesService.getFieldFilter().apply(shardId.getIndexName()); + final Map responseMap = retrieveFieldCaps( + searchExecutionContext, + fieldPatterns, + filters, + fieldTypes, + fieldPredicate + ); + if (indexMappingHash != null) { + indexMappingHashToResponses.put(indexMappingHash, responseMap); + } + return new FieldCapabilitiesIndexResponse(shardId.getIndexName(), indexMappingHash, responseMap, true); } } - public static FieldCapabilitiesIndexResponse retrieveFieldCaps( - String indexName, + static Map retrieveFieldCaps( SearchExecutionContext context, String[] fieldPatterns, String[] filters, @@ -141,7 +160,7 @@ public static FieldCapabilitiesIndexResponse retrieveFieldCaps( } } } - return new FieldCapabilitiesIndexResponse(indexName, responseMap, true); + return responseMap; } private static boolean checkIncludeParents(String[] filters) { diff --git a/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesIndexResponse.java b/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesIndexResponse.java index 397018208202b..3d03cfc92e1e2 100644 --- a/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesIndexResponse.java +++ b/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesIndexResponse.java @@ -9,34 +9,113 @@ package org.elasticsearch.action.fieldcaps; import org.elasticsearch.Version; -import org.elasticsearch.action.ActionResponse; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.core.Nullable; import java.io.IOException; +import java.util.List; import java.util.Map; import java.util.Objects; +import java.util.function.Predicate; +import java.util.stream.Collectors; +import java.util.stream.Stream; + +final class FieldCapabilitiesIndexResponse implements Writeable { + private static final Version MAPPING_HASH_VERSION = Version.V_8_2_0; -public class FieldCapabilitiesIndexResponse extends ActionResponse implements Writeable { private final String indexName; + @Nullable + private final String indexMappingHash; private final Map responseMap; private final boolean canMatch; private final transient Version originVersion; - FieldCapabilitiesIndexResponse(String indexName, Map responseMap, boolean canMatch) { + FieldCapabilitiesIndexResponse( + String indexName, + @Nullable String indexMappingHash, + Map responseMap, + boolean canMatch + ) { this.indexName = indexName; + this.indexMappingHash = indexMappingHash; this.responseMap = responseMap; this.canMatch = canMatch; this.originVersion = Version.CURRENT; } FieldCapabilitiesIndexResponse(StreamInput in) throws IOException { - super(in); this.indexName = in.readString(); this.responseMap = in.readMap(StreamInput::readString, IndexFieldCapabilities::new); this.canMatch = in.readBoolean(); this.originVersion = in.getVersion(); + if (in.getVersion().onOrAfter(MAPPING_HASH_VERSION)) { + 
this.indexMappingHash = in.readOptionalString(); + } else { + this.indexMappingHash = null; + } + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeString(indexName); + out.writeMap(responseMap, StreamOutput::writeString, (valueOut, fc) -> fc.writeTo(valueOut)); + out.writeBoolean(canMatch); + if (out.getVersion().onOrAfter(MAPPING_HASH_VERSION)) { + out.writeOptionalString(indexMappingHash); + } + } + + private record GroupByMappingHash(List indices, String indexMappingHash, Map responseMap) + implements + Writeable { + GroupByMappingHash(StreamInput in) throws IOException { + this(in.readStringList(), in.readString(), in.readMap(StreamInput::readString, IndexFieldCapabilities::new)); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeStringCollection(indices); + out.writeString(indexMappingHash); + out.writeMap(responseMap, StreamOutput::writeString, (valueOut, fc) -> fc.writeTo(valueOut)); + } + + List getResponses() { + return indices.stream().map(index -> new FieldCapabilitiesIndexResponse(index, indexMappingHash, responseMap, true)).toList(); + } + } + + static List readList(StreamInput input) throws IOException { + if (input.getVersion().before(MAPPING_HASH_VERSION)) { + return input.readList(FieldCapabilitiesIndexResponse::new); + } + final List ungroupedList = input.readList(FieldCapabilitiesIndexResponse::new); + final List groups = input.readList(GroupByMappingHash::new); + return Stream.concat(ungroupedList.stream(), groups.stream().flatMap(g -> g.getResponses().stream())).toList(); + } + + static void writeList(StreamOutput output, List responses) throws IOException { + if (output.getVersion().before(MAPPING_HASH_VERSION)) { + output.writeCollection(responses); + return; + } + final Predicate canGroup = r -> r.canMatch && r.indexMappingHash != null; + final List ungroupedResponses = responses.stream().filter(r -> canGroup.test(r) == false).toList(); + final List groupedResponses = responses.stream() + .filter(canGroup) + .collect(Collectors.groupingBy(r -> r.indexMappingHash)) + .values() + .stream() + .map(rs -> { + final String indexMappingHash = rs.get(0).indexMappingHash; + final Map responseMap = rs.get(0).responseMap; + final List indices = rs.stream().map(r -> r.indexName).toList(); + return new GroupByMappingHash(indices, indexMappingHash, responseMap); + }) + .toList(); + output.writeList(ungroupedResponses); + output.writeList(groupedResponses); } /** @@ -46,6 +125,14 @@ public String getIndexName() { return indexName; } + /** + * Returns the index mapping hash associated with this index if exists + */ + @Nullable + public String getIndexMappingHash() { + return indexMappingHash; + } + public boolean canMatch() { return canMatch; } @@ -69,23 +156,19 @@ Version getOriginVersion() { return originVersion; } - @Override - public void writeTo(StreamOutput out) throws IOException { - out.writeString(indexName); - out.writeMap(responseMap, StreamOutput::writeString, (valueOut, fc) -> fc.writeTo(valueOut)); - out.writeBoolean(canMatch); - } - @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; FieldCapabilitiesIndexResponse that = (FieldCapabilitiesIndexResponse) o; - return canMatch == that.canMatch && Objects.equals(indexName, that.indexName) && Objects.equals(responseMap, that.responseMap); + return canMatch == that.canMatch + && Objects.equals(indexName, that.indexName) + && Objects.equals(indexMappingHash, 
that.indexMappingHash) + && Objects.equals(responseMap, that.responseMap); } @Override public int hashCode() { - return Objects.hash(indexName, responseMap, canMatch); + return Objects.hash(indexName, indexMappingHash, responseMap, canMatch); } } diff --git a/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesNodeResponse.java b/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesNodeResponse.java index 6d103fbe863cc..91f079cadbd99 100644 --- a/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesNodeResponse.java +++ b/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesNodeResponse.java @@ -37,14 +37,14 @@ class FieldCapabilitiesNodeResponse extends ActionResponse implements Writeable FieldCapabilitiesNodeResponse(StreamInput in) throws IOException { super(in); - this.indexResponses = in.readList(FieldCapabilitiesIndexResponse::new); + this.indexResponses = FieldCapabilitiesIndexResponse.readList(in); this.failures = in.readMap(ShardId::new, StreamInput::readException); this.unmatchedShardIds = in.readSet(ShardId::new); } @Override public void writeTo(StreamOutput out) throws IOException { - out.writeList(indexResponses); + FieldCapabilitiesIndexResponse.writeList(out, indexResponses); out.writeMap(failures, (o, v) -> v.writeTo(o), StreamOutput::writeException); out.writeCollection(unmatchedShardIds); } diff --git a/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesResponse.java b/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesResponse.java index 7e14fb667c96e..dba604db2faf2 100644 --- a/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesResponse.java +++ b/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesResponse.java @@ -75,7 +75,7 @@ public FieldCapabilitiesResponse(StreamInput in) throws IOException { super(in); indices = in.readStringArray(); this.responseMap = in.readMap(StreamInput::readString, FieldCapabilitiesResponse::readField); - indexResponses = in.readList(FieldCapabilitiesIndexResponse::new); + this.indexResponses = FieldCapabilitiesIndexResponse.readList(in); this.failures = in.readList(FieldCapabilitiesFailure::new); } @@ -141,7 +141,7 @@ private static Map readField(StreamInput in) throws I public void writeTo(StreamOutput out) throws IOException { out.writeStringArray(indices); out.writeMap(responseMap, StreamOutput::writeString, FieldCapabilitiesResponse::writeField); - out.writeList(indexResponses); + FieldCapabilitiesIndexResponse.writeList(out, indexResponses); out.writeList(failures); } diff --git a/server/src/main/java/org/elasticsearch/action/fieldcaps/TransportFieldCapabilitiesAction.java b/server/src/main/java/org/elasticsearch/action/fieldcaps/TransportFieldCapabilitiesAction.java index 2b17b37a2d893..a12bef8aa74d4 100644 --- a/server/src/main/java/org/elasticsearch/action/fieldcaps/TransportFieldCapabilitiesAction.java +++ b/server/src/main/java/org/elasticsearch/action/fieldcaps/TransportFieldCapabilitiesAction.java @@ -42,6 +42,7 @@ import java.util.List; import java.util.Map; import java.util.Set; +import java.util.function.Consumer; import java.util.function.Predicate; import java.util.stream.Collectors; @@ -55,8 +56,8 @@ public class TransportFieldCapabilitiesAction extends HandledTransportAction metadataFieldPred; + private final IndicesService indicesService; private final boolean ccsCheckCompatibility; @Inject @@ -73,7 +74,7 @@ public TransportFieldCapabilitiesAction( 
this.transportService = transportService; this.clusterService = clusterService; this.indexNameExpressionResolver = indexNameExpressionResolver; - this.fieldCapabilitiesFetcher = new FieldCapabilitiesFetcher(indicesService); + this.indicesService = indicesService; final Set metadataFields = indicesService.getAllMetadataFields(); this.metadataFieldPred = metadataFields::contains; transportService.registerRequestHandler( @@ -112,6 +113,17 @@ protected void doExecute(Task task, FieldCapabilitiesRequest request, final Acti checkIndexBlocks(clusterState, concreteIndices); final Map indexResponses = Collections.synchronizedMap(new HashMap<>()); + // This map is used to share the index response for indices which have the same index mapping hash to reduce the memory usage. + final Map> indexMappingHashToResponses = Collections.synchronizedMap(new HashMap<>()); + final Consumer handleIndexResponse = resp -> { + if (resp.canMatch() && resp.getIndexMappingHash() != null) { + Map curr = indexMappingHashToResponses.putIfAbsent(resp.getIndexMappingHash(), resp.get()); + if (curr != null) { + resp = new FieldCapabilitiesIndexResponse(resp.getIndexName(), resp.getIndexMappingHash(), curr, true); + } + } + indexResponses.putIfAbsent(resp.getIndexName(), resp); + }; final FailureCollector indexFailures = new FailureCollector(); // One for each cluster including the local cluster final CountDown completionCounter = new CountDown(1 + remoteClusterIndices.size()); @@ -125,7 +137,7 @@ protected void doExecute(Task task, FieldCapabilitiesRequest request, final Acti nowInMillis, concreteIndices, threadPool.executor(ThreadPool.Names.SEARCH_COORDINATION), - indexResponse -> indexResponses.putIfAbsent(indexResponse.getIndexName(), indexResponse), + handleIndexResponse, indexFailures::collect, countDown ); @@ -141,7 +153,9 @@ protected void doExecute(Task task, FieldCapabilitiesRequest request, final Acti remoteClusterClient.fieldCaps(remoteRequest, ActionListener.wrap(response -> { for (FieldCapabilitiesIndexResponse resp : response.getIndexResponses()) { String indexName = RemoteClusterAware.buildRemoteIndexName(clusterAlias, resp.getIndexName()); - indexResponses.putIfAbsent(indexName, new FieldCapabilitiesIndexResponse(indexName, resp.get(), resp.canMatch())); + handleIndexResponse.accept( + new FieldCapabilitiesIndexResponse(indexName, resp.getIndexMappingHash(), resp.get(), resp.canMatch()) + ); } for (FieldCapabilitiesFailure failure : response.getFailures()) { Exception ex = failure.getException(); @@ -347,12 +361,13 @@ public void messageReceived(FieldCapabilitiesNodeRequest request, TransportChann final Map> groupedShardIds = request.shardIds() .stream() .collect(Collectors.groupingBy(ShardId::getIndexName)); + final FieldCapabilitiesFetcher fetcher = new FieldCapabilitiesFetcher(indicesService); for (List shardIds : groupedShardIds.values()) { final Map failures = new HashMap<>(); final Set unmatched = new HashSet<>(); for (ShardId shardId : shardIds) { try { - final FieldCapabilitiesIndexResponse response = fieldCapabilitiesFetcher.fetch( + final FieldCapabilitiesIndexResponse response = fetcher.fetch( shardId, request.fields(), request.filters(), diff --git a/server/src/test/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesFilterTests.java b/server/src/test/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesFilterTests.java index 25f64107e5b23..d0ca8a7bf5d33 100644 --- a/server/src/test/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesFilterTests.java +++ 
b/server/src/test/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesFilterTests.java @@ -14,6 +14,7 @@ import org.elasticsearch.index.query.SearchExecutionContext; import java.io.IOException; +import java.util.Map; import java.util.function.Predicate; public class FieldCapabilitiesFilterTests extends MapperServiceTestCase { @@ -35,8 +36,7 @@ public void testExcludeNestedFields() throws IOException { """); SearchExecutionContext sec = createSearchExecutionContext(mapperService); - FieldCapabilitiesIndexResponse response = FieldCapabilitiesFetcher.retrieveFieldCaps( - "index", + Map response = FieldCapabilitiesFetcher.retrieveFieldCaps( sec, new String[] { "*" }, new String[] { "-nested" }, @@ -44,10 +44,10 @@ public void testExcludeNestedFields() throws IOException { f -> true ); - assertNotNull(response.getField("field1")); - assertNotNull(response.getField("field4")); - assertNull(response.getField("field2")); - assertNull(response.getField("field2.field3")); + assertNotNull(response.get("field1")); + assertNotNull(response.get("field4")); + assertNull(response.get("field2")); + assertNull(response.get("field2.field3")); } public void testMetadataFilters() throws IOException { @@ -62,28 +62,26 @@ public void testMetadataFilters() throws IOException { SearchExecutionContext sec = createSearchExecutionContext(mapperService); { - FieldCapabilitiesIndexResponse response = FieldCapabilitiesFetcher.retrieveFieldCaps( - "index", + Map response = FieldCapabilitiesFetcher.retrieveFieldCaps( sec, new String[] { "*" }, new String[] { "+metadata" }, Strings.EMPTY_ARRAY, f -> true ); - assertNotNull(response.getField("_index")); - assertNull(response.getField("field1")); + assertNotNull(response.get("_index")); + assertNull(response.get("field1")); } { - FieldCapabilitiesIndexResponse response = FieldCapabilitiesFetcher.retrieveFieldCaps( - "index", + Map response = FieldCapabilitiesFetcher.retrieveFieldCaps( sec, new String[] { "*" }, new String[] { "-metadata" }, Strings.EMPTY_ARRAY, f -> true ); - assertNull(response.getField("_index")); - assertNotNull(response.getField("field1")); + assertNull(response.get("_index")); + assertNotNull(response.get("field1")); } } @@ -106,19 +104,18 @@ public void testExcludeMultifields() throws IOException { """); SearchExecutionContext sec = createSearchExecutionContext(mapperService); - FieldCapabilitiesIndexResponse response = FieldCapabilitiesFetcher.retrieveFieldCaps( - "index", + Map response = FieldCapabilitiesFetcher.retrieveFieldCaps( sec, new String[] { "*" }, new String[] { "-multifield" }, Strings.EMPTY_ARRAY, f -> true ); - assertNotNull(response.getField("field1")); - assertNull(response.getField("field1.keyword")); - assertNotNull(response.getField("field2")); - assertNotNull(response.getField("field2.keyword")); - assertNotNull(response.getField("_index")); + assertNotNull(response.get("field1")); + assertNull(response.get("field1.keyword")); + assertNotNull(response.get("field2")); + assertNotNull(response.get("field2.keyword")); + assertNotNull(response.get("_index")); } public void testDontIncludeParentInfo() throws IOException { @@ -136,17 +133,16 @@ public void testDontIncludeParentInfo() throws IOException { """); SearchExecutionContext sec = createSearchExecutionContext(mapperService); - FieldCapabilitiesIndexResponse response = FieldCapabilitiesFetcher.retrieveFieldCaps( - "index", + Map response = FieldCapabilitiesFetcher.retrieveFieldCaps( sec, new String[] { "*" }, new String[] { "-parent" }, Strings.EMPTY_ARRAY, f -> true ); - 
assertNotNull(response.getField("parent.field1")); - assertNotNull(response.getField("parent.field2")); - assertNull(response.getField("parent")); + assertNotNull(response.get("parent.field1")); + assertNotNull(response.get("parent.field2")); + assertNull(response.get("parent")); } public void testSecurityFilter() throws IOException { @@ -163,8 +159,7 @@ public void testSecurityFilter() throws IOException { Predicate securityFilter = f -> f.startsWith("permitted"); { - FieldCapabilitiesIndexResponse response = FieldCapabilitiesFetcher.retrieveFieldCaps( - "index", + Map response = FieldCapabilitiesFetcher.retrieveFieldCaps( sec, new String[] { "*" }, Strings.EMPTY_ARRAY, @@ -172,14 +167,13 @@ public void testSecurityFilter() throws IOException { securityFilter ); - assertNotNull(response.getField("permitted1")); - assertNull(response.getField("forbidden")); - assertNotNull(response.getField("_index")); // security filter doesn't apply to metadata + assertNotNull(response.get("permitted1")); + assertNull(response.get("forbidden")); + assertNotNull(response.get("_index")); // security filter doesn't apply to metadata } { - FieldCapabilitiesIndexResponse response = FieldCapabilitiesFetcher.retrieveFieldCaps( - "index", + Map response = FieldCapabilitiesFetcher.retrieveFieldCaps( sec, new String[] { "*" }, new String[] { "-metadata" }, @@ -187,9 +181,9 @@ public void testSecurityFilter() throws IOException { securityFilter ); - assertNotNull(response.getField("permitted1")); - assertNull(response.getField("forbidden")); - assertNull(response.getField("_index")); // -metadata filter applies on top + assertNotNull(response.get("permitted1")); + assertNull(response.get("forbidden")); + assertNull(response.get("_index")); // -metadata filter applies on top } } @@ -205,17 +199,16 @@ public void testFieldTypeFiltering() throws IOException { """); SearchExecutionContext sec = createSearchExecutionContext(mapperService); - FieldCapabilitiesIndexResponse response = FieldCapabilitiesFetcher.retrieveFieldCaps( - "index", + Map response = FieldCapabilitiesFetcher.retrieveFieldCaps( sec, new String[] { "*" }, Strings.EMPTY_ARRAY, new String[] { "text", "keyword" }, f -> true ); - assertNotNull(response.getField("field1")); - assertNull(response.getField("field2")); - assertNotNull(response.getField("field3")); - assertNull(response.getField("_index")); + assertNotNull(response.get("field1")); + assertNull(response.get("field2")); + assertNotNull(response.get("field3")); + assertNull(response.get("_index")); } } diff --git a/server/src/test/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesIndexResponseTests.java b/server/src/test/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesIndexResponseTests.java new file mode 100644 index 0000000000000..39715f26d9dc7 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesIndexResponseTests.java @@ -0,0 +1,114 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.action.fieldcaps; + +import org.elasticsearch.common.Randomness; +import org.elasticsearch.common.UUIDs; +import org.elasticsearch.common.io.stream.BytesStreamOutput; +import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.test.ESTestCase; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.Comparator; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.function.Supplier; +import java.util.stream.Collectors; +import java.util.stream.IntStream; + +import static org.hamcrest.Matchers.equalTo; + +public class FieldCapabilitiesIndexResponseTests extends ESTestCase { + + public void testShareResponsesUsingMappingHash() throws Exception { + final Supplier> randomFieldCaps = () -> { + final Map fieldCaps = new HashMap<>(); + final List fields = randomList(1, 5, () -> randomAlphaOfLength(5)); + for (String field : fields) { + final IndexFieldCapabilities fieldCap = new IndexFieldCapabilities( + field, + randomAlphaOfLengthBetween(5, 20), + randomBoolean(), + randomBoolean(), + randomBoolean(), + false, + null, + Map.of() + ); + fieldCaps.put(field, fieldCap); + } + return fieldCaps; + }; + final List inList = new ArrayList<>(); + int numGroups = randomIntBetween(0, 20); + Map> mappingHashToIndices = new HashMap<>(); + for (int i = 0; i < numGroups; i++) { + String groupName = "group_" + i; + String hashing = UUIDs.randomBase64UUID(); + List indices = IntStream.range(0, randomIntBetween(1, 5)).mapToObj(n -> groupName + "_" + n).toList(); + mappingHashToIndices.put(hashing, indices); + Map fieldCaps = randomFieldCaps.get(); + for (String index : indices) { + inList.add(new FieldCapabilitiesIndexResponse(index, hashing, fieldCaps, true)); + } + } + int numUngroups = randomIntBetween(0, 5); + for (int i = 0; i < numUngroups; i++) { + String index = "ungrouped_" + i; + final String hashing; + final boolean canMatch; + Map fieldCaps = Map.of(); + if (randomBoolean()) { + canMatch = false; + hashing = UUIDs.randomBase64UUID(); + } else { + canMatch = randomBoolean(); + hashing = null; + if (canMatch) { + fieldCaps = randomFieldCaps.get(); + } + } + inList.add(new FieldCapabilitiesIndexResponse(index, hashing, fieldCaps, canMatch)); + } + Randomness.shuffle(inList); + final List serializedList; + try (BytesStreamOutput output = new BytesStreamOutput()) { + FieldCapabilitiesIndexResponse.writeList(output, inList); + try ( + StreamInput in = new NamedWriteableAwareStreamInput( + output.bytes().streamInput(), + new NamedWriteableRegistry(Collections.emptyList()) + ) + ) { + serializedList = FieldCapabilitiesIndexResponse.readList(in); + } + } + assertThat( + serializedList.stream().sorted(Comparator.comparing(FieldCapabilitiesIndexResponse::getIndexName)).toList(), + equalTo(inList.stream().sorted(Comparator.comparing(FieldCapabilitiesIndexResponse::getIndexName)).toList()) + ); + Map> groupedResponses = serializedList.stream() + .filter(r -> r.canMatch() && r.getIndexMappingHash() != null) + .collect(Collectors.groupingBy(FieldCapabilitiesIndexResponse::getIndexMappingHash)); + assertThat(groupedResponses.keySet(), equalTo(mappingHashToIndices.keySet())); + for (Map.Entry> e : groupedResponses.entrySet()) { + List indices = mappingHashToIndices.get(e.getKey()); + List rs = e.getValue(); + 
assertThat(rs.stream().map(FieldCapabilitiesIndexResponse::getIndexName).sorted().toList(), equalTo(indices)); + for (FieldCapabilitiesIndexResponse r : rs) { + assertTrue(r.canMatch()); + assertSame(r.get(), rs.get(0).get()); + } + } + } +} diff --git a/server/src/test/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesResponseTests.java b/server/src/test/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesResponseTests.java index fbea856caface..1f43f88c192ae 100644 --- a/server/src/test/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesResponseTests.java +++ b/server/src/test/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesResponseTests.java @@ -60,7 +60,7 @@ public static FieldCapabilitiesIndexResponse randomIndexResponse(String index, b for (String field : fields) { responses.put(field, randomFieldCaps(field)); } - return new FieldCapabilitiesIndexResponse(index, responses, canMatch); + return new FieldCapabilitiesIndexResponse(index, null, responses, canMatch); } public static IndexFieldCapabilities randomFieldCaps(String fieldName) { From 6ad3f8bfdd7c7ea1b0d8e35cba90555d9fc73250 Mon Sep 17 00:00:00 2001 From: James Rodewig Date: Thu, 17 Feb 2022 10:33:06 -0500 Subject: [PATCH 161/167] [DOCS] Clarify `orientation` usage for WKT and GeoJSON polygons (#84025) Clarifies that the `orientation` mapping parameter only applies to WKT polygons. GeoJSON polygons use a default orientation of `RIGHT`, regardless of the mapping parameter. Also notes that the document-level `orientation` parameter overrides the default orientation for both WKT and GeoJSON polygons. Closes https://github.com/elastic/elasticsearch/issues/84009. --- .../mapping/types/geo-shape.asciidoc | 77 ++++++++++--------- 1 file changed, 42 insertions(+), 35 deletions(-) diff --git a/docs/reference/mapping/types/geo-shape.asciidoc b/docs/reference/mapping/types/geo-shape.asciidoc index 2565ae5f320ff..0eee58a1a2f90 100644 --- a/docs/reference/mapping/types/geo-shape.asciidoc +++ b/docs/reference/mapping/types/geo-shape.asciidoc @@ -5,7 +5,7 @@ ++++ The `geo_shape` data type facilitates the indexing of and searching -with arbitrary geo shapes such as rectangles and polygons. It should be +with arbitrary geoshapes such as rectangles and polygons. It should be used when either the data being indexed or the queries being executed contain shapes other than just points. @@ -26,7 +26,7 @@ type. |`orientation` a|Optional. Default <> for the field's -polygons. +WKT polygons. This parameter sets and returns only a `RIGHT` (counterclockwise) or `LEFT` (clockwise) value. However, you can specify either value in multiple ways. @@ -66,7 +66,7 @@ and reject the whole document. [[geoshape-indexing-approach]] [discrete] ==== Indexing approach -GeoShape types are indexed by decomposing the shape into a triangular mesh and +Geoshape types are indexed by decomposing the shape into a triangular mesh and indexing each triangle as a 7 dimension point in a BKD tree. This provides near perfect spatial resolution (down to 1e-7 decimal degree precision) since all spatial relations are computed using an encoded vector representation of the @@ -144,7 +144,7 @@ API. The following is an example of a point in GeoJSON. POST /example/_doc { "location" : { - "type" : "point", + "type" : "Point", "coordinates" : [-77.03653, 38.897676] } } @@ -164,23 +164,23 @@ POST /example/_doc [[geo-linestring]] ===== http://geojson.org/geojson-spec.html#id3[LineString] -A `linestring` defined by an array of two or more positions. 
By -specifying only two points, the `linestring` will represent a straight +A linestring defined by an array of two or more positions. By +specifying only two points, the linestring will represent a straight line. Specifying more than two points creates an arbitrary path. The -following is an example of a LineString in GeoJSON. +following is an example of a linestring in GeoJSON. [source,console] -------------------------------------------------- POST /example/_doc { "location" : { - "type" : "linestring", + "type" : "LineString", "coordinates" : [[-77.03653, 38.897676], [-77.009051, 38.889939]] } } -------------------------------------------------- -The following is an example of a LineString in WKT: +The following is an example of a linestring in WKT: [source,console] -------------------------------------------------- @@ -190,7 +190,7 @@ POST /example/_doc } -------------------------------------------------- -The above `linestring` would draw a straight line starting at the White +The above linestring would draw a straight line starting at the White House to the US Capitol Building. [discrete] @@ -199,14 +199,14 @@ House to the US Capitol Building. A polygon is defined by a list of a list of points. The first and last points in each (outer) list must be the same (the polygon must be -closed). The following is an example of a Polygon in GeoJSON. +closed). The following is an example of a polygon in GeoJSON. [source,console] -------------------------------------------------- POST /example/_doc { "location" : { - "type" : "polygon", + "type" : "Polygon", "coordinates" : [ [ [100.0, 0.0], [101.0, 0.0], [101.0, 1.0], [100.0, 1.0], [100.0, 0.0] ] ] @@ -214,7 +214,7 @@ POST /example/_doc } -------------------------------------------------- -The following is an example of a Polygon in WKT: +The following is an example of a polygon in WKT: [source,console] -------------------------------------------------- @@ -233,7 +233,7 @@ of a polygon with a hole: POST /example/_doc { "location" : { - "type" : "polygon", + "type" : "Polygon", "coordinates" : [ [ [100.0, 0.0], [101.0, 0.0], [101.0, 1.0], [100.0, 1.0], [100.0, 0.0] ], [ [100.2, 0.2], [100.8, 0.2], [100.8, 0.8], [100.2, 0.8], [100.2, 0.2] ] @@ -242,7 +242,7 @@ POST /example/_doc } -------------------------------------------------- -The following is an example of a Polygon with a hole in WKT: +The following is an example of a polygon with a hole in WKT: [source,console] -------------------------------------------------- @@ -257,22 +257,29 @@ POST /example/_doc ===== Polygon orientation A polygon's orientation indicates the order of its vertices: `RIGHT` -(counterclockwise) or `LEFT` (clockwise). +(counterclockwise) or `LEFT` (clockwise). {es} uses a polygon’s orientation to +determine if it crosses the international dateline (+/-180° longitude). -You can set a default orientation for a `geo_shape` field using the -<>. You can override -this default for specific polygons using the document-level `orientation` -parameter. +You can set a default orientation for WKT polygons using the +<>. This is because +the WKT specification doesn't specify or enforce a default orientation. -For example, the following indexing request specifies a document-level -`orientation` of `LEFT`. +GeoJSON polygons use a default orientation of `RIGHT`, regardless of +`orientation` mapping parameter's value. 
This is because the +https://tools.ietf.org/html/rfc7946#section-3.1.6[GeoJSON specification] +mandates that an outer polygon use a counterclockwise orientation and interior +shapes use a clockwise orientation. + +You can override the default orientation for GeoJSON polygons using the +document-level `orientation` parameter. For example, the following indexing +request specifies a document-level `orientation` of `LEFT`. [source,console] ---- POST /example/_doc { "location" : { - "type" : "polygon", + "type" : "Polygon", "orientation" : "LEFT", "coordinates" : [ [ [-177.0, 10.0], [176.0, 15.0], [172.0, 0.0], [176.0, -15.0], [-177.0, -10.0], [-177.0, 10.0] ] @@ -282,15 +289,15 @@ POST /example/_doc ---- {es} only uses a polygon’s orientation to determine if it crosses the -international dateline (+/-180° longitude). If the difference between a -polygon’s minimum longitude and the maximum longitude is less than 180°, the -polygon doesn't cross the dateline and its orientation has no effect. +international dateline. If the difference between a polygon’s minimum longitude +and the maximum longitude is less than 180°, the polygon doesn't cross the +dateline and its orientation has no effect. If the difference between a polygon’s minimum longitude and the maximum longitude is 180° or greater, {es} checks whether the polygon's document-level -`orientation` differs from the default in the `orientation` mapping parameter. -If the orientation differs, {es} considers the polygon to cross the -international dateline and splits the polygon at the dateline. +`orientation` differs from the default orientation. If the orientation differs, +{es} considers the polygon to cross the international dateline and splits the +polygon at the dateline. [discrete] [[geo-multipoint]] @@ -303,7 +310,7 @@ The following is an example of a list of GeoJSON points: POST /example/_doc { "location" : { - "type" : "multipoint", + "type" : "MultiPoint", "coordinates" : [ [102.0, 2.0], [103.0, 2.0] ] @@ -332,7 +339,7 @@ The following is an example of a list of GeoJSON linestrings: POST /example/_doc { "location" : { - "type" : "multilinestring", + "type" : "MultiLineString", "coordinates" : [ [ [102.0, 2.0], [103.0, 2.0], [103.0, 3.0], [102.0, 3.0] ], [ [100.0, 0.0], [101.0, 0.0], [101.0, 1.0], [100.0, 1.0] ], @@ -363,7 +370,7 @@ The following is an example of a list of GeoJSON polygons (second polygon contai POST /example/_doc { "location" : { - "type" : "multipolygon", + "type" : "MultiPolygon", "coordinates" : [ [ [[102.0, 2.0], [103.0, 2.0], [103.0, 3.0], [102.0, 3.0], [102.0, 2.0]] ], [ [[100.0, 0.0], [101.0, 0.0], [101.0, 1.0], [100.0, 1.0], [100.0, 0.0]], @@ -394,14 +401,14 @@ The following is an example of a collection of GeoJSON geometry objects: POST /example/_doc { "location" : { - "type": "geometrycollection", + "type": "GeometryCollection", "geometries": [ { - "type": "point", + "type": "Point", "coordinates": [100.0, 0.0] }, { - "type": "linestring", + "type": "LineString", "coordinates": [ [101.0, 0.0], [102.0, 1.0] ] } ] From 4bc6a121c49cacc876562318c8a5325fe9def7bb Mon Sep 17 00:00:00 2001 From: Gordon Brown Date: Thu, 17 Feb 2022 09:56:53 -0700 Subject: [PATCH 162/167] Always re-run Feature migrations which have encountered errors (#83918) This PR addressed the behavior described in #83917, in which Feature migrations which have encountered errors are not re-run in some cases. As of this PR, Features which have encountered errors during migration are treated the same as Features requiring migration. 
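Concretely, the action now treats both statuses as upgradable when selecting
features to migrate, condensed here from the TransportPostFeatureUpgradeAction
diff below:

    final Set<GetFeatureUpgradeStatusResponse.UpgradeStatus> upgradableStatuses = EnumSet.of(
        GetFeatureUpgradeStatusResponse.UpgradeStatus.MIGRATION_NEEDED,
        GetFeatureUpgradeStatusResponse.UpgradeStatus.ERROR
    );
    // ...the feature stream is then filtered with:
    .filter(status -> upgradableStatuses.contains(status.getUpgradeStatus()))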
This PR also adds a test which artificially replicates #83917. --- docs/changelog/83918.yaml | 6 ++ .../migration/FeatureMigrationIT.java | 69 ++++++++++++++++++- .../TransportPostFeatureUpgradeAction.java | 8 ++- 3 files changed, 81 insertions(+), 2 deletions(-) create mode 100644 docs/changelog/83918.yaml diff --git a/docs/changelog/83918.yaml b/docs/changelog/83918.yaml new file mode 100644 index 0000000000000..941d0c6aea30a --- /dev/null +++ b/docs/changelog/83918.yaml @@ -0,0 +1,6 @@ +pr: 83918 +summary: Always re-run Feature migrations which have encountered errors +area: Infra/Core +type: bug +issues: + - 83917 diff --git a/modules/reindex/src/internalClusterTest/java/org/elasticsearch/migration/FeatureMigrationIT.java b/modules/reindex/src/internalClusterTest/java/org/elasticsearch/migration/FeatureMigrationIT.java index 14a7f86b8fdb0..958ebf14d8a23 100644 --- a/modules/reindex/src/internalClusterTest/java/org/elasticsearch/migration/FeatureMigrationIT.java +++ b/modules/reindex/src/internalClusterTest/java/org/elasticsearch/migration/FeatureMigrationIT.java @@ -25,6 +25,8 @@ import org.elasticsearch.action.support.ActiveShardCount; import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.ClusterStateTaskExecutor; +import org.elasticsearch.cluster.ClusterStateUpdateTask; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.cluster.service.ClusterService; @@ -37,6 +39,7 @@ import org.elasticsearch.reindex.ReindexPlugin; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.upgrades.FeatureMigrationResults; +import org.elasticsearch.upgrades.SingleFeatureMigrationResult; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.json.JsonXContent; @@ -50,6 +53,8 @@ import java.util.Map; import java.util.Optional; import java.util.Set; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicReference; import java.util.function.BiConsumer; import java.util.function.Function; @@ -267,6 +272,67 @@ public void testMigrateIndexWithWriteBlock() throws Exception { }); } + public void testMigrationWillRunAfterError() throws Exception { + createSystemIndexForDescriptor(INTERNAL_MANAGED); + + TestPlugin.preMigrationHook.set((state) -> Collections.emptyMap()); + TestPlugin.postMigrationHook.set((state, metadata) -> {}); + + ensureGreen(); + + SetOnce failure = new SetOnce<>(); + CountDownLatch clusterStateUpdated = new CountDownLatch(1); + internalCluster().getCurrentMasterNodeInstance(ClusterService.class) + .submitStateUpdateTask(this.getTestName(), new ClusterStateUpdateTask() { + @Override + public ClusterState execute(ClusterState currentState) throws Exception { + FeatureMigrationResults newResults = new FeatureMigrationResults( + Collections.singletonMap( + FEATURE_NAME, + SingleFeatureMigrationResult.failure(INTERNAL_MANAGED_INDEX_NAME, new RuntimeException("it failed :(")) + ) + ); + Metadata newMetadata = Metadata.builder(currentState.metadata()) + .putCustom(FeatureMigrationResults.TYPE, newResults) + .build(); + return ClusterState.builder(currentState).metadata(newMetadata).build(); + } + + @Override + public void clusterStateProcessed(ClusterState oldState, ClusterState newState) { + clusterStateUpdated.countDown(); + } + + @Override + public void onFailure(Exception e) { + failure.set(e); + 
clusterStateUpdated.countDown(); + } + }, ClusterStateTaskExecutor.unbatched()); + + clusterStateUpdated.await(10, TimeUnit.SECONDS); // Should be basically instantaneous + if (failure.get() != null) { + logger.error("cluster state update to inject migration failure state did not succeed", failure.get()); + fail("cluster state update failed, see log for details"); + } + + PostFeatureUpgradeRequest migrationRequest = new PostFeatureUpgradeRequest(); + PostFeatureUpgradeResponse migrationResponse = client().execute(PostFeatureUpgradeAction.INSTANCE, migrationRequest).get(); + // Make sure we actually started the migration + assertTrue( + "could not find [" + FEATURE_NAME + "] in response: " + Strings.toString(migrationResponse), + migrationResponse.getFeatures().stream().anyMatch(feature -> feature.getFeatureName().equals(FEATURE_NAME)) + ); + + // Now wait for the migration to finish (otherwise the test infra explodes) + assertBusy(() -> { + GetFeatureUpgradeStatusRequest getStatusRequest = new GetFeatureUpgradeStatusRequest(); + GetFeatureUpgradeStatusResponse statusResp = client().execute(GetFeatureUpgradeStatusAction.INSTANCE, getStatusRequest).get(); + logger.info(Strings.toString(statusResp)); + assertThat(statusResp.getUpgradeStatus(), equalTo(GetFeatureUpgradeStatusResponse.UpgradeStatus.NO_MIGRATION_NEEDED)); + }); + } + public void assertIndexHasCorrectProperties( Metadata metadata, String indexName, @@ -344,6 +410,7 @@ public void createSystemIndexForDescriptor(SystemIndexDescriptor descriptor) thr static final String FEATURE_NAME = "A-test-feature"; // Sorts alphabetically before the feature from MultiFeatureMigrationIT static final String ORIGIN = FeatureMigrationIT.class.getSimpleName(); static final String FlAG_SETTING_KEY = IndexMetadata.INDEX_PRIORITY_SETTING.getKey(); + static final String INTERNAL_MANAGED_INDEX_NAME = ".int-man-old"; static final int INDEX_DOC_COUNT = 100; // arbitrarily chosen public static final Version NEEDS_UPGRADE_VERSION = Version.V_7_0_0; @@ -354,7 +421,7 @@ public void createSystemIndexForDescriptor(SystemIndexDescriptor descriptor) thr static final SystemIndexDescriptor INTERNAL_MANAGED = SystemIndexDescriptor.builder() .setIndexPattern(".int-man-*") .setAliasName(".internal-managed-alias") - .setPrimaryIndex(".int-man-old") + .setPrimaryIndex(INTERNAL_MANAGED_INDEX_NAME) .setType(SystemIndexDescriptor.Type.INTERNAL_MANAGED) .setSettings(createSimpleSettings(NEEDS_UPGRADE_VERSION, INTERNAL_MANAGED_FLAG_VALUE)) .setMappings(createSimpleMapping(true, true)) diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/migration/TransportPostFeatureUpgradeAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/migration/TransportPostFeatureUpgradeAction.java index 0b3c93c3061c7..6016369637f6e 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/migration/TransportPostFeatureUpgradeAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/migration/TransportPostFeatureUpgradeAction.java @@ -28,7 +28,9 @@ import org.elasticsearch.upgrades.SystemIndexMigrationTaskParams; import java.util.Comparator; +import java.util.EnumSet; import java.util.List; +import java.util.Set; import java.util.stream.Collectors; import static org.elasticsearch.action.admin.cluster.migration.TransportGetFeatureUpgradeStatusAction.getFeatureUpgradeStatus; @@ -75,11 +77,15 @@ protected void masterOperation( ClusterState state, ActionListener listener ) throws Exception { + final Set upgradableStatuses = 
EnumSet.of( + GetFeatureUpgradeStatusResponse.UpgradeStatus.MIGRATION_NEEDED, + GetFeatureUpgradeStatusResponse.UpgradeStatus.ERROR + ); List featuresToMigrate = systemIndices.getFeatures() .values() .stream() .map(feature -> getFeatureUpgradeStatus(state, feature)) - .filter(status -> status.getUpgradeStatus().equals(GetFeatureUpgradeStatusResponse.UpgradeStatus.MIGRATION_NEEDED)) + .filter(status -> upgradableStatuses.contains(status.getUpgradeStatus())) .map(GetFeatureUpgradeStatusResponse.FeatureUpgradeStatus::getFeatureName) .map(PostFeatureUpgradeResponse.Feature::new) .sorted(Comparator.comparing(PostFeatureUpgradeResponse.Feature::getFeatureName)) // consistent ordering to simplify testing From 3065c6d63b221da338ae84fc3ca9b54e27307507 Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Thu, 17 Feb 2022 13:50:34 -0500 Subject: [PATCH 163/167] Small formatting clean up (#84144) Replaces some funny code formatting created when we applied spotless to the code globally. Spotless does a fine job, but this was a little funky looking. --- .../snapshots/sourceonly/SourceOnlySnapshotRepository.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/snapshots/sourceonly/SourceOnlySnapshotRepository.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/snapshots/sourceonly/SourceOnlySnapshotRepository.java index 66be6d081dbda..127376c83bc43 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/snapshots/sourceonly/SourceOnlySnapshotRepository.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/snapshots/sourceonly/SourceOnlySnapshotRepository.java @@ -140,7 +140,7 @@ public void snapshotShard(SnapshotShardContext context) { && mapperService.documentMapper().sourceMapper().isComplete() == false) { context.onFailure( new IllegalStateException( - "Can't snapshot _source only on an index that has incomplete source ie. has _source disabled " + "or filters the source" + "Can't snapshot _source only on an index that has incomplete source ie. 
has _source disabled or filters the source" ) ); return; From 69e75bad3b29c0d078e42b388cf49e7351e6cbcf Mon Sep 17 00:00:00 2001 From: Julie Tibshirani Date: Thu, 17 Feb 2022 14:06:16 -0800 Subject: [PATCH 164/167] Mute GeoGridTilerTestCase#testGeoGridSetValuesBoundingBoxes_UnboundedGeoShapeCellValues --- .../search/aggregations/bucket/geogrid/GeoGridTilerTestCase.java | 1 + 1 file changed, 1 insertion(+) diff --git a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/search/aggregations/bucket/geogrid/GeoGridTilerTestCase.java b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/search/aggregations/bucket/geogrid/GeoGridTilerTestCase.java index 15dd9383e5022..c204857e88c9f 100644 --- a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/search/aggregations/bucket/geogrid/GeoGridTilerTestCase.java +++ b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/search/aggregations/bucket/geogrid/GeoGridTilerTestCase.java @@ -145,6 +145,7 @@ public void testGeoGridSetValuesBoundingBoxes_coversAllLongitudeValues() throws assertThat(numBuckets, equalTo(expected)); } + @AwaitsFix(bugUrl="https://github.com/elastic/elasticsearch/issues/84152") public void testGeoGridSetValuesBoundingBoxes_UnboundedGeoShapeCellValues() throws Exception { for (int i = 0; i < 1000; i++) { int precision = randomIntBetween(0, 3); From d85b13cc8638c13e3b0ff4c6692113486b3512fe Mon Sep 17 00:00:00 2001 From: Julie Tibshirani Date: Thu, 17 Feb 2022 14:27:15 -0800 Subject: [PATCH 165/167] Fix spotless violation from last commit --- .../aggregations/bucket/geogrid/GeoGridTilerTestCase.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/search/aggregations/bucket/geogrid/GeoGridTilerTestCase.java b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/search/aggregations/bucket/geogrid/GeoGridTilerTestCase.java index c204857e88c9f..ce1cee90745a0 100644 --- a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/search/aggregations/bucket/geogrid/GeoGridTilerTestCase.java +++ b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/search/aggregations/bucket/geogrid/GeoGridTilerTestCase.java @@ -145,7 +145,7 @@ public void testGeoGridSetValuesBoundingBoxes_coversAllLongitudeValues() throws assertThat(numBuckets, equalTo(expected)); } - @AwaitsFix(bugUrl="https://github.com/elastic/elasticsearch/issues/84152") + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/84152") public void testGeoGridSetValuesBoundingBoxes_UnboundedGeoShapeCellValues() throws Exception { for (int i = 0; i < 1000; i++) { int precision = randomIntBetween(0, 3); From 1f6da034fd897dc0e24e09df282492d33878427e Mon Sep 17 00:00:00 2001 From: Julie Tibshirani Date: Thu, 17 Feb 2022 15:43:05 -0800 Subject: [PATCH 166/167] Mute LdapSessionFactoryTests#testSslTrustIsReloaded --- .../xpack/security/authc/ldap/LdapSessionFactoryTests.java | 1 + 1 file changed, 1 insertion(+) diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ldap/LdapSessionFactoryTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ldap/LdapSessionFactoryTests.java index 2ccaf567d6095..fee0034bf4a0d 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ldap/LdapSessionFactoryTests.java +++ 
b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ldap/LdapSessionFactoryTests.java @@ -281,6 +281,7 @@ public void testGroupLookupBase() throws Exception { * If the realm's CA path is monitored for changes and the underlying SSL context is reloaded, then we will get two different outcomes * (one failure, one success) depending on which file content is in place. */ + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/83560") public void testSslTrustIsReloaded() throws Exception { assumeFalse( "NPE thrown in BCFIPS JSSE - addressed in https://github.com/bcgit/bc-java/commit/" From 4ed85701610397070921881bed67e0f5269d2607 Mon Sep 17 00:00:00 2001 From: Julie Tibshirani Date: Thu, 17 Feb 2022 16:06:57 -0800 Subject: [PATCH 167/167] Mute FrozenSearchableSnapshotsIntegTests#testCreateAndRestorePartialSearchableSnapshot --- .../searchablesnapshots/FrozenSearchableSnapshotsIntegTests.java | 1 + 1 file changed, 1 insertion(+) diff --git a/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/FrozenSearchableSnapshotsIntegTests.java b/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/FrozenSearchableSnapshotsIntegTests.java index 06ceb9623909b..f29aff38bc253 100644 --- a/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/FrozenSearchableSnapshotsIntegTests.java +++ b/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/FrozenSearchableSnapshotsIntegTests.java @@ -80,6 +80,7 @@ public class FrozenSearchableSnapshotsIntegTests extends BaseFrozenSearchableSnapshotsIntegTestCase { + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/84158") public void testCreateAndRestorePartialSearchableSnapshot() throws Exception { final String fsRepoName = randomAlphaOfLength(10); final String indexName = randomAlphaOfLength(10).toLowerCase(Locale.ROOT);