From 749ad74d73083d25024756d9d078b1ddd6676b23 Mon Sep 17 00:00:00 2001 From: Yannick Welsch Date: Tue, 15 Feb 2022 08:37:12 +0100 Subject: [PATCH 01/37] License checks for archive tier (#83894) Implements enterprise license for archive data. Relates #81210 --- .../plugins/RepositoryPlugin.java | 11 + .../repositories/RepositoriesModule.java | 14 +- .../repositories/RepositoriesService.java | 11 +- .../snapshots/RestoreService.java | 61 +++--- ...ClusterStateServiceRandomUpdatesTests.java | 3 +- .../RepositoriesServiceTests.java | 3 +- .../snapshots/SnapshotResiliencyTests.java | 3 +- .../core/LocalStateCompositeXPackPlugin.java | 11 + .../plugin/old-lucene-versions/build.gradle | 2 + .../lucene/bwc/ArchiveLicenseIntegTests.java | 201 ++++++++++++++++++ .../bwc/LocalStateOldLuceneVersions.java | 32 +++ .../lucene/bwc/ArchiveAllocationDecider.java | 62 ++++++ .../xpack/lucene/bwc/ArchiveUsageTracker.java | 43 ++++ ...ShardsOnInvalidLicenseClusterListener.java | 98 +++++++++ .../xpack/lucene/bwc/OldLuceneVersions.java | 93 +++++++- 15 files changed, 612 insertions(+), 36 deletions(-) create mode 100644 x-pack/plugin/old-lucene-versions/src/internalClusterTest/java/org/elasticsearch/xpack/lucene/bwc/ArchiveLicenseIntegTests.java create mode 100644 x-pack/plugin/old-lucene-versions/src/internalClusterTest/java/org/elasticsearch/xpack/lucene/bwc/LocalStateOldLuceneVersions.java create mode 100644 x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/ArchiveAllocationDecider.java create mode 100644 x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/ArchiveUsageTracker.java create mode 100644 x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/FailShardsOnInvalidLicenseClusterListener.java diff --git a/server/src/main/java/org/elasticsearch/plugins/RepositoryPlugin.java b/server/src/main/java/org/elasticsearch/plugins/RepositoryPlugin.java index 41e0e9b3704cb..476baf1c28f63 100644 --- a/server/src/main/java/org/elasticsearch/plugins/RepositoryPlugin.java +++ b/server/src/main/java/org/elasticsearch/plugins/RepositoryPlugin.java @@ -8,6 +8,7 @@ package org.elasticsearch.plugins; +import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.env.Environment; @@ -17,6 +18,7 @@ import java.util.Collections; import java.util.Map; +import java.util.function.Consumer; /** * An extension point for {@link Plugin} implementations to add custom snapshot repositories. @@ -59,4 +61,13 @@ default Map getInternalRepositories( return Collections.emptyMap(); } + /** + * Returns a check that is run on restore. This allows plugins to prevent certain restores from happening. 
+ * + * returns null if no check is provided + */ + default Consumer addPreRestoreCheck() { + return null; + } + } diff --git a/server/src/main/java/org/elasticsearch/repositories/RepositoriesModule.java b/server/src/main/java/org/elasticsearch/repositories/RepositoriesModule.java index 89992b20fe96f..21de3f2f961c6 100644 --- a/server/src/main/java/org/elasticsearch/repositories/RepositoriesModule.java +++ b/server/src/main/java/org/elasticsearch/repositories/RepositoriesModule.java @@ -8,6 +8,7 @@ package org.elasticsearch.repositories; +import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.BigArrays; @@ -18,10 +19,12 @@ import org.elasticsearch.transport.TransportService; import org.elasticsearch.xcontent.NamedXContentRegistry; +import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.function.Consumer; /** * Sets up classes for Snapshot/Restore. @@ -80,6 +83,14 @@ public RepositoriesModule( } } + List> preRestoreChecks = new ArrayList<>(); + for (RepositoryPlugin repoPlugin : repoPlugins) { + Consumer preRestoreCheck = repoPlugin.addPreRestoreCheck(); + if (preRestoreCheck != null) { + preRestoreChecks.add(preRestoreCheck); + } + } + Settings settings = env.settings(); Map repositoryTypes = Collections.unmodifiableMap(factories); Map internalRepositoryTypes = Collections.unmodifiableMap(internalFactories); @@ -89,7 +100,8 @@ public RepositoriesModule( transportService, repositoryTypes, internalRepositoryTypes, - transportService.getThreadPool() + transportService.getThreadPool(), + preRestoreChecks ); } diff --git a/server/src/main/java/org/elasticsearch/repositories/RepositoriesService.java b/server/src/main/java/org/elasticsearch/repositories/RepositoriesService.java index 6d1bcd0a131cd..6b837f20eb045 100644 --- a/server/src/main/java/org/elasticsearch/repositories/RepositoriesService.java +++ b/server/src/main/java/org/elasticsearch/repositories/RepositoriesService.java @@ -56,6 +56,7 @@ import java.util.Map; import java.util.Objects; import java.util.Set; +import java.util.function.Consumer; import java.util.function.Function; import java.util.stream.Collectors; import java.util.stream.Stream; @@ -97,13 +98,16 @@ public class RepositoriesService extends AbstractLifecycleComponent implements C private volatile Map repositories = Collections.emptyMap(); private final RepositoriesStatsArchive repositoriesStatsArchive; + private final List> preRestoreChecks; + public RepositoriesService( Settings settings, ClusterService clusterService, TransportService transportService, Map typesRegistry, Map internalTypesRegistry, - ThreadPool threadPool + ThreadPool threadPool, + List> preRestoreChecks ) { this.typesRegistry = typesRegistry; this.internalTypesRegistry = internalTypesRegistry; @@ -122,6 +126,7 @@ public RepositoriesService( REPOSITORIES_STATS_ARCHIVE_MAX_ARCHIVED_STATS.get(settings), threadPool::relativeTimeInMillis ); + this.preRestoreChecks = preRestoreChecks; } /** @@ -776,6 +781,10 @@ private static RepositoryConflictException newRepositoryConflictException(String ); } + public List> getPreRestoreChecks() { + return preRestoreChecks; + } + @Override protected void doStart() { diff --git a/server/src/main/java/org/elasticsearch/snapshots/RestoreService.java b/server/src/main/java/org/elasticsearch/snapshots/RestoreService.java index 
b9e600e228de5..d1b996978aa31 100644 --- a/server/src/main/java/org/elasticsearch/snapshots/RestoreService.java +++ b/server/src/main/java/org/elasticsearch/snapshots/RestoreService.java @@ -1289,9 +1289,11 @@ public ClusterState execute(ClusterState currentState) { final String localNodeId = clusterService.state().nodes().getLocalNodeId(); for (Map.Entry indexEntry : indicesToRestore.entrySet()) { final IndexId index = indexEntry.getValue(); + final IndexMetadata originalIndexMetadata = metadata.index(index.getName()); + repositoriesService.getPreRestoreChecks().forEach(check -> check.accept(originalIndexMetadata)); IndexMetadata snapshotIndexMetadata = updateIndexSettings( snapshot, - metadata.index(index.getName()), + originalIndexMetadata, request.indexSettings(), request.ignoreIndexSettings() ); @@ -1591,39 +1593,40 @@ private IndexMetadata convertLegacyIndex(IndexMetadata snapshotIndexMetadata, Cl if (snapshotIndexMetadata.getCreationVersion().before(Version.fromString("5.0.0"))) { throw new IllegalArgumentException("can't restore an index created before version 5.0.0"); } + IndexMetadata.Builder convertedIndexMetadata = IndexMetadata.builder(snapshotIndexMetadata); MappingMetadata mappingMetadata = snapshotIndexMetadata.mapping(); - Map loadedMappingSource = mappingMetadata.rawSourceAsMap(); - - // store old mapping under _meta/legacy_mappings - Map legacyMapping = new LinkedHashMap<>(); - boolean sourceOnlySnapshot = snapshotIndexMetadata.getSettings().getAsBoolean("index.source_only", false); - if (sourceOnlySnapshot) { - // actual mapping is under "_meta" (but strip type first) - Object sourceOnlyMeta = mappingMetadata.sourceAsMap().get("_meta"); - if (sourceOnlyMeta instanceof Map sourceOnlyMetaMap) { - legacyMapping.put("legacy_mappings", sourceOnlyMetaMap); + if (mappingMetadata != null) { + Map loadedMappingSource = mappingMetadata.rawSourceAsMap(); + + // store old mapping under _meta/legacy_mappings + Map legacyMapping = new LinkedHashMap<>(); + boolean sourceOnlySnapshot = snapshotIndexMetadata.getSettings().getAsBoolean("index.source_only", false); + if (sourceOnlySnapshot) { + // actual mapping is under "_meta" (but strip type first) + Object sourceOnlyMeta = mappingMetadata.sourceAsMap().get("_meta"); + if (sourceOnlyMeta instanceof Map sourceOnlyMetaMap) { + legacyMapping.put("legacy_mappings", sourceOnlyMetaMap); + } + } else { + legacyMapping.put("legacy_mappings", loadedMappingSource); } - } else { - legacyMapping.put("legacy_mappings", loadedMappingSource); - } - Map newMappingSource = new LinkedHashMap<>(); - newMappingSource.put("_meta", legacyMapping); + Map newMappingSource = new LinkedHashMap<>(); + newMappingSource.put("_meta", legacyMapping); - Map newMapping = new LinkedHashMap<>(); - newMapping.put(mappingMetadata.type(), newMappingSource); + Map newMapping = new LinkedHashMap<>(); + newMapping.put(mappingMetadata.type(), newMappingSource); + + convertedIndexMetadata.putMapping(new MappingMetadata(mappingMetadata.type(), newMapping)); + } + + convertedIndexMetadata.settings( + Settings.builder() + .put(snapshotIndexMetadata.getSettings()) + .put(IndexMetadata.SETTING_INDEX_VERSION_COMPATIBILITY.getKey(), clusterState.getNodes().getSmallestNonClientNodeVersion()) + ); // TODO: _routing? 
Perhaps we don't need to obey any routing here as stuff is read-only anyway and get API will be disabled - return IndexMetadata.builder(snapshotIndexMetadata) - .putMapping(new MappingMetadata(mappingMetadata.type(), newMapping)) - .settings( - Settings.builder() - .put(snapshotIndexMetadata.getSettings()) - .put( - IndexMetadata.SETTING_INDEX_VERSION_COMPATIBILITY.getKey(), - clusterState.getNodes().getSmallestNonClientNodeVersion() - ) - ) - .build(); + return convertedIndexMetadata.build(); } private static IndexMetadata.Builder restoreToCreateNewIndex(IndexMetadata snapshotIndexMetadata, String renamedIndexName) { diff --git a/server/src/test/java/org/elasticsearch/indices/cluster/IndicesClusterStateServiceRandomUpdatesTests.java b/server/src/test/java/org/elasticsearch/indices/cluster/IndicesClusterStateServiceRandomUpdatesTests.java index 57d82cb834b7f..1aa7902112710 100644 --- a/server/src/test/java/org/elasticsearch/indices/cluster/IndicesClusterStateServiceRandomUpdatesTests.java +++ b/server/src/test/java/org/elasticsearch/indices/cluster/IndicesClusterStateServiceRandomUpdatesTests.java @@ -525,7 +525,8 @@ private IndicesClusterStateService createIndicesClusterStateService( transportService, Collections.emptyMap(), Collections.emptyMap(), - threadPool + threadPool, + List.of() ); final PeerRecoveryTargetService recoveryTargetService = new PeerRecoveryTargetService( threadPool, diff --git a/server/src/test/java/org/elasticsearch/repositories/RepositoriesServiceTests.java b/server/src/test/java/org/elasticsearch/repositories/RepositoriesServiceTests.java index 9525055b13b2d..04c8a08462896 100644 --- a/server/src/test/java/org/elasticsearch/repositories/RepositoriesServiceTests.java +++ b/server/src/test/java/org/elasticsearch/repositories/RepositoriesServiceTests.java @@ -93,7 +93,8 @@ public void setUp() throws Exception { transportService, typesRegistry, typesRegistry, - threadPool + threadPool, + List.of() ); repositoriesService.start(); } diff --git a/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java b/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java index b0325195bc17b..9b4a06afa705a 100644 --- a/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java +++ b/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java @@ -1715,7 +1715,8 @@ protected void assertSnapshotOrGenericThread() { } ), emptyMap(), - threadPool + threadPool, + List.of() ); final ActionFilters actionFilters = new ActionFilters(emptySet()); snapshotsService = new SnapshotsService( diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/LocalStateCompositeXPackPlugin.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/LocalStateCompositeXPackPlugin.java index de78339b67492..35dccbb3ef9ed 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/LocalStateCompositeXPackPlugin.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/LocalStateCompositeXPackPlugin.java @@ -20,6 +20,7 @@ import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.coordination.ElectionStrategy; +import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.IndexTemplateMetadata; import org.elasticsearch.cluster.metadata.SingleNodeShutdownMetadata; @@ -104,6 +105,7 @@ import java.util.Optional; 
import java.util.Set; import java.util.function.BiConsumer; +import java.util.function.Consumer; import java.util.function.Function; import java.util.function.LongSupplier; import java.util.function.Predicate; @@ -568,6 +570,15 @@ public Map getInternalRepositories( return internalRepositories; } + @Override + public Consumer addPreRestoreCheck() { + List> checks = filterPlugins(RepositoryPlugin.class).stream() + .map(RepositoryPlugin::addPreRestoreCheck) + .filter(Objects::nonNull) + .collect(Collectors.toList()); + return checks.isEmpty() ? null : imd -> checks.forEach(c -> c.accept(imd)); + } + @Override public void close() throws IOException { IOUtils.close(plugins); diff --git a/x-pack/plugin/old-lucene-versions/build.gradle b/x-pack/plugin/old-lucene-versions/build.gradle index 22ab9d7bf24ce..e59b68f040f6f 100644 --- a/x-pack/plugin/old-lucene-versions/build.gradle +++ b/x-pack/plugin/old-lucene-versions/build.gradle @@ -1,3 +1,4 @@ +apply plugin: 'elasticsearch.internal-cluster-test' apply plugin: 'elasticsearch.internal-es-plugin' apply plugin: 'elasticsearch.internal-test-artifact' @@ -11,6 +12,7 @@ archivesBaseName = 'x-pack-old-lucene-versions' dependencies { compileOnly project(path: xpackModule('core')) + internalClusterTestImplementation(testArtifact(project(xpackModule('core')))) } addQaCheckDependencies() diff --git a/x-pack/plugin/old-lucene-versions/src/internalClusterTest/java/org/elasticsearch/xpack/lucene/bwc/ArchiveLicenseIntegTests.java b/x-pack/plugin/old-lucene-versions/src/internalClusterTest/java/org/elasticsearch/xpack/lucene/bwc/ArchiveLicenseIntegTests.java new file mode 100644 index 0000000000000..0c37eac048853 --- /dev/null +++ b/x-pack/plugin/old-lucene-versions/src/internalClusterTest/java/org/elasticsearch/xpack/lucene/bwc/ArchiveLicenseIntegTests.java @@ -0,0 +1,201 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.lucene.bwc; + +import org.elasticsearch.ElasticsearchSecurityException; +import org.elasticsearch.Version; +import org.elasticsearch.action.admin.cluster.snapshots.restore.RestoreSnapshotRequest; +import org.elasticsearch.action.admin.cluster.snapshots.restore.RestoreSnapshotResponse; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.health.ClusterHealthStatus; +import org.elasticsearch.cluster.metadata.IndexMetadata; +import org.elasticsearch.cluster.metadata.Metadata; +import org.elasticsearch.cluster.metadata.RepositoryMetadata; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.env.Environment; +import org.elasticsearch.indices.recovery.RecoverySettings; +import org.elasticsearch.license.DeleteLicenseAction; +import org.elasticsearch.license.License; +import org.elasticsearch.license.LicensesMetadata; +import org.elasticsearch.license.PostStartBasicAction; +import org.elasticsearch.license.PostStartBasicRequest; +import org.elasticsearch.license.PostStartTrialAction; +import org.elasticsearch.license.PostStartTrialRequest; +import org.elasticsearch.license.PostStartTrialResponse; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.plugins.RepositoryPlugin; +import org.elasticsearch.protocol.xpack.license.DeleteLicenseRequest; +import org.elasticsearch.repositories.IndexId; +import org.elasticsearch.repositories.Repository; +import org.elasticsearch.repositories.RepositoryData; +import org.elasticsearch.repositories.fs.FsRepository; +import org.elasticsearch.snapshots.AbstractSnapshotIntegTestCase; +import org.elasticsearch.snapshots.RestoreService; +import org.elasticsearch.snapshots.SnapshotId; +import org.elasticsearch.snapshots.mockstore.MockRepository; +import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.xcontent.NamedXContentRegistry; +import org.junit.Before; + +import java.io.IOException; +import java.util.Arrays; +import java.util.Collection; +import java.util.Map; + +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.oneOf; + +@ESIntegTestCase.ClusterScope(supportsDedicatedMasters = false, numClientNodes = 0, scope = ESIntegTestCase.Scope.TEST) +public class ArchiveLicenseIntegTests extends AbstractSnapshotIntegTestCase { + + @Override + protected Collection> nodePlugins() { + return Arrays.asList(LocalStateOldLuceneVersions.class, TestRepositoryPlugin.class, MockRepository.Plugin.class); + } + + public static class TestRepositoryPlugin extends Plugin implements RepositoryPlugin { + public static final String FAKE_VERSIONS_TYPE = "fakeversionsrepo"; + + @Override + public Map getRepositories( + Environment env, + NamedXContentRegistry namedXContentRegistry, + ClusterService clusterService, + BigArrays bigArrays, + RecoverySettings recoverySettings + ) { + return Map.of( + FAKE_VERSIONS_TYPE, + metadata -> new FakeVersionsRepo(metadata, env, namedXContentRegistry, clusterService, bigArrays, recoverySettings) + ); + } + + // fakes an old index version format to activate license checks + private static class FakeVersionsRepo extends FsRepository { + FakeVersionsRepo( + RepositoryMetadata metadata, + Environment env, + NamedXContentRegistry namedXContentRegistry, + 
ClusterService clusterService, + BigArrays bigArrays, + RecoverySettings recoverySettings + ) { + super(metadata, env, namedXContentRegistry, clusterService, bigArrays, recoverySettings); + } + + @Override + public IndexMetadata getSnapshotIndexMetaData(RepositoryData repositoryData, SnapshotId snapshotId, IndexId index) + throws IOException { + final IndexMetadata original = super.getSnapshotIndexMetaData(repositoryData, snapshotId, index); + return IndexMetadata.builder(original) + .settings( + Settings.builder() + .put(original.getSettings()) + .put( + IndexMetadata.SETTING_INDEX_VERSION_CREATED.getKey(), + randomBoolean() ? Version.fromString("5.0.0") : Version.fromString("6.0.0") + ) + ) + .build(); + } + } + } + + private static final String repoName = "test-repo"; + private static final String indexName = "test-index"; + private static final String snapshotName = "test-snapshot"; + + @Before + public void createAndRestoreArchive() throws Exception { + createRepository( + repoName, + TestRepositoryPlugin.FAKE_VERSIONS_TYPE, + randomRepositorySettings().put(RestoreService.ALLOW_BWC_INDICES_SETTING.getKey(), true) + ); + createIndex(indexName); + createFullSnapshot(repoName, snapshotName); + + assertAcked(client().admin().indices().prepareDelete(indexName)); + + PostStartTrialRequest request = new PostStartTrialRequest().setType(License.LicenseType.TRIAL.getTypeName()).acknowledge(true); + client().execute(PostStartTrialAction.INSTANCE, request).get(); + } + + public void testFailRestoreOnInvalidLicense() throws Exception { + assertAcked(client().execute(DeleteLicenseAction.INSTANCE, new DeleteLicenseRequest()).get()); + assertAcked(client().execute(PostStartBasicAction.INSTANCE, new PostStartBasicRequest()).get()); + + ensureClusterSizeConsistency(); + ensureClusterStateConsistency(); + + final RestoreSnapshotRequest req = new RestoreSnapshotRequest(repoName, snapshotName).indices(indexName).waitForCompletion(true); + ElasticsearchSecurityException e = expectThrows( + ElasticsearchSecurityException.class, + () -> client().admin().cluster().restoreSnapshot(req).actionGet() + ); + assertThat(e.getMessage(), containsString("current license is non-compliant for [archive]")); + } + + // checks that shards are failed if license becomes invalid after successful restore + public void testShardAllocationOnInvalidLicense() throws Exception { + final RestoreSnapshotRequest req = new RestoreSnapshotRequest(repoName, snapshotName).indices(indexName).waitForCompletion(true); + + final RestoreSnapshotResponse restoreSnapshotResponse = client().admin().cluster().restoreSnapshot(req).get(); + assertThat(restoreSnapshotResponse.getRestoreInfo().failedShards(), equalTo(0)); + ensureGreen(indexName); + + assertAcked(client().execute(DeleteLicenseAction.INSTANCE, new DeleteLicenseRequest()).get()); + assertAcked(client().execute(PostStartBasicAction.INSTANCE, new PostStartBasicRequest()).get()); + + ensureClusterSizeConsistency(); + ensureClusterStateConsistency(); + + // check that shards have been failed as part of invalid license + assertBusy( + () -> assertEquals( + ClusterHealthStatus.RED, + client().admin().cluster().prepareHealth(indexName).get().getIndices().get(indexName).getStatus() + ) + ); + + waitNoPendingTasksOnAll(); + ensureClusterStateConsistency(); + + // add a valid license again + // This is a bit of a hack in tests, as we can't readd a trial license + // We force this by clearing the existing basic license first + updateClusterState( + currentState -> 
ClusterState.builder(currentState) + .metadata(Metadata.builder(currentState.metadata()).removeCustom(LicensesMetadata.TYPE).build()) + .build() + ); + + waitNoPendingTasksOnAll(); + ensureClusterStateConsistency(); + + PostStartTrialRequest request = new PostStartTrialRequest().setType(License.LicenseType.TRIAL.getTypeName()).acknowledge(true); + final PostStartTrialResponse response = client().execute(PostStartTrialAction.INSTANCE, request).get(); + assertThat( + response.getStatus(), + oneOf( + PostStartTrialResponse.Status.UPGRADED_TO_TRIAL, + // The LicenceService automatically generates a license of {@link LicenceService#SELF_GENERATED_LICENSE_TYPE} type + // if there is no license found in the cluster state (see {@link LicenceService#registerOrUpdateSelfGeneratedLicense). + // Since this test explicitly removes the LicensesMetadata from cluster state it is possible that the self generated + // license is created before the PostStartTrialRequest is acked. + PostStartTrialResponse.Status.TRIAL_ALREADY_ACTIVATED + ) + ); + // check if cluster goes green again after valid license has been put in place + ensureGreen(indexName); + } +} diff --git a/x-pack/plugin/old-lucene-versions/src/internalClusterTest/java/org/elasticsearch/xpack/lucene/bwc/LocalStateOldLuceneVersions.java b/x-pack/plugin/old-lucene-versions/src/internalClusterTest/java/org/elasticsearch/xpack/lucene/bwc/LocalStateOldLuceneVersions.java new file mode 100644 index 0000000000000..e4a6110be7693 --- /dev/null +++ b/x-pack/plugin/old-lucene-versions/src/internalClusterTest/java/org/elasticsearch/xpack/lucene/bwc/LocalStateOldLuceneVersions.java @@ -0,0 +1,32 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.lucene.bwc; + +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.license.XPackLicenseState; +import org.elasticsearch.xpack.core.LocalStateCompositeXPackPlugin; + +import java.nio.file.Path; + +public class LocalStateOldLuceneVersions extends LocalStateCompositeXPackPlugin { + + private final OldLuceneVersions plugin; + + public LocalStateOldLuceneVersions(final Settings settings, final Path configPath) { + super(settings, configPath); + this.plugin = new OldLuceneVersions() { + + @Override + protected XPackLicenseState getLicenseState() { + return LocalStateOldLuceneVersions.this.getLicenseState(); + } + + }; + plugins.add(plugin); + } +} diff --git a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/ArchiveAllocationDecider.java b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/ArchiveAllocationDecider.java new file mode 100644 index 0000000000000..fdd3cf1f5f8e5 --- /dev/null +++ b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/ArchiveAllocationDecider.java @@ -0,0 +1,62 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.lucene.bwc; + +import org.elasticsearch.cluster.metadata.IndexMetadata; +import org.elasticsearch.cluster.routing.RoutingNode; +import org.elasticsearch.cluster.routing.ShardRouting; +import org.elasticsearch.cluster.routing.allocation.RoutingAllocation; +import org.elasticsearch.cluster.routing.allocation.decider.AllocationDecider; +import org.elasticsearch.cluster.routing.allocation.decider.Decision; + +import java.util.function.BooleanSupplier; + +import static org.elasticsearch.xpack.lucene.bwc.OldLuceneVersions.isArchiveIndex; + +public class ArchiveAllocationDecider extends AllocationDecider { + + static final String NAME = "archive"; + + private final BooleanSupplier hasValidLicenseSupplier; + + public ArchiveAllocationDecider(BooleanSupplier hasValidLicenseSupplier) { + this.hasValidLicenseSupplier = hasValidLicenseSupplier; + } + + @Override + public Decision canAllocate(ShardRouting shardRouting, RoutingNode node, RoutingAllocation allocation) { + return allowAllocation(allocation.metadata().getIndexSafe(shardRouting.index()), allocation); + } + + @Override + public Decision canAllocate(ShardRouting shardRouting, RoutingAllocation allocation) { + return allowAllocation(allocation.metadata().getIndexSafe(shardRouting.index()), allocation); + } + + @Override + public Decision canAllocate(IndexMetadata indexMetadata, RoutingNode node, RoutingAllocation allocation) { + return allowAllocation(indexMetadata, allocation); + } + + @Override + public Decision canForceAllocatePrimary(ShardRouting shardRouting, RoutingNode node, RoutingAllocation allocation) { + return allowAllocation(allocation.metadata().getIndexSafe(shardRouting.index()), allocation); + } + + private Decision allowAllocation(IndexMetadata indexMetadata, RoutingAllocation allocation) { + if (isArchiveIndex(indexMetadata.getCreationVersion())) { + if (hasValidLicenseSupplier.getAsBoolean()) { + return allocation.decision(Decision.YES, NAME, "valid license for archive functionality"); + } else { + return allocation.decision(Decision.NO, NAME, "invalid license for archive functionality"); + } + } else { + return allocation.decision(Decision.YES, NAME, "decider only applicable for indices backed by archive functionality"); + } + } +} diff --git a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/ArchiveUsageTracker.java b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/ArchiveUsageTracker.java new file mode 100644 index 0000000000000..1dcd658c1666c --- /dev/null +++ b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/ArchiveUsageTracker.java @@ -0,0 +1,43 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.lucene.bwc; + +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.metadata.IndexMetadata; +import org.elasticsearch.license.XPackLicenseState; + +import java.util.function.Supplier; + +import static org.elasticsearch.xpack.lucene.bwc.OldLuceneVersions.ARCHIVE_FEATURE; + +final class ArchiveUsageTracker implements Runnable { + + private final XPackLicenseState licenseState; + private final Supplier clusterStateSupplier; + + ArchiveUsageTracker(XPackLicenseState licenseState, Supplier clusterStateSupplier) { + this.clusterStateSupplier = clusterStateSupplier; + this.licenseState = licenseState; + } + + @Override + public void run() { + if (hasArchiveIndices(clusterStateSupplier.get())) { + ARCHIVE_FEATURE.check(licenseState); + } + } + + private static boolean hasArchiveIndices(ClusterState state) { + for (IndexMetadata indexMetadata : state.metadata()) { + if (OldLuceneVersions.isArchiveIndex(indexMetadata.getCreationVersion())) { + return true; + } + } + return false; + } +} diff --git a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/FailShardsOnInvalidLicenseClusterListener.java b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/FailShardsOnInvalidLicenseClusterListener.java new file mode 100644 index 0000000000000..2cf7160518d74 --- /dev/null +++ b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/FailShardsOnInvalidLicenseClusterListener.java @@ -0,0 +1,98 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.lucene.bwc; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.apache.logging.log4j.message.ParameterizedMessage; +import org.apache.lucene.store.AlreadyClosedException; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.routing.RerouteService; +import org.elasticsearch.common.Priority; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.index.shard.IndexEventListener; +import org.elasticsearch.index.shard.IndexShard; +import org.elasticsearch.index.shard.ShardId; +import org.elasticsearch.license.LicenseStateListener; +import org.elasticsearch.license.XPackLicenseState; + +import java.util.HashSet; +import java.util.Set; + +import static org.elasticsearch.xpack.lucene.bwc.OldLuceneVersions.ARCHIVE_FEATURE; + +public class FailShardsOnInvalidLicenseClusterListener implements LicenseStateListener, IndexEventListener { + + private static final Logger logger = LogManager.getLogger(FailShardsOnInvalidLicenseClusterListener.class); + + private final XPackLicenseState xPackLicenseState; + + private final RerouteService rerouteService; + + final Set shardsToFail = new HashSet<>(); + + private boolean allowed; + + public FailShardsOnInvalidLicenseClusterListener(XPackLicenseState xPackLicenseState, RerouteService rerouteService) { + this.xPackLicenseState = xPackLicenseState; + this.rerouteService = rerouteService; + this.allowed = ARCHIVE_FEATURE.checkWithoutTracking(xPackLicenseState); + xPackLicenseState.addListener(this); + } + + @Override + public synchronized void afterIndexShardStarted(IndexShard indexShard) { + shardsToFail.add(indexShard); + failActiveShardsIfNecessary(); + } + + @Override + public synchronized void beforeIndexShardClosed(ShardId shardId, @Nullable IndexShard indexShard, Settings indexSettings) { + if (indexShard != null) { + shardsToFail.remove(indexShard); + } + } + + @Override + public synchronized void licenseStateChanged() { + final boolean allowed = ARCHIVE_FEATURE.checkWithoutTracking(xPackLicenseState); + if (allowed && this.allowed == false) { + rerouteService.reroute("reroute after license activation", Priority.NORMAL, new ActionListener() { + @Override + public void onResponse(ClusterState clusterState) { + logger.trace("successful reroute after license activation"); + } + + @Override + public void onFailure(Exception e) { + logger.debug("unsuccessful reroute after license activation"); + } + }); + } + this.allowed = allowed; + failActiveShardsIfNecessary(); + } + + private void failActiveShardsIfNecessary() { + assert Thread.holdsLock(this); + if (allowed == false) { + for (IndexShard indexShard : shardsToFail) { + try { + indexShard.failShard("invalid license", null); + } catch (AlreadyClosedException ignored) { + // ignore + } catch (Exception e) { + logger.warn(new ParameterizedMessage("Could not close shard {} due to invalid license", indexShard.shardId()), e); + } + } + shardsToFail.clear(); + } + } +} diff --git a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/OldLuceneVersions.java b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/OldLuceneVersions.java index 40b021f9ea9d8..631de49d4fa1d 100644 --- a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/OldLuceneVersions.java +++ 
b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/OldLuceneVersions.java @@ -10,37 +10,126 @@ import org.apache.lucene.index.SegmentCommitInfo; import org.apache.lucene.index.SegmentInfo; import org.apache.lucene.index.SegmentInfos; +import org.apache.lucene.util.SetOnce; import org.elasticsearch.Version; +import org.elasticsearch.client.internal.Client; +import org.elasticsearch.cluster.metadata.IndexMetadata; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.cluster.routing.allocation.decider.AllocationDecider; +import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.UUIDs; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.lucene.Lucene; +import org.elasticsearch.common.settings.ClusterSettings; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.env.Environment; +import org.elasticsearch.env.NodeEnvironment; import org.elasticsearch.index.IndexModule; import org.elasticsearch.index.engine.Engine; import org.elasticsearch.index.seqno.SequenceNumbers; import org.elasticsearch.index.shard.IndexEventListener; import org.elasticsearch.index.shard.IndexShard; +import org.elasticsearch.license.License; +import org.elasticsearch.license.LicenseUtils; +import org.elasticsearch.license.LicensedFeature; +import org.elasticsearch.license.XPackLicenseState; +import org.elasticsearch.plugins.ClusterPlugin; import org.elasticsearch.plugins.IndexStorePlugin; import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.plugins.RepositoryPlugin; +import org.elasticsearch.repositories.RepositoriesService; +import org.elasticsearch.script.ScriptService; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.watcher.ResourceWatcherService; +import org.elasticsearch.xcontent.NamedXContentRegistry; +import org.elasticsearch.xpack.core.XPackPlugin; import org.elasticsearch.xpack.lucene.bwc.codecs.BWCCodec; import java.io.IOException; import java.io.UncheckedIOException; +import java.util.Collection; import java.util.HashMap; +import java.util.List; import java.util.Map; +import java.util.function.Consumer; +import java.util.function.Supplier; -public class OldLuceneVersions extends Plugin implements IndexStorePlugin { +public class OldLuceneVersions extends Plugin implements IndexStorePlugin, ClusterPlugin, RepositoryPlugin { + + public static final LicensedFeature.Momentary ARCHIVE_FEATURE = LicensedFeature.momentary( + null, + "archive", + License.OperationMode.ENTERPRISE + ); + + public static boolean isArchiveIndex(Version version) { + return version.before(Version.CURRENT.minimumIndexCompatibilityVersion()); + } + + private final SetOnce failShardsListener = new SetOnce<>(); + + @Override + public Collection createComponents( + final Client client, + final ClusterService clusterService, + final ThreadPool threadPool, + final ResourceWatcherService resourceWatcherService, + final ScriptService scriptService, + final NamedXContentRegistry xContentRegistry, + final Environment environment, + final NodeEnvironment nodeEnvironment, + final NamedWriteableRegistry registry, + final IndexNameExpressionResolver resolver, + final Supplier repositoriesServiceSupplier + ) { + this.failShardsListener.set(new FailShardsOnInvalidLicenseClusterListener(getLicenseState(), clusterService.getRerouteService())); + if 
(DiscoveryNode.isMasterNode(environment.settings())) { + // We periodically look through the indices and identify if there are any archive indices, + // marking the feature as used if so. We do this on each master node so that if one master fails, the + // others continue reporting usage state. + var usageTracker = new ArchiveUsageTracker(getLicenseState(), clusterService::state); + threadPool.scheduleWithFixedDelay(usageTracker, TimeValue.timeValueMinutes(15), ThreadPool.Names.GENERIC); + } + return List.of(); + } + + // overridable by tests + protected XPackLicenseState getLicenseState() { + return XPackPlugin.getSharedLicenseState(); + } + + @Override + public Collection<AllocationDecider> createAllocationDeciders(Settings settings, ClusterSettings clusterSettings) { + return List.of(new ArchiveAllocationDecider(() -> ARCHIVE_FEATURE.checkWithoutTracking(getLicenseState()))); + } @Override public void onIndexModule(IndexModule indexModule) { - if (indexModule.indexSettings().getIndexVersionCreated().before(Version.CURRENT.minimumIndexCompatibilityVersion())) { + if (isArchiveIndex(indexModule.indexSettings().getIndexVersionCreated())) { indexModule.addIndexEventListener(new IndexEventListener() { @Override public void afterFilesRestoredFromRepository(IndexShard indexShard) { convertToNewFormat(indexShard); } }); + + indexModule.addIndexEventListener(failShardsListener.get()); } } + @Override + public Consumer<IndexMetadata> addPreRestoreCheck() { + return indexMetadata -> { + if (isArchiveIndex(indexMetadata.getCreationVersion())) { + if (ARCHIVE_FEATURE.checkWithoutTracking(getLicenseState()) == false) { + throw LicenseUtils.newComplianceException("archive"); + } + } + }; + } + /** * The trick used to allow newer Lucene versions to read older Lucene indices is to convert the old directory to a directory that new * Lucene versions happily operate on.
The way newer Lucene versions happily comply with reading older data is to put in place a From 505b0d9451a05d7dc8b9572b7a387f78b53ddb3f Mon Sep 17 00:00:00 2001 From: Ievgen Degtiarenko Date: Tue, 15 Feb 2022 08:43:50 +0100 Subject: [PATCH 02/37] refactor health service creation (#83831) This commit extracts health service creation into a separate method in order to reduce overall service wiring method complexity --- .../java/org/elasticsearch/node/Node.java | 35 ++++++++++--------- 1 file changed, 18 insertions(+), 17 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/node/Node.java b/server/src/main/java/org/elasticsearch/node/Node.java index e86ebc4494b11..e2939d47bcebe 100644 --- a/server/src/main/java/org/elasticsearch/node/Node.java +++ b/server/src/main/java/org/elasticsearch/node/Node.java @@ -96,7 +96,6 @@ import org.elasticsearch.gateway.GatewayService; import org.elasticsearch.gateway.MetaStateService; import org.elasticsearch.gateway.PersistedClusterStateService; -import org.elasticsearch.health.HealthIndicatorService; import org.elasticsearch.health.HealthService; import org.elasticsearch.http.HttpServerTransport; import org.elasticsearch.index.IndexSettingProviders; @@ -865,7 +864,7 @@ protected Node( metadataCreateIndexService, settingsModule.getIndexScopedSettings() ); - final List> builtinTaskExecutors = Arrays.asList(systemIndexMigrationExecutor); + final List> builtinTaskExecutors = List.of(systemIndexMigrationExecutor); final List> pluginTaskExectors = pluginsService.filterPlugins(PersistentTaskPlugin.class) .stream() .map( @@ -879,10 +878,9 @@ protected Node( ) .flatMap(List::stream) .collect(toList()); - final List> allTasksExectors = Stream.of(pluginTaskExectors, builtinTaskExecutors) - .flatMap(List::stream) - .collect(toList()); - final PersistentTasksExecutorRegistry registry = new PersistentTasksExecutorRegistry(allTasksExectors); + final PersistentTasksExecutorRegistry registry = new PersistentTasksExecutorRegistry( + concatLists(pluginTaskExectors, builtinTaskExecutors) + ); final PersistentTasksClusterService persistentTasksClusterService = new PersistentTasksClusterService( settings, registry, @@ -901,15 +899,7 @@ protected Node( clusterService.getClusterSettings() ); - List serverHealthIndicatorServices = List.of( - new InstanceHasMasterHealthIndicatorService(clusterService), - new RepositoryIntegrityHealthIndicatorService(clusterService) - ); - List pluginHealthIndicatorServices = pluginsService.filterPlugins(HealthPlugin.class) - .stream() - .flatMap(plugin -> plugin.getHealthIndicatorServices().stream()) - .toList(); - HealthService healthService = new HealthService(concatLists(serverHealthIndicatorServices, pluginHealthIndicatorServices)); + HealthService healthService = createHealthService(clusterService); modules.add(b -> { b.bind(Node.class).toInstance(this); @@ -1042,6 +1032,18 @@ protected Node( } } + private HealthService createHealthService(ClusterService clusterService) { + var serverHealthIndicatorServices = List.of( + new InstanceHasMasterHealthIndicatorService(clusterService), + new RepositoryIntegrityHealthIndicatorService(clusterService) + ); + var pluginHealthIndicatorServices = pluginsService.filterPlugins(HealthPlugin.class) + .stream() + .flatMap(plugin -> plugin.getHealthIndicatorServices().stream()) + .toList(); + return new HealthService(concatLists(serverHealthIndicatorServices, pluginHealthIndicatorServices)); + } + private RecoveryPlannerService getRecoveryPlannerService( ThreadPool threadPool, ClusterService 
clusterService, @@ -1062,8 +1064,7 @@ private RecoveryPlannerService getRecoveryPlannerService( threadPool, clusterService ); - final RecoveryPlannerPlugin recoveryPlannerPlugin = recoveryPlannerPlugins.get(0); - return recoveryPlannerPlugin.createRecoveryPlannerService(shardSnapshotsService); + return recoveryPlannerPlugins.get(0).createRecoveryPlannerService(shardSnapshotsService); } protected TransportService newTransportService( From 5d8421744aaafa1c9f56ef509b35523db6817997 Mon Sep 17 00:00:00 2001 From: Quentin Pradet Date: Tue, 15 Feb 2022 13:00:52 +0400 Subject: [PATCH 03/37] Fix link to benchmark page (#83887) --- benchmarks/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/benchmarks/README.md b/benchmarks/README.md index a4d238c343c1c..9e86dd49b60f6 100644 --- a/benchmarks/README.md +++ b/benchmarks/README.md @@ -5,7 +5,7 @@ This directory contains the microbenchmark suite of Elasticsearch. It relies on ## Purpose We do not want to microbenchmark everything but the kitchen sink and should typically rely on our -[macrobenchmarks](https://elasticsearch-benchmarks.elastic.co/app/kibana#/dashboard/Nightly-Benchmark-Overview) with +[macrobenchmarks](https://elasticsearch-benchmarks.elastic.co/) with [Rally](http://github.com/elastic/rally). Microbenchmarks are intended to spot performance regressions in performance-critical components. The microbenchmark suite is also handy for ad-hoc microbenchmarks but please remove them again before merging your PR. From 68a04a36cfa2308302df269f5ed2d0b81cb228da Mon Sep 17 00:00:00 2001 From: Lukas Wegmann Date: Tue, 15 Feb 2022 11:10:55 +0100 Subject: [PATCH 04/37] SQL: Replace scroll cursors with point-in-time and search_after (#83381) Resolves #61873 The goal of this PR is to remove the use of the deprecated scroll cursors in SQL. Functionality and APIs should remain the same, with one notable difference: the last page of a search hit query used to always include a scroll cursor if it is non-empty. This is no longer the case: if a result set is exhausted, the PIT is closed and the last page does not include a cursor. Note that PIT can also be used for aggregation and PIVOT queries, but this is out of scope for this PR and will be implemented in a follow-up. Additionally, this PR resolves #80523 because the total doc count is no longer required.
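To make the paging change concrete for REST consumers, here is a minimal, illustrative sketch (not part of this patch) of paging through a search hit query with the low-level REST client. The index name, fetch size, and class name are placeholders, and the JSON parsing simply reuses the same XContentHelper call the tests in this PR use. With the PIT-backed cursors, the final page omits the "cursor" field, so the loop below terminates without fetching an extra empty page and without calling the _sql/close endpoint for an exhausted result set.

import org.apache.http.HttpHost;
import org.elasticsearch.client.Request;
import org.elasticsearch.client.Response;
import org.elasticsearch.client.RestClient;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.xcontent.json.JsonXContent;

import java.io.InputStream;
import java.util.Map;

// Hypothetical client-side sketch; "test" and fetch_size are arbitrary.
public class SqlPagingSketch {
    public static void main(String[] args) throws Exception {
        try (RestClient client = RestClient.builder(new HttpHost("localhost", 9200, "http")).build()) {
            Request request = new Request("POST", "/_sql");
            request.setJsonEntity("{\"query\": \"SELECT * FROM test\", \"fetch_size\": 2}");
            String cursor;
            do {
                Response response = client.performRequest(request);
                Map<String, Object> body;
                try (InputStream content = response.getEntity().getContent()) {
                    body = XContentHelper.convertToMap(JsonXContent.jsonXContent, content, false);
                }
                // consume body.get("rows") here; the last page carries rows but no cursor
                cursor = (String) body.get("cursor");
                if (cursor != null) {
                    // request the next page by cursor only
                    request = new Request("POST", "/_sql");
                    request.setJsonEntity("{\"cursor\": \"" + cursor + "\"}");
                }
            } while (cursor != null);
        }
    }
}

Under the old scroll-backed implementation, the same loop needed one extra round trip to observe an empty page (whose cursor could then be closed); with point-in-time and search_after, the server closes the PIT as soon as the result set is exhausted.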
--- docs/changelog/83381.yaml | 7 + x-pack/plugin/build.gradle | 55 +++-- .../sql/qa/jdbc/JdbcIntegrationTestCase.java | 2 +- .../xpack/sql/qa/mixed_node/SqlCompatIT.java | 55 ++++- .../sql/qa/security/RestSqlSecurityIT.java | 31 ++- .../sql/qa/jdbc/JdbcIntegrationTestCase.java | 2 +- .../xpack/sql/qa/rest/RestSqlTestCase.java | 39 ++- .../xpack/sql/action/SqlCancellationIT.java | 12 +- .../xpack/sql/execution/PlanExecutor.java | 2 +- .../execution/search/CompositeAggCursor.java | 37 +-- .../xpack/sql/execution/search/Querier.java | 158 +++++++----- .../search/SchemaSearchHitRowSet.java | 6 +- .../sql/execution/search/ScrollCursor.java | 176 -------------- .../sql/execution/search/SearchHitCursor.java | 229 ++++++++++++++++++ .../sql/execution/search/SearchHitRowSet.java | 42 ++-- .../xpack/sql/plugin/TextFormatterCursor.java | 4 +- .../xpack/sql/session/Cursor.java | 2 +- .../xpack/sql/session/Cursors.java | 4 +- .../xpack/sql/session/EmptyCursor.java | 2 +- .../xpack/sql/session/ListCursor.java | 2 +- .../xpack/sql/analysis/CancellationTests.java | 36 ++- .../sql/execution/search/QuerierTests.java | 2 +- ...orTests.java => SearchHitCursorTests.java} | 32 +-- .../xpack/sql/plugin/CursorTests.java | 36 +-- .../resources/rest-api-spec/test/sql/sql.yml | 38 +++ 25 files changed, 585 insertions(+), 426 deletions(-) create mode 100644 docs/changelog/83381.yaml delete mode 100644 x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/ScrollCursor.java create mode 100644 x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/SearchHitCursor.java rename x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/{ScrollCursorTests.java => SearchHitCursorTests.java} (72%) diff --git a/docs/changelog/83381.yaml b/docs/changelog/83381.yaml new file mode 100644 index 0000000000000..73066155ac6ba --- /dev/null +++ b/docs/changelog/83381.yaml @@ -0,0 +1,7 @@ +pr: 83381 +summary: Replace scroll cursors with point-in-time and `search_after` +area: SQL +type: enhancement +issues: + - 61873 + - 80523 diff --git a/x-pack/plugin/build.gradle b/x-pack/plugin/build.gradle index f79cdc65cdab1..e184c9ffb2567 100644 --- a/x-pack/plugin/build.gradle +++ b/x-pack/plugin/build.gradle @@ -1,8 +1,8 @@ import org.elasticsearch.gradle.Version +import org.elasticsearch.gradle.VersionProperties import org.elasticsearch.gradle.internal.info.BuildParams -import org.elasticsearch.gradle.util.GradleUtils import org.elasticsearch.gradle.internal.test.RestIntegTestTask -import org.elasticsearch.gradle.VersionProperties +import org.elasticsearch.gradle.util.GradleUtils apply plugin: 'elasticsearch.internal-yaml-rest-test' apply plugin: 'elasticsearch.yaml-rest-compat-test' @@ -77,16 +77,19 @@ tasks.named("yamlRestTest").configure { } tasks.named("yamlRestTestV7CompatTest").configure { - systemProperty 'tests.rest.blacklist', [ - 'unsigned_long/50_script_values/Scripted sort values', - 'unsigned_long/50_script_values/script_score query', - 'unsigned_long/50_script_values/Script query', - 'data_stream/140_data_stream_aliases/Fix IndexNotFoundException error when handling remove alias action', - ].join(',') + systemProperty 'tests.rest.blacklist', [ + 'unsigned_long/50_script_values/Scripted sort values', + 'unsigned_long/50_script_values/script_score query', + 'unsigned_long/50_script_values/Script query', + 'data_stream/140_data_stream_aliases/Fix IndexNotFoundException error when handling remove alias action', + ].join(',') } 
-tasks.named("yamlRestTestV7CompatTransform").configure{ task -> - task.skipTest("vectors/10_dense_vector_basic/Deprecated function signature", "to support it, it would require to almost revert back the #48725 and complicate the code" ) +tasks.named("yamlRestTestV7CompatTransform").configure { task -> + task.skipTest( + "vectors/10_dense_vector_basic/Deprecated function signature", + "to support it, it would require to almost revert back the #48725 and complicate the code" + ) task.skipTest("vectors/30_sparse_vector_basic/Cosine Similarity", "not supported for compatibility") task.skipTest("vectors/30_sparse_vector_basic/Deprecated function signature", "not supported for compatibility") task.skipTest("vectors/30_sparse_vector_basic/Dot Product", "not supported for compatibility") @@ -94,26 +97,42 @@ tasks.named("yamlRestTestV7CompatTransform").configure{ task -> task.skipTest("vectors/35_sparse_vector_l1l2/L2 norm", "not supported for compatibility") task.skipTest("vectors/40_sparse_vector_special_cases/Dimensions can be sorted differently", "not supported for compatibility") task.skipTest("vectors/40_sparse_vector_special_cases/Documents missing a vector field", "not supported for compatibility") - task.skipTest("vectors/40_sparse_vector_special_cases/Query vector has different dimensions from documents' vectors", "not supported for compatibility") + task.skipTest( + "vectors/40_sparse_vector_special_cases/Query vector has different dimensions from documents' vectors", + "not supported for compatibility" + ) task.skipTest("vectors/40_sparse_vector_special_cases/Sparse vectors should error with dense vector functions", "not supported for compatibility") task.skipTest("vectors/40_sparse_vector_special_cases/Vectors of different dimensions and data types", "not supported for compatibility") task.skipTest("vectors/50_vector_stats/Usage stats on vector fields", "not supported for compatibility") - task.skipTest("roles/30_prohibited_role_query/Test use prohibited query inside role query", "put role request with a term lookup (deprecated) and type. Requires validation in REST layer") + task.skipTest( + "roles/30_prohibited_role_query/Test use prohibited query inside role query", + "put role request with a term lookup (deprecated) and type. 
Requires validation in REST layer" + ) task.skipTest("ml/jobs_crud/Test create job with delimited format", "removing undocumented functionality") task.skipTest("ml/datafeeds_crud/Test update datafeed to point to missing job", "behaviour change #44752 - not allowing to update datafeed job_id") - task.skipTest("ml/datafeeds_crud/Test update datafeed to point to different job", "behaviour change #44752 - not allowing to update datafeed job_id") - task.skipTest("ml/datafeeds_crud/Test update datafeed to point to job already attached to another datafeed", "behaviour change #44752 - not allowing to update datafeed job_id") + task.skipTest( + "ml/datafeeds_crud/Test update datafeed to point to different job", + "behaviour change #44752 - not allowing to update datafeed job_id" + ) + task.skipTest( + "ml/datafeeds_crud/Test update datafeed to point to job already attached to another datafeed", + "behaviour change #44752 - not allowing to update datafeed job_id" + ) task.skipTest("rollup/delete_job/Test basic delete_job", "rollup was an experimental feature, also see #41227") task.skipTest("rollup/delete_job/Test delete job twice", "rollup was an experimental feature, also see #41227") task.skipTest("rollup/delete_job/Test delete running job", "rollup was an experimental feature, also see #41227") task.skipTest("rollup/get_jobs/Test basic get_jobs", "rollup was an experimental feature, also see #41227") task.skipTest("rollup/put_job/Test basic put_job", "rollup was an experimental feature, also see #41227") task.skipTest("rollup/start_job/Test start job twice", "rollup was an experimental feature, also see #41227") - task.skipTest("ml/trained_model_cat_apis/Test cat trained models", "A type field was added to cat.ml_trained_models #73660, this is a backwards compatible change. Still this is a cat api, and we don't support them with rest api compatibility. (the test would be very hard to transform too)") + task.skipTest( + "ml/trained_model_cat_apis/Test cat trained models", + "A type field was added to cat.ml_trained_models #73660, this is a backwards compatible change. Still this is a cat api, and we don't support them with rest api compatibility. 
(the test would be very hard to transform too)" + ) task.skipTest("indices.freeze/30_usage/Usage stats on frozen indices", "#70192 -- the freeze index API is removed from 8.0") task.skipTest("indices.freeze/20_stats/Translog stats on frozen indices", "#70192 -- the freeze index API is removed from 8.0") task.skipTest("indices.freeze/10_basic/Basic", "#70192 -- the freeze index API is removed from 8.0") task.skipTest("indices.freeze/10_basic/Test index options", "#70192 -- the freeze index API is removed from 8.0") + task.skipTest("sql/sql/Paging through results", "scrolling through search hit queries no longer produces empty last page in 8.2") task.skipTest("service_accounts/10_basic/Test get service accounts", "new service accounts are added") task.replaceValueInMatch("_type", "_doc") @@ -121,10 +140,12 @@ tasks.named("yamlRestTestV7CompatTransform").configure{ task -> task.addAllowedWarningRegexForTest("Including \\[accept_enterprise\\] in get license.*", "Installing enterprise license") task.addAllowedWarningRegex("bucket_span .* is not an integral .* of the number of seconds in 1d.* This is now deprecated.*") - task.replaceValueTextByKeyValue("catch", + task.replaceValueTextByKeyValue( + "catch", 'bad_request', '/It is no longer possible to freeze indices, but existing frozen indices can still be unfrozen/', - "Cannot freeze write index for data stream") + "Cannot freeze write index for data stream" + ) } diff --git a/x-pack/plugin/sql/qa/jdbc/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/JdbcIntegrationTestCase.java b/x-pack/plugin/sql/qa/jdbc/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/JdbcIntegrationTestCase.java index 34d91f69d8fd4..16bd33ca31d74 100644 --- a/x-pack/plugin/sql/qa/jdbc/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/JdbcIntegrationTestCase.java +++ b/x-pack/plugin/sql/qa/jdbc/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/JdbcIntegrationTestCase.java @@ -31,7 +31,7 @@ public abstract class JdbcIntegrationTestCase extends ESRestTestCase { @After public void checkSearchContent() throws IOException { - // Some context might linger due to fire and forget nature of scroll cleanup + // Some context might linger due to fire and forget nature of PIT cleanup assertNoSearchContexts(); } diff --git a/x-pack/plugin/sql/qa/mixed-node/src/test/java/org/elasticsearch/xpack/sql/qa/mixed_node/SqlCompatIT.java b/x-pack/plugin/sql/qa/mixed-node/src/test/java/org/elasticsearch/xpack/sql/qa/mixed_node/SqlCompatIT.java index 3cc5383defed8..5d93943f4223a 100644 --- a/x-pack/plugin/sql/qa/mixed-node/src/test/java/org/elasticsearch/xpack/sql/qa/mixed_node/SqlCompatIT.java +++ b/x-pack/plugin/sql/qa/mixed-node/src/test/java/org/elasticsearch/xpack/sql/qa/mixed_node/SqlCompatIT.java @@ -11,6 +11,7 @@ import org.elasticsearch.Version; import org.elasticsearch.client.Request; import org.elasticsearch.client.Response; +import org.elasticsearch.client.ResponseException; import org.elasticsearch.client.RestClient; import org.elasticsearch.common.Strings; import org.elasticsearch.common.xcontent.XContentHelper; @@ -21,6 +22,7 @@ import org.elasticsearch.xpack.ql.TestNode; import org.elasticsearch.xpack.ql.TestNodes; import org.elasticsearch.xpack.sql.qa.rest.BaseRestSqlTestCase; +import org.hamcrest.Matchers; import org.junit.AfterClass; import org.junit.Before; @@ -111,8 +113,7 @@ private void testNullsOrderWithMissingOrderSupport(RestClient client) throws IOE assertNull(result.get(2)); } - @SuppressWarnings("unchecked") - private List runOrderByNullsLastQuery(RestClient 
queryClient) throws IOException { + private void indexDocs() throws IOException { Request putIndex = new Request("PUT", "/test"); putIndex.setJsonEntity(""" {"settings":{"index":{"number_of_shards":3}}}"""); @@ -124,17 +125,19 @@ private List runOrderByNullsLastQuery(RestClient queryClient) throws IO for (String doc : Arrays.asList("{\"int\":1,\"kw\":\"foo\"}", "{\"int\":2,\"kw\":\"bar\"}", "{\"kw\":\"bar\"}")) { bulk.append("{\"index\":{}}\n").append(doc).append("\n"); } + indexDocs.setJsonEntity(bulk.toString()); client().performRequest(indexDocs); + } + + @SuppressWarnings("unchecked") + private List runOrderByNullsLastQuery(RestClient queryClient) throws IOException { + indexDocs(); Request query = new Request("POST", "_sql"); query.setJsonEntity(sqlQueryEntityWithOptionalMode("SELECT int FROM test GROUP BY 1 ORDER BY 1 NULLS LAST", bwcVersion)); - Response queryResponse = queryClient.performRequest(query); - - assertEquals(200, queryResponse.getStatusLine().getStatusCode()); + Map result = performRequestAndReadBodyAsJson(queryClient, query); - InputStream content = queryResponse.getEntity().getContent(); - Map result = XContentHelper.convertToMap(JsonXContent.jsonXContent, content, false); List> rows = (List>) result.get("rows"); return rows.stream().map(row -> (Integer) row.get(0)).collect(Collectors.toList()); } @@ -156,4 +159,42 @@ public static String sqlQueryEntityWithOptionalMode(String query, Version bwcVer return Strings.toString(json); } + public void testCursorFromOldNodeFailsOnNewNode() throws IOException { + assertCursorNotCompatibleAcrossVersions(bwcVersion, oldNodesClient, Version.CURRENT, newNodesClient); + } + + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/83726") + public void testCursorFromNewNodeFailsOnOldNode() throws IOException { + assertCursorNotCompatibleAcrossVersions(Version.CURRENT, newNodesClient, bwcVersion, oldNodesClient); + } + + private void assertCursorNotCompatibleAcrossVersions(Version version1, RestClient client1, Version version2, RestClient client2) + throws IOException { + indexDocs(); + + Request req = new Request("POST", "_sql"); + // GROUP BY queries always return a cursor + req.setJsonEntity(sqlQueryEntityWithOptionalMode("SELECT int FROM test GROUP BY 1", bwcVersion)); + Map json = performRequestAndReadBodyAsJson(client1, req); + String cursor = (String) json.get("cursor"); + assertThat(cursor, Matchers.not(Matchers.emptyString())); + + Request scrollReq = new Request("POST", "_sql"); + scrollReq.setJsonEntity("{\"cursor\": \"%s\"}".formatted(cursor)); + ResponseException exception = expectThrows(ResponseException.class, () -> client2.performRequest(scrollReq)); + + assertThat( + exception.getMessage(), + Matchers.containsString("Unsupported cursor version [" + version1 + "], expected [" + version2 + "]") + ); + } + + private Map performRequestAndReadBodyAsJson(RestClient client, Request request) throws IOException { + Response response = client.performRequest(request); + assertEquals(200, response.getStatusLine().getStatusCode()); + try (InputStream content = response.getEntity().getContent()) { + return XContentHelper.convertToMap(JsonXContent.jsonXContent, content, false); + } + } + } diff --git a/x-pack/plugin/sql/qa/server/security/src/test/java/org/elasticsearch/xpack/sql/qa/security/RestSqlSecurityIT.java b/x-pack/plugin/sql/qa/server/security/src/test/java/org/elasticsearch/xpack/sql/qa/security/RestSqlSecurityIT.java index b27d6c2138a01..ab7b594e10b21 100644 --- 
a/x-pack/plugin/sql/qa/server/security/src/test/java/org/elasticsearch/xpack/sql/qa/security/RestSqlSecurityIT.java +++ b/x-pack/plugin/sql/qa/server/security/src/test/java/org/elasticsearch/xpack/sql/qa/security/RestSqlSecurityIT.java @@ -281,18 +281,27 @@ protected AuditLogAsserter createAuditLogAsserter() { } /** - * Test the hijacking a scroll fails. This test is only implemented for - * REST because it is the only API where it is simple to hijack a scroll. + * Test that hijacking a cursor fails. This test is only implemented for + * REST because it is the only API where it is simple to hijack a cursor. * It should exercise the same code as the other APIs but if we were truly * paranoid we'd hack together something to test the others as well. */ - public void testHijackScrollFails() throws Exception { - createUser("full_access", "rest_minimal"); + public void testHijackCursorFails() throws Exception { + createUser("no_read", "read_nothing"); final String mode = randomMode(); + final String query = randomFrom( + List.of( + "SELECT * FROM test", + "SELECT a FROM test GROUP BY a", + "SELECT MAX(a) FROM test GROUP BY a ORDER BY 1", + "SHOW COLUMNS IN test" + ) + ); + Map<String, Object> adminResponse = RestActions.runSql( null, - new StringEntity(query("SELECT * FROM test").mode(mode).fetchSize(1).toString(), ContentType.APPLICATION_JSON), + new StringEntity(query(query).mode(mode).fetchSize(1).toString(), ContentType.APPLICATION_JSON), mode, false ); @@ -303,20 +312,18 @@ public void testHijackScrollFails() throws Exception { ResponseException e = expectThrows( ResponseException.class, () -> RestActions.runSql( - "full_access", + "no_read", new StringEntity(cursor(cursor).mode(mode).toString(), ContentType.APPLICATION_JSON), mode, false ) ); - // TODO return a better error message for bad scrolls - assertThat(e.getMessage(), containsString("No search context found for id")); - assertEquals(404, e.getResponse().getStatusLine().getStatusCode()); + + assertThat(e.getMessage(), containsString("is unauthorized for user")); + assertEquals(403, e.getResponse().getStatusLine().getStatusCode()); createAuditLogAsserter().expectSqlCompositeActionFieldCaps("test_admin", "test") - .expect(true, SQL_ACTION_NAME, "full_access", empty()) - // one scroll access denied per shard - .expect("access_denied", SQL_ACTION_NAME, "full_access", "default_native", empty(), "InternalScrollSearchRequest") + .expect("access_denied", SQL_ACTION_NAME, "no_read", "default_native", empty(), "SqlQueryRequest") .assertLogs(); } diff --git a/x-pack/plugin/sql/qa/server/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/JdbcIntegrationTestCase.java b/x-pack/plugin/sql/qa/server/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/JdbcIntegrationTestCase.java index 900e257e9d56f..2f7bcb71a79f4 100644 --- a/x-pack/plugin/sql/qa/server/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/JdbcIntegrationTestCase.java +++ b/x-pack/plugin/sql/qa/server/src/main/java/org/elasticsearch/xpack/sql/qa/jdbc/JdbcIntegrationTestCase.java @@ -31,7 +31,7 @@ public abstract class JdbcIntegrationTestCase extends RemoteClusterAwareSqlRestT @After public void checkSearchContent() throws Exception { - // Some context might linger due to fire and forget nature of scroll cleanup + // Some context might linger due to the fire-and-forget nature of PIT cleanup assertNoSearchContexts(provisioningClient()); } diff --git a/x-pack/plugin/sql/qa/server/src/main/java/org/elasticsearch/xpack/sql/qa/rest/RestSqlTestCase.java
b/x-pack/plugin/sql/qa/server/src/main/java/org/elasticsearch/xpack/sql/qa/rest/RestSqlTestCase.java index 6879422786822..8a236202fbec7 100644 --- a/x-pack/plugin/sql/qa/server/src/main/java/org/elasticsearch/xpack/sql/qa/rest/RestSqlTestCase.java +++ b/x-pack/plugin/sql/qa/server/src/main/java/org/elasticsearch/xpack/sql/qa/rest/RestSqlTestCase.java @@ -254,6 +254,7 @@ public void testNextPageWithDatetimeAndTimezoneParam() throws IOException { expected.put("columns", singletonList(columnInfo(mode, "tz", "integer", JDBCType.INTEGER, 11))); response = runSql(new StringEntity(sqlRequest, ContentType.APPLICATION_JSON), "", mode); } else { + assertNotNull(cursor); response = runSql( new StringEntity(cursor(cursor).mode(mode).toString(), ContentType.APPLICATION_JSON), StringUtils.EMPTY, @@ -270,16 +271,12 @@ ); } expected.put("rows", values); + assertTrue(response.containsKey("cursor") == false || response.get("cursor") != null); cursor = (String) response.remove("cursor"); assertResponse(expected, response); - assertNotNull(cursor); } - Map<String, Object> expected = new HashMap<>(); - expected.put("rows", emptyList()); - assertResponse( - expected, - runSql(new StringEntity(cursor(cursor).mode(mode).toString(), ContentType.APPLICATION_JSON), StringUtils.EMPTY, mode) - ); + + assertNull(cursor); deleteIndex("test_date_timezone"); } @@ -1182,7 +1179,7 @@ private void executeQueryWithNextPage(String format, String expectedHeader, Stri .toString(); String cursor = null; - for (int i = 0; i < 20; i += 2) { + for (int i = 0; i <= 20; i += 2) { Tuple<String, String> response; if (i == 0) { response = runSqlAsText(StringUtils.EMPTY, new StringEntity(request, ContentType.APPLICATION_JSON), format); @@ -1201,25 +1198,17 @@ private void executeQueryWithNextPage(String format, String expectedHeader, Stri expected.append("---------------+---------------+---------------\n"); } } - expected.append(String.format(Locale.ROOT, expectedLineFormat, "text" + i, i, i + 5)); - expected.append(String.format(Locale.ROOT, expectedLineFormat, "text" + (i + 1), i + 1, i + 6)); + cursor = response.v2(); - assertEquals(expected.toString(), response.v1()); - assertNotNull(cursor); + if (i < 20) { + expected.append(String.format(Locale.ROOT, expectedLineFormat, "text" + i, i, i + 5)); + expected.append(String.format(Locale.ROOT, expectedLineFormat, "text" + (i + 1), i + 1, i + 6)); + assertEquals(expected.toString(), response.v1()); + assertNotNull(cursor); + } else { + assertNull(cursor); + } } - Map<String, Object> expected = new HashMap<>(); - expected.put("rows", emptyList()); - assertResponse( - expected, - runSql(new StringEntity(cursor(cursor).toString(), ContentType.APPLICATION_JSON), StringUtils.EMPTY, Mode.PLAIN.toString()) - ); - - Map<String, Object> response = runSql( - new StringEntity(cursor(cursor).toString(), ContentType.APPLICATION_JSON), - "/close", - Mode.PLAIN.toString() - ); - assertEquals(true, response.get("succeeded")); assertEquals(0, getNumberOfSearchContexts(provisioningClient(), "test")); } diff --git a/x-pack/plugin/sql/src/internalClusterTest/java/org/elasticsearch/xpack/sql/action/SqlCancellationIT.java b/x-pack/plugin/sql/src/internalClusterTest/java/org/elasticsearch/xpack/sql/action/SqlCancellationIT.java index 51cbe5f4ca215..1ef55fc6d911e 100644 --- a/x-pack/plugin/sql/src/internalClusterTest/java/org/elasticsearch/xpack/sql/action/SqlCancellationIT.java +++ b/x-pack/plugin/sql/src/internalClusterTest/java/org/elasticsearch/xpack/sql/action/SqlCancellationIT.java @@ -9,7 +9,6
@@ import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.index.IndexRequestBuilder; -import org.elasticsearch.action.search.SearchPhaseExecutionException; import org.elasticsearch.tasks.Task; import org.elasticsearch.tasks.TaskCancelledException; import org.junit.After; @@ -24,7 +23,7 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.greaterThan; public class SqlCancellationIT extends AbstractSqlBlockingIntegTestCase { @@ -91,18 +90,13 @@ public void testCancellation() throws Exception { disableBlocks(plugins); Exception exception = expectThrows(Exception.class, future::get); - Throwable inner = ExceptionsHelper.unwrap(exception, SearchPhaseExecutionException.class); + assertNotNull(ExceptionsHelper.unwrap(exception, TaskCancelledException.class)); if (cancelDuringSearch) { // Make sure we cancelled inside search - assertNotNull(inner); - assertThat(inner, instanceOf(SearchPhaseExecutionException.class)); - assertThat(inner.getCause(), instanceOf(TaskCancelledException.class)); + assertThat(getNumberOfContexts(plugins), greaterThan(0)); } else { // Make sure we were not cancelled inside search - assertNull(inner); assertThat(getNumberOfContexts(plugins), equalTo(0)); - Throwable cancellationException = ExceptionsHelper.unwrap(exception, TaskCancelledException.class); - assertNotNull(cancellationException); } } } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/PlanExecutor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/PlanExecutor.java index c8f8ebbf268ab..19a3a8d18bee0 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/PlanExecutor.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/PlanExecutor.java @@ -124,7 +124,7 @@ public void nextPageInternal(SqlConfiguration cfg, Cursor cursor, ActionListener } public void cleanCursor(Cursor cursor, ActionListener<Boolean> listener) { - cursor.clear(client, listener); + cursor.clear(client, writableRegistry, listener); } public Client client() { diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/CompositeAggCursor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/CompositeAggCursor.java index 3c51a495f3568..5fd156a8a8b84 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/CompositeAggCursor.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/CompositeAggCursor.java @@ -12,9 +12,6 @@ import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.client.internal.Client; -import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.io.stream.BytesStreamOutput; -import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -41,6 +38,11 @@ import java.util.function.BiFunction; import java.util.function.Supplier; +import static org.elasticsearch.xpack.sql.execution.search.Querier.deserializeQuery; +import static
org.elasticsearch.xpack.sql.execution.search.Querier.logSearchResponse; +import static org.elasticsearch.xpack.sql.execution.search.Querier.prepareRequest; +import static org.elasticsearch.xpack.sql.execution.search.Querier.serializeQuery; + /** * Cursor for composite aggregation (GROUP BY). * Stores the query that gets updated/slides across requests. @@ -132,7 +134,7 @@ public void nextPage(SqlConfiguration cfg, Client client, NamedWriteableRegistry log.trace("About to execute composite query {} on {}", StringUtils.toString(query), indices); } - SearchRequest request = Querier.prepareRequest(query, cfg.requestTimeout(), includeFrozen, indices); + SearchRequest request = prepareRequest(query, cfg.requestTimeout(), includeFrozen, indices); client.search(request, new ActionListener.Delegating<>(listener) { @Override @@ -169,7 +171,7 @@ static void handle( ) { if (log.isTraceEnabled()) { - Querier.logSearchResponse(response, log); + logSearchResponse(response, log); } // there are some results if (response.getAggregations().asList().isEmpty() == false) { @@ -244,31 +246,8 @@ private static void updateSourceAfterKey(Map<String, Object> afterKey, SearchSou } } - /** - * Deserializes the search source from a byte array. - */ - private static SearchSourceBuilder deserializeQuery(NamedWriteableRegistry registry, byte[] source) throws IOException { - try (NamedWriteableAwareStreamInput in = new NamedWriteableAwareStreamInput(StreamInput.wrap(source), registry)) { - return new SearchSourceBuilder(in); - } - } - - /** - * Serializes the search source to a byte array. - */ - private static byte[] serializeQuery(SearchSourceBuilder source) throws IOException { - if (source == null) { - return new byte[0]; - } - - try (BytesStreamOutput out = new BytesStreamOutput()) { - source.writeTo(out); - return BytesReference.toBytes(out.bytes()); - } - } - @Override - public void clear(Client client, ActionListener<Boolean> listener) { + public void clear(Client client, NamedWriteableRegistry registry, ActionListener<Boolean> listener) { listener.onResponse(true); } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/Querier.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/Querier.java index dbdb23b30d914..298e758a6ff50 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/Querier.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/Querier.java @@ -10,12 +10,20 @@ import org.apache.logging.log4j.Logger; import org.apache.lucene.util.PriorityQueue; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.search.ClosePointInTimeAction; +import org.elasticsearch.action.search.ClosePointInTimeRequest; +import org.elasticsearch.action.search.OpenPointInTimeAction; +import org.elasticsearch.action.search.OpenPointInTimeRequest; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchResponse; -import org.elasticsearch.action.search.ShardSearchFailure; import org.elasticsearch.client.internal.Client; +import org.elasticsearch.client.internal.ParentTaskAssigningClient; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.util.CollectionUtils; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.io.stream.BytesStreamOutput; +import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import
org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.core.TimeValue; import org.elasticsearch.core.Tuple; import org.elasticsearch.search.aggregations.Aggregation; @@ -23,6 +31,7 @@ import org.elasticsearch.search.aggregations.MultiBucketConsumerService; import org.elasticsearch.search.aggregations.bucket.MultiBucketsAggregation.Bucket; import org.elasticsearch.search.aggregations.bucket.filter.Filters; +import org.elasticsearch.search.builder.PointInTimeBuilder; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.tasks.TaskCancelledException; import org.elasticsearch.xcontent.XContentBuilder; @@ -122,27 +131,64 @@ public void query(List<Attribute> output, QueryContainer query, String index, Ac List<Tuple<Integer, Comparator>> sortingColumns = query.sortingColumns(); listener = sortingColumns.isEmpty() ? listener : new LocalAggregationSorterListener(listener, sortingColumns, query.limit()); - ActionListener<SearchResponse> l = null; - if (query.isAggsOnly()) { + if (cfg.task() != null && cfg.task().isCancelled()) { + listener.onFailure(new TaskCancelledException("cancelled")); + } else if (query.isAggsOnly()) { + ActionListener<SearchResponse> l; if (query.aggs().useImplicitGroupBy()) { l = new ImplicitGroupActionListener(listener, client, cfg, output, query, search); } else { l = new CompositeActionListener(listener, client, cfg, output, query, search); } + client.search(search, l); } else { - search.scroll(cfg.pageTimeout()); - l = new ScrollActionListener(listener, client, cfg, output, query); + searchWithPointInTime(search, new SearchHitActionListener(listener, client, cfg, output, query, sourceBuilder)); } + } - if (cfg.task() != null && cfg.task().isCancelled()) { - listener.onFailure(new TaskCancelledException("cancelled")); - return; + private void searchWithPointInTime(SearchRequest search, ActionListener<SearchResponse> listener) { + final OpenPointInTimeRequest openPitRequest = new OpenPointInTimeRequest(search.indices()).indicesOptions(search.indicesOptions()) + .keepAlive(cfg.pageTimeout()); + + client.execute(OpenPointInTimeAction.INSTANCE, openPitRequest, wrap(openPointInTimeResponse -> { + String pitId = openPointInTimeResponse.getPointInTimeId(); + search.indices(Strings.EMPTY_ARRAY); + search.source().pointInTimeBuilder(new PointInTimeBuilder(pitId)); + ActionListener<SearchResponse> closePitOnErrorListener = wrap(searchResponse -> { + try { + listener.onResponse(searchResponse); + } catch (Exception e) { + closePointInTimeAfterError(client, pitId, e, listener); + } + }, searchError -> closePointInTimeAfterError(client, pitId, searchError, listener)); + client.search(search, closePitOnErrorListener); + }, listener::onFailure)); + } + + private static void closePointInTimeAfterError(Client client, String pointInTimeId, Exception e, ActionListener<SearchResponse> listener) { + closePointInTime(client, pointInTimeId, wrap(r -> listener.onFailure(e), closeError -> { + e.addSuppressed(closeError); + listener.onFailure(e); + })); + } + + public static void closePointInTime(Client client, String pointInTimeId, ActionListener<Boolean> listener) { + if (pointInTimeId != null) { + // request should not be made with the parent task assigned because the parent task might already be canceled + client = client instanceof ParentTaskAssigningClient wrapperClient ?
wrapperClient.unwrap() : client; + + client.execute( + ClosePointInTimeAction.INSTANCE, + new ClosePointInTimeRequest(pointInTimeId), + wrap(clearPointInTimeResponse -> listener.onResponse(clearPointInTimeResponse.isSucceeded()), listener::onFailure) + ); + } else { + listener.onResponse(true); } - client.search(search, l); } - public static SearchRequest prepareRequest(SearchSourceBuilder source, TimeValue timeout, boolean includeFrozen, String... indices) { - source.timeout(timeout); + public static SearchRequest prepareRequest(SearchSourceBuilder source, TimeValue timeOut, boolean includeFrozen, String... indices) { + source.timeout(timeOut); SearchRequest searchRequest = new SearchRequest(INTRODUCING_UNSIGNED_LONG); searchRequest.indices(indices); @@ -181,6 +227,29 @@ protected static void logSearchResponse(SearchResponse response, Logger logger) ); } + /** + * Deserializes the search source from a byte array. + */ + public static SearchSourceBuilder deserializeQuery(NamedWriteableRegistry registry, byte[] source) throws IOException { + try (NamedWriteableAwareStreamInput in = new NamedWriteableAwareStreamInput(StreamInput.wrap(source), registry)) { + return new SearchSourceBuilder(in); + } + } + + /** + * Serializes the search source to a byte array. + */ + public static byte[] serializeQuery(SearchSourceBuilder source) throws IOException { + if (source == null) { + return new byte[0]; + } + + try (BytesStreamOutput out = new BytesStreamOutput()) { + source.writeTo(out); + return BytesReference.toBytes(out.bytes()); + } + } + /** * Listener used for local sorting (typically due to aggregations used inside `ORDER BY`). * @@ -504,24 +573,27 @@ private BucketExtractor createExtractor(FieldExtraction ref, BucketExtractor tot } /** - * Dedicated listener for column retrieval/non-grouped queries (scrolls). + * Dedicated listener for column retrieval/non-grouped queries (search hits). */ - static class ScrollActionListener extends BaseActionListener { + static class SearchHitActionListener extends BaseActionListener { private final QueryContainer query; private final BitSet mask; private final boolean multiValueFieldLeniency; + private final SearchSourceBuilder source; - ScrollActionListener( + SearchHitActionListener( ActionListener<Page> listener, Client client, SqlConfiguration cfg, List<Attribute> output, - QueryContainer query + QueryContainer query, + SearchSourceBuilder source ) { super(listener, client, cfg, output); this.query = query; this.mask = query.columnMask(output); this.multiValueFieldLeniency = cfg.multiValueFieldLeniency(); + this.source = source; } @Override @@ -534,12 +606,13 @@ protected void handleResponse(SearchResponse response, ActionListener<Page> list exts.add(createExtractor(ref.extraction())); } - ScrollCursor.handle( + SearchHitCursor.handle( + client, response, - () -> new SchemaSearchHitRowSet(schema, exts, mask, query.limit(), response), - p -> listener.onResponse(p), - p -> clear(response.getScrollId(), wrap(success -> listener.onResponse(p), listener::onFailure)), - schema + source, + () -> new SchemaSearchHitRowSet(schema, exts, mask, source.size(), query.limit(), response), + listener, + query.shouldIncludeFrozen() ); } @@ -579,7 +652,7 @@ private HitExtractor createExtractor(FieldExtraction ref) { /** * Base listener class providing clean-up and exception handling. - * Handles both scroll queries (scan/scroll) and regular/composite-aggs queries. + * Handles both search hits and composite-aggs queries.
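+ * Clean-up is no longer done here: point-in-time contexts are closed via {@code Querier#closePointInTime}, either from {@code SearchHitCursor} or when a search fails in {@code searchWithPointInTime}.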
*/ abstract static class BaseActionListener extends ActionListener.Delegating<SearchResponse, Page> { @@ -595,52 +668,13 @@ abstract static class BaseActionListener extends ActionListener.Delegating<SearchResponse, Page> { @Override public void onResponse(final SearchResponse response) { - try { - ShardSearchFailure[] failure = response.getShardFailures(); - if (CollectionUtils.isEmpty(failure) == false) { - cleanup(response, new SqlIllegalArgumentException(failure[0].reason(), failure[0].getCause())); - } else { - handleResponse(response, ActionListener.wrap(delegate::onResponse, e -> cleanup(response, e))); - } - } catch (Exception ex) { - cleanup(response, ex); - } + handleResponse(response, delegate); } protected abstract void handleResponse(SearchResponse response, ActionListener<Page> listener); - // clean-up the scroll in case of exception - protected final void cleanup(SearchResponse response, Exception ex) { - if (response != null && response.getScrollId() != null) { - client.prepareClearScroll() - .addScrollId(response.getScrollId()) - // in case of failure, report the initial exception instead of the one resulting from cleaning the scroll - .execute(ActionListener.wrap(r -> delegate.onFailure(ex), e -> { - ex.addSuppressed(e); - delegate.onFailure(ex); - })); - } else { - delegate.onFailure(ex); - } - } - - protected final void clear(String scrollId, ActionListener<Boolean> listener) { - if (scrollId != null) { - client.prepareClearScroll() - .addScrollId(scrollId) - .execute( - ActionListener.wrap( - clearScrollResponse -> listener.onResponse(clearScrollResponse.isSucceeded()), - listener::onFailure - ) - ); - } else { - listener.onResponse(false); - } - } } @SuppressWarnings("rawtypes") diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/SchemaSearchHitRowSet.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/SchemaSearchHitRowSet.java index ea6131c564bf3..67712658529fb 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/SchemaSearchHitRowSet.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/SchemaSearchHitRowSet.java @@ -15,15 +15,15 @@ import java.util.List; /** - * Initial results from a scroll search. Distinct from the following pages + * Initial results from a search-hit query. Distinct from the following pages * because it has a {@link Schema} available. See {@link SearchHitRowSet} * for the next pages. */ class SchemaSearchHitRowSet extends SearchHitRowSet implements SchemaRowSet { private final Schema schema; - SchemaSearchHitRowSet(Schema schema, List<HitExtractor> exts, BitSet mask, int limitHits, SearchResponse response) { - super(exts, mask, limitHits, response); + SchemaSearchHitRowSet(Schema schema, List<HitExtractor> exts, BitSet mask, int sizeRequested, int limitHits, SearchResponse response) { + super(exts, mask, sizeRequested, limitHits, response); this.schema = schema; } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/ScrollCursor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/ScrollCursor.java deleted file mode 100644 index e240ca06375d7..0000000000000 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/ScrollCursor.java +++ /dev/null @@ -1,176 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0.
- */ -package org.elasticsearch.xpack.sql.execution.search; - -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; -import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.search.ClearScrollRequest; -import org.elasticsearch.action.search.ClearScrollResponse; -import org.elasticsearch.action.search.SearchResponse; -import org.elasticsearch.action.search.SearchScrollRequest; -import org.elasticsearch.client.internal.Client; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.core.Tuple; -import org.elasticsearch.search.SearchHit; -import org.elasticsearch.xpack.ql.execution.search.extractor.HitExtractor; -import org.elasticsearch.xpack.ql.type.Schema; -import org.elasticsearch.xpack.sql.session.Cursor; -import org.elasticsearch.xpack.sql.session.Rows; -import org.elasticsearch.xpack.sql.session.SqlConfiguration; - -import java.io.IOException; -import java.util.BitSet; -import java.util.List; -import java.util.Objects; -import java.util.function.Consumer; -import java.util.function.Supplier; - -import static org.elasticsearch.action.ActionListener.wrap; - -public class ScrollCursor implements Cursor { - - private static final Logger log = LogManager.getLogger(ScrollCursor.class); - - public static final String NAME = "s"; - - private final String scrollId; - private final List<HitExtractor> extractors; - private final BitSet mask; - private final int limit; - - public ScrollCursor(String scrollId, List<HitExtractor> extractors, BitSet mask, int limit) { - this.scrollId = scrollId; - this.extractors = extractors; - this.mask = mask; - this.limit = limit; - } - - public ScrollCursor(StreamInput in) throws IOException { - scrollId = in.readString(); - limit = in.readVInt(); - - extractors = in.readNamedWriteableList(HitExtractor.class); - mask = BitSet.valueOf(in.readByteArray()); - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - out.writeString(scrollId); - out.writeVInt(limit); - - out.writeNamedWriteableList(extractors); - out.writeByteArray(mask.toByteArray()); - } - - @Override - public String getWriteableName() { - return NAME; - } - - String scrollId() { - return scrollId; - } - - BitSet mask() { - return mask; - } - - List<HitExtractor> extractors() { - return extractors; - } - - int limit() { - return limit; - } - - @Override - public void nextPage(SqlConfiguration cfg, Client client, NamedWriteableRegistry registry, ActionListener<Page> listener) { - if (log.isTraceEnabled()) { - log.trace("About to execute scroll query {}", scrollId); - } - - SearchScrollRequest request = new SearchScrollRequest(scrollId).scroll(cfg.pageTimeout()); - client.searchScroll(request, wrap(response -> { - handle( - response, - () -> new SearchHitRowSet(extractors, mask, limit, response), - p -> listener.onResponse(p), - p -> clear(client, wrap(success -> listener.onResponse(p), listener::onFailure)), - Schema.EMPTY - ); - }, listener::onFailure)); - } - - @Override - public void clear(Client client, ActionListener<Boolean> listener) { - cleanCursor( - client, - scrollId, - wrap(clearScrollResponse -> listener.onResponse(clearScrollResponse.isSucceeded()), listener::onFailure) - ); - } - - static void handle( - SearchResponse response, - Supplier<SearchHitRowSet> makeRowHit, - Consumer<Page> onPage, - Consumer<Page> clearScroll, - Schema schema - ) { - if (log.isTraceEnabled()) { - Querier.logSearchResponse(response, log); - } - SearchHit[] hits =
response.getHits().getHits(); - // clean-up - if (hits.length > 0) { - SearchHitRowSet rowSet = makeRowHit.get(); - Tuple<String, Integer> nextScrollData = rowSet.nextScrollData(); - - if (nextScrollData == null) { - // no more data, let's clean the scroll before continuing - clearScroll.accept(Page.last(rowSet)); - } else { - Cursor next = new ScrollCursor(nextScrollData.v1(), rowSet.extractors(), rowSet.mask(), nextScrollData.v2()); - onPage.accept(new Page(rowSet, next)); - } - } - // no-hits - else { - clearScroll.accept(Page.last(Rows.empty(schema))); - } - } - - @Override - public boolean equals(Object obj) { - if (obj == null || obj.getClass() != getClass()) { - return false; - } - ScrollCursor other = (ScrollCursor) obj; - return Objects.equals(scrollId, other.scrollId) - && Objects.equals(extractors, other.extractors) - && Objects.equals(limit, other.limit); - } - - @Override - public int hashCode() { - return Objects.hash(scrollId, extractors, limit); - } - - @Override - public String toString() { - return "cursor for scroll [" + scrollId + "]"; - } - - public static void cleanCursor(Client client, String scrollId, ActionListener<ClearScrollResponse> listener) { - ClearScrollRequest clearScrollRequest = new ClearScrollRequest(); - clearScrollRequest.addScrollId(scrollId); - client.clearScroll(clearScrollRequest, listener); - } -} diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/SearchHitCursor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/SearchHitCursor.java new file mode 100644 index 0000000000000..5258492a29af1 --- /dev/null +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/SearchHitCursor.java @@ -0,0 +1,229 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0.
+ */ +package org.elasticsearch.xpack.sql.execution.search; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.search.SearchRequest; +import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.client.internal.Client; +import org.elasticsearch.common.io.stream.NamedWriteable; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.search.SearchHit; +import org.elasticsearch.search.builder.PointInTimeBuilder; +import org.elasticsearch.search.builder.SearchSourceBuilder; +import org.elasticsearch.xpack.ql.execution.search.extractor.HitExtractor; +import org.elasticsearch.xpack.ql.util.StringUtils; +import org.elasticsearch.xpack.sql.session.Cursor; +import org.elasticsearch.xpack.sql.session.SqlConfiguration; +import org.elasticsearch.xpack.sql.util.Check; + +import java.io.IOException; +import java.util.Arrays; +import java.util.BitSet; +import java.util.List; +import java.util.Objects; +import java.util.function.Supplier; + +import static org.elasticsearch.xpack.sql.execution.search.Querier.closePointInTime; +import static org.elasticsearch.xpack.sql.execution.search.Querier.deserializeQuery; +import static org.elasticsearch.xpack.sql.execution.search.Querier.logSearchResponse; +import static org.elasticsearch.xpack.sql.execution.search.Querier.prepareRequest; +import static org.elasticsearch.xpack.sql.execution.search.Querier.serializeQuery; + +public class SearchHitCursor implements Cursor { + + private static final Logger log = LogManager.getLogger(SearchHitCursor.class); + + public static final String NAME = "h"; + + private final byte[] nextQuery; + private final List<HitExtractor> extractors; + private final BitSet mask; + private final int limit; + private final boolean includeFrozen; + + /** + * @param nextQuery a serialized {@link SearchSourceBuilder} representing the query to fetch the next page. The query is serialized + * because cursors have to be (de)serialized on the transport layer in {@code TextFormat.PLAIN_TEXT.format}, which does + * not have all the {@link NamedWriteable}s required to deserialize + * {@link SearchSourceBuilder}. As a workaround the deserialization of {@code nextQuery} is deferred until the query is + * needed.
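+ * As an illustration of the round trip (see {@code nextPage} and {@code clear} below): {@code serializeQuery(source)} produces the bytes stored in this field, and {@code deserializeQuery(registry, nextQuery)} rebuilds the {@link SearchSourceBuilder} on demand.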
+ */ + SearchHitCursor(byte[] nextQuery, List<HitExtractor> exts, BitSet mask, int remainingLimit, boolean includeFrozen) { + this.nextQuery = nextQuery; + this.extractors = exts; + this.mask = mask; + this.limit = remainingLimit; + this.includeFrozen = includeFrozen; + } + + public SearchHitCursor(StreamInput in) throws IOException { + nextQuery = in.readByteArray(); + limit = in.readVInt(); + + extractors = in.readNamedWriteableList(HitExtractor.class); + mask = BitSet.valueOf(in.readByteArray()); + includeFrozen = in.readBoolean(); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeByteArray(nextQuery); + out.writeVInt(limit); + + out.writeNamedWriteableList(extractors); + out.writeByteArray(mask.toByteArray()); + out.writeBoolean(includeFrozen); + } + + @Override + public String getWriteableName() { + return NAME; + } + + byte[] next() { + return nextQuery; + } + + BitSet mask() { + return mask; + } + + List<HitExtractor> extractors() { + return extractors; + } + + int limit() { + return limit; + } + + boolean includeFrozen() { + return includeFrozen; + } + + @Override + public void nextPage(SqlConfiguration cfg, Client client, NamedWriteableRegistry registry, ActionListener<Page> listener) { + SearchSourceBuilder q; + try { + q = deserializeQuery(registry, nextQuery); + } catch (Exception ex) { + listener.onFailure(ex); + return; + } + + SearchSourceBuilder query = q; + if (log.isTraceEnabled()) { + log.trace("About to execute search hit query {}", StringUtils.toString(query)); + } + + SearchRequest request = prepareRequest(query, cfg.requestTimeout(), includeFrozen); + + client.search( + request, + ActionListener.wrap( + (SearchResponse response) -> handle( + client, + response, + request.source(), + makeRowSet(query.size(), response), + listener, + includeFrozen + ), + listener::onFailure + ) + ); + } + + private Supplier<SearchHitRowSet> makeRowSet(int sizeRequested, SearchResponse response) { + return () -> new SearchHitRowSet(extractors, mask, sizeRequested, limit, response); + } + + static void handle( + Client client, + SearchResponse response, + SearchSourceBuilder source, + Supplier<SearchHitRowSet> makeRowSet, + ActionListener<Page> listener, + boolean includeFrozen + ) { + + if (log.isTraceEnabled()) { + logSearchResponse(response, log); + } + + SearchHit[] hits = response.getHits().getHits(); + + SearchHitRowSet rowSet = makeRowSet.get(); + + if (rowSet.hasRemaining() == false) { + closePointInTime( + client, + response.pointInTimeId(), + ActionListener.wrap(r -> listener.onResponse(Page.last(rowSet)), listener::onFailure) + ); + } else { + source.pointInTimeBuilder(new PointInTimeBuilder(response.pointInTimeId())); + updateSearchAfter(hits, source); + + byte[] nextQuery; + try { + nextQuery = serializeQuery(source); + } catch (IOException e) { + listener.onFailure(e); + return; + } + + SearchHitCursor nextCursor = new SearchHitCursor( + nextQuery, + rowSet.extractors(), + rowSet.mask(), + rowSet.getRemainingLimit(), + includeFrozen + ); + listener.onResponse(new Page(rowSet, nextCursor)); + } + } + + private static void updateSearchAfter(SearchHit[] hits, SearchSourceBuilder source) { + SearchHit lastHit = hits[hits.length - 1]; + source.searchAfter(lastHit.getSortValues()); + } + + @Override + public void clear(Client client, NamedWriteableRegistry registry, ActionListener<Boolean> listener) { + SearchSourceBuilder query; + try { + query = deserializeQuery(registry, nextQuery); + } catch (IOException e) { + listener.onFailure(e); + return; + } + Check.isTrue(query.pointInTimeBuilder() != null, "Expected cursor
with point-in-time id but got null"); + closePointInTime(client, query.pointInTimeBuilder().getEncodedId(), listener); + } + + @Override + public int hashCode() { + return Objects.hash(Arrays.hashCode(nextQuery), extractors, limit, mask, includeFrozen); + } + + @Override + public boolean equals(Object obj) { + if (obj == null || obj.getClass() != getClass()) { + return false; + } + SearchHitCursor other = (SearchHitCursor) obj; + return Arrays.equals(nextQuery, other.nextQuery) + && Objects.equals(extractors, other.extractors) + && Objects.equals(limit, other.limit) + && Objects.equals(includeFrozen, other.includeFrozen); + } +} diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/SearchHitRowSet.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/SearchHitRowSet.java index 2b453ff827df5..ba6a9854e4254 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/SearchHitRowSet.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/SearchHitRowSet.java @@ -6,9 +6,7 @@ */ package org.elasticsearch.xpack.sql.execution.search; -import org.apache.lucene.search.TotalHits; import org.elasticsearch.action.search.SearchResponse; -import org.elasticsearch.core.Tuple; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchHits; import org.elasticsearch.xpack.ql.execution.search.extractor.HitExtractor; @@ -37,11 +35,11 @@ class SearchHitRowSet extends ResultRowSet<HitExtractor> { private final int size; private final int[] indexPerLevel; - private final Tuple<String, Integer> nextScrollData; + private final int remainingLimit; private int row = 0; - SearchHitRowSet(List<HitExtractor> exts, BitSet mask, int limit, SearchResponse response) { + SearchHitRowSet(List<HitExtractor> exts, BitSet mask, int sizeRequested, int limit, SearchResponse response) { super(exts, mask); this.hits = response.getHits().getHits(); @@ -85,30 +83,22 @@ class SearchHitRowSet extends ResultRowSet<HitExtractor> { indexPerLevel = new int[maxDepth + 1]; this.innerHit = innerHit; - String scrollId = response.getScrollId(); - - if (scrollId == null) { - /* SearchResponse can contain a null scroll when you start a - * scroll but all results fit in the first page. */ - nextScrollData = null; + // compute remaining limit (only if the limit is specified - that is, positive). + int remaining = limit < 0 ? limit : limit - size; + // either the search returned fewer records than requested or the limit is exhausted + if (size < sizeRequested || remaining == 0) { + remainingLimit = 0; } else { - TotalHits totalHits = response.getHits().getTotalHits(); - - // compute remaining limit (only if the limit is specified - that is, positive). - int remainingLimit = limit < 0 ?
limit : limit - size; - // if the computed limit is zero, or the size is zero it means either there's nothing left or the limit has been reached - if (size == 0 || remainingLimit == 0 - // or the scroll has ended - || totalHits != null && totalHits.value == hits.length) { - nextScrollData = null; - } else { - nextScrollData = new Tuple<>(scrollId, remainingLimit); - } + remainingLimit = remaining; } } - protected boolean isLimitReached() { - return nextScrollData == null; + public boolean hasRemaining() { + return remainingLimit != 0; + } + + public int getRemainingLimit() { + return remainingLimit; } @Override @@ -218,8 +208,4 @@ protected void doReset() { public int size() { return size; } - - Tuple<String, Integer> nextScrollData() { - return nextScrollData; - } } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TextFormatterCursor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TextFormatterCursor.java index b55adc0af34d3..a6ba80e42708b 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TextFormatterCursor.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TextFormatterCursor.java @@ -59,8 +59,8 @@ public void nextPage(SqlConfiguration cfg, Client client, NamedWriteableRegistry } @Override - public void clear(Client client, ActionListener<Boolean> listener) { - delegate.clear(client, listener); + public void clear(Client client, NamedWriteableRegistry registry, ActionListener<Boolean> listener) { + delegate.clear(client, registry, listener); } @Override diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/session/Cursor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/session/Cursor.java index 5dd5bb203c519..be45132d78314 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/session/Cursor.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/session/Cursor.java @@ -48,5 +48,5 @@ public static Page last(RowSet rowSet) { /** * Cleans the resources associated with the cursor */ - void clear(Client client, ActionListener<Boolean> listener); + void clear(Client client, NamedWriteableRegistry registry, ActionListener<Boolean> listener); } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/session/Cursors.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/session/Cursors.java index 9384e1b5f989e..b94b60a850dab 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/session/Cursors.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/session/Cursors.java @@ -16,7 +16,7 @@ import org.elasticsearch.xpack.sql.common.io.SqlStreamOutput; import org.elasticsearch.xpack.sql.execution.search.CompositeAggCursor; import org.elasticsearch.xpack.sql.execution.search.PivotCursor; -import org.elasticsearch.xpack.sql.execution.search.ScrollCursor; +import org.elasticsearch.xpack.sql.execution.search.SearchHitCursor; import org.elasticsearch.xpack.sql.execution.search.extractor.SqlBucketExtractors; import org.elasticsearch.xpack.sql.execution.search.extractor.SqlHitExtractors; import org.elasticsearch.xpack.sql.expression.function.scalar.Processors; @@ -46,7 +46,7 @@ public static List<NamedWriteableRegistry.Entry> getNamedWriteables() { // cursors entries.add(new NamedWriteableRegistry.Entry(Cursor.class, EmptyCursor.NAME, in -> Cursor.EMPTY)); - entries.add(new NamedWriteableRegistry.Entry(Cursor.class, ScrollCursor.NAME, ScrollCursor::new)); + entries.add(new NamedWriteableRegistry.Entry(Cursor.class, SearchHitCursor.NAME,
SearchHitCursor::new)); entries.add(new NamedWriteableRegistry.Entry(Cursor.class, CompositeAggCursor.NAME, CompositeAggCursor::new)); entries.add(new NamedWriteableRegistry.Entry(Cursor.class, PivotCursor.NAME, PivotCursor::new)); entries.add(new NamedWriteableRegistry.Entry(Cursor.class, TextFormatterCursor.NAME, TextFormatterCursor::new)); diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/session/EmptyCursor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/session/EmptyCursor.java index e1ed687e6006c..6e10629c5ffc9 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/session/EmptyCursor.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/session/EmptyCursor.java @@ -38,7 +38,7 @@ public void nextPage(SqlConfiguration cfg, Client client, NamedWriteableRegistry } @Override - public void clear(Client client, ActionListener<Boolean> listener) { + public void clear(Client client, NamedWriteableRegistry registry, ActionListener<Boolean> listener) { // There is nothing to clean listener.onResponse(false); } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/session/ListCursor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/session/ListCursor.java index fe72838695b78..f4cd86ac1b8dc 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/session/ListCursor.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/session/ListCursor.java @@ -87,7 +87,7 @@ public void nextPage(SqlConfiguration cfg, Client client, NamedWriteableRegistry } @Override - public void clear(Client client, ActionListener<Boolean> listener) { + public void clear(Client client, NamedWriteableRegistry registry, ActionListener<Boolean> listener) { listener.onResponse(true); } diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/CancellationTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/CancellationTests.java index 0d359480de949..30e1178a20781 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/CancellationTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/CancellationTests.java @@ -9,6 +9,11 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.fieldcaps.FieldCapabilities; import org.elasticsearch.action.fieldcaps.FieldCapabilitiesResponse; +import org.elasticsearch.action.search.ClosePointInTimeAction; +import org.elasticsearch.action.search.ClosePointInTimeRequest; +import org.elasticsearch.action.search.ClosePointInTimeResponse; +import org.elasticsearch.action.search.OpenPointInTimeAction; +import org.elasticsearch.action.search.OpenPointInTimeResponse; import org.elasticsearch.action.search.SearchAction; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchRequestBuilder; @@ -42,11 +47,13 @@ import java.util.HashMap; import java.util.Map; import java.util.concurrent.CountDownLatch; +import java.util.concurrent.TimeUnit; import static java.util.Collections.emptyMap; import static java.util.Collections.singletonMap; import static org.hamcrest.Matchers.instanceOf; import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.eq; import static org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.times; @@ -158,6 +165,7 @@ public void testCancellationDuringSearch() throws InterruptedException { ClusterService mockClusterService =
mockClusterService(nodeId); String[] indices = new String[] { "endgame" }; + String pitId = randomAlphaOfLength(10); // Emulation of field capabilities FieldCapabilitiesResponse fieldCapabilitiesResponse = mock(FieldCapabilitiesResponse.class); @@ -170,12 +178,21 @@ return null; }).when(client).fieldCaps(any(), any()); + // Emulation of open pit + doAnswer(invocation -> { + @SuppressWarnings("unchecked") + ActionListener<OpenPointInTimeResponse> listener = (ActionListener<OpenPointInTimeResponse>) invocation.getArguments()[2]; + listener.onResponse(new OpenPointInTimeResponse(pitId)); + return null; + }).when(client).execute(eq(OpenPointInTimeAction.INSTANCE), any(), any()); + // Emulation of search cancellation ArgumentCaptor<SearchRequest> searchRequestCaptor = ArgumentCaptor.forClass(SearchRequest.class); when(client.prepareSearch(any())).thenReturn(new SearchRequestBuilder(client, SearchAction.INSTANCE).setIndices(indices)); doAnswer((Answer<Void>) invocation -> { @SuppressWarnings("unchecked") SearchRequest request = (SearchRequest) invocation.getArguments()[1]; + assertEquals(pitId, request.pointInTimeBuilder().getEncodedId()); TaskId parentTask = request.getParentTask(); assertNotNull(parentTask); assertEquals(task.getId(), parentTask.getId()); @@ -184,7 +201,18 @@ ActionListener<SearchResponse> listener = (ActionListener<SearchResponse>) invocation.getArguments()[2]; listener.onFailure(new TaskCancelledException("cancelled")); return null; - }).when(client).execute(any(), searchRequestCaptor.capture(), any()); + }).when(client).execute(eq(SearchAction.INSTANCE), searchRequestCaptor.capture(), any()); + + // Emulation of close pit + doAnswer(invocation -> { + ClosePointInTimeRequest request = (ClosePointInTimeRequest) invocation.getArguments()[1]; + assertEquals(pitId, request.getId()); + + @SuppressWarnings("unchecked") + ActionListener<ClosePointInTimeResponse> listener = (ActionListener<ClosePointInTimeResponse>) invocation.getArguments()[2]; + listener.onResponse(new ClosePointInTimeResponse(true, 1)); + return null; + }).when(client).execute(eq(ClosePointInTimeAction.INSTANCE), any(), any()); IndexResolver indexResolver = indexResolver(client); PlanExecutor planExecutor = new PlanExecutor(client, indexResolver, new NamedWriteableRegistry(Collections.emptyList())); @@ -204,10 +232,12 @@ public void onFailure(Exception e) { countDownLatch.countDown(); } }, "", mock(TransportService.class), mockClusterService); - countDownLatch.await(); + assertTrue(countDownLatch.await(5, TimeUnit.SECONDS)); // Final verification to ensure no more interaction verify(client).fieldCaps(any(), any()); - verify(client).execute(any(), any(), any()); + verify(client, times(1)).execute(eq(OpenPointInTimeAction.INSTANCE), any(), any()); + verify(client, times(1)).execute(eq(SearchAction.INSTANCE), any(), any()); + verify(client, times(1)).execute(eq(ClosePointInTimeAction.INSTANCE), any(), any()); verify(client, times(1)).settings(); verify(client, times(1)).threadPool(); verifyNoMoreInteractions(client); diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/QuerierTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/QuerierTests.java index 44016669595cf..bfe2394b8d822 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/QuerierTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/QuerierTests.java @@ -27,7 +27,7 @@ import java.util.concurrent.atomic.AtomicInteger;
import static java.util.Collections.emptyList; -import static org.elasticsearch.xpack.sql.execution.search.ScrollCursorTests.randomHitExtractor; +import static org.elasticsearch.xpack.sql.execution.search.SearchHitCursorTests.randomHitExtractor; public class QuerierTests extends ESTestCase { diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/ScrollCursorTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/SearchHitCursorTests.java similarity index 72% rename from x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/ScrollCursorTests.java rename to x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/SearchHitCursorTests.java index f4e19175134fe..e7146e1664c88 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/ScrollCursorTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/execution/search/SearchHitCursorTests.java @@ -21,18 +21,19 @@ import java.util.List; import java.util.function.Supplier; -public class ScrollCursorTests extends AbstractSqlWireSerializingTestCase<ScrollCursor> { - public static ScrollCursor randomScrollCursor() { +public class SearchHitCursorTests extends AbstractSqlWireSerializingTestCase<SearchHitCursor> { + public static SearchHitCursor randomSearchHitCursor() { int extractorsSize = between(1, 20); List<HitExtractor> extractors = new ArrayList<>(extractorsSize); for (int i = 0; i < extractorsSize; i++) { extractors.add(randomHitExtractor(0)); } - return new ScrollCursor( - randomAlphaOfLength(5), + return new SearchHitCursor( + new byte[randomInt(256)], extractors, CompositeAggregationCursorTests.randomBitSet(extractorsSize), - randomIntBetween(10, 1024) + randomIntBetween(10, 1024), + randomBoolean() ); } @@ -46,12 +47,13 @@ static HitExtractor randomHitExtractor(int depth) { } @Override - protected ScrollCursor mutateInstance(ScrollCursor instance) throws IOException { - return new ScrollCursor( - instance.scrollId(), + protected SearchHitCursor mutateInstance(SearchHitCursor instance) throws IOException { + return new SearchHitCursor( + instance.next(), instance.extractors(), randomValueOtherThan(instance.mask(), () -> CompositeAggregationCursorTests.randomBitSet(instance.extractors().size())), - randomValueOtherThan(instance.limit(), () -> randomIntBetween(1, 1024)) + randomValueOtherThan(instance.limit(), () -> randomIntBetween(1, 1024)), + instance.includeFrozen() == false ); } @@ -61,22 +63,22 @@ protected NamedWriteableRegistry getNamedWriteableRegistry() { } @Override - protected ScrollCursor createTestInstance() { - return randomScrollCursor(); + protected SearchHitCursor createTestInstance() { + return randomSearchHitCursor(); } @Override - protected Reader<ScrollCursor> instanceReader() { - return ScrollCursor::new; + protected Reader<SearchHitCursor> instanceReader() { + return SearchHitCursor::new; } @Override - protected ScrollCursor copyInstance(ScrollCursor instance, Version version) throws IOException { + protected SearchHitCursor copyInstance(SearchHitCursor instance, Version version) throws IOException { /* Randomly choose between internal protocol round trip and String based * round trips used to toXContent.
*/ if (randomBoolean()) { return super.copyInstance(instance, version); } - return (ScrollCursor) CursorTests.decodeFromString(Cursors.encodeToString(instance, randomZone())); + return (SearchHitCursor) CursorTests.decodeFromString(Cursors.encodeToString(instance, randomZone())); } } diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/plugin/CursorTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/plugin/CursorTests.java index ea5a55b92e8fb..08e0f6fca8912 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/plugin/CursorTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/plugin/CursorTests.java @@ -7,8 +7,6 @@ package org.elasticsearch.xpack.sql.plugin; import org.elasticsearch.Version; -import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.search.ClearScrollRequest; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.client.internal.Client; import org.elasticsearch.common.logging.LoggerMessageFormat; @@ -16,26 +14,21 @@ import org.elasticsearch.xpack.sql.SqlIllegalArgumentException; import org.elasticsearch.xpack.sql.action.BasicFormatter; import org.elasticsearch.xpack.sql.action.SqlQueryResponse; -import org.elasticsearch.xpack.sql.execution.search.ScrollCursor; -import org.elasticsearch.xpack.sql.execution.search.ScrollCursorTests; +import org.elasticsearch.xpack.sql.execution.search.SearchHitCursorTests; import org.elasticsearch.xpack.sql.proto.ColumnInfo; import org.elasticsearch.xpack.sql.proto.Mode; import org.elasticsearch.xpack.sql.session.Cursor; import org.elasticsearch.xpack.sql.session.Cursors; import org.elasticsearch.xpack.sql.session.CursorsTestUtil; -import org.mockito.ArgumentCaptor; import java.util.ArrayList; -import java.util.BitSet; import java.util.Collections; import java.util.List; import java.util.function.Supplier; import static org.elasticsearch.action.support.PlainActionFuture.newFuture; import static org.elasticsearch.xpack.sql.proto.SqlVersion.DATE_NANOS_SUPPORT_VERSION; -import static org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.verify; import static org.mockito.Mockito.verifyNoMoreInteractions; public class CursorTests extends ESTestCase { @@ -44,26 +37,11 @@ public void testEmptyCursorClearCursor() { Client clientMock = mock(Client.class); Cursor cursor = Cursor.EMPTY; PlainActionFuture<Boolean> future = newFuture(); - cursor.clear(clientMock, future); + cursor.clear(clientMock, writableRegistry(), future); assertFalse(future.actionGet()); verifyNoMoreInteractions(clientMock); } - @SuppressWarnings("unchecked") - public void testScrollCursorClearCursor() { - Client clientMock = mock(Client.class); - ActionListener<Boolean> listenerMock = mock(ActionListener.class); - String cursorString = randomAlphaOfLength(10); - Cursor cursor = new ScrollCursor(cursorString, Collections.emptyList(), new BitSet(0), randomInt()); - - cursor.clear(clientMock, listenerMock); - - ArgumentCaptor<ClearScrollRequest> request = ArgumentCaptor.forClass(ClearScrollRequest.class); - verify(clientMock).clearScroll(request.capture(), any(ActionListener.class)); - assertEquals(Collections.singletonList(cursorString), request.getValue().getScrollIds()); - verifyNoMoreInteractions(listenerMock); - } - private static SqlQueryResponse createRandomSqlResponse() { int columnCount = between(1, 10); @@ -79,25 +57,25 @@ private static SqlQueryResponse createRandomSqlResponse() { @SuppressWarnings("unchecked") static Cursor
randomNonEmptyCursor() { - Supplier<Cursor> cursorSupplier = randomFrom(() -> ScrollCursorTests.randomScrollCursor(), () -> { + Supplier<Cursor> cursorSupplier = randomFrom(SearchHitCursorTests::randomSearchHitCursor, () -> { SqlQueryResponse response = createRandomSqlResponse(); if (response.columns() != null && response.rows() != null) { return new TextFormatterCursor( - ScrollCursorTests.randomScrollCursor(), + SearchHitCursorTests.randomSearchHitCursor(), new BasicFormatter(response.columns(), response.rows(), BasicFormatter.FormatOption.CLI) ); } else { - return ScrollCursorTests.randomScrollCursor(); + return SearchHitCursorTests.randomSearchHitCursor(); } }, () -> { SqlQueryResponse response = createRandomSqlResponse(); if (response.columns() != null && response.rows() != null) { return new TextFormatterCursor( - ScrollCursorTests.randomScrollCursor(), + SearchHitCursorTests.randomSearchHitCursor(), new BasicFormatter(response.columns(), response.rows(), BasicFormatter.FormatOption.TEXT) ); } else { - return ScrollCursorTests.randomScrollCursor(); + return SearchHitCursorTests.randomSearchHitCursor(); } }); return cursorSupplier.get(); diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/sql/sql.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/sql/sql.yml index 94de0e786a019..dc09cf91be72b 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/sql/sql.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/sql/sql.yml @@ -38,6 +38,9 @@ setup: --- "Paging through results": + - skip: + version: "8.1.99 - " + reason: PIT cursors introduced in 8.2 no longer return empty last page - do: sql.query: format: json @@ -73,6 +76,41 @@ setup: - is_false: cursor - length: { rows: 0 } +--- +"Paging through results with PIT cursor": + - skip: + version: " - 8.1.99" + reason: PIT cursors introduced in 8.2 no longer return empty last page + - do: + sql.query: + format: json + body: + query: "SELECT * FROM test ORDER BY int asc" + fetch_size: 2 + - match: { columns.0.name: int } + - match: { columns.1.name: str } + - match: { rows.0.0: 1 } + - match: { rows.0.1: test1 } + - match: { rows.1.0: 2 } + - match: { rows.1.1: test2 } + - is_true: cursor + - set: { cursor: cursor } + + - do: + sql.query: + format: json + body: + cursor: "$cursor" + - match: { rows.0.0: 3 } + - match: { rows.0.1: test3 } + - is_false: columns + - is_false: cursor + + - do: + indices.stats: { index: 'test' } + + - match: { indices.test.total.search.open_contexts: 0 } + --- "Getting textual representation": - do: From 909a52e2ec276d6357b21b1c55ce4f02d0353c2f Mon Sep 17 00:00:00 2001 From: Yannick Welsch Date: Tue, 15 Feb 2022 11:25:21 +0100 Subject: [PATCH 05/37] Test doc-value-based searches on older indices (#83844) Tests doc-value-based searches against indices from 5.x / 6.x clusters.
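For illustration, the kind of behavior being exercised looks roughly like the following REST test snippet (a sketch only: the exact mappings and queries live in search/390_doc_values_search.yml, and the "rating" field name here is made up). A field that has no inverted index but keeps doc values must still be searchable:

  - do:
      indices.create:
        index: test
        body:
          mappings:
            properties:
              rating:
                type: long
                index: false      # no inverted index is written for this field
                doc_values: true  # so the term query below has to be answered from doc values

  - do:
      search:
        index: test
        body:
          query:
            term:
              rating: 1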
Relates #81210 --- .../rest/yaml/ESClientYamlSuiteTestCase.java | 6 +- .../qa/repository-old-versions/build.gradle | 18 +- .../oldrepos/DocValueOnlyFieldsIT.java | 222 ++++++++++++++++++ 3 files changed, 244 insertions(+), 2 deletions(-) create mode 100644 x-pack/qa/repository-old-versions/src/test/java/org/elasticsearch/oldrepos/DocValueOnlyFieldsIT.java diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ESClientYamlSuiteTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ESClientYamlSuiteTestCase.java index e70434f7225e4..a58c4e21e530c 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ESClientYamlSuiteTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ESClientYamlSuiteTestCase.java @@ -447,7 +447,7 @@ public void test() throws IOException { inFipsJvm() && testCandidate.getTestSection().getSkipSection().getFeatures().contains("fips_140") ); - if (testCandidate.getSetupSection().isEmpty() == false) { + if (skipSetupSections() == false && testCandidate.getSetupSection().isEmpty() == false) { logger.debug("start setup test [{}]", testCandidate.getTestPath()); for (ExecutableSection executableSection : testCandidate.getSetupSection().getExecutableSections()) { executeSection(executableSection); @@ -470,6 +470,10 @@ public void test() throws IOException { } } + protected boolean skipSetupSections() { + return false; + } + /** * Execute an {@link ExecutableSection}, careful to log its place of origin on failure. */ diff --git a/x-pack/qa/repository-old-versions/build.gradle b/x-pack/qa/repository-old-versions/build.gradle index 54e6958c58ac3..2581a4e5736ce 100644 --- a/x-pack/qa/repository-old-versions/build.gradle +++ b/x-pack/qa/repository-old-versions/build.gradle @@ -18,6 +18,7 @@ import org.gradle.api.internal.artifacts.ArtifactAttributes apply plugin: 'elasticsearch.jdk-download' apply plugin: 'elasticsearch.internal-testclusters' apply plugin: 'elasticsearch.standalone-rest-test' +apply plugin: 'elasticsearch.rest-resources' configurations { oldesFixture @@ -37,6 +38,15 @@ jdks { } } +restResources { + restApi { + include '_common', 'search' + } + restTests { + includeCore 'search/390_doc_values_search.yml' + } +} + if (Os.isFamily(Os.FAMILY_WINDOWS)) { logger.warn("Disabling repository-old-versions tests because we can't get the pid file on windows") tasks.named("testingConventions").configure { enabled = false } @@ -91,15 +101,21 @@ if (Os.isFamily(Os.FAMILY_WINDOWS)) { if (Architecture.current() == Architecture.AARCH64) { env 'ES_JAVA_OPTS', '-Xss512k' } + def dataPath = "${baseDir}/data" args 'oldes.OldElasticsearch', baseDir, "${ -> config.getSingleFile().toPath()}", false, - "path.repo: ${repoLocation}" + "path.repo: ${repoLocation}", + "path.data: ${dataPath}" if (version.onOrAfter('6.8.0') && Architecture.current() == Architecture.AARCH64) { // We need to explicitly disable ML when running old ES versions on ARM args 'xpack.ml.enabled: false' } + doFirst { + delete(dataPath) + mkdir(dataPath) + } maxWaitInSeconds 60 waitCondition = { fixture, ant -> // the fixture writes the ports file when Elasticsearch's HTTP service diff --git a/x-pack/qa/repository-old-versions/src/test/java/org/elasticsearch/oldrepos/DocValueOnlyFieldsIT.java b/x-pack/qa/repository-old-versions/src/test/java/org/elasticsearch/oldrepos/DocValueOnlyFieldsIT.java new file mode 100644 index 0000000000000..7df801a174e9d --- /dev/null +++ 
b/x-pack/qa/repository-old-versions/src/test/java/org/elasticsearch/oldrepos/DocValueOnlyFieldsIT.java @@ -0,0 +1,222 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.oldrepos; + +import com.carrotsearch.randomizedtesting.RandomizedTest; +import com.carrotsearch.randomizedtesting.annotations.Name; +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; + +import org.apache.http.HttpHost; +import org.elasticsearch.Build; +import org.elasticsearch.Version; +import org.elasticsearch.client.Request; +import org.elasticsearch.client.RestClient; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.settings.SecureString; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.core.Booleans; +import org.elasticsearch.core.PathUtils; +import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; +import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentFactory; +import org.junit.Before; + +import java.io.IOException; + +/** + * Tests doc-value-based searches against indices imported from clusters older than N-1. + * We reuse the YAML tests in search/390_doc_values_search.yml but have to do the setup + * manually here, as the setup runs against the old cluster, which we can only reach via + * the low-level REST client; the YAML setup machinery only knows how to talk to newer + * ES versions. + * + * We mimic the setup in search/390_doc_values_search.yml here, but adapt it to work + * against older-version clusters. + */ +public class DocValueOnlyFieldsIT extends ESClientYamlSuiteTestCase { + + final Version oldVersion = Version.fromString(System.getProperty("tests.es.version")); + static boolean setupDone; + + public DocValueOnlyFieldsIT(@Name("yaml") ClientYamlTestCandidate testCandidate) { + super(testCandidate); + } + + @ParametersFactory + public static Iterable<Object[]> parameters() throws Exception { + return ESClientYamlSuiteTestCase.createParameters(); + } + + @Override + protected boolean preserveClusterUponCompletion() { + return true; + } + + @Override + protected Settings restClientSettings() { + String token = basicAuthHeaderValue("admin", new SecureString("admin-password".toCharArray())); + return Settings.builder().put(ThreadContext.PREFIX + ".Authorization", token).build(); + } + + @Override + public void test() throws IOException { + assumeTrue("feature currently only enabled in snapshot builds", Build.CURRENT.isSnapshot()); + super.test(); + } + + @Override + protected boolean skipSetupSections() { + // setup in the YAML file is replaced by the method below + return true; + } + + @Before + public void setupIndex() throws IOException { + assumeTrue("feature currently only enabled in snapshot builds", Build.CURRENT.isSnapshot()); + + final boolean afterRestart = Booleans.parseBoolean(System.getProperty("tests.after_restart")); + if (afterRestart) { + return; + } + + // The following is a bit of a hack.
While we wish we could make this an @BeforeClass, it does not work because the client() is only + // initialized later, so we do it when running the first test + if (setupDone) { + return; + } + + setupDone = true; + + String repoLocation = PathUtils.get(System.getProperty("tests.repo.location")) + .resolve(RandomizedTest.getContext().getTargetClass().getName()) + .toString(); + + String indexName = "test"; + String repoName = "doc_values_repo"; + String snapshotName = "snap"; + String[] basicTypes = new String[] { + "byte", + "double", + "float", + "half_float", + "integer", + "long", + "short", + "boolean", + "keyword", + "ip", + "geo_point" }; // date is manually added as it needs further configuration + + int oldEsPort = Integer.parseInt(System.getProperty("tests.es.port")); + try (RestClient oldEs = RestClient.builder(new HttpHost("127.0.0.1", oldEsPort)).build()) { + Request createIndex = new Request("PUT", "/" + indexName); + int numberOfShards = randomIntBetween(1, 3); + + boolean multiTypes = oldVersion.before(Version.V_7_0_0); + + XContentBuilder settingsBuilder = XContentFactory.jsonBuilder() + .startObject() + .startObject("settings") + .field("index.number_of_shards", numberOfShards) + .endObject() + .startObject("mappings"); + if (multiTypes) { + settingsBuilder.startObject("doc"); + } + settingsBuilder.field("dynamic", false).startObject("properties"); + for (String type : basicTypes) { + settingsBuilder.startObject(type).field("type", type).endObject(); + } + settingsBuilder.startObject("date").field("type", "date").field("format", "yyyy/MM/dd").endObject(); + if (multiTypes) { + settingsBuilder.endObject(); + } + settingsBuilder.endObject().endObject().endObject(); + + createIndex.setJsonEntity(Strings.toString(settingsBuilder)); + assertOK(oldEs.performRequest(createIndex)); + + Request doc1 = new Request("PUT", "/" + indexName + "/" + "doc" + "/" + "1"); + doc1.addParameter("refresh", "true"); + XContentBuilder bodyDoc1 = XContentFactory.jsonBuilder() + .startObject() + .field("byte", 1) + .field("double", 1.0) + .field("float", 1.0) + .field("half_float", 1.0) + .field("integer", 1) + .field("long", 1) + .field("short", 1) + .field("date", "2017/01/01") + .field("keyword", "key1") + .field("boolean", false) + .field("ip", "192.168.0.1") + .array("geo_point", 13.5, 34.89) + .endObject(); + doc1.setJsonEntity(Strings.toString(bodyDoc1)); + assertOK(oldEs.performRequest(doc1)); + + Request doc2 = new Request("PUT", "/" + indexName + "/" + "doc" + "/" + "2"); + doc2.addParameter("refresh", "true"); + XContentBuilder bodyDoc2 = XContentFactory.jsonBuilder() + .startObject() + .field("byte", 2) + .field("double", 2.0) + .field("float", 2.0) + .field("half_float", 2.0) + .field("integer", 2) + .field("long", 2) + .field("short", 2) + .field("date", "2017/01/02") + .field("keyword", "key2") + .field("boolean", true) + .field("ip", "192.168.0.2") + .array("geo_point", -63.24, 31.0) + .endObject(); + doc2.setJsonEntity(Strings.toString(bodyDoc2)); + assertOK(oldEs.performRequest(doc2)); + + // register repo on old ES and take snapshot + Request createRepoRequest = new Request("PUT", "/_snapshot/" + repoName); + createRepoRequest.setJsonEntity(""" + {"type":"fs","settings":{"location":"%s"}} + """.formatted(repoLocation)); + assertOK(oldEs.performRequest(createRepoRequest)); + + Request createSnapshotRequest = new Request("PUT", "/_snapshot/" + repoName + "/" + snapshotName); + createSnapshotRequest.addParameter("wait_for_completion", "true"); +
createSnapshotRequest.setJsonEntity("{\"indices\":\"" + indexName + "\"}"); + assertOK(oldEs.performRequest(createSnapshotRequest)); + } + + // register repo on new ES and restore snapshot + Request createRepoRequest2 = new Request("PUT", "/_snapshot/" + repoName); + createRepoRequest2.setJsonEntity(""" + {"type":"fs","settings":{"location":"%s","allow_bwc_indices":true}} + """.formatted(repoLocation)); + assertOK(client().performRequest(createRepoRequest2)); + + final Request createRestoreRequest = new Request("POST", "/_snapshot/" + repoName + "/" + snapshotName + "/_restore"); + createRestoreRequest.addParameter("wait_for_completion", "true"); + createRestoreRequest.setJsonEntity("{\"indices\":\"" + indexName + "\"}"); + assertOK(client().performRequest(createRestoreRequest)); + + // add mappings (they will be auto-converted later) + Request putMappingsRequest = new Request("PUT", "/" + indexName + "/_mappings"); + XContentBuilder mappingsBuilder = XContentFactory.jsonBuilder().startObject().startObject("properties"); + for (String type : basicTypes) { + mappingsBuilder.startObject(type).field("type", type).field("index", false).endObject(); + } + mappingsBuilder.startObject("date").field("type", "date").field("index", false).field("format", "yyyy/MM/dd").endObject(); + mappingsBuilder.endObject().endObject(); + putMappingsRequest.setJsonEntity(Strings.toString(mappingsBuilder)); + assertOK(client().performRequest(putMappingsRequest)); + } +} From ad44b8852feffcfa5fbdc53a2c07563cabde8cd0 Mon Sep 17 00:00:00 2001 From: David Kyle Date: Tue, 15 Feb 2022 11:30:32 +0000 Subject: [PATCH 06/37] [ML] Delete ML HLRC client and request/response classes (#83912) --- .../client/MLRequestConverters.java | 907 ----- .../client/MachineLearningClient.java | 2992 ----------------- .../client/RestHighLevelClient.java | 18 - .../InferencePipelineAggregationBuilder.java | 138 - .../client/analytics/ParsedInference.java | 132 - .../client/ml/AbstractResultResponse.java | 49 - .../client/ml/CloseJobRequest.java | 183 - .../client/ml/CloseJobResponse.java | 80 - .../client/ml/DeleteCalendarEventRequest.java | 60 - .../client/ml/DeleteCalendarJobRequest.java | 70 - .../client/ml/DeleteCalendarRequest.java | 48 - .../ml/DeleteDataFrameAnalyticsRequest.java | 83 - .../client/ml/DeleteDatafeedRequest.java | 63 - .../client/ml/DeleteExpiredDataRequest.java | 104 - .../client/ml/DeleteExpiredDataResponse.java | 77 - .../client/ml/DeleteFilterRequest.java | 49 - .../client/ml/DeleteForecastRequest.java | 167 - .../client/ml/DeleteJobRequest.java | 86 - .../client/ml/DeleteJobResponse.java | 104 - .../client/ml/DeleteModelSnapshotRequest.java | 56 - .../ml/DeleteTrainedModelAliasRequest.java | 46 - .../client/ml/DeleteTrainedModelRequest.java | 53 - .../client/ml/EstimateModelMemoryRequest.java | 99 - .../ml/EstimateModelMemoryResponse.java | 69 - .../client/ml/EvaluateDataFrameRequest.java | 142 - .../client/ml/EvaluateDataFrameResponse.java | 103 - .../ml/ExplainDataFrameAnalyticsRequest.java | 61 - .../ml/ExplainDataFrameAnalyticsResponse.java | 83 - .../client/ml/FlushJobRequest.java | 181 - .../client/ml/FlushJobResponse.java | 104 - .../client/ml/ForecastJobRequest.java | 161 - .../client/ml/ForecastJobResponse.java | 90 - .../client/ml/GetBucketsRequest.java | 250 -- .../client/ml/GetBucketsResponse.java | 70 - .../client/ml/GetCalendarEventsRequest.java | 154 - .../client/ml/GetCalendarEventsResponse.java | 79 - .../client/ml/GetCalendarsRequest.java | 87 - .../client/ml/GetCalendarsResponse.java | 77 - 
.../client/ml/GetCategoriesRequest.java | 131 - .../client/ml/GetCategoriesResponse.java | 70 - .../ml/GetDataFrameAnalyticsRequest.java | 111 - .../ml/GetDataFrameAnalyticsResponse.java | 63 - .../ml/GetDataFrameAnalyticsStatsRequest.java | 88 - .../GetDataFrameAnalyticsStatsResponse.java | 101 - .../client/ml/GetDatafeedRequest.java | 148 - .../client/ml/GetDatafeedResponse.java | 80 - .../client/ml/GetDatafeedStatsRequest.java | 134 - .../client/ml/GetDatafeedStatsResponse.java | 79 - .../client/ml/GetFiltersRequest.java | 105 - .../client/ml/GetFiltersResponse.java | 80 - .../client/ml/GetInfluencersRequest.java | 212 -- .../client/ml/GetInfluencersResponse.java | 70 - .../client/ml/GetJobRequest.java | 148 - .../client/ml/GetJobResponse.java | 80 - .../client/ml/GetJobStatsRequest.java | 132 - .../client/ml/GetJobStatsResponse.java | 79 - .../client/ml/GetModelSnapshotsRequest.java | 191 -- .../client/ml/GetModelSnapshotsResponse.java | 71 - .../client/ml/GetOverallBucketsRequest.java | 251 -- .../client/ml/GetOverallBucketsResponse.java | 70 - .../client/ml/GetRecordsRequest.java | 211 -- .../client/ml/GetRecordsResponse.java | 70 - .../client/ml/GetTrainedModelsRequest.java | 198 -- .../client/ml/GetTrainedModelsResponse.java | 74 - .../ml/GetTrainedModelsStatsRequest.java | 92 - .../ml/GetTrainedModelsStatsResponse.java | 74 - .../client/ml/MlInfoRequest.java | 13 - .../client/ml/MlInfoResponse.java | 50 - .../client/ml/NodeAttributes.java | 140 - .../client/ml/OpenJobRequest.java | 116 - .../client/ml/OpenJobResponse.java | 100 - .../client/ml/PostCalendarEventRequest.java | 97 - .../client/ml/PostCalendarEventResponse.java | 81 - .../client/ml/PostDataRequest.java | 213 -- .../client/ml/PostDataResponse.java | 62 - .../client/ml/PreviewDatafeedRequest.java | 136 - .../client/ml/PreviewDatafeedResponse.java | 103 - .../client/ml/PutCalendarJobRequest.java | 70 - .../client/ml/PutCalendarRequest.java | 56 - .../client/ml/PutCalendarResponse.java | 65 - .../ml/PutDataFrameAnalyticsRequest.java | 65 - .../ml/PutDataFrameAnalyticsResponse.java | 46 - .../client/ml/PutDatafeedRequest.java | 68 - .../client/ml/PutDatafeedResponse.java | 59 - .../client/ml/PutFilterRequest.java | 68 - .../client/ml/PutFilterResponse.java | 59 - .../client/ml/PutJobRequest.java | 68 - .../client/ml/PutJobResponse.java | 59 - .../ml/PutTrainedModelAliasRequest.java | 56 - .../client/ml/PutTrainedModelRequest.java | 54 - .../client/ml/PutTrainedModelResponse.java | 51 - .../client/ml/RevertModelSnapshotRequest.java | 103 - .../ml/RevertModelSnapshotResponse.java | 81 - .../client/ml/SetUpgradeModeRequest.java | 75 - .../ml/StartDataFrameAnalyticsRequest.java | 62 - .../ml/StartDataFrameAnalyticsResponse.java | 85 - .../client/ml/StartDatafeedRequest.java | 144 - .../client/ml/StartDatafeedResponse.java | 100 - .../ml/StopDataFrameAnalyticsRequest.java | 89 - .../ml/StopDataFrameAnalyticsResponse.java | 73 - .../client/ml/StopDatafeedRequest.java | 182 - .../client/ml/StopDatafeedResponse.java | 81 - .../ml/UpdateDataFrameAnalyticsRequest.java | 65 - .../client/ml/UpdateDatafeedRequest.java | 64 - .../client/ml/UpdateFilterRequest.java | 140 - .../client/ml/UpdateJobRequest.java | 64 - .../client/ml/UpdateModelSnapshotRequest.java | 118 - .../ml/UpdateModelSnapshotResponse.java | 97 - .../ml/UpgradeJobModelSnapshotRequest.java | 108 - .../ml/UpgradeJobModelSnapshotResponse.java | 89 - .../client/ml/calendars/Calendar.java | 106 - .../client/ml/calendars/ScheduledEvent.java | 123 - 
.../client/ml/datafeed/ChunkingConfig.java | 118 - .../client/ml/datafeed/DatafeedConfig.java | 491 --- .../client/ml/datafeed/DatafeedState.java | 34 - .../client/ml/datafeed/DatafeedStats.java | 142 - .../ml/datafeed/DatafeedTimingStats.java | 166 - .../client/ml/datafeed/DatafeedUpdate.java | 455 --- .../ml/datafeed/DelayedDataCheckConfig.java | 119 - .../client/ml/dataframe/Classification.java | 550 --- .../ml/dataframe/DataFrameAnalysis.java | 16 - .../dataframe/DataFrameAnalyticsConfig.java | 341 -- .../DataFrameAnalyticsConfigUpdate.java | 174 - .../ml/dataframe/DataFrameAnalyticsDest.java | 111 - .../dataframe/DataFrameAnalyticsSource.java | 165 - .../ml/dataframe/DataFrameAnalyticsState.java | 29 - .../ml/dataframe/DataFrameAnalyticsStats.java | 188 -- ...ataFrameAnalysisNamedXContentProvider.java | 26 - .../client/ml/dataframe/OutlierDetection.java | 253 -- .../client/ml/dataframe/PhaseProgress.java | 82 - .../client/ml/dataframe/QueryConfig.java | 71 - .../client/ml/dataframe/Regression.java | 549 --- .../ml/dataframe/evaluation/Evaluation.java | 21 - .../evaluation/EvaluationMetric.java | 32 - .../MlEvaluationNamedXContentProvider.java | 216 -- .../classification/AccuracyMetric.java | 148 - .../classification/AucRocMetric.java | 95 - .../classification/Classification.java | 148 - .../MulticlassConfusionMatrixMetric.java | 289 -- .../classification/PerClassSingleValue.java | 72 - .../classification/PrecisionMetric.java | 135 - .../classification/RecallMetric.java | 135 - .../evaluation/common/AucRocPoint.java | 92 - .../evaluation/common/AucRocResult.java | 99 - .../AbstractConfusionMatrixMetric.java | 33 - .../outlierdetection/AucRocMetric.java | 85 - .../ConfusionMatrixMetric.java | 198 -- .../outlierdetection/OutlierDetection.java | 131 - .../outlierdetection/PrecisionMetric.java | 114 - .../outlierdetection/RecallMetric.java | 114 - .../evaluation/regression/HuberMetric.java | 138 - .../regression/MeanSquaredErrorMetric.java | 118 - .../MeanSquaredLogarithmicErrorMetric.java | 137 - .../evaluation/regression/RSquaredMetric.java | 120 - .../evaluation/regression/Regression.java | 132 - .../ml/dataframe/explain/FieldSelection.java | 161 - .../dataframe/explain/MemoryEstimation.java | 96 - .../ml/dataframe/stats/AnalysisStats.java | 18 - .../AnalysisStatsNamedXContentProvider.java | 37 - .../classification/ClassificationStats.java | 126 - .../stats/classification/Hyperparameters.java | 285 -- .../stats/classification/TimingStats.java | 79 - .../stats/classification/ValidationLoss.java | 78 - .../ml/dataframe/stats/common/DataCounts.java | 102 - .../ml/dataframe/stats/common/FoldValues.java | 79 - .../dataframe/stats/common/MemoryUsage.java | 133 - .../OutlierDetectionStats.java | 98 - .../stats/outlierdetection/Parameters.java | 142 - .../stats/outlierdetection/TimingStats.java | 65 - .../stats/regression/Hyperparameters.java | 270 -- .../stats/regression/RegressionStats.java | 126 - .../stats/regression/TimingStats.java | 79 - .../stats/regression/ValidationLoss.java | 78 - .../InferenceToXContentCompressor.java | 76 - .../MlInferenceNamedXContentProvider.java | 101 - .../ml/inference/NamedXContentObject.java | 23 - .../inference/NamedXContentObjectHelper.java | 60 - .../inference/SimpleBoundedInputStream.java | 55 - .../ml/inference/TrainedModelConfig.java | 504 --- .../ml/inference/TrainedModelDefinition.java | 119 - .../ml/inference/TrainedModelInput.java | 78 - .../ml/inference/TrainedModelStats.java | 128 - .../client/ml/inference/TrainedModelType.java | 26 - 
.../preprocessing/CustomWordEmbedding.java | 151 - .../preprocessing/FrequencyEncoding.java | 170 - .../ml/inference/preprocessing/Multi.java | 110 - .../ml/inference/preprocessing/NGram.java | 224 -- .../preprocessing/OneHotEncoding.java | 144 - .../inference/preprocessing/PreProcessor.java | 21 - .../preprocessing/TargetMeanEncoding.java | 192 -- .../inference/results/FeatureImportance.java | 175 - .../ml/inference/results/TopClassEntry.java | 113 - .../trainedmodel/ClassificationConfig.java | 119 - .../inference/trainedmodel/IndexLocation.java | 77 - .../trainedmodel/InferenceConfig.java | 14 - .../trainedmodel/InferenceStats.java | 160 - .../trainedmodel/RegressionConfig.java | 94 - .../ml/inference/trainedmodel/TargetType.java | 29 - .../inference/trainedmodel/TrainedModel.java | 25 - .../trainedmodel/TrainedModelLocation.java | 13 - .../trainedmodel/ensemble/Ensemble.java | 198 -- .../trainedmodel/ensemble/Exponent.java | 71 - .../ensemble/LogisticRegression.java | 71 - .../ensemble/OutputAggregator.java | 17 - .../trainedmodel/ensemble/WeightedMode.java | 77 - .../trainedmodel/ensemble/WeightedSum.java | 73 - .../langident/LangIdentNeuralNetwork.java | 94 - .../trainedmodel/langident/LangNetLayer.java | 111 - .../ml/inference/trainedmodel/tree/Tree.java | 231 -- .../inference/trainedmodel/tree/TreeNode.java | 286 -- .../client/ml/job/config/AnalysisConfig.java | 446 --- .../client/ml/job/config/AnalysisLimits.java | 131 - .../config/CategorizationAnalyzerConfig.java | 347 -- .../client/ml/job/config/DataDescription.java | 176 - .../config/DefaultDetectorDescription.java | 83 - .../client/ml/job/config/DetectionRule.java | 142 - .../client/ml/job/config/Detector.java | 377 --- .../ml/job/config/DetectorFunction.java | 84 - .../client/ml/job/config/FilterRef.java | 93 - .../client/ml/job/config/Job.java | 627 ---- .../client/ml/job/config/JobState.java | 32 - .../client/ml/job/config/JobUpdate.java | 588 ---- .../client/ml/job/config/MlFilter.java | 170 - .../client/ml/job/config/ModelPlotConfig.java | 93 - .../client/ml/job/config/Operator.java | 60 - .../PerPartitionCategorizationConfig.java | 86 - .../client/ml/job/config/RuleAction.java | 30 - .../client/ml/job/config/RuleCondition.java | 108 - .../client/ml/job/config/RuleScope.java | 123 - .../client/ml/job/process/DataCounts.java | 480 --- .../client/ml/job/process/ModelSizeStats.java | 606 ---- .../client/ml/job/process/ModelSnapshot.java | 361 -- .../client/ml/job/process/Quantiles.java | 104 - .../client/ml/job/process/TimingStats.java | 200 -- .../client/ml/job/results/AnomalyCause.java | 322 -- .../client/ml/job/results/AnomalyRecord.java | 476 --- .../client/ml/job/results/Bucket.java | 249 -- .../ml/job/results/BucketInfluencer.java | 196 -- .../ml/job/results/CategoryDefinition.java | 232 -- .../client/ml/job/results/Influence.java | 91 - .../client/ml/job/results/Influencer.java | 190 -- .../client/ml/job/results/OverallBucket.java | 206 -- .../client/ml/job/results/Result.java | 25 - .../client/ml/job/stats/ForecastStats.java | 165 - .../client/ml/job/stats/JobStats.java | 229 -- .../client/ml/job/stats/SimpleStats.java | 104 - ...icsearch.plugins.spi.NamedXContentProvider | 4 - 247 files changed, 35792 deletions(-) delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/MLRequestConverters.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/MachineLearningClient.java delete mode 100644 
client/rest-high-level/src/main/java/org/elasticsearch/client/analytics/InferencePipelineAggregationBuilder.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/analytics/ParsedInference.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/AbstractResultResponse.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/CloseJobRequest.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/CloseJobResponse.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteCalendarEventRequest.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteCalendarJobRequest.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteCalendarRequest.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteDataFrameAnalyticsRequest.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteDatafeedRequest.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteExpiredDataRequest.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteExpiredDataResponse.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteFilterRequest.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteForecastRequest.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteJobRequest.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteJobResponse.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteModelSnapshotRequest.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteTrainedModelAliasRequest.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteTrainedModelRequest.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/EstimateModelMemoryRequest.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/EstimateModelMemoryResponse.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/EvaluateDataFrameRequest.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/EvaluateDataFrameResponse.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/ExplainDataFrameAnalyticsRequest.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/ExplainDataFrameAnalyticsResponse.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/FlushJobRequest.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/FlushJobResponse.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/ForecastJobRequest.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/ForecastJobResponse.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetBucketsRequest.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetBucketsResponse.java delete mode 100644 
client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetCalendarEventsRequest.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetCalendarEventsResponse.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetCalendarsRequest.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetCalendarsResponse.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetCategoriesRequest.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetCategoriesResponse.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetDataFrameAnalyticsRequest.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetDataFrameAnalyticsResponse.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetDataFrameAnalyticsStatsRequest.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetDataFrameAnalyticsStatsResponse.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetDatafeedRequest.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetDatafeedResponse.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetDatafeedStatsRequest.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetDatafeedStatsResponse.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetFiltersRequest.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetFiltersResponse.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetInfluencersRequest.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetInfluencersResponse.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetJobRequest.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetJobResponse.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetJobStatsRequest.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetJobStatsResponse.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetModelSnapshotsRequest.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetModelSnapshotsResponse.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetOverallBucketsRequest.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetOverallBucketsResponse.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetRecordsRequest.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetRecordsResponse.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetTrainedModelsRequest.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetTrainedModelsResponse.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetTrainedModelsStatsRequest.java delete mode 100644 
client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetTrainedModelsStatsResponse.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/MlInfoRequest.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/MlInfoResponse.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/NodeAttributes.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/OpenJobRequest.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/OpenJobResponse.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PostCalendarEventRequest.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PostCalendarEventResponse.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PostDataRequest.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PostDataResponse.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PreviewDatafeedRequest.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PreviewDatafeedResponse.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutCalendarJobRequest.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutCalendarRequest.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutCalendarResponse.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutDataFrameAnalyticsRequest.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutDataFrameAnalyticsResponse.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutDatafeedRequest.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutDatafeedResponse.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutFilterRequest.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutFilterResponse.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutJobRequest.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutJobResponse.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutTrainedModelAliasRequest.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutTrainedModelRequest.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutTrainedModelResponse.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/RevertModelSnapshotRequest.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/RevertModelSnapshotResponse.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/SetUpgradeModeRequest.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/StartDataFrameAnalyticsRequest.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/StartDataFrameAnalyticsResponse.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/StartDatafeedRequest.java delete mode 100644 
client/rest-high-level/src/main/java/org/elasticsearch/client/ml/StartDatafeedResponse.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/StopDataFrameAnalyticsRequest.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/StopDataFrameAnalyticsResponse.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/StopDatafeedRequest.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/StopDatafeedResponse.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/UpdateDataFrameAnalyticsRequest.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/UpdateDatafeedRequest.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/UpdateFilterRequest.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/UpdateJobRequest.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/UpdateModelSnapshotRequest.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/UpdateModelSnapshotResponse.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/UpgradeJobModelSnapshotRequest.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/UpgradeJobModelSnapshotResponse.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/calendars/Calendar.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/calendars/ScheduledEvent.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/datafeed/ChunkingConfig.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/datafeed/DatafeedConfig.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/datafeed/DatafeedState.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/datafeed/DatafeedStats.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/datafeed/DatafeedTimingStats.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/datafeed/DatafeedUpdate.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/datafeed/DelayedDataCheckConfig.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/Classification.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/DataFrameAnalysis.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/DataFrameAnalyticsConfig.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/DataFrameAnalyticsConfigUpdate.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/DataFrameAnalyticsDest.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/DataFrameAnalyticsSource.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/DataFrameAnalyticsState.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/DataFrameAnalyticsStats.java delete mode 100644 
client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/MlDataFrameAnalysisNamedXContentProvider.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/OutlierDetection.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/PhaseProgress.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/QueryConfig.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/Regression.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/Evaluation.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/EvaluationMetric.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/MlEvaluationNamedXContentProvider.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/classification/AccuracyMetric.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/classification/AucRocMetric.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/classification/Classification.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/classification/MulticlassConfusionMatrixMetric.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/classification/PerClassSingleValue.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/classification/PrecisionMetric.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/classification/RecallMetric.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/common/AucRocPoint.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/common/AucRocResult.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/outlierdetection/AbstractConfusionMatrixMetric.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/outlierdetection/AucRocMetric.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/outlierdetection/ConfusionMatrixMetric.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/outlierdetection/OutlierDetection.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/outlierdetection/PrecisionMetric.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/outlierdetection/RecallMetric.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/regression/HuberMetric.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/regression/MeanSquaredErrorMetric.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/regression/MeanSquaredLogarithmicErrorMetric.java delete mode 100644 
client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/regression/RSquaredMetric.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/regression/Regression.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/explain/FieldSelection.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/explain/MemoryEstimation.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/AnalysisStats.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/AnalysisStatsNamedXContentProvider.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/classification/ClassificationStats.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/classification/Hyperparameters.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/classification/TimingStats.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/classification/ValidationLoss.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/common/DataCounts.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/common/FoldValues.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/common/MemoryUsage.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/outlierdetection/OutlierDetectionStats.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/outlierdetection/Parameters.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/outlierdetection/TimingStats.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/regression/Hyperparameters.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/regression/RegressionStats.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/regression/TimingStats.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/regression/ValidationLoss.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/InferenceToXContentCompressor.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/MlInferenceNamedXContentProvider.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/NamedXContentObject.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/NamedXContentObjectHelper.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/SimpleBoundedInputStream.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/TrainedModelConfig.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/TrainedModelDefinition.java delete mode 100644 
client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/TrainedModelInput.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/TrainedModelStats.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/TrainedModelType.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/preprocessing/CustomWordEmbedding.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/preprocessing/FrequencyEncoding.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/preprocessing/Multi.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/preprocessing/NGram.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/preprocessing/OneHotEncoding.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/preprocessing/PreProcessor.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/preprocessing/TargetMeanEncoding.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/results/FeatureImportance.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/results/TopClassEntry.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/ClassificationConfig.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/IndexLocation.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/InferenceConfig.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/InferenceStats.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/RegressionConfig.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/TargetType.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/TrainedModel.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/TrainedModelLocation.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/ensemble/Ensemble.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/ensemble/Exponent.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/ensemble/LogisticRegression.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/ensemble/OutputAggregator.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/ensemble/WeightedMode.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/ensemble/WeightedSum.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/langident/LangIdentNeuralNetwork.java delete mode 100644 
client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/langident/LangNetLayer.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/tree/Tree.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/tree/TreeNode.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/AnalysisConfig.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/AnalysisLimits.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/CategorizationAnalyzerConfig.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/DataDescription.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/DefaultDetectorDescription.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/DetectionRule.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/Detector.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/DetectorFunction.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/FilterRef.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/Job.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/JobState.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/JobUpdate.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/MlFilter.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/ModelPlotConfig.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/Operator.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/PerPartitionCategorizationConfig.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/RuleAction.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/RuleCondition.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/RuleScope.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/process/DataCounts.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/process/ModelSizeStats.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/process/ModelSnapshot.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/process/Quantiles.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/process/TimingStats.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/results/AnomalyCause.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/results/AnomalyRecord.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/results/Bucket.java delete mode 100644 
client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/results/BucketInfluencer.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/results/CategoryDefinition.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/results/Influence.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/results/Influencer.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/results/OverallBucket.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/results/Result.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/stats/ForecastStats.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/stats/JobStats.java delete mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/stats/SimpleStats.java diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/MLRequestConverters.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/MLRequestConverters.java deleted file mode 100644 index 7c036510d0790..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/MLRequestConverters.java +++ /dev/null @@ -1,907 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ - -package org.elasticsearch.client; - -import org.apache.http.HttpEntity; -import org.apache.http.client.methods.HttpDelete; -import org.apache.http.client.methods.HttpGet; -import org.apache.http.client.methods.HttpPost; -import org.apache.http.client.methods.HttpPut; -import org.apache.http.nio.entity.NByteArrayEntity; -import org.apache.lucene.util.BytesRef; -import org.elasticsearch.client.RequestConverters.EndpointBuilder; -import org.elasticsearch.client.core.PageParams; -import org.elasticsearch.client.ml.CloseJobRequest; -import org.elasticsearch.client.ml.DeleteCalendarEventRequest; -import org.elasticsearch.client.ml.DeleteCalendarJobRequest; -import org.elasticsearch.client.ml.DeleteCalendarRequest; -import org.elasticsearch.client.ml.DeleteDataFrameAnalyticsRequest; -import org.elasticsearch.client.ml.DeleteDatafeedRequest; -import org.elasticsearch.client.ml.DeleteExpiredDataRequest; -import org.elasticsearch.client.ml.DeleteFilterRequest; -import org.elasticsearch.client.ml.DeleteForecastRequest; -import org.elasticsearch.client.ml.DeleteJobRequest; -import org.elasticsearch.client.ml.DeleteModelSnapshotRequest; -import org.elasticsearch.client.ml.DeleteTrainedModelAliasRequest; -import org.elasticsearch.client.ml.DeleteTrainedModelRequest; -import org.elasticsearch.client.ml.EstimateModelMemoryRequest; -import org.elasticsearch.client.ml.EvaluateDataFrameRequest; -import org.elasticsearch.client.ml.ExplainDataFrameAnalyticsRequest; -import org.elasticsearch.client.ml.FlushJobRequest; -import org.elasticsearch.client.ml.ForecastJobRequest; -import org.elasticsearch.client.ml.GetBucketsRequest; -import org.elasticsearch.client.ml.GetCalendarEventsRequest; -import org.elasticsearch.client.ml.GetCalendarsRequest; -import org.elasticsearch.client.ml.GetCategoriesRequest; -import 
org.elasticsearch.client.ml.GetDataFrameAnalyticsRequest; -import org.elasticsearch.client.ml.GetDataFrameAnalyticsStatsRequest; -import org.elasticsearch.client.ml.GetDatafeedRequest; -import org.elasticsearch.client.ml.GetDatafeedStatsRequest; -import org.elasticsearch.client.ml.GetFiltersRequest; -import org.elasticsearch.client.ml.GetInfluencersRequest; -import org.elasticsearch.client.ml.GetJobRequest; -import org.elasticsearch.client.ml.GetJobStatsRequest; -import org.elasticsearch.client.ml.GetModelSnapshotsRequest; -import org.elasticsearch.client.ml.GetOverallBucketsRequest; -import org.elasticsearch.client.ml.GetRecordsRequest; -import org.elasticsearch.client.ml.GetTrainedModelsRequest; -import org.elasticsearch.client.ml.GetTrainedModelsStatsRequest; -import org.elasticsearch.client.ml.MlInfoRequest; -import org.elasticsearch.client.ml.OpenJobRequest; -import org.elasticsearch.client.ml.PostCalendarEventRequest; -import org.elasticsearch.client.ml.PostDataRequest; -import org.elasticsearch.client.ml.PreviewDatafeedRequest; -import org.elasticsearch.client.ml.PutCalendarJobRequest; -import org.elasticsearch.client.ml.PutCalendarRequest; -import org.elasticsearch.client.ml.PutDataFrameAnalyticsRequest; -import org.elasticsearch.client.ml.PutDatafeedRequest; -import org.elasticsearch.client.ml.PutFilterRequest; -import org.elasticsearch.client.ml.PutJobRequest; -import org.elasticsearch.client.ml.PutTrainedModelAliasRequest; -import org.elasticsearch.client.ml.PutTrainedModelRequest; -import org.elasticsearch.client.ml.RevertModelSnapshotRequest; -import org.elasticsearch.client.ml.SetUpgradeModeRequest; -import org.elasticsearch.client.ml.StartDataFrameAnalyticsRequest; -import org.elasticsearch.client.ml.StartDatafeedRequest; -import org.elasticsearch.client.ml.StopDataFrameAnalyticsRequest; -import org.elasticsearch.client.ml.StopDatafeedRequest; -import org.elasticsearch.client.ml.UpdateDataFrameAnalyticsRequest; -import org.elasticsearch.client.ml.UpdateDatafeedRequest; -import org.elasticsearch.client.ml.UpdateFilterRequest; -import org.elasticsearch.client.ml.UpdateJobRequest; -import org.elasticsearch.client.ml.UpdateModelSnapshotRequest; -import org.elasticsearch.client.ml.UpgradeJobModelSnapshotRequest; -import org.elasticsearch.common.Strings; -import org.elasticsearch.common.bytes.BytesReference; - -import java.io.IOException; - -import static org.elasticsearch.client.RequestConverters.REQUEST_BODY_CONTENT_TYPE; -import static org.elasticsearch.client.RequestConverters.createContentType; -import static org.elasticsearch.client.RequestConverters.createEntity; - -final class MLRequestConverters { - - private MLRequestConverters() {} - - static Request putJob(PutJobRequest putJobRequest) throws IOException { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml") - .addPathPartAsIs("anomaly_detectors") - .addPathPart(putJobRequest.getJob().getId()) - .build(); - Request request = new Request(HttpPut.METHOD_NAME, endpoint); - request.setEntity(createEntity(putJobRequest, REQUEST_BODY_CONTENT_TYPE)); - return request; - } - - static Request getJob(GetJobRequest getJobRequest) { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml") - .addPathPartAsIs("anomaly_detectors") - .addPathPart(Strings.collectionToCommaDelimitedString(getJobRequest.getJobIds())) - .build(); - Request request = new Request(HttpGet.METHOD_NAME, endpoint); - - RequestConverters.Params params = new RequestConverters.Params(); - if (getJobRequest.getAllowNoMatch() != null) { - 
params.putParam(GetJobRequest.ALLOW_NO_MATCH.getPreferredName(), Boolean.toString(getJobRequest.getAllowNoMatch())); - } - if (getJobRequest.getExcludeGenerated() != null) { - params.putParam(GetJobRequest.EXCLUDE_GENERATED, Boolean.toString(getJobRequest.getExcludeGenerated())); - } - request.addParameters(params.asMap()); - return request; - } - - static Request getJobStats(GetJobStatsRequest getJobStatsRequest) { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml") - .addPathPartAsIs("anomaly_detectors") - .addPathPart(Strings.collectionToCommaDelimitedString(getJobStatsRequest.getJobIds())) - .addPathPartAsIs("_stats") - .build(); - Request request = new Request(HttpGet.METHOD_NAME, endpoint); - - RequestConverters.Params params = new RequestConverters.Params(); - if (getJobStatsRequest.getAllowNoMatch() != null) { - params.putParam("allow_no_match", Boolean.toString(getJobStatsRequest.getAllowNoMatch())); - } - request.addParameters(params.asMap()); - return request; - } - - static Request openJob(OpenJobRequest openJobRequest) throws IOException { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml") - .addPathPartAsIs("anomaly_detectors") - .addPathPart(openJobRequest.getJobId()) - .addPathPartAsIs("_open") - .build(); - Request request = new Request(HttpPost.METHOD_NAME, endpoint); - request.setEntity(createEntity(openJobRequest, REQUEST_BODY_CONTENT_TYPE)); - return request; - } - - static Request closeJob(CloseJobRequest closeJobRequest) throws IOException { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml") - .addPathPartAsIs("anomaly_detectors") - .addPathPart(Strings.collectionToCommaDelimitedString(closeJobRequest.getJobIds())) - .addPathPartAsIs("_close") - .build(); - Request request = new Request(HttpPost.METHOD_NAME, endpoint); - request.setEntity(createEntity(closeJobRequest, REQUEST_BODY_CONTENT_TYPE)); - return request; - } - - static Request deleteExpiredData(DeleteExpiredDataRequest deleteExpiredDataRequest) throws IOException { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml") - .addPathPartAsIs("_delete_expired_data") - .addPathPart(deleteExpiredDataRequest.getJobId()) - .build(); - Request request = new Request(HttpDelete.METHOD_NAME, endpoint); - request.setEntity(createEntity(deleteExpiredDataRequest, REQUEST_BODY_CONTENT_TYPE)); - return request; - } - - static Request deleteJob(DeleteJobRequest deleteJobRequest) { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml") - .addPathPartAsIs("anomaly_detectors") - .addPathPart(deleteJobRequest.getJobId()) - .build(); - Request request = new Request(HttpDelete.METHOD_NAME, endpoint); - - RequestConverters.Params params = new RequestConverters.Params(); - if (deleteJobRequest.getForce() != null) { - params.putParam("force", Boolean.toString(deleteJobRequest.getForce())); - } - if (deleteJobRequest.getWaitForCompletion() != null) { - params.putParam("wait_for_completion", Boolean.toString(deleteJobRequest.getWaitForCompletion())); - } - request.addParameters(params.asMap()); - return request; - } - - static Request flushJob(FlushJobRequest flushJobRequest) throws IOException { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml") - .addPathPartAsIs("anomaly_detectors") - .addPathPart(flushJobRequest.getJobId()) - .addPathPartAsIs("_flush") - .build(); - Request request = new Request(HttpPost.METHOD_NAME, endpoint); - request.setEntity(createEntity(flushJobRequest, REQUEST_BODY_CONTENT_TYPE)); - return request; - } - - static Request 
forecastJob(ForecastJobRequest forecastJobRequest) throws IOException { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml") - .addPathPartAsIs("anomaly_detectors") - .addPathPart(forecastJobRequest.getJobId()) - .addPathPartAsIs("_forecast") - .build(); - Request request = new Request(HttpPost.METHOD_NAME, endpoint); - request.setEntity(createEntity(forecastJobRequest, REQUEST_BODY_CONTENT_TYPE)); - return request; - } - - static Request updateJob(UpdateJobRequest updateJobRequest) throws IOException { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml") - .addPathPartAsIs("anomaly_detectors") - .addPathPart(updateJobRequest.getJobUpdate().getJobId()) - .addPathPartAsIs("_update") - .build(); - Request request = new Request(HttpPost.METHOD_NAME, endpoint); - request.setEntity(createEntity(updateJobRequest.getJobUpdate(), REQUEST_BODY_CONTENT_TYPE)); - return request; - } - - static Request putDatafeed(PutDatafeedRequest putDatafeedRequest) throws IOException { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml") - .addPathPartAsIs("datafeeds") - .addPathPart(putDatafeedRequest.getDatafeed().getId()) - .build(); - Request request = new Request(HttpPut.METHOD_NAME, endpoint); - request.setEntity(createEntity(putDatafeedRequest, REQUEST_BODY_CONTENT_TYPE)); - return request; - } - - static Request updateDatafeed(UpdateDatafeedRequest updateDatafeedRequest) throws IOException { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml") - .addPathPartAsIs("datafeeds") - .addPathPart(updateDatafeedRequest.getDatafeedUpdate().getId()) - .addPathPartAsIs("_update") - .build(); - Request request = new Request(HttpPost.METHOD_NAME, endpoint); - request.setEntity(createEntity(updateDatafeedRequest, REQUEST_BODY_CONTENT_TYPE)); - return request; - } - - static Request getDatafeed(GetDatafeedRequest getDatafeedRequest) { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml") - .addPathPartAsIs("datafeeds") - .addPathPart(Strings.collectionToCommaDelimitedString(getDatafeedRequest.getDatafeedIds())) - .build(); - Request request = new Request(HttpGet.METHOD_NAME, endpoint); - - RequestConverters.Params params = new RequestConverters.Params(); - if (getDatafeedRequest.getAllowNoMatch() != null) { - params.putParam(GetDatafeedRequest.ALLOW_NO_MATCH.getPreferredName(), Boolean.toString(getDatafeedRequest.getAllowNoMatch())); - } - if (getDatafeedRequest.getExcludeGenerated() != null) { - params.putParam(GetDatafeedRequest.EXCLUDE_GENERATED, Boolean.toString(getDatafeedRequest.getExcludeGenerated())); - } - request.addParameters(params.asMap()); - return request; - } - - static Request deleteDatafeed(DeleteDatafeedRequest deleteDatafeedRequest) { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml") - .addPathPartAsIs("datafeeds") - .addPathPart(deleteDatafeedRequest.getDatafeedId()) - .build(); - Request request = new Request(HttpDelete.METHOD_NAME, endpoint); - RequestConverters.Params params = new RequestConverters.Params(); - if (deleteDatafeedRequest.getForce() != null) { - params.putParam("force", Boolean.toString(deleteDatafeedRequest.getForce())); - } - request.addParameters(params.asMap()); - return request; - } - - static Request startDatafeed(StartDatafeedRequest startDatafeedRequest) throws IOException { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml") - .addPathPartAsIs("datafeeds") - .addPathPart(startDatafeedRequest.getDatafeedId()) - .addPathPartAsIs("_start") - .build(); - Request request = new 
Request(HttpPost.METHOD_NAME, endpoint); - request.setEntity(createEntity(startDatafeedRequest, REQUEST_BODY_CONTENT_TYPE)); - return request; - } - - static Request stopDatafeed(StopDatafeedRequest stopDatafeedRequest) throws IOException { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml") - .addPathPartAsIs("datafeeds") - .addPathPart(Strings.collectionToCommaDelimitedString(stopDatafeedRequest.getDatafeedIds())) - .addPathPartAsIs("_stop") - .build(); - Request request = new Request(HttpPost.METHOD_NAME, endpoint); - request.setEntity(createEntity(stopDatafeedRequest, REQUEST_BODY_CONTENT_TYPE)); - return request; - } - - static Request getDatafeedStats(GetDatafeedStatsRequest getDatafeedStatsRequest) { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml") - .addPathPartAsIs("datafeeds") - .addPathPart(Strings.collectionToCommaDelimitedString(getDatafeedStatsRequest.getDatafeedIds())) - .addPathPartAsIs("_stats") - .build(); - Request request = new Request(HttpGet.METHOD_NAME, endpoint); - - RequestConverters.Params params = new RequestConverters.Params(); - if (getDatafeedStatsRequest.getAllowNoMatch() != null) { - params.putParam("allow_no_match", Boolean.toString(getDatafeedStatsRequest.getAllowNoMatch())); - } - request.addParameters(params.asMap()); - return request; - } - - static Request previewDatafeed(PreviewDatafeedRequest previewDatafeedRequest) throws IOException { - EndpointBuilder builder = new EndpointBuilder().addPathPartAsIs("_ml").addPathPartAsIs("datafeeds"); - String endpoint = previewDatafeedRequest.getDatafeedId() != null - ? builder.addPathPart(previewDatafeedRequest.getDatafeedId()).addPathPartAsIs("_preview").build() - : builder.addPathPartAsIs("_preview").build(); - Request request = new Request(HttpPost.METHOD_NAME, endpoint); - if (previewDatafeedRequest.getDatafeedId() == null) { - request.setEntity(createEntity(previewDatafeedRequest, REQUEST_BODY_CONTENT_TYPE)); - } - return request; - } - - static Request deleteForecast(DeleteForecastRequest deleteForecastRequest) { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml") - .addPathPartAsIs("anomaly_detectors") - .addPathPart(deleteForecastRequest.getJobId()) - .addPathPartAsIs("_forecast") - .addPathPart(Strings.collectionToCommaDelimitedString(deleteForecastRequest.getForecastIds())) - .build(); - Request request = new Request(HttpDelete.METHOD_NAME, endpoint); - RequestConverters.Params params = new RequestConverters.Params(); - if (deleteForecastRequest.getAllowNoForecasts() != null) { - params.putParam("allow_no_forecasts", Boolean.toString(deleteForecastRequest.getAllowNoForecasts())); - } - if (deleteForecastRequest.timeout() != null) { - params.putParam("timeout", deleteForecastRequest.timeout().getStringRep()); - } - request.addParameters(params.asMap()); - return request; - } - - static Request deleteModelSnapshot(DeleteModelSnapshotRequest deleteModelSnapshotRequest) { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml") - .addPathPartAsIs("anomaly_detectors") - .addPathPart(deleteModelSnapshotRequest.getJobId()) - .addPathPartAsIs("model_snapshots") - .addPathPart(deleteModelSnapshotRequest.getSnapshotId()) - .build(); - return new Request(HttpDelete.METHOD_NAME, endpoint); - } - - static Request getBuckets(GetBucketsRequest getBucketsRequest) throws IOException { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml") - .addPathPartAsIs("anomaly_detectors") - .addPathPart(getBucketsRequest.getJobId()) - .addPathPartAsIs("results") - 
.addPathPartAsIs("buckets") - .build(); - Request request = new Request(HttpGet.METHOD_NAME, endpoint); - request.setEntity(createEntity(getBucketsRequest, REQUEST_BODY_CONTENT_TYPE)); - return request; - } - - static Request getCategories(GetCategoriesRequest getCategoriesRequest) throws IOException { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml") - .addPathPartAsIs("anomaly_detectors") - .addPathPart(getCategoriesRequest.getJobId()) - .addPathPartAsIs("results") - .addPathPartAsIs("categories") - .build(); - Request request = new Request(HttpGet.METHOD_NAME, endpoint); - request.setEntity(createEntity(getCategoriesRequest, REQUEST_BODY_CONTENT_TYPE)); - return request; - } - - static Request getModelSnapshots(GetModelSnapshotsRequest getModelSnapshotsRequest) throws IOException { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml") - .addPathPartAsIs("anomaly_detectors") - .addPathPart(getModelSnapshotsRequest.getJobId()) - .addPathPartAsIs("model_snapshots") - .build(); - Request request = new Request(HttpGet.METHOD_NAME, endpoint); - request.setEntity(createEntity(getModelSnapshotsRequest, REQUEST_BODY_CONTENT_TYPE)); - return request; - } - - static Request updateModelSnapshot(UpdateModelSnapshotRequest updateModelSnapshotRequest) throws IOException { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml") - .addPathPartAsIs("anomaly_detectors") - .addPathPart(updateModelSnapshotRequest.getJobId()) - .addPathPartAsIs("model_snapshots") - .addPathPart(updateModelSnapshotRequest.getSnapshotId()) - .addPathPartAsIs("_update") - .build(); - Request request = new Request(HttpPost.METHOD_NAME, endpoint); - request.setEntity(createEntity(updateModelSnapshotRequest, REQUEST_BODY_CONTENT_TYPE)); - return request; - } - - static Request upgradeJobSnapshot(UpgradeJobModelSnapshotRequest upgradeJobModelSnapshotRequest) { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml") - .addPathPartAsIs("anomaly_detectors") - .addPathPart(upgradeJobModelSnapshotRequest.getJobId()) - .addPathPartAsIs("model_snapshots") - .addPathPart(upgradeJobModelSnapshotRequest.getSnapshotId()) - .addPathPartAsIs("_upgrade") - .build(); - Request request = new Request(HttpPost.METHOD_NAME, endpoint); - RequestConverters.Params params = new RequestConverters.Params(); - if (upgradeJobModelSnapshotRequest.getTimeout() != null) { - params.putParam( - UpgradeJobModelSnapshotRequest.TIMEOUT.getPreferredName(), - upgradeJobModelSnapshotRequest.getTimeout().getStringRep() - ); - } - if (upgradeJobModelSnapshotRequest.getWaitForCompletion() != null) { - params.putParam( - UpgradeJobModelSnapshotRequest.WAIT_FOR_COMPLETION.getPreferredName(), - upgradeJobModelSnapshotRequest.getWaitForCompletion().toString() - ); - } - request.addParameters(params.asMap()); - return request; - } - - static Request revertModelSnapshot(RevertModelSnapshotRequest revertModelSnapshotsRequest) throws IOException { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml") - .addPathPartAsIs("anomaly_detectors") - .addPathPart(revertModelSnapshotsRequest.getJobId()) - .addPathPartAsIs("model_snapshots") - .addPathPart(revertModelSnapshotsRequest.getSnapshotId()) - .addPathPart("_revert") - .build(); - Request request = new Request(HttpPost.METHOD_NAME, endpoint); - request.setEntity(createEntity(revertModelSnapshotsRequest, REQUEST_BODY_CONTENT_TYPE)); - return request; - } - - static Request getOverallBuckets(GetOverallBucketsRequest getOverallBucketsRequest) throws IOException { - String endpoint 
= new EndpointBuilder().addPathPartAsIs("_ml") - .addPathPartAsIs("anomaly_detectors") - .addPathPart(Strings.collectionToCommaDelimitedString(getOverallBucketsRequest.getJobIds())) - .addPathPartAsIs("results") - .addPathPartAsIs("overall_buckets") - .build(); - Request request = new Request(HttpGet.METHOD_NAME, endpoint); - request.setEntity(createEntity(getOverallBucketsRequest, REQUEST_BODY_CONTENT_TYPE)); - return request; - } - - static Request getRecords(GetRecordsRequest getRecordsRequest) throws IOException { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml") - .addPathPartAsIs("anomaly_detectors") - .addPathPart(getRecordsRequest.getJobId()) - .addPathPartAsIs("results") - .addPathPartAsIs("records") - .build(); - Request request = new Request(HttpGet.METHOD_NAME, endpoint); - request.setEntity(createEntity(getRecordsRequest, REQUEST_BODY_CONTENT_TYPE)); - return request; - } - - static Request postData(PostDataRequest postDataRequest) { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml") - .addPathPartAsIs("anomaly_detectors") - .addPathPart(postDataRequest.getJobId()) - .addPathPartAsIs("_data") - .build(); - Request request = new Request(HttpPost.METHOD_NAME, endpoint); - - RequestConverters.Params params = new RequestConverters.Params(); - if (postDataRequest.getResetStart() != null) { - params.putParam(PostDataRequest.RESET_START.getPreferredName(), postDataRequest.getResetStart()); - } - if (postDataRequest.getResetEnd() != null) { - params.putParam(PostDataRequest.RESET_END.getPreferredName(), postDataRequest.getResetEnd()); - } - BytesReference content = postDataRequest.getContent(); - request.addParameters(params.asMap()); - if (content != null) { - BytesRef source = postDataRequest.getContent().toBytesRef(); - HttpEntity byteEntity = new NByteArrayEntity( - source.bytes, - source.offset, - source.length, - createContentType(postDataRequest.getXContentType()) - ); - request.setEntity(byteEntity); - } - return request; - } - - static Request getInfluencers(GetInfluencersRequest getInfluencersRequest) throws IOException { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml") - .addPathPartAsIs("anomaly_detectors") - .addPathPart(getInfluencersRequest.getJobId()) - .addPathPartAsIs("results") - .addPathPartAsIs("influencers") - .build(); - Request request = new Request(HttpGet.METHOD_NAME, endpoint); - request.setEntity(createEntity(getInfluencersRequest, REQUEST_BODY_CONTENT_TYPE)); - return request; - } - - static Request putCalendar(PutCalendarRequest putCalendarRequest) throws IOException { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml") - .addPathPartAsIs("calendars") - .addPathPart(putCalendarRequest.getCalendar().getId()) - .build(); - Request request = new Request(HttpPut.METHOD_NAME, endpoint); - request.setEntity(createEntity(putCalendarRequest, REQUEST_BODY_CONTENT_TYPE)); - return request; - } - - static Request getCalendars(GetCalendarsRequest getCalendarsRequest) throws IOException { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml") - .addPathPartAsIs("calendars") - .addPathPart(getCalendarsRequest.getCalendarId()) - .build(); - Request request = new Request(HttpGet.METHOD_NAME, endpoint); - request.setEntity(createEntity(getCalendarsRequest, REQUEST_BODY_CONTENT_TYPE)); - return request; - } - - static Request putCalendarJob(PutCalendarJobRequest putCalendarJobRequest) { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml") - .addPathPartAsIs("calendars") - 
.addPathPart(putCalendarJobRequest.getCalendarId()) - .addPathPartAsIs("jobs") - .addPathPart(Strings.collectionToCommaDelimitedString(putCalendarJobRequest.getJobIds())) - .build(); - return new Request(HttpPut.METHOD_NAME, endpoint); - } - - static Request deleteCalendarJob(DeleteCalendarJobRequest deleteCalendarJobRequest) { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml") - .addPathPartAsIs("calendars") - .addPathPart(deleteCalendarJobRequest.getCalendarId()) - .addPathPartAsIs("jobs") - .addPathPart(Strings.collectionToCommaDelimitedString(deleteCalendarJobRequest.getJobIds())) - .build(); - return new Request(HttpDelete.METHOD_NAME, endpoint); - } - - static Request deleteCalendar(DeleteCalendarRequest deleteCalendarRequest) { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml") - .addPathPartAsIs("calendars") - .addPathPart(deleteCalendarRequest.getCalendarId()) - .build(); - Request request = new Request(HttpDelete.METHOD_NAME, endpoint); - return request; - } - - static Request getCalendarEvents(GetCalendarEventsRequest getCalendarEventsRequest) throws IOException { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml") - .addPathPartAsIs("calendars") - .addPathPart(getCalendarEventsRequest.getCalendarId()) - .addPathPartAsIs("events") - .build(); - Request request = new Request(HttpGet.METHOD_NAME, endpoint); - request.setEntity(createEntity(getCalendarEventsRequest, REQUEST_BODY_CONTENT_TYPE)); - return request; - } - - static Request postCalendarEvents(PostCalendarEventRequest postCalendarEventRequest) throws IOException { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml") - .addPathPartAsIs("calendars") - .addPathPart(postCalendarEventRequest.getCalendarId()) - .addPathPartAsIs("events") - .build(); - Request request = new Request(HttpPost.METHOD_NAME, endpoint); - request.setEntity( - createEntity(postCalendarEventRequest, REQUEST_BODY_CONTENT_TYPE, PostCalendarEventRequest.EXCLUDE_CALENDAR_ID_PARAMS) - ); - return request; - } - - static Request deleteCalendarEvent(DeleteCalendarEventRequest deleteCalendarEventRequest) { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml") - .addPathPartAsIs("calendars") - .addPathPart(deleteCalendarEventRequest.getCalendarId()) - .addPathPartAsIs("events") - .addPathPart(deleteCalendarEventRequest.getEventId()) - .build(); - return new Request(HttpDelete.METHOD_NAME, endpoint); - } - - static Request estimateModelMemory(EstimateModelMemoryRequest estimateModelMemoryRequest) throws IOException { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml") - .addPathPartAsIs("anomaly_detectors") - .addPathPartAsIs("_estimate_model_memory") - .build(); - Request request = new Request(HttpPost.METHOD_NAME, endpoint); - request.setEntity(createEntity(estimateModelMemoryRequest, REQUEST_BODY_CONTENT_TYPE)); - return request; - } - - static Request putDataFrameAnalytics(PutDataFrameAnalyticsRequest putRequest) throws IOException { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml", "data_frame", "analytics") - .addPathPart(putRequest.getConfig().getId()) - .build(); - Request request = new Request(HttpPut.METHOD_NAME, endpoint); - request.setEntity(createEntity(putRequest, REQUEST_BODY_CONTENT_TYPE)); - return request; - } - - static Request updateDataFrameAnalytics(UpdateDataFrameAnalyticsRequest updateRequest) throws IOException { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml", "data_frame", "analytics") - 
.addPathPart(updateRequest.getUpdate().getId()) - .addPathPartAsIs("_update") - .build(); - Request request = new Request(HttpPost.METHOD_NAME, endpoint); - request.setEntity(createEntity(updateRequest, REQUEST_BODY_CONTENT_TYPE)); - return request; - } - - static Request getDataFrameAnalytics(GetDataFrameAnalyticsRequest getRequest) { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml", "data_frame", "analytics") - .addPathPart(Strings.collectionToCommaDelimitedString(getRequest.getIds())) - .build(); - Request request = new Request(HttpGet.METHOD_NAME, endpoint); - RequestConverters.Params params = new RequestConverters.Params(); - if (getRequest.getPageParams() != null) { - PageParams pageParams = getRequest.getPageParams(); - if (pageParams.getFrom() != null) { - params.putParam(PageParams.FROM.getPreferredName(), pageParams.getFrom().toString()); - } - if (pageParams.getSize() != null) { - params.putParam(PageParams.SIZE.getPreferredName(), pageParams.getSize().toString()); - } - } - if (getRequest.getAllowNoMatch() != null) { - params.putParam(GetDataFrameAnalyticsRequest.ALLOW_NO_MATCH, Boolean.toString(getRequest.getAllowNoMatch())); - } - if (getRequest.getExcludeGenerated() != null) { - params.putParam(GetDataFrameAnalyticsRequest.EXCLUDE_GENERATED, Boolean.toString(getRequest.getExcludeGenerated())); - } - request.addParameters(params.asMap()); - return request; - } - - static Request getDataFrameAnalyticsStats(GetDataFrameAnalyticsStatsRequest getStatsRequest) { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml", "data_frame", "analytics") - .addPathPart(Strings.collectionToCommaDelimitedString(getStatsRequest.getIds())) - .addPathPartAsIs("_stats") - .build(); - Request request = new Request(HttpGet.METHOD_NAME, endpoint); - RequestConverters.Params params = new RequestConverters.Params(); - if (getStatsRequest.getPageParams() != null) { - PageParams pageParams = getStatsRequest.getPageParams(); - if (pageParams.getFrom() != null) { - params.putParam(PageParams.FROM.getPreferredName(), pageParams.getFrom().toString()); - } - if (pageParams.getSize() != null) { - params.putParam(PageParams.SIZE.getPreferredName(), pageParams.getSize().toString()); - } - } - if (getStatsRequest.getAllowNoMatch() != null) { - params.putParam( - GetDataFrameAnalyticsStatsRequest.ALLOW_NO_MATCH.getPreferredName(), - Boolean.toString(getStatsRequest.getAllowNoMatch()) - ); - } - request.addParameters(params.asMap()); - return request; - } - - static Request startDataFrameAnalytics(StartDataFrameAnalyticsRequest startRequest) { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml", "data_frame", "analytics") - .addPathPart(startRequest.getId()) - .addPathPartAsIs("_start") - .build(); - Request request = new Request(HttpPost.METHOD_NAME, endpoint); - RequestConverters.Params params = new RequestConverters.Params(); - if (startRequest.getTimeout() != null) { - params.withTimeout(startRequest.getTimeout()); - } - request.addParameters(params.asMap()); - return request; - } - - static Request stopDataFrameAnalytics(StopDataFrameAnalyticsRequest stopRequest) { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml", "data_frame", "analytics") - .addPathPart(stopRequest.getId()) - .addPathPartAsIs("_stop") - .build(); - Request request = new Request(HttpPost.METHOD_NAME, endpoint); - RequestConverters.Params params = new RequestConverters.Params(); - if (stopRequest.getTimeout() != null) { - params.withTimeout(stopRequest.getTimeout()); - } - if 
(stopRequest.getAllowNoMatch() != null) { - params.putParam( - StopDataFrameAnalyticsRequest.ALLOW_NO_MATCH.getPreferredName(), - Boolean.toString(stopRequest.getAllowNoMatch()) - ); - } - if (stopRequest.getForce() != null) { - params.putParam(StopDataFrameAnalyticsRequest.FORCE.getPreferredName(), Boolean.toString(stopRequest.getForce())); - } - request.addParameters(params.asMap()); - return request; - } - - static Request deleteDataFrameAnalytics(DeleteDataFrameAnalyticsRequest deleteRequest) { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml", "data_frame", "analytics") - .addPathPart(deleteRequest.getId()) - .build(); - - Request request = new Request(HttpDelete.METHOD_NAME, endpoint); - - RequestConverters.Params params = new RequestConverters.Params(); - if (deleteRequest.getForce() != null) { - params.putParam("force", Boolean.toString(deleteRequest.getForce())); - } - if (deleteRequest.getTimeout() != null) { - params.withTimeout(deleteRequest.getTimeout()); - } - request.addParameters(params.asMap()); - - return request; - } - - static Request evaluateDataFrame(EvaluateDataFrameRequest evaluateRequest) throws IOException { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml", "data_frame", "_evaluate").build(); - Request request = new Request(HttpPost.METHOD_NAME, endpoint); - request.setEntity(createEntity(evaluateRequest, REQUEST_BODY_CONTENT_TYPE)); - return request; - } - - static Request explainDataFrameAnalytics(ExplainDataFrameAnalyticsRequest explainRequest) throws IOException { - EndpointBuilder endpoint = new EndpointBuilder().addPathPartAsIs("_ml", "data_frame", "analytics"); - if (explainRequest.getId() != null) { - endpoint.addPathPart(explainRequest.getId()); - } - endpoint.addPathPartAsIs("_explain"); - - Request request = new Request(HttpPost.METHOD_NAME, endpoint.build()); - if (explainRequest.getConfig() != null) { - request.setEntity(createEntity(explainRequest.getConfig(), REQUEST_BODY_CONTENT_TYPE)); - } - return request; - } - - static Request getTrainedModels(GetTrainedModelsRequest getTrainedModelsRequest) { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml", "trained_models") - .addPathPart(Strings.collectionToCommaDelimitedString(getTrainedModelsRequest.getIds())) - .build(); - RequestConverters.Params params = new RequestConverters.Params(); - if (getTrainedModelsRequest.getPageParams() != null) { - PageParams pageParams = getTrainedModelsRequest.getPageParams(); - if (pageParams.getFrom() != null) { - params.putParam(PageParams.FROM.getPreferredName(), pageParams.getFrom().toString()); - } - if (pageParams.getSize() != null) { - params.putParam(PageParams.SIZE.getPreferredName(), pageParams.getSize().toString()); - } - } - if (getTrainedModelsRequest.getAllowNoMatch() != null) { - params.putParam(GetTrainedModelsRequest.ALLOW_NO_MATCH, Boolean.toString(getTrainedModelsRequest.getAllowNoMatch())); - } - if (getTrainedModelsRequest.getDecompressDefinition() != null) { - params.putParam( - GetTrainedModelsRequest.DECOMPRESS_DEFINITION, - Boolean.toString(getTrainedModelsRequest.getDecompressDefinition()) - ); - } - if (getTrainedModelsRequest.getIncludes().isEmpty() == false) { - params.putParam( - GetTrainedModelsRequest.INCLUDE, - Strings.collectionToCommaDelimitedString(getTrainedModelsRequest.getIncludes()) - ); - } - if (getTrainedModelsRequest.getTags() != null) { - params.putParam(GetTrainedModelsRequest.TAGS, Strings.collectionToCommaDelimitedString(getTrainedModelsRequest.getTags())); - } - if 
(getTrainedModelsRequest.getExcludeGenerated() != null) { - params.putParam(GetTrainedModelsRequest.EXCLUDE_GENERATED, Boolean.toString(getTrainedModelsRequest.getExcludeGenerated())); - } - Request request = new Request(HttpGet.METHOD_NAME, endpoint); - request.addParameters(params.asMap()); - return request; - } - - static Request getTrainedModelsStats(GetTrainedModelsStatsRequest getTrainedModelsStatsRequest) { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml", "trained_models") - .addPathPart(Strings.collectionToCommaDelimitedString(getTrainedModelsStatsRequest.getIds())) - .addPathPart("_stats") - .build(); - RequestConverters.Params params = new RequestConverters.Params(); - if (getTrainedModelsStatsRequest.getPageParams() != null) { - PageParams pageParams = getTrainedModelsStatsRequest.getPageParams(); - if (pageParams.getFrom() != null) { - params.putParam(PageParams.FROM.getPreferredName(), pageParams.getFrom().toString()); - } - if (pageParams.getSize() != null) { - params.putParam(PageParams.SIZE.getPreferredName(), pageParams.getSize().toString()); - } - } - if (getTrainedModelsStatsRequest.getAllowNoMatch() != null) { - params.putParam(GetTrainedModelsStatsRequest.ALLOW_NO_MATCH, Boolean.toString(getTrainedModelsStatsRequest.getAllowNoMatch())); - } - Request request = new Request(HttpGet.METHOD_NAME, endpoint); - request.addParameters(params.asMap()); - return request; - } - - static Request deleteTrainedModel(DeleteTrainedModelRequest deleteRequest) { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml", "trained_models").addPathPart(deleteRequest.getId()).build(); - return new Request(HttpDelete.METHOD_NAME, endpoint); - } - - static Request putTrainedModel(PutTrainedModelRequest putTrainedModelRequest) throws IOException { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml", "trained_models") - .addPathPart(putTrainedModelRequest.getTrainedModelConfig().getModelId()) - .build(); - Request request = new Request(HttpPut.METHOD_NAME, endpoint); - request.setEntity(createEntity(putTrainedModelRequest, REQUEST_BODY_CONTENT_TYPE)); - return request; - } - - static Request putTrainedModelAlias(PutTrainedModelAliasRequest putTrainedModelAliasRequest) throws IOException { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml", "trained_models") - .addPathPart(putTrainedModelAliasRequest.getModelId()) - .addPathPartAsIs("model_aliases") - .addPathPart(putTrainedModelAliasRequest.getModelAlias()) - .build(); - Request request = new Request(HttpPut.METHOD_NAME, endpoint); - RequestConverters.Params params = new RequestConverters.Params(); - if (putTrainedModelAliasRequest.getReassign() != null) { - params.putParam(PutTrainedModelAliasRequest.REASSIGN, Boolean.toString(putTrainedModelAliasRequest.getReassign())); - } - request.addParameters(params.asMap()); - return request; - } - - static Request deleteTrainedModelAlias(DeleteTrainedModelAliasRequest deleteTrainedModelAliasRequest) throws IOException { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml", "trained_models") - .addPathPart(deleteTrainedModelAliasRequest.getModelId()) - .addPathPartAsIs("model_aliases") - .addPathPart(deleteTrainedModelAliasRequest.getModelAlias()) - .build(); - return new Request(HttpDelete.METHOD_NAME, endpoint); - } - - static Request putFilter(PutFilterRequest putFilterRequest) throws IOException { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml") - .addPathPartAsIs("filters") - 
.addPathPart(putFilterRequest.getMlFilter().getId()) - .build(); - Request request = new Request(HttpPut.METHOD_NAME, endpoint); - request.setEntity(createEntity(putFilterRequest, REQUEST_BODY_CONTENT_TYPE)); - return request; - } - - static Request getFilter(GetFiltersRequest getFiltersRequest) { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml") - .addPathPartAsIs("filters") - .addPathPart(getFiltersRequest.getFilterId()) - .build(); - Request request = new Request(HttpGet.METHOD_NAME, endpoint); - RequestConverters.Params params = new RequestConverters.Params(); - if (getFiltersRequest.getSize() != null) { - params.putParam(PageParams.SIZE.getPreferredName(), getFiltersRequest.getSize().toString()); - } - if (getFiltersRequest.getFrom() != null) { - params.putParam(PageParams.FROM.getPreferredName(), getFiltersRequest.getFrom().toString()); - } - request.addParameters(params.asMap()); - return request; - } - - static Request updateFilter(UpdateFilterRequest updateFilterRequest) throws IOException { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml") - .addPathPartAsIs("filters") - .addPathPart(updateFilterRequest.getFilterId()) - .addPathPartAsIs("_update") - .build(); - Request request = new Request(HttpPost.METHOD_NAME, endpoint); - request.setEntity(createEntity(updateFilterRequest, REQUEST_BODY_CONTENT_TYPE)); - return request; - } - - static Request deleteFilter(DeleteFilterRequest deleteFilterRequest) { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml", "filters").addPathPart(deleteFilterRequest.getId()).build(); - Request request = new Request(HttpDelete.METHOD_NAME, endpoint); - return request; - } - - static Request setUpgradeMode(SetUpgradeModeRequest setUpgradeModeRequest) { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml", "set_upgrade_mode").build(); - Request request = new Request(HttpPost.METHOD_NAME, endpoint); - RequestConverters.Params params = new RequestConverters.Params(); - params.putParam(SetUpgradeModeRequest.ENABLED.getPreferredName(), Boolean.toString(setUpgradeModeRequest.isEnabled())); - if (setUpgradeModeRequest.getTimeout() != null) { - params.putParam(SetUpgradeModeRequest.TIMEOUT.getPreferredName(), setUpgradeModeRequest.getTimeout().toString()); - } - request.addParameters(params.asMap()); - return request; - } - - static Request mlInfo(MlInfoRequest infoRequest) { - String endpoint = new EndpointBuilder().addPathPartAsIs("_ml", "info").build(); - return new Request(HttpGet.METHOD_NAME, endpoint); - } - -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/MachineLearningClient.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/MachineLearningClient.java deleted file mode 100644 index 1272ca1658578..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/MachineLearningClient.java +++ /dev/null @@ -1,2992 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ -package org.elasticsearch.client; - -import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.support.master.AcknowledgedResponse; -import org.elasticsearch.client.ml.CloseJobRequest; -import org.elasticsearch.client.ml.CloseJobResponse; -import org.elasticsearch.client.ml.DeleteCalendarEventRequest; -import org.elasticsearch.client.ml.DeleteCalendarJobRequest; -import org.elasticsearch.client.ml.DeleteCalendarRequest; -import org.elasticsearch.client.ml.DeleteDataFrameAnalyticsRequest; -import org.elasticsearch.client.ml.DeleteDatafeedRequest; -import org.elasticsearch.client.ml.DeleteExpiredDataRequest; -import org.elasticsearch.client.ml.DeleteExpiredDataResponse; -import org.elasticsearch.client.ml.DeleteFilterRequest; -import org.elasticsearch.client.ml.DeleteForecastRequest; -import org.elasticsearch.client.ml.DeleteJobRequest; -import org.elasticsearch.client.ml.DeleteJobResponse; -import org.elasticsearch.client.ml.DeleteModelSnapshotRequest; -import org.elasticsearch.client.ml.DeleteTrainedModelAliasRequest; -import org.elasticsearch.client.ml.DeleteTrainedModelRequest; -import org.elasticsearch.client.ml.EstimateModelMemoryRequest; -import org.elasticsearch.client.ml.EstimateModelMemoryResponse; -import org.elasticsearch.client.ml.EvaluateDataFrameRequest; -import org.elasticsearch.client.ml.EvaluateDataFrameResponse; -import org.elasticsearch.client.ml.ExplainDataFrameAnalyticsRequest; -import org.elasticsearch.client.ml.ExplainDataFrameAnalyticsResponse; -import org.elasticsearch.client.ml.FlushJobRequest; -import org.elasticsearch.client.ml.FlushJobResponse; -import org.elasticsearch.client.ml.ForecastJobRequest; -import org.elasticsearch.client.ml.ForecastJobResponse; -import org.elasticsearch.client.ml.GetBucketsRequest; -import org.elasticsearch.client.ml.GetBucketsResponse; -import org.elasticsearch.client.ml.GetCalendarEventsRequest; -import org.elasticsearch.client.ml.GetCalendarEventsResponse; -import org.elasticsearch.client.ml.GetCalendarsRequest; -import org.elasticsearch.client.ml.GetCalendarsResponse; -import org.elasticsearch.client.ml.GetCategoriesRequest; -import org.elasticsearch.client.ml.GetCategoriesResponse; -import org.elasticsearch.client.ml.GetDataFrameAnalyticsRequest; -import org.elasticsearch.client.ml.GetDataFrameAnalyticsResponse; -import org.elasticsearch.client.ml.GetDataFrameAnalyticsStatsRequest; -import org.elasticsearch.client.ml.GetDataFrameAnalyticsStatsResponse; -import org.elasticsearch.client.ml.GetDatafeedRequest; -import org.elasticsearch.client.ml.GetDatafeedResponse; -import org.elasticsearch.client.ml.GetDatafeedStatsRequest; -import org.elasticsearch.client.ml.GetDatafeedStatsResponse; -import org.elasticsearch.client.ml.GetFiltersRequest; -import org.elasticsearch.client.ml.GetFiltersResponse; -import org.elasticsearch.client.ml.GetInfluencersRequest; -import org.elasticsearch.client.ml.GetInfluencersResponse; -import org.elasticsearch.client.ml.GetJobRequest; -import org.elasticsearch.client.ml.GetJobResponse; -import org.elasticsearch.client.ml.GetJobStatsRequest; -import org.elasticsearch.client.ml.GetJobStatsResponse; -import org.elasticsearch.client.ml.GetModelSnapshotsRequest; -import org.elasticsearch.client.ml.GetModelSnapshotsResponse; -import org.elasticsearch.client.ml.GetOverallBucketsRequest; -import org.elasticsearch.client.ml.GetOverallBucketsResponse; -import org.elasticsearch.client.ml.GetRecordsRequest; -import org.elasticsearch.client.ml.GetRecordsResponse; -import 
org.elasticsearch.client.ml.GetTrainedModelsRequest; -import org.elasticsearch.client.ml.GetTrainedModelsResponse; -import org.elasticsearch.client.ml.GetTrainedModelsStatsRequest; -import org.elasticsearch.client.ml.GetTrainedModelsStatsResponse; -import org.elasticsearch.client.ml.MlInfoRequest; -import org.elasticsearch.client.ml.MlInfoResponse; -import org.elasticsearch.client.ml.OpenJobRequest; -import org.elasticsearch.client.ml.OpenJobResponse; -import org.elasticsearch.client.ml.PostCalendarEventRequest; -import org.elasticsearch.client.ml.PostCalendarEventResponse; -import org.elasticsearch.client.ml.PostDataRequest; -import org.elasticsearch.client.ml.PostDataResponse; -import org.elasticsearch.client.ml.PreviewDatafeedRequest; -import org.elasticsearch.client.ml.PreviewDatafeedResponse; -import org.elasticsearch.client.ml.PutCalendarJobRequest; -import org.elasticsearch.client.ml.PutCalendarRequest; -import org.elasticsearch.client.ml.PutCalendarResponse; -import org.elasticsearch.client.ml.PutDataFrameAnalyticsRequest; -import org.elasticsearch.client.ml.PutDataFrameAnalyticsResponse; -import org.elasticsearch.client.ml.PutDatafeedRequest; -import org.elasticsearch.client.ml.PutDatafeedResponse; -import org.elasticsearch.client.ml.PutFilterRequest; -import org.elasticsearch.client.ml.PutFilterResponse; -import org.elasticsearch.client.ml.PutJobRequest; -import org.elasticsearch.client.ml.PutJobResponse; -import org.elasticsearch.client.ml.PutTrainedModelAliasRequest; -import org.elasticsearch.client.ml.PutTrainedModelRequest; -import org.elasticsearch.client.ml.PutTrainedModelResponse; -import org.elasticsearch.client.ml.RevertModelSnapshotRequest; -import org.elasticsearch.client.ml.RevertModelSnapshotResponse; -import org.elasticsearch.client.ml.SetUpgradeModeRequest; -import org.elasticsearch.client.ml.StartDataFrameAnalyticsRequest; -import org.elasticsearch.client.ml.StartDataFrameAnalyticsResponse; -import org.elasticsearch.client.ml.StartDatafeedRequest; -import org.elasticsearch.client.ml.StartDatafeedResponse; -import org.elasticsearch.client.ml.StopDataFrameAnalyticsRequest; -import org.elasticsearch.client.ml.StopDataFrameAnalyticsResponse; -import org.elasticsearch.client.ml.StopDatafeedRequest; -import org.elasticsearch.client.ml.StopDatafeedResponse; -import org.elasticsearch.client.ml.UpdateDataFrameAnalyticsRequest; -import org.elasticsearch.client.ml.UpdateDatafeedRequest; -import org.elasticsearch.client.ml.UpdateFilterRequest; -import org.elasticsearch.client.ml.UpdateJobRequest; -import org.elasticsearch.client.ml.UpdateModelSnapshotRequest; -import org.elasticsearch.client.ml.UpdateModelSnapshotResponse; -import org.elasticsearch.client.ml.UpgradeJobModelSnapshotRequest; -import org.elasticsearch.client.ml.UpgradeJobModelSnapshotResponse; -import org.elasticsearch.client.ml.job.stats.JobStats; - -import java.io.IOException; -import java.util.Collections; - -/** - * Machine Learning API client wrapper for the {@link RestHighLevelClient} - *
- * <p>
- * See the - * X-Pack Machine Learning APIs for additional information. - * - * @deprecated The High Level Rest Client is deprecated in favor of the - * - * Elasticsearch Java API Client - */ -@Deprecated(since = "7.16.0", forRemoval = true) -@SuppressWarnings("removal") -public final class MachineLearningClient { - - private final RestHighLevelClient restHighLevelClient; - - MachineLearningClient(RestHighLevelClient restHighLevelClient) { - this.restHighLevelClient = restHighLevelClient; - } - - /** - * Creates a new Machine Learning Job - *
- * <p>
- * For additional info - * see ML PUT job documentation - * - * @param request The PutJobRequest containing the {@link org.elasticsearch.client.ml.job.config.Job} settings - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return PutJobResponse with enclosed {@link org.elasticsearch.client.ml.job.config.Job} object - * @throws IOException when there is a serialization issue sending the request or receiving the response - */ - public PutJobResponse putJob(PutJobRequest request, RequestOptions options) throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - request, - MLRequestConverters::putJob, - options, - PutJobResponse::fromXContent, - Collections.emptySet() - ); - } - - /** - * Creates a new Machine Learning Job asynchronously and notifies listener on completion - *
- * <p>
- * For additional info - * see ML PUT job documentation - * @param request The request containing the {@link org.elasticsearch.client.ml.job.config.Job} settings - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener Listener to be notified upon request completion - * @return cancellable that may be used to cancel the request - */ - public Cancellable putJobAsync(PutJobRequest request, RequestOptions options, ActionListener listener) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - request, - MLRequestConverters::putJob, - options, - PutJobResponse::fromXContent, - listener, - Collections.emptySet() - ); - } - - /** - * Gets one or more Machine Learning job configuration info. - *
- * <p>
- * For additional info - * see ML GET job documentation - * - * @param request {@link GetJobRequest} Request containing a list of jobId(s) and additional options - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return {@link GetJobResponse} response object containing - * the {@link org.elasticsearch.client.ml.job.config.Job} objects and the number of jobs found - * @throws IOException when there is a serialization issue sending the request or receiving the response - */ - public GetJobResponse getJob(GetJobRequest request, RequestOptions options) throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - request, - MLRequestConverters::getJob, - options, - GetJobResponse::fromXContent, - Collections.emptySet() - ); - } - - /** - * Gets one or more Machine Learning job configuration info, asynchronously. - *
- * <p>
- * For additional info - * see ML GET job documentation - * @param request {@link GetJobRequest} Request containing a list of jobId(s) and additional options - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener Listener to be notified with {@link GetJobResponse} upon request completion - * @return cancellable that may be used to cancel the request - */ - public Cancellable getJobAsync(GetJobRequest request, RequestOptions options, ActionListener listener) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - request, - MLRequestConverters::getJob, - options, - GetJobResponse::fromXContent, - listener, - Collections.emptySet() - ); - } - - /** - * Gets usage statistics for one or more Machine Learning jobs - *
- * <p>
- * For additional info - * see Get job stats docs - * - * @param request {@link GetJobStatsRequest} Request containing a list of jobId(s) and additional options - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return {@link GetJobStatsResponse} response object containing - * the {@link JobStats} objects and the number of jobs found - * @throws IOException when there is a serialization issue sending the request or receiving the response - */ - public GetJobStatsResponse getJobStats(GetJobStatsRequest request, RequestOptions options) throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - request, - MLRequestConverters::getJobStats, - options, - GetJobStatsResponse::fromXContent, - Collections.emptySet() - ); - } - - /** - * Gets usage statistics for one or more Machine Learning jobs, asynchronously. - *
- * <p>
- * For additional info - * see Get job stats docs - * @param request {@link GetJobStatsRequest} Request containing a list of jobId(s) and additional options - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener Listener to be notified with {@link GetJobStatsResponse} upon request completion - * @return cancellable that may be used to cancel the request - */ - public Cancellable getJobStatsAsync(GetJobStatsRequest request, RequestOptions options, ActionListener listener) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - request, - MLRequestConverters::getJobStats, - options, - GetJobStatsResponse::fromXContent, - listener, - Collections.emptySet() - ); - } - - /** - * Deletes expired data from Machine Learning Jobs - *
- * <p>
- * For additional info - * see ML Delete Expired Data - * documentation - * - * @param request The request to delete expired ML data - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return The action response which contains the acknowledgement or the task id depending on whether the action was set to wait for - * completion - * @throws IOException when there is a serialization issue sending the request or receiving the response - */ - public DeleteExpiredDataResponse deleteExpiredData(DeleteExpiredDataRequest request, RequestOptions options) throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - request, - MLRequestConverters::deleteExpiredData, - options, - DeleteExpiredDataResponse::fromXContent, - Collections.emptySet() - ); - } - - /** - * Deletes expired data from Machine Learning Jobs asynchronously and notifies the listener on completion - *
- * <p>
- * For additional info - * see ML Delete Expired Data - * documentation - * @param request The request to delete expired ML data - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener Listener to be notified upon request completion - * @return cancellable that may be used to cancel the request - */ - public Cancellable deleteExpiredDataAsync( - DeleteExpiredDataRequest request, - RequestOptions options, - ActionListener listener - ) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - request, - MLRequestConverters::deleteExpiredData, - options, - DeleteExpiredDataResponse::fromXContent, - listener, - Collections.emptySet() - ); - } - - /** - * Deletes the given Machine Learning Job - *
- * <p>
- * For additional info - * see ML Delete job documentation - * - * @param request The request to delete the job - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return The action response which contains the acknowledgement or the task id depending on whether the action was set to wait for - * completion - * @throws IOException when there is a serialization issue sending the request or receiving the response - */ - public DeleteJobResponse deleteJob(DeleteJobRequest request, RequestOptions options) throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - request, - MLRequestConverters::deleteJob, - options, - DeleteJobResponse::fromXContent, - Collections.emptySet() - ); - } - - /** - * Deletes the given Machine Learning Job asynchronously and notifies the listener on completion - *
- * <p>
- * For additional info - * see ML Delete Job documentation - * - * @param request The request to delete the job - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener Listener to be notified upon request completion - * @return cancellable that may be used to cancel the request - */ - public Cancellable deleteJobAsync(DeleteJobRequest request, RequestOptions options, ActionListener listener) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - request, - MLRequestConverters::deleteJob, - options, - DeleteJobResponse::fromXContent, - listener, - Collections.emptySet() - ); - } - - /** - * Opens a Machine Learning Job. - * When you open a new job, it starts with an empty model. - * When you open an existing job, the most recent model state is automatically loaded. - * The job is ready to resume its analysis from where it left off, once new data is received. - *
- * <p>
- * For additional info - * see ML Open Job documentation - * - * @param request Request containing job_id and additional optional options - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return response containing if the job was successfully opened or not. - * @throws IOException when there is a serialization issue sending the request or receiving the response - */ - public OpenJobResponse openJob(OpenJobRequest request, RequestOptions options) throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - request, - MLRequestConverters::openJob, - options, - OpenJobResponse::fromXContent, - Collections.emptySet() - ); - } - - /** - * Opens a Machine Learning Job asynchronously, notifies listener on completion. - * When you open a new job, it starts with an empty model. - * When you open an existing job, the most recent model state is automatically loaded. - * The job is ready to resume its analysis from where it left off, once new data is received. - *
- * <p>
- * For additional info - * see ML Open Job documentation - * - * @param request Request containing job_id and additional optional options - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener Listener to be notified upon request completion - * @return cancellable that may be used to cancel the request - */ - public Cancellable openJobAsync(OpenJobRequest request, RequestOptions options, ActionListener listener) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - request, - MLRequestConverters::openJob, - options, - OpenJobResponse::fromXContent, - listener, - Collections.emptySet() - ); - } - - /** - * Closes one or more Machine Learning Jobs. A job can be opened and closed multiple times throughout its lifecycle. - * A closed job cannot receive data or perform analysis operations, but you can still explore and navigate results. - *
- * <p>
- * For additional info - * see ML Close Job documentation - * - * @param request Request containing job_ids and additional options. See {@link CloseJobRequest} - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return response containing if the job was successfully closed or not. - * @throws IOException when there is a serialization issue sending the request or receiving the response - */ - public CloseJobResponse closeJob(CloseJobRequest request, RequestOptions options) throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - request, - MLRequestConverters::closeJob, - options, - CloseJobResponse::fromXContent, - Collections.emptySet() - ); - } - - /** - * Closes one or more Machine Learning Jobs asynchronously, notifies listener on completion - * A closed job cannot receive data or perform analysis operations, but you can still explore and navigate results. - *
- * <p>
- * For additional info - * see ML Close Job documentation - * - * @param request Request containing job_ids and additional options. See {@link CloseJobRequest} - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener Listener to be notified upon request completion - * @return cancellable that may be used to cancel the request - */ - public Cancellable closeJobAsync(CloseJobRequest request, RequestOptions options, ActionListener listener) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - request, - MLRequestConverters::closeJob, - options, - CloseJobResponse::fromXContent, - listener, - Collections.emptySet() - ); - } - - /** - * Flushes internally buffered data for the given Machine Learning Job ensuring all data sent to the has been processed. - * This may cause new results to be calculated depending on the contents of the buffer - * Both flush and close operations are similar, - * however the flush is more efficient if you are expecting to send more data for analysis. - * When flushing, the job remains open and is available to continue analyzing data. - * A close operation additionally prunes and persists the model state to disk and the - * job must be opened again before analyzing further data. - *
- * <p>
- * For additional info - * see Flush ML job documentation - * - * @param request The {@link FlushJobRequest} object enclosing the `jobId` and additional request options - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @throws IOException when there is a serialization issue sending the request or receiving the response - */ - public FlushJobResponse flushJob(FlushJobRequest request, RequestOptions options) throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - request, - MLRequestConverters::flushJob, - options, - FlushJobResponse::fromXContent, - Collections.emptySet() - ); - } - - /** - * Flushes internally buffered data for the given Machine Learning Job asynchronously ensuring all data sent to the has been processed. - * This may cause new results to be calculated depending on the contents of the buffer - * Both flush and close operations are similar, - * however the flush is more efficient if you are expecting to send more data for analysis. - * When flushing, the job remains open and is available to continue analyzing data. - * A close operation additionally prunes and persists the model state to disk and the - * job must be opened again before analyzing further data. - *
- * <p>
- * For additional info - * see Flush ML job documentation - * - * @param request The {@link FlushJobRequest} object enclosing the `jobId` and additional request options - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener Listener to be notified upon request completion - * @return cancellable that may be used to cancel the request - */ - public Cancellable flushJobAsync(FlushJobRequest request, RequestOptions options, ActionListener listener) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - request, - MLRequestConverters::flushJob, - options, - FlushJobResponse::fromXContent, - listener, - Collections.emptySet() - ); - } - - /** - * Creates a forecast of an existing, opened Machine Learning Job - * This predicts the future behavior of a time series by using its historical behavior. - *
- * For additional info - * see Forecast ML Job Documentation - * - * @param request ForecastJobRequest with forecasting options - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return response containing forecast acknowledgement and new forecast's ID - * @throws IOException when there is a serialization issue sending the request or receiving the response - */ - public ForecastJobResponse forecastJob(ForecastJobRequest request, RequestOptions options) throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - request, - MLRequestConverters::forecastJob, - options, - ForecastJobResponse::fromXContent, - Collections.emptySet() - ); - } -
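A forecasting sketch; the setDuration call is an assumption about the request class, and TimeValue's package moved across versions (org.elasticsearch.core.TimeValue is assumed here). Ids and client remain placeholders:

    import org.elasticsearch.client.RequestOptions;
    import org.elasticsearch.client.ml.ForecastJobRequest;
    import org.elasticsearch.client.ml.ForecastJobResponse;
    import org.elasticsearch.core.TimeValue;

    // Ask the open job "my-job" to predict the next 24 hours from its model.
    ForecastJobRequest forecastRequest = new ForecastJobRequest("my-job");
    forecastRequest.setDuration(TimeValue.timeValueHours(24));
    ForecastJobResponse forecastResponse = mlClient.forecastJob(forecastRequest, RequestOptions.DEFAULT);
    // The returned id identifies this forecast's result documents.
    String forecastId = forecastResponse.getForecastId();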
- /** - * Creates a forecast of an existing, opened Machine Learning Job asynchronously - * This predicts the future behavior of a time series by using its historical behavior. - *
- * For additional info - * see Forecast ML Job Documentation - * - * @param request ForecastJobRequest with forecasting options - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener Listener to be notified upon request completion - * @return cancellable that may be used to cancel the request - */ - public Cancellable forecastJobAsync(ForecastJobRequest request, RequestOptions options, ActionListener listener) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - request, - MLRequestConverters::forecastJob, - options, - ForecastJobResponse::fromXContent, - listener, - Collections.emptySet() - ); - } - - /** - * Deletes Machine Learning Job Forecasts - *
- * For additional info - * see Delete Job Forecast - * Documentation - * - * @param request the {@link DeleteForecastRequest} object enclosing the desired jobId, forecastIDs, and other options - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return an AcknowledgedResponse object indicating request success - * @throws IOException when there is a serialization issue sending the request or receiving the response - */ - public AcknowledgedResponse deleteForecast(DeleteForecastRequest request, RequestOptions options) throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - request, - MLRequestConverters::deleteForecast, - options, - AcknowledgedResponse::fromXContent, - Collections.emptySet() - ); - } -
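A deletion sketch; setForecastIds is assumed to accept the ids to remove, and "forecastId" refers to the value returned by the forecast sketch above:

    import org.elasticsearch.client.RequestOptions;
    import org.elasticsearch.client.core.AcknowledgedResponse;
    import org.elasticsearch.client.ml.DeleteForecastRequest;

    // Remove one finished forecast from the hypothetical job "my-job".
    DeleteForecastRequest deleteForecastRequest = new DeleteForecastRequest("my-job");
    deleteForecastRequest.setForecastIds(forecastId);
    AcknowledgedResponse deleteForecastAck = mlClient.deleteForecast(deleteForecastRequest, RequestOptions.DEFAULT);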
- /** - * Deletes Machine Learning Job Forecasts asynchronously - *
- * For additional info - * see Delete Job Forecast - * Documentation - * - * @param request the {@link DeleteForecastRequest} object enclosing the desired jobId, forecastIDs, and other options - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener Listener to be notified upon request completion - * @return cancellable that may be used to cancel the request - */ - public Cancellable deleteForecastAsync( - DeleteForecastRequest request, - RequestOptions options, - ActionListener listener - ) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - request, - MLRequestConverters::deleteForecast, - options, - AcknowledgedResponse::fromXContent, - listener, - Collections.emptySet() - ); - } - - /** - * Deletes Machine Learning Model Snapshots - *
- * For additional info - * see - * ML Delete Model Snapshot documentation - * - * @param request The request to delete the model snapshot - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return action acknowledgement - * @throws IOException when there is a serialization issue sending the request or receiving the response - */ - public AcknowledgedResponse deleteModelSnapshot(DeleteModelSnapshotRequest request, RequestOptions options) throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - request, - MLRequestConverters::deleteModelSnapshot, - options, - AcknowledgedResponse::fromXContent, - Collections.emptySet() - ); - } - - /** - * Deletes Machine Learning Model Snapshots asynchronously and notifies the listener on completion - *
- * For additional info - * see - * ML Delete Model Snapshot documentation - * - * @param request The request to delete the model snapshot - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener Listener to be notified upon request completion - * @return cancellable that may be used to cancel the request - */ - public Cancellable deleteModelSnapshotAsync( - DeleteModelSnapshotRequest request, - RequestOptions options, - ActionListener listener - ) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - request, - MLRequestConverters::deleteModelSnapshot, - options, - AcknowledgedResponse::fromXContent, - listener, - Collections.emptySet() - ); - } - - /** - * Reverts to a particular Machine Learning Model Snapshot - *
- * For additional info - * see - * ML Revert Model Snapshot documentation - * - * @param request The request to revert to a previous model snapshot - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return action acknowledgement - * @throws IOException when there is a serialization issue sending the request or receiving the response - */ - public RevertModelSnapshotResponse revertModelSnapshot(RevertModelSnapshotRequest request, RequestOptions options) throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - request, - MLRequestConverters::revertModelSnapshot, - options, - RevertModelSnapshotResponse::fromXContent, - Collections.emptySet() - ); - } - - /** - * Reverts to a particular Machine Learning Model Snapshot asynchronously and notifies the listener on completion - *
- * For additional info - * see - * ML Revert Model Snapshot documentation - * - * @param request The request to revert to a previous model snapshot - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener Listener to be notified upon request completion - * @return cancellable that may be used to cancel the request - */ - public Cancellable revertModelSnapshotAsync( - RevertModelSnapshotRequest request, - RequestOptions options, - ActionListener listener - ) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - request, - MLRequestConverters::revertModelSnapshot, - options, - RevertModelSnapshotResponse::fromXContent, - listener, - Collections.emptySet() - ); - } - - /** - * Creates a new Machine Learning Datafeed - *
- * For additional info - * see ML PUT datafeed documentation - * - * @param request The PutDatafeedRequest containing the {@link org.elasticsearch.client.ml.datafeed.DatafeedConfig} settings - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return PutDatafeedResponse with enclosed {@link org.elasticsearch.client.ml.datafeed.DatafeedConfig} object - * @throws IOException when there is a serialization issue sending the request or receiving the response - */ - public PutDatafeedResponse putDatafeed(PutDatafeedRequest request, RequestOptions options) throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - request, - MLRequestConverters::putDatafeed, - options, - PutDatafeedResponse::fromXContent, - Collections.emptySet() - ); - } -
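A datafeed-creation sketch; the DatafeedConfig.builder(id, jobId) and varargs setIndices shapes are assumptions about the client's datafeed package, and the ids and index name are placeholders:

    import org.elasticsearch.client.RequestOptions;
    import org.elasticsearch.client.ml.PutDatafeedRequest;
    import org.elasticsearch.client.ml.PutDatafeedResponse;
    import org.elasticsearch.client.ml.datafeed.DatafeedConfig;

    // A datafeed "df-1" that feeds documents from index "events" into "my-job".
    DatafeedConfig datafeedConfig = DatafeedConfig.builder("df-1", "my-job")
        .setIndices("events")
        .build();
    PutDatafeedResponse putDatafeedResponse = mlClient.putDatafeed(new PutDatafeedRequest(datafeedConfig), RequestOptions.DEFAULT);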
- /** - * Creates a new Machine Learning Datafeed asynchronously and notifies listener on completion - *
- * For additional info - * see ML PUT datafeed documentation - * - * @param request The request containing the {@link org.elasticsearch.client.ml.datafeed.DatafeedConfig} settings - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener Listener to be notified upon request completion - * @return cancellable that may be used to cancel the request - */ - public Cancellable putDatafeedAsync(PutDatafeedRequest request, RequestOptions options, ActionListener listener) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - request, - MLRequestConverters::putDatafeed, - options, - PutDatafeedResponse::fromXContent, - listener, - Collections.emptySet() - ); - } - - /** - * Updates a Machine Learning Datafeed - *
- * For additional info - * see - * ML Update datafeed documentation - * - * @param request The UpdateDatafeedRequest containing the {@link org.elasticsearch.client.ml.datafeed.DatafeedUpdate} settings - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return PutDatafeedResponse with enclosed, updated {@link org.elasticsearch.client.ml.datafeed.DatafeedConfig} object - * @throws IOException when there is a serialization issue sending the request or receiving the response - */ - public PutDatafeedResponse updateDatafeed(UpdateDatafeedRequest request, RequestOptions options) throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - request, - MLRequestConverters::updateDatafeed, - options, - PutDatafeedResponse::fromXContent, - Collections.emptySet() - ); - } - - /** - * Updates a Machine Learning Datafeed asynchronously and notifies listener on completion - *
- * For additional info - * see - * ML Update datafeed documentation - * - * @param request The request containing the {@link org.elasticsearch.client.ml.datafeed.DatafeedUpdate} settings - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener Listener to be notified upon request completion - * @return cancellable that may be used to cancel the request - */ - public Cancellable updateDatafeedAsync( - UpdateDatafeedRequest request, - RequestOptions options, - ActionListener listener - ) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - request, - MLRequestConverters::updateDatafeed, - options, - PutDatafeedResponse::fromXContent, - listener, - Collections.emptySet() - ); - } - - /** - * Gets one or more Machine Learning datafeed configuration info. - * - *
- * For additional info - * see ML GET datafeed documentation - * - * @param request {@link GetDatafeedRequest} Request containing a list of datafeedId(s) and additional options - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return {@link GetDatafeedResponse} response object containing - * the {@link org.elasticsearch.client.ml.datafeed.DatafeedConfig} objects and the number of datafeeds found - * @throws IOException when there is a serialization issue sending the request or receiving the response - */ - public GetDatafeedResponse getDatafeed(GetDatafeedRequest request, RequestOptions options) throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - request, - MLRequestConverters::getDatafeed, - options, - GetDatafeedResponse::fromXContent, - Collections.emptySet() - ); - } - - /** - * Gets one or more Machine Learning datafeed configuration info, asynchronously. - * - *
- * For additional info - * see ML GET datafeed documentation - * - * @param request {@link GetDatafeedRequest} Request containing a list of datafeedId(s) and additional options - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener Listener to be notified with {@link GetDatafeedResponse} upon request completion - * @return cancellable that may be used to cancel the request - */ - public Cancellable getDatafeedAsync(GetDatafeedRequest request, RequestOptions options, ActionListener listener) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - request, - MLRequestConverters::getDatafeed, - options, - GetDatafeedResponse::fromXContent, - listener, - Collections.emptySet() - ); - } - - /** - * Deletes the given Machine Learning Datafeed - *
- * For additional info - * see - * ML Delete Datafeed documentation - * - * @param request The request to delete the datafeed - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return action acknowledgement - * @throws IOException when there is a serialization issue sending the request or receiving the response - */ - public AcknowledgedResponse deleteDatafeed(DeleteDatafeedRequest request, RequestOptions options) throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - request, - MLRequestConverters::deleteDatafeed, - options, - AcknowledgedResponse::fromXContent, - Collections.emptySet() - ); - } - - /** - * Deletes the given Machine Learning Datafeed asynchronously and notifies the listener on completion - *
- * For additional info - * see - * ML Delete Datafeed documentation - * - * @param request The request to delete the datafeed - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener Listener to be notified upon request completion - * @return cancellable that may be used to cancel the request - */ - public Cancellable deleteDatafeedAsync( - DeleteDatafeedRequest request, - RequestOptions options, - ActionListener listener - ) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - request, - MLRequestConverters::deleteDatafeed, - options, - AcknowledgedResponse::fromXContent, - listener, - Collections.emptySet() - ); - } - - /** - * Starts the given Machine Learning Datafeed - *
- * For additional info - * see - * ML Start Datafeed documentation - * - * @param request The request to start the datafeed - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return action acknowledgement - * @throws IOException when there is a serialization issue sending the request or receiving the response - */ - public StartDatafeedResponse startDatafeed(StartDatafeedRequest request, RequestOptions options) throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - request, - MLRequestConverters::startDatafeed, - options, - StartDatafeedResponse::fromXContent, - Collections.emptySet() - ); - } -
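A start sketch under the same placeholder ids:

    import org.elasticsearch.client.RequestOptions;
    import org.elasticsearch.client.ml.StartDatafeedRequest;
    import org.elasticsearch.client.ml.StartDatafeedResponse;

    // Start "df-1"; with no end time configured the datafeed keeps running
    // and analyzes new data as it is indexed.
    StartDatafeedResponse startResponse = mlClient.startDatafeed(new StartDatafeedRequest("df-1"), RequestOptions.DEFAULT);
    System.out.println("started: " + startResponse.isStarted());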
- /** - * Starts the given Machine Learning Datafeed asynchronously and notifies the listener on completion - *
- * For additional info - * see - * ML Start Datafeed documentation - * - * @param request The request to start the datafeed - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener Listener to be notified upon request completion - * @return cancellable that may be used to cancel the request - */ - public Cancellable startDatafeedAsync( - StartDatafeedRequest request, - RequestOptions options, - ActionListener listener - ) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - request, - MLRequestConverters::startDatafeed, - options, - StartDatafeedResponse::fromXContent, - listener, - Collections.emptySet() - ); - } - - /** - * Stops the given Machine Learning Datafeed - *
- * For additional info - * see - * ML Stop Datafeed documentation - * - * @param request The request to stop the datafeed - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return action acknowledgement - * @throws IOException when there is a serialization issue sending the request or receiving the response - */ - public StopDatafeedResponse stopDatafeed(StopDatafeedRequest request, RequestOptions options) throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - request, - MLRequestConverters::stopDatafeed, - options, - StopDatafeedResponse::fromXContent, - Collections.emptySet() - ); - } - - /** - * Stops the given Machine Learning Datafeed asynchronously and notifies the listener on completion - *
- * For additional info - * see - * ML Stop Datafeed documentation - * - * @param request The request to stop the datafeed - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener Listener to be notified upon request completion - * @return cancellable that may be used to cancel the request - */ - public Cancellable stopDatafeedAsync( - StopDatafeedRequest request, - RequestOptions options, - ActionListener listener - ) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - request, - MLRequestConverters::stopDatafeed, - options, - StopDatafeedResponse::fromXContent, - listener, - Collections.emptySet() - ); - } - - /** - * Gets statistics for one or more Machine Learning datafeeds - *
- * For additional info - * see Get datafeed stats docs - * - * @param request {@link GetDatafeedStatsRequest} Request containing a list of datafeedId(s) and additional options - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return {@link GetDatafeedStatsResponse} response object containing - * the {@link org.elasticsearch.client.ml.datafeed.DatafeedStats} objects and the number of datafeeds found - * @throws IOException when there is a serialization issue sending the request or receiving the response - */ - public GetDatafeedStatsResponse getDatafeedStats(GetDatafeedStatsRequest request, RequestOptions options) throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - request, - MLRequestConverters::getDatafeedStats, - options, - GetDatafeedStatsResponse::fromXContent, - Collections.emptySet() - ); - } -
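A stats sketch; the datafeeds() accessor and the stats getters are assumptions patterned on the client's other stats responses, with placeholder ids:

    import org.elasticsearch.client.RequestOptions;
    import org.elasticsearch.client.ml.GetDatafeedStatsRequest;
    import org.elasticsearch.client.ml.GetDatafeedStatsResponse;
    import org.elasticsearch.client.ml.datafeed.DatafeedStats;

    // Fetch stats for every datafeed whose id matches "df-*".
    GetDatafeedStatsResponse statsResponse = mlClient.getDatafeedStats(new GetDatafeedStatsRequest("df-*"), RequestOptions.DEFAULT);
    for (DatafeedStats stats : statsResponse.datafeeds()) {
        System.out.println(stats.getDatafeedId() + " is " + stats.getDatafeedState());
    }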
- /** - * Previews the given Machine Learning Datafeed - *
- * For additional info - * see - * ML Preview Datafeed documentation - * - * @param request The request to preview the datafeed - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return {@link PreviewDatafeedResponse} object containing a {@link org.elasticsearch.common.bytes.BytesReference} of the data in - * JSON format - * @throws IOException when there is a serialization issue sending the request or receiving the response - */ - public PreviewDatafeedResponse previewDatafeed(PreviewDatafeedRequest request, RequestOptions options) throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - request, - MLRequestConverters::previewDatafeed, - options, - PreviewDatafeedResponse::fromXContent, - Collections.emptySet() - ); - } - - /** - * Gets statistics for one or more Machine Learning datafeeds, asynchronously. - *
- * For additional info - * see Get datafeed stats docs - * - * @param request {@link GetDatafeedStatsRequest} Request containing a list of datafeedId(s) and additional options - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener Listener to be notified with {@link GetDatafeedStatsResponse} upon request completion - * @return cancellable that may be used to cancel the request - */ - public Cancellable getDatafeedStatsAsync( - GetDatafeedStatsRequest request, - RequestOptions options, - ActionListener listener - ) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - request, - MLRequestConverters::getDatafeedStats, - options, - GetDatafeedStatsResponse::fromXContent, - listener, - Collections.emptySet() - ); - } - - /** - * Previews the given Machine Learning Datafeed asynchronously and notifies the listener on completion - *
- * For additional info - * see - * ML Preview Datafeed documentation - * - * @param request The request to preview the datafeed - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener Listener to be notified upon request completion - * @return cancellable that may be used to cancel the request - */ - public Cancellable previewDatafeedAsync( - PreviewDatafeedRequest request, - RequestOptions options, - ActionListener listener - ) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - request, - MLRequestConverters::previewDatafeed, - options, - PreviewDatafeedResponse::fromXContent, - listener, - Collections.emptySet() - ); - } - - /** - * Updates a Machine Learning {@link org.elasticsearch.client.ml.job.config.Job} - *
- * For additional info - * see ML Update Job Documentation - * - * @param request the {@link UpdateJobRequest} object enclosing the desired updates - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return a PutJobResponse object containing the updated job object - * @throws IOException when there is a serialization issue sending the request or receiving the response - */ - public PutJobResponse updateJob(UpdateJobRequest request, RequestOptions options) throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - request, - MLRequestConverters::updateJob, - options, - PutJobResponse::fromXContent, - Collections.emptySet() - ); - } -
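An update sketch built on JobUpdate.Builder; the description text and ids are placeholders:

    import org.elasticsearch.client.RequestOptions;
    import org.elasticsearch.client.ml.PutJobResponse;
    import org.elasticsearch.client.ml.UpdateJobRequest;
    import org.elasticsearch.client.ml.job.config.JobUpdate;

    // Change only the human-readable description of "my-job"; fields left
    // unset in the JobUpdate are not modified on the server.
    JobUpdate update = new JobUpdate.Builder("my-job")
        .setDescription("Nightly anomaly detection for event counts")
        .build();
    PutJobResponse updateResponse = mlClient.updateJob(new UpdateJobRequest(update), RequestOptions.DEFAULT);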
- /** - * Updates a Machine Learning {@link org.elasticsearch.client.ml.job.config.Job} asynchronously - *
- * For additional info - * see ML Update Job Documentation - * - * @param request the {@link UpdateJobRequest} object enclosing the desired updates - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener Listener to be notified upon request completion - * @return cancellable that may be used to cancel the request - */ - public Cancellable updateJobAsync(UpdateJobRequest request, RequestOptions options, ActionListener listener) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - request, - MLRequestConverters::updateJob, - options, - PutJobResponse::fromXContent, - listener, - Collections.emptySet() - ); - } - - /** - * Gets the buckets for a Machine Learning Job. - *
- * For additional info - * see ML GET buckets documentation - * - * @param request The request - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - */ - public GetBucketsResponse getBuckets(GetBucketsRequest request, RequestOptions options) throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - request, - MLRequestConverters::getBuckets, - options, - GetBucketsResponse::fromXContent, - Collections.emptySet() - ); - } -
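A results sketch that pages through buckets; the PageParams values and anomaly-score filter are chosen arbitrarily, and the client and job id remain placeholders:

    import org.elasticsearch.client.RequestOptions;
    import org.elasticsearch.client.ml.GetBucketsRequest;
    import org.elasticsearch.client.ml.GetBucketsResponse;
    import org.elasticsearch.client.ml.job.results.Bucket;
    import org.elasticsearch.client.ml.job.util.PageParams;

    // First 25 buckets of "my-job" with an anomaly score of at least 75.
    GetBucketsRequest bucketsRequest = new GetBucketsRequest("my-job");
    bucketsRequest.setPageParams(new PageParams(0, 25));
    bucketsRequest.setAnomalyScore(75.0);
    GetBucketsResponse bucketsResponse = mlClient.getBuckets(bucketsRequest, RequestOptions.DEFAULT);
    for (Bucket bucket : bucketsResponse.buckets()) {
        System.out.println(bucket.getTimestamp() + " score=" + bucket.getAnomalyScore());
    }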
- /** - * Gets the buckets for a Machine Learning Job, notifies listener once the requested buckets are retrieved. - *
- * For additional info - * see ML GET buckets documentation - * - * @param request The request - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener Listener to be notified upon request completion - * @return cancellable that may be used to cancel the request - */ - public Cancellable getBucketsAsync(GetBucketsRequest request, RequestOptions options, ActionListener listener) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - request, - MLRequestConverters::getBuckets, - options, - GetBucketsResponse::fromXContent, - listener, - Collections.emptySet() - ); - } - - /** - * Gets the categories for a Machine Learning Job. - *
- * For additional info - * see - * ML GET categories documentation - * - * @param request The request - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @throws IOException when there is a serialization issue sending the request or receiving the response - */ - public GetCategoriesResponse getCategories(GetCategoriesRequest request, RequestOptions options) throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - request, - MLRequestConverters::getCategories, - options, - GetCategoriesResponse::fromXContent, - Collections.emptySet() - ); - } - - /** - * Gets the categories for a Machine Learning Job, notifies listener once the requested categories are retrieved. - *
- * For additional info - * see - * ML GET categories documentation - * - * @param request The request - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener Listener to be notified upon request completion - * @return cancellable that may be used to cancel the request - */ - public Cancellable getCategoriesAsync( - GetCategoriesRequest request, - RequestOptions options, - ActionListener listener - ) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - request, - MLRequestConverters::getCategories, - options, - GetCategoriesResponse::fromXContent, - listener, - Collections.emptySet() - ); - } - - /** - * Gets the snapshots for a Machine Learning Job. - *
- * For additional info - * see - * ML GET model snapshots documentation - * - * @param request The request - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @throws IOException when there is a serialization issue sending the request or receiving the response - */ - public GetModelSnapshotsResponse getModelSnapshots(GetModelSnapshotsRequest request, RequestOptions options) throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - request, - MLRequestConverters::getModelSnapshots, - options, - GetModelSnapshotsResponse::fromXContent, - Collections.emptySet() - ); - } - - /** - * Gets the snapshots for a Machine Learning Job, notifies listener once the requested snapshots are retrieved. - *
- * For additional info - * see - * ML GET model snapshots documentation - * - * @param request The request - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener Listener to be notified upon request completion - * @return cancellable that may be used to cancel the request - */ - public Cancellable getModelSnapshotsAsync( - GetModelSnapshotsRequest request, - RequestOptions options, - ActionListener listener - ) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - request, - MLRequestConverters::getModelSnapshots, - options, - GetModelSnapshotsResponse::fromXContent, - listener, - Collections.emptySet() - ); - } - - /** - * Updates a snapshot for a Machine Learning Job. - *
- * For additional info - * see - * ML UPDATE model snapshots documentation - * - * @param request The request - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @throws IOException when there is a serialization issue sending the request or receiving the response - */ - public UpdateModelSnapshotResponse updateModelSnapshot(UpdateModelSnapshotRequest request, RequestOptions options) throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - request, - MLRequestConverters::updateModelSnapshot, - options, - UpdateModelSnapshotResponse::fromXContent, - Collections.emptySet() - ); - } - - /** - * Updates a snapshot for a Machine Learning Job, notifies the listener once the snapshot has been updated. - *
- * For additional info - * see - * ML UPDATE model snapshots documentation - * - * @param request The request - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener Listener to be notified upon request completion - * @return cancellable that may be used to cancel the request - */ - public Cancellable updateModelSnapshotAsync( - UpdateModelSnapshotRequest request, - RequestOptions options, - ActionListener listener - ) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - request, - MLRequestConverters::updateModelSnapshot, - options, - UpdateModelSnapshotResponse::fromXContent, - listener, - Collections.emptySet() - ); - } - - /** - * Upgrades a snapshot for a Machine Learning Job to the current major version. - *
- * For additional info - * see - * ML Upgrade job snapshots documentation - * - * @param request The request - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @throws IOException when there is a serialization issue sending the request or receiving the response - */ - public UpgradeJobModelSnapshotResponse upgradeJobSnapshot(UpgradeJobModelSnapshotRequest request, RequestOptions options) - throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - request, - MLRequestConverters::upgradeJobSnapshot, - options, - UpgradeJobModelSnapshotResponse::fromXContent, - Collections.emptySet() - ); - } - - /** - * Upgrades a snapshot for a Machine Learning Job to the current major version, - * notifies listener once the upgrade has started. - *
- * For additional info - * see - * ML Upgrade job snapshots documentation - * - * @param request The request - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener Listener to be notified upon request completion - * @return cancellable that may be used to cancel the request - */ - public Cancellable upgradeJobSnapshotAsync( - UpgradeJobModelSnapshotRequest request, - RequestOptions options, - ActionListener listener - ) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - request, - MLRequestConverters::upgradeJobSnapshot, - options, - UpgradeJobModelSnapshotResponse::fromXContent, - listener, - Collections.emptySet() - ); - } - - /** - * Gets overall buckets for a set of Machine Learning Jobs. - *
- * For additional info - * see - * ML GET overall buckets documentation - * - * @param request The request - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - */ - public GetOverallBucketsResponse getOverallBuckets(GetOverallBucketsRequest request, RequestOptions options) throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - request, - MLRequestConverters::getOverallBuckets, - options, - GetOverallBucketsResponse::fromXContent, - Collections.emptySet() - ); - } - - /** - * Gets overall buckets for a set of Machine Learning Jobs, notifies listener once the requested buckets are retrieved. - *
- * For additional info - * see - * ML GET overall buckets documentation - * - * @param request The request - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener Listener to be notified upon request completion - * @return cancellable that may be used to cancel the request - */ - public Cancellable getOverallBucketsAsync( - GetOverallBucketsRequest request, - RequestOptions options, - ActionListener listener - ) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - request, - MLRequestConverters::getOverallBuckets, - options, - GetOverallBucketsResponse::fromXContent, - listener, - Collections.emptySet() - ); - } - - /** - * Gets the records for a Machine Learning Job. - *
- * For additional info - * see ML GET records documentation - * - * @param request the request - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - */ - public GetRecordsResponse getRecords(GetRecordsRequest request, RequestOptions options) throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - request, - MLRequestConverters::getRecords, - options, - GetRecordsResponse::fromXContent, - Collections.emptySet() - ); - } - - /** - * Gets the records for a Machine Learning Job, notifies listener once the requested records are retrieved. - *
- * For additional info - * see ML GET records documentation - * - * @param request the request - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener Listener to be notified upon request completion - * @return cancellable that may be used to cancel the request - */ - public Cancellable getRecordsAsync(GetRecordsRequest request, RequestOptions options, ActionListener listener) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - request, - MLRequestConverters::getRecords, - options, - GetRecordsResponse::fromXContent, - listener, - Collections.emptySet() - ); - } - - /** - * Sends data to an anomaly detection job for analysis. - *
- * NOTE: The job must be in the open state to receive and process the data. - *
- * For additional info - * see ML POST Data documentation - * - * @param request PostDataRequest containing the data to post and some additional options - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return response containing operational progress about the job - * @throws IOException when there is a serialization issue sending the request or receiving the response - */ - public PostDataResponse postData(PostDataRequest request, RequestOptions options) throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - request, - MLRequestConverters::postData, - options, - PostDataResponse::fromXContent, - Collections.emptySet() - ); - } -
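An upload sketch using the request's JSON builder; the document fields are invented for illustration and would have to match the job's data description in practice:

    import java.util.HashMap;
    import java.util.Map;
    import org.elasticsearch.client.RequestOptions;
    import org.elasticsearch.client.ml.PostDataRequest;
    import org.elasticsearch.client.ml.PostDataResponse;

    // Post a single JSON record to the hypothetical open job "my-job".
    Map<String, Object> doc = new HashMap<>();
    doc.put("timestamp", 1549300800000L);
    doc.put("total", 42);
    PostDataRequest.JsonBuilder jsonBuilder = new PostDataRequest.JsonBuilder();
    jsonBuilder.addDoc(doc);
    PostDataResponse postDataResponse = mlClient.postData(new PostDataRequest("my-job", jsonBuilder), RequestOptions.DEFAULT);
    // DataCounts reports how many records the job accepted and processed.
    System.out.println(postDataResponse.getDataCounts().getProcessedRecordCount());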
- /** - * Sends data to an anomaly detection job for analysis, asynchronously - *
- * NOTE: The job must be in the open state to receive and process the data. - *
- * For additional info - * see ML POST Data documentation - * - * @param request PostDataRequest containing the data to post and some additional options - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener Listener to be notified upon request completion - * @return cancellable that may be used to cancel the request - */ - public Cancellable postDataAsync(PostDataRequest request, RequestOptions options, ActionListener listener) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - request, - MLRequestConverters::postData, - options, - PostDataResponse::fromXContent, - listener, - Collections.emptySet() - ); - } - - /** - * Gets a single or multiple calendars. - *
- * For additional info - * see ML GET calendars documentation - * - * @param request The calendars request - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return {@link GetCalendarsResponse} response object containing the {@link org.elasticsearch.client.ml.calendars.Calendar} - * objects and the number of calendars found - */ - public GetCalendarsResponse getCalendars(GetCalendarsRequest request, RequestOptions options) throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - request, - MLRequestConverters::getCalendars, - options, - GetCalendarsResponse::fromXContent, - Collections.emptySet() - ); - } - - /** - * Gets a single or multiple calendars, notifies listener once the requested records are retrieved. - *
- * For additional info - * see ML GET calendars documentation - * - * @param request The calendars request - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener Listener to be notified upon request completion - * @return cancellable that may be used to cancel the request - */ - public Cancellable getCalendarsAsync( - GetCalendarsRequest request, - RequestOptions options, - ActionListener listener - ) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - request, - MLRequestConverters::getCalendars, - options, - GetCalendarsResponse::fromXContent, - listener, - Collections.emptySet() - ); - } - - /** - * Gets the influencers for a Machine Learning Job. - *
- * For additional info - * see - * ML GET influencers documentation - * - * @param request the request - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - */ - public GetInfluencersResponse getInfluencers(GetInfluencersRequest request, RequestOptions options) throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - request, - MLRequestConverters::getInfluencers, - options, - GetInfluencersResponse::fromXContent, - Collections.emptySet() - ); - } - - /** - * Gets the influencers for a Machine Learning Job, notifies listener once the requested influencers are retrieved. - *
- * For additional info - * see - * ML GET influencers documentation - * - * @param request the request - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener Listener to be notified upon request completion - * @return cancellable that may be used to cancel the request - */ - public Cancellable getInfluencersAsync( - GetInfluencersRequest request, - RequestOptions options, - ActionListener listener - ) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - request, - MLRequestConverters::getInfluencers, - options, - GetInfluencersResponse::fromXContent, - listener, - Collections.emptySet() - ); - } - - /** - * Creates a new machine learning calendar - *
- * For additional info - * see - * ML create calendar documentation - * - * @param request The request - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return The {@link PutCalendarResponse} containing the calendar - * @throws IOException when there is a serialization issue sending the request or receiving the response - */ - public PutCalendarResponse putCalendar(PutCalendarRequest request, RequestOptions options) throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - request, - MLRequestConverters::putCalendar, - options, - PutCalendarResponse::fromXContent, - Collections.emptySet() - ); - } -
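A calendar sketch; the Calendar(id, jobIds, description) constructor is assumed from the client's calendars package, with placeholder values:

    import java.util.Collections;
    import org.elasticsearch.client.RequestOptions;
    import org.elasticsearch.client.ml.PutCalendarRequest;
    import org.elasticsearch.client.ml.PutCalendarResponse;
    import org.elasticsearch.client.ml.calendars.Calendar;

    // A calendar that will hold scheduled maintenance windows for "my-job".
    Calendar calendar = new Calendar("maintenance", Collections.singletonList("my-job"), "planned maintenance windows");
    PutCalendarResponse putCalendarResponse = mlClient.putCalendar(new PutCalendarRequest(calendar), RequestOptions.DEFAULT);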
- /** - * Creates a new machine learning calendar, notifies the listener with the created calendar - *
- * For additional info - * see - * ML create calendar documentation - * - * @param request The request - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener Listener to be notified upon request completion - * @return cancellable that may be used to cancel the request - */ - public Cancellable putCalendarAsync(PutCalendarRequest request, RequestOptions options, ActionListener listener) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - request, - MLRequestConverters::putCalendar, - options, - PutCalendarResponse::fromXContent, - listener, - Collections.emptySet() - ); - } - - /** - * Adds Machine Learning Job(s) to a calendar - *
- * For additional info - * see - * ML Put calendar job documentation - * - * @param request The request - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return The {@link PutCalendarResponse} containing the updated calendar - * @throws IOException when there is a serialization issue sending the request or receiving the response - */ - public PutCalendarResponse putCalendarJob(PutCalendarJobRequest request, RequestOptions options) throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - request, - MLRequestConverters::putCalendarJob, - options, - PutCalendarResponse::fromXContent, - Collections.emptySet() - ); - } - - /** - * Adds Machine Learning Job(s) to a calendar, notifies listener when completed - *
- * For additional info - * see - * ML Put calendar job documentation - * - * @param request The request - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener Listener to be notified upon request completion - * @return cancellable that may be used to cancel the request - */ - public Cancellable putCalendarJobAsync( - PutCalendarJobRequest request, - RequestOptions options, - ActionListener listener - ) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - request, - MLRequestConverters::putCalendarJob, - options, - PutCalendarResponse::fromXContent, - listener, - Collections.emptySet() - ); - } - - /** - * Removes Machine Learning Job(s) from a calendar - *
- * For additional info - * see - * ML Delete calendar job documentation - * - * @param request The request - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return The {@link PutCalendarResponse} containing the updated calendar - * @throws IOException when there is a serialization issue sending the request or receiving the response - */ - public PutCalendarResponse deleteCalendarJob(DeleteCalendarJobRequest request, RequestOptions options) throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - request, - MLRequestConverters::deleteCalendarJob, - options, - PutCalendarResponse::fromXContent, - Collections.emptySet() - ); - } - - /** - * Removes Machine Learning Job(s) from a calendar, notifies listener when completed - *
- * For additional info - * see - * ML Delete calendar job documentation - * - * @param request The request - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener Listener to be notified upon request completion - * @return cancellable that may be used to cancel the request - */ - public Cancellable deleteCalendarJobAsync( - DeleteCalendarJobRequest request, - RequestOptions options, - ActionListener listener - ) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - request, - MLRequestConverters::deleteCalendarJob, - options, - PutCalendarResponse::fromXContent, - listener, - Collections.emptySet() - ); - } - - /** - * Deletes the given Machine Learning Calendar - *
- * For additional info see - * - * ML Delete calendar documentation - * - * @param request The request to delete the calendar - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return action acknowledgement - * @throws IOException when there is a serialization issue sending the request or receiving the response - */ - public AcknowledgedResponse deleteCalendar(DeleteCalendarRequest request, RequestOptions options) throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - request, - MLRequestConverters::deleteCalendar, - options, - AcknowledgedResponse::fromXContent, - Collections.emptySet() - ); - } -
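A matching deletion sketch for the placeholder calendar created above:

    import org.elasticsearch.client.RequestOptions;
    import org.elasticsearch.client.core.AcknowledgedResponse;
    import org.elasticsearch.client.ml.DeleteCalendarRequest;

    // Delete the hypothetical "maintenance" calendar.
    AcknowledgedResponse deleteCalendarAck = mlClient.deleteCalendar(new DeleteCalendarRequest("maintenance"), RequestOptions.DEFAULT);
    System.out.println("deleted: " + deleteCalendarAck.isAcknowledged());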
- /** - * Deletes the given Machine Learning Calendar asynchronously and notifies the listener on completion - *
- * For additional info see - * - * ML Delete calendar documentation - * - * @param request The request to delete the calendar - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener Listener to be notified upon request completion - * @return cancellable that may be used to cancel the request - */ - public Cancellable deleteCalendarAsync( - DeleteCalendarRequest request, - RequestOptions options, - ActionListener listener - ) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - request, - MLRequestConverters::deleteCalendar, - options, - AcknowledgedResponse::fromXContent, - listener, - Collections.emptySet() - ); - } - - /** - * Gets the events for a machine learning calendar - *
- * For additional info - * see - * GET Calendar Events API - * - * @param request The request - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return The {@link GetCalendarEventsResponse} containing the scheduled events - * @throws IOException when there is a serialization issue sending the request or receiving the response - */ - public GetCalendarEventsResponse getCalendarEvents(GetCalendarEventsRequest request, RequestOptions options) throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - request, - MLRequestConverters::getCalendarEvents, - options, - GetCalendarEventsResponse::fromXContent, - Collections.emptySet() - ); - } - - /** - * Gets the events for a machine learning calendar asynchronously, notifies the listener on completion - *
- * For additional info - * see - * GET Calendar Events API - * - * @param request The request - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener Listener to be notified upon request completion - * @return cancellable that may be used to cancel the request - */ - public Cancellable getCalendarEventsAsync( - GetCalendarEventsRequest request, - RequestOptions options, - ActionListener listener - ) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - request, - MLRequestConverters::getCalendarEvents, - options, - GetCalendarEventsResponse::fromXContent, - listener, - Collections.emptySet() - ); - } - - /** - * Creates new events for a machine learning calendar - *
- * For additional info - * see - * Add Events to Calendar API - * - * @param request The request - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return The {@link PostCalendarEventResponse} containing the scheduled events - * @throws IOException when there is a serialization issue sending the request or receiving the response - */ - public PostCalendarEventResponse postCalendarEvent(PostCalendarEventRequest request, RequestOptions options) throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - request, - MLRequestConverters::postCalendarEvents, - options, - PostCalendarEventResponse::fromXContent, - Collections.emptySet() - ); - } - - /** - * Creates new events for a machine learning calendar asynchronously, notifies the listener on completion - *
- * For additional info - * see - * Add Events to Calendar API - * - * @param request The request - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener Listener to be notified upon request completion - * @return cancellable that may be used to cancel the request - */ - public Cancellable postCalendarEventAsync( - PostCalendarEventRequest request, - RequestOptions options, - ActionListener listener - ) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - request, - MLRequestConverters::postCalendarEvents, - options, - PostCalendarEventResponse::fromXContent, - listener, - Collections.emptySet() - ); - } - - /** - * Removes a Scheduled Event from a calendar - *
- * For additional info - * see - * ML Delete calendar event documentation - * - * @param request The request - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return The {@link AcknowledgedResponse} acknowledging that the event was removed from the calendar - * @throws IOException when there is a serialization issue sending the request or receiving the response - */ - public AcknowledgedResponse deleteCalendarEvent(DeleteCalendarEventRequest request, RequestOptions options) throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - request, - MLRequestConverters::deleteCalendarEvent, - options, - AcknowledgedResponse::fromXContent, - Collections.emptySet() - ); - } - - /** - * Removes a Scheduled Event from a calendar, notifies listener when completed - *
- * For additional info - * see - * ML Delete calendar event documentation - * - * @param request The request - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener Listener to be notified upon request completion - * @return cancellable that may be used to cancel the request - */ - public Cancellable deleteCalendarEventAsync( - DeleteCalendarEventRequest request, - RequestOptions options, - ActionListener listener - ) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - request, - MLRequestConverters::deleteCalendarEvent, - options, - AcknowledgedResponse::fromXContent, - listener, - Collections.emptySet() - ); - } - - /** - * Creates a new Machine Learning Filter - *
- * For additional info - * see ML PUT Filter documentation - * - * @param request The PutFilterRequest containing the {@link org.elasticsearch.client.ml.job.config.MlFilter} settings - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return PutFilterResponse with enclosed {@link org.elasticsearch.client.ml.job.config.MlFilter} object - * @throws IOException when there is a serialization issue sending the request or receiving the response - */ - public PutFilterResponse putFilter(PutFilterRequest request, RequestOptions options) throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - request, - MLRequestConverters::putFilter, - options, - PutFilterResponse::fromXContent, - Collections.emptySet() - ); - } -
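A filter sketch; MlFilter.builder and the varargs setItems are assumptions about the filter config class, and the items are placeholders:

    import org.elasticsearch.client.RequestOptions;
    import org.elasticsearch.client.ml.PutFilterRequest;
    import org.elasticsearch.client.ml.PutFilterResponse;
    import org.elasticsearch.client.ml.job.config.MlFilter;

    // A filter of known-safe domains that detector rules can reference.
    MlFilter safeDomains = MlFilter.builder("safe-domains")
        .setDescription("domains to exclude from results")
        .setItems("example.com", "example.org")
        .build();
    PutFilterResponse putFilterResponse = mlClient.putFilter(new PutFilterRequest(safeDomains), RequestOptions.DEFAULT);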
- /** - * Creates a new Machine Learning Filter asynchronously and notifies listener on completion - *
- * For additional info - * see ML PUT Filter documentation - * - * @param request The request containing the {@link org.elasticsearch.client.ml.job.config.MlFilter} settings - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener Listener to be notified upon request completion - * @return cancellable that may be used to cancel the request - */ - public Cancellable putFilterAsync(PutFilterRequest request, RequestOptions options, ActionListener listener) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - request, - MLRequestConverters::putFilter, - options, - PutFilterResponse::fromXContent, - listener, - Collections.emptySet() - ); - } - - /** - * Gets Machine Learning Filters - *
- * For additional info - * see ML GET Filter documentation - * - * @param request The request - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return GetFiltersResponse with enclosed {@link org.elasticsearch.client.ml.job.config.MlFilter} objects - * @throws IOException when there is a serialization issue sending the request or receiving the response - */ - public GetFiltersResponse getFilter(GetFiltersRequest request, RequestOptions options) throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - request, - MLRequestConverters::getFilter, - options, - GetFiltersResponse::fromXContent, - Collections.emptySet() - ); - } - - /** - * Gets Machine Learning Filters asynchronously and notifies listener on completion - *
- * For additional info - * see ML GET Filter documentation - * - * @param request The request - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener Listener to be notified upon request completion - * @return cancellable that may be used to cancel the request - */ - public Cancellable getFilterAsync(GetFiltersRequest request, RequestOptions options, ActionListener listener) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - request, - MLRequestConverters::getFilter, - options, - GetFiltersResponse::fromXContent, - listener, - Collections.emptySet() - ); - } - - /** - * Updates a Machine Learning Filter - *
- * For additional info - * see - * ML Update Filter documentation - * - * @param request The request - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return PutFilterResponse with the updated {@link org.elasticsearch.client.ml.job.config.MlFilter} object - * @throws IOException when there is a serialization issue sending the request or receiving the response - */ - public PutFilterResponse updateFilter(UpdateFilterRequest request, RequestOptions options) throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - request, - MLRequestConverters::updateFilter, - options, - PutFilterResponse::fromXContent, - Collections.emptySet() - ); - } - - /** - * Updates a Machine Learning Filter asynchronously and notifies listener on completion - *
- * For additional info - * see - * ML Update Filter documentation - * - * @param request The request - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener Listener to be notified upon request completion - * @return cancellable that may be used to cancel the request - */ - public Cancellable updateFilterAsync(UpdateFilterRequest request, RequestOptions options, ActionListener listener) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - request, - MLRequestConverters::updateFilter, - options, - PutFilterResponse::fromXContent, - listener, - Collections.emptySet() - ); - } - - /** - * Deletes the given Machine Learning filter - *
- * For additional info - * see - * ML Delete Filter documentation - * - * @param request The request to delete the filter - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return action acknowledgement - * @throws IOException when there is a serialization issue sending the request or receiving the response - */ - public AcknowledgedResponse deleteFilter(DeleteFilterRequest request, RequestOptions options) throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - request, - MLRequestConverters::deleteFilter, - options, - AcknowledgedResponse::fromXContent, - Collections.emptySet() - ); - } - - /** - * Deletes the given Machine Learning filter asynchronously and notifies the listener on completion - *
- * For additional info - * see - * ML Delete Filter documentation - * - * @param request The request to delete the filter - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener Listener to be notified upon request completion - * @return cancellable that may be used to cancel the request - */ - public Cancellable deleteFilterAsync( - DeleteFilterRequest request, - RequestOptions options, - ActionListener listener - ) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - request, - MLRequestConverters::deleteFilter, - options, - AcknowledgedResponse::fromXContent, - listener, - Collections.emptySet() - ); - } - - /** - * Gets Machine Learning information about default values and limits. - *
- * For additional info - * see Machine Learning info - * - * @param request The request of Machine Learning info - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return response info about default values and limits - * @throws IOException when there is a serialization issue sending the request or receiving the response - */ - public MlInfoResponse getMlInfo(MlInfoRequest request, RequestOptions options) throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - request, - MLRequestConverters::mlInfo, - options, - MlInfoResponse::fromXContent, - Collections.emptySet() - ); - } - - /** - * Gets Machine Learning information about default values and limits, asynchronously. - *

- * For additional info - * see Machine Learning info - * - * @param request The request of Machine Learning info - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener Listener to be notified upon request completion - * @return cancellable that may be used to cancel the request - */ - public Cancellable getMlInfoAsync(MlInfoRequest request, RequestOptions options, ActionListener listener) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - request, - MLRequestConverters::mlInfo, - options, - MlInfoResponse::fromXContent, - listener, - Collections.emptySet() - ); - } - - /** - * Sets the ML cluster setting upgrade_mode - *

- * For additional info - * see Set Upgrade Mode - * - * @param request The request to set upgrade mode - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return action acknowledgement - * @throws IOException when there is a serialization issue sending the request or receiving the response - */ - public AcknowledgedResponse setUpgradeMode(SetUpgradeModeRequest request, RequestOptions options) throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - request, - MLRequestConverters::setUpgradeMode, - options, - AcknowledgedResponse::fromXContent, - Collections.emptySet() - ); - } - - /** - * Sets the ML cluster setting upgrade_mode asynchronously - *
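A minimal sketch of toggling upgrade mode with this method (client as above; the timeout setter is recalled from the removed API and should be treated as an assumption):

    SetUpgradeModeRequest request = new SetUpgradeModeRequest(true);   // true pauses ML activity
    request.setTimeout(TimeValue.timeValueMinutes(1));                 // how long to wait for the toggle
    AcknowledgedResponse response = client.machineLearning().setUpgradeMode(request, RequestOptions.DEFAULT);
    boolean acknowledged = response.isAcknowledged();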

- * For additional info - * see Set Upgrade Mode - * - * @param request The request to set upgrade mode - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener Listener to be notified upon request completion - * @return cancellable that may be used to cancel the request - */ - public Cancellable setUpgradeModeAsync( - SetUpgradeModeRequest request, - RequestOptions options, - ActionListener listener - ) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - request, - MLRequestConverters::setUpgradeMode, - options, - AcknowledgedResponse::fromXContent, - listener, - Collections.emptySet() - ); - } - - /** - * Estimate the model memory an analysis config is likely to need given supplied field cardinalities - *

- * For additional info - * see Estimate Model Memory - * - * @param request The {@link EstimateModelMemoryRequest} - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return {@link EstimateModelMemoryResponse} response object - */ - public EstimateModelMemoryResponse estimateModelMemory(EstimateModelMemoryRequest request, RequestOptions options) throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - request, - MLRequestConverters::estimateModelMemory, - options, - EstimateModelMemoryResponse::fromXContent, - Collections.emptySet() - ); - } - - /** - * Estimate the model memory an analysis config is likely to need given supplied field cardinalities and notifies listener upon - * completion - *
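A sketch of an estimate call. The Detector/AnalysisConfig builders and the cardinality setter are recalled from the removed client and are assumptions, not confirmed by this patch; the field name and cardinality are placeholders:

    Detector detector = Detector.builder().setFunction("count").setByFieldName("user").build();
    AnalysisConfig analysisConfig = AnalysisConfig.builder(Collections.singletonList(detector))
        .setBucketSpan(TimeValue.timeValueMinutes(15))
        .build();
    EstimateModelMemoryRequest request = new EstimateModelMemoryRequest(analysisConfig);
    request.setOverallCardinality(Collections.singletonMap("user", 10_000L)); // observed cardinality of the by-field
    EstimateModelMemoryResponse response = client.machineLearning().estimateModelMemory(request, RequestOptions.DEFAULT);
    ByteSizeValue estimate = response.getEstimate();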

- * For additional info - * see Estimate Model Memory - * - * @param request The {@link EstimateModelMemoryRequest} - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener Listener to be notified upon request completion - * @return cancellable that may be used to cancel the request - */ - public Cancellable estimateModelMemoryAsync( - EstimateModelMemoryRequest request, - RequestOptions options, - ActionListener listener - ) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - request, - MLRequestConverters::estimateModelMemory, - options, - EstimateModelMemoryResponse::fromXContent, - listener, - Collections.emptySet() - ); - } - - /** - * Creates a new Data Frame Analytics config - *

- * For additional info - * see - * PUT Data Frame Analytics documentation - * - * @param request The {@link PutDataFrameAnalyticsRequest} containing the - * {@link org.elasticsearch.client.ml.dataframe.DataFrameAnalyticsConfig} - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return The {@link PutDataFrameAnalyticsResponse} containing the created - * {@link org.elasticsearch.client.ml.dataframe.DataFrameAnalyticsConfig} - * @throws IOException when there is a serialization issue sending the request or receiving the response - */ - public PutDataFrameAnalyticsResponse putDataFrameAnalytics(PutDataFrameAnalyticsRequest request, RequestOptions options) - throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - request, - MLRequestConverters::putDataFrameAnalytics, - options, - PutDataFrameAnalyticsResponse::fromXContent, - Collections.emptySet() - ); - } - - /** - * Creates a new Data Frame Analytics config asynchronously and notifies listener upon completion - *
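A sketch of creating a config with this call. The builder chain is recalled from the removed client (an assumption); the id and index names are placeholders, and outlier detection with default settings stands in for any analysis type:

    DataFrameAnalyticsConfig config = DataFrameAnalyticsConfig.builder()
        .setId("my-analysis")                                                      // placeholder id
        .setSource(DataFrameAnalyticsSource.builder().setIndex("source-index").build())
        .setDest(DataFrameAnalyticsDest.builder().setIndex("dest-index").build())
        .setAnalysis(OutlierDetection.createDefault())                             // default outlier detection
        .build();
    PutDataFrameAnalyticsRequest request = new PutDataFrameAnalyticsRequest(config);
    PutDataFrameAnalyticsResponse response = client.machineLearning().putDataFrameAnalytics(request, RequestOptions.DEFAULT);
    DataFrameAnalyticsConfig created = response.getConfig();                       // config as stored, defaults filled in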

- * For additional info - * see - * PUT Data Frame Analytics documentation - * - * @param request The {@link PutDataFrameAnalyticsRequest} containing the - * {@link org.elasticsearch.client.ml.dataframe.DataFrameAnalyticsConfig} - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener Listener to be notified upon request completion - * @return cancellable that may be used to cancel the request - */ - public Cancellable putDataFrameAnalyticsAsync( - PutDataFrameAnalyticsRequest request, - RequestOptions options, - ActionListener listener - ) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - request, - MLRequestConverters::putDataFrameAnalytics, - options, - PutDataFrameAnalyticsResponse::fromXContent, - listener, - Collections.emptySet() - ); - } - - /** - * Updates a Data Frame Analytics config - *

- * For additional info - * see - * Update Data Frame Analytics documentation - * - * @param request The {@link UpdateDataFrameAnalyticsRequest} containing the - * {@link org.elasticsearch.client.ml.dataframe.DataFrameAnalyticsConfigUpdate} - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return The {@link PutDataFrameAnalyticsResponse} containing the updated - * {@link org.elasticsearch.client.ml.dataframe.DataFrameAnalyticsConfig} - * @throws IOException when there is a serialization issue sending the request or receiving the response - */ - public PutDataFrameAnalyticsResponse updateDataFrameAnalytics(UpdateDataFrameAnalyticsRequest request, RequestOptions options) - throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - request, - MLRequestConverters::updateDataFrameAnalytics, - options, - PutDataFrameAnalyticsResponse::fromXContent, - Collections.emptySet() - ); - } - - /** - * Updates a Data Frame Analytics config asynchronously and notifies listener upon completion - *

- * For additional info - * see - * Update Data Frame Analytics documentation - * - * @param request The {@link UpdateDataFrameAnalyticsRequest} containing the - * {@link org.elasticsearch.client.ml.dataframe.DataFrameAnalyticsConfigUpdate} - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener Listener to be notified upon request completion - * @return cancellable that may be used to cancel the request - */ - public Cancellable updateDataFrameAnalyticsAsync( - UpdateDataFrameAnalyticsRequest request, - RequestOptions options, - ActionListener listener - ) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - request, - MLRequestConverters::updateDataFrameAnalytics, - options, - PutDataFrameAnalyticsResponse::fromXContent, - listener, - Collections.emptySet() - ); - } - - /** - * Gets a single or multiple Data Frame Analytics configs - *

- * For additional info - * see - * GET Data Frame Analytics documentation - * - * @param request The {@link GetDataFrameAnalyticsRequest} - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return {@link GetDataFrameAnalyticsResponse} response object containing the - * {@link org.elasticsearch.client.ml.dataframe.DataFrameAnalyticsConfig} objects - */ - public GetDataFrameAnalyticsResponse getDataFrameAnalytics(GetDataFrameAnalyticsRequest request, RequestOptions options) - throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - request, - MLRequestConverters::getDataFrameAnalytics, - options, - GetDataFrameAnalyticsResponse::fromXContent, - Collections.emptySet() - ); - } - - /** - * Gets a single or multiple Data Frame Analytics configs asynchronously and notifies listener upon completion - *

- * For additional info - * see - * GET Data Frame Analytics documentation - * - * @param request The {@link GetDataFrameAnalyticsRequest} - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener Listener to be notified upon request completion - * @return cancellable that may be used to cancel the request - */ - public Cancellable getDataFrameAnalyticsAsync( - GetDataFrameAnalyticsRequest request, - RequestOptions options, - ActionListener listener - ) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - request, - MLRequestConverters::getDataFrameAnalytics, - options, - GetDataFrameAnalyticsResponse::fromXContent, - listener, - Collections.emptySet() - ); - } - - /** - * Gets the running statistics of a Data Frame Analytics - *

- * For additional info - * see - * GET Data Frame Analytics Stats documentation - * - * @param request The {@link GetDataFrameAnalyticsStatsRequest} - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return {@link GetDataFrameAnalyticsStatsResponse} response object - */ - public GetDataFrameAnalyticsStatsResponse getDataFrameAnalyticsStats(GetDataFrameAnalyticsStatsRequest request, RequestOptions options) - throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - request, - MLRequestConverters::getDataFrameAnalyticsStats, - options, - GetDataFrameAnalyticsStatsResponse::fromXContent, - Collections.emptySet() - ); - } - - /** - * Gets the running statistics of a Data Frame Analytics asynchronously and notifies listener upon completion - *
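Stats retrieval followed the same pattern (client as above; the response accessor name is recalled from the removed client and is an assumption):

    GetDataFrameAnalyticsStatsRequest request = new GetDataFrameAnalyticsStatsRequest("my-analysis");
    GetDataFrameAnalyticsStatsResponse response = client.machineLearning().getDataFrameAnalyticsStats(request, RequestOptions.DEFAULT);
    List<DataFrameAnalyticsStats> stats = response.getAnalyticsStats();  // one entry per matched job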

- * For additional info - * see - * GET Data Frame Analytics Stats documentation - * - * @param request The {@link GetDataFrameAnalyticsStatsRequest} - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener Listener to be notified upon request completion - * @return cancellable that may be used to cancel the request - */ - public Cancellable getDataFrameAnalyticsStatsAsync( - GetDataFrameAnalyticsStatsRequest request, - RequestOptions options, - ActionListener listener - ) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - request, - MLRequestConverters::getDataFrameAnalyticsStats, - options, - GetDataFrameAnalyticsStatsResponse::fromXContent, - listener, - Collections.emptySet() - ); - } - - /** - * Starts Data Frame Analytics - *

- * For additional info - * see - * Start Data Frame Analytics documentation - * - * @param request The {@link StartDataFrameAnalyticsRequest} - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return The {@link StartDataFrameAnalyticsResponse} acknowledging the start - * @throws IOException when there is a serialization issue sending the request or receiving the response - */ - public StartDataFrameAnalyticsResponse startDataFrameAnalytics(StartDataFrameAnalyticsRequest request, RequestOptions options) - throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - request, - MLRequestConverters::startDataFrameAnalytics, - options, - StartDataFrameAnalyticsResponse::fromXContent, - Collections.emptySet() - ); - } - - /** - * Starts Data Frame Analytics asynchronously and notifies listener upon completion - *
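Starting was a one-liner once the config existed (client as above; the timeout setter is an assumption recalled from the removed API):

    StartDataFrameAnalyticsRequest request = new StartDataFrameAnalyticsRequest("my-analysis");
    request.setTimeout(TimeValue.timeValueMinutes(1));  // how long to wait for the task to start
    StartDataFrameAnalyticsResponse response = client.machineLearning().startDataFrameAnalytics(request, RequestOptions.DEFAULT);
    boolean started = response.isAcknowledged();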

- * For additional info - * see - * Start Data Frame Analytics documentation - * - * @param request The {@link StartDataFrameAnalyticsRequest} - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener Listener to be notified upon request completion - * @return cancellable that may be used to cancel the request - */ - public Cancellable startDataFrameAnalyticsAsync( - StartDataFrameAnalyticsRequest request, - RequestOptions options, - ActionListener listener - ) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - request, - MLRequestConverters::startDataFrameAnalytics, - options, - StartDataFrameAnalyticsResponse::fromXContent, - listener, - Collections.emptySet() - ); - } - - /** - * Stops Data Frame Analytics - *

- * For additional info - * see - * Stop Data Frame Analytics documentation - * - * @param request The {@link StopDataFrameAnalyticsRequest} - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return {@link StopDataFrameAnalyticsResponse} - * @throws IOException when there is a serialization issue sending the request or receiving the response - */ - public StopDataFrameAnalyticsResponse stopDataFrameAnalytics(StopDataFrameAnalyticsRequest request, RequestOptions options) - throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - request, - MLRequestConverters::stopDataFrameAnalytics, - options, - StopDataFrameAnalyticsResponse::fromXContent, - Collections.emptySet() - ); - } - - /** - * Stops Data Frame Analytics asynchronously and notifies listener upon completion - *
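Stopping mirrored start (client as above; isStopped() is recalled from the removed response type and is an assumption):

    StopDataFrameAnalyticsRequest request = new StopDataFrameAnalyticsRequest("my-analysis");
    StopDataFrameAnalyticsResponse response = client.machineLearning().stopDataFrameAnalytics(request, RequestOptions.DEFAULT);
    boolean stopped = response.isStopped();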

- * For additional info - * see - * Stop Data Frame Analytics documentation - * - * @param request The {@link StopDataFrameAnalyticsRequest} - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener Listener to be notified upon request completion - * @return cancellable that may be used to cancel the request - */ - public Cancellable stopDataFrameAnalyticsAsync( - StopDataFrameAnalyticsRequest request, - RequestOptions options, - ActionListener listener - ) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - request, - MLRequestConverters::stopDataFrameAnalytics, - options, - StopDataFrameAnalyticsResponse::fromXContent, - listener, - Collections.emptySet() - ); - } - - /** - * Deletes the given Data Frame Analytics config - *

- * For additional info - * see - * DELETE Data Frame Analytics documentation - * - * @param request The {@link DeleteDataFrameAnalyticsRequest} - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return action acknowledgement - * @throws IOException when there is a serialization issue sending the request or receiving the response - */ - public AcknowledgedResponse deleteDataFrameAnalytics(DeleteDataFrameAnalyticsRequest request, RequestOptions options) - throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - request, - MLRequestConverters::deleteDataFrameAnalytics, - options, - AcknowledgedResponse::fromXContent, - Collections.emptySet() - ); - } - - /** - * Deletes the given Data Frame Analytics config asynchronously and notifies listener upon completion - *
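Deletion used the request type whose deleted source appears later in this patch (its constructor, setForce, and setTimeout are all visible in the removed DeleteDataFrameAnalyticsRequest class), so this sketch only assumes the configured client:

    DeleteDataFrameAnalyticsRequest request = new DeleteDataFrameAnalyticsRequest("my-analysis");
    request.setForce(false);                            // only delete a stopped job
    request.setTimeout(TimeValue.timeValueMinutes(1));  // how long to wait for the deletion
    AcknowledgedResponse response = client.machineLearning().deleteDataFrameAnalytics(request, RequestOptions.DEFAULT);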

- * For additional info - * see - * DELETE Data Frame Analytics documentation - * - * @param request The {@link DeleteDataFrameAnalyticsRequest} - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener Listener to be notified upon request completion - * @return cancellable that may be used to cancel the request - */ - public Cancellable deleteDataFrameAnalyticsAsync( - DeleteDataFrameAnalyticsRequest request, - RequestOptions options, - ActionListener listener - ) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - request, - MLRequestConverters::deleteDataFrameAnalytics, - options, - AcknowledgedResponse::fromXContent, - listener, - Collections.emptySet() - ); - } - - /** - * Evaluates the given Data Frame - *

- * For additional info - * see - * Evaluate Data Frame documentation - * - * @param request The {@link EvaluateDataFrameRequest} - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return {@link EvaluateDataFrameResponse} response object - * @throws IOException when there is a serialization issue sending the request or receiving the response - */ - public EvaluateDataFrameResponse evaluateDataFrame(EvaluateDataFrameRequest request, RequestOptions options) throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - request, - MLRequestConverters::evaluateDataFrame, - options, - EvaluateDataFrameResponse::fromXContent, - Collections.emptySet() - ); - } - - /** - * Evaluates the given Data Frame asynchronously and notifies listener upon completion - *

- * For additional info - * see - * Evaluate Data Frame documentation - * - * @param request The {@link EvaluateDataFrameRequest} - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener Listener to be notified upon request completion - * @return cancellable that may be used to cancel the request - */ - public Cancellable evaluateDataFrameAsync( - EvaluateDataFrameRequest request, - RequestOptions options, - ActionListener listener - ) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - request, - MLRequestConverters::evaluateDataFrame, - options, - EvaluateDataFrameResponse::fromXContent, - listener, - Collections.emptySet() - ); - } - - /** - * Explains the given Data Frame Analytics - *

- * For additional info - * see - * Explain Data Frame Analytics documentation - * - * @param request The {@link ExplainDataFrameAnalyticsRequest} - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return {@link ExplainDataFrameAnalyticsResponse} response object - * @throws IOException when there is a serialization issue sending the request or receiving the response - */ - public ExplainDataFrameAnalyticsResponse explainDataFrameAnalytics(ExplainDataFrameAnalyticsRequest request, RequestOptions options) - throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - request, - MLRequestConverters::explainDataFrameAnalytics, - options, - ExplainDataFrameAnalyticsResponse::fromXContent, - Collections.emptySet() - ); - } - - /** - * Explains the given Data Frame Analytics asynchronously and notifies listener upon completion - *

- * For additional info - * see - * Explain Data Frame Analytics documentation - * - * @param request The {@link ExplainDataFrameAnalyticsRequest} - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener Listener to be notified upon request completion - * @return cancellable that may be used to cancel the request - */ - public Cancellable explainDataFrameAnalyticsAsync( - ExplainDataFrameAnalyticsRequest request, - RequestOptions options, - ActionListener listener - ) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - request, - MLRequestConverters::explainDataFrameAnalytics, - options, - ExplainDataFrameAnalyticsResponse::fromXContent, - listener, - Collections.emptySet() - ); - } - - /** - * Gets trained model configs - *

- * For additional info - * see - * GET Trained Model Configs documentation - * - * @param request The {@link GetTrainedModelsRequest} - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return {@link GetTrainedModelsResponse} response object - */ - public GetTrainedModelsResponse getTrainedModels(GetTrainedModelsRequest request, RequestOptions options) throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - request, - MLRequestConverters::getTrainedModels, - options, - GetTrainedModelsResponse::fromXContent, - Collections.emptySet() - ); - } - - /** - * Gets trained model configs asynchronously and notifies listener upon completion - *
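A sketch of fetching one model config by id (client as above; the single-id constructor and response accessor are recalled from the removed client and are assumptions):

    GetTrainedModelsRequest request = new GetTrainedModelsRequest("my-model");  // placeholder model id
    GetTrainedModelsResponse response = client.machineLearning().getTrainedModels(request, RequestOptions.DEFAULT);
    List<TrainedModelConfig> models = response.getTrainedModels();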

- * For additional info - * see - * GET Trained Model Configs documentation - * - * @param request The {@link GetTrainedModelsRequest} - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener Listener to be notified upon request completion - * @return cancellable that may be used to cancel the request - */ - public Cancellable getTrainedModelsAsync( - GetTrainedModelsRequest request, - RequestOptions options, - ActionListener listener - ) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - request, - MLRequestConverters::getTrainedModels, - options, - GetTrainedModelsResponse::fromXContent, - listener, - Collections.emptySet() - ); - } - - /** - * Put trained model config - *

- * For additional info - * see - * PUT Trained Model Config documentation - * - * @param request The {@link PutTrainedModelRequest} - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return {@link PutTrainedModelResponse} response object - */ - public PutTrainedModelResponse putTrainedModel(PutTrainedModelRequest request, RequestOptions options) throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - request, - MLRequestConverters::putTrainedModel, - options, - PutTrainedModelResponse::fromXContent, - Collections.emptySet() - ); - } - - /** - * Put trained model config asynchronously and notifies listener upon completion - *

- * For additional info - * see - * PUT Trained Model Config documentation - * - * @param request The {@link PutTrainedModelRequest} - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener Listener to be notified upon request completion - * @return cancellable that may be used to cancel the request - */ - public Cancellable putTrainedModelAsync( - PutTrainedModelRequest request, - RequestOptions options, - ActionListener listener - ) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - request, - MLRequestConverters::putTrainedModel, - options, - PutTrainedModelResponse::fromXContent, - listener, - Collections.emptySet() - ); - } - - /** - * Gets trained model stats - *

- * For additional info - * see - * GET Trained Model Stats documentation - * - * @param request The {@link GetTrainedModelsStatsRequest} - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return {@link GetTrainedModelsStatsResponse} response object - */ - public GetTrainedModelsStatsResponse getTrainedModelsStats(GetTrainedModelsStatsRequest request, RequestOptions options) - throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - request, - MLRequestConverters::getTrainedModelsStats, - options, - GetTrainedModelsStatsResponse::fromXContent, - Collections.emptySet() - ); - } - - /** - * Gets trained model stats asynchronously and notifies listener upon completion - *

- * For additional info - * see - * GET Trained Model Stats documentation - * - * @param request The {@link GetTrainedModelsStatsRequest} - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener Listener to be notified upon request completion - * @return cancellable that may be used to cancel the request - */ - public Cancellable getTrainedModelsStatsAsync( - GetTrainedModelsStatsRequest request, - RequestOptions options, - ActionListener listener - ) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - request, - MLRequestConverters::getTrainedModelsStats, - options, - GetTrainedModelsStatsResponse::fromXContent, - listener, - Collections.emptySet() - ); - } - - /** - * Deletes the given Trained Model - *

- * For additional info - * see - * DELETE Trained Model documentation - * - * @param request The {@link DeleteTrainedModelRequest} - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return action acknowledgement - * @throws IOException when there is a serialization issue sending the request or receiving the response - */ - public AcknowledgedResponse deleteTrainedModel(DeleteTrainedModelRequest request, RequestOptions options) throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - request, - MLRequestConverters::deleteTrainedModel, - options, - AcknowledgedResponse::fromXContent, - Collections.emptySet() - ); - } - - /** - * Deletes the given Trained Model asynchronously and notifies listener upon completion - *
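Model deletion followed the same shape as the other acknowledged calls (client as above):

    DeleteTrainedModelRequest request = new DeleteTrainedModelRequest("my-model");
    AcknowledgedResponse response = client.machineLearning().deleteTrainedModel(request, RequestOptions.DEFAULT);
    boolean deleted = response.isAcknowledged();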

- * For additional info - * see - * DELETE Trained Model documentation - * - * @param request The {@link DeleteTrainedModelRequest} - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener Listener to be notified upon request completion - * @return cancellable that may be used to cancel the request - */ - public Cancellable deleteTrainedModelAsync( - DeleteTrainedModelRequest request, - RequestOptions options, - ActionListener listener - ) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - request, - MLRequestConverters::deleteTrainedModel, - options, - AcknowledgedResponse::fromXContent, - listener, - Collections.emptySet() - ); - } - - /** - * Creates or reassigns a trained model alias - *

- * For additional info - * see - * Put Trained Model Aliases documentation - * - * @param request The {@link PutTrainedModelAliasRequest} - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return action acknowledgement - * @throws IOException when there is a serialization issue sending the request or receiving the response - */ - public AcknowledgedResponse putTrainedModelAlias(PutTrainedModelAliasRequest request, RequestOptions options) throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - request, - MLRequestConverters::putTrainedModelAlias, - options, - AcknowledgedResponse::fromXContent, - Collections.emptySet() - ); - } - - /** - * Creates or reassigns a trained model alias asynchronously and notifies listener upon completion - *
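A sketch of pointing an alias at a new model. The constructor argument order (alias, model id, reassign flag) is recalled from the removed client and should be treated as an assumption:

    // reassign=true moves the alias even if it already points at another model
    PutTrainedModelAliasRequest request = new PutTrainedModelAliasRequest("champion", "my-model-v2", true);
    AcknowledgedResponse response = client.machineLearning().putTrainedModelAlias(request, RequestOptions.DEFAULT);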

- * For additional info - * see - * Put Trained Model Aliases documentation - * - * @param request The {@link PutTrainedModelAliasRequest} - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener Listener to be notified upon request completion - * @return cancellable that may be used to cancel the request - */ - public Cancellable putTrainedModelAliasAsync( - PutTrainedModelAliasRequest request, - RequestOptions options, - ActionListener listener - ) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - request, - MLRequestConverters::putTrainedModelAlias, - options, - AcknowledgedResponse::fromXContent, - listener, - Collections.emptySet() - ); - } - - /** - * Deletes a trained model alias - *

- * For additional info - * see - * Delete Trained Model Aliases documentation - * - * @param request The {@link DeleteTrainedModelAliasRequest} - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @return action acknowledgement - * @throws IOException when there is a serialization issue sending the request or receiving the response - */ - public AcknowledgedResponse deleteTrainedModelAlias(DeleteTrainedModelAliasRequest request, RequestOptions options) throws IOException { - return restHighLevelClient.performRequestAndParseEntity( - request, - MLRequestConverters::deleteTrainedModelAlias, - options, - AcknowledgedResponse::fromXContent, - Collections.emptySet() - ); - } - - /** - * Deletes a trained model alias asynchronously and notifies listener upon completion - *

- * For additional info - * see - * Delete Trained Model Aliases documentation - * - * @param request The {@link DeleteTrainedModelAliasRequest} - * @param options Additional request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized - * @param listener Listener to be notified upon request completion - * @return cancellable that may be used to cancel the request - */ - public Cancellable deleteTrainedModelAliasAsync( - DeleteTrainedModelAliasRequest request, - RequestOptions options, - ActionListener listener - ) { - return restHighLevelClient.performRequestAsyncAndParseEntity( - request, - MLRequestConverters::deleteTrainedModelAlias, - options, - AcknowledgedResponse::fromXContent, - listener, - Collections.emptySet() - ); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java index d94071ccac1d5..f030033392123 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java @@ -51,8 +51,6 @@ import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.action.update.UpdateRequest; import org.elasticsearch.action.update.UpdateResponse; -import org.elasticsearch.client.analytics.InferencePipelineAggregationBuilder; -import org.elasticsearch.client.analytics.ParsedInference; import org.elasticsearch.client.analytics.ParsedStringStats; import org.elasticsearch.client.analytics.ParsedTopMetrics; import org.elasticsearch.client.analytics.StringStatsAggregationBuilder; @@ -278,7 +276,6 @@ public class RestHighLevelClient implements Closeable { private final IndicesClient indicesClient = new IndicesClient(this); private final IngestClient ingestClient = new IngestClient(this); private final SnapshotClient snapshotClient = new SnapshotClient(this); - private final MachineLearningClient machineLearningClient = new MachineLearningClient(this); private final SecurityClient securityClient = new SecurityClient(this); private final TransformClient transformClient = new TransformClient(this); private final EqlClient eqlClient = new EqlClient(this); @@ -391,20 +388,6 @@ public SearchableSnapshotsClient searchableSnapshots() { return searchableSnapshotsClient; } - /** - * Provides methods for accessing the Elastic Licensed Machine Learning APIs that - * are shipped with the Elastic Stack distribution of Elasticsearch. All of - * these APIs will 404 if run against the OSS distribution of Elasticsearch. - *

- * See the - * Machine Learning APIs on elastic.co for more information. - * - * @return the client wrapper for making Machine Learning API calls - */ - public MachineLearningClient machineLearning() { - return machineLearningClient; - } - /** * Provides methods for accessing the Elastic Licensed Security APIs that * are shipped with the Elastic Stack distribution of Elasticsearch. All of @@ -2730,7 +2713,6 @@ static List getDefaultNamedXContents() { map.put(CompositeAggregationBuilder.NAME, (p, c) -> ParsedComposite.fromXContent(p, (String) c)); map.put(StringStatsAggregationBuilder.NAME, (p, c) -> ParsedStringStats.PARSER.parse(p, (String) c)); map.put(TopMetricsAggregationBuilder.NAME, (p, c) -> ParsedTopMetrics.PARSER.parse(p, (String) c)); - map.put(InferencePipelineAggregationBuilder.NAME, (p, c) -> ParsedInference.fromXContent(p, (String) (c))); map.put(TimeSeriesAggregationBuilder.NAME, (p, c) -> ParsedTimeSeries.fromXContent(p, (String) (c))); List entries = map.entrySet() .stream() diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/analytics/InferencePipelineAggregationBuilder.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/analytics/InferencePipelineAggregationBuilder.java deleted file mode 100644 index 20ba0fa993534..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/analytics/InferencePipelineAggregationBuilder.java +++ /dev/null @@ -1,138 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ - -package org.elasticsearch.client.analytics; - -import org.elasticsearch.Version; -import org.elasticsearch.client.ml.inference.trainedmodel.InferenceConfig; -import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.search.aggregations.PipelineAggregationBuilder; -import org.elasticsearch.search.aggregations.pipeline.AbstractPipelineAggregationBuilder; -import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; -import org.elasticsearch.search.builder.SearchSourceBuilder; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Map; -import java.util.Objects; -import java.util.TreeMap; - -import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; - -/** - * For building inference pipeline aggregations - * - * NOTE: This extends {@linkplain AbstractPipelineAggregationBuilder} for compatibility - * with {@link SearchSourceBuilder#aggregation(PipelineAggregationBuilder)} but it - * doesn't support any "server" side things like {@linkplain #doWriteTo(StreamOutput)} - * or {@linkplain #createInternal(Map)} - */ -public class InferencePipelineAggregationBuilder extends AbstractPipelineAggregationBuilder { - - public static String NAME = "inference"; - - public static final ParseField MODEL_ID = new ParseField("model_id"); - private static final ParseField INFERENCE_CONFIG = new ParseField("inference_config"); - - @SuppressWarnings("unchecked") - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - NAME, - false, - (args, name) -> new InferencePipelineAggregationBuilder(name, (String) args[0], (Map) args[1]) - ); - - static { - PARSER.declareString(constructorArg(), MODEL_ID); - PARSER.declareObject(constructorArg(), (p, c) -> p.mapStrings(), BUCKETS_PATH_FIELD); - PARSER.declareNamedObject( - InferencePipelineAggregationBuilder::setInferenceConfig, - (p, c, n) -> p.namedObject(InferenceConfig.class, n, c), - INFERENCE_CONFIG - ); - } - - private final Map bucketPathMap; - private final String modelId; - private InferenceConfig inferenceConfig; - - public static InferencePipelineAggregationBuilder parse(String pipelineAggregatorName, XContentParser parser) { - return PARSER.apply(parser, pipelineAggregatorName); - } - - public InferencePipelineAggregationBuilder(String name, String modelId, Map bucketsPath) { - super(name, NAME, new TreeMap<>(bucketsPath).values().toArray(new String[] {})); - this.modelId = modelId; - this.bucketPathMap = bucketsPath; - } - - public void setInferenceConfig(InferenceConfig inferenceConfig) { - this.inferenceConfig = inferenceConfig; - } - - @Override - protected void validate(ValidationContext context) { - // validation occurs on the server - } - - @Override - protected void doWriteTo(StreamOutput out) { - throw new UnsupportedOperationException(); - } - - @Override - protected PipelineAggregator createInternal(Map metaData) { - throw new UnsupportedOperationException(); - } - - @Override - protected boolean overrideBucketsPath() { - return true; - } - - @Override - protected XContentBuilder internalXContent(XContentBuilder builder, Params params) throws IOException { - builder.field(MODEL_ID.getPreferredName(), modelId); - builder.field(BUCKETS_PATH_FIELD.getPreferredName(), bucketPathMap); - if (inferenceConfig != null) { - 
builder.startObject(INFERENCE_CONFIG.getPreferredName()); - builder.field(inferenceConfig.getName(), inferenceConfig); - builder.endObject(); - } - return builder; - } - - @Override - public String getWriteableName() { - return NAME; - } - - @Override - public int hashCode() { - return Objects.hash(super.hashCode(), bucketPathMap, modelId, inferenceConfig); - } - - @Override - public boolean equals(Object obj) { - if (this == obj) return true; - if (obj == null || getClass() != obj.getClass()) return false; - if (super.equals(obj) == false) return false; - - InferencePipelineAggregationBuilder other = (InferencePipelineAggregationBuilder) obj; - return Objects.equals(bucketPathMap, other.bucketPathMap) - && Objects.equals(modelId, other.modelId) - && Objects.equals(inferenceConfig, other.inferenceConfig); - } - - @Override - public Version getMinimalSupportedVersion() { - return Version.V_7_9_0; - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/analytics/ParsedInference.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/analytics/ParsedInference.java deleted file mode 100644 index d3e1fcd5c85f0..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/analytics/ParsedInference.java +++ /dev/null @@ -1,132 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ - -package org.elasticsearch.client.analytics; - -import org.elasticsearch.client.ml.inference.results.FeatureImportance; -import org.elasticsearch.client.ml.inference.results.TopClassEntry; -import org.elasticsearch.search.aggregations.ParsedAggregation; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParseException; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.List; - -import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; - -/** - * This class parses the superset of all possible fields that may be written by - * InferenceResults. The warning field is mutually exclusive with all the other fields. - * - * In the case of classification results {@link #getValue()} may return a String, - * Boolean or a Double. For regression results {@link #getValue()} is always - * a Double. 
- */ -public class ParsedInference extends ParsedAggregation { - - @SuppressWarnings("unchecked") - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - ParsedInference.class.getSimpleName(), - true, - args -> new ParsedInference(args[0], (List) args[1], (List) args[2], (String) args[3]) - ); - - public static final ParseField FEATURE_IMPORTANCE = new ParseField("feature_importance"); - public static final ParseField WARNING = new ParseField("warning"); - public static final ParseField TOP_CLASSES = new ParseField("top_classes"); - - static { - PARSER.declareField(optionalConstructorArg(), (p, n) -> { - Object o; - XContentParser.Token token = p.currentToken(); - if (token == XContentParser.Token.VALUE_STRING) { - o = p.text(); - } else if (token == XContentParser.Token.VALUE_BOOLEAN) { - o = p.booleanValue(); - } else if (token == XContentParser.Token.VALUE_NUMBER) { - o = p.doubleValue(); - } else { - throw new XContentParseException( - p.getTokenLocation(), - "[" - + ParsedInference.class.getSimpleName() - + "] failed to parse field [" - + CommonFields.VALUE - + "] " - + "value [" - + token - + "] is not a string, boolean or number" - ); - } - return o; - }, CommonFields.VALUE, ObjectParser.ValueType.VALUE); - PARSER.declareObjectArray(optionalConstructorArg(), (p, c) -> FeatureImportance.fromXContent(p), FEATURE_IMPORTANCE); - PARSER.declareObjectArray(optionalConstructorArg(), (p, c) -> TopClassEntry.fromXContent(p), TOP_CLASSES); - PARSER.declareString(optionalConstructorArg(), WARNING); - declareAggregationFields(PARSER); - } - - public static ParsedInference fromXContent(XContentParser parser, final String name) { - ParsedInference parsed = PARSER.apply(parser, null); - parsed.setName(name); - return parsed; - } - - private final Object value; - private final List featureImportance; - private final List topClasses; - private final String warning; - - ParsedInference(Object value, List featureImportance, List topClasses, String warning) { - this.value = value; - this.warning = warning; - this.featureImportance = featureImportance; - this.topClasses = topClasses; - } - - public Object getValue() { - return value; - } - - public List getFeatureImportance() { - return featureImportance; - } - - public List getTopClasses() { - return topClasses; - } - - public String getWarning() { - return warning; - } - - @Override - protected XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException { - if (warning != null) { - builder.field(WARNING.getPreferredName(), warning); - } else { - builder.field(CommonFields.VALUE.getPreferredName(), value); - if (topClasses != null && topClasses.size() > 0) { - builder.field(TOP_CLASSES.getPreferredName(), topClasses); - } - if (featureImportance != null && featureImportance.size() > 0) { - builder.field(FEATURE_IMPORTANCE.getPreferredName(), featureImportance); - } - } - return builder; - } - - @Override - public String getType() { - return InferencePipelineAggregationBuilder.NAME; - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/AbstractResultResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/AbstractResultResponse.java deleted file mode 100644 index 94e42fc0c8b0a..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/AbstractResultResponse.java +++ /dev/null @@ -1,49 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. 
under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContent; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Collections; -import java.util.List; -import java.util.Objects; - -/** - * Abstract class that provides a list of results and their count. - */ -public abstract class AbstractResultResponse implements ToXContentObject { - - public static final ParseField COUNT = new ParseField("count"); - - private final ParseField resultsField; - protected final List results; - protected final long count; - - AbstractResultResponse(ParseField resultsField, List results, long count) { - this.resultsField = Objects.requireNonNull(resultsField, "[results_field] must not be null"); - this.results = Collections.unmodifiableList(results); - this.count = count; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(COUNT.getPreferredName(), count); - builder.field(resultsField.getPreferredName(), results); - builder.endObject(); - return builder; - } - - public long count() { - return count; - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/CloseJobRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/CloseJobRequest.java deleted file mode 100644 index bd55976d7debb..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/CloseJobRequest.java +++ /dev/null @@ -1,183 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.Validatable; -import org.elasticsearch.common.Strings; -import org.elasticsearch.core.TimeValue; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.security.InvalidParameterException; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; -import java.util.Objects; - -/** - * Request to close Machine Learning Jobs - */ -public class CloseJobRequest implements ToXContentObject, Validatable { - - public static final ParseField JOB_ID = new ParseField("job_id"); - public static final ParseField TIMEOUT = new ParseField("timeout"); - public static final ParseField FORCE = new ParseField("force"); - public static final ParseField ALLOW_NO_MATCH = new ParseField("allow_no_match"); - - @SuppressWarnings("unchecked") - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "close_job_request", - true, - a -> new CloseJobRequest((List) a[0]) - ); - - static { - PARSER.declareField( - ConstructingObjectParser.constructorArg(), - p -> Arrays.asList(Strings.commaDelimitedListToStringArray(p.text())), - JOB_ID, - ObjectParser.ValueType.STRING_ARRAY - ); - PARSER.declareString((obj, val) -> obj.setTimeout(TimeValue.parseTimeValue(val, TIMEOUT.getPreferredName())), TIMEOUT); - PARSER.declareBoolean(CloseJobRequest::setForce, FORCE); - PARSER.declareBoolean(CloseJobRequest::setAllowNoMatch, ALLOW_NO_MATCH); - } - - private static final String ALL_JOBS = "_all"; - - private final List jobIds; - private TimeValue timeout; - private Boolean force; - private Boolean allowNoMatch; - - /** - * Explicitly close all jobs - * - * @return a {@link CloseJobRequest} for all existing jobs - */ - public static CloseJobRequest closeAllJobsRequest() { - return new CloseJobRequest(ALL_JOBS); - } - - CloseJobRequest(List jobIds) { - if (jobIds.isEmpty()) { - throw new InvalidParameterException("jobIds must not be empty"); - } - if (jobIds.stream().anyMatch(Objects::isNull)) { - throw new NullPointerException("jobIds must not contain null values"); - } - this.jobIds = new ArrayList<>(jobIds); - } - - /** - * Close the specified Jobs via their unique jobIds - * - * @param jobIds must be non-null and non-empty and each jobId must be non-null - */ - public CloseJobRequest(String... jobIds) { - this(Arrays.asList(jobIds)); - } - - /** - * All the jobIds to be closed - */ - public List getJobIds() { - return jobIds; - } - - public TimeValue getTimeout() { - return timeout; - } - - /** - * How long to wait for the close request to complete before timing out. - * - * @param timeout Default value: 30 minutes - */ - public void setTimeout(TimeValue timeout) { - this.timeout = timeout; - } - - public Boolean getForce() { - return force; - } - - /** - * Should the closing be forced. - * - * Use to close a failed job, or to forcefully close a job which has not responded to its initial close request. - * - * @param force When {@code true} forcefully close the job. Defaults to {@code false} - */ - public void setForce(boolean force) { - this.force = force; - } - - public Boolean getAllowNoMatch() { - return this.allowNoMatch; - } - - /** - * Whether to ignore if a wildcard expression matches no jobs. 
- * - * This includes {@code _all} string or when no jobs have been specified - * - * @param allowNoMatch When {@code true} ignore if wildcard or {@code _all} matches no jobs. Defaults to {@code true} - */ - public void setAllowNoMatch(boolean allowNoMatch) { - this.allowNoMatch = allowNoMatch; - } - - @Override - public int hashCode() { - return Objects.hash(jobIds, timeout, force, allowNoMatch); - } - - @Override - public boolean equals(Object other) { - if (this == other) { - return true; - } - - if (other == null || getClass() != other.getClass()) { - return false; - } - - CloseJobRequest that = (CloseJobRequest) other; - return Objects.equals(jobIds, that.jobIds) - && Objects.equals(timeout, that.timeout) - && Objects.equals(force, that.force) - && Objects.equals(allowNoMatch, that.allowNoMatch); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(JOB_ID.getPreferredName(), Strings.collectionToCommaDelimitedString(jobIds)); - if (timeout != null) { - builder.field(TIMEOUT.getPreferredName(), timeout.getStringRep()); - } - if (force != null) { - builder.field(FORCE.getPreferredName(), force); - } - if (allowNoMatch != null) { - builder.field(ALLOW_NO_MATCH.getPreferredName(), allowNoMatch); - } - builder.endObject(); - return builder; - } - - @Override - public String toString() { - return Strings.toString(this); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/CloseJobResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/CloseJobResponse.java deleted file mode 100644 index 827cd87595ad4..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/CloseJobResponse.java +++ /dev/null @@ -1,80 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Objects; - -/** - * Response indicating if the Job(s) closed or not - */ -public class CloseJobResponse implements ToXContentObject { - - private static final ParseField CLOSED = new ParseField("closed"); - - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "close_job_response", - true, - (a) -> new CloseJobResponse((Boolean) a[0]) - ); - - static { - PARSER.declareBoolean(ConstructingObjectParser.constructorArg(), CLOSED); - } - - private boolean closed; - - public CloseJobResponse(boolean closed) { - this.closed = closed; - } - - public static CloseJobResponse fromXContent(XContentParser parser) throws IOException { - return PARSER.parse(parser, null); - } - - /** - * Has the job closed or not - * @return boolean value indicating the job closed status - */ - public boolean isClosed() { - return closed; - } - - @Override - public boolean equals(Object other) { - if (this == other) { - return true; - } - - if (other == null || getClass() != other.getClass()) { - return false; - } - - CloseJobResponse that = (CloseJobResponse) other; - return isClosed() == that.isClosed(); - } - - @Override - public int hashCode() { - return Objects.hash(isClosed()); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(CLOSED.getPreferredName(), closed); - builder.endObject(); - return builder; - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteCalendarEventRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteCalendarEventRequest.java deleted file mode 100644 index 5ade66ff71820..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteCalendarEventRequest.java +++ /dev/null @@ -1,60 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.Validatable; - -import java.util.Objects; - -/** - * Request class for removing an event from an existing calendar - */ -public class DeleteCalendarEventRequest implements Validatable { - - private final String eventId; - private final String calendarId; - - /** - * Create a new request referencing an existing Calendar and which event to remove - * from it. - * - * @param calendarId The non-null ID of the calendar - * @param eventId Scheduled Event to remove from the calendar, Cannot be null. 
- */ - public DeleteCalendarEventRequest(String calendarId, String eventId) { - this.calendarId = Objects.requireNonNull(calendarId, "[calendar_id] must not be null."); - this.eventId = Objects.requireNonNull(eventId, "[event_id] must not be null."); - } - - public String getEventId() { - return eventId; - } - - public String getCalendarId() { - return calendarId; - } - - @Override - public int hashCode() { - return Objects.hash(eventId, calendarId); - } - - @Override - public boolean equals(Object other) { - if (this == other) { - return true; - } - - if (other == null || getClass() != other.getClass()) { - return false; - } - - DeleteCalendarEventRequest that = (DeleteCalendarEventRequest) other; - return Objects.equals(eventId, that.eventId) && Objects.equals(calendarId, that.calendarId); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteCalendarJobRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteCalendarJobRequest.java deleted file mode 100644 index fff975334d95c..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteCalendarJobRequest.java +++ /dev/null @@ -1,70 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.Validatable; - -import java.security.InvalidParameterException; -import java.util.Arrays; -import java.util.List; -import java.util.Objects; - -/** - * Request class for removing Machine Learning Jobs from an existing calendar - */ -public class DeleteCalendarJobRequest implements Validatable { - - private final List jobIds; - private final String calendarId; - - /** - * Create a new request referencing an existing Calendar and which JobIds to remove - * from it. - * - * @param calendarId The non-null ID of the calendar - * @param jobIds JobIds to remove from the calendar, cannot be empty, or contain null values. - * It can be a list of jobs or groups. - */ - public DeleteCalendarJobRequest(String calendarId, String... 
jobIds) { - this.calendarId = Objects.requireNonNull(calendarId, "[calendar_id] must not be null."); - if (jobIds.length == 0) { - throw new InvalidParameterException("jobIds must not be empty."); - } - if (Arrays.stream(jobIds).anyMatch(Objects::isNull)) { - throw new NullPointerException("jobIds must not contain null values."); - } - this.jobIds = Arrays.asList(jobIds); - } - - public List getJobIds() { - return jobIds; - } - - public String getCalendarId() { - return calendarId; - } - - @Override - public int hashCode() { - return Objects.hash(jobIds, calendarId); - } - - @Override - public boolean equals(Object other) { - if (this == other) { - return true; - } - - if (other == null || getClass() != other.getClass()) { - return false; - } - - DeleteCalendarJobRequest that = (DeleteCalendarJobRequest) other; - return Objects.equals(jobIds, that.jobIds) && Objects.equals(calendarId, that.calendarId); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteCalendarRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteCalendarRequest.java deleted file mode 100644 index 8777d202529f6..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteCalendarRequest.java +++ /dev/null @@ -1,48 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ - -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.Validatable; - -import java.util.Objects; - -/** - * Request to delete a Machine Learning Calendar - */ -public class DeleteCalendarRequest implements Validatable { - - private final String calendarId; - - /** - * The constructor requires a single calendar id. - * @param calendarId The calendar to delete. Must be {@code non-null} - */ - public DeleteCalendarRequest(String calendarId) { - this.calendarId = Objects.requireNonNull(calendarId, "[calendar_id] must not be null"); - } - - public String getCalendarId() { - return calendarId; - } - - @Override - public int hashCode() { - return Objects.hash(calendarId); - } - - @Override - public boolean equals(Object obj) { - if (obj == null || getClass() != obj.getClass()) { - return false; - } - - DeleteCalendarRequest other = (DeleteCalendarRequest) obj; - return Objects.equals(calendarId, other.calendarId); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteDataFrameAnalyticsRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteDataFrameAnalyticsRequest.java deleted file mode 100644 index ec19611ec58ae..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteDataFrameAnalyticsRequest.java +++ /dev/null @@ -1,83 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ - -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.Validatable; -import org.elasticsearch.client.ValidationException; -import org.elasticsearch.core.TimeValue; - -import java.util.Objects; -import java.util.Optional; - -/** - * Request to delete a data frame analytics config - */ -public class DeleteDataFrameAnalyticsRequest implements Validatable { - - private final String id; - private Boolean force; - private TimeValue timeout; - - public DeleteDataFrameAnalyticsRequest(String id) { - this.id = id; - } - - public String getId() { - return id; - } - - public Boolean getForce() { - return force; - } - - /** - * Used to forcefully delete a job that is not stopped. - * This method is quicker than stopping and deleting the job. - * - * @param force When {@code true} forcefully delete a non-stopped job. Defaults to {@code false} - */ - public void setForce(Boolean force) { - this.force = force; - } - - public TimeValue getTimeout() { - return timeout; - } - - /** - * Sets the time to wait until the job is deleted. - * - * @param timeout The time to wait until the job is deleted. - */ - public void setTimeout(TimeValue timeout) { - this.timeout = timeout; - } - - @Override - public Optional<ValidationException> validate() { - if (id == null) { - return Optional.of(ValidationException.withError("data frame analytics id must not be null")); - } - return Optional.empty(); - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - - DeleteDataFrameAnalyticsRequest other = (DeleteDataFrameAnalyticsRequest) o; - return Objects.equals(id, other.id) && Objects.equals(force, other.force) && Objects.equals(timeout, other.timeout); - } - - @Override - public int hashCode() { - return Objects.hash(id, force, timeout); - } -}
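For reference, a minimal sketch of how the request class removed above was typically driven through the high-level REST client; the configured `client`, the config id, and the timeout value here are illustrative assumptions, not part of this patch:

import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.client.core.AcknowledgedResponse;
import org.elasticsearch.client.ml.DeleteDataFrameAnalyticsRequest;
import org.elasticsearch.core.TimeValue;

// Assumes an already-built RestHighLevelClient named `client`; the config id is hypothetical.
DeleteDataFrameAnalyticsRequest request = new DeleteDataFrameAnalyticsRequest("my-analytics-config");
request.setForce(true);                            // delete even if the analytics job is not stopped
request.setTimeout(TimeValue.timeValueMinutes(1)); // bound the wait for the deletion to complete
AcknowledgedResponse response = client.machineLearning().deleteDataFrameAnalytics(request, RequestOptions.DEFAULT);

Both setters are optional, mirroring the nullable `force` and `timeout` fields in the class above.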
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteDatafeedRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteDatafeedRequest.java deleted file mode 100644 index 4ed729e9fd079..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteDatafeedRequest.java +++ /dev/null @@ -1,63 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.Validatable; - -import java.util.Objects; - -/** - * Request to delete a Machine Learning Datafeed via its ID - */ -public class DeleteDatafeedRequest implements Validatable { - - private String datafeedId; - private Boolean force; - - public DeleteDatafeedRequest(String datafeedId) { - this.datafeedId = Objects.requireNonNull(datafeedId, "[datafeed_id] must not be null"); - } - - public String getDatafeedId() { - return datafeedId; - } - - public Boolean getForce() { - return force; - } - - /** - * Used to forcefully delete a started datafeed. - * This method is quicker than stopping and deleting the datafeed. - * - * @param force When {@code true} forcefully delete a started datafeed. Defaults to {@code false} - */ - public void setForce(Boolean force) { - this.force = force; - } - - @Override - public int hashCode() { - return Objects.hash(datafeedId, force); - } - - @Override - public boolean equals(Object obj) { - if (this == obj) { - return true; - } - - if (obj == null || obj.getClass() != getClass()) { - return false; - } - - DeleteDatafeedRequest other = (DeleteDatafeedRequest) obj; - return Objects.equals(datafeedId, other.datafeedId) && Objects.equals(force, other.force); - } - -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteExpiredDataRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteExpiredDataRequest.java deleted file mode 100644 index 66edbcfe8e961..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteExpiredDataRequest.java +++ /dev/null @@ -1,104 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.Validatable; -import org.elasticsearch.core.TimeValue; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Objects; - -/** - * Request to delete expired model snapshots and forecasts - */ -public class DeleteExpiredDataRequest implements Validatable, ToXContentObject { - - static final String REQUESTS_PER_SECOND = "requests_per_second"; - static final String TIMEOUT = "timeout"; - static final String JOB_ID = "job_id"; - - private final String jobId; - private final Float requestsPerSecond; - private final TimeValue timeout; - - /** - * Create a new request to delete expired data - */ - public DeleteExpiredDataRequest() { - this(null, null, null); - } - - public DeleteExpiredDataRequest(String jobId, Float requestsPerSecond, TimeValue timeout) { - this.jobId = jobId; - this.requestsPerSecond = requestsPerSecond; - this.timeout = timeout; - } - - /** - * The requests allowed per second in the underlying Delete by Query requests executed. - * - * `-1.0f` indicates that the standard nightly cleanup behavior should be run. - * Throttling scales according to the number of data nodes. - * `null` is default and means no throttling will occur. - */ - public Float getRequestsPerSecond() { - return requestsPerSecond; - } - - /** - * Indicates how long the deletion request will run until it times out. - * - * Default value is 8 hours. - */ - public TimeValue getTimeout() { - return timeout; - } - - /** - * The optional job id - * - * The default is `null` meaning all jobs.
- * @return The job id or null - */ - public String getJobId() { - return jobId; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - DeleteExpiredDataRequest that = (DeleteExpiredDataRequest) o; - return Objects.equals(requestsPerSecond, that.requestsPerSecond) - && Objects.equals(timeout, that.timeout) - && Objects.equals(jobId, that.jobId); - } - - @Override - public int hashCode() { - return Objects.hash(requestsPerSecond, timeout, jobId); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - if (jobId != null) { - builder.field(JOB_ID, jobId); - } - if (requestsPerSecond != null) { - builder.field(REQUESTS_PER_SECOND, requestsPerSecond); - } - if (timeout != null) { - builder.field(TIMEOUT, timeout.getStringRep()); - } - builder.endObject(); - return builder; - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteExpiredDataResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteExpiredDataResponse.java deleted file mode 100644 index 18cd260698198..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteExpiredDataResponse.java +++ /dev/null @@ -1,77 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContent; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Objects; - -/** - * A response acknowledging the deletion of expired data - */ -public class DeleteExpiredDataResponse implements ToXContentObject { - - private static final ParseField DELETED = new ParseField("deleted"); - - public DeleteExpiredDataResponse(boolean deleted) { - this.deleted = deleted; - } - - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "delete_expired_data_response", - true, - a -> new DeleteExpiredDataResponse((Boolean) a[0]) - ); - - static { - PARSER.declareBoolean(ConstructingObjectParser.constructorArg(), DELETED); - } - - public static DeleteExpiredDataResponse fromXContent(XContentParser parser) throws IOException { - return PARSER.parse(parser, null); - } - - private final Boolean deleted; - - public Boolean getDeleted() { - return deleted; - } - - @Override - public int hashCode() { - return Objects.hash(deleted); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { - builder.startObject(); - if (deleted != null) { - builder.field(DELETED.getPreferredName(), deleted); - } - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object obj) { - if (obj == null) { - return false; - } - if (getClass() != obj.getClass()) { - return false; - } - DeleteExpiredDataResponse response = (DeleteExpiredDataResponse) obj; - return 
Objects.equals(deleted, response.deleted); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteFilterRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteFilterRequest.java deleted file mode 100644 index a98ad85c775e0..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteFilterRequest.java +++ /dev/null @@ -1,49 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.Validatable; - -import java.util.Objects; - -/** - * A request to delete a machine learning filter - */ -public class DeleteFilterRequest implements Validatable { - - private final String filterId; - - public DeleteFilterRequest(String filterId) { - this.filterId = Objects.requireNonNull(filterId, "[filter_id] is required"); - } - - public String getId() { - return filterId; - } - - @Override - public int hashCode() { - return Objects.hash(filterId); - } - - @Override - public boolean equals(Object obj) { - if (this == obj) { - return true; - } - if (obj == null) { - return false; - } - if (getClass() != obj.getClass()) { - return false; - } - final DeleteFilterRequest other = (DeleteFilterRequest) obj; - - return Objects.equals(filterId, other.filterId); - } -}
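Likewise, a hedged sketch of how the filter-deletion request above was used; the filter id and the configured `client` are assumptions for illustration:

import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.core.AcknowledgedResponse;
import org.elasticsearch.client.ml.DeleteFilterRequest;

// `client` is an already-built RestHighLevelClient; "safe_domains" is a made-up filter id.
DeleteFilterRequest request = new DeleteFilterRequest("safe_domains");
AcknowledgedResponse response = client.machineLearning().deleteFilter(request, RequestOptions.DEFAULT);
boolean removed = response.isAcknowledged(); // true once the filter is gone

Note that the constructor rejects a null filter id up front, so misuse fails before any network call is made.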
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteForecastRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteForecastRequest.java deleted file mode 100644 index 11a49bf3aa270..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteForecastRequest.java +++ /dev/null @@ -1,167 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.Validatable; -import org.elasticsearch.client.ml.job.config.Job; -import org.elasticsearch.common.Strings; -import org.elasticsearch.core.TimeValue; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; -import java.util.Objects; - -/** - * POJO for a delete forecast request - */ -public class DeleteForecastRequest implements Validatable, ToXContentObject { - - public static final ParseField FORECAST_ID = new ParseField("forecast_id"); - public static final ParseField ALLOW_NO_FORECASTS = new ParseField("allow_no_forecasts"); - public static final ParseField TIMEOUT = new ParseField("timeout"); - public static final String ALL = "_all"; - - public static final ConstructingObjectParser<DeleteForecastRequest, Void> PARSER = new ConstructingObjectParser<>( - "delete_forecast_request", - (a) -> new DeleteForecastRequest((String) a[0]) - ); - - static { - PARSER.declareString(ConstructingObjectParser.constructorArg(), Job.ID); - PARSER.declareStringOrNull((c, p) -> c.setForecastIds(Strings.commaDelimitedListToStringArray(p)), FORECAST_ID); - PARSER.declareBoolean(DeleteForecastRequest::setAllowNoForecasts, ALLOW_NO_FORECASTS); - PARSER.declareString(DeleteForecastRequest::timeout, TIMEOUT); - } - - /** - * Create a new {@link DeleteForecastRequest} that explicitly deletes all forecasts - * - * @param jobId the jobId of the Job whose forecasts to delete - */ - public static DeleteForecastRequest deleteAllForecasts(String jobId) { - DeleteForecastRequest request = new DeleteForecastRequest(jobId); - request.setForecastIds(ALL); - return request; - } - - private final String jobId; - private List<String> forecastIds = new ArrayList<>(); - private Boolean allowNoForecasts; - private TimeValue timeout; - - /** - * Create a new DeleteForecastRequest for the given Job ID - * - * @param jobId the jobId of the Job whose forecast(s) to delete - */ - public DeleteForecastRequest(String jobId) { - this.jobId = Objects.requireNonNull(jobId, Job.ID.getPreferredName()); - } - - public String getJobId() { - return jobId; - } - - public List<String> getForecastIds() { - return forecastIds; - } - - /** - * The forecast IDs to delete. Can also be {@link DeleteForecastRequest#ALL} to explicitly delete ALL forecasts - * - * @param forecastIds forecast IDs to delete - */ - public void setForecastIds(String... forecastIds) { - setForecastIds(Arrays.asList(forecastIds)); - } - - void setForecastIds(List<String> forecastIds) { - if (forecastIds.stream().anyMatch(Objects::isNull)) { - throw new NullPointerException("forecastIds must not contain null values"); - } - this.forecastIds = new ArrayList<>(forecastIds); - } - - public Boolean getAllowNoForecasts() { - return allowNoForecasts; - } - - /** - * Sets the value of "allow_no_forecasts". - * - * @param allowNoForecasts when {@code true} no error is thrown when {@link DeleteForecastRequest#ALL} does not find any forecasts - */ - public void setAllowNoForecasts(boolean allowNoForecasts) { - this.allowNoForecasts = allowNoForecasts; - } - - /** - * Allows to set the timeout - * @param timeout timeout as a string (e.g.
1s) - */ - public void timeout(String timeout) { - this.timeout = TimeValue.parseTimeValue(timeout, this.timeout, getClass().getSimpleName() + ".timeout"); - } - - /** - * Allows to set the timeout - * @param timeout timeout as a {@link TimeValue} - */ - public void timeout(TimeValue timeout) { - this.timeout = timeout; - } - - public TimeValue timeout() { - return timeout; - } - - @Override - public boolean equals(Object other) { - if (this == other) { - return true; - } - - if (other == null || getClass() != other.getClass()) { - return false; - } - - DeleteForecastRequest that = (DeleteForecastRequest) other; - return Objects.equals(jobId, that.jobId) - && Objects.equals(forecastIds, that.forecastIds) - && Objects.equals(allowNoForecasts, that.allowNoForecasts) - && Objects.equals(timeout, that.timeout); - } - - @Override - public int hashCode() { - return Objects.hash(jobId, forecastIds, allowNoForecasts, timeout); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(Job.ID.getPreferredName(), jobId); - if (forecastIds != null) { - builder.field(FORECAST_ID.getPreferredName(), Strings.collectionToCommaDelimitedString(forecastIds)); - } - if (allowNoForecasts != null) { - builder.field(ALLOW_NO_FORECASTS.getPreferredName(), allowNoForecasts); - } - if (timeout != null) { - builder.field(TIMEOUT.getPreferredName(), timeout.getStringRep()); - } - builder.endObject(); - return builder; - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteJobRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteJobRequest.java deleted file mode 100644 index dc4c0cd4d6c86..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteJobRequest.java +++ /dev/null @@ -1,86 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.Validatable; - -import java.util.Objects; - -/** - * Request to delete a Machine Learning Job via its ID - */ -public class DeleteJobRequest implements Validatable { - - private String jobId; - private Boolean force; - private Boolean waitForCompletion; - - public DeleteJobRequest(String jobId) { - this.jobId = Objects.requireNonNull(jobId, "[job_id] must not be null"); - } - - public String getJobId() { - return jobId; - } - - /** - * The jobId which to delete - * @param jobId unique jobId to delete, must not be null - */ - public void setJobId(String jobId) { - this.jobId = Objects.requireNonNull(jobId, "[job_id] must not be null"); - } - - public Boolean getForce() { - return force; - } - - /** - * Used to forcefully delete an opened job. - * This method is quicker than closing and deleting the job. - * - * @param force When {@code true} forcefully delete an opened job. 
Defaults to {@code false} - */ - public void setForce(Boolean force) { - this.force = force; - } - - public Boolean getWaitForCompletion() { - return waitForCompletion; - } - - /** - * Set whether this request should wait until the operation has completed before returning - * @param waitForCompletion When {@code true} the call will wait for the job deletion to complete. - * Otherwise, the deletion will be executed asynchronously and the response - * will contain the task id. - */ - public void setWaitForCompletion(Boolean waitForCompletion) { - this.waitForCompletion = waitForCompletion; - } - - @Override - public int hashCode() { - return Objects.hash(jobId, force); - } - - @Override - public boolean equals(Object obj) { - if (this == obj) { - return true; - } - - if (obj == null || obj.getClass() != getClass()) { - return false; - } - - DeleteJobRequest other = (DeleteJobRequest) obj; - return Objects.equals(jobId, other.jobId) && Objects.equals(force, other.force); - } - -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteJobResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteJobResponse.java deleted file mode 100644 index ad843da43a357..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteJobResponse.java +++ /dev/null @@ -1,104 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.core.Nullable; -import org.elasticsearch.tasks.TaskId; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Objects; - -/** - * Response object that contains the acknowledgement or the task id - * depending on whether the delete job action was requested to wait for completion. 
- */ -public class DeleteJobResponse implements ToXContentObject { - - private static final ParseField ACKNOWLEDGED = new ParseField("acknowledged"); - private static final ParseField TASK = new ParseField("task"); - - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "delete_job_response", - true, - a -> new DeleteJobResponse((Boolean) a[0], (TaskId) a[1]) - ); - - static { - PARSER.declareBoolean(ConstructingObjectParser.optionalConstructorArg(), ACKNOWLEDGED); - PARSER.declareField(ConstructingObjectParser.optionalConstructorArg(), TaskId.parser(), TASK, ObjectParser.ValueType.STRING); - } - - public static DeleteJobResponse fromXContent(XContentParser parser) throws IOException { - return PARSER.parse(parser, null); - } - - private final Boolean acknowledged; - private final TaskId task; - - DeleteJobResponse(@Nullable Boolean acknowledged, @Nullable TaskId task) { - assert acknowledged != null || task != null; - this.acknowledged = acknowledged; - this.task = task; - } - - /** - * Get the action acknowledgement - * @return {@code null} when the request had {@link DeleteJobRequest#getWaitForCompletion()} set to {@code false} or - * otherwise a {@code boolean} that indicates whether the job was deleted successfully. - */ - public Boolean getAcknowledged() { - return acknowledged; - } - - /** - * Get the task id - * @return {@code null} when the request had {@link DeleteJobRequest#getWaitForCompletion()} set to {@code true} or - * otherwise the id of the job deletion task. - */ - public TaskId getTask() { - return task; - } - - @Override - public int hashCode() { - return Objects.hash(acknowledged, task); - } - - @Override - public boolean equals(Object other) { - if (this == other) { - return true; - } - - if (other == null || getClass() != other.getClass()) { - return false; - } - - DeleteJobResponse that = (DeleteJobResponse) other; - return Objects.equals(acknowledged, that.acknowledged) && Objects.equals(task, that.task); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - if (acknowledged != null) { - builder.field(ACKNOWLEDGED.getPreferredName(), acknowledged); - } - if (task != null) { - builder.field(TASK.getPreferredName(), task.toString()); - } - builder.endObject(); - return builder; - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteModelSnapshotRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteModelSnapshotRequest.java deleted file mode 100644 index 75dda5d47eade..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteModelSnapshotRequest.java +++ /dev/null @@ -1,56 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.Validatable; -import org.elasticsearch.client.ml.job.config.Job; -import org.elasticsearch.client.ml.job.process.ModelSnapshot; - -import java.util.Objects; - -/** - * Request to delete a Machine Learning Model Snapshot Job via its Job and Snapshot IDs - */ -public class DeleteModelSnapshotRequest implements Validatable { - - private final String jobId; - private final String snapshotId; - - public DeleteModelSnapshotRequest(String jobId, String snapshotId) { - this.jobId = Objects.requireNonNull(jobId, "[" + Job.ID + "] must not be null"); - this.snapshotId = Objects.requireNonNull(snapshotId, "[" + ModelSnapshot.SNAPSHOT_ID + "] must not be null"); - } - - public String getJobId() { - return jobId; - } - - public String getSnapshotId() { - return snapshotId; - } - - @Override - public int hashCode() { - return Objects.hash(jobId, snapshotId); - } - - @Override - public boolean equals(Object obj) { - if (this == obj) { - return true; - } - - if (obj == null || obj.getClass() != getClass()) { - return false; - } - - DeleteModelSnapshotRequest other = (DeleteModelSnapshotRequest) obj; - return Objects.equals(jobId, other.jobId) && Objects.equals(snapshotId, other.snapshotId); - } - -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteTrainedModelAliasRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteTrainedModelAliasRequest.java deleted file mode 100644 index aa91a01a0d775..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteTrainedModelAliasRequest.java +++ /dev/null @@ -1,46 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ - -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.Validatable; - -import java.util.Objects; - -public class DeleteTrainedModelAliasRequest implements Validatable { - - private final String modelAlias; - private final String modelId; - - public DeleteTrainedModelAliasRequest(String modelAlias, String modelId) { - this.modelAlias = Objects.requireNonNull(modelAlias); - this.modelId = Objects.requireNonNull(modelId); - } - - public String getModelAlias() { - return modelAlias; - } - - public String getModelId() { - return modelId; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - DeleteTrainedModelAliasRequest request = (DeleteTrainedModelAliasRequest) o; - return Objects.equals(modelAlias, request.modelAlias) && Objects.equals(modelId, request.modelId); - } - - @Override - public int hashCode() { - return Objects.hash(modelAlias, modelId); - } - -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteTrainedModelRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteTrainedModelRequest.java deleted file mode 100644 index a964c8f0ddd63..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/DeleteTrainedModelRequest.java +++ /dev/null @@ -1,53 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. 
under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ - -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.Validatable; -import org.elasticsearch.client.ValidationException; - -import java.util.Objects; -import java.util.Optional; - -/** - * Request to delete a trained model - */ -public class DeleteTrainedModelRequest implements Validatable { - - private final String id; - - public DeleteTrainedModelRequest(String id) { - this.id = id; - } - - public String getId() { - return id; - } - - @Override - public Optional<ValidationException> validate() { - if (id == null) { - return Optional.of(ValidationException.withError("trained model id must not be null")); - } - return Optional.empty(); - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - - DeleteTrainedModelRequest other = (DeleteTrainedModelRequest) o; - return Objects.equals(id, other.id); - } - - @Override - public int hashCode() { - return Objects.hash(id); - } -}
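As with the other removals, a brief usage sketch; the model id and `client` instance are assumed for illustration:

import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.core.AcknowledgedResponse;
import org.elasticsearch.client.ml.DeleteTrainedModelRequest;

// `client` is an already-built RestHighLevelClient; the model id is hypothetical.
DeleteTrainedModelRequest request = new DeleteTrainedModelRequest("my-trained-model");
AcknowledgedResponse response = client.machineLearning().deleteTrainedModel(request, RequestOptions.DEFAULT);

Unlike the constructors above that require a non-null id eagerly, this class defers the null check to its validate() method.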
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/EstimateModelMemoryRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/EstimateModelMemoryRequest.java deleted file mode 100644 index 2a2a43fb2441e..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/EstimateModelMemoryRequest.java +++ /dev/null @@ -1,99 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ - -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.Validatable; -import org.elasticsearch.client.ValidationException; -import org.elasticsearch.client.ml.job.config.AnalysisConfig; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Collections; -import java.util.Map; -import java.util.Objects; -import java.util.Optional; - -/** - * Request to estimate the model memory an analysis config is likely to need given supplied field cardinalities. - */ -public class EstimateModelMemoryRequest implements Validatable, ToXContentObject { - - public static final String ANALYSIS_CONFIG = "analysis_config"; - public static final String OVERALL_CARDINALITY = "overall_cardinality"; - public static final String MAX_BUCKET_CARDINALITY = "max_bucket_cardinality"; - - private final AnalysisConfig analysisConfig; - private Map<String, Long> overallCardinality = Collections.emptyMap(); - private Map<String, Long> maxBucketCardinality = Collections.emptyMap(); - - @Override - public Optional<ValidationException> validate() { - return Optional.empty(); - } - - public EstimateModelMemoryRequest(AnalysisConfig analysisConfig) { - this.analysisConfig = Objects.requireNonNull(analysisConfig); - } - - public AnalysisConfig getAnalysisConfig() { - return analysisConfig; - } - - public Map<String, Long> getOverallCardinality() { - return overallCardinality; - } - - public void setOverallCardinality(Map<String, Long> overallCardinality) { - this.overallCardinality = Collections.unmodifiableMap(overallCardinality); - } - - public Map<String, Long> getMaxBucketCardinality() { - return maxBucketCardinality; - } - - public void setMaxBucketCardinality(Map<String, Long> maxBucketCardinality) { - this.maxBucketCardinality = Collections.unmodifiableMap(maxBucketCardinality); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(ANALYSIS_CONFIG, analysisConfig); - if (overallCardinality.isEmpty() == false) { - builder.field(OVERALL_CARDINALITY, overallCardinality); - } - if (maxBucketCardinality.isEmpty() == false) { - builder.field(MAX_BUCKET_CARDINALITY, maxBucketCardinality); - } - builder.endObject(); - return builder; - } - - @Override - public int hashCode() { - return Objects.hash(analysisConfig, overallCardinality, maxBucketCardinality); - } - - @Override - public boolean equals(Object other) { - if (this == other) { - return true; - } - - if (other == null || getClass() != other.getClass()) { - return false; - } - - EstimateModelMemoryRequest that = (EstimateModelMemoryRequest) other; - return Objects.equals(analysisConfig, that.analysisConfig) - && Objects.equals(overallCardinality, that.overallCardinality) - && Objects.equals(maxBucketCardinality, that.maxBucketCardinality); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/EstimateModelMemoryResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/EstimateModelMemoryResponse.java deleted file mode 100644 index 806a76fe7eb63..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/EstimateModelMemoryResponse.java +++ /dev/null @@ -1,69 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1.
- */ - -package org.elasticsearch.client.ml; - -import org.elasticsearch.common.unit.ByteSizeValue; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.XContentParser; - -import java.util.Objects; - -import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; - -public class EstimateModelMemoryResponse { - - public static final ParseField MODEL_MEMORY_ESTIMATE = new ParseField("model_memory_estimate"); - - static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "estimate_model_memory", - true, - args -> new EstimateModelMemoryResponse((String) args[0]) - ); - - static { - PARSER.declareString(constructorArg(), MODEL_MEMORY_ESTIMATE); - } - - public static EstimateModelMemoryResponse fromXContent(final XContentParser parser) { - return PARSER.apply(parser, null); - } - - private final ByteSizeValue modelMemoryEstimate; - - public EstimateModelMemoryResponse(String modelMemoryEstimate) { - this.modelMemoryEstimate = ByteSizeValue.parseBytesSizeValue(modelMemoryEstimate, MODEL_MEMORY_ESTIMATE.getPreferredName()); - } - - /** - * @return An estimate of the model memory the supplied analysis config is likely to need given the supplied field cardinalities. - */ - public ByteSizeValue getModelMemoryEstimate() { - return modelMemoryEstimate; - } - - @Override - public boolean equals(Object o) { - - if (this == o) { - return true; - } - if (o == null || getClass() != o.getClass()) { - return false; - } - - EstimateModelMemoryResponse other = (EstimateModelMemoryResponse) o; - return Objects.equals(this.modelMemoryEstimate, other.modelMemoryEstimate); - } - - @Override - public int hashCode() { - return Objects.hash(modelMemoryEstimate); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/EvaluateDataFrameRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/EvaluateDataFrameRequest.java deleted file mode 100644 index 5bad5d73a8892..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/EvaluateDataFrameRequest.java +++ /dev/null @@ -1,142 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ - -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.Validatable; -import org.elasticsearch.client.ValidationException; -import org.elasticsearch.client.ml.dataframe.QueryConfig; -import org.elasticsearch.client.ml.dataframe.evaluation.Evaluation; -import org.elasticsearch.core.Nullable; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collections; -import java.util.List; -import java.util.Objects; -import java.util.Optional; - -import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken; -import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; -import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; - -public class EvaluateDataFrameRequest implements ToXContentObject, Validatable { - - private static final ParseField INDEX = new ParseField("index"); - private static final ParseField QUERY = new ParseField("query"); - private static final ParseField EVALUATION = new ParseField("evaluation"); - - @SuppressWarnings("unchecked") - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "evaluate_data_frame_request", - true, - args -> new EvaluateDataFrameRequest((List) args[0], (QueryConfig) args[1], (Evaluation) args[2]) - ); - - static { - PARSER.declareStringArray(constructorArg(), INDEX); - PARSER.declareObject(optionalConstructorArg(), (p, c) -> QueryConfig.fromXContent(p), QUERY); - PARSER.declareObject(constructorArg(), (p, c) -> parseEvaluation(p), EVALUATION); - } - - private static Evaluation parseEvaluation(XContentParser parser) throws IOException { - ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.currentToken(), parser); - ensureExpectedToken(XContentParser.Token.FIELD_NAME, parser.nextToken(), parser); - Evaluation evaluation = parser.namedObject(Evaluation.class, parser.currentName(), null); - ensureExpectedToken(XContentParser.Token.END_OBJECT, parser.nextToken(), parser); - return evaluation; - } - - public static EvaluateDataFrameRequest fromXContent(XContentParser parser) { - return PARSER.apply(parser, null); - } - - private List indices; - private QueryConfig queryConfig; - private Evaluation evaluation; - - public EvaluateDataFrameRequest(String index, @Nullable QueryConfig queryConfig, Evaluation evaluation) { - this(Arrays.asList(index), queryConfig, evaluation); - } - - public EvaluateDataFrameRequest(List indices, @Nullable QueryConfig queryConfig, Evaluation evaluation) { - setIndices(indices); - setQueryConfig(queryConfig); - setEvaluation(evaluation); - } - - public List getIndices() { - return Collections.unmodifiableList(indices); - } - - public final void setIndices(List indices) { - Objects.requireNonNull(indices); - this.indices = new ArrayList<>(indices); - } - - public QueryConfig getQueryConfig() { - return queryConfig; - } - - public final void setQueryConfig(QueryConfig queryConfig) { - this.queryConfig = queryConfig; - } - - public Evaluation getEvaluation() { - return evaluation; - } - - public final void setEvaluation(Evaluation evaluation) { - this.evaluation = evaluation; - } - - @Override - public Optional validate() { - List errors = new ArrayList<>(); - if 
(indices.isEmpty()) { - errors.add("At least one index must be specified"); - } - if (evaluation == null) { - errors.add("evaluation must not be null"); - } - return errors.isEmpty() ? Optional.empty() : Optional.of(ValidationException.withErrors(errors)); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.stringListField(INDEX.getPreferredName(), indices); - if (queryConfig != null) { - builder.field(QUERY.getPreferredName(), queryConfig.getQuery()); - } - builder.startObject(EVALUATION.getPreferredName()).field(evaluation.getName(), evaluation).endObject(); - builder.endObject(); - return builder; - } - - @Override - public int hashCode() { - return Objects.hash(indices, queryConfig, evaluation); - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - EvaluateDataFrameRequest that = (EvaluateDataFrameRequest) o; - return Objects.equals(indices, that.indices) - && Objects.equals(queryConfig, that.queryConfig) - && Objects.equals(evaluation, that.evaluation); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/EvaluateDataFrameResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/EvaluateDataFrameResponse.java deleted file mode 100644 index 82213974297ab..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/EvaluateDataFrameResponse.java +++ /dev/null @@ -1,103 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ - -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.ml.dataframe.evaluation.EvaluationMetric; -import org.elasticsearch.common.Strings; -import org.elasticsearch.xcontent.NamedObjectNotFoundException; -import org.elasticsearch.xcontent.ToXContent; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.LinkedHashMap; -import java.util.List; -import java.util.Map; -import java.util.Objects; -import java.util.stream.Collectors; - -import static org.elasticsearch.client.ml.dataframe.evaluation.MlEvaluationNamedXContentProvider.registeredMetricName; -import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken; - -public class EvaluateDataFrameResponse implements ToXContentObject { - - public static EvaluateDataFrameResponse fromXContent(XContentParser parser) throws IOException { - if (parser.currentToken() == null) { - parser.nextToken(); - } - ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.currentToken(), parser); - ensureExpectedToken(XContentParser.Token.FIELD_NAME, parser.nextToken(), parser); - String evaluationName = parser.currentName(); - parser.nextToken(); - Map metrics = parser.map(LinkedHashMap::new, p -> parseMetric(evaluationName, p)); - List knownMetrics = metrics.values() - .stream() - .filter(Objects::nonNull) // Filter out null values returned by {@link EvaluateDataFrameResponse::parseMetric}. 
- .collect(Collectors.toList()); - ensureExpectedToken(XContentParser.Token.END_OBJECT, parser.nextToken(), parser); - return new EvaluateDataFrameResponse(evaluationName, knownMetrics); - } - - private static EvaluationMetric.Result parseMetric(String evaluationName, XContentParser parser) throws IOException { - String metricName = parser.currentName(); - try { - return parser.namedObject(EvaluationMetric.Result.class, registeredMetricName(evaluationName, metricName), null); - } catch (NamedObjectNotFoundException e) { - parser.skipChildren(); - // Metric name not recognized. Return {@code null} value here and filter it out later. - return null; - } - } - - private final String evaluationName; - private final Map metrics; - - public EvaluateDataFrameResponse(String evaluationName, List metrics) { - this.evaluationName = Objects.requireNonNull(evaluationName); - this.metrics = Objects.requireNonNull(metrics).stream().collect(Collectors.toUnmodifiableMap(m -> m.getMetricName(), m -> m)); - } - - public String getEvaluationName() { - return evaluationName; - } - - public List getMetrics() { - return metrics.values().stream().collect(Collectors.toList()); - } - - @SuppressWarnings("unchecked") - public T getMetricByName(String metricName) { - Objects.requireNonNull(metricName); - return (T) metrics.get(metricName); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { - return builder.startObject().field(evaluationName, metrics).endObject(); - } - - @Override - public boolean equals(Object o) { - if (o == this) return true; - if (o == null || getClass() != o.getClass()) return false; - EvaluateDataFrameResponse that = (EvaluateDataFrameResponse) o; - return Objects.equals(evaluationName, that.evaluationName) && Objects.equals(metrics, that.metrics); - } - - @Override - public int hashCode() { - return Objects.hash(evaluationName, metrics); - } - - @Override - public final String toString() { - return Strings.toString(this); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/ExplainDataFrameAnalyticsRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/ExplainDataFrameAnalyticsRequest.java deleted file mode 100644 index b9df8faacdda8..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/ExplainDataFrameAnalyticsRequest.java +++ /dev/null @@ -1,61 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.Validatable; -import org.elasticsearch.client.ml.dataframe.DataFrameAnalyticsConfig; -import org.elasticsearch.core.Nullable; - -import java.util.Objects; - -/** - * Request to explain the following about a data frame analytics job: - *

- * <ul> - *     <li>field selection: which fields are included or are not in the analysis</li> - *     <li>memory estimation: how much memory the job is estimated to require</li> - * </ul>
- */ -public class ExplainDataFrameAnalyticsRequest implements Validatable { - - private final String id; - private final DataFrameAnalyticsConfig config; - - public ExplainDataFrameAnalyticsRequest(String id) { - this.id = Objects.requireNonNull(id); - this.config = null; - } - - public ExplainDataFrameAnalyticsRequest(DataFrameAnalyticsConfig config) { - this.id = null; - this.config = Objects.requireNonNull(config); - } - - @Nullable - public String getId() { - return id; - } - - @Nullable - public DataFrameAnalyticsConfig getConfig() { - return config; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - - ExplainDataFrameAnalyticsRequest other = (ExplainDataFrameAnalyticsRequest) o; - return Objects.equals(id, other.id) && Objects.equals(config, other.config); - } - - @Override - public int hashCode() { - return Objects.hash(id, config); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/ExplainDataFrameAnalyticsResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/ExplainDataFrameAnalyticsResponse.java deleted file mode 100644 index d9e15f8ff7031..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/ExplainDataFrameAnalyticsResponse.java +++ /dev/null @@ -1,83 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.ml.dataframe.explain.FieldSelection; -import org.elasticsearch.client.ml.dataframe.explain.MemoryEstimation; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.List; -import java.util.Objects; - -public class ExplainDataFrameAnalyticsResponse implements ToXContentObject { - - public static final ParseField TYPE = new ParseField("explain_data_frame_analytics_response"); - - public static final ParseField FIELD_SELECTION = new ParseField("field_selection"); - public static final ParseField MEMORY_ESTIMATION = new ParseField("memory_estimation"); - - public static ExplainDataFrameAnalyticsResponse fromXContent(XContentParser parser) throws IOException { - return PARSER.parse(parser, null); - } - - @SuppressWarnings("unchecked") - static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - TYPE.getPreferredName(), - true, - args -> new ExplainDataFrameAnalyticsResponse((List) args[0], (MemoryEstimation) args[1]) - ); - - static { - PARSER.declareObjectArray(ConstructingObjectParser.constructorArg(), FieldSelection.PARSER, FIELD_SELECTION); - PARSER.declareObject(ConstructingObjectParser.constructorArg(), MemoryEstimation.PARSER, MEMORY_ESTIMATION); - } - - private final List fieldSelection; - private final MemoryEstimation memoryEstimation; - - public ExplainDataFrameAnalyticsResponse(List fieldSelection, MemoryEstimation memoryEstimation) { - this.fieldSelection = Objects.requireNonNull(fieldSelection); - this.memoryEstimation = 
Objects.requireNonNull(memoryEstimation); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(FIELD_SELECTION.getPreferredName(), fieldSelection); - builder.field(MEMORY_ESTIMATION.getPreferredName(), memoryEstimation); - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object other) { - if (this == other) return true; - if (other == null || getClass() != other.getClass()) return false; - - ExplainDataFrameAnalyticsResponse that = (ExplainDataFrameAnalyticsResponse) other; - return Objects.equals(fieldSelection, that.fieldSelection) && Objects.equals(memoryEstimation, that.memoryEstimation); - } - - @Override - public int hashCode() { - return Objects.hash(fieldSelection, memoryEstimation); - } - - public MemoryEstimation getMemoryEstimation() { - return memoryEstimation; - } - - public List getFieldSelection() { - return fieldSelection; - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/FlushJobRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/FlushJobRequest.java deleted file mode 100644 index a19f787a6458f..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/FlushJobRequest.java +++ /dev/null @@ -1,181 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.Validatable; -import org.elasticsearch.client.ml.job.config.Job; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Objects; - -/** - * Request object to flush a given Machine Learning job. 
- */ -public class FlushJobRequest implements Validatable, ToXContentObject { - - public static final ParseField CALC_INTERIM = new ParseField("calc_interim"); - public static final ParseField START = new ParseField("start"); - public static final ParseField END = new ParseField("end"); - public static final ParseField ADVANCE_TIME = new ParseField("advance_time"); - public static final ParseField SKIP_TIME = new ParseField("skip_time"); - - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "flush_job_request", - (a) -> new FlushJobRequest((String) a[0]) - ); - - static { - PARSER.declareString(ConstructingObjectParser.constructorArg(), Job.ID); - PARSER.declareBoolean(FlushJobRequest::setCalcInterim, CALC_INTERIM); - PARSER.declareString(FlushJobRequest::setStart, START); - PARSER.declareString(FlushJobRequest::setEnd, END); - PARSER.declareString(FlushJobRequest::setAdvanceTime, ADVANCE_TIME); - PARSER.declareString(FlushJobRequest::setSkipTime, SKIP_TIME); - } - - private final String jobId; - private Boolean calcInterim; - private String start; - private String end; - private String advanceTime; - private String skipTime; - - /** - * Create new Flush job request - * - * @param jobId The job ID of the job to flush - */ - public FlushJobRequest(String jobId) { - this.jobId = jobId; - } - - public String getJobId() { - return jobId; - } - - public boolean getCalcInterim() { - return calcInterim; - } - - /** - * When {@code true} calculates the interim results for the most recent bucket or all buckets within the latency period. - * - * @param calcInterim defaults to {@code false}. - */ - public void setCalcInterim(boolean calcInterim) { - this.calcInterim = calcInterim; - } - - public String getStart() { - return start; - } - - /** - * When used in conjunction with {@link FlushJobRequest#calcInterim}, - * specifies the start of the range of buckets on which to calculate interim results. - * - * @param start the beginning of the range of buckets; may be an epoch seconds, epoch millis or an ISO string - */ - public void setStart(String start) { - this.start = start; - } - - public String getEnd() { - return end; - } - - /** - * When used in conjunction with {@link FlushJobRequest#calcInterim}, specifies the end of the range - * of buckets on which to calculate interim results - * - * @param end the end of the range of buckets; may be an epoch seconds, epoch millis or an ISO string - */ - public void setEnd(String end) { - this.end = end; - } - - public String getAdvanceTime() { - return advanceTime; - } - - /** - * Specifies to advance to a particular time value. - * Results are generated and the model is updated for data from the specified time interval. - * - * @param advanceTime String representation of a timestamp; may be an epoch seconds, epoch millis or an ISO string - */ - public void setAdvanceTime(String advanceTime) { - this.advanceTime = advanceTime; - } - - public String getSkipTime() { - return skipTime; - } - - /** - * Specifies to skip to a particular time value. - * Results are not generated and the model is not updated for data from the specified time interval. 
- * - * @param skipTime String representation of a timestamp; may be an epoch seconds, epoch millis or an ISO string - */ - public void setSkipTime(String skipTime) { - this.skipTime = skipTime; - } - - @Override - public int hashCode() { - return Objects.hash(jobId, calcInterim, start, end, advanceTime, skipTime); - } - - @Override - public boolean equals(Object obj) { - if (this == obj) { - return true; - } - - if (obj == null || getClass() != obj.getClass()) { - return false; - } - - FlushJobRequest other = (FlushJobRequest) obj; - return Objects.equals(jobId, other.jobId) - && calcInterim == other.calcInterim - && Objects.equals(start, other.start) - && Objects.equals(end, other.end) - && Objects.equals(advanceTime, other.advanceTime) - && Objects.equals(skipTime, other.skipTime); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(Job.ID.getPreferredName(), jobId); - if (calcInterim != null) { - builder.field(CALC_INTERIM.getPreferredName(), calcInterim); - } - if (start != null) { - builder.field(START.getPreferredName(), start); - } - if (end != null) { - builder.field(END.getPreferredName(), end); - } - if (advanceTime != null) { - builder.field(ADVANCE_TIME.getPreferredName(), advanceTime); - } - if (skipTime != null) { - builder.field(SKIP_TIME.getPreferredName(), skipTime); - } - builder.endObject(); - return builder; - } - -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/FlushJobResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/FlushJobResponse.java deleted file mode 100644 index d85ec888b61a4..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/FlushJobResponse.java +++ /dev/null @@ -1,104 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.core.Nullable; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Date; -import java.util.Objects; - -/** - * Response object containing flush acknowledgement and additional data - */ -public class FlushJobResponse implements ToXContentObject { - - public static final ParseField FLUSHED = new ParseField("flushed"); - public static final ParseField LAST_FINALIZED_BUCKET_END = new ParseField("last_finalized_bucket_end"); - - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "flush_job_response", - true, - (a) -> { - boolean flushed = (boolean) a[0]; - Date date = a[1] == null ? 
null : new Date((long) a[1]); - return new FlushJobResponse(flushed, date); - } - ); - - static { - PARSER.declareBoolean(ConstructingObjectParser.constructorArg(), FLUSHED); - PARSER.declareLong(ConstructingObjectParser.optionalConstructorArg(), LAST_FINALIZED_BUCKET_END); - } - - public static FlushJobResponse fromXContent(XContentParser parser) throws IOException { - return PARSER.parse(parser, null); - } - - private final boolean flushed; - private final Date lastFinalizedBucketEnd; - - public FlushJobResponse(boolean flushed, @Nullable Date lastFinalizedBucketEnd) { - this.flushed = flushed; - this.lastFinalizedBucketEnd = lastFinalizedBucketEnd; - } - - /** - * Was the job successfully flushed or not - */ - public boolean isFlushed() { - return flushed; - } - - /** - * Provides the timestamp (in milliseconds-since-the-epoch) of the end of the last bucket that was processed. - */ - @Nullable - public Date getLastFinalizedBucketEnd() { - return lastFinalizedBucketEnd; - } - - @Override - public int hashCode() { - return Objects.hash(flushed, lastFinalizedBucketEnd); - } - - @Override - public boolean equals(Object other) { - if (this == other) { - return true; - } - - if (other == null || getClass() != other.getClass()) { - return false; - } - - FlushJobResponse that = (FlushJobResponse) other; - return that.flushed == flushed && Objects.equals(lastFinalizedBucketEnd, that.lastFinalizedBucketEnd); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(FLUSHED.getPreferredName(), flushed); - if (lastFinalizedBucketEnd != null) { - builder.timeField( - LAST_FINALIZED_BUCKET_END.getPreferredName(), - LAST_FINALIZED_BUCKET_END.getPreferredName() + "_string", - lastFinalizedBucketEnd.getTime() - ); - } - builder.endObject(); - return builder; - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/ForecastJobRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/ForecastJobRequest.java deleted file mode 100644 index 2a7f09c802dc3..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/ForecastJobRequest.java +++ /dev/null @@ -1,161 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
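
For context, a minimal usage sketch of the flush request/response pair deleted above. This is illustrative only, not part of the patch: it assumes a configured RestHighLevelClient named `client`, a hypothetical job id, and a surrounding method that handles IOException.

    import org.elasticsearch.client.RequestOptions;
    import org.elasticsearch.client.ml.FlushJobRequest;
    import org.elasticsearch.client.ml.FlushJobResponse;
    import java.util.Date;

    // Flush "my-job", calculating interim results for an explicit bucket range.
    FlushJobRequest flushRequest = new FlushJobRequest("my-job");
    flushRequest.setCalcInterim(true);
    flushRequest.setStart("1403481600");   // epoch seconds, epoch millis or ISO strings are accepted
    flushRequest.setEnd("1403485200");

    FlushJobResponse flushResponse = client.machineLearning().flushJob(flushRequest, RequestOptions.DEFAULT);
    boolean flushed = flushResponse.isFlushed();                      // acknowledgement flag
    Date lastFinalized = flushResponse.getLastFinalizedBucketEnd();   // may be null
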
- */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.Validatable; -import org.elasticsearch.client.ml.job.config.Job; -import org.elasticsearch.common.unit.ByteSizeValue; -import org.elasticsearch.core.TimeValue; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContent; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParseException; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Objects; - -/** - * Pojo for forecasting an existing and open Machine Learning Job - */ -public class ForecastJobRequest implements Validatable, ToXContentObject { - - public static final ParseField DURATION = new ParseField("duration"); - public static final ParseField EXPIRES_IN = new ParseField("expires_in"); - public static final ParseField MAX_MODEL_MEMORY = new ParseField("max_model_memory"); - - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "forecast_job_request", - (a) -> new ForecastJobRequest((String) a[0]) - ); - - static { - PARSER.declareString(ConstructingObjectParser.constructorArg(), Job.ID); - PARSER.declareString((request, val) -> request.setDuration(TimeValue.parseTimeValue(val, DURATION.getPreferredName())), DURATION); - PARSER.declareString( - (request, val) -> request.setExpiresIn(TimeValue.parseTimeValue(val, EXPIRES_IN.getPreferredName())), - EXPIRES_IN - ); - PARSER.declareField(ForecastJobRequest::setMaxModelMemory, (p, c) -> { - if (p.currentToken() == XContentParser.Token.VALUE_STRING) { - return ByteSizeValue.parseBytesSizeValue(p.text(), MAX_MODEL_MEMORY.getPreferredName()); - } else if (p.currentToken() == XContentParser.Token.VALUE_NUMBER) { - return new ByteSizeValue(p.longValue()); - } - throw new XContentParseException("Unsupported token [" + p.currentToken() + "]"); - }, MAX_MODEL_MEMORY, ObjectParser.ValueType.VALUE); - } - - private final String jobId; - private TimeValue duration; - private TimeValue expiresIn; - private ByteSizeValue maxModelMemory; - - /** - * A new forecast request - * - * @param jobId the non-null, existing, and opened jobId to forecast - */ - public ForecastJobRequest(String jobId) { - this.jobId = jobId; - } - - public String getJobId() { - return jobId; - } - - public TimeValue getDuration() { - return duration; - } - - /** - * Set the forecast duration - * - * A period of time that indicates how far into the future to forecast. - * The default value is 1 day. The forecast starts at the last record that was processed. - * - * @param duration TimeValue for the duration of the forecast - */ - public void setDuration(TimeValue duration) { - this.duration = duration; - } - - public TimeValue getExpiresIn() { - return expiresIn; - } - - /** - * Set the forecast expiration - * - * The period of time that forecast results are retained. - * After a forecast expires, the results are deleted. The default value is 14 days. - * If set to a value of 0, the forecast is never automatically deleted. - * - * @param expiresIn TimeValue for the forecast expiration - */ - public void setExpiresIn(TimeValue expiresIn) { - this.expiresIn = expiresIn; - } - - public ByteSizeValue getMaxModelMemory() { - return maxModelMemory; - } - - /** - * Set the amount of memory allowed to be used by this forecast. 
- * - * If the projected forecast memory usage exceeds this amount, the forecast will spool results to disk to keep within the limits. - * @param maxModelMemory A byte sized value less than 500MB and less than 40% of the associated job's configured memory usage. - * Defaults to 20MB. - */ - public ForecastJobRequest setMaxModelMemory(ByteSizeValue maxModelMemory) { - this.maxModelMemory = maxModelMemory; - return this; - } - - @Override - public int hashCode() { - return Objects.hash(jobId, duration, expiresIn, maxModelMemory); - } - - @Override - public boolean equals(Object obj) { - if (this == obj) { - return true; - } - if (obj == null || getClass() != obj.getClass()) { - return false; - } - ForecastJobRequest other = (ForecastJobRequest) obj; - return Objects.equals(jobId, other.jobId) - && Objects.equals(duration, other.duration) - && Objects.equals(expiresIn, other.expiresIn) - && Objects.equals(maxModelMemory, other.maxModelMemory); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { - builder.startObject(); - builder.field(Job.ID.getPreferredName(), jobId); - if (duration != null) { - builder.field(DURATION.getPreferredName(), duration.getStringRep()); - } - if (expiresIn != null) { - builder.field(EXPIRES_IN.getPreferredName(), expiresIn.getStringRep()); - } - if (maxModelMemory != null) { - builder.field(MAX_MODEL_MEMORY.getPreferredName(), maxModelMemory.getStringRep()); - } - builder.endObject(); - return builder; - } - -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/ForecastJobResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/ForecastJobResponse.java deleted file mode 100644 index b1cbd5d863c99..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/ForecastJobResponse.java +++ /dev/null @@ -1,90 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Objects; - -/** - * Forecast response object - */ -public class ForecastJobResponse implements ToXContentObject { - - public static final ParseField ACKNOWLEDGED = new ParseField("acknowledged"); - public static final ParseField FORECAST_ID = new ParseField("forecast_id"); - - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "forecast_job_response", - true, - (a) -> new ForecastJobResponse((Boolean) a[0], (String) a[1]) - ); - - static { - PARSER.declareBoolean(ConstructingObjectParser.constructorArg(), ACKNOWLEDGED); - PARSER.declareString(ConstructingObjectParser.constructorArg(), FORECAST_ID); - } - - public static ForecastJobResponse fromXContent(XContentParser parser) throws IOException { - return PARSER.parse(parser, null); - } - - private final boolean acknowledged; - private final String forecastId; - - public ForecastJobResponse(boolean acknowledged, String forecastId) { - this.acknowledged = acknowledged; - this.forecastId = forecastId; - } - - /** - * Forecast creating acknowledgement - * @return {@code true} indicates success, {@code false} otherwise - */ - public boolean isAcknowledged() { - return acknowledged; - } - - /** - * The created forecast ID - */ - public String getForecastId() { - return forecastId; - } - - @Override - public int hashCode() { - return Objects.hash(acknowledged, forecastId); - } - - @Override - public boolean equals(Object obj) { - if (this == obj) { - return true; - } - if (obj == null || getClass() != obj.getClass()) { - return false; - } - ForecastJobResponse other = (ForecastJobResponse) obj; - return Objects.equals(acknowledged, other.acknowledged) && Objects.equals(forecastId, other.forecastId); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(ACKNOWLEDGED.getPreferredName(), acknowledged); - builder.field(FORECAST_ID.getPreferredName(), forecastId); - builder.endObject(); - return builder; - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetBucketsRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetBucketsRequest.java deleted file mode 100644 index 052f177bb8743..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetBucketsRequest.java +++ /dev/null @@ -1,250 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
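
A similar illustrative sketch for the forecast classes removed above, under the same assumptions (a RestHighLevelClient named `client`, a hypothetical job id):

    import org.elasticsearch.client.RequestOptions;
    import org.elasticsearch.client.ml.ForecastJobRequest;
    import org.elasticsearch.client.ml.ForecastJobResponse;
    import org.elasticsearch.core.TimeValue;

    // Forecast 48 hours ahead for "my-job" and keep the results for three days.
    ForecastJobRequest forecastRequest = new ForecastJobRequest("my-job");
    forecastRequest.setDuration(TimeValue.timeValueHours(48));
    forecastRequest.setExpiresIn(TimeValue.timeValueHours(72));

    ForecastJobResponse forecastResponse = client.machineLearning().forecastJob(forecastRequest, RequestOptions.DEFAULT);
    String forecastId = forecastResponse.getForecastId();   // id under which the forecast results are stored
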
- */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.Validatable; -import org.elasticsearch.client.core.PageParams; -import org.elasticsearch.client.ml.job.config.Job; -import org.elasticsearch.client.ml.job.results.Result; -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Objects; - -/** - * A request to retrieve buckets of a given job - */ -public class GetBucketsRequest implements Validatable, ToXContentObject { - - public static final ParseField EXPAND = new ParseField("expand"); - public static final ParseField EXCLUDE_INTERIM = new ParseField("exclude_interim"); - public static final ParseField START = new ParseField("start"); - public static final ParseField END = new ParseField("end"); - public static final ParseField ANOMALY_SCORE = new ParseField("anomaly_score"); - public static final ParseField SORT = new ParseField("sort"); - public static final ParseField DESCENDING = new ParseField("desc"); - - public static final ObjectParser PARSER = new ObjectParser<>("get_buckets_request", GetBucketsRequest::new); - - static { - PARSER.declareString((request, jobId) -> request.jobId = jobId, Job.ID); - PARSER.declareString(GetBucketsRequest::setTimestamp, Result.TIMESTAMP); - PARSER.declareBoolean(GetBucketsRequest::setExpand, EXPAND); - PARSER.declareBoolean(GetBucketsRequest::setExcludeInterim, EXCLUDE_INTERIM); - PARSER.declareStringOrNull(GetBucketsRequest::setStart, START); - PARSER.declareStringOrNull(GetBucketsRequest::setEnd, END); - PARSER.declareObject(GetBucketsRequest::setPageParams, PageParams.PARSER, PageParams.PAGE); - PARSER.declareDouble(GetBucketsRequest::setAnomalyScore, ANOMALY_SCORE); - PARSER.declareString(GetBucketsRequest::setSort, SORT); - PARSER.declareBoolean(GetBucketsRequest::setDescending, DESCENDING); - } - - private String jobId; - private String timestamp; - private Boolean expand; - private Boolean excludeInterim; - private String start; - private String end; - private PageParams pageParams; - private Double anomalyScore; - private String sort; - private Boolean descending; - - private GetBucketsRequest() {} - - /** - * Constructs a request to retrieve buckets of a given job - * @param jobId id of the job to retrieve buckets of - */ - public GetBucketsRequest(String jobId) { - this.jobId = Objects.requireNonNull(jobId); - } - - public String getJobId() { - return jobId; - } - - /** - * Sets the timestamp of a specific bucket to be retrieved. - * @param timestamp String representation of a timestamp; may be an epoch seconds, epoch millis or an ISO string - */ - public void setTimestamp(String timestamp) { - this.timestamp = timestamp; - } - - public String getTimestamp() { - return timestamp; - } - - public boolean isExpand() { - return expand; - } - - /** - * Sets the value of "expand". - * When {@code true}, buckets will be expanded to include their records. - * @param expand value of "expand" to be set - */ - public void setExpand(Boolean expand) { - this.expand = expand; - } - - public Boolean getExcludeInterim() { - return excludeInterim; - } - - /** - * Sets the value of "exclude_interim". - * When {@code true}, interim buckets will be filtered out. 
- * @param excludeInterim value of "exclude_interim" to be set - */ - public void setExcludeInterim(Boolean excludeInterim) { - this.excludeInterim = excludeInterim; - } - - public String getStart() { - return start; - } - - /** - * Sets the value of "start" which is a timestamp. - * Only buckets whose timestamp is on or after the "start" value will be returned. - * @param start String representation of a timestamp; may be an epoch seconds, epoch millis or an ISO string - */ - public void setStart(String start) { - this.start = start; - } - - public String getEnd() { - return end; - } - - /** - * Sets the value of "end" which is a timestamp. - * Only buckets whose timestamp is before the "end" value will be returned. - * @param end String representation of a timestamp; may be an epoch seconds, epoch millis or an ISO string - */ - public void setEnd(String end) { - this.end = end; - } - - public PageParams getPageParams() { - return pageParams; - } - - /** - * Sets the paging parameters - * @param pageParams the paging parameters - */ - public void setPageParams(PageParams pageParams) { - this.pageParams = pageParams; - } - - public Double getAnomalyScore() { - return anomalyScore; - } - - /** - * Sets the value of "anomaly_score". - * Only buckets with "anomaly_score" equal or greater will be returned. - * @param anomalyScore value of "anomaly_score". - */ - public void setAnomalyScore(Double anomalyScore) { - this.anomalyScore = anomalyScore; - } - - public String getSort() { - return sort; - } - - /** - * Sets the value of "sort". - * Specifies the bucket field to sort on. - * @param sort value of "sort". - */ - public void setSort(String sort) { - this.sort = sort; - } - - public Boolean getDescending() { - return descending; - } - - /** - * Sets the value of "desc". - * Specifies the sorting order. 
- * @param descending value of "desc" - */ - public void setDescending(boolean descending) { - this.descending = descending; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(Job.ID.getPreferredName(), jobId); - if (timestamp != null) { - builder.field(Result.TIMESTAMP.getPreferredName(), timestamp); - } - if (expand != null) { - builder.field(EXPAND.getPreferredName(), expand); - } - if (excludeInterim != null) { - builder.field(EXCLUDE_INTERIM.getPreferredName(), excludeInterim); - } - if (start != null) { - builder.field(START.getPreferredName(), start); - } - if (end != null) { - builder.field(END.getPreferredName(), end); - } - if (pageParams != null) { - builder.field(PageParams.PAGE.getPreferredName(), pageParams); - } - if (anomalyScore != null) { - builder.field(ANOMALY_SCORE.getPreferredName(), anomalyScore); - } - if (sort != null) { - builder.field(SORT.getPreferredName(), sort); - } - if (descending != null) { - builder.field(DESCENDING.getPreferredName(), descending); - } - builder.endObject(); - return builder; - } - - @Override - public int hashCode() { - return Objects.hash(jobId, timestamp, expand, excludeInterim, anomalyScore, pageParams, start, end, sort, descending); - } - - @Override - public boolean equals(Object obj) { - if (obj == null) { - return false; - } - if (getClass() != obj.getClass()) { - return false; - } - GetBucketsRequest other = (GetBucketsRequest) obj; - return Objects.equals(jobId, other.jobId) - && Objects.equals(timestamp, other.timestamp) - && Objects.equals(expand, other.expand) - && Objects.equals(excludeInterim, other.excludeInterim) - && Objects.equals(anomalyScore, other.anomalyScore) - && Objects.equals(pageParams, other.pageParams) - && Objects.equals(start, other.start) - && Objects.equals(end, other.end) - && Objects.equals(sort, other.sort) - && Objects.equals(descending, other.descending); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetBucketsResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetBucketsResponse.java deleted file mode 100644 index ced8ad201adbc..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetBucketsResponse.java +++ /dev/null @@ -1,70 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
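
The bucket retrieval classes above combine paging, filtering and sorting; a minimal sketch of how they were typically driven (illustrative, with `client` and the job id assumed):

    import org.elasticsearch.client.RequestOptions;
    import org.elasticsearch.client.core.PageParams;
    import org.elasticsearch.client.ml.GetBucketsRequest;
    import org.elasticsearch.client.ml.GetBucketsResponse;
    import org.elasticsearch.client.ml.job.results.Bucket;
    import java.util.List;

    // First page of finalized buckets with anomaly_score >= 75, most anomalous first.
    GetBucketsRequest bucketsRequest = new GetBucketsRequest("my-job");
    bucketsRequest.setExcludeInterim(true);
    bucketsRequest.setAnomalyScore(75.0);
    bucketsRequest.setSort("anomaly_score");
    bucketsRequest.setDescending(true);
    bucketsRequest.setPageParams(new PageParams(0, 100));

    GetBucketsResponse bucketsResponse = client.machineLearning().getBuckets(bucketsRequest, RequestOptions.DEFAULT);
    List<Bucket> buckets = bucketsResponse.buckets();
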
- */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.ml.job.results.Bucket; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.List; -import java.util.Objects; - -/** - * A response containing the requested buckets - */ -public class GetBucketsResponse extends AbstractResultResponse { - - public static final ParseField BUCKETS = new ParseField("buckets"); - - @SuppressWarnings("unchecked") - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "get_buckets_response", - true, - a -> new GetBucketsResponse((List) a[0], (long) a[1]) - ); - - static { - PARSER.declareObjectArray(ConstructingObjectParser.constructorArg(), Bucket.PARSER, BUCKETS); - PARSER.declareLong(ConstructingObjectParser.constructorArg(), COUNT); - } - - public static GetBucketsResponse fromXContent(XContentParser parser) throws IOException { - return PARSER.parse(parser, null); - } - - GetBucketsResponse(List buckets, long count) { - super(BUCKETS, buckets, count); - } - - /** - * The retrieved buckets - * @return the retrieved buckets - */ - public List buckets() { - return results; - } - - @Override - public int hashCode() { - return Objects.hash(count, results); - } - - @Override - public boolean equals(Object obj) { - if (obj == null) { - return false; - } - if (getClass() != obj.getClass()) { - return false; - } - GetBucketsResponse other = (GetBucketsResponse) obj; - return count == other.count && Objects.equals(results, other.results); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetCalendarEventsRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetCalendarEventsRequest.java deleted file mode 100644 index 91bc1f0b537ba..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetCalendarEventsRequest.java +++ /dev/null @@ -1,154 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ - -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.Validatable; -import org.elasticsearch.client.core.PageParams; -import org.elasticsearch.client.ml.calendars.Calendar; -import org.elasticsearch.client.ml.job.config.Job; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Objects; - -/** - * Get the Scheduled Events for a Calendar - */ -public class GetCalendarEventsRequest implements Validatable, ToXContentObject { - - public static final ParseField START = new ParseField("start"); - public static final ParseField END = new ParseField("end"); - - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "get_calendar_events_request", - a -> new GetCalendarEventsRequest((String) a[0]) - ); - - static { - PARSER.declareString(ConstructingObjectParser.constructorArg(), Calendar.ID); - PARSER.declareString(GetCalendarEventsRequest::setStart, START); - PARSER.declareString(GetCalendarEventsRequest::setEnd, END); - PARSER.declareString(GetCalendarEventsRequest::setJobId, Job.ID); - PARSER.declareObject(GetCalendarEventsRequest::setPageParams, PageParams.PARSER, PageParams.PAGE); - } - - private final String calendarId; - private String start; - private String end; - private String jobId; - private PageParams pageParams; - - /** - * Create a new request to get the ScheduledEvents for the given calendarId. - * - * @param calendarId The ID of the calendar. - * Can be `_all` to get ALL ScheduledEvents for all calendars. - */ - public GetCalendarEventsRequest(String calendarId) { - this.calendarId = Objects.requireNonNull(calendarId, "[calendar_id] must not be null."); - } - - public String getCalendarId() { - return calendarId; - } - - public PageParams getPageParams() { - return pageParams; - } - - /** - * The paging parameters for the gathered ScheduledEvents - * @param pageParams The desired paging params - */ - public void setPageParams(PageParams pageParams) { - this.pageParams = pageParams; - } - - public String getStart() { - return start; - } - - /** - * Specifies to get events with timestamps after this time. - * - * @param start String representation of a timestamp; may be an epoch seconds, epoch millis or an ISO string - */ - public void setStart(String start) { - this.start = start; - } - - public String getEnd() { - return end; - } - - /** - * Specifies to get events with timestamps earlier than this time. - * - * @param end String representation of a timestamp; may be an epoch seconds, epoch millis or an ISO string - */ - public void setEnd(String end) { - this.end = end; - } - - public String getJobId() { - return jobId; - } - - /** - * The jobId for which to get the ScheduledEvents. When this option is used calendarId must be `_all` - * @param jobId The job for which to get the events. 
- */ - public void setJobId(String jobId) { - this.jobId = jobId; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(Calendar.ID.getPreferredName(), calendarId); - if (start != null) { - builder.field(START.getPreferredName(), start); - } - if (end != null) { - builder.field(END.getPreferredName(), end); - } - if (jobId != null) { - builder.field(Job.ID.getPreferredName(), jobId); - } - if (pageParams != null) { - builder.field(PageParams.PAGE.getPreferredName(), pageParams); - } - builder.endObject(); - return builder; - } - - @Override - public int hashCode() { - return Objects.hash(calendarId, start, end, jobId, pageParams); - } - - @Override - public boolean equals(Object obj) { - if (this == obj) { - return true; - } - if (obj == null || getClass() != obj.getClass()) { - return false; - } - GetCalendarEventsRequest other = (GetCalendarEventsRequest) obj; - return Objects.equals(calendarId, other.calendarId) - && Objects.equals(pageParams, other.pageParams) - && Objects.equals(start, other.start) - && Objects.equals(end, other.end) - && Objects.equals(jobId, other.jobId); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetCalendarEventsResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetCalendarEventsResponse.java deleted file mode 100644 index 2c36c1c329e84..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetCalendarEventsResponse.java +++ /dev/null @@ -1,79 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.ml.calendars.ScheduledEvent; -import org.elasticsearch.common.Strings; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.List; -import java.util.Objects; - -import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; - -/** - * Contains a {@link List} of the found {@link ScheduledEvent} objects and the total count found - */ -public class GetCalendarEventsResponse extends AbstractResultResponse { - - public static final ParseField RESULTS_FIELD = new ParseField("events"); - - @SuppressWarnings("unchecked") - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "calendar_events_response", - true, - a -> new GetCalendarEventsResponse((List) a[0], (long) a[1]) - ); - - static { - PARSER.declareObjectArray(constructorArg(), ScheduledEvent.PARSER, RESULTS_FIELD); - PARSER.declareLong(constructorArg(), COUNT); - } - - GetCalendarEventsResponse(List events, long count) { - super(RESULTS_FIELD, events, count); - } - - /** - * The collection of {@link ScheduledEvent} objects found in the query - */ - public List events() { - return results; - } - - public static GetCalendarEventsResponse fromXContent(XContentParser parser) throws IOException { - return PARSER.parse(parser, null); - } - - @Override - public int hashCode() { - return Objects.hash(results, count); - } - - @Override - public boolean equals(Object obj) { - if (this == obj) { - return true; - } - - if (obj == null || getClass() != obj.getClass()) { - return false; - } - - GetCalendarEventsResponse other = (GetCalendarEventsResponse) obj; - return Objects.equals(results, other.results) && count == other.count; - } - - @Override - public final String toString() { - return Strings.toString(this); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetCalendarsRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetCalendarsRequest.java deleted file mode 100644 index 55bcbd88964be..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetCalendarsRequest.java +++ /dev/null @@ -1,87 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
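
For the calendar events pair removed above, a short hedged sketch under the same assumptions (`client` configured; calendar id and timestamp are hypothetical):

    import org.elasticsearch.client.RequestOptions;
    import org.elasticsearch.client.core.PageParams;
    import org.elasticsearch.client.ml.GetCalendarEventsRequest;
    import org.elasticsearch.client.ml.GetCalendarEventsResponse;
    import org.elasticsearch.client.ml.calendars.ScheduledEvent;
    import java.util.List;

    // Events for one calendar from a given start time; the id "_all" would span every calendar.
    GetCalendarEventsRequest eventsRequest = new GetCalendarEventsRequest("my-calendar");
    eventsRequest.setStart("2018-08-01T00:00:00Z");
    eventsRequest.setPageParams(new PageParams(0, 50));

    GetCalendarEventsResponse eventsResponse = client.machineLearning().getCalendarEvents(eventsRequest, RequestOptions.DEFAULT);
    List<ScheduledEvent> events = eventsResponse.events();
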
- */ - -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.Validatable; -import org.elasticsearch.client.core.PageParams; -import org.elasticsearch.client.ml.calendars.Calendar; -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Objects; - -public class GetCalendarsRequest implements Validatable, ToXContentObject { - - public static final ObjectParser PARSER = new ObjectParser<>( - "get_calendars_request", - GetCalendarsRequest::new - ); - - static { - PARSER.declareString(GetCalendarsRequest::setCalendarId, Calendar.ID); - PARSER.declareObject(GetCalendarsRequest::setPageParams, PageParams.PARSER, PageParams.PAGE); - } - - private String calendarId; - private PageParams pageParams; - - public GetCalendarsRequest() {} - - public GetCalendarsRequest(String calendarId) { - this.calendarId = calendarId; - } - - public String getCalendarId() { - return calendarId; - } - - public void setCalendarId(String calendarId) { - this.calendarId = calendarId; - } - - public PageParams getPageParams() { - return pageParams; - } - - public void setPageParams(PageParams pageParams) { - this.pageParams = pageParams; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - if (calendarId != null) { - builder.field(Calendar.ID.getPreferredName(), calendarId); - } - if (pageParams != null) { - builder.field(PageParams.PAGE.getPreferredName(), pageParams); - } - builder.endObject(); - return builder; - } - - @Override - public int hashCode() { - return Objects.hash(calendarId, pageParams); - } - - @Override - public boolean equals(Object obj) { - if (obj == null) { - return false; - } - if (getClass() != obj.getClass()) { - return false; - } - GetCalendarsRequest other = (GetCalendarsRequest) obj; - return Objects.equals(calendarId, other.calendarId) && Objects.equals(pageParams, other.pageParams); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetCalendarsResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetCalendarsResponse.java deleted file mode 100644 index bf2119692b485..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetCalendarsResponse.java +++ /dev/null @@ -1,77 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ - -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.ml.calendars.Calendar; -import org.elasticsearch.common.Strings; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.List; -import java.util.Objects; - -import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; - -public class GetCalendarsResponse extends AbstractResultResponse { - - public static final ParseField RESULTS_FIELD = new ParseField("calendars"); - - @SuppressWarnings("unchecked") - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "calendars_response", - true, - a -> new GetCalendarsResponse((List) a[0], (long) a[1]) - ); - - static { - PARSER.declareObjectArray(constructorArg(), Calendar.PARSER, RESULTS_FIELD); - PARSER.declareLong(constructorArg(), AbstractResultResponse.COUNT); - } - - public static GetCalendarsResponse fromXContent(XContentParser parser) throws IOException { - return PARSER.parse(parser, null); - } - - GetCalendarsResponse(List calendars, long count) { - super(RESULTS_FIELD, calendars, count); - } - - /** - * The collection of {@link Calendar} objects found in the query - */ - public List calendars() { - return results; - } - - @Override - public int hashCode() { - return Objects.hash(results, count); - } - - @Override - public boolean equals(Object obj) { - if (this == obj) { - return true; - } - - if (obj == null || getClass() != obj.getClass()) { - return false; - } - - GetCalendarsResponse other = (GetCalendarsResponse) obj; - return Objects.equals(results, other.results) && count == other.count; - } - - @Override - public final String toString() { - return Strings.toString(this); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetCategoriesRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetCategoriesRequest.java deleted file mode 100644 index 9a00c09ffd847..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetCategoriesRequest.java +++ /dev/null @@ -1,131 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
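
And for the calendar listing classes above, an illustrative sketch (again assuming a configured `client`):

    import org.elasticsearch.client.RequestOptions;
    import org.elasticsearch.client.core.PageParams;
    import org.elasticsearch.client.ml.GetCalendarsRequest;
    import org.elasticsearch.client.ml.GetCalendarsResponse;
    import org.elasticsearch.client.ml.calendars.Calendar;
    import java.util.List;

    // No calendar id set: page through all calendars in the cluster.
    GetCalendarsRequest calendarsRequest = new GetCalendarsRequest();
    calendarsRequest.setPageParams(new PageParams(0, 20));

    GetCalendarsResponse calendarsResponse = client.machineLearning().getCalendars(calendarsRequest, RequestOptions.DEFAULT);
    List<Calendar> calendars = calendarsResponse.calendars();
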
- */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.Validatable; -import org.elasticsearch.client.core.PageParams; -import org.elasticsearch.client.ml.job.config.Job; -import org.elasticsearch.client.ml.job.results.CategoryDefinition; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Objects; - -/** - * A request to retrieve categories of a given job - */ -public class GetCategoriesRequest implements Validatable, ToXContentObject { - - public static final ParseField CATEGORY_ID = CategoryDefinition.CATEGORY_ID; - public static final ParseField PARTITION_FIELD_VALUE = CategoryDefinition.PARTITION_FIELD_VALUE; - - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "get_categories_request", - a -> new GetCategoriesRequest((String) a[0]) - ); - - static { - PARSER.declareString(ConstructingObjectParser.constructorArg(), Job.ID); - PARSER.declareLong(GetCategoriesRequest::setCategoryId, CATEGORY_ID); - PARSER.declareObject(GetCategoriesRequest::setPageParams, PageParams.PARSER, PageParams.PAGE); - PARSER.declareString(GetCategoriesRequest::setPartitionFieldValue, PARTITION_FIELD_VALUE); - } - - private final String jobId; - private Long categoryId; - private PageParams pageParams; - private String partitionFieldValue; - - /** - * Constructs a request to retrieve category information from a given job - * @param jobId id of the job from which to retrieve results - */ - public GetCategoriesRequest(String jobId) { - this.jobId = Objects.requireNonNull(jobId); - } - - public String getJobId() { - return jobId; - } - - public PageParams getPageParams() { - return pageParams; - } - - public Long getCategoryId() { - return categoryId; - } - - /** - * Sets the category id - * @param categoryId the category id - */ - public void setCategoryId(Long categoryId) { - this.categoryId = categoryId; - } - - /** - * Sets the paging parameters - * @param pageParams the paging parameters - */ - public void setPageParams(PageParams pageParams) { - this.pageParams = pageParams; - } - - public String getPartitionFieldValue() { - return partitionFieldValue; - } - - /** - * Sets the partition field value - * @param partitionFieldValue the partition field value - */ - public void setPartitionFieldValue(String partitionFieldValue) { - this.partitionFieldValue = partitionFieldValue; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(Job.ID.getPreferredName(), jobId); - if (categoryId != null) { - builder.field(CATEGORY_ID.getPreferredName(), categoryId); - } - if (pageParams != null) { - builder.field(PageParams.PAGE.getPreferredName(), pageParams); - } - if (partitionFieldValue != null) { - builder.field(PARTITION_FIELD_VALUE.getPreferredName(), partitionFieldValue); - } - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object obj) { - if (obj == null) { - return false; - } - if (getClass() != obj.getClass()) { - return false; - } - GetCategoriesRequest request = (GetCategoriesRequest) obj; - return Objects.equals(jobId, request.jobId) - && Objects.equals(categoryId, request.categoryId) - && Objects.equals(pageParams, request.pageParams) - && Objects.equals(partitionFieldValue, request.partitionFieldValue); - } - - 
@Override - public int hashCode() { - return Objects.hash(jobId, categoryId, pageParams, partitionFieldValue); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetCategoriesResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetCategoriesResponse.java deleted file mode 100644 index d1e542d18e43d..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetCategoriesResponse.java +++ /dev/null @@ -1,70 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.ml.job.results.CategoryDefinition; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.List; -import java.util.Objects; - -/** - * A response containing the requested categories - */ -public class GetCategoriesResponse extends AbstractResultResponse { - - public static final ParseField CATEGORIES = new ParseField("categories"); - - @SuppressWarnings("unchecked") - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "get_categories_response", - true, - a -> new GetCategoriesResponse((List) a[0], (long) a[1]) - ); - - static { - PARSER.declareObjectArray(ConstructingObjectParser.constructorArg(), CategoryDefinition.PARSER, CATEGORIES); - PARSER.declareLong(ConstructingObjectParser.constructorArg(), COUNT); - } - - public static GetCategoriesResponse fromXContent(XContentParser parser) throws IOException { - return PARSER.parse(parser, null); - } - - GetCategoriesResponse(List categories, long count) { - super(CATEGORIES, categories, count); - } - - /** - * The retrieved categories - * @return the retrieved categories - */ - public List categories() { - return results; - } - - @Override - public int hashCode() { - return Objects.hash(count, results); - } - - @Override - public boolean equals(Object obj) { - if (obj == null) { - return false; - } - if (getClass() != obj.getClass()) { - return false; - } - GetCategoriesResponse other = (GetCategoriesResponse) obj; - return count == other.count && Objects.equals(results, other.results); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetDataFrameAnalyticsRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetDataFrameAnalyticsRequest.java deleted file mode 100644 index 9b19280f9729f..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetDataFrameAnalyticsRequest.java +++ /dev/null @@ -1,111 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
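
A hedged sketch of the category retrieval flow defined by the classes above (illustrative; `client` and the job id are assumptions):

    import org.elasticsearch.client.RequestOptions;
    import org.elasticsearch.client.core.PageParams;
    import org.elasticsearch.client.ml.GetCategoriesRequest;
    import org.elasticsearch.client.ml.GetCategoriesResponse;
    import org.elasticsearch.client.ml.job.results.CategoryDefinition;
    import java.util.List;

    // First ten categories of "my-job"; setCategoryId(...) would narrow this to a single category.
    GetCategoriesRequest categoriesRequest = new GetCategoriesRequest("my-job");
    categoriesRequest.setPageParams(new PageParams(0, 10));

    GetCategoriesResponse categoriesResponse = client.machineLearning().getCategories(categoriesRequest, RequestOptions.DEFAULT);
    List<CategoryDefinition> categories = categoriesResponse.categories();
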
- */ - -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.Validatable; -import org.elasticsearch.client.ValidationException; -import org.elasticsearch.client.core.PageParams; -import org.elasticsearch.core.Nullable; - -import java.util.Arrays; -import java.util.List; -import java.util.Objects; -import java.util.Optional; - -public class GetDataFrameAnalyticsRequest implements Validatable { - - public static final String ALLOW_NO_MATCH = "allow_no_match"; - public static final String EXCLUDE_GENERATED = "exclude_generated"; - - private final List ids; - private Boolean allowNoMatch; - private PageParams pageParams; - private Boolean excludeGenerated; - - /** - * Helper method to create a request that will get ALL Data Frame Analytics - * @return new {@link GetDataFrameAnalyticsRequest} object for the id "_all" - */ - public static GetDataFrameAnalyticsRequest getAllDataFrameAnalyticsRequest() { - return new GetDataFrameAnalyticsRequest("_all"); - } - - public GetDataFrameAnalyticsRequest(String... ids) { - this.ids = Arrays.asList(ids); - } - - public List getIds() { - return ids; - } - - public Boolean getAllowNoMatch() { - return allowNoMatch; - } - - /** - * Setting this flag to `true` removes certain fields from the configuration on retrieval. - * - * This is useful when getting the configuration and wanting to put it in another cluster. - * - * Default value is false. - * @param excludeGenerated Boolean value indicating if certain fields should be removed - */ - public void setExcludeGenerated(boolean excludeGenerated) { - this.excludeGenerated = excludeGenerated; - } - - public Boolean getExcludeGenerated() { - return excludeGenerated; - } - - /** - * Whether to ignore if a wildcard expression matches no data frame analytics. - * - * @param allowNoMatch If this is {@code false}, then an error is returned when a wildcard (or {@code _all}) - * does not match any data frame analytics - */ - public GetDataFrameAnalyticsRequest setAllowNoMatch(boolean allowNoMatch) { - this.allowNoMatch = allowNoMatch; - return this; - } - - public PageParams getPageParams() { - return pageParams; - } - - public GetDataFrameAnalyticsRequest setPageParams(@Nullable PageParams pageParams) { - this.pageParams = pageParams; - return this; - } - - @Override - public Optional validate() { - if (ids == null || ids.isEmpty()) { - return Optional.of(ValidationException.withError("data frame analytics id must not be null")); - } - return Optional.empty(); - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - - GetDataFrameAnalyticsRequest other = (GetDataFrameAnalyticsRequest) o; - return Objects.equals(ids, other.ids) - && Objects.equals(allowNoMatch, other.allowNoMatch) - && Objects.equals(excludeGenerated, other.excludeGenerated) - && Objects.equals(pageParams, other.pageParams); - } - - @Override - public int hashCode() { - return Objects.hash(ids, allowNoMatch, excludeGenerated, pageParams); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetDataFrameAnalyticsResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetDataFrameAnalyticsResponse.java deleted file mode 100644 index 8e991efa10d44..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetDataFrameAnalyticsResponse.java +++ /dev/null @@ -1,63 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. 
under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ - -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.ml.dataframe.DataFrameAnalyticsConfig; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.XContentParser; - -import java.util.List; -import java.util.Objects; - -import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; - -public class GetDataFrameAnalyticsResponse { - - public static final ParseField DATA_FRAME_ANALYTICS = new ParseField("data_frame_analytics"); - - @SuppressWarnings("unchecked") - static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "get_data_frame_analytics", - true, - args -> new GetDataFrameAnalyticsResponse((List) args[0]) - ); - - static { - PARSER.declareObjectArray(constructorArg(), (p, c) -> DataFrameAnalyticsConfig.fromXContent(p), DATA_FRAME_ANALYTICS); - } - - public static GetDataFrameAnalyticsResponse fromXContent(final XContentParser parser) { - return PARSER.apply(parser, null); - } - - private List analytics; - - public GetDataFrameAnalyticsResponse(List analytics) { - this.analytics = analytics; - } - - public List getAnalytics() { - return analytics; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - - GetDataFrameAnalyticsResponse other = (GetDataFrameAnalyticsResponse) o; - return Objects.equals(this.analytics, other.analytics); - } - - @Override - public int hashCode() { - return Objects.hash(analytics); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetDataFrameAnalyticsStatsRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetDataFrameAnalyticsStatsRequest.java deleted file mode 100644 index 50a71f5a4dc33..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetDataFrameAnalyticsStatsRequest.java +++ /dev/null @@ -1,88 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ - -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.Validatable; -import org.elasticsearch.client.ValidationException; -import org.elasticsearch.client.core.PageParams; -import org.elasticsearch.core.Nullable; -import org.elasticsearch.xcontent.ParseField; - -import java.util.Arrays; -import java.util.List; -import java.util.Objects; -import java.util.Optional; - -/** - * Request to get data frame analytics stats - */ -public class GetDataFrameAnalyticsStatsRequest implements Validatable { - - public static final ParseField ALLOW_NO_MATCH = new ParseField("allow_no_match"); - - private final List ids; - private Boolean allowNoMatch; - private PageParams pageParams; - - public GetDataFrameAnalyticsStatsRequest(String... 
ids) { - this.ids = Arrays.asList(ids); - } - - public List getIds() { - return ids; - } - - public Boolean getAllowNoMatch() { - return allowNoMatch; - } - - /** - * Whether to ignore if a wildcard expression matches no data frame analytics. - * - * @param allowNoMatch If this is {@code false}, then an error is returned when a wildcard (or {@code _all}) - * does not match any data frame analytics - */ - public GetDataFrameAnalyticsStatsRequest setAllowNoMatch(boolean allowNoMatch) { - this.allowNoMatch = allowNoMatch; - return this; - } - - public PageParams getPageParams() { - return pageParams; - } - - public GetDataFrameAnalyticsStatsRequest setPageParams(@Nullable PageParams pageParams) { - this.pageParams = pageParams; - return this; - } - - @Override - public Optional validate() { - if (ids == null || ids.isEmpty()) { - return Optional.of(ValidationException.withError("data frame analytics id must not be null")); - } - return Optional.empty(); - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - - GetDataFrameAnalyticsStatsRequest other = (GetDataFrameAnalyticsStatsRequest) o; - return Objects.equals(ids, other.ids) - && Objects.equals(allowNoMatch, other.allowNoMatch) - && Objects.equals(pageParams, other.pageParams); - } - - @Override - public int hashCode() { - return Objects.hash(ids, allowNoMatch, pageParams); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetDataFrameAnalyticsStatsResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetDataFrameAnalyticsStatsResponse.java deleted file mode 100644 index 00284b0802a16..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetDataFrameAnalyticsStatsResponse.java +++ /dev/null @@ -1,101 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
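
For the data frame analytics request/response classes above, a minimal sketch under the same assumptions (a configured `client`; the exclude_generated flag is shown because the Javadoc above calls it out for cross-cluster copies):

    import org.elasticsearch.client.RequestOptions;
    import org.elasticsearch.client.ml.GetDataFrameAnalyticsRequest;
    import org.elasticsearch.client.ml.GetDataFrameAnalyticsResponse;
    import org.elasticsearch.client.ml.dataframe.DataFrameAnalyticsConfig;
    import java.util.List;

    // Fetch every data frame analytics config; exclude_generated strips fields that
    // would prevent the config from being PUT into another cluster.
    GetDataFrameAnalyticsRequest dfaRequest = GetDataFrameAnalyticsRequest.getAllDataFrameAnalyticsRequest();
    dfaRequest.setExcludeGenerated(true);

    GetDataFrameAnalyticsResponse dfaResponse = client.machineLearning().getDataFrameAnalytics(dfaRequest, RequestOptions.DEFAULT);
    List<DataFrameAnalyticsConfig> configs = dfaResponse.getAnalytics();
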
- */ - -package org.elasticsearch.client.ml; - -import org.elasticsearch.ElasticsearchException; -import org.elasticsearch.action.TaskOperationFailure; -import org.elasticsearch.client.ml.dataframe.DataFrameAnalyticsStats; -import org.elasticsearch.client.transform.AcknowledgedTasksResponse; -import org.elasticsearch.core.Nullable; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.XContentParser; - -import java.util.Collections; -import java.util.List; -import java.util.Objects; - -import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; -import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; - -public class GetDataFrameAnalyticsStatsResponse { - - public static GetDataFrameAnalyticsStatsResponse fromXContent(XContentParser parser) { - return GetDataFrameAnalyticsStatsResponse.PARSER.apply(parser, null); - } - - private static final ParseField DATA_FRAME_ANALYTICS = new ParseField("data_frame_analytics"); - - @SuppressWarnings("unchecked") - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "get_data_frame_analytics_stats_response", - true, - args -> new GetDataFrameAnalyticsStatsResponse( - (List) args[0], - (List) args[1], - (List) args[2] - ) - ); - - static { - PARSER.declareObjectArray(constructorArg(), (p, c) -> DataFrameAnalyticsStats.fromXContent(p), DATA_FRAME_ANALYTICS); - PARSER.declareObjectArray( - optionalConstructorArg(), - (p, c) -> TaskOperationFailure.fromXContent(p), - AcknowledgedTasksResponse.TASK_FAILURES - ); - PARSER.declareObjectArray( - optionalConstructorArg(), - (p, c) -> ElasticsearchException.fromXContent(p), - AcknowledgedTasksResponse.NODE_FAILURES - ); - } - - private final List analyticsStats; - private final List taskFailures; - private final List nodeFailures; - - public GetDataFrameAnalyticsStatsResponse( - List analyticsStats, - @Nullable List taskFailures, - @Nullable List nodeFailures - ) { - this.analyticsStats = analyticsStats; - this.taskFailures = taskFailures == null ? Collections.emptyList() : Collections.unmodifiableList(taskFailures); - this.nodeFailures = nodeFailures == null ? Collections.emptyList() : Collections.unmodifiableList(nodeFailures); - } - - public List getAnalyticsStats() { - return analyticsStats; - } - - public List getNodeFailures() { - return nodeFailures; - } - - public List getTaskFailures() { - return taskFailures; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - - GetDataFrameAnalyticsStatsResponse other = (GetDataFrameAnalyticsStatsResponse) o; - return Objects.equals(analyticsStats, other.analyticsStats) - && Objects.equals(nodeFailures, other.nodeFailures) - && Objects.equals(taskFailures, other.taskFailures); - } - - @Override - public int hashCode() { - return Objects.hash(analyticsStats, nodeFailures, taskFailures); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetDatafeedRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetDatafeedRequest.java deleted file mode 100644 index 67fa0503a9b77..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetDatafeedRequest.java +++ /dev/null @@ -1,148 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. 
Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.Validatable; -import org.elasticsearch.client.ml.datafeed.DatafeedConfig; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; -import java.util.Objects; - -/** - * Request object to get {@link DatafeedConfig} objects with the matching {@code datafeedId}s. - * - * {@code _all} explicitly gets all the datafeeds in the cluster - * An empty request (no {@code datafeedId}s) implicitly gets all the datafeeds in the cluster - */ -public class GetDatafeedRequest implements Validatable, ToXContentObject { - - public static final ParseField DATAFEED_IDS = new ParseField("datafeed_ids"); - public static final ParseField ALLOW_NO_MATCH = new ParseField("allow_no_match"); - public static final String EXCLUDE_GENERATED = "exclude_generated"; - - private static final String ALL_DATAFEEDS = "_all"; - private final List datafeedIds; - private Boolean allowNoMatch; - private Boolean excludeGenerated; - - @SuppressWarnings("unchecked") - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "get_datafeed_request", - true, - a -> new GetDatafeedRequest(a[0] == null ? new ArrayList<>() : (List) a[0]) - ); - - static { - PARSER.declareStringArray(ConstructingObjectParser.optionalConstructorArg(), DATAFEED_IDS); - PARSER.declareBoolean(GetDatafeedRequest::setAllowNoMatch, ALLOW_NO_MATCH); - } - - /** - * Helper method to create a query that will get ALL datafeeds - * @return new {@link GetDatafeedRequest} object searching for the datafeedId "_all" - */ - public static GetDatafeedRequest getAllDatafeedsRequest() { - return new GetDatafeedRequest(ALL_DATAFEEDS); - } - - /** - * Get the specified {@link DatafeedConfig} configurations via their unique datafeedIds - * @param datafeedIds must not contain any null values - */ - public GetDatafeedRequest(String... datafeedIds) { - this(Arrays.asList(datafeedIds)); - } - - GetDatafeedRequest(List datafeedIds) { - if (datafeedIds.stream().anyMatch(Objects::isNull)) { - throw new NullPointerException("datafeedIds must not contain null values"); - } - this.datafeedIds = new ArrayList<>(datafeedIds); - } - - /** - * All the datafeedIds for which to get configuration information - */ - public List getDatafeedIds() { - return datafeedIds; - } - - /** - * Whether to ignore if a wildcard expression matches no datafeeds. - * - * @param allowNoMatch If this is {@code false}, then an error is returned when a wildcard (or {@code _all}) - * does not match any datafeeds - */ - public void setAllowNoMatch(boolean allowNoMatch) { - this.allowNoMatch = allowNoMatch; - } - - public Boolean getAllowNoMatch() { - return allowNoMatch; - } - - /** - * Setting this flag to `true` removes certain fields from the configuration on retrieval. - * - * This is useful when getting the configuration and wanting to put it in another cluster. - * - * Default value is false. 
- * @param excludeGenerated Boolean value indicating if certain fields should be removed - */ - public void setExcludeGenerated(boolean excludeGenerated) { - this.excludeGenerated = excludeGenerated; - } - - public Boolean getExcludeGenerated() { - return excludeGenerated; - } - - @Override - public int hashCode() { - return Objects.hash(datafeedIds, excludeGenerated, allowNoMatch); - } - - @Override - public boolean equals(Object other) { - if (this == other) { - return true; - } - - if (other == null || other.getClass() != getClass()) { - return false; - } - - GetDatafeedRequest that = (GetDatafeedRequest) other; - return Objects.equals(datafeedIds, that.datafeedIds) - && Objects.equals(allowNoMatch, that.allowNoMatch) - && Objects.equals(excludeGenerated, that.excludeGenerated); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - - if (datafeedIds.isEmpty() == false) { - builder.stringListField(DATAFEED_IDS.getPreferredName(), datafeedIds); - } - - if (allowNoMatch != null) { - builder.field(ALLOW_NO_MATCH.getPreferredName(), allowNoMatch); - } - - builder.endObject(); - return builder; - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetDatafeedResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetDatafeedResponse.java deleted file mode 100644 index b8a6a0d79972a..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetDatafeedResponse.java +++ /dev/null @@ -1,80 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */
-package org.elasticsearch.client.ml;
-
-import org.elasticsearch.client.ml.datafeed.DatafeedConfig;
-import org.elasticsearch.common.Strings;
-import org.elasticsearch.xcontent.ConstructingObjectParser;
-import org.elasticsearch.xcontent.ParseField;
-import org.elasticsearch.xcontent.XContentParser;
-
-import java.io.IOException;
-import java.util.List;
-import java.util.Objects;
-import java.util.stream.Collectors;
-
-import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg;
-
-/**
- * Contains a {@link List} of the found {@link DatafeedConfig} objects and the total count found
- */
-public class GetDatafeedResponse extends AbstractResultResponse<DatafeedConfig> {
-
-    public static final ParseField RESULTS_FIELD = new ParseField("datafeeds");
-
-    @SuppressWarnings("unchecked")
-    public static final ConstructingObjectParser<GetDatafeedResponse, Void> PARSER = new ConstructingObjectParser<>(
-        "get_datafeed_response",
-        true,
-        a -> new GetDatafeedResponse((List<DatafeedConfig.Builder>) a[0], (long) a[1])
-    );
-
-    static {
-        PARSER.declareObjectArray(constructorArg(), DatafeedConfig.PARSER, RESULTS_FIELD);
-        PARSER.declareLong(constructorArg(), AbstractResultResponse.COUNT);
-    }
-
-    GetDatafeedResponse(List<DatafeedConfig.Builder> datafeedBuilders, long count) {
-        super(RESULTS_FIELD, datafeedBuilders.stream().map(DatafeedConfig.Builder::build).collect(Collectors.toList()), count);
-    }
-
-    /**
-     * The collection of {@link DatafeedConfig} objects found in the query
-     */
-    public List<DatafeedConfig> datafeeds() {
-        return results;
-    }
-
-    public static GetDatafeedResponse fromXContent(XContentParser parser) throws IOException {
-        return PARSER.parse(parser, null);
-    }
-
-    @Override
-    public int hashCode() {
-        return Objects.hash(results, count);
-    }
-
-    @Override
-    public boolean equals(Object obj) {
-        if (this == obj) {
-            return true;
-        }
-
-        if (obj == null || getClass() != obj.getClass()) {
-            return false;
-        }
-
-        GetDatafeedResponse other = (GetDatafeedResponse) obj;
-        return Objects.equals(results, other.results) && count == other.count;
-    }
-
-    @Override
-    public final String toString() {
-        return Strings.toString(this);
-    }
-}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetDatafeedStatsRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetDatafeedStatsRequest.java
deleted file mode 100644
index 081504354eb20..0000000000000
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetDatafeedStatsRequest.java
+++ /dev/null
@@ -1,134 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.Validatable; -import org.elasticsearch.client.ml.datafeed.DatafeedConfig; -import org.elasticsearch.common.Strings; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContent; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; -import java.util.Objects; - -/** - * Request object to get {@link org.elasticsearch.client.ml.datafeed.DatafeedStats} by their respective datafeedIds - * - * {@code _all} explicitly gets all the datafeeds' statistics in the cluster - * An empty request (no {@code datafeedId}s) implicitly gets all the datafeeds' statistics in the cluster - */ -public class GetDatafeedStatsRequest implements Validatable, ToXContentObject { - - public static final ParseField ALLOW_NO_MATCH = new ParseField("allow_no_match"); - - @SuppressWarnings("unchecked") - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "get_datafeed_stats_request", - a -> new GetDatafeedStatsRequest((List) a[0]) - ); - - static { - PARSER.declareField( - ConstructingObjectParser.constructorArg(), - p -> Arrays.asList(Strings.commaDelimitedListToStringArray(p.text())), - DatafeedConfig.ID, - ObjectParser.ValueType.STRING_ARRAY - ); - PARSER.declareBoolean(GetDatafeedStatsRequest::setAllowNoMatch, ALLOW_NO_MATCH); - } - - private static final String ALL_DATAFEEDS = "_all"; - - private final List datafeedIds; - private Boolean allowNoMatch; - - /** - * Explicitly gets all datafeeds statistics - * - * @return a {@link GetDatafeedStatsRequest} for all existing datafeeds - */ - public static GetDatafeedStatsRequest getAllDatafeedStatsRequest() { - return new GetDatafeedStatsRequest(ALL_DATAFEEDS); - } - - GetDatafeedStatsRequest(List datafeedIds) { - if (datafeedIds.stream().anyMatch(Objects::isNull)) { - throw new NullPointerException("datafeedIds must not contain null values"); - } - this.datafeedIds = new ArrayList<>(datafeedIds); - } - - /** - * Get the specified Datafeed's statistics via their unique datafeedIds - * - * @param datafeedIds must be non-null and each datafeedId must be non-null - */ - public GetDatafeedStatsRequest(String... datafeedIds) { - this(Arrays.asList(datafeedIds)); - } - - /** - * All the datafeedIds for which to get statistics - */ - public List getDatafeedIds() { - return datafeedIds; - } - - public Boolean getAllowNoMatch() { - return this.allowNoMatch; - } - - /** - * Whether to ignore if a wildcard expression matches no datafeeds. - * - * This includes {@code _all} string or when no datafeeds have been specified - * - * @param allowNoMatch When {@code true} ignore if wildcard or {@code _all} matches no datafeeds. 
Defaults to {@code true} - */ - public void setAllowNoMatch(boolean allowNoMatch) { - this.allowNoMatch = allowNoMatch; - } - - @Override - public int hashCode() { - return Objects.hash(datafeedIds, allowNoMatch); - } - - @Override - public boolean equals(Object other) { - if (this == other) { - return true; - } - - if (other == null || getClass() != other.getClass()) { - return false; - } - - GetDatafeedStatsRequest that = (GetDatafeedStatsRequest) other; - return Objects.equals(datafeedIds, that.datafeedIds) && Objects.equals(allowNoMatch, that.allowNoMatch); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { - builder.startObject(); - builder.field(DatafeedConfig.ID.getPreferredName(), Strings.collectionToCommaDelimitedString(datafeedIds)); - if (allowNoMatch != null) { - builder.field(ALLOW_NO_MATCH.getPreferredName(), allowNoMatch); - } - builder.endObject(); - return builder; - } - -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetDatafeedStatsResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetDatafeedStatsResponse.java deleted file mode 100644 index 94a49fc074c04..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetDatafeedStatsResponse.java +++ /dev/null @@ -1,79 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.ml.datafeed.DatafeedStats; -import org.elasticsearch.common.Strings; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.List; -import java.util.Objects; - -import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; - -/** - * Contains a {@link List} of the found {@link DatafeedStats} objects and the total count found - */ -public class GetDatafeedStatsResponse extends AbstractResultResponse { - - public static final ParseField RESULTS_FIELD = new ParseField("datafeeds"); - - @SuppressWarnings("unchecked") - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "get_datafeed_stats_response", - true, - a -> new GetDatafeedStatsResponse((List) a[0], (long) a[1]) - ); - - static { - PARSER.declareObjectArray(constructorArg(), DatafeedStats.PARSER, RESULTS_FIELD); - PARSER.declareLong(constructorArg(), COUNT); - } - - GetDatafeedStatsResponse(List results, long count) { - super(RESULTS_FIELD, results, count); - } - - /** - * The collection of {@link DatafeedStats} objects found in the query - */ - public List datafeedStats() { - return results; - } - - public static GetDatafeedStatsResponse fromXContent(XContentParser parser) throws IOException { - return PARSER.parse(parser, null); - } - - @Override - public int hashCode() { - return Objects.hash(results, count); - } - - @Override - public boolean equals(Object obj) { - if (this == obj) { - return true; - } - - if (obj == null || getClass() != obj.getClass()) { - return false; - } - - GetDatafeedStatsResponse other = (GetDatafeedStatsResponse) obj; - return 
Objects.equals(results, other.results) && count == other.count; - } - - @Override - public final String toString() { - return Strings.toString(this); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetFiltersRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetFiltersRequest.java deleted file mode 100644 index cafa4d8b331f5..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetFiltersRequest.java +++ /dev/null @@ -1,105 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.Validatable; -import org.elasticsearch.client.core.PageParams; -import org.elasticsearch.client.ml.job.config.MlFilter; -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Objects; - -/** - * A request to retrieve {@link MlFilter}s - */ -public class GetFiltersRequest implements Validatable, ToXContentObject { - - public static final ObjectParser PARSER = new ObjectParser<>("get_filters_request", GetFiltersRequest::new); - - static { - PARSER.declareString(GetFiltersRequest::setFilterId, MlFilter.ID); - PARSER.declareInt(GetFiltersRequest::setFrom, PageParams.FROM); - PARSER.declareInt(GetFiltersRequest::setSize, PageParams.SIZE); - } - - private String filterId; - private Integer from; - private Integer size; - - public String getFilterId() { - return filterId; - } - - public Integer getFrom() { - return from; - } - - public Integer getSize() { - return size; - } - - /** - * Sets the filter id - * @param filterId the filter id - */ - public void setFilterId(String filterId) { - this.filterId = filterId; - } - - /** - * Sets the number of filters to skip. - * @param from set the `from` parameter - */ - public void setFrom(Integer from) { - this.from = from; - } - - /** - * Sets the number of filters to return. 
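A short sketch of how the id and paging setters on the GetFiltersRequest removed here compose; the filter id is a hypothetical example value and no client call is shown:

    GetFiltersRequest request = new GetFiltersRequest();
    request.setFilterId("safe_domains"); // optional: fetch a single filter by id
    request.setFrom(0);                  // skip no results
    request.setSize(50);                 // return at most 50 filters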
- * @param size set the `size` parameter - */ - public void setSize(Integer size) { - this.size = size; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - if (filterId != null) { - builder.field(MlFilter.ID.getPreferredName(), filterId); - } - if (from != null) { - builder.field(PageParams.FROM.getPreferredName(), from); - } - if (size != null) { - builder.field(PageParams.SIZE.getPreferredName(), size); - } - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object obj) { - if (obj == null) { - return false; - } - if (getClass() != obj.getClass()) { - return false; - } - GetFiltersRequest request = (GetFiltersRequest) obj; - return Objects.equals(filterId, request.filterId) && Objects.equals(from, request.from) && Objects.equals(size, request.size); - } - - @Override - public int hashCode() { - return Objects.hash(filterId, from, size); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetFiltersResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetFiltersResponse.java deleted file mode 100644 index a0a190d89cfc2..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetFiltersResponse.java +++ /dev/null @@ -1,80 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.ml.job.config.MlFilter; -import org.elasticsearch.common.Strings; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.List; -import java.util.Objects; -import java.util.stream.Collectors; - -import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; - -/** - * Contains a {@link List} of the found {@link MlFilter} objects and the total count found - */ -public class GetFiltersResponse extends AbstractResultResponse { - - public static final ParseField RESULTS_FIELD = new ParseField("filters"); - - @SuppressWarnings("unchecked") - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "get_filters_response", - true, - a -> new GetFiltersResponse((List) a[0], (long) a[1]) - ); - - static { - PARSER.declareObjectArray(constructorArg(), MlFilter.PARSER, RESULTS_FIELD); - PARSER.declareLong(constructorArg(), AbstractResultResponse.COUNT); - } - - GetFiltersResponse(List filters, long count) { - super(RESULTS_FIELD, filters.stream().map(MlFilter.Builder::build).collect(Collectors.toList()), count); - } - - /** - * The collection of {@link MlFilter} objects found in the query - */ - public List filters() { - return results; - } - - public static GetFiltersResponse fromXContent(XContentParser parser) throws IOException { - return PARSER.parse(parser, null); - } - - @Override - public int hashCode() { - return Objects.hash(results, count); - } - - @Override - public boolean equals(Object obj) { - if (this == obj) { - return true; - } - - if (obj == null || getClass() != obj.getClass()) { - return false; - } - - GetFiltersResponse 
other = (GetFiltersResponse) obj; - return Objects.equals(results, other.results) && count == other.count; - } - - @Override - public final String toString() { - return Strings.toString(this); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetInfluencersRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetInfluencersRequest.java deleted file mode 100644 index b174f4c91f1af..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetInfluencersRequest.java +++ /dev/null @@ -1,212 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.Validatable; -import org.elasticsearch.client.core.PageParams; -import org.elasticsearch.client.ml.job.config.Job; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Objects; - -/** - * A request to retrieve influencers of a given job - */ -public class GetInfluencersRequest implements Validatable, ToXContentObject { - - public static final ParseField EXCLUDE_INTERIM = new ParseField("exclude_interim"); - public static final ParseField START = new ParseField("start"); - public static final ParseField END = new ParseField("end"); - public static final ParseField INFLUENCER_SCORE = new ParseField("influencer_score"); - public static final ParseField SORT = new ParseField("sort"); - public static final ParseField DESCENDING = new ParseField("desc"); - - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "get_influencers_request", - a -> new GetInfluencersRequest((String) a[0]) - ); - - static { - PARSER.declareString(ConstructingObjectParser.constructorArg(), Job.ID); - PARSER.declareBoolean(GetInfluencersRequest::setExcludeInterim, EXCLUDE_INTERIM); - PARSER.declareStringOrNull(GetInfluencersRequest::setStart, START); - PARSER.declareStringOrNull(GetInfluencersRequest::setEnd, END); - PARSER.declareObject(GetInfluencersRequest::setPageParams, PageParams.PARSER, PageParams.PAGE); - PARSER.declareDouble(GetInfluencersRequest::setInfluencerScore, INFLUENCER_SCORE); - PARSER.declareString(GetInfluencersRequest::setSort, SORT); - PARSER.declareBoolean(GetInfluencersRequest::setDescending, DESCENDING); - } - - private final String jobId; - private Boolean excludeInterim; - private String start; - private String end; - private Double influencerScore; - private PageParams pageParams; - private String sort; - private Boolean descending; - - /** - * Constructs a request to retrieve influencers of a given job - * @param jobId id of the job to retrieve influencers of - */ - public GetInfluencersRequest(String jobId) { - this.jobId = Objects.requireNonNull(jobId); - } - - public String getJobId() { - return jobId; - } - - public Boolean getExcludeInterim() { - return excludeInterim; - } - - /** - * Sets the value of "exclude_interim". - * When {@code true}, interim influencers will be filtered out. 
- * @param excludeInterim value of "exclude_interim" to be set - */ - public void setExcludeInterim(Boolean excludeInterim) { - this.excludeInterim = excludeInterim; - } - - public String getStart() { - return start; - } - - /** - * Sets the value of "start" which is a timestamp. - * Only influencers whose timestamp is on or after the "start" value will be returned. - * @param start String representation of a timestamp; may be an epoch seconds, epoch millis or an ISO string - */ - public void setStart(String start) { - this.start = start; - } - - public String getEnd() { - return end; - } - - /** - * Sets the value of "end" which is a timestamp. - * Only influencers whose timestamp is before the "end" value will be returned. - * @param end String representation of a timestamp; may be an epoch seconds, epoch millis or an ISO string - */ - public void setEnd(String end) { - this.end = end; - } - - public PageParams getPageParams() { - return pageParams; - } - - /** - * Sets the paging parameters - * @param pageParams The paging parameters - */ - public void setPageParams(PageParams pageParams) { - this.pageParams = pageParams; - } - - public Double getInfluencerScore() { - return influencerScore; - } - - /** - * Sets the value of "influencer_score". - * Only influencers with "influencer_score" equal or greater will be returned. - * @param influencerScore value of "influencer_score". - */ - public void setInfluencerScore(Double influencerScore) { - this.influencerScore = influencerScore; - } - - public String getSort() { - return sort; - } - - /** - * Sets the value of "sort". - * Specifies the influencer field to sort on. - * @param sort value of "sort". - */ - public void setSort(String sort) { - this.sort = sort; - } - - public Boolean getDescending() { - return descending; - } - - /** - * Sets the value of "desc". - * Specifies the sorting order. 
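Taken together, the setters above compose like this; a sketch with a hypothetical job id and example thresholds (PageParams comes from org.elasticsearch.client.core):

    GetInfluencersRequest request = new GetInfluencersRequest("my-job");
    request.setStart("2022-01-01T00:00:00Z");  // ISO timestamp; epoch seconds or millis also accepted
    request.setEnd("2022-02-01T00:00:00Z");
    request.setExcludeInterim(true);           // drop interim influencers
    request.setInfluencerScore(75.0);          // keep influencers scoring 75 or higher
    request.setSort("influencer_score");
    request.setDescending(true);               // highest scores first
    request.setPageParams(new PageParams(0, 100));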
- * @param descending value of "desc" - */ - public void setDescending(Boolean descending) { - this.descending = descending; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(Job.ID.getPreferredName(), jobId); - if (excludeInterim != null) { - builder.field(EXCLUDE_INTERIM.getPreferredName(), excludeInterim); - } - if (start != null) { - builder.field(START.getPreferredName(), start); - } - if (end != null) { - builder.field(END.getPreferredName(), end); - } - if (pageParams != null) { - builder.field(PageParams.PAGE.getPreferredName(), pageParams); - } - if (influencerScore != null) { - builder.field(INFLUENCER_SCORE.getPreferredName(), influencerScore); - } - if (sort != null) { - builder.field(SORT.getPreferredName(), sort); - } - if (descending != null) { - builder.field(DESCENDING.getPreferredName(), descending); - } - builder.endObject(); - return builder; - } - - @Override - public int hashCode() { - return Objects.hash(jobId, excludeInterim, influencerScore, pageParams, start, end, sort, descending); - } - - @Override - public boolean equals(Object obj) { - if (obj == null) { - return false; - } - if (getClass() != obj.getClass()) { - return false; - } - GetInfluencersRequest other = (GetInfluencersRequest) obj; - return Objects.equals(jobId, other.jobId) - && Objects.equals(excludeInterim, other.excludeInterim) - && Objects.equals(influencerScore, other.influencerScore) - && Objects.equals(pageParams, other.pageParams) - && Objects.equals(start, other.start) - && Objects.equals(end, other.end) - && Objects.equals(sort, other.sort) - && Objects.equals(descending, other.descending); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetInfluencersResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetInfluencersResponse.java deleted file mode 100644 index 6d075c7fb535d..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetInfluencersResponse.java +++ /dev/null @@ -1,70 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.ml.job.results.Influencer; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.List; -import java.util.Objects; - -/** - * A response containing the requested influencers - */ -public class GetInfluencersResponse extends AbstractResultResponse { - - public static final ParseField INFLUENCERS = new ParseField("influencers"); - - @SuppressWarnings("unchecked") - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "get_influencers_response", - true, - a -> new GetInfluencersResponse((List) a[0], (long) a[1]) - ); - - static { - PARSER.declareObjectArray(ConstructingObjectParser.constructorArg(), Influencer.PARSER, INFLUENCERS); - PARSER.declareLong(ConstructingObjectParser.constructorArg(), COUNT); - } - - public static GetInfluencersResponse fromXContent(XContentParser parser) throws IOException { - return PARSER.parse(parser, null); - } - - GetInfluencersResponse(List influencers, long count) { - super(INFLUENCERS, influencers, count); - } - - /** - * The retrieved influencers - * @return the retrieved influencers - */ - public List influencers() { - return results; - } - - @Override - public int hashCode() { - return Objects.hash(count, results); - } - - @Override - public boolean equals(Object obj) { - if (obj == null) { - return false; - } - if (getClass() != obj.getClass()) { - return false; - } - GetInfluencersResponse other = (GetInfluencersResponse) obj; - return count == other.count && Objects.equals(results, other.results); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetJobRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetJobRequest.java deleted file mode 100644 index 037af8a412132..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetJobRequest.java +++ /dev/null @@ -1,148 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.Validatable; -import org.elasticsearch.client.ml.job.config.Job; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; -import java.util.Objects; - -/** - * Request object to get {@link Job} objects with the matching {@code jobId}s or - * {@code groupName}s. 
- * - * {@code _all} explicitly gets all the jobs in the cluster - * An empty request (no {@code jobId}s) implicitly gets all the jobs in the cluster - */ -public class GetJobRequest implements Validatable, ToXContentObject { - - public static final ParseField JOB_IDS = new ParseField("job_ids"); - public static final ParseField ALLOW_NO_MATCH = new ParseField("allow_no_match"); - public static final String EXCLUDE_GENERATED = "exclude_generated"; - - private static final String ALL_JOBS = "_all"; - private final List jobIds; - private Boolean allowNoMatch; - private Boolean excludeGenerated; - - @SuppressWarnings("unchecked") - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "get_job_request", - true, - a -> new GetJobRequest(a[0] == null ? new ArrayList<>() : (List) a[0]) - ); - - static { - PARSER.declareStringArray(ConstructingObjectParser.optionalConstructorArg(), JOB_IDS); - PARSER.declareBoolean(GetJobRequest::setAllowNoMatch, ALLOW_NO_MATCH); - } - - /** - * Helper method to create a query that will get ALL jobs - * @return new {@link GetJobRequest} object searching for the jobId "_all" - */ - public static GetJobRequest getAllJobsRequest() { - return new GetJobRequest(ALL_JOBS); - } - - /** - * Get the specified {@link Job} configurations via their unique jobIds - * @param jobIds must not contain any null values - */ - public GetJobRequest(String... jobIds) { - this(Arrays.asList(jobIds)); - } - - GetJobRequest(List jobIds) { - if (jobIds.stream().anyMatch(Objects::isNull)) { - throw new NullPointerException("jobIds must not contain null values"); - } - this.jobIds = new ArrayList<>(jobIds); - } - - /** - * All the jobIds for which to get configuration information - */ - public List getJobIds() { - return jobIds; - } - - /** - * Whether to ignore if a wildcard expression matches no jobs. - * - * @param allowNoMatch If this is {@code false}, then an error is returned when a wildcard (or {@code _all}) does not match any jobs - */ - public void setAllowNoMatch(boolean allowNoMatch) { - this.allowNoMatch = allowNoMatch; - } - - public Boolean getAllowNoMatch() { - return allowNoMatch; - } - - /** - * Setting this flag to `true` removes certain fields from the configuration on retrieval. - * - * This is useful when getting the configuration and wanting to put it in another cluster. - * - * Default value is false. 
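A brief sketch of the two ways the removed GetJobRequest was typically constructed; the ids are hypothetical:

    GetJobRequest all = GetJobRequest.getAllJobsRequest();         // expands to the "_all" job id
    GetJobRequest some = new GetJobRequest("job-1", "my-group-*"); // job ids, group names, or wildcards
    some.setAllowNoMatch(false);     // error if the wildcard matches no jobs
    some.setExcludeGenerated(true);  // strip generated fields for cross-cluster reuse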
- * @param excludeGenerated Boolean value indicating if certain fields should be removed - */ - public void setExcludeGenerated(boolean excludeGenerated) { - this.excludeGenerated = excludeGenerated; - } - - public Boolean getExcludeGenerated() { - return excludeGenerated; - } - - @Override - public int hashCode() { - return Objects.hash(jobIds, excludeGenerated, allowNoMatch); - } - - @Override - public boolean equals(Object other) { - if (this == other) { - return true; - } - - if (other == null || other.getClass() != getClass()) { - return false; - } - - GetJobRequest that = (GetJobRequest) other; - return Objects.equals(jobIds, that.jobIds) - && Objects.equals(excludeGenerated, that.excludeGenerated) - && Objects.equals(allowNoMatch, that.allowNoMatch); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - - if (jobIds.isEmpty() == false) { - builder.stringListField(JOB_IDS.getPreferredName(), jobIds); - } - - if (allowNoMatch != null) { - builder.field(ALLOW_NO_MATCH.getPreferredName(), allowNoMatch); - } - - builder.endObject(); - return builder; - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetJobResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetJobResponse.java deleted file mode 100644 index ccc40edf3687d..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetJobResponse.java +++ /dev/null @@ -1,80 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.ml.job.config.Job; -import org.elasticsearch.common.Strings; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.List; -import java.util.Objects; -import java.util.stream.Collectors; - -import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; - -/** - * Contains a {@link List} of the found {@link Job} objects and the total count found - */ -public class GetJobResponse extends AbstractResultResponse { - - public static final ParseField RESULTS_FIELD = new ParseField("jobs"); - - @SuppressWarnings("unchecked") - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "jobs_response", - true, - a -> new GetJobResponse((List) a[0], (long) a[1]) - ); - - static { - PARSER.declareObjectArray(constructorArg(), Job.PARSER, RESULTS_FIELD); - PARSER.declareLong(constructorArg(), AbstractResultResponse.COUNT); - } - - GetJobResponse(List jobBuilders, long count) { - super(RESULTS_FIELD, jobBuilders.stream().map(Job.Builder::build).collect(Collectors.toList()), count); - } - - /** - * The collection of {@link Job} objects found in the query - */ - public List jobs() { - return results; - } - - public static GetJobResponse fromXContent(XContentParser parser) throws IOException { - return PARSER.parse(parser, null); - } - - @Override - public int hashCode() { - return Objects.hash(results, count); - } - - @Override - public boolean equals(Object obj) { - if (this == obj) { - return true; - } - - if (obj == null || getClass() != obj.getClass()) { - return false; - } - - GetJobResponse other = (GetJobResponse) obj; - return Objects.equals(results, other.results) && count == other.count; - } - - @Override - public final String toString() { - return Strings.toString(this); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetJobStatsRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetJobStatsRequest.java deleted file mode 100644 index 009b0239e276d..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetJobStatsRequest.java +++ /dev/null @@ -1,132 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.Validatable; -import org.elasticsearch.client.ml.job.config.Job; -import org.elasticsearch.common.Strings; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; -import java.util.Objects; - -/** - * Request object to get {@link org.elasticsearch.client.ml.job.stats.JobStats} by their respective jobIds - * - * {@code _all} explicitly gets all the jobs' statistics in the cluster - * An empty request (no {@code jobId}s) implicitly gets all the jobs' statistics in the cluster - */ -public class GetJobStatsRequest implements Validatable, ToXContentObject { - - public static final ParseField ALLOW_NO_MATCH = new ParseField("allow_no_match"); - - @SuppressWarnings("unchecked") - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "get_jobs_stats_request", - a -> new GetJobStatsRequest((List) a[0]) - ); - - static { - PARSER.declareField( - ConstructingObjectParser.constructorArg(), - p -> Arrays.asList(Strings.commaDelimitedListToStringArray(p.text())), - Job.ID, - ObjectParser.ValueType.STRING_ARRAY - ); - PARSER.declareBoolean(GetJobStatsRequest::setAllowNoMatch, ALLOW_NO_MATCH); - } - - private static final String ALL_JOBS = "_all"; - - private final List jobIds; - private Boolean allowNoMatch; - - /** - * Explicitly gets all jobs statistics - * - * @return a {@link GetJobStatsRequest} for all existing jobs - */ - public static GetJobStatsRequest getAllJobStatsRequest() { - return new GetJobStatsRequest(ALL_JOBS); - } - - GetJobStatsRequest(List jobIds) { - if (jobIds.stream().anyMatch(Objects::isNull)) { - throw new NullPointerException("jobIds must not contain null values"); - } - this.jobIds = new ArrayList<>(jobIds); - } - - /** - * Get the specified Job's statistics via their unique jobIds - * - * @param jobIds must be non-null and each jobId must be non-null - */ - public GetJobStatsRequest(String... jobIds) { - this(Arrays.asList(jobIds)); - } - - /** - * All the jobIds for which to get statistics - */ - public List getJobIds() { - return jobIds; - } - - public Boolean getAllowNoMatch() { - return this.allowNoMatch; - } - - /** - * Whether to ignore if a wildcard expression matches no jobs. - * - * This includes {@code _all} string or when no jobs have been specified - * - * @param allowNoMatch When {@code true} ignore if wildcard or {@code _all} matches no jobs. 
Defaults to {@code true} - */ - public void setAllowNoMatch(boolean allowNoMatch) { - this.allowNoMatch = allowNoMatch; - } - - @Override - public int hashCode() { - return Objects.hash(jobIds, allowNoMatch); - } - - @Override - public boolean equals(Object other) { - if (this == other) { - return true; - } - - if (other == null || getClass() != other.getClass()) { - return false; - } - - GetJobStatsRequest that = (GetJobStatsRequest) other; - return Objects.equals(jobIds, that.jobIds) && Objects.equals(allowNoMatch, that.allowNoMatch); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(Job.ID.getPreferredName(), Strings.collectionToCommaDelimitedString(jobIds)); - if (allowNoMatch != null) { - builder.field(ALLOW_NO_MATCH.getPreferredName(), allowNoMatch); - } - builder.endObject(); - return builder; - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetJobStatsResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetJobStatsResponse.java deleted file mode 100644 index 3443010fe66a4..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetJobStatsResponse.java +++ /dev/null @@ -1,79 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.ml.job.stats.JobStats; -import org.elasticsearch.common.Strings; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.List; -import java.util.Objects; - -import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; - -/** - * Contains a {@link List} of the found {@link JobStats} objects and the total count found - */ -public class GetJobStatsResponse extends AbstractResultResponse { - - public static final ParseField RESULTS_FIELD = new ParseField("jobs"); - - @SuppressWarnings("unchecked") - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "jobs_stats_response", - true, - a -> new GetJobStatsResponse((List) a[0], (long) a[1]) - ); - - static { - PARSER.declareObjectArray(constructorArg(), JobStats.PARSER, RESULTS_FIELD); - PARSER.declareLong(constructorArg(), COUNT); - } - - GetJobStatsResponse(List jobStats, long count) { - super(RESULTS_FIELD, jobStats, count); - } - - /** - * The collection of {@link JobStats} objects found in the query - */ - public List jobStats() { - return results; - } - - public static GetJobStatsResponse fromXContent(XContentParser parser) throws IOException { - return PARSER.parse(parser, null); - } - - @Override - public int hashCode() { - return Objects.hash(results, count); - } - - @Override - public boolean equals(Object obj) { - if (this == obj) { - return true; - } - - if (obj == null || getClass() != obj.getClass()) { - return false; - } - - GetJobStatsResponse other = (GetJobStatsResponse) obj; - return Objects.equals(results, other.results) && count == other.count; - } - - @Override - public final String toString() { - return 
Strings.toString(this); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetModelSnapshotsRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetModelSnapshotsRequest.java deleted file mode 100644 index d6ecbf18a2444..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetModelSnapshotsRequest.java +++ /dev/null @@ -1,191 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.Validatable; -import org.elasticsearch.client.core.PageParams; -import org.elasticsearch.client.ml.job.config.Job; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Objects; - -/** - * A request to retrieve information about model snapshots for a given job - */ -public class GetModelSnapshotsRequest implements Validatable, ToXContentObject { - - public static final ParseField SNAPSHOT_ID = new ParseField("snapshot_id"); - public static final ParseField SORT = new ParseField("sort"); - public static final ParseField START = new ParseField("start"); - public static final ParseField END = new ParseField("end"); - public static final ParseField DESC = new ParseField("desc"); - - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "get_model_snapshots_request", - a -> new GetModelSnapshotsRequest((String) a[0]) - ); - - static { - PARSER.declareString(ConstructingObjectParser.constructorArg(), Job.ID); - PARSER.declareString(GetModelSnapshotsRequest::setSnapshotId, SNAPSHOT_ID); - PARSER.declareString(GetModelSnapshotsRequest::setSort, SORT); - PARSER.declareStringOrNull(GetModelSnapshotsRequest::setStart, START); - PARSER.declareStringOrNull(GetModelSnapshotsRequest::setEnd, END); - PARSER.declareBoolean(GetModelSnapshotsRequest::setDesc, DESC); - PARSER.declareObject(GetModelSnapshotsRequest::setPageParams, PageParams.PARSER, PageParams.PAGE); - } - - private final String jobId; - private String snapshotId; - private String sort; - private String start; - private String end; - private Boolean desc; - private PageParams pageParams; - - /** - * Constructs a request to retrieve snapshot information from a given job - * @param jobId id of the job from which to retrieve results - */ - public GetModelSnapshotsRequest(String jobId) { - this.jobId = Objects.requireNonNull(jobId); - } - - public String getJobId() { - return jobId; - } - - public String getSnapshotId() { - return snapshotId; - } - - /** - * Sets the id of the snapshot to retrieve. - * @param snapshotId the snapshot id - */ - public void setSnapshotId(String snapshotId) { - this.snapshotId = snapshotId; - } - - public String getSort() { - return sort; - } - - /** - * Sets the value of "sort". - * Specifies the snapshot field to sort on. - * @param sort value of "sort". 
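A usage sketch for the snapshot retrieval request removed here, with a hypothetical job id and snapshot id:

    GetModelSnapshotsRequest request = new GetModelSnapshotsRequest("my-job");
    request.setSnapshotId("1575402236");       // optional: narrow to one snapshot
    request.setSort("timestamp");              // field to sort snapshots on
    request.setDesc(true);                     // newest first
    request.setStart("2022-01-01T00:00:00Z");  // only snapshots at or after this time
    request.setPageParams(new PageParams(0, 10));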
- */ - public void setSort(String sort) { - this.sort = sort; - } - - public PageParams getPageParams() { - return pageParams; - } - - /** - * Sets the paging parameters - * @param pageParams the paging parameters - */ - public void setPageParams(PageParams pageParams) { - this.pageParams = pageParams; - } - - public String getStart() { - return start; - } - - /** - * Sets the value of "start" which is a timestamp. - * Only snapshots whose timestamp is on or after the "start" value will be returned. - * @param start String representation of a timestamp; may be an epoch seconds, epoch millis or an ISO string - */ - public void setStart(String start) { - this.start = start; - } - - public String getEnd() { - return end; - } - - /** - * Sets the value of "end" which is a timestamp. - * Only snapshots whose timestamp is before the "end" value will be returned. - * @param end String representation of a timestamp; may be an epoch seconds, epoch millis or an ISO string - */ - public void setEnd(String end) { - this.end = end; - } - - public Boolean getDesc() { - return desc; - } - - /** - * Sets the value of "desc". - * Specifies the sorting order. - * @param desc value of "desc" - */ - public void setDesc(boolean desc) { - this.desc = desc; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(Job.ID.getPreferredName(), jobId); - if (snapshotId != null) { - builder.field(SNAPSHOT_ID.getPreferredName(), snapshotId); - } - if (sort != null) { - builder.field(SORT.getPreferredName(), sort); - } - if (start != null) { - builder.field(START.getPreferredName(), start); - } - if (end != null) { - builder.field(END.getPreferredName(), end); - } - if (desc != null) { - builder.field(DESC.getPreferredName(), desc); - } - if (pageParams != null) { - builder.field(PageParams.PAGE.getPreferredName(), pageParams); - } - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object obj) { - if (obj == null) { - return false; - } - if (getClass() != obj.getClass()) { - return false; - } - GetModelSnapshotsRequest request = (GetModelSnapshotsRequest) obj; - return Objects.equals(jobId, request.jobId) - && Objects.equals(snapshotId, request.snapshotId) - && Objects.equals(sort, request.sort) - && Objects.equals(start, request.start) - && Objects.equals(end, request.end) - && Objects.equals(desc, request.desc) - && Objects.equals(pageParams, request.pageParams); - } - - @Override - public int hashCode() { - return Objects.hash(jobId, snapshotId, pageParams, start, end, sort, desc); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetModelSnapshotsResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetModelSnapshotsResponse.java deleted file mode 100644 index b52055ced3046..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetModelSnapshotsResponse.java +++ /dev/null @@ -1,71 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.ml.job.process.ModelSnapshot; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.List; -import java.util.Objects; -import java.util.stream.Collectors; - -/** - * A response containing the requested snapshots - */ -public class GetModelSnapshotsResponse extends AbstractResultResponse { - - public static final ParseField SNAPSHOTS = new ParseField("model_snapshots"); - - @SuppressWarnings("unchecked") - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "get_model_snapshots_response", - true, - a -> new GetModelSnapshotsResponse((List) a[0], (long) a[1]) - ); - - static { - PARSER.declareObjectArray(ConstructingObjectParser.constructorArg(), ModelSnapshot.PARSER, SNAPSHOTS); - PARSER.declareLong(ConstructingObjectParser.constructorArg(), COUNT); - } - - public static GetModelSnapshotsResponse fromXContent(XContentParser parser) throws IOException { - return PARSER.parse(parser, null); - } - - GetModelSnapshotsResponse(List snapshotBuilders, long count) { - super(SNAPSHOTS, snapshotBuilders.stream().map(ModelSnapshot.Builder::build).collect(Collectors.toList()), count); - } - - /** - * The retrieved snapshots - * @return the retrieved snapshots - */ - public List snapshots() { - return results; - } - - @Override - public int hashCode() { - return Objects.hash(count, results); - } - - @Override - public boolean equals(Object obj) { - if (obj == null) { - return false; - } - if (getClass() != obj.getClass()) { - return false; - } - GetModelSnapshotsResponse other = (GetModelSnapshotsResponse) obj; - return count == other.count && Objects.equals(results, other.results); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetOverallBucketsRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetOverallBucketsRequest.java deleted file mode 100644 index 628fcc804d423..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetOverallBucketsRequest.java +++ /dev/null @@ -1,251 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.Validatable; -import org.elasticsearch.client.ml.job.config.Job; -import org.elasticsearch.common.Strings; -import org.elasticsearch.core.TimeValue; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Arrays; -import java.util.Collections; -import java.util.List; -import java.util.Objects; - -/** - * A request to retrieve overall buckets of set of jobs - */ -public class GetOverallBucketsRequest implements Validatable, ToXContentObject { - - public static final ParseField TOP_N = new ParseField("top_n"); - public static final ParseField BUCKET_SPAN = new ParseField("bucket_span"); - public static final ParseField OVERALL_SCORE = new ParseField("overall_score"); - public static final ParseField EXCLUDE_INTERIM = new ParseField("exclude_interim"); - public static final ParseField START = new ParseField("start"); - public static final ParseField END = new ParseField("end"); - public static final ParseField ALLOW_NO_MATCH = new ParseField("allow_no_match"); - - private static final String ALL_JOBS = "_all"; - - @SuppressWarnings("unchecked") - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "get_overall_buckets_request", - a -> new GetOverallBucketsRequest((String) a[0]) - ); - - static { - PARSER.declareString(ConstructingObjectParser.constructorArg(), Job.ID); - PARSER.declareInt(GetOverallBucketsRequest::setTopN, TOP_N); - PARSER.declareString(GetOverallBucketsRequest::setBucketSpan, BUCKET_SPAN); - PARSER.declareBoolean(GetOverallBucketsRequest::setExcludeInterim, EXCLUDE_INTERIM); - PARSER.declareDouble(GetOverallBucketsRequest::setOverallScore, OVERALL_SCORE); - PARSER.declareStringOrNull(GetOverallBucketsRequest::setStart, START); - PARSER.declareStringOrNull(GetOverallBucketsRequest::setEnd, END); - PARSER.declareBoolean(GetOverallBucketsRequest::setAllowNoMatch, ALLOW_NO_MATCH); - } - - private final List jobIds; - private Integer topN; - private TimeValue bucketSpan; - private Boolean excludeInterim; - private Double overallScore; - private String start; - private String end; - private Boolean allowNoMatch; - - private GetOverallBucketsRequest(String jobId) { - this(Strings.tokenizeToStringArray(jobId, ",")); - } - - /** - * Constructs a request to retrieve overall buckets for a set of jobs - * @param jobIds The job identifiers. Each can be a job identifier, a group name, or a wildcard expression. - */ - public GetOverallBucketsRequest(String... jobIds) { - this(Arrays.asList(jobIds)); - } - - /** - * Constructs a request to retrieve overall buckets for a set of jobs - * @param jobIds The job identifiers. Each can be a job identifier, a group name, or a wildcard expression. - */ - public GetOverallBucketsRequest(List jobIds) { - if (jobIds.stream().anyMatch(Objects::isNull)) { - throw new NullPointerException("jobIds must not contain null values"); - } - if (jobIds.isEmpty()) { - this.jobIds = Collections.singletonList(ALL_JOBS); - } else { - this.jobIds = Collections.unmodifiableList(jobIds); - } - } - - public List getJobIds() { - return jobIds; - } - - public Integer getTopN() { - return topN; - } - - /** - * Sets the value of "top_n". - * @param topN The number of top job bucket scores to be used in the overall_score calculation. Defaults to 1. 
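The interplay of top_n, bucket_span, and overall_score described above is easier to see in code; a sketch with hypothetical job ids (TimeValue is org.elasticsearch.core.TimeValue, imported by this class):

    GetOverallBucketsRequest request = new GetOverallBucketsRequest("job-1", "job-2");
    request.setTopN(2);                                  // average the two highest job bucket scores
    request.setBucketSpan(TimeValue.timeValueHours(1));  // must be >= the largest job's bucket_span
    request.setOverallScore(60.0);                       // keep overall buckets scoring 60 or higher
    request.setExcludeInterim(true);                     // drop buckets containing interim job buckets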
- */ - public void setTopN(Integer topN) { - this.topN = topN; - } - - public TimeValue getBucketSpan() { - return bucketSpan; - } - - /** - * Sets the value of "bucket_span". - * @param bucketSpan The span of the overall buckets. Must be greater or equal to the largest job’s bucket_span. - * Defaults to the largest job’s bucket_span. - */ - public void setBucketSpan(TimeValue bucketSpan) { - this.bucketSpan = bucketSpan; - } - - private void setBucketSpan(String bucketSpan) { - this.bucketSpan = TimeValue.parseTimeValue(bucketSpan, BUCKET_SPAN.getPreferredName()); - } - - public boolean isExcludeInterim() { - return excludeInterim; - } - - /** - * Sets the value of "exclude_interim". - * When {@code true}, interim overall buckets will be filtered out. - * Overall buckets are interim if any of the job buckets within the overall bucket interval are interim. - * @param excludeInterim value of "exclude_interim" to be set - */ - public void setExcludeInterim(Boolean excludeInterim) { - this.excludeInterim = excludeInterim; - } - - public String getStart() { - return start; - } - - /** - * Sets the value of "start" which is a timestamp. - * Only overall buckets whose timestamp is on or after the "start" value will be returned. - * @param start String representation of a timestamp; may be an epoch seconds, epoch millis or an ISO string - */ - public void setStart(String start) { - this.start = start; - } - - public String getEnd() { - return end; - } - - /** - * Sets the value of "end" which is a timestamp. - * Only overall buckets whose timestamp is before the "end" value will be returned. - * @param end String representation of a timestamp; may be an epoch seconds, epoch millis or an ISO string - */ - public void setEnd(String end) { - this.end = end; - } - - public Double getOverallScore() { - return overallScore; - } - - /** - * Sets the value of "overall_score". - * Only buckets with "overall_score" equal or greater will be returned. - * @param overallScore value of "anomaly_score". - */ - public void setOverallScore(double overallScore) { - this.overallScore = overallScore; - } - - /** - * See {@link GetJobRequest#getAllowNoMatch()} - * @param allowNoMatch value of "allow_no_match". - */ - public void setAllowNoMatch(boolean allowNoMatch) { - this.allowNoMatch = allowNoMatch; - } - - /** - * Whether to ignore if a wildcard expression matches no jobs. 
- * - * If this is {@code false}, then an error is returned when a wildcard (or {@code _all}) does not match any jobs - */ - public Boolean getAllowNoMatch() { - return allowNoMatch; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - - if (jobIds.isEmpty() == false) { - builder.field(Job.ID.getPreferredName(), Strings.collectionToCommaDelimitedString(jobIds)); - } - if (topN != null) { - builder.field(TOP_N.getPreferredName(), topN); - } - if (bucketSpan != null) { - builder.field(BUCKET_SPAN.getPreferredName(), bucketSpan.getStringRep()); - } - if (excludeInterim != null) { - builder.field(EXCLUDE_INTERIM.getPreferredName(), excludeInterim); - } - if (start != null) { - builder.field(START.getPreferredName(), start); - } - if (end != null) { - builder.field(END.getPreferredName(), end); - } - if (overallScore != null) { - builder.field(OVERALL_SCORE.getPreferredName(), overallScore); - } - if (allowNoMatch != null) { - builder.field(ALLOW_NO_MATCH.getPreferredName(), allowNoMatch); - } - builder.endObject(); - return builder; - } - - @Override - public int hashCode() { - return Objects.hash(jobIds, topN, bucketSpan, excludeInterim, overallScore, start, end, allowNoMatch); - } - - @Override - public boolean equals(Object obj) { - if (obj == null) { - return false; - } - if (getClass() != obj.getClass()) { - return false; - } - GetOverallBucketsRequest other = (GetOverallBucketsRequest) obj; - return Objects.equals(jobIds, other.jobIds) - && Objects.equals(topN, other.topN) - && Objects.equals(bucketSpan, other.bucketSpan) - && Objects.equals(excludeInterim, other.excludeInterim) - && Objects.equals(overallScore, other.overallScore) - && Objects.equals(start, other.start) - && Objects.equals(end, other.end) - && Objects.equals(allowNoMatch, other.allowNoMatch); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetOverallBucketsResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetOverallBucketsResponse.java deleted file mode 100644 index a75b740c99a14..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetOverallBucketsResponse.java +++ /dev/null @@ -1,70 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */
-package org.elasticsearch.client.ml;
-
-import org.elasticsearch.client.ml.job.results.OverallBucket;
-import org.elasticsearch.xcontent.ConstructingObjectParser;
-import org.elasticsearch.xcontent.ParseField;
-import org.elasticsearch.xcontent.XContentParser;
-
-import java.io.IOException;
-import java.util.List;
-import java.util.Objects;
-
-/**
- * A response containing the requested overall buckets
- */
-public class GetOverallBucketsResponse extends AbstractResultResponse<OverallBucket> {
-
-    public static final ParseField OVERALL_BUCKETS = new ParseField("overall_buckets");
-
-    @SuppressWarnings("unchecked")
-    public static final ConstructingObjectParser<GetOverallBucketsResponse, Void> PARSER = new ConstructingObjectParser<>(
-        "get_overall_buckets_response",
-        true,
-        a -> new GetOverallBucketsResponse((List<OverallBucket>) a[0], (long) a[1])
-    );
-
-    static {
-        PARSER.declareObjectArray(ConstructingObjectParser.constructorArg(), OverallBucket.PARSER, OVERALL_BUCKETS);
-        PARSER.declareLong(ConstructingObjectParser.constructorArg(), COUNT);
-    }
-
-    public static GetOverallBucketsResponse fromXContent(XContentParser parser) throws IOException {
-        return PARSER.parse(parser, null);
-    }
-
-    GetOverallBucketsResponse(List<OverallBucket> overallBuckets, long count) {
-        super(OVERALL_BUCKETS, overallBuckets, count);
-    }
-
-    /**
-     * The retrieved overall buckets
-     * @return the retrieved overall buckets
-     */
-    public List<OverallBucket> overallBuckets() {
-        return results;
-    }
-
-    @Override
-    public int hashCode() {
-        return Objects.hash(count, results);
-    }
-
-    @Override
-    public boolean equals(Object obj) {
-        if (obj == null) {
-            return false;
-        }
-        if (getClass() != obj.getClass()) {
-            return false;
-        }
-        GetOverallBucketsResponse other = (GetOverallBucketsResponse) obj;
-        return count == other.count && Objects.equals(results, other.results);
-    }
-}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetRecordsRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetRecordsRequest.java
deleted file mode 100644
index c3ebcd1f86e99..0000000000000
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetRecordsRequest.java
+++ /dev/null
@@ -1,211 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.Validatable; -import org.elasticsearch.client.core.PageParams; -import org.elasticsearch.client.ml.job.config.Job; -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Objects; - -/** - * A request to retrieve records of a given job - */ -public class GetRecordsRequest implements ToXContentObject, Validatable { - - public static final ParseField EXCLUDE_INTERIM = new ParseField("exclude_interim"); - public static final ParseField START = new ParseField("start"); - public static final ParseField END = new ParseField("end"); - public static final ParseField RECORD_SCORE = new ParseField("record_score"); - public static final ParseField SORT = new ParseField("sort"); - public static final ParseField DESCENDING = new ParseField("desc"); - - public static final ObjectParser PARSER = new ObjectParser<>("get_records_request", GetRecordsRequest::new); - - static { - PARSER.declareString((request, jobId) -> request.jobId = jobId, Job.ID); - PARSER.declareBoolean(GetRecordsRequest::setExcludeInterim, EXCLUDE_INTERIM); - PARSER.declareStringOrNull(GetRecordsRequest::setStart, START); - PARSER.declareStringOrNull(GetRecordsRequest::setEnd, END); - PARSER.declareObject(GetRecordsRequest::setPageParams, PageParams.PARSER, PageParams.PAGE); - PARSER.declareDouble(GetRecordsRequest::setRecordScore, RECORD_SCORE); - PARSER.declareString(GetRecordsRequest::setSort, SORT); - PARSER.declareBoolean(GetRecordsRequest::setDescending, DESCENDING); - } - - private String jobId; - private Boolean excludeInterim; - private String start; - private String end; - private PageParams pageParams; - private Double recordScore; - private String sort; - private Boolean descending; - - private GetRecordsRequest() {} - - /** - * Constructs a request to retrieve records of a given job - * @param jobId id of the job to retrieve records of - */ - public GetRecordsRequest(String jobId) { - this.jobId = Objects.requireNonNull(jobId); - } - - public String getJobId() { - return jobId; - } - - public Boolean getExcludeInterim() { - return excludeInterim; - } - - /** - * Sets the value of "exclude_interim". - * When {@code true}, interim records will be filtered out. - * @param excludeInterim value of "exclude_interim" to be set - */ - public void setExcludeInterim(Boolean excludeInterim) { - this.excludeInterim = excludeInterim; - } - - public String getStart() { - return start; - } - - /** - * Sets the value of "start" which is a timestamp. - * Only records whose timestamp is on or after the "start" value will be returned. - * @param start String representation of a timestamp; may be an epoch seconds, epoch millis or an ISO string - */ - public void setStart(String start) { - this.start = start; - } - - public String getEnd() { - return end; - } - - /** - * Sets the value of "end" which is a timestamp. - * Only records whose timestamp is before the "end" value will be returned. 
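As context for the class being removed, a hedged sketch using the constructor and setters shown in this hunk. The PageParams(from, size) constructor is assumed from the HLRC core package, and the job id and values are illustrative.

    GetRecordsRequest request = new GetRecordsRequest("my-job"); // non-null job id required
    request.setExcludeInterim(true);               // filter out interim records
    request.setStart("1546300800000");             // epoch millis also accepted
    request.setEnd("1548979200000");
    request.setRecordScore(50.0);                  // only records with record_score >= 50
    request.setSort("record_score");               // record field to sort on
    request.setDescending(true);
    request.setPageParams(new PageParams(0, 100)); // assumed (from, size) constructor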
- * @param end String representation of a timestamp; may be an epoch seconds, epoch millis or an ISO string - */ - public void setEnd(String end) { - this.end = end; - } - - public PageParams getPageParams() { - return pageParams; - } - - /** - * Sets the paging parameters - * @param pageParams The paging parameters - */ - public void setPageParams(PageParams pageParams) { - this.pageParams = pageParams; - } - - public Double getRecordScore() { - return recordScore; - } - - /** - * Sets the value of "record_score". - * Only records with "record_score" equal or greater will be returned. - * @param recordScore value of "record_score". - */ - public void setRecordScore(Double recordScore) { - this.recordScore = recordScore; - } - - public String getSort() { - return sort; - } - - /** - * Sets the value of "sort". - * Specifies the record field to sort on. - * @param sort value of "sort". - */ - public void setSort(String sort) { - this.sort = sort; - } - - public Boolean getDescending() { - return descending; - } - - /** - * Sets the value of "desc". - * Specifies the sorting order. - * @param descending value of "desc" - */ - public void setDescending(Boolean descending) { - this.descending = descending; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(Job.ID.getPreferredName(), jobId); - if (excludeInterim != null) { - builder.field(EXCLUDE_INTERIM.getPreferredName(), excludeInterim); - } - if (start != null) { - builder.field(START.getPreferredName(), start); - } - if (end != null) { - builder.field(END.getPreferredName(), end); - } - if (pageParams != null) { - builder.field(PageParams.PAGE.getPreferredName(), pageParams); - } - if (recordScore != null) { - builder.field(RECORD_SCORE.getPreferredName(), recordScore); - } - if (sort != null) { - builder.field(SORT.getPreferredName(), sort); - } - if (descending != null) { - builder.field(DESCENDING.getPreferredName(), descending); - } - builder.endObject(); - return builder; - } - - @Override - public int hashCode() { - return Objects.hash(jobId, excludeInterim, recordScore, pageParams, start, end, sort, descending); - } - - @Override - public boolean equals(Object obj) { - if (obj == null) { - return false; - } - if (getClass() != obj.getClass()) { - return false; - } - GetRecordsRequest other = (GetRecordsRequest) obj; - return Objects.equals(jobId, other.jobId) - && Objects.equals(excludeInterim, other.excludeInterim) - && Objects.equals(recordScore, other.recordScore) - && Objects.equals(pageParams, other.pageParams) - && Objects.equals(start, other.start) - && Objects.equals(end, other.end) - && Objects.equals(sort, other.sort) - && Objects.equals(descending, other.descending); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetRecordsResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetRecordsResponse.java deleted file mode 100644 index 3f94a06211ade..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetRecordsResponse.java +++ /dev/null @@ -1,70 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */
-package org.elasticsearch.client.ml;
-
-import org.elasticsearch.client.ml.job.results.AnomalyRecord;
-import org.elasticsearch.xcontent.ConstructingObjectParser;
-import org.elasticsearch.xcontent.ParseField;
-import org.elasticsearch.xcontent.XContentParser;
-
-import java.io.IOException;
-import java.util.List;
-import java.util.Objects;
-
-/**
- * A response containing the requested records
- */
-public class GetRecordsResponse extends AbstractResultResponse<AnomalyRecord> {
-
-    public static final ParseField RECORDS = new ParseField("records");
-
-    @SuppressWarnings("unchecked")
-    public static final ConstructingObjectParser<GetRecordsResponse, Void> PARSER = new ConstructingObjectParser<>(
-        "get_records_response",
-        true,
-        a -> new GetRecordsResponse((List<AnomalyRecord>) a[0], (long) a[1])
-    );
-
-    static {
-        PARSER.declareObjectArray(ConstructingObjectParser.constructorArg(), AnomalyRecord.PARSER, RECORDS);
-        PARSER.declareLong(ConstructingObjectParser.constructorArg(), COUNT);
-    }
-
-    public static GetRecordsResponse fromXContent(XContentParser parser) throws IOException {
-        return PARSER.parse(parser, null);
-    }
-
-    GetRecordsResponse(List<AnomalyRecord> records, long count) {
-        super(RECORDS, records, count);
-    }
-
-    /**
-     * The retrieved records
-     * @return the retrieved records
-     */
-    public List<AnomalyRecord> records() {
-        return results;
-    }
-
-    @Override
-    public int hashCode() {
-        return Objects.hash(count, results);
-    }
-
-    @Override
-    public boolean equals(Object obj) {
-        if (obj == null) {
-            return false;
-        }
-        if (getClass() != obj.getClass()) {
-            return false;
-        }
-        GetRecordsResponse other = (GetRecordsResponse) obj;
-        return count == other.count && Objects.equals(results, other.results);
-    }
-}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetTrainedModelsRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetTrainedModelsRequest.java
deleted file mode 100644
index 50b59c6a92f4f..0000000000000
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetTrainedModelsRequest.java
+++ /dev/null
@@ -1,198 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */ - -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.Validatable; -import org.elasticsearch.client.ValidationException; -import org.elasticsearch.client.core.PageParams; -import org.elasticsearch.client.ml.inference.TrainedModelConfig; -import org.elasticsearch.core.Nullable; - -import java.util.Arrays; -import java.util.Collections; -import java.util.HashSet; -import java.util.List; -import java.util.Objects; -import java.util.Optional; -import java.util.Set; - -public class GetTrainedModelsRequest implements Validatable { - - private static final String DEFINITION = "definition"; - private static final String TOTAL_FEATURE_IMPORTANCE = "total_feature_importance"; - private static final String FEATURE_IMPORTANCE_BASELINE = "feature_importance_baseline"; - public static final String ALLOW_NO_MATCH = "allow_no_match"; - public static final String EXCLUDE_GENERATED = "exclude_generated"; - public static final String DECOMPRESS_DEFINITION = "decompress_definition"; - public static final String TAGS = "tags"; - public static final String INCLUDE = "include"; - - private final List ids; - private Boolean allowNoMatch; - private Set includes = new HashSet<>(); - private Boolean decompressDefinition; - private Boolean excludeGenerated; - private PageParams pageParams; - private List tags; - - /** - * Helper method to create a request that will get ALL TrainedModelConfigs - * @return new {@link GetTrainedModelsRequest} object for the id "_all" - */ - public static GetTrainedModelsRequest getAllTrainedModelConfigsRequest() { - return new GetTrainedModelsRequest("_all"); - } - - public GetTrainedModelsRequest(String... ids) { - this.ids = Arrays.asList(ids); - } - - public List getIds() { - return ids; - } - - public Boolean getAllowNoMatch() { - return allowNoMatch; - } - - /** - * Whether to ignore if a wildcard expression matches no trained models. - * - * @param allowNoMatch If this is {@code false}, then an error is returned when a wildcard (or {@code _all}) - * does not match any trained models - */ - public GetTrainedModelsRequest setAllowNoMatch(boolean allowNoMatch) { - this.allowNoMatch = allowNoMatch; - return this; - } - - public PageParams getPageParams() { - return pageParams; - } - - public GetTrainedModelsRequest setPageParams(@Nullable PageParams pageParams) { - this.pageParams = pageParams; - return this; - } - - public Set getIncludes() { - return Collections.unmodifiableSet(includes); - } - - public GetTrainedModelsRequest includeDefinition() { - this.includes.add(DEFINITION); - return this; - } - - public GetTrainedModelsRequest includeTotalFeatureImportance() { - this.includes.add(TOTAL_FEATURE_IMPORTANCE); - return this; - } - - public GetTrainedModelsRequest includeFeatureImportanceBaseline() { - this.includes.add(FEATURE_IMPORTANCE_BASELINE); - return this; - } - - /** - * Whether to include the full model definition. - * - * The full model definition can be very large. - * @deprecated Use {@link GetTrainedModelsRequest#includeDefinition()} - * @param includeDefinition If {@code true}, the definition is included. 
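A hedged sketch of the fluent builder style this class supports, using only the methods shown in this hunk; the model ids are illustrative and PageParams(from, size) is again an assumption from the HLRC core package.

    GetTrainedModelsRequest request = new GetTrainedModelsRequest("model-1", "model-2*")
        .setAllowNoMatch(true)                 // tolerate wildcards matching nothing
        .includeDefinition()                   // request the (potentially very large) definition
        .setDecompressDefinition(false)        // keep the definition in compressed string form
        .setPageParams(new PageParams(0, 10)); // assumed (from, size) constructor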
- */ - @Deprecated - public GetTrainedModelsRequest setIncludeDefinition(Boolean includeDefinition) { - if (includeDefinition != null && includeDefinition) { - return this.includeDefinition(); - } - return this; - } - - public Boolean getDecompressDefinition() { - return decompressDefinition; - } - - /** - * Whether or not to decompress the trained model, or keep it in its compressed string form - * - * @param decompressDefinition If {@code true}, the definition is decompressed. - */ - public GetTrainedModelsRequest setDecompressDefinition(Boolean decompressDefinition) { - this.decompressDefinition = decompressDefinition; - return this; - } - - public List getTags() { - return tags; - } - - /** - * The tags that the trained model must match. These correspond to {@link TrainedModelConfig#getTags()}. - * - * The models returned will match ALL tags supplied. - * If none are provided, only the provided ids are used to find models - * @param tags The tags to match when finding models - */ - public GetTrainedModelsRequest setTags(List tags) { - this.tags = tags; - return this; - } - - /** - * See {@link GetTrainedModelsRequest#setTags(List)} - */ - public GetTrainedModelsRequest setTags(String... tags) { - return setTags(Arrays.asList(tags)); - } - - public Boolean getExcludeGenerated() { - return excludeGenerated; - } - - /** - * Setting this flag to `true` removes certain fields from the model definition on retrieval. - * - * This is useful when getting the model and wanting to put it in another cluster. - * - * Default value is false. - * @param excludeGenerated Boolean value indicating if certain fields should be removed from the mode on GET - */ - public GetTrainedModelsRequest setExcludeGenerated(Boolean excludeGenerated) { - this.excludeGenerated = excludeGenerated; - return this; - } - - @Override - public Optional validate() { - if (ids == null || ids.isEmpty()) { - return Optional.of(ValidationException.withError("trained model id must not be null")); - } - return Optional.empty(); - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - - GetTrainedModelsRequest other = (GetTrainedModelsRequest) o; - return Objects.equals(ids, other.ids) - && Objects.equals(allowNoMatch, other.allowNoMatch) - && Objects.equals(decompressDefinition, other.decompressDefinition) - && Objects.equals(includes, other.includes) - && Objects.equals(excludeGenerated, other.excludeGenerated) - && Objects.equals(pageParams, other.pageParams); - } - - @Override - public int hashCode() { - return Objects.hash(ids, allowNoMatch, pageParams, decompressDefinition, includes, excludeGenerated); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetTrainedModelsResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetTrainedModelsResponse.java deleted file mode 100644 index 9fb7cf8f7fd13..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetTrainedModelsResponse.java +++ /dev/null @@ -1,74 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */
-
-package org.elasticsearch.client.ml;
-
-import org.elasticsearch.client.ml.inference.TrainedModelConfig;
-import org.elasticsearch.xcontent.ConstructingObjectParser;
-import org.elasticsearch.xcontent.ParseField;
-import org.elasticsearch.xcontent.XContentParser;
-
-import java.util.List;
-import java.util.Objects;
-
-import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg;
-
-public class GetTrainedModelsResponse {
-
-    public static final ParseField TRAINED_MODEL_CONFIGS = new ParseField("trained_model_configs");
-    public static final ParseField COUNT = new ParseField("count");
-
-    @SuppressWarnings("unchecked")
-    static final ConstructingObjectParser<GetTrainedModelsResponse, Void> PARSER = new ConstructingObjectParser<>(
-        "get_trained_model_configs",
-        true,
-        args -> new GetTrainedModelsResponse((List<TrainedModelConfig>) args[0], (Long) args[1])
-    );
-
-    static {
-        PARSER.declareObjectArray(constructorArg(), (p, c) -> TrainedModelConfig.fromXContent(p), TRAINED_MODEL_CONFIGS);
-        PARSER.declareLong(constructorArg(), COUNT);
-    }
-
-    public static GetTrainedModelsResponse fromXContent(final XContentParser parser) {
-        return PARSER.apply(parser, null);
-    }
-
-    private final List<TrainedModelConfig> trainedModels;
-    private final Long count;
-
-    public GetTrainedModelsResponse(List<TrainedModelConfig> trainedModels, Long count) {
-        this.trainedModels = trainedModels;
-        this.count = count;
-    }
-
-    public List<TrainedModelConfig> getTrainedModels() {
-        return trainedModels;
-    }
-
-    /**
-     * @return The total count of the trained models that matched the ID pattern.
-     */
-    public Long getCount() {
-        return count;
-    }
-
-    @Override
-    public boolean equals(Object o) {
-        if (this == o) return true;
-        if (o == null || getClass() != o.getClass()) return false;
-
-        GetTrainedModelsResponse other = (GetTrainedModelsResponse) o;
-        return Objects.equals(this.trainedModels, other.trainedModels) && Objects.equals(this.count, other.count);
-    }
-
-    @Override
-    public int hashCode() {
-        return Objects.hash(trainedModels, count);
-    }
-}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetTrainedModelsStatsRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetTrainedModelsStatsRequest.java
deleted file mode 100644
index 0185f531b0c68..0000000000000
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetTrainedModelsStatsRequest.java
+++ /dev/null
@@ -1,92 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */ - -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.Validatable; -import org.elasticsearch.client.ValidationException; -import org.elasticsearch.client.core.PageParams; -import org.elasticsearch.core.Nullable; - -import java.util.Arrays; -import java.util.List; -import java.util.Objects; -import java.util.Optional; - -public class GetTrainedModelsStatsRequest implements Validatable { - - public static final String ALLOW_NO_MATCH = "allow_no_match"; - - private final List ids; - private Boolean allowNoMatch; - private PageParams pageParams; - - /** - * Helper method to create a request that will get ALL TrainedModelStats - * @return new {@link GetTrainedModelsStatsRequest} object for the id "_all" - */ - public static GetTrainedModelsStatsRequest getAllTrainedModelStatsRequest() { - return new GetTrainedModelsStatsRequest("_all"); - } - - public GetTrainedModelsStatsRequest(String... ids) { - this.ids = Arrays.asList(ids); - } - - public List getIds() { - return ids; - } - - public Boolean getAllowNoMatch() { - return allowNoMatch; - } - - /** - * Whether to ignore if a wildcard expression matches no trained models. - * - * @param allowNoMatch If this is {@code false}, then an error is returned when a wildcard (or {@code _all}) - * does not match any trained models - */ - public GetTrainedModelsStatsRequest setAllowNoMatch(boolean allowNoMatch) { - this.allowNoMatch = allowNoMatch; - return this; - } - - public PageParams getPageParams() { - return pageParams; - } - - public GetTrainedModelsStatsRequest setPageParams(@Nullable PageParams pageParams) { - this.pageParams = pageParams; - return this; - } - - @Override - public Optional validate() { - if (ids == null || ids.isEmpty()) { - return Optional.of(ValidationException.withError("trained model id must not be null")); - } - return Optional.empty(); - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - - GetTrainedModelsStatsRequest other = (GetTrainedModelsStatsRequest) o; - return Objects.equals(ids, other.ids) - && Objects.equals(allowNoMatch, other.allowNoMatch) - && Objects.equals(pageParams, other.pageParams); - } - - @Override - public int hashCode() { - return Objects.hash(ids, allowNoMatch, pageParams); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetTrainedModelsStatsResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetTrainedModelsStatsResponse.java deleted file mode 100644 index ca218657cce83..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/GetTrainedModelsStatsResponse.java +++ /dev/null @@ -1,74 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
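For the stats request removed above, a minimal sketch of the "_all" helper plus the fluent setters shown in its hunk; PageParams(from, size) is assumed from the HLRC core package.

    GetTrainedModelsStatsRequest request = GetTrainedModelsStatsRequest.getAllTrainedModelStatsRequest()
        .setAllowNoMatch(true)                 // "_all" should not fail when no models exist
        .setPageParams(new PageParams(0, 25)); // assumed (from, size) constructor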
- */ - -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.ml.inference.TrainedModelStats; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.XContentParser; - -import java.util.List; -import java.util.Objects; - -import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; - -public class GetTrainedModelsStatsResponse { - - public static final ParseField TRAINED_MODEL_STATS = new ParseField("trained_model_stats"); - public static final ParseField COUNT = new ParseField("count"); - - @SuppressWarnings("unchecked") - static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "get_trained_model_stats", - true, - args -> new GetTrainedModelsStatsResponse((List) args[0], (Long) args[1]) - ); - - static { - PARSER.declareObjectArray(constructorArg(), (p, c) -> TrainedModelStats.fromXContent(p), TRAINED_MODEL_STATS); - PARSER.declareLong(constructorArg(), COUNT); - } - - public static GetTrainedModelsStatsResponse fromXContent(final XContentParser parser) { - return PARSER.apply(parser, null); - } - - private final List trainedModelStats; - private final Long count; - - public GetTrainedModelsStatsResponse(List trainedModelStats, Long count) { - this.trainedModelStats = trainedModelStats; - this.count = count; - } - - public List getTrainedModelStats() { - return trainedModelStats; - } - - /** - * @return The total count of the trained models that matched the ID pattern. - */ - public Long getCount() { - return count; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - - GetTrainedModelsStatsResponse other = (GetTrainedModelsStatsResponse) o; - return Objects.equals(this.trainedModelStats, other.trainedModelStats) && Objects.equals(this.count, other.count); - } - - @Override - public int hashCode() { - return Objects.hash(trainedModelStats, count); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/MlInfoRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/MlInfoRequest.java deleted file mode 100644 index 6c5f1787fd183..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/MlInfoRequest.java +++ /dev/null @@ -1,13 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ - -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.Validatable; - -public class MlInfoRequest implements Validatable {} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/MlInfoResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/MlInfoResponse.java deleted file mode 100644 index 6fa6c6eaaf6be..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/MlInfoResponse.java +++ /dev/null @@ -1,50 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. 
Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-
-package org.elasticsearch.client.ml;
-
-import org.elasticsearch.client.Validatable;
-import org.elasticsearch.xcontent.XContentParser;
-
-import java.io.IOException;
-import java.util.Map;
-import java.util.Objects;
-
-public class MlInfoResponse implements Validatable {
-    private final Map<String, Object> info;
-
-    private MlInfoResponse(Map<String, Object> info) {
-        this.info = info;
-    }
-
-    public Map<String, Object> getInfo() {
-        return info;
-    }
-
-    public static MlInfoResponse fromXContent(XContentParser parser) throws IOException {
-        Map<String, Object> info = parser.map();
-        return new MlInfoResponse(info);
-    }
-
-    @Override
-    public int hashCode() {
-        return Objects.hash(info);
-    }
-
-    @Override
-    public boolean equals(Object obj) {
-        if (obj == null) {
-            return false;
-        }
-        if (getClass() != obj.getClass()) {
-            return false;
-        }
-        MlInfoResponse other = (MlInfoResponse) obj;
-        return Objects.equals(info, other.info);
-    }
-}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/NodeAttributes.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/NodeAttributes.java
deleted file mode 100644
index 4a732c9523415..0000000000000
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/NodeAttributes.java
+++ /dev/null
@@ -1,140 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.common.Strings; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Collections; -import java.util.Map; -import java.util.Objects; - -/** - * A Pojo class containing an Elastic Node's attributes - */ -public class NodeAttributes implements ToXContentObject { - - public static final ParseField ID = new ParseField("id"); - public static final ParseField NAME = new ParseField("name"); - public static final ParseField EPHEMERAL_ID = new ParseField("ephemeral_id"); - public static final ParseField TRANSPORT_ADDRESS = new ParseField("transport_address"); - public static final ParseField ATTRIBUTES = new ParseField("attributes"); - - @SuppressWarnings("unchecked") - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>("node", true, (a) -> { - int i = 0; - String id = (String) a[i++]; - String name = (String) a[i++]; - String ephemeralId = (String) a[i++]; - String transportAddress = (String) a[i++]; - Map attributes = (Map) a[i]; - return new NodeAttributes(id, name, ephemeralId, transportAddress, attributes); - }); - - static { - PARSER.declareString(ConstructingObjectParser.constructorArg(), ID); - PARSER.declareString(ConstructingObjectParser.constructorArg(), NAME); - PARSER.declareString(ConstructingObjectParser.constructorArg(), EPHEMERAL_ID); - PARSER.declareString(ConstructingObjectParser.constructorArg(), TRANSPORT_ADDRESS); - PARSER.declareField(ConstructingObjectParser.constructorArg(), (p, c) -> p.mapStrings(), ATTRIBUTES, ObjectParser.ValueType.OBJECT); - } - - private final String id; - private final String name; - private final String ephemeralId; - private final String transportAddress; - private final Map attributes; - - public NodeAttributes(String id, String name, String ephemeralId, String transportAddress, Map attributes) { - this.id = id; - this.name = name; - this.ephemeralId = ephemeralId; - this.transportAddress = transportAddress; - this.attributes = Collections.unmodifiableMap(attributes); - } - - /** - * The unique identifier of the node. - */ - public String getId() { - return id; - } - - /** - * The node name. - */ - public String getName() { - return name; - } - - /** - * The ephemeral id of the node. - */ - public String getEphemeralId() { - return ephemeralId; - } - - /** - * The host and port where transport HTTP connections are accepted. - */ - public String getTransportAddress() { - return transportAddress; - } - - /** - * Additional attributes related to this node e.g., {"ml.max_open_jobs": "10"}. 
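A hedged sketch of constructing the POJO defined here; all values are illustrative, and the attribute values are strings because the parser above reads them with mapStrings().

    NodeAttributes node = new NodeAttributes(
        "node-id",
        "node-name",
        "ephemeral-id",
        "127.0.0.1:9300",                 // transport address
        Map.of("ml.max_open_jobs", "10")  // copied into an unmodifiable map by the constructor
    );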
- */ - public Map getAttributes() { - return attributes; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(ID.getPreferredName(), id); - builder.field(NAME.getPreferredName(), name); - builder.field(EPHEMERAL_ID.getPreferredName(), ephemeralId); - builder.field(TRANSPORT_ADDRESS.getPreferredName(), transportAddress); - builder.field(ATTRIBUTES.getPreferredName(), attributes); - builder.endObject(); - return builder; - } - - @Override - public int hashCode() { - return Objects.hash(id, name, ephemeralId, transportAddress, attributes); - } - - @Override - public boolean equals(Object other) { - if (this == other) { - return true; - } - - if (other == null || getClass() != other.getClass()) { - return false; - } - - NodeAttributes that = (NodeAttributes) other; - return Objects.equals(id, that.id) - && Objects.equals(name, that.name) - && Objects.equals(ephemeralId, that.ephemeralId) - && Objects.equals(transportAddress, that.transportAddress) - && Objects.equals(attributes, that.attributes); - } - - @Override - public String toString() { - return Strings.toString(this); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/OpenJobRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/OpenJobRequest.java deleted file mode 100644 index c19ff484242ad..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/OpenJobRequest.java +++ /dev/null @@ -1,116 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.Validatable; -import org.elasticsearch.client.ml.job.config.Job; -import org.elasticsearch.common.Strings; -import org.elasticsearch.core.TimeValue; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Objects; - -/** - * Request to open a Machine Learning Job - */ -public class OpenJobRequest implements Validatable, ToXContentObject { - - public static final ParseField TIMEOUT = new ParseField("timeout"); - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "open_job_request", - true, - a -> new OpenJobRequest((String) a[0]) - ); - - static { - PARSER.declareString(ConstructingObjectParser.constructorArg(), Job.ID); - PARSER.declareString((request, val) -> request.setTimeout(TimeValue.parseTimeValue(val, TIMEOUT.getPreferredName())), TIMEOUT); - } - - public static OpenJobRequest fromXContent(XContentParser parser) throws IOException { - return PARSER.parse(parser, null); - } - - private String jobId; - private TimeValue timeout; - - /** - * Create a new request with the desired jobId - * - * @param jobId unique jobId, must not be null - */ - public OpenJobRequest(String jobId) { - this.jobId = Objects.requireNonNull(jobId, "[job_id] must not be null"); - } - - public String getJobId() { - return jobId; - } - - /** - * The jobId to open - * - * @param jobId unique jobId, must not be null - */ - public void setJobId(String jobId) { - this.jobId = Objects.requireNonNull(jobId, "[job_id] must not be null"); - } - - public TimeValue getTimeout() { - return timeout; - } - - /** - * How long to wait for job to open before timing out the request - * - * @param timeout default value of 30 minutes - */ - public void setTimeout(TimeValue timeout) { - this.timeout = timeout; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(Job.ID.getPreferredName(), jobId); - if (timeout != null) { - builder.field(TIMEOUT.getPreferredName(), timeout.getStringRep()); - } - builder.endObject(); - return builder; - } - - @Override - public String toString() { - return Strings.toString(this); - } - - @Override - public int hashCode() { - return Objects.hash(jobId, timeout); - } - - @Override - public boolean equals(Object other) { - if (this == other) { - return true; - } - - if (other == null || getClass() != other.getClass()) { - return false; - } - - OpenJobRequest that = (OpenJobRequest) other; - return Objects.equals(jobId, that.jobId) && Objects.equals(timeout, that.timeout); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/OpenJobResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/OpenJobResponse.java deleted file mode 100644 index a9c6118db26d6..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/OpenJobResponse.java +++ /dev/null @@ -1,100 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. 
Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Objects; - -/** - * Response indicating if the Machine Learning Job is now opened or not - */ -public class OpenJobResponse implements ToXContentObject { - - private static final ParseField OPENED = new ParseField("opened"); - private static final ParseField NODE = new ParseField("node"); - - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "open_job_response", - true, - (a) -> new OpenJobResponse((Boolean) a[0], (String) a[1]) - ); - - static { - PARSER.declareBoolean(ConstructingObjectParser.constructorArg(), OPENED); - PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), NODE); - } - - private final boolean opened; - private final String node; - - OpenJobResponse(boolean opened, String node) { - this.opened = opened; - this.node = node; - } - - public static OpenJobResponse fromXContent(XContentParser parser) throws IOException { - return PARSER.parse(parser, null); - } - - /** - * Has the job opened or not - * - * @return boolean value indicating the job opened status - */ - public boolean isOpened() { - return opened; - } - - /** - * The node that the job was assigned to - * - * @return The ID of a node if the job was assigned to a node. If an empty string is returned - * it means the job was allowed to open lazily and has not yet been assigned to a node. - * If null is returned it means the server version is too old to return node - * information. - */ - public String getNode() { - return node; - } - - @Override - public boolean equals(Object other) { - if (this == other) { - return true; - } - - if (other == null || getClass() != other.getClass()) { - return false; - } - - OpenJobResponse that = (OpenJobResponse) other; - return opened == that.opened && Objects.equals(node, that.node); - } - - @Override - public int hashCode() { - return Objects.hash(opened, node); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(OPENED.getPreferredName(), opened); - if (node != null) { - builder.field(NODE.getPreferredName(), node); - } - builder.endObject(); - return builder; - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PostCalendarEventRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PostCalendarEventRequest.java deleted file mode 100644 index 0752221e9eee0..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PostCalendarEventRequest.java +++ /dev/null @@ -1,97 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
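For the job-opening pair removed above, a minimal request sketch; the job id is illustrative, and the 30-minute timeout mirrors the default documented on setTimeout.

    OpenJobRequest request = new OpenJobRequest("my-job"); // [job_id] must not be null
    request.setTimeout(TimeValue.timeValueMinutes(30));    // documented default is 30 minutes
    // The matching OpenJobResponse exposes isOpened() and getNode() once parsed.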
- */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.Validatable; -import org.elasticsearch.client.ml.calendars.Calendar; -import org.elasticsearch.client.ml.calendars.ScheduledEvent; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Collections; -import java.util.List; -import java.util.Objects; - -/** - * Request to add a ScheduledEvent to a Machine Learning calendar - */ -public class PostCalendarEventRequest implements Validatable, ToXContentObject { - - private final String calendarId; - private final List scheduledEvents; - - public static final String INCLUDE_CALENDAR_ID_KEY = "include_calendar_id"; - public static final ParseField EVENTS = new ParseField("events"); - - @SuppressWarnings("unchecked") - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "post_calendar_event_request", - a -> new PostCalendarEventRequest((String) a[0], (List) a[1]) - ); - - static { - PARSER.declareString(ConstructingObjectParser.constructorArg(), Calendar.ID); - PARSER.declareObjectArray(ConstructingObjectParser.constructorArg(), (p, c) -> ScheduledEvent.PARSER.apply(p, null), EVENTS); - } - public static final MapParams EXCLUDE_CALENDAR_ID_PARAMS = new MapParams( - Collections.singletonMap(INCLUDE_CALENDAR_ID_KEY, Boolean.toString(false)) - ); - - /** - * Create a new PostCalendarEventRequest with an existing non-null calendarId and a list of Scheduled events - * - * @param calendarId The ID of the calendar, must be non-null - * @param scheduledEvents The non-null, non-empty, list of {@link ScheduledEvent} objects to add to the calendar - */ - public PostCalendarEventRequest(String calendarId, List scheduledEvents) { - this.calendarId = Objects.requireNonNull(calendarId, "[calendar_id] must not be null."); - this.scheduledEvents = Objects.requireNonNull(scheduledEvents, "[events] must not be null."); - if (scheduledEvents.isEmpty()) { - throw new IllegalArgumentException("At least 1 event is required"); - } - } - - public String getCalendarId() { - return calendarId; - } - - public List getScheduledEvents() { - return scheduledEvents; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - if (params.paramAsBoolean(INCLUDE_CALENDAR_ID_KEY, true)) { - builder.field(Calendar.ID.getPreferredName(), calendarId); - } - builder.field(EVENTS.getPreferredName(), scheduledEvents); - builder.endObject(); - return builder; - } - - @Override - public int hashCode() { - return Objects.hash(calendarId, scheduledEvents); - } - - @Override - public boolean equals(Object obj) { - if (this == obj) { - return true; - } - if (obj == null || getClass() != obj.getClass()) { - return false; - } - PostCalendarEventRequest other = (PostCalendarEventRequest) obj; - return Objects.equals(calendarId, other.calendarId) && Objects.equals(scheduledEvents, other.scheduledEvents); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PostCalendarEventResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PostCalendarEventResponse.java deleted file mode 100644 index 4aeb8da98f260..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PostCalendarEventResponse.java +++ /dev/null @@ -1,81 +0,0 @@ 
-/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.ml.calendars.ScheduledEvent; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.List; -import java.util.Objects; - -/** - * Response to adding ScheduledEvent(s) to a Machine Learning calendar - */ -public class PostCalendarEventResponse implements ToXContentObject { - - private final List scheduledEvents; - public static final ParseField EVENTS = new ParseField("events"); - - @SuppressWarnings("unchecked") - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "post_calendar_event_response", - true, - a -> new PostCalendarEventResponse((List) a[0]) - ); - - static { - PARSER.declareObjectArray(ConstructingObjectParser.constructorArg(), (p, c) -> ScheduledEvent.PARSER.apply(p, null), EVENTS); - } - - public static PostCalendarEventResponse fromXContent(XContentParser parser) throws IOException { - return PARSER.parse(parser, null); - } - - /** - * Create a new PostCalendarEventResponse containing the scheduled Events - * - * @param scheduledEvents The list of {@link ScheduledEvent} objects - */ - public PostCalendarEventResponse(List scheduledEvents) { - this.scheduledEvents = scheduledEvents; - } - - public List getScheduledEvents() { - return scheduledEvents; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(EVENTS.getPreferredName(), scheduledEvents); - builder.endObject(); - return builder; - } - - @Override - public int hashCode() { - return Objects.hash(scheduledEvents); - } - - @Override - public boolean equals(Object obj) { - if (this == obj) { - return true; - } - if (obj == null || getClass() != obj.getClass()) { - return false; - } - PostCalendarEventResponse other = (PostCalendarEventResponse) obj; - return Objects.equals(scheduledEvents, other.scheduledEvents); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PostDataRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PostDataRequest.java deleted file mode 100644 index 5918f15c412c4..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PostDataRequest.java +++ /dev/null @@ -1,213 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
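As the PostCalendarEventRequest constructor above shows, the event list must be non-null and non-empty; a hedged sketch, where fetchHolidayEvents() is a hypothetical helper returning at least one ScheduledEvent.

    List<ScheduledEvent> events = fetchHolidayEvents(); // hypothetical helper, must yield >= 1 event
    PostCalendarEventRequest request = new PostCalendarEventRequest("holidays", events);
    // new PostCalendarEventRequest("holidays", List.of()) would throw "At least 1 event is required".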
- */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.Validatable; -import org.elasticsearch.client.ml.job.config.Job; -import org.elasticsearch.common.bytes.BytesArray; -import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentType; - -import java.io.IOException; -import java.nio.ByteBuffer; -import java.nio.charset.StandardCharsets; -import java.util.ArrayList; -import java.util.List; -import java.util.Map; -import java.util.Objects; - -/** - * Request to post data to a Machine Learning job - */ -public class PostDataRequest implements Validatable, ToXContentObject { - - public static final ParseField RESET_START = new ParseField("reset_start"); - public static final ParseField RESET_END = new ParseField("reset_end"); - public static final ParseField CONTENT_TYPE = new ParseField("content_type"); - - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "post_data_request", - (a) -> new PostDataRequest((String) a[0], XContentType.fromMediaType((String) a[1]), new byte[0]) - ); - - static { - PARSER.declareString(ConstructingObjectParser.constructorArg(), Job.ID); - PARSER.declareString(ConstructingObjectParser.constructorArg(), CONTENT_TYPE); - PARSER.declareStringOrNull(PostDataRequest::setResetEnd, RESET_END); - PARSER.declareStringOrNull(PostDataRequest::setResetStart, RESET_START); - } - - private final String jobId; - private final XContentType xContentType; - private final BytesReference content; - private String resetStart; - private String resetEnd; - - /** - * Create a new PostDataRequest object - * - * @param jobId non-null jobId of the job to post data to - * @param xContentType content type of the data to post. Only {@link XContentType#JSON} or {@link XContentType#SMILE} are supported - * @param content bulk serialized content in the format of the passed {@link XContentType} - */ - public PostDataRequest(String jobId, XContentType xContentType, BytesReference content) { - this.jobId = Objects.requireNonNull(jobId, "job_id must not be null"); - this.xContentType = Objects.requireNonNull(xContentType, "content_type must not be null"); - this.content = Objects.requireNonNull(content, "content must not be null"); - } - - /** - * Create a new PostDataRequest object referencing the passed {@code byte[]} content - * - * @param jobId non-null jobId of the job to post data to - * @param xContentType content type of the data to post. 
Only {@link XContentType#JSON} or {@link XContentType#SMILE} are supported - * @param content bulk serialized content in the format of the passed {@link XContentType} - */ - public PostDataRequest(String jobId, XContentType xContentType, byte[] content) { - this(jobId, xContentType, new BytesArray(content)); - } - - /** - * Create a new PostDataRequest object referencing the passed {@link JsonBuilder} object - * - * @param jobId non-null jobId of the job to post data to - * @param builder {@link JsonBuilder} object containing documents to be serialized and sent in {@link XContentType#JSON} format - */ - public PostDataRequest(String jobId, JsonBuilder builder) { - this(jobId, XContentType.JSON, builder.build()); - } - - public String getJobId() { - return jobId; - } - - public String getResetStart() { - return resetStart; - } - - /** - * Specifies the start of the bucket resetting range - * - * @param resetStart String representation of a timestamp; may be an epoch seconds, epoch millis or an ISO 8601 string - */ - public void setResetStart(String resetStart) { - this.resetStart = resetStart; - } - - public String getResetEnd() { - return resetEnd; - } - - /** - * Specifies the end of the bucket resetting range - * - * @param resetEnd String representation of a timestamp; may be an epoch seconds, epoch millis or an ISO 8601 string - */ - public void setResetEnd(String resetEnd) { - this.resetEnd = resetEnd; - } - - public BytesReference getContent() { - return content; - } - - public XContentType getXContentType() { - return xContentType; - } - - @Override - public int hashCode() { - // We leave out the content for server side parity - return Objects.hash(jobId, resetStart, resetEnd, xContentType); - } - - @Override - public boolean equals(Object obj) { - if (obj == this) { - return true; - } - - if (obj == null || getClass() != obj.getClass()) { - return false; - } - - // We leave out the content for server side parity - PostDataRequest other = (PostDataRequest) obj; - return Objects.equals(jobId, other.jobId) - && Objects.equals(resetStart, other.resetStart) - && Objects.equals(resetEnd, other.resetEnd) - && Objects.equals(xContentType, other.xContentType); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(Job.ID.getPreferredName(), jobId); - builder.field(CONTENT_TYPE.getPreferredName(), xContentType.mediaType()); - if (resetEnd != null) { - builder.field(RESET_END.getPreferredName(), resetEnd); - } - if (resetStart != null) { - builder.field(RESET_START.getPreferredName(), resetStart); - } - builder.endObject(); - return builder; - } - - /** - * Class for incrementally building a bulk document request in {@link XContentType#JSON} format - */ - public static class JsonBuilder { - - private final List bytes = new ArrayList<>(); - - /** - * Add a document via a {@code byte[]} array - * - * @param doc {@code byte[]} array of a serialized JSON object - */ - public JsonBuilder addDoc(byte[] doc) { - bytes.add(ByteBuffer.wrap(doc)); - return this; - } - - /** - * Add a document via a serialized JSON String - * - * @param doc a serialized JSON String - */ - public JsonBuilder addDoc(String doc) { - bytes.add(ByteBuffer.wrap(doc.getBytes(StandardCharsets.UTF_8))); - return this; - } - - /** - * Add a document via an object map - * - * @param doc document object to add to bulk request - * @throws IOException on parsing/serialization errors - */ - public JsonBuilder addDoc(Map doc) throws 
IOException { - try (XContentBuilder builder = XContentBuilder.builder(XContentType.JSON.xContent())) { - builder.map(doc); - bytes.add(ByteBuffer.wrap(BytesReference.toBytes(BytesReference.bytes(builder)))); - } - return this; - } - - private BytesReference build() { - ByteBuffer[] buffers = bytes.toArray(new ByteBuffer[bytes.size()]); - return BytesReference.fromByteBuffers(buffers); - } - - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PostDataResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PostDataResponse.java deleted file mode 100644 index 4d8c8886fd896..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PostDataResponse.java +++ /dev/null @@ -1,62 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.ml.job.process.DataCounts; -import org.elasticsearch.xcontent.ToXContent; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Objects; - -/** - * Response object when posting data to a Machine Learning Job - */ -public class PostDataResponse implements ToXContentObject { - - private DataCounts dataCounts; - - public static PostDataResponse fromXContent(XContentParser parser) throws IOException { - return new PostDataResponse(DataCounts.PARSER.parse(parser, null)); - } - - public PostDataResponse(DataCounts counts) { - this.dataCounts = counts; - } - - public DataCounts getDataCounts() { - return dataCounts; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { - return dataCounts.toXContent(builder, params); - } - - @Override - public int hashCode() { - return Objects.hashCode(dataCounts); - } - - @Override - public boolean equals(Object obj) { - if (obj == this) { - return true; - } - - if (obj == null || getClass() != obj.getClass()) { - return false; - } - - PostDataResponse other = (PostDataResponse) obj; - return Objects.equals(dataCounts, other.dataCounts); - } - -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PreviewDatafeedRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PreviewDatafeedRequest.java deleted file mode 100644 index 8bd277fa31efc..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PreviewDatafeedRequest.java +++ /dev/null @@ -1,136 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
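For the data-posting classes removed above, a hedged sketch of the JsonBuilder flow inside a method that declares IOException; the job id and documents are illustrative.

    PostDataRequest.JsonBuilder builder = new PostDataRequest.JsonBuilder();
    builder.addDoc("{\"time\":1546300800000,\"value\":42.0}");     // pre-serialized JSON document
    builder.addDoc(Map.of("time", 1546300860000L, "value", 17.5)); // object map; may throw IOException
    PostDataRequest request = new PostDataRequest("my-job", builder);
    request.setResetStart("2019-01-01T00:00:00Z");                 // optional bucket-reset window
    request.setResetEnd("2019-01-02T00:00:00Z");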
- */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.Validatable; -import org.elasticsearch.client.ml.datafeed.DatafeedConfig; -import org.elasticsearch.client.ml.job.config.Job; -import org.elasticsearch.common.Strings; -import org.elasticsearch.core.Nullable; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Objects; - -/** - * Request to preview a MachineLearning Datafeed - */ -public class PreviewDatafeedRequest implements Validatable, ToXContentObject { - - private static final ParseField DATAFEED_CONFIG = new ParseField("datafeed_config"); - private static final ParseField JOB_CONFIG = new ParseField("job_config"); - - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "preview_datafeed_request", - a -> new PreviewDatafeedRequest((String) a[0], (DatafeedConfig.Builder) a[1], (Job.Builder) a[2]) - ); - - static { - PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), DatafeedConfig.ID); - PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(), DatafeedConfig.PARSER, DATAFEED_CONFIG); - PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(), Job.PARSER, JOB_CONFIG); - } - - public static PreviewDatafeedRequest fromXContent(XContentParser parser) throws IOException { - return PARSER.parse(parser, null); - } - - private final String datafeedId; - private final DatafeedConfig datafeedConfig; - private final Job jobConfig; - - private PreviewDatafeedRequest( - @Nullable String datafeedId, - @Nullable DatafeedConfig.Builder datafeedConfig, - @Nullable Job.Builder jobConfig - ) { - this.datafeedId = datafeedId; - this.datafeedConfig = datafeedConfig == null ? null : datafeedConfig.build(); - this.jobConfig = jobConfig == null ? 
null : jobConfig.build(); - } - - /** - * Create a new request with the desired datafeedId - * - * @param datafeedId unique datafeedId, must not be null - */ - public PreviewDatafeedRequest(String datafeedId) { - this.datafeedId = Objects.requireNonNull(datafeedId, "[datafeed_id] must not be null"); - this.datafeedConfig = null; - this.jobConfig = null; - } - - /** - * Create a new request to preview the provided datafeed config and optional job config - * @param datafeedConfig The datafeed to preview - * @param jobConfig The associated job config (required if the datafeed does not refer to an existing job) - */ - public PreviewDatafeedRequest(DatafeedConfig datafeedConfig, Job jobConfig) { - this.datafeedId = null; - this.datafeedConfig = datafeedConfig; - this.jobConfig = jobConfig; - } - - public String getDatafeedId() { - return datafeedId; - } - - public DatafeedConfig getDatafeedConfig() { - return datafeedConfig; - } - - public Job getJobConfig() { - return jobConfig; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - if (datafeedId != null) { - builder.field(DatafeedConfig.ID.getPreferredName(), datafeedId); - } - if (datafeedConfig != null) { - builder.field(DATAFEED_CONFIG.getPreferredName(), datafeedConfig); - } - if (jobConfig != null) { - builder.field(JOB_CONFIG.getPreferredName(), jobConfig); - } - builder.endObject(); - return builder; - } - - @Override - public String toString() { - return Strings.toString(this); - } - - @Override - public int hashCode() { - return Objects.hash(datafeedId, datafeedConfig, jobConfig); - } - - @Override - public boolean equals(Object other) { - if (this == other) { - return true; - } - - if (other == null || getClass() != other.getClass()) { - return false; - } - - PreviewDatafeedRequest that = (PreviewDatafeedRequest) other; - return Objects.equals(datafeedId, that.datafeedId) - && Objects.equals(datafeedConfig, that.datafeedConfig) - && Objects.equals(jobConfig, that.jobConfig); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PreviewDatafeedResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PreviewDatafeedResponse.java deleted file mode 100644 index 44ed4e40cd165..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PreviewDatafeedResponse.java +++ /dev/null @@ -1,103 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
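The removed PreviewDatafeedRequest supported two shapes: preview an existing datafeed by id, or preview an unsaved config/job pair via the second constructor. A sketch of the by-id form, under the same assumptions as above (hypothetical `client`, illustrative ids):

    // Preview an existing datafeed and pull the result back as maps.
    PreviewDatafeedRequest byId = new PreviewDatafeedRequest("my-datafeed");
    PreviewDatafeedResponse response = client.machineLearning().previewDatafeed(byId, RequestOptions.DEFAULT);
    List<Map<String, Object>> rows = response.getDataList();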
- */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.common.Strings; -import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.xcontent.DeprecationHandler; -import org.elasticsearch.xcontent.NamedXContentRegistry; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentFactory; -import org.elasticsearch.xcontent.XContentParser; -import org.elasticsearch.xcontent.XContentType; - -import java.io.IOException; -import java.io.InputStream; -import java.util.Collections; -import java.util.List; -import java.util.Map; -import java.util.Objects; -import java.util.stream.Collectors; - -/** - * Response containing a datafeed preview in JSON format - */ -public class PreviewDatafeedResponse implements ToXContentObject { - - private BytesReference preview; - - public static PreviewDatafeedResponse fromXContent(XContentParser parser) throws IOException { - try (XContentBuilder builder = XContentFactory.jsonBuilder()) { - parser.nextToken(); - builder.copyCurrentStructure(parser); - return new PreviewDatafeedResponse(BytesReference.bytes(builder)); - } - } - - public PreviewDatafeedResponse(BytesReference preview) { - this.preview = preview; - } - - public BytesReference getPreview() { - return preview; - } - - /** - * Parses the preview to a list of {@link Map} objects - * @return List of previewed data - * @throws IOException If there is a parsing issue with the {@link BytesReference} - * @throws java.lang.ClassCastException If casting the raw {@link Object} entries to a {@link Map} fails - */ - @SuppressWarnings("unchecked") - public List> getDataList() throws IOException { - try ( - StreamInput streamInput = preview.streamInput(); - XContentParser parser = XContentType.JSON.xContent() - .createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, streamInput) - ) { - XContentParser.Token token = parser.nextToken(); - if (token == XContentParser.Token.START_ARRAY) { - return parser.listOrderedMap().stream().map(obj -> (Map) obj).collect(Collectors.toList()); - } else { - return Collections.singletonList(parser.mapOrdered()); - } - } - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - try (InputStream stream = preview.streamInput()) { - builder.rawValue(stream, XContentType.JSON); - } - return builder; - } - - @Override - public int hashCode() { - return Objects.hash(preview); - } - - @Override - public boolean equals(Object obj) { - if (obj == null) { - return false; - } - if (getClass() != obj.getClass()) { - return false; - } - PreviewDatafeedResponse other = (PreviewDatafeedResponse) obj; - return Objects.equals(preview, other.preview); - } - - @Override - public final String toString() { - return Strings.toString(this); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutCalendarJobRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutCalendarJobRequest.java deleted file mode 100644 index cba01a764f6ca..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutCalendarJobRequest.java +++ /dev/null @@ -1,70 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. 
Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.Validatable; - -import java.security.InvalidParameterException; -import java.util.Arrays; -import java.util.List; -import java.util.Objects; - -/** - * Request class for adding Machine Learning Jobs to an existing calendar - */ -public class PutCalendarJobRequest implements Validatable { - - private final List jobIds; - private final String calendarId; - - /** - * Create a new request referencing an existing Calendar and which JobIds to add - * to it. - * - * @param calendarId The non-null ID of the calendar - * @param jobIds JobIds to add to the calendar, cannot be empty, or contain null values. - * It can be a list of jobs or groups. - */ - public PutCalendarJobRequest(String calendarId, String... jobIds) { - this.calendarId = Objects.requireNonNull(calendarId, "[calendar_id] must not be null."); - if (jobIds.length == 0) { - throw new InvalidParameterException("jobIds must not be empty."); - } - if (Arrays.stream(jobIds).anyMatch(Objects::isNull)) { - throw new NullPointerException("jobIds must not contain null values."); - } - this.jobIds = Arrays.asList(jobIds); - } - - public List getJobIds() { - return jobIds; - } - - public String getCalendarId() { - return calendarId; - } - - @Override - public int hashCode() { - return Objects.hash(jobIds, calendarId); - } - - @Override - public boolean equals(Object other) { - if (this == other) { - return true; - } - - if (other == null || getClass() != other.getClass()) { - return false; - } - - PutCalendarJobRequest that = (PutCalendarJobRequest) other; - return Objects.equals(jobIds, that.jobIds) && Objects.equals(calendarId, that.calendarId); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutCalendarRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutCalendarRequest.java deleted file mode 100644 index 7a45bc3163732..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutCalendarRequest.java +++ /dev/null @@ -1,56 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
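A sketch of how callers attached jobs to an existing calendar through the class removed above; the calendar id and job ids are illustrative, and the PutCalendarResponse return type is as best recalled from this client:

    // Job ids may name individual jobs or job groups.
    PutCalendarJobRequest request = new PutCalendarJobRequest("holidays", "job-1", "job-group-a");
    PutCalendarResponse response = client.machineLearning().putCalendarJob(request, RequestOptions.DEFAULT);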
- */ - -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.Validatable; -import org.elasticsearch.client.ml.calendars.Calendar; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Objects; - -/** - * Request to create a new Machine Learning calendar - */ -public class PutCalendarRequest implements Validatable, ToXContentObject { - - private final Calendar calendar; - - public PutCalendarRequest(Calendar calendar) { - this.calendar = calendar; - } - - public Calendar getCalendar() { - return calendar; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - calendar.toXContent(builder, params); - return builder; - } - - @Override - public int hashCode() { - return Objects.hash(calendar); - } - - @Override - public boolean equals(Object obj) { - if (obj == null) { - return false; - } - if (getClass() != obj.getClass()) { - return false; - } - PutCalendarRequest other = (PutCalendarRequest) obj; - return Objects.equals(calendar, other.calendar); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutCalendarResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutCalendarResponse.java deleted file mode 100644 index 3e3170a954815..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutCalendarResponse.java +++ /dev/null @@ -1,65 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
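Creating the calendar itself went through PutCalendarRequest, removed above. A minimal sketch, assuming the HLRC Calendar constructor took an id, job ids, and a description:

    Calendar calendar = new Calendar("holidays", List.of("job-1"), "Bank holidays");
    PutCalendarResponse response = client.machineLearning()
        .putCalendar(new PutCalendarRequest(calendar), RequestOptions.DEFAULT);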
- */ - -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.ml.calendars.Calendar; -import org.elasticsearch.common.Strings; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Objects; - -public class PutCalendarResponse implements ToXContentObject { - - public static PutCalendarResponse fromXContent(XContentParser parser) throws IOException { - return new PutCalendarResponse(Calendar.PARSER.parse(parser, null)); - } - - private final Calendar calendar; - - PutCalendarResponse(Calendar calendar) { - this.calendar = calendar; - } - - public Calendar getCalendar() { - return calendar; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - calendar.toXContent(builder, params); - return builder; - } - - @Override - public int hashCode() { - return Objects.hash(calendar); - } - - @Override - public boolean equals(Object obj) { - if (this == obj) { - return true; - } - - if (obj == null || getClass() != obj.getClass()) { - return false; - } - - PutCalendarResponse other = (PutCalendarResponse) obj; - return Objects.equals(calendar, other.calendar); - } - - @Override - public final String toString() { - return Strings.toString(this); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutDataFrameAnalyticsRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutDataFrameAnalyticsRequest.java deleted file mode 100644 index 33015ed97bf97..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutDataFrameAnalyticsRequest.java +++ /dev/null @@ -1,65 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ - -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.Validatable; -import org.elasticsearch.client.ValidationException; -import org.elasticsearch.client.ml.dataframe.DataFrameAnalyticsConfig; -import org.elasticsearch.common.Strings; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Objects; -import java.util.Optional; - -public class PutDataFrameAnalyticsRequest implements ToXContentObject, Validatable { - - private final DataFrameAnalyticsConfig config; - - public PutDataFrameAnalyticsRequest(DataFrameAnalyticsConfig config) { - this.config = config; - } - - public DataFrameAnalyticsConfig getConfig() { - return config; - } - - @Override - public Optional validate() { - if (config == null) { - return Optional.of(ValidationException.withError("put requires a non-null data frame analytics config")); - } - return Optional.empty(); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - return config.toXContent(builder, params); - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - - PutDataFrameAnalyticsRequest other = (PutDataFrameAnalyticsRequest) o; - return Objects.equals(config, other.config); - } - - @Override - public int hashCode() { - return Objects.hash(config); - } - - @Override - public String toString() { - return Strings.toString(this); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutDataFrameAnalyticsResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutDataFrameAnalyticsResponse.java deleted file mode 100644 index 7387de559c256..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutDataFrameAnalyticsResponse.java +++ /dev/null @@ -1,46 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
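A sketch of the create flow the two data frame analytics classes above supported; the source/dest/analysis builder names are recalled from the HLRC API rather than shown in this diff, so treat them as assumptions:

    DataFrameAnalyticsConfig config = DataFrameAnalyticsConfig.builder()
        .setId("my-analysis")
        .setSource(DataFrameAnalyticsSource.builder().setIndex("source-index").build())
        .setDest(DataFrameAnalyticsDest.builder().setIndex("dest-index").build())
        .setAnalysis(OutlierDetection.createDefault())
        .build();
    PutDataFrameAnalyticsRequest request = new PutDataFrameAnalyticsRequest(config);
    // validate() rejects a null config before the call goes over the wire.
    PutDataFrameAnalyticsResponse response = client.machineLearning()
        .putDataFrameAnalytics(request, RequestOptions.DEFAULT);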
- */ - -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.ml.dataframe.DataFrameAnalyticsConfig; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Objects; - -public class PutDataFrameAnalyticsResponse { - - public static PutDataFrameAnalyticsResponse fromXContent(XContentParser parser) throws IOException { - return new PutDataFrameAnalyticsResponse(DataFrameAnalyticsConfig.fromXContent(parser)); - } - - private final DataFrameAnalyticsConfig config; - - public PutDataFrameAnalyticsResponse(DataFrameAnalyticsConfig config) { - this.config = config; - } - - public DataFrameAnalyticsConfig getConfig() { - return config; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - - PutDataFrameAnalyticsResponse other = (PutDataFrameAnalyticsResponse) o; - return Objects.equals(config, other.config); - } - - @Override - public int hashCode() { - return Objects.hash(config); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutDatafeedRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutDatafeedRequest.java deleted file mode 100644 index d079f1b0fc8d6..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutDatafeedRequest.java +++ /dev/null @@ -1,68 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */
-package org.elasticsearch.client.ml;
-
-import org.elasticsearch.client.Validatable;
-import org.elasticsearch.client.ml.datafeed.DatafeedConfig;
-import org.elasticsearch.common.Strings;
-import org.elasticsearch.xcontent.ToXContentObject;
-import org.elasticsearch.xcontent.XContentBuilder;
-
-import java.io.IOException;
-import java.util.Objects;
-
-/**
- * Request to create a new Machine Learning Datafeed given a {@link DatafeedConfig} configuration
- */
-public class PutDatafeedRequest implements Validatable, ToXContentObject {
-
-    private final DatafeedConfig datafeed;
-
-    /**
-     * Construct a new PutDatafeedRequest
-     *
-     * @param datafeed a {@link DatafeedConfig} configuration to create
-     */
-    public PutDatafeedRequest(DatafeedConfig datafeed) {
-        this.datafeed = datafeed;
-    }
-
-    public DatafeedConfig getDatafeed() {
-        return datafeed;
-    }
-
-    @Override
-    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
-        return datafeed.toXContent(builder, params);
-    }
-
-    @Override
-    public boolean equals(Object object) {
-        if (this == object) {
-            return true;
-        }
-
-        if (object == null || getClass() != object.getClass()) {
-            return false;
-        }
-
-        PutDatafeedRequest request = (PutDatafeedRequest) object;
-        return Objects.equals(datafeed, request.datafeed);
-    }
-
-    @Override
-    public int hashCode() {
-        return Objects.hash(datafeed);
-    }
-
-    @Override
-    public final String toString() {
-        return Strings.toString(this);
-    }
-
-}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutDatafeedResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutDatafeedResponse.java
deleted file mode 100644
index 6abaf8deb4be3..0000000000000
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutDatafeedResponse.java
+++ /dev/null
@@ -1,59 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
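A minimal sketch of creating a datafeed with the request class above; ids, indices, and the query delay are illustrative:

    DatafeedConfig datafeed = DatafeedConfig.builder("my-datafeed", "my-job")
        .setIndices("metrics-*")
        .setQueryDelay(TimeValue.timeValueMinutes(1))
        .build();
    PutDatafeedResponse response = client.machineLearning()
        .putDatafeed(new PutDatafeedRequest(datafeed), RequestOptions.DEFAULT);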
- */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.ml.datafeed.DatafeedConfig; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Objects; - -/** - * Response containing the newly created {@link DatafeedConfig} - */ -public class PutDatafeedResponse implements ToXContentObject { - - private DatafeedConfig datafeed; - - public static PutDatafeedResponse fromXContent(XContentParser parser) throws IOException { - return new PutDatafeedResponse(DatafeedConfig.PARSER.parse(parser, null).build()); - } - - PutDatafeedResponse(DatafeedConfig datafeed) { - this.datafeed = datafeed; - } - - public DatafeedConfig getResponse() { - return datafeed; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - datafeed.toXContent(builder, params); - return builder; - } - - @Override - public boolean equals(Object object) { - if (this == object) { - return true; - } - if (object == null || getClass() != object.getClass()) { - return false; - } - PutDatafeedResponse response = (PutDatafeedResponse) object; - return Objects.equals(datafeed, response.datafeed); - } - - @Override - public int hashCode() { - return Objects.hash(datafeed); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutFilterRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutFilterRequest.java deleted file mode 100644 index dd08f7a96c9b0..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutFilterRequest.java +++ /dev/null @@ -1,68 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.Validatable; -import org.elasticsearch.client.ml.job.config.MlFilter; -import org.elasticsearch.common.Strings; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Objects; - -/** - * Request to create a new Machine Learning MlFilter given a {@link MlFilter} configuration - */ -public class PutFilterRequest implements Validatable, ToXContentObject { - - private final MlFilter filter; - - /** - * Construct a new PutMlFilterRequest - * - * @param filter a {@link MlFilter} configuration to create - */ - public PutFilterRequest(MlFilter filter) { - this.filter = filter; - } - - public MlFilter getMlFilter() { - return filter; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - return filter.toXContent(builder, params); - } - - @Override - public boolean equals(Object object) { - if (this == object) { - return true; - } - - if (object == null || getClass() != object.getClass()) { - return false; - } - - PutFilterRequest request = (PutFilterRequest) object; - return Objects.equals(filter, request.filter); - } - - @Override - public int hashCode() { - return Objects.hash(filter); - } - - @Override - public final String toString() { - return Strings.toString(this); - } - -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutFilterResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutFilterResponse.java deleted file mode 100644 index 48a850be7d228..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutFilterResponse.java +++ /dev/null @@ -1,59 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
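How the removed filter request was typically exercised, sketched with an illustrative filter id and items:

    MlFilter filter = MlFilter.builder("safe-domains")
        .setDescription("Domains to exclude from analysis")
        .setItems("elastic.co", "example.com")
        .build();
    PutFilterResponse response = client.machineLearning()
        .putFilter(new PutFilterRequest(filter), RequestOptions.DEFAULT);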
- */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.ml.job.config.MlFilter; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Objects; - -/** - * Response containing the newly created {@link MlFilter} - */ -public class PutFilterResponse implements ToXContentObject { - - private MlFilter filter; - - public static PutFilterResponse fromXContent(XContentParser parser) throws IOException { - return new PutFilterResponse(MlFilter.PARSER.parse(parser, null).build()); - } - - PutFilterResponse(MlFilter filter) { - this.filter = filter; - } - - public MlFilter getResponse() { - return filter; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - filter.toXContent(builder, params); - return builder; - } - - @Override - public boolean equals(Object object) { - if (this == object) { - return true; - } - if (object == null || getClass() != object.getClass()) { - return false; - } - PutFilterResponse response = (PutFilterResponse) object; - return Objects.equals(filter, response.filter); - } - - @Override - public int hashCode() { - return Objects.hash(filter); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutJobRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutJobRequest.java deleted file mode 100644 index 04bfc4f3f9169..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutJobRequest.java +++ /dev/null @@ -1,68 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */
-package org.elasticsearch.client.ml;
-
-import org.elasticsearch.client.Validatable;
-import org.elasticsearch.client.ml.job.config.Job;
-import org.elasticsearch.common.Strings;
-import org.elasticsearch.xcontent.ToXContentObject;
-import org.elasticsearch.xcontent.XContentBuilder;
-
-import java.io.IOException;
-import java.util.Objects;
-
-/**
- * Request to create a new Machine Learning Job given a {@link Job} configuration
- */
-public class PutJobRequest implements Validatable, ToXContentObject {
-
-    private final Job job;
-
-    /**
-     * Construct a new PutJobRequest
-     *
-     * @param job a {@link Job} configuration to create
-     */
-    public PutJobRequest(Job job) {
-        this.job = job;
-    }
-
-    public Job getJob() {
-        return job;
-    }
-
-    @Override
-    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
-        return job.toXContent(builder, params);
-    }
-
-    @Override
-    public boolean equals(Object object) {
-        if (this == object) {
-            return true;
-        }
-
-        if (object == null || getClass() != object.getClass()) {
-            return false;
-        }
-
-        PutJobRequest request = (PutJobRequest) object;
-        return Objects.equals(job, request.job);
-    }
-
-    @Override
-    public int hashCode() {
-        return Objects.hash(job);
-    }
-
-    @Override
-    public final String toString() {
-        return Strings.toString(this);
-    }
-
-}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutJobResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutJobResponse.java
deleted file mode 100644
index 532a6f54ba30a..0000000000000
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutJobResponse.java
+++ /dev/null
@@ -1,59 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
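A compact sketch of the job-creation flow that PutJobRequest/PutJobResponse carried; the detector, bucket span, and ids are illustrative:

    Detector detector = new Detector.Builder("mean", "responsetime").build();
    AnalysisConfig analysisConfig = AnalysisConfig.builder(List.of(detector))
        .setBucketSpan(TimeValue.timeValueMinutes(15))
        .build();
    Job job = new Job.Builder("my-job")
        .setAnalysisConfig(analysisConfig)
        .setDataDescription(new DataDescription.Builder())
        .build();
    PutJobResponse response = client.machineLearning()
        .putJob(new PutJobRequest(job), RequestOptions.DEFAULT);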
- */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.ml.job.config.Job; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Objects; - -/** - * Response containing the newly created {@link Job} - */ -public class PutJobResponse implements ToXContentObject { - - private Job job; - - public static PutJobResponse fromXContent(XContentParser parser) throws IOException { - return new PutJobResponse(Job.PARSER.parse(parser, null).build()); - } - - PutJobResponse(Job job) { - this.job = job; - } - - public Job getResponse() { - return job; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - job.toXContent(builder, params); - return builder; - } - - @Override - public boolean equals(Object object) { - if (this == object) { - return true; - } - if (object == null || getClass() != object.getClass()) { - return false; - } - PutJobResponse response = (PutJobResponse) object; - return Objects.equals(job, response.job); - } - - @Override - public int hashCode() { - return Objects.hash(job); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutTrainedModelAliasRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutTrainedModelAliasRequest.java deleted file mode 100644 index 7988ae35f1c6a..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutTrainedModelAliasRequest.java +++ /dev/null @@ -1,56 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ - -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.Validatable; - -import java.util.Objects; - -public class PutTrainedModelAliasRequest implements Validatable { - - public static final String REASSIGN = "reassign"; - - private final String modelAlias; - private final String modelId; - private final Boolean reassign; - - public PutTrainedModelAliasRequest(String modelAlias, String modelId, Boolean reassign) { - this.modelAlias = Objects.requireNonNull(modelAlias); - this.modelId = Objects.requireNonNull(modelId); - this.reassign = reassign; - } - - public String getModelAlias() { - return modelAlias; - } - - public String getModelId() { - return modelId; - } - - public Boolean getReassign() { - return reassign; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - PutTrainedModelAliasRequest request = (PutTrainedModelAliasRequest) o; - return Objects.equals(modelAlias, request.modelAlias) - && Objects.equals(modelId, request.modelId) - && Objects.equals(reassign, request.reassign); - } - - @Override - public int hashCode() { - return Objects.hash(modelAlias, modelId, reassign); - } - -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutTrainedModelRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutTrainedModelRequest.java deleted file mode 100644 index 5276713c921be..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutTrainedModelRequest.java +++ /dev/null @@ -1,54 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
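The alias request above has no response class of its own; as best recalled it returned the client's AcknowledgedResponse, which is an assumption here:

    // Pass reassign = true when the alias may already point at an older model.
    PutTrainedModelAliasRequest request = new PutTrainedModelAliasRequest("my-alias", "my-model-v2", true);
    AcknowledgedResponse response = client.machineLearning()
        .putTrainedModelAlias(request, RequestOptions.DEFAULT);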
- */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.Validatable; -import org.elasticsearch.client.ml.inference.TrainedModelConfig; -import org.elasticsearch.common.Strings; -import org.elasticsearch.xcontent.ToXContent; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Objects; - -public class PutTrainedModelRequest implements Validatable, ToXContentObject { - - private final TrainedModelConfig config; - - public PutTrainedModelRequest(TrainedModelConfig config) { - this.config = config; - } - - public TrainedModelConfig getTrainedModelConfig() { - return config; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { - return config.toXContent(builder, params); - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - PutTrainedModelRequest request = (PutTrainedModelRequest) o; - return Objects.equals(config, request.config); - } - - @Override - public int hashCode() { - return Objects.hash(config); - } - - @Override - public final String toString() { - return Strings.toString(config); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutTrainedModelResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutTrainedModelResponse.java deleted file mode 100644 index dabcc7d24cc0f..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/PutTrainedModelResponse.java +++ /dev/null @@ -1,51 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
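A sketch of uploading a model through the request class above; `compressedDefinition` stands in for a base64 model definition produced elsewhere, and the builder setters are recalled from the HLRC API rather than shown in this diff:

    TrainedModelConfig config = TrainedModelConfig.builder()
        .setModelId("my-regression-model")
        .setInput(new TrainedModelInput(List.of("feature_1", "feature_2")))
        .setCompressedDefinition(compressedDefinition) // hypothetical variable
        .build();
    PutTrainedModelResponse response = client.machineLearning()
        .putTrainedModel(new PutTrainedModelRequest(config), RequestOptions.DEFAULT);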
- */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.ml.inference.TrainedModelConfig; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Objects; - -public class PutTrainedModelResponse implements ToXContentObject { - - private final TrainedModelConfig trainedModelConfig; - - public static PutTrainedModelResponse fromXContent(XContentParser parser) throws IOException { - return new PutTrainedModelResponse(TrainedModelConfig.PARSER.parse(parser, null).build()); - } - - public PutTrainedModelResponse(TrainedModelConfig trainedModelConfig) { - this.trainedModelConfig = trainedModelConfig; - } - - public TrainedModelConfig getResponse() { - return trainedModelConfig; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - return trainedModelConfig.toXContent(builder, params); - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - PutTrainedModelResponse response = (PutTrainedModelResponse) o; - return Objects.equals(trainedModelConfig, response.trainedModelConfig); - } - - @Override - public int hashCode() { - return Objects.hash(trainedModelConfig); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/RevertModelSnapshotRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/RevertModelSnapshotRequest.java deleted file mode 100644 index 0295d72b7d9c5..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/RevertModelSnapshotRequest.java +++ /dev/null @@ -1,103 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.Validatable; -import org.elasticsearch.client.ml.job.config.Job; -import org.elasticsearch.client.ml.job.process.ModelSnapshot; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Objects; - -/** - * A request to revert to a specific model snapshot for a given job - */ -public class RevertModelSnapshotRequest implements Validatable, ToXContentObject { - - public static final ParseField DELETE_INTERVENING = new ParseField("delete_intervening_results"); - - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "revert_model_snapshots_request", - a -> new RevertModelSnapshotRequest((String) a[0], (String) a[1]) - ); - - static { - PARSER.declareString(ConstructingObjectParser.constructorArg(), Job.ID); - PARSER.declareString(ConstructingObjectParser.constructorArg(), ModelSnapshot.SNAPSHOT_ID); - PARSER.declareBoolean(RevertModelSnapshotRequest::setDeleteInterveningResults, DELETE_INTERVENING); - } - - private final String jobId; - private final String snapshotId; - private Boolean deleteInterveningResults; - - /** - * Constructs a request to revert to a given model snapshot - * @param jobId id of the job for which to revert the model snapshot - * @param snapshotId id of the snapshot to which to revert - */ - public RevertModelSnapshotRequest(String jobId, String snapshotId) { - this.jobId = Objects.requireNonNull(jobId, "[" + Job.ID + "] must not be null"); - this.snapshotId = Objects.requireNonNull(snapshotId, "[" + ModelSnapshot.SNAPSHOT_ID + "] must not be null"); - } - - public String getJobId() { - return jobId; - } - - public String getSnapshotId() { - return snapshotId; - } - - public Boolean getDeleteInterveningResults() { - return deleteInterveningResults; - } - - /** - * Sets the request flag that indicates whether or not intervening results should be deleted. - * @param deleteInterveningResults Flag that indicates whether or not intervening results should be deleted. 
- */ - public void setDeleteInterveningResults(Boolean deleteInterveningResults) { - this.deleteInterveningResults = deleteInterveningResults; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(Job.ID.getPreferredName(), jobId); - builder.field(ModelSnapshot.SNAPSHOT_ID.getPreferredName(), snapshotId); - if (deleteInterveningResults != null) { - builder.field(DELETE_INTERVENING.getPreferredName(), deleteInterveningResults); - } - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object obj) { - if (obj == null) { - return false; - } - if (getClass() != obj.getClass()) { - return false; - } - RevertModelSnapshotRequest request = (RevertModelSnapshotRequest) obj; - return Objects.equals(jobId, request.jobId) - && Objects.equals(snapshotId, request.snapshotId) - && Objects.equals(deleteInterveningResults, request.deleteInterveningResults); - } - - @Override - public int hashCode() { - return Objects.hash(jobId, snapshotId, deleteInterveningResults); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/RevertModelSnapshotResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/RevertModelSnapshotResponse.java deleted file mode 100644 index 6110569ac9197..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/RevertModelSnapshotResponse.java +++ /dev/null @@ -1,81 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.ml.job.process.ModelSnapshot; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Objects; - -/** - * A response containing the reverted model snapshot - */ -public class RevertModelSnapshotResponse implements ToXContentObject { - - private static final ParseField MODEL = new ParseField("model"); - - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "revert_model_snapshot_response", - true, - a -> new RevertModelSnapshotResponse((ModelSnapshot.Builder) a[0]) - ); - - static { - PARSER.declareObject(ConstructingObjectParser.constructorArg(), ModelSnapshot.PARSER, MODEL); - } - - public static RevertModelSnapshotResponse fromXContent(XContentParser parser) throws IOException { - return PARSER.parse(parser, null); - } - - public RevertModelSnapshotResponse(ModelSnapshot.Builder modelSnapshot) { - this.model = modelSnapshot.build(); - } - - private final ModelSnapshot model; - - /** - * Get full information about the reverted model snapshot - * @return the reverted model snapshot. 
- */ - public ModelSnapshot getModel() { - return model; - } - - @Override - public int hashCode() { - return Objects.hash(model); - } - - @Override - public boolean equals(Object obj) { - if (obj == null) { - return false; - } - if (getClass() != obj.getClass()) { - return false; - } - RevertModelSnapshotResponse other = (RevertModelSnapshotResponse) obj; - return Objects.equals(model, other.model); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - if (model != null) { - builder.field(MODEL.getPreferredName(), model); - } - builder.endObject(); - return builder; - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/SetUpgradeModeRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/SetUpgradeModeRequest.java deleted file mode 100644 index 7e03117fd13d4..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/SetUpgradeModeRequest.java +++ /dev/null @@ -1,75 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.Validatable; -import org.elasticsearch.core.TimeValue; -import org.elasticsearch.xcontent.ParseField; - -import java.util.Objects; - -/** - * Sets ML into upgrade_mode - */ -public class SetUpgradeModeRequest implements Validatable { - - public static final ParseField ENABLED = new ParseField("enabled"); - public static final ParseField TIMEOUT = new ParseField("timeout"); - - private boolean enabled; - private TimeValue timeout; - - /** - * Create a new request - * - * @param enabled whether to enable `upgrade_mode` or not - */ - public SetUpgradeModeRequest(boolean enabled) { - this.enabled = enabled; - } - - public boolean isEnabled() { - return enabled; - } - - public void setEnabled(boolean enabled) { - this.enabled = enabled; - } - - public TimeValue getTimeout() { - return timeout; - } - - /** - * How long to wait for the request to be completed - * - * @param timeout default value of 30 seconds - */ - public void setTimeout(TimeValue timeout) { - this.timeout = timeout; - } - - @Override - public int hashCode() { - return Objects.hash(enabled, timeout); - } - - @Override - public boolean equals(Object other) { - if (this == other) { - return true; - } - - if (other == null || getClass() != other.getClass()) { - return false; - } - - SetUpgradeModeRequest that = (SetUpgradeModeRequest) other; - return Objects.equals(enabled, that.enabled) && Objects.equals(timeout, that.timeout); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/StartDataFrameAnalyticsRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/StartDataFrameAnalyticsRequest.java deleted file mode 100644 index 0bb09846e7047..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/StartDataFrameAnalyticsRequest.java +++ /dev/null @@ -1,62 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. 
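A sketch of toggling upgrade mode via the request removed above; the timeout value is illustrative and the AcknowledgedResponse return type is an assumption:

    SetUpgradeModeRequest request = new SetUpgradeModeRequest(true);
    request.setTimeout(TimeValue.timeValueMinutes(2)); // defaults to 30 seconds
    AcknowledgedResponse response = client.machineLearning()
        .setUpgradeMode(request, RequestOptions.DEFAULT);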
Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ - -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.Validatable; -import org.elasticsearch.client.ValidationException; -import org.elasticsearch.core.Nullable; -import org.elasticsearch.core.TimeValue; - -import java.util.Objects; -import java.util.Optional; - -public class StartDataFrameAnalyticsRequest implements Validatable { - - private final String id; - private TimeValue timeout; - - public StartDataFrameAnalyticsRequest(String id) { - this.id = id; - } - - public String getId() { - return id; - } - - public TimeValue getTimeout() { - return timeout; - } - - public StartDataFrameAnalyticsRequest setTimeout(@Nullable TimeValue timeout) { - this.timeout = timeout; - return this; - } - - @Override - public Optional validate() { - if (id == null) { - return Optional.of(ValidationException.withError("data frame analytics id must not be null")); - } - return Optional.empty(); - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - - StartDataFrameAnalyticsRequest other = (StartDataFrameAnalyticsRequest) o; - return Objects.equals(id, other.id) && Objects.equals(timeout, other.timeout); - } - - @Override - public int hashCode() { - return Objects.hash(id, timeout); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/StartDataFrameAnalyticsResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/StartDataFrameAnalyticsResponse.java deleted file mode 100644 index a158ad9eae705..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/StartDataFrameAnalyticsResponse.java +++ /dev/null @@ -1,85 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
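Starting an analysis with the request class above, sketched under the same hypothetical `client`:

    StartDataFrameAnalyticsRequest request = new StartDataFrameAnalyticsRequest("my-analysis")
        .setTimeout(TimeValue.timeValueSeconds(30));
    StartDataFrameAnalyticsResponse response = client.machineLearning()
        .startDataFrameAnalytics(request, RequestOptions.DEFAULT);
    String node = response.getNode(); // empty string = lazy assignment, null = old server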
- */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.action.support.master.AcknowledgedResponse; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Objects; - -/** - * Response indicating if the Machine Learning Datafeed is now started or not - */ -public class StartDataFrameAnalyticsResponse extends AcknowledgedResponse { - - private static final ParseField NODE = new ParseField("node"); - - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "start_data_frame_analytics_response", - true, - (a) -> new StartDataFrameAnalyticsResponse((Boolean) a[0], (String) a[1]) - ); - - static { - declareAcknowledgedField(PARSER); - PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), NODE); - } - - private final String node; - - public StartDataFrameAnalyticsResponse(boolean acknowledged, String node) { - super(acknowledged); - this.node = node; - } - - public static StartDataFrameAnalyticsResponse fromXContent(XContentParser parser) throws IOException { - return PARSER.parse(parser, null); - } - - /** - * The node that the job was assigned to - * - * @return The ID of a node if the job was assigned to a node. If an empty string is returned - * it means the job was allowed to open lazily and has not yet been assigned to a node. - * If null is returned it means the server version is too old to return node - * information. - */ - public String getNode() { - return node; - } - - @Override - public boolean equals(Object other) { - if (this == other) { - return true; - } - - if (other == null || getClass() != other.getClass()) { - return false; - } - - StartDataFrameAnalyticsResponse that = (StartDataFrameAnalyticsResponse) other; - return isAcknowledged() == that.isAcknowledged() && Objects.equals(node, that.node); - } - - @Override - public int hashCode() { - return Objects.hash(isAcknowledged(), node); - } - - @Override - public void addCustomFields(XContentBuilder builder, Params params) throws IOException { - if (node != null) { - builder.field(NODE.getPreferredName(), node); - } - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/StartDatafeedRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/StartDatafeedRequest.java deleted file mode 100644 index 48a9d41192215..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/StartDatafeedRequest.java +++ /dev/null @@ -1,144 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.Validatable; -import org.elasticsearch.client.ml.datafeed.DatafeedConfig; -import org.elasticsearch.core.TimeValue; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Objects; - -/** - * Request to start a Datafeed - */ -public class StartDatafeedRequest implements Validatable, ToXContentObject { - - public static final ParseField START = new ParseField("start"); - public static final ParseField END = new ParseField("end"); - public static final ParseField TIMEOUT = new ParseField("timeout"); - - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "start_datafeed_request", - a -> new StartDatafeedRequest((String) a[0]) - ); - - static { - PARSER.declareString(ConstructingObjectParser.constructorArg(), DatafeedConfig.ID); - PARSER.declareString(StartDatafeedRequest::setStart, START); - PARSER.declareString(StartDatafeedRequest::setEnd, END); - PARSER.declareString((params, val) -> params.setTimeout(TimeValue.parseTimeValue(val, TIMEOUT.getPreferredName())), TIMEOUT); - } - - private final String datafeedId; - private String start; - private String end; - private TimeValue timeout; - - /** - * Create a new StartDatafeedRequest for the given DatafeedId - * - * @param datafeedId non-null existing Datafeed ID - */ - public StartDatafeedRequest(String datafeedId) { - this.datafeedId = Objects.requireNonNull(datafeedId, "[datafeed_id] must not be null"); - } - - public String getDatafeedId() { - return datafeedId; - } - - public String getStart() { - return start; - } - - /** - * The time that the datafeed should begin. This value is inclusive. - * - * If you specify a start value that is earlier than the timestamp of the latest processed record, - * the datafeed continues from 1 millisecond after the timestamp of the latest processed record. - * - * If you do not specify a start time and the datafeed is associated with a new job, - * the analysis starts from the earliest time for which data is available. - * - * @param start String representation of a timestamp; may be an epoch seconds, epoch millis or an ISO 8601 string - */ - public void setStart(String start) { - this.start = start; - } - - public String getEnd() { - return end; - } - - /** - * The time that the datafeed should end. This value is exclusive. - * If you do not specify an end time, the datafeed runs continuously. - * - * @param end String representation of a timestamp; may be an epoch seconds, epoch millis or an ISO 8601 string - */ - public void setEnd(String end) { - this.end = end; - } - - public TimeValue getTimeout() { - return timeout; - } - - /** - * Indicates how long to wait for the cluster to respond to the request. 
- * - * @param timeout TimeValue for how long to wait for a response from the cluster - */ - public void setTimeout(TimeValue timeout) { - this.timeout = timeout; - } - - @Override - public int hashCode() { - return Objects.hash(datafeedId, start, end, timeout); - } - - @Override - public boolean equals(Object obj) { - if (this == obj) { - return true; - } - - if (obj == null || obj.getClass() != getClass()) { - return false; - } - - StartDatafeedRequest other = (StartDatafeedRequest) obj; - return Objects.equals(datafeedId, other.datafeedId) - && Objects.equals(start, other.start) - && Objects.equals(end, other.end) - && Objects.equals(timeout, other.timeout); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(DatafeedConfig.ID.getPreferredName(), datafeedId); - if (start != null) { - builder.field(START.getPreferredName(), start); - } - if (end != null) { - builder.field(END.getPreferredName(), end); - } - if (timeout != null) { - builder.field(TIMEOUT.getPreferredName(), timeout.getStringRep()); - } - builder.endObject(); - return builder; - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/StartDatafeedResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/StartDatafeedResponse.java deleted file mode 100644 index 25417797bb6ba..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/StartDatafeedResponse.java +++ /dev/null @@ -1,100 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
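The start/end semantics documented above (inclusive start, exclusive end) in a short sketch; the timestamps are illustrative:

    StartDatafeedRequest request = new StartDatafeedRequest("my-datafeed");
    request.setStart("2022-01-01T00:00:00Z"); // inclusive
    request.setEnd("2022-02-01T00:00:00Z");   // exclusive; omit to run continuously
    request.setTimeout(TimeValue.timeValueSeconds(30));
    StartDatafeedResponse response = client.machineLearning()
        .startDatafeed(request, RequestOptions.DEFAULT);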
- */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Objects; - -/** - * Response indicating if the Machine Learning Datafeed is now started or not - */ -public class StartDatafeedResponse implements ToXContentObject { - - private static final ParseField STARTED = new ParseField("started"); - private static final ParseField NODE = new ParseField("node"); - - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "start_datafeed_response", - true, - (a) -> new StartDatafeedResponse((Boolean) a[0], (String) a[1]) - ); - - static { - PARSER.declareBoolean(ConstructingObjectParser.constructorArg(), STARTED); - PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), NODE); - } - - private final boolean started; - private final String node; - - public StartDatafeedResponse(boolean started, String node) { - this.started = started; - this.node = node; - } - - public static StartDatafeedResponse fromXContent(XContentParser parser) throws IOException { - return PARSER.parse(parser, null); - } - - /** - * Has the Datafeed started or not - * - * @return boolean value indicating the Datafeed started status - */ - public boolean isStarted() { - return started; - } - - /** - * The node that the datafeed was assigned to - * - * @return The ID of a node if the datafeed was assigned to a node. If an empty string is returned - * it means the datafeed was allowed to open lazily and has not yet been assigned to a node. - * If null is returned it means the server version is too old to return node - * information. - */ - public String getNode() { - return node; - } - - @Override - public boolean equals(Object other) { - if (this == other) { - return true; - } - - if (other == null || getClass() != other.getClass()) { - return false; - } - - StartDatafeedResponse that = (StartDatafeedResponse) other; - return started == that.started && Objects.equals(node, that.node); - } - - @Override - public int hashCode() { - return Objects.hash(started, node); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(STARTED.getPreferredName(), started); - if (node != null) { - builder.field(NODE.getPreferredName(), node); - } - builder.endObject(); - return builder; - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/StopDataFrameAnalyticsRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/StopDataFrameAnalyticsRequest.java deleted file mode 100644 index c8263bed50fac..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/StopDataFrameAnalyticsRequest.java +++ /dev/null @@ -1,89 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
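A short sketch of consuming the StartDatafeedResponse above via its fromXContent factory; the XContentParser instance is an assumption here, positioned on a start-datafeed response body.

    StartDatafeedResponse response = StartDatafeedResponse.fromXContent(parser); // 'parser' is an assumed XContentParser
    if (response.isStarted()) {
        // getNode() is null on servers too old to report the node, and an empty
        // string when the datafeed opened lazily and is not yet assigned to a node
        String node = response.getNode();
    }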
- */ - -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.Validatable; -import org.elasticsearch.client.ValidationException; -import org.elasticsearch.core.Nullable; -import org.elasticsearch.core.TimeValue; -import org.elasticsearch.xcontent.ParseField; - -import java.util.Objects; -import java.util.Optional; - -public class StopDataFrameAnalyticsRequest implements Validatable { - - public static final ParseField ALLOW_NO_MATCH = new ParseField("allow_no_match"); - public static final ParseField FORCE = new ParseField("force"); - - private final String id; - private Boolean allowNoMatch; - private Boolean force; - private TimeValue timeout; - - public StopDataFrameAnalyticsRequest(String id) { - this.id = id; - } - - public String getId() { - return id; - } - - public TimeValue getTimeout() { - return timeout; - } - - public StopDataFrameAnalyticsRequest setTimeout(@Nullable TimeValue timeout) { - this.timeout = timeout; - return this; - } - - public Boolean getAllowNoMatch() { - return allowNoMatch; - } - - public StopDataFrameAnalyticsRequest setAllowNoMatch(boolean allowNoMatch) { - this.allowNoMatch = allowNoMatch; - return this; - } - - public Boolean getForce() { - return force; - } - - public StopDataFrameAnalyticsRequest setForce(boolean force) { - this.force = force; - return this; - } - - @Override - public Optional validate() { - if (id == null) { - return Optional.of(ValidationException.withError("data frame analytics id must not be null")); - } - return Optional.empty(); - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - - StopDataFrameAnalyticsRequest other = (StopDataFrameAnalyticsRequest) o; - return Objects.equals(id, other.id) - && Objects.equals(timeout, other.timeout) - && Objects.equals(allowNoMatch, other.allowNoMatch) - && Objects.equals(force, other.force); - } - - @Override - public int hashCode() { - return Objects.hash(id, timeout, allowNoMatch, force); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/StopDataFrameAnalyticsResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/StopDataFrameAnalyticsResponse.java deleted file mode 100644 index 9c4dc1d67be5c..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/StopDataFrameAnalyticsResponse.java +++ /dev/null @@ -1,73 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
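A sketch of the removed StopDataFrameAnalyticsRequest in use; note that the setters return this, so they chain (the id and values are illustrative only).

    StopDataFrameAnalyticsRequest request = new StopDataFrameAnalyticsRequest("my-analytics") // null id fails validate()
        .setTimeout(TimeValue.timeValueMinutes(1))
        .setAllowNoMatch(false) // fail if the id matches no analytics jobs
        .setForce(true);        // stop forcefully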
- */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Objects; - -/** - * Response indicating if the Machine Learning Data Frame Analytics is now stopped or not - */ -public class StopDataFrameAnalyticsResponse implements ToXContentObject { - - private static final ParseField STOPPED = new ParseField("stopped"); - - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "stop_data_frame_analytics_response", - true, - args -> new StopDataFrameAnalyticsResponse((Boolean) args[0]) - ); - - static { - PARSER.declareBoolean(ConstructingObjectParser.constructorArg(), STOPPED); - } - - public static StopDataFrameAnalyticsResponse fromXContent(XContentParser parser) throws IOException { - return PARSER.parse(parser, null); - } - - private final boolean stopped; - - public StopDataFrameAnalyticsResponse(boolean stopped) { - this.stopped = stopped; - } - - /** - * Has the Data Frame Analytics stopped or not - * - * @return boolean value indicating the Data Frame Analytics stopped status - */ - public boolean isStopped() { - return stopped; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - - StopDataFrameAnalyticsResponse other = (StopDataFrameAnalyticsResponse) o; - return stopped == other.stopped; - } - - @Override - public int hashCode() { - return Objects.hash(stopped); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - return builder.startObject().field(STOPPED.getPreferredName(), stopped).endObject(); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/StopDatafeedRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/StopDatafeedRequest.java deleted file mode 100644 index 9c62b0a4d2bf4..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/StopDatafeedRequest.java +++ /dev/null @@ -1,182 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.Validatable; -import org.elasticsearch.client.ml.datafeed.DatafeedConfig; -import org.elasticsearch.common.Strings; -import org.elasticsearch.core.TimeValue; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.security.InvalidParameterException; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; -import java.util.Objects; - -/** - * Request to stop Machine Learning Datafeeds - */ -public class StopDatafeedRequest implements Validatable, ToXContentObject { - - public static final ParseField TIMEOUT = new ParseField("timeout"); - public static final ParseField FORCE = new ParseField("force"); - public static final ParseField ALLOW_NO_MATCH = new ParseField("allow_no_match"); - - @SuppressWarnings("unchecked") - public static final ConstructingObjectParser<StopDatafeedRequest, Void> PARSER = new ConstructingObjectParser<>( - "stop_datafeed_request", - a -> new StopDatafeedRequest((List<String>) a[0]) - ); - - static { - PARSER.declareField( - ConstructingObjectParser.constructorArg(), - p -> Arrays.asList(Strings.commaDelimitedListToStringArray(p.text())), - DatafeedConfig.ID, - ObjectParser.ValueType.STRING_ARRAY - ); - PARSER.declareString((obj, val) -> obj.setTimeout(TimeValue.parseTimeValue(val, TIMEOUT.getPreferredName())), TIMEOUT); - PARSER.declareBoolean(StopDatafeedRequest::setForce, FORCE); - PARSER.declareBoolean(StopDatafeedRequest::setAllowNoMatch, ALLOW_NO_MATCH); - } - - private static final String ALL_DATAFEEDS = "_all"; - - private final List<String> datafeedIds; - private TimeValue timeout; - private Boolean force; - private Boolean allowNoMatch; - - /** - * Explicitly stop all datafeeds - * - * @return a {@link StopDatafeedRequest} for all existing datafeeds - */ - public static StopDatafeedRequest stopAllDatafeedsRequest() { - return new StopDatafeedRequest(ALL_DATAFEEDS); - } - - StopDatafeedRequest(List<String> datafeedIds) { - if (datafeedIds.isEmpty()) { - throw new InvalidParameterException("datafeedIds must not be empty"); - } - if (datafeedIds.stream().anyMatch(Objects::isNull)) { - throw new NullPointerException("datafeedIds must not contain null values"); - } - this.datafeedIds = new ArrayList<>(datafeedIds); - } - - /** - * Stop the specified Datafeeds via their unique datafeedIds - * - * @param datafeedIds must be non-null and non-empty and each datafeedId must be non-null - */ - public StopDatafeedRequest(String... datafeedIds) { - this(Arrays.asList(datafeedIds)); - } - - /** - * All the datafeedIds to be stopped - */ - public List<String> getDatafeedIds() { - return datafeedIds; - } - - public TimeValue getTimeout() { - return timeout; - } - - /** - * How long to wait for the stop request to complete before timing out. - * - * @param timeout Default value: 30 minutes - */ - public void setTimeout(TimeValue timeout) { - this.timeout = timeout; - } - - public Boolean getForce() { - return force; - } - - /** - * Should the stopping be forced. - * - * Use to forcefully stop a datafeed - * - * @param force When {@code true} forcefully stop the datafeed.
Defaults to {@code false} - */ - public void setForce(boolean force) { - this.force = force; - } - - public Boolean getAllowNoMatch() { - return this.allowNoMatch; - } - - /** - * Whether to ignore if a wildcard expression matches no datafeeds. - * - * This includes {@code _all} string. - * - * @param allowNoMatch When {@code true} ignore if wildcard or {@code _all} matches no datafeeds. Defaults to {@code true} - */ - public void setAllowNoMatch(boolean allowNoMatch) { - this.allowNoMatch = allowNoMatch; - } - - @Override - public int hashCode() { - return Objects.hash(datafeedIds, timeout, force, allowNoMatch); - } - - @Override - public boolean equals(Object other) { - if (this == other) { - return true; - } - - if (other == null || getClass() != other.getClass()) { - return false; - } - - StopDatafeedRequest that = (StopDatafeedRequest) other; - return Objects.equals(datafeedIds, that.datafeedIds) - && Objects.equals(timeout, that.timeout) - && Objects.equals(force, that.force) - && Objects.equals(allowNoMatch, that.allowNoMatch); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(DatafeedConfig.ID.getPreferredName(), Strings.collectionToCommaDelimitedString(datafeedIds)); - if (timeout != null) { - builder.field(TIMEOUT.getPreferredName(), timeout.getStringRep()); - } - if (force != null) { - builder.field(FORCE.getPreferredName(), force); - } - if (allowNoMatch != null) { - builder.field(ALLOW_NO_MATCH.getPreferredName(), allowNoMatch); - } - builder.endObject(); - return builder; - } - - @Override - public String toString() { - return Strings.toString(this); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/StopDatafeedResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/StopDatafeedResponse.java deleted file mode 100644 index 864b9ea6d4127..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/StopDatafeedResponse.java +++ /dev/null @@ -1,81 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
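A sketch of the removed StopDatafeedRequest, covering both the explicit-IDs constructor and the _all helper shown above; the datafeed IDs are illustrative.

    StopDatafeedRequest request = new StopDatafeedRequest("datafeed-1", "datafeed-2");
    request.setTimeout(TimeValue.timeValueMinutes(30)); // matches the documented default
    request.setForce(true);                             // forcefully stop the datafeeds
    request.setAllowNoMatch(true);                      // tolerate wildcards matching nothing

    StopDatafeedRequest stopAll = StopDatafeedRequest.stopAllDatafeedsRequest(); // expands to "_all"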
- */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Objects; - -/** - * Response indicating if the Machine Learning Datafeed is now stopped or not - */ -public class StopDatafeedResponse implements ToXContentObject { - - private static final ParseField STOPPED = new ParseField("stopped"); - - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "stop_datafeed_response", - true, - (a) -> new StopDatafeedResponse((Boolean) a[0]) - ); - - static { - PARSER.declareBoolean(ConstructingObjectParser.constructorArg(), STOPPED); - } - - private final boolean stopped; - - public StopDatafeedResponse(boolean stopped) { - this.stopped = stopped; - } - - public static StopDatafeedResponse fromXContent(XContentParser parser) throws IOException { - return PARSER.parse(parser, null); - } - - /** - * Has the Datafeed stopped or not - * - * @return boolean value indicating the Datafeed stopped status - */ - public boolean isStopped() { - return stopped; - } - - @Override - public boolean equals(Object other) { - if (this == other) { - return true; - } - - if (other == null || getClass() != other.getClass()) { - return false; - } - - StopDatafeedResponse that = (StopDatafeedResponse) other; - return isStopped() == that.isStopped(); - } - - @Override - public int hashCode() { - return Objects.hash(isStopped()); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(STOPPED.getPreferredName(), stopped); - builder.endObject(); - return builder; - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/UpdateDataFrameAnalyticsRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/UpdateDataFrameAnalyticsRequest.java deleted file mode 100644 index f14d4b75687bd..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/UpdateDataFrameAnalyticsRequest.java +++ /dev/null @@ -1,65 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ - -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.Validatable; -import org.elasticsearch.client.ValidationException; -import org.elasticsearch.client.ml.dataframe.DataFrameAnalyticsConfigUpdate; -import org.elasticsearch.common.Strings; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Objects; -import java.util.Optional; - -public class UpdateDataFrameAnalyticsRequest implements ToXContentObject, Validatable { - - private final DataFrameAnalyticsConfigUpdate update; - - public UpdateDataFrameAnalyticsRequest(DataFrameAnalyticsConfigUpdate update) { - this.update = update; - } - - public DataFrameAnalyticsConfigUpdate getUpdate() { - return update; - } - - @Override - public Optional validate() { - if (update == null) { - return Optional.of(ValidationException.withError("update requires a non-null data frame analytics config update")); - } - return Optional.empty(); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - return update.toXContent(builder, params); - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - - UpdateDataFrameAnalyticsRequest other = (UpdateDataFrameAnalyticsRequest) o; - return Objects.equals(update, other.update); - } - - @Override - public int hashCode() { - return Objects.hash(update); - } - - @Override - public String toString() { - return Strings.toString(this); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/UpdateDatafeedRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/UpdateDatafeedRequest.java deleted file mode 100644 index d2de264c75411..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/UpdateDatafeedRequest.java +++ /dev/null @@ -1,64 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
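A sketch of the removed UpdateDataFrameAnalyticsRequest; the builder on DataFrameAnalyticsConfigUpdate is an assumption here, as that class is defined elsewhere and does not appear in this diff.

    // Hypothetical update object; DataFrameAnalyticsConfigUpdate's builder API is assumed.
    DataFrameAnalyticsConfigUpdate update = DataFrameAnalyticsConfigUpdate.builder()
        .setId("my-analytics")
        .setDescription("updated description")
        .build();
    UpdateDataFrameAnalyticsRequest request = new UpdateDataFrameAnalyticsRequest(update); // null update fails validate()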
- */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.Validatable; -import org.elasticsearch.client.ml.datafeed.DatafeedUpdate; -import org.elasticsearch.common.Strings; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Objects; - -/** - * Requests an update to a {@link org.elasticsearch.client.ml.datafeed.DatafeedConfig} with the passed {@link DatafeedUpdate} - * settings - */ -public class UpdateDatafeedRequest implements Validatable, ToXContentObject { - - private final DatafeedUpdate update; - - public UpdateDatafeedRequest(DatafeedUpdate update) { - this.update = update; - } - - public DatafeedUpdate getDatafeedUpdate() { - return update; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - return update.toXContent(builder, params); - } - - @Override - public boolean equals(Object o) { - if (this == o) { - return true; - } - - if (o == null || getClass() != o.getClass()) { - return false; - } - - UpdateDatafeedRequest that = (UpdateDatafeedRequest) o; - return Objects.equals(update, that.update); - } - - @Override - public int hashCode() { - return Objects.hash(update); - } - - @Override - public final String toString() { - return Strings.toString(this); - } - -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/UpdateFilterRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/UpdateFilterRequest.java deleted file mode 100644 index 3a76432715067..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/UpdateFilterRequest.java +++ /dev/null @@ -1,140 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
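A sketch of the removed UpdateDatafeedRequest; DatafeedUpdate.Builder is an assumption here, as it is defined elsewhere and not shown in this diff.

    // Hypothetical: DatafeedUpdate.Builder is assumed, not shown in this diff.
    DatafeedUpdate update = new DatafeedUpdate.Builder("my-datafeed")
        .setQueryDelay(TimeValue.timeValueMinutes(2))
        .build();
    UpdateDatafeedRequest request = new UpdateDatafeedRequest(update);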
- */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.Validatable; -import org.elasticsearch.client.ml.job.config.MlFilter; -import org.elasticsearch.common.Strings; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Collection; -import java.util.Objects; -import java.util.SortedSet; -import java.util.TreeSet; - -/** - * Updates an existing {@link MlFilter} configuration - */ -public class UpdateFilterRequest implements Validatable, ToXContentObject { - - public static final ParseField ADD_ITEMS = new ParseField("add_items"); - public static final ParseField REMOVE_ITEMS = new ParseField("remove_items"); - - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "update_filter_request", - (a) -> new UpdateFilterRequest((String) a[0]) - ); - - static { - PARSER.declareString(ConstructingObjectParser.constructorArg(), MlFilter.ID); - PARSER.declareStringOrNull(UpdateFilterRequest::setDescription, MlFilter.DESCRIPTION); - PARSER.declareStringArray(UpdateFilterRequest::setAddItems, ADD_ITEMS); - PARSER.declareStringArray(UpdateFilterRequest::setRemoveItems, REMOVE_ITEMS); - } - - private String filterId; - private String description; - private SortedSet addItems; - private SortedSet removeItems; - - /** - * Construct a new request referencing a non-null, existing filter_id - * @param filterId Id referencing the filter to update - */ - public UpdateFilterRequest(String filterId) { - this.filterId = Objects.requireNonNull(filterId, "[" + MlFilter.ID.getPreferredName() + "] must not be null"); - } - - public String getFilterId() { - return filterId; - } - - public String getDescription() { - return description; - } - - /** - * The new description of the filter - * @param description the updated filter description - */ - public void setDescription(String description) { - this.description = description; - } - - public SortedSet getAddItems() { - return addItems; - } - - /** - * The collection of items to add to the filter - * @param addItems non-null items to add to the filter, defaults to empty array - */ - public void setAddItems(Collection addItems) { - this.addItems = new TreeSet<>(Objects.requireNonNull(addItems, "[" + ADD_ITEMS.getPreferredName() + "] must not be null")); - } - - public SortedSet getRemoveItems() { - return removeItems; - } - - /** - * The collection of items to remove from the filter - * @param removeItems non-null items to remove from the filter, defaults to empty array - */ - public void setRemoveItems(Collection removeItems) { - this.removeItems = new TreeSet<>(Objects.requireNonNull(removeItems, "[" + REMOVE_ITEMS.getPreferredName() + "] must not be null")); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(MlFilter.ID.getPreferredName(), filterId); - if (description != null) { - builder.field(MlFilter.DESCRIPTION.getPreferredName(), description); - } - if (addItems != null) { - builder.stringListField(ADD_ITEMS.getPreferredName(), addItems); - } - if (removeItems != null) { - builder.stringListField(REMOVE_ITEMS.getPreferredName(), removeItems); - } - builder.endObject(); - return builder; - } - - @Override - public int hashCode() { - return Objects.hash(filterId, description, addItems, removeItems); - 
} - - @Override - public boolean equals(Object obj) { - if (obj == this) { - return true; - } - if (obj == null || getClass() != obj.getClass()) { - return false; - } - - UpdateFilterRequest other = (UpdateFilterRequest) obj; - return Objects.equals(filterId, other.filterId) - && Objects.equals(description, other.description) - && Objects.equals(addItems, other.addItems) - && Objects.equals(removeItems, other.removeItems); - } - - @Override - public final String toString() { - return Strings.toString(this); - } - -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/UpdateJobRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/UpdateJobRequest.java deleted file mode 100644 index aa46d5677c77d..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/UpdateJobRequest.java +++ /dev/null @@ -1,64 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.Validatable; -import org.elasticsearch.client.ml.job.config.JobUpdate; -import org.elasticsearch.common.Strings; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Objects; - -/** - * Updates a {@link org.elasticsearch.client.ml.job.config.Job} with the passed {@link JobUpdate} - * settings - */ -public class UpdateJobRequest implements Validatable, ToXContentObject { - - private final JobUpdate update; - - public UpdateJobRequest(JobUpdate update) { - this.update = update; - } - - public JobUpdate getJobUpdate() { - return update; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - return update.toXContent(builder, params); - } - - @Override - public boolean equals(Object o) { - if (this == o) { - return true; - } - - if (o == null || getClass() != o.getClass()) { - return false; - } - - UpdateJobRequest that = (UpdateJobRequest) o; - return Objects.equals(update, that.update); - } - - @Override - public int hashCode() { - return Objects.hash(update); - } - - @Override - public final String toString() { - return Strings.toString(this); - } - -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/UpdateModelSnapshotRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/UpdateModelSnapshotRequest.java deleted file mode 100644 index a06880369f6fa..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/UpdateModelSnapshotRequest.java +++ /dev/null @@ -1,118 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
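A sketch of the removed UpdateFilterRequest, using only members shown above (java.util.Arrays and java.util.Collections imports assumed); the removed UpdateJobRequest follows the same thin-wrapper pattern around a JobUpdate.

    UpdateFilterRequest request = new UpdateFilterRequest("my-filter"); // filter_id must not be null
    request.setDescription("safe domains");
    request.setAddItems(Arrays.asList("okay.com", "fine.org"));  // stored as a sorted set
    request.setRemoveItems(Collections.singleton("bad.com"));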
- */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.Validatable; -import org.elasticsearch.client.ml.job.config.Job; -import org.elasticsearch.client.ml.job.process.ModelSnapshot; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Objects; - -/** - * A request to update information about an existing model snapshot for a given job - */ -public class UpdateModelSnapshotRequest implements Validatable, ToXContentObject { - - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "update_model_snapshot_request", - a -> new UpdateModelSnapshotRequest((String) a[0], (String) a[1]) - ); - - static { - PARSER.declareString(ConstructingObjectParser.constructorArg(), Job.ID); - PARSER.declareString(ConstructingObjectParser.constructorArg(), ModelSnapshot.SNAPSHOT_ID); - PARSER.declareStringOrNull(UpdateModelSnapshotRequest::setDescription, ModelSnapshot.DESCRIPTION); - PARSER.declareBoolean(UpdateModelSnapshotRequest::setRetain, ModelSnapshot.RETAIN); - } - - private final String jobId; - private String snapshotId; - private String description; - private Boolean retain; - - /** - * Constructs a request to update information for a snapshot of given job - * @param jobId id of the job from which to retrieve results - * @param snapshotId id of the snapshot from which to retrieve results - */ - public UpdateModelSnapshotRequest(String jobId, String snapshotId) { - this.jobId = Objects.requireNonNull(jobId, "[" + Job.ID + "] must not be null"); - this.snapshotId = Objects.requireNonNull(snapshotId, "[" + ModelSnapshot.SNAPSHOT_ID + "] must not be null"); - } - - public String getJobId() { - return jobId; - } - - public String getSnapshotId() { - return snapshotId; - } - - public String getDescription() { - return description; - } - - /** - * The new description of the snapshot. 
- * @param description the updated snapshot description - */ - public void setDescription(String description) { - this.description = description; - } - - public Boolean getRetain() { - return retain; - } - - /** - * The new value of the "retain" property of the snapshot - * @param retain the updated retain property - */ - public void setRetain(boolean retain) { - this.retain = retain; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(Job.ID.getPreferredName(), jobId); - builder.field(ModelSnapshot.SNAPSHOT_ID.getPreferredName(), snapshotId); - if (description != null) { - builder.field(ModelSnapshot.DESCRIPTION.getPreferredName(), description); - } - if (retain != null) { - builder.field(ModelSnapshot.RETAIN.getPreferredName(), retain); - } - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object obj) { - if (obj == null) { - return false; - } - if (getClass() != obj.getClass()) { - return false; - } - UpdateModelSnapshotRequest request = (UpdateModelSnapshotRequest) obj; - return Objects.equals(jobId, request.jobId) - && Objects.equals(snapshotId, request.snapshotId) - && Objects.equals(description, request.description) - && Objects.equals(retain, request.retain); - } - - @Override - public int hashCode() { - return Objects.hash(jobId, snapshotId, description, retain); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/UpdateModelSnapshotResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/UpdateModelSnapshotResponse.java deleted file mode 100644 index 82b73fe260aa9..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/UpdateModelSnapshotResponse.java +++ /dev/null @@ -1,97 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
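A sketch of the removed UpdateModelSnapshotRequest; the job and snapshot IDs are illustrative.

    UpdateModelSnapshotRequest request = new UpdateModelSnapshotRequest("my-job", "1541587919"); // both IDs must be non-null
    request.setDescription("snapshot taken before upgrade");
    request.setRetain(true); // new value for the snapshot's "retain" property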
- */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.ml.job.process.ModelSnapshot; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContent; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Objects; - -/** - * A response acknowledging the update of information for an existing model snapshot for a given job - */ -public class UpdateModelSnapshotResponse implements ToXContentObject { - - private static final ParseField ACKNOWLEDGED = new ParseField("acknowledged"); - private static final ParseField MODEL = new ParseField("model"); - - public UpdateModelSnapshotResponse(boolean acknowledged, ModelSnapshot.Builder modelSnapshot) { - this.acknowledged = acknowledged; - this.model = modelSnapshot.build(); - } - - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "update_model_snapshot_response", - true, - a -> new UpdateModelSnapshotResponse((Boolean) a[0], ((ModelSnapshot.Builder) a[1])) - ); - - static { - PARSER.declareBoolean(ConstructingObjectParser.constructorArg(), ACKNOWLEDGED); - PARSER.declareObject(ConstructingObjectParser.constructorArg(), ModelSnapshot.PARSER, MODEL); - } - - public static UpdateModelSnapshotResponse fromXContent(XContentParser parser) throws IOException { - return PARSER.parse(parser, null); - } - - private final Boolean acknowledged; - private final ModelSnapshot model; - - /** - * Get the action acknowledgement - * @return a {@code boolean} that indicates whether the model snapshot was updated successfully. - */ - public Boolean getAcknowledged() { - return acknowledged; - } - - /** - * Get the updated snapshot of the model - * @return the updated model snapshot. - */ - public ModelSnapshot getModel() { - return model; - } - - @Override - public int hashCode() { - return Objects.hash(acknowledged, model); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { - builder.startObject(); - if (acknowledged != null) { - builder.field(ACKNOWLEDGED.getPreferredName(), acknowledged); - } - if (model != null) { - builder.field(MODEL.getPreferredName(), model); - } - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object obj) { - if (obj == null) { - return false; - } - if (getClass() != obj.getClass()) { - return false; - } - UpdateModelSnapshotResponse request = (UpdateModelSnapshotResponse) obj; - return Objects.equals(acknowledged, request.acknowledged) && Objects.equals(model, request.model); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/UpgradeJobModelSnapshotRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/UpgradeJobModelSnapshotRequest.java deleted file mode 100644 index cc1660ed4dc6b..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/UpgradeJobModelSnapshotRequest.java +++ /dev/null @@ -1,108 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.client.Validatable; -import org.elasticsearch.client.ml.job.config.Job; -import org.elasticsearch.core.TimeValue; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Objects; - -public class UpgradeJobModelSnapshotRequest implements Validatable, ToXContentObject { - - public static final ParseField SNAPSHOT_ID = new ParseField("snapshot_id"); - public static final ParseField TIMEOUT = new ParseField("timeout"); - public static final ParseField WAIT_FOR_COMPLETION = new ParseField("wait_for_completion"); - - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "upgrade_job_snapshot_request", - true, - a -> new UpgradeJobModelSnapshotRequest((String) a[0], (String) a[1], (String) a[2], (Boolean) a[3]) - ); - - static { - PARSER.declareString(ConstructingObjectParser.constructorArg(), Job.ID); - PARSER.declareString(ConstructingObjectParser.constructorArg(), SNAPSHOT_ID); - PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), TIMEOUT); - PARSER.declareBoolean(ConstructingObjectParser.optionalConstructorArg(), WAIT_FOR_COMPLETION); - } - - private final String jobId; - private final String snapshotId; - private final TimeValue timeout; - private final Boolean waitForCompletion; - - UpgradeJobModelSnapshotRequest(String jobId, String snapshotId, String timeout, Boolean waitForCompletion) { - this(jobId, snapshotId, timeout == null ? null : TimeValue.parseTimeValue(timeout, TIMEOUT.getPreferredName()), waitForCompletion); - } - - public UpgradeJobModelSnapshotRequest(String jobId, String snapshotId, TimeValue timeValue, Boolean waitForCompletion) { - this.jobId = Objects.requireNonNull(jobId, Job.ID.getPreferredName()); - this.snapshotId = Objects.requireNonNull(snapshotId, SNAPSHOT_ID.getPreferredName()); - this.timeout = timeValue; - this.waitForCompletion = waitForCompletion; - } - - public static UpgradeJobModelSnapshotRequest fromXContent(XContentParser parser) { - return PARSER.apply(parser, null); - } - - public String getJobId() { - return jobId; - } - - public String getSnapshotId() { - return snapshotId; - } - - public TimeValue getTimeout() { - return timeout; - } - - public Boolean getWaitForCompletion() { - return waitForCompletion; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - UpgradeJobModelSnapshotRequest request = (UpgradeJobModelSnapshotRequest) o; - return Objects.equals(jobId, request.jobId) - && Objects.equals(timeout, request.timeout) - && Objects.equals(waitForCompletion, request.waitForCompletion) - && Objects.equals(snapshotId, request.snapshotId); - } - - @Override - public int hashCode() { - return Objects.hash(jobId, snapshotId, timeout, waitForCompletion); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(Job.ID.getPreferredName(), jobId); - builder.field(SNAPSHOT_ID.getPreferredName(), snapshotId); - if (timeout != null) { - builder.field(TIMEOUT.getPreferredName(), timeout.getStringRep()); - } - if (waitForCompletion != null) { - builder.field(WAIT_FOR_COMPLETION.getPreferredName(), 
waitForCompletion); - } - builder.endObject(); - return builder; - } - -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/UpgradeJobModelSnapshotResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/UpgradeJobModelSnapshotResponse.java deleted file mode 100644 index b260bbaa5d22b..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/UpgradeJobModelSnapshotResponse.java +++ /dev/null @@ -1,89 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml; - -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Objects; - -public class UpgradeJobModelSnapshotResponse implements ToXContentObject { - - private static final ParseField COMPLETED = new ParseField("completed"); - private static final ParseField NODE = new ParseField("node"); - - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "upgrade_job_snapshot_response", - true, - (a) -> new UpgradeJobModelSnapshotResponse((Boolean) a[0], (String) a[1]) - ); - - static { - PARSER.declareBoolean(ConstructingObjectParser.optionalConstructorArg(), COMPLETED); - PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), NODE); - } - - private final boolean completed; - private final String node; - - public UpgradeJobModelSnapshotResponse(Boolean opened, String node) { - this.completed = opened != null && opened; - this.node = node; - } - - public static UpgradeJobModelSnapshotResponse fromXContent(XContentParser parser) throws IOException { - return PARSER.parse(parser, null); - } - - public boolean isCompleted() { - return completed; - } - - /** - * The node that the job was assigned to - * - * @return The ID of a node if the job was assigned to a node. 
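A sketch of the removed UpgradeJobModelSnapshotRequest via the public constructor shown above; the IDs are illustrative.

    UpgradeJobModelSnapshotRequest request = new UpgradeJobModelSnapshotRequest(
        "my-job",                        // job_id, must be non-null
        "1541587919",                    // snapshot_id, must be non-null
        TimeValue.timeValueMinutes(30),  // optional timeout
        true                             // optionally wait for completion
    );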
- */ - public String getNode() { - return node; - } - - @Override - public boolean equals(Object other) { - if (this == other) { - return true; - } - - if (other == null || getClass() != other.getClass()) { - return false; - } - - UpgradeJobModelSnapshotResponse that = (UpgradeJobModelSnapshotResponse) other; - return completed == that.completed && Objects.equals(node, that.node); - } - - @Override - public int hashCode() { - return Objects.hash(completed, node); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(COMPLETED.getPreferredName(), completed); - if (node != null) { - builder.field(NODE.getPreferredName(), node); - } - builder.endObject(); - return builder; - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/calendars/Calendar.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/calendars/Calendar.java deleted file mode 100644 index da7c0ac2cac52..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/calendars/Calendar.java +++ /dev/null @@ -1,106 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.calendars; - -import org.elasticsearch.core.Nullable; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Collections; -import java.util.List; -import java.util.Objects; - -/** - * A simple calendar object for scheduled (special) events. - * The calendar consists of a name and a list of job Ids or job groups; - * the events are stored separately and reference the calendar.
- */ -public class Calendar implements ToXContentObject { - - public static final String CALENDAR_TYPE = "calendar"; - - public static final ParseField JOB_IDS = new ParseField("job_ids"); - public static final ParseField ID = new ParseField("calendar_id"); - public static final ParseField DESCRIPTION = new ParseField("description"); - - @SuppressWarnings("unchecked") - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - CALENDAR_TYPE, - true, - a -> new Calendar((String) a[0], (List) a[1], (String) a[2]) - ); - - static { - PARSER.declareString(ConstructingObjectParser.constructorArg(), ID); - PARSER.declareStringArray(ConstructingObjectParser.constructorArg(), JOB_IDS); - PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), DESCRIPTION); - } - - private final String id; - private final List jobIds; - private final String description; - - /** - * {@code jobIds} can be a mix of job groups and job Ids - * @param id The calendar Id - * @param jobIds List of job Ids or job groups - * @param description An optional description - */ - public Calendar(String id, List jobIds, @Nullable String description) { - this.id = Objects.requireNonNull(id, ID.getPreferredName() + " must not be null"); - this.jobIds = Collections.unmodifiableList(Objects.requireNonNull(jobIds, JOB_IDS.getPreferredName() + " must not be null")); - this.description = description; - } - - public String getId() { - return id; - } - - public List getJobIds() { - return jobIds; - } - - @Nullable - public String getDescription() { - return description; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(ID.getPreferredName(), id); - builder.stringListField(JOB_IDS.getPreferredName(), jobIds); - if (description != null) { - builder.field(DESCRIPTION.getPreferredName(), description); - } - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object obj) { - if (obj == this) { - return true; - } - - if (obj == null || getClass() != obj.getClass()) { - return false; - } - - Calendar other = (Calendar) obj; - return id.equals(other.id) && jobIds.equals(other.jobIds) && Objects.equals(description, other.description); - } - - @Override - public int hashCode() { - return Objects.hash(id, jobIds, description); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/calendars/ScheduledEvent.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/calendars/ScheduledEvent.java deleted file mode 100644 index 8aecc33d32b1a..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/calendars/ScheduledEvent.java +++ /dev/null @@ -1,123 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
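A sketch of the removed Calendar class in use, matching the constructor shown above; the values are illustrative.

    Calendar calendar = new Calendar(
        "planned-outages",                     // calendar_id, must not be null
        Arrays.asList("job-1", "job-group-a"), // a mix of job IDs and job groups
        "scheduled maintenance windows"        // optional description, may be null
    );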
- */ -package org.elasticsearch.client.ml.calendars; - -import org.elasticsearch.client.common.TimeUtil; -import org.elasticsearch.core.Nullable; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Date; -import java.util.Objects; - -public class ScheduledEvent implements ToXContentObject { - - public static final ParseField DESCRIPTION = new ParseField("description"); - public static final ParseField START_TIME = new ParseField("start_time"); - public static final ParseField END_TIME = new ParseField("end_time"); - public static final ParseField EVENT_ID = new ParseField("event_id"); - public static final String SCHEDULED_EVENT_TYPE = "scheduled_event"; - - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - SCHEDULED_EVENT_TYPE, - true, - a -> new ScheduledEvent((String) a[0], (Date) a[1], (Date) a[2], (String) a[3], (String) a[4]) - ); - - static { - PARSER.declareString(ConstructingObjectParser.constructorArg(), DESCRIPTION); - PARSER.declareField( - ConstructingObjectParser.constructorArg(), - (p) -> TimeUtil.parseTimeField(p, START_TIME.getPreferredName()), - START_TIME, - ObjectParser.ValueType.VALUE - ); - PARSER.declareField( - ConstructingObjectParser.constructorArg(), - (p) -> TimeUtil.parseTimeField(p, END_TIME.getPreferredName()), - END_TIME, - ObjectParser.ValueType.VALUE - ); - PARSER.declareString(ConstructingObjectParser.constructorArg(), Calendar.ID); - PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), EVENT_ID); - } - - private final String description; - private final Date startTime; - private final Date endTime; - private final String calendarId; - private final String eventId; - - ScheduledEvent(String description, Date startTime, Date endTime, String calendarId, @Nullable String eventId) { - this.description = Objects.requireNonNull(description); - this.startTime = Objects.requireNonNull(startTime); - this.endTime = Objects.requireNonNull(endTime); - this.calendarId = Objects.requireNonNull(calendarId); - this.eventId = eventId; - } - - public String getDescription() { - return description; - } - - public Date getStartTime() { - return startTime; - } - - public Date getEndTime() { - return endTime; - } - - public String getCalendarId() { - return calendarId; - } - - public String getEventId() { - return eventId; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(DESCRIPTION.getPreferredName(), description); - builder.timeField(START_TIME.getPreferredName(), START_TIME.getPreferredName() + "_string", startTime.getTime()); - builder.timeField(END_TIME.getPreferredName(), END_TIME.getPreferredName() + "_string", endTime.getTime()); - builder.field(Calendar.ID.getPreferredName(), calendarId); - if (eventId != null) { - builder.field(EVENT_ID.getPreferredName(), eventId); - } - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object obj) { - if (obj == this) { - return true; - } - - if (obj == null || getClass() != obj.getClass()) { - return false; - } - - ScheduledEvent other = (ScheduledEvent) obj; - return Objects.equals(this.description, other.description) - && Objects.equals(this.startTime, other.startTime) - && 
Objects.equals(this.endTime, other.endTime) - && Objects.equals(this.calendarId, other.calendarId); - } - - @Override - public int hashCode() { - return Objects.hash(description, startTime, endTime, calendarId); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/datafeed/ChunkingConfig.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/datafeed/ChunkingConfig.java deleted file mode 100644 index 5f23f2478c070..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/datafeed/ChunkingConfig.java +++ /dev/null @@ -1,118 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.datafeed; - -import org.elasticsearch.core.Nullable; -import org.elasticsearch.core.TimeValue; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Locale; -import java.util.Objects; - -/** - * The description of how searches should be chunked. - */ -public class ChunkingConfig implements ToXContentObject { - - public static final ParseField MODE_FIELD = new ParseField("mode"); - public static final ParseField TIME_SPAN_FIELD = new ParseField("time_span"); - - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "chunking_config", - true, - a -> new ChunkingConfig((Mode) a[0], (TimeValue) a[1]) - ); - - static { - PARSER.declareString(ConstructingObjectParser.constructorArg(), Mode::fromString, MODE_FIELD); - PARSER.declareString( - ConstructingObjectParser.optionalConstructorArg(), - text -> TimeValue.parseTimeValue(text, TIME_SPAN_FIELD.getPreferredName()), - TIME_SPAN_FIELD - ); - - } - - private final Mode mode; - private final TimeValue timeSpan; - - ChunkingConfig(Mode mode, @Nullable TimeValue timeSpan) { - this.mode = Objects.requireNonNull(mode, MODE_FIELD.getPreferredName()); - this.timeSpan = timeSpan; - } - - @Nullable - public TimeValue getTimeSpan() { - return timeSpan; - } - - Mode getMode() { - return mode; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(MODE_FIELD.getPreferredName(), mode); - if (timeSpan != null) { - builder.field(TIME_SPAN_FIELD.getPreferredName(), timeSpan.getStringRep()); - } - builder.endObject(); - return builder; - } - - @Override - public int hashCode() { - return Objects.hash(mode, timeSpan); - } - - @Override - public boolean equals(Object obj) { - if (this == obj) { - return true; - } - - if (obj == null || getClass() != obj.getClass()) { - return false; - } - - ChunkingConfig other = (ChunkingConfig) obj; - return Objects.equals(this.mode, other.mode) && Objects.equals(this.timeSpan, other.timeSpan); - } - - public static ChunkingConfig newAuto() { - return new ChunkingConfig(Mode.AUTO, null); - } - - public static ChunkingConfig newOff() { - return new ChunkingConfig(Mode.OFF, null); - } - - public static ChunkingConfig newManual(TimeValue timeSpan) { - return new ChunkingConfig(Mode.MANUAL, timeSpan); - } 
- - public enum Mode { - AUTO, - MANUAL, - OFF; - - public static Mode fromString(String value) { - return Mode.valueOf(value.toUpperCase(Locale.ROOT)); - } - - @Override - public String toString() { - return name().toLowerCase(Locale.ROOT); - } - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/datafeed/DatafeedConfig.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/datafeed/DatafeedConfig.java deleted file mode 100644 index e1363239f4e44..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/datafeed/DatafeedConfig.java +++ /dev/null @@ -1,491 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.datafeed; - -import org.elasticsearch.action.support.IndicesOptions; -import org.elasticsearch.client.ml.job.config.Job; -import org.elasticsearch.common.bytes.BytesArray; -import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.xcontent.XContentHelper; -import org.elasticsearch.core.TimeValue; -import org.elasticsearch.index.query.QueryBuilder; -import org.elasticsearch.search.aggregations.AggregatorFactories; -import org.elasticsearch.search.builder.SearchSourceBuilder; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; -import org.elasticsearch.xcontent.XContentType; -import org.elasticsearch.xcontent.json.JsonXContent; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collections; -import java.util.Comparator; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Objects; - -/** - * The datafeed configuration object. It specifies which indices - * to get the data from and offers parameters for customizing different - * aspects of the process. 
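A sketch of the three factory methods on the removed ChunkingConfig shown above, one per chunking mode.

    ChunkingConfig auto = ChunkingConfig.newAuto();     // chunk sizes chosen automatically
    ChunkingConfig off = ChunkingConfig.newOff();       // no chunking
    ChunkingConfig manual = ChunkingConfig.newManual(   // fixed time span per chunk
        TimeValue.timeValueHours(1));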
- */ -public class DatafeedConfig implements ToXContentObject { - - public static final ParseField ID = new ParseField("datafeed_id"); - public static final ParseField QUERY_DELAY = new ParseField("query_delay"); - public static final ParseField FREQUENCY = new ParseField("frequency"); - public static final ParseField INDEXES = new ParseField("indexes"); - public static final ParseField INDICES = new ParseField("indices"); - public static final ParseField QUERY = new ParseField("query"); - public static final ParseField SCROLL_SIZE = new ParseField("scroll_size"); - public static final ParseField AGGREGATIONS = new ParseField("aggregations"); - public static final ParseField SCRIPT_FIELDS = new ParseField("script_fields"); - public static final ParseField CHUNKING_CONFIG = new ParseField("chunking_config"); - public static final ParseField DELAYED_DATA_CHECK_CONFIG = new ParseField("delayed_data_check_config"); - public static final ParseField MAX_EMPTY_SEARCHES = new ParseField("max_empty_searches"); - public static final ParseField INDICES_OPTIONS = new ParseField("indices_options"); - - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "datafeed_config", - true, - a -> new Builder((String) a[0], (String) a[1]) - ); - - static { - PARSER.declareString(ConstructingObjectParser.constructorArg(), ID); - PARSER.declareString(ConstructingObjectParser.constructorArg(), Job.ID); - - PARSER.declareStringArray(Builder::setIndices, INDEXES); - PARSER.declareStringArray(Builder::setIndices, INDICES); - PARSER.declareString( - (builder, val) -> builder.setQueryDelay(TimeValue.parseTimeValue(val, QUERY_DELAY.getPreferredName())), - QUERY_DELAY - ); - PARSER.declareString( - (builder, val) -> builder.setFrequency(TimeValue.parseTimeValue(val, FREQUENCY.getPreferredName())), - FREQUENCY - ); - PARSER.declareField(Builder::setQuery, DatafeedConfig::parseBytes, QUERY, ObjectParser.ValueType.OBJECT); - PARSER.declareField(Builder::setAggregations, DatafeedConfig::parseBytes, AGGREGATIONS, ObjectParser.ValueType.OBJECT); - PARSER.declareObject(Builder::setScriptFields, (p, c) -> { - List parsedScriptFields = new ArrayList<>(); - while (p.nextToken() != XContentParser.Token.END_OBJECT) { - parsedScriptFields.add(new SearchSourceBuilder.ScriptField(p)); - } - return parsedScriptFields; - }, SCRIPT_FIELDS); - PARSER.declareInt(Builder::setScrollSize, SCROLL_SIZE); - PARSER.declareObject(Builder::setChunkingConfig, ChunkingConfig.PARSER, CHUNKING_CONFIG); - PARSER.declareObject(Builder::setDelayedDataCheckConfig, DelayedDataCheckConfig.PARSER, DELAYED_DATA_CHECK_CONFIG); - PARSER.declareInt(Builder::setMaxEmptySearches, MAX_EMPTY_SEARCHES); - PARSER.declareObject( - Builder::setIndicesOptions, - (p, c) -> IndicesOptions.fromMap(p.map(), new IndicesOptions(IndicesOptions.Option.NONE, IndicesOptions.WildcardStates.NONE)), - INDICES_OPTIONS - ); - PARSER.declareObject(Builder::setRuntimeMappings, (p, c) -> p.map(), SearchSourceBuilder.RUNTIME_MAPPINGS_FIELD); - } - - private static BytesReference parseBytes(XContentParser parser) throws IOException { - XContentBuilder contentBuilder = JsonXContent.contentBuilder(); - contentBuilder.generator().copyCurrentStructure(parser); - return BytesReference.bytes(contentBuilder); - } - - private final String id; - private final String jobId; - private final TimeValue queryDelay; - private final TimeValue frequency; - private final List indices; - private final BytesReference query; - private final BytesReference aggregations; - private 
final List scriptFields; - private final Integer scrollSize; - private final ChunkingConfig chunkingConfig; - private final DelayedDataCheckConfig delayedDataCheckConfig; - private final Integer maxEmptySearches; - private final IndicesOptions indicesOptions; - private final Map runtimeMappings; - - private DatafeedConfig( - String id, - String jobId, - TimeValue queryDelay, - TimeValue frequency, - List indices, - BytesReference query, - BytesReference aggregations, - List scriptFields, - Integer scrollSize, - ChunkingConfig chunkingConfig, - DelayedDataCheckConfig delayedDataCheckConfig, - Integer maxEmptySearches, - IndicesOptions indicesOptions, - Map runtimeMappings - ) { - this.id = id; - this.jobId = jobId; - this.queryDelay = queryDelay; - this.frequency = frequency; - this.indices = indices == null ? null : Collections.unmodifiableList(indices); - this.query = query; - this.aggregations = aggregations; - this.scriptFields = scriptFields == null ? null : Collections.unmodifiableList(scriptFields); - this.scrollSize = scrollSize; - this.chunkingConfig = chunkingConfig; - this.delayedDataCheckConfig = delayedDataCheckConfig; - this.maxEmptySearches = maxEmptySearches; - this.indicesOptions = indicesOptions; - this.runtimeMappings = Collections.unmodifiableMap(runtimeMappings); - } - - public String getId() { - return id; - } - - public String getJobId() { - return jobId; - } - - public TimeValue getQueryDelay() { - return queryDelay; - } - - public TimeValue getFrequency() { - return frequency; - } - - public List getIndices() { - return indices; - } - - public Integer getScrollSize() { - return scrollSize; - } - - public BytesReference getQuery() { - return query; - } - - public BytesReference getAggregations() { - return aggregations; - } - - public List getScriptFields() { - return scriptFields == null ? 
Collections.emptyList() : scriptFields; - } - - public ChunkingConfig getChunkingConfig() { - return chunkingConfig; - } - - public DelayedDataCheckConfig getDelayedDataCheckConfig() { - return delayedDataCheckConfig; - } - - public Integer getMaxEmptySearches() { - return maxEmptySearches; - } - - public IndicesOptions getIndicesOptions() { - return indicesOptions; - } - - public Map getRuntimeMappings() { - return runtimeMappings; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(ID.getPreferredName(), id); - builder.field(Job.ID.getPreferredName(), jobId); - if (queryDelay != null) { - builder.field(QUERY_DELAY.getPreferredName(), queryDelay.getStringRep()); - } - if (frequency != null) { - builder.field(FREQUENCY.getPreferredName(), frequency.getStringRep()); - } - if (indices != null) { - builder.field(INDICES.getPreferredName(), indices); - } - if (query != null) { - builder.field(QUERY.getPreferredName(), asMap(query)); - } - if (aggregations != null) { - builder.field(AGGREGATIONS.getPreferredName(), asMap(aggregations)); - } - if (scriptFields != null) { - builder.startObject(SCRIPT_FIELDS.getPreferredName()); - for (SearchSourceBuilder.ScriptField scriptField : scriptFields) { - scriptField.toXContent(builder, params); - } - builder.endObject(); - } - if (scrollSize != null) { - builder.field(SCROLL_SIZE.getPreferredName(), scrollSize); - } - if (chunkingConfig != null) { - builder.field(CHUNKING_CONFIG.getPreferredName(), chunkingConfig); - } - if (delayedDataCheckConfig != null) { - builder.field(DELAYED_DATA_CHECK_CONFIG.getPreferredName(), delayedDataCheckConfig); - } - if (maxEmptySearches != null) { - builder.field(MAX_EMPTY_SEARCHES.getPreferredName(), maxEmptySearches); - } - if (indicesOptions != null) { - builder.startObject(INDICES_OPTIONS.getPreferredName()); - indicesOptions.toXContent(builder, params); - builder.endObject(); - } - if (runtimeMappings.isEmpty() == false) { - builder.field(SearchSourceBuilder.RUNTIME_MAPPINGS_FIELD.getPreferredName(), runtimeMappings); - } - - builder.endObject(); - return builder; - } - - private static Map asMap(BytesReference bytesReference) { - return bytesReference == null ? null : XContentHelper.convertToMap(bytesReference, true, XContentType.JSON).v2(); - } - - /** - * The lists of indices and types are compared for equality but they are not - * sorted first so this test could fail simply because the indices and types - * lists are in different orders. - * - * Also note this could be a heavy operation when a query or aggregations - * are set as we need to convert the bytes references into maps to correctly - * compare them. 
- */ - @Override - public boolean equals(Object other) { - if (this == other) { - return true; - } - - if (other == null || getClass() != other.getClass()) { - return false; - } - - DatafeedConfig that = (DatafeedConfig) other; - - return Objects.equals(this.id, that.id) - && Objects.equals(this.jobId, that.jobId) - && Objects.equals(this.frequency, that.frequency) - && Objects.equals(this.queryDelay, that.queryDelay) - && Objects.equals(this.indices, that.indices) - && Objects.equals(asMap(this.query), asMap(that.query)) - && Objects.equals(this.scrollSize, that.scrollSize) - && Objects.equals(asMap(this.aggregations), asMap(that.aggregations)) - && Objects.equals(this.scriptFields, that.scriptFields) - && Objects.equals(this.chunkingConfig, that.chunkingConfig) - && Objects.equals(this.delayedDataCheckConfig, that.delayedDataCheckConfig) - && Objects.equals(this.maxEmptySearches, that.maxEmptySearches) - && Objects.equals(this.indicesOptions, that.indicesOptions) - && Objects.equals(this.runtimeMappings, that.runtimeMappings); - } - - /** - * Note this could be a heavy operation when a query or aggregations - * are set as we need to convert the bytes references into maps to - * compute a stable hash code. - */ - @Override - public int hashCode() { - return Objects.hash( - id, - jobId, - frequency, - queryDelay, - indices, - asMap(query), - scrollSize, - asMap(aggregations), - scriptFields, - chunkingConfig, - delayedDataCheckConfig, - maxEmptySearches, - indicesOptions, - runtimeMappings - ); - } - - public static Builder builder(String id, String jobId) { - return new Builder(id, jobId); - } - - public static class Builder { - - private final String id; - private final String jobId; - private TimeValue queryDelay; - private TimeValue frequency; - private List indices; - private BytesReference query; - private BytesReference aggregations; - private List scriptFields; - private Integer scrollSize; - private ChunkingConfig chunkingConfig; - private DelayedDataCheckConfig delayedDataCheckConfig; - private Integer maxEmptySearches; - private IndicesOptions indicesOptions; - private Map runtimeMappings = Collections.emptyMap(); - - public Builder(String id, String jobId) { - this.id = Objects.requireNonNull(id, ID.getPreferredName()); - this.jobId = Objects.requireNonNull(jobId, Job.ID.getPreferredName()); - } - - public Builder(DatafeedConfig config) { - this.id = config.id; - this.jobId = config.jobId; - this.queryDelay = config.queryDelay; - this.frequency = config.frequency; - this.indices = config.indices == null ? null : new ArrayList<>(config.indices); - this.query = config.query; - this.aggregations = config.aggregations; - this.scriptFields = config.scriptFields == null ? null : new ArrayList<>(config.scriptFields); - this.scrollSize = config.scrollSize; - this.chunkingConfig = config.chunkingConfig; - this.delayedDataCheckConfig = config.getDelayedDataCheckConfig(); - this.maxEmptySearches = config.getMaxEmptySearches(); - this.indicesOptions = config.indicesOptions; - this.runtimeMappings = new HashMap<>(config.runtimeMappings); - } - - public Builder setIndices(List indices) { - this.indices = Objects.requireNonNull(indices, INDICES.getPreferredName()); - return this; - } - - public Builder setIndices(String... 
indices) { - return setIndices(Arrays.asList(indices)); - } - - public Builder setQueryDelay(TimeValue queryDelay) { - this.queryDelay = queryDelay; - return this; - } - - public Builder setFrequency(TimeValue frequency) { - this.frequency = frequency; - return this; - } - - private Builder setQuery(BytesReference query) { - this.query = query; - return this; - } - - public Builder setQuery(String queryAsJson) { - this.query = queryAsJson == null ? null : new BytesArray(queryAsJson); - return this; - } - - public Builder setQuery(QueryBuilder query) throws IOException { - this.query = query == null ? null : xContentToBytes(query); - return this; - } - - private Builder setAggregations(BytesReference aggregations) { - this.aggregations = aggregations; - return this; - } - - public Builder setAggregations(String aggsAsJson) { - this.aggregations = aggsAsJson == null ? null : new BytesArray(aggsAsJson); - return this; - } - - public Builder setAggregations(AggregatorFactories.Builder aggregations) throws IOException { - this.aggregations = aggregations == null ? null : xContentToBytes(aggregations); - return this; - } - - public Builder setScriptFields(List scriptFields) { - List sorted = new ArrayList<>(scriptFields); - sorted.sort(Comparator.comparing(SearchSourceBuilder.ScriptField::fieldName)); - this.scriptFields = sorted; - return this; - } - - public Builder setScrollSize(int scrollSize) { - this.scrollSize = scrollSize; - return this; - } - - public Builder setChunkingConfig(ChunkingConfig chunkingConfig) { - this.chunkingConfig = chunkingConfig; - return this; - } - - /** - * This sets the {@link DelayedDataCheckConfig} settings. - * - * See {@link DelayedDataCheckConfig} for more information. - * - * @param delayedDataCheckConfig the delayed data check configuration - * Default value is enabled, with `check_window` being null. This means the true window is - * calculated when the real-time Datafeed runs. 
- */ - public Builder setDelayedDataCheckConfig(DelayedDataCheckConfig delayedDataCheckConfig) { - this.delayedDataCheckConfig = delayedDataCheckConfig; - return this; - } - - public Builder setMaxEmptySearches(int maxEmptySearches) { - this.maxEmptySearches = maxEmptySearches; - return this; - } - - public Builder setIndicesOptions(IndicesOptions indicesOptions) { - this.indicesOptions = indicesOptions; - return this; - } - - public Builder setRuntimeMappings(Map runtimeMappings) { - this.runtimeMappings = Objects.requireNonNull(runtimeMappings, SearchSourceBuilder.RUNTIME_MAPPINGS_FIELD.getPreferredName()); - return this; - } - - public DatafeedConfig build() { - return new DatafeedConfig( - id, - jobId, - queryDelay, - frequency, - indices, - query, - aggregations, - scriptFields, - scrollSize, - chunkingConfig, - delayedDataCheckConfig, - maxEmptySearches, - indicesOptions, - runtimeMappings - ); - } - - private static BytesReference xContentToBytes(ToXContentObject object) throws IOException { - try (XContentBuilder builder = JsonXContent.contentBuilder()) { - object.toXContent(builder, ToXContentObject.EMPTY_PARAMS); - return BytesReference.bytes(builder); - } - } - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/datafeed/DatafeedState.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/datafeed/DatafeedState.java deleted file mode 100644 index 4d309c31ab375..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/datafeed/DatafeedState.java +++ /dev/null @@ -1,34 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.datafeed; - -import org.elasticsearch.xcontent.ParseField; - -import java.util.Locale; - -/** - * Datafeed State POJO - */ -public enum DatafeedState { - - STARTED, - STOPPED, - STARTING, - STOPPING; - - public static final ParseField STATE = new ParseField("state"); - - public static DatafeedState fromString(String name) { - return valueOf(name.trim().toUpperCase(Locale.ROOT)); - } - - @Override - public String toString() { - return name().toLowerCase(Locale.ROOT); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/datafeed/DatafeedStats.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/datafeed/DatafeedStats.java deleted file mode 100644 index b218f749a10f3..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/datafeed/DatafeedStats.java +++ /dev/null @@ -1,142 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ -package org.elasticsearch.client.ml.datafeed; - -import org.elasticsearch.client.ml.NodeAttributes; -import org.elasticsearch.core.Nullable; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContent; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Map; -import java.util.Objects; - -/** - * Datafeed Statistics POJO - */ -public class DatafeedStats implements ToXContentObject { - - private final String datafeedId; - private final DatafeedState datafeedState; - @Nullable - private final NodeAttributes node; - @Nullable - private final String assignmentExplanation; - @Nullable - private final DatafeedTimingStats timingStats; - - public static final ParseField ASSIGNMENT_EXPLANATION = new ParseField("assignment_explanation"); - public static final ParseField NODE = new ParseField("node"); - public static final ParseField TIMING_STATS = new ParseField("timing_stats"); - - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>("datafeed_stats", true, a -> { - String datafeedId = (String) a[0]; - DatafeedState datafeedState = DatafeedState.fromString((String) a[1]); - NodeAttributes nodeAttributes = (NodeAttributes) a[2]; - String assignmentExplanation = (String) a[3]; - DatafeedTimingStats timingStats = (DatafeedTimingStats) a[4]; - return new DatafeedStats(datafeedId, datafeedState, nodeAttributes, assignmentExplanation, timingStats); - }); - - static { - PARSER.declareString(ConstructingObjectParser.constructorArg(), DatafeedConfig.ID); - PARSER.declareString(ConstructingObjectParser.constructorArg(), DatafeedState.STATE); - PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(), NodeAttributes.PARSER, NODE); - PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), ASSIGNMENT_EXPLANATION); - PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(), DatafeedTimingStats.PARSER, TIMING_STATS); - } - - public DatafeedStats( - String datafeedId, - DatafeedState datafeedState, - @Nullable NodeAttributes node, - @Nullable String assignmentExplanation, - @Nullable DatafeedTimingStats timingStats - ) { - this.datafeedId = Objects.requireNonNull(datafeedId); - this.datafeedState = Objects.requireNonNull(datafeedState); - this.node = node; - this.assignmentExplanation = assignmentExplanation; - this.timingStats = timingStats; - } - - public String getDatafeedId() { - return datafeedId; - } - - public DatafeedState getDatafeedState() { - return datafeedState; - } - - public NodeAttributes getNode() { - return node; - } - - public String getAssignmentExplanation() { - return assignmentExplanation; - } - - public DatafeedTimingStats getDatafeedTimingStats() { - return timingStats; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { - builder.startObject(); - builder.field(DatafeedConfig.ID.getPreferredName(), datafeedId); - builder.field(DatafeedState.STATE.getPreferredName(), datafeedState.toString()); - if (node != null) { - builder.startObject("node"); - builder.field("id", node.getId()); - builder.field("name", node.getName()); - builder.field("ephemeral_id", node.getEphemeralId()); - builder.field("transport_address", node.getTransportAddress()); - - builder.startObject("attributes"); - for (Map.Entry entry : node.getAttributes().entrySet()) { - if 
(entry.getKey().startsWith("ml.")) { - builder.field(entry.getKey(), entry.getValue()); - } - } - builder.endObject(); - builder.endObject(); - } - if (assignmentExplanation != null) { - builder.field(ASSIGNMENT_EXPLANATION.getPreferredName(), assignmentExplanation); - } - if (timingStats != null) { - builder.field(TIMING_STATS.getPreferredName(), timingStats); - } - builder.endObject(); - return builder; - } - - @Override - public int hashCode() { - return Objects.hash(datafeedId, datafeedState.toString(), node, assignmentExplanation, timingStats); - } - - @Override - public boolean equals(Object obj) { - if (obj == null) { - return false; - } - if (getClass() != obj.getClass()) { - return false; - } - DatafeedStats other = (DatafeedStats) obj; - return Objects.equals(datafeedId, other.datafeedId) - && Objects.equals(this.datafeedState, other.datafeedState) - && Objects.equals(this.node, other.node) - && Objects.equals(this.assignmentExplanation, other.assignmentExplanation) - && Objects.equals(this.timingStats, other.timingStats); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/datafeed/DatafeedTimingStats.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/datafeed/DatafeedTimingStats.java deleted file mode 100644 index 4aa464228f0e8..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/datafeed/DatafeedTimingStats.java +++ /dev/null @@ -1,166 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ -package org.elasticsearch.client.ml.datafeed; - -import org.elasticsearch.common.Strings; -import org.elasticsearch.core.Nullable; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContent; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Objects; - -import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; -import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; - -public class DatafeedTimingStats implements ToXContentObject { - - public static final ParseField JOB_ID = new ParseField("job_id"); - public static final ParseField SEARCH_COUNT = new ParseField("search_count"); - public static final ParseField BUCKET_COUNT = new ParseField("bucket_count"); - public static final ParseField TOTAL_SEARCH_TIME_MS = new ParseField("total_search_time_ms"); - public static final ParseField AVG_SEARCH_TIME_PER_BUCKET_MS = new ParseField("average_search_time_per_bucket_ms"); - public static final ParseField EXPONENTIAL_AVG_SEARCH_TIME_PER_HOUR_MS = new ParseField("exponential_average_search_time_per_hour_ms"); - - public static final ParseField TYPE = new ParseField("datafeed_timing_stats"); - - public static final ConstructingObjectParser PARSER = createParser(); - - @SuppressWarnings("HiddenField") - private static ConstructingObjectParser createParser() { - ConstructingObjectParser parser = new ConstructingObjectParser<>("datafeed_timing_stats", true, args -> { - String jobId = (String) args[0]; - Long searchCount = (Long) args[1]; - Long bucketCount = (Long) args[2]; - Double totalSearchTimeMs = (Double) args[3]; - Double avgSearchTimePerBucketMs = (Double) args[4]; - Double exponentialAvgSearchTimePerHourMs = (Double) args[5]; - return new DatafeedTimingStats( - jobId, - getOrDefault(searchCount, 0L), - getOrDefault(bucketCount, 0L), - getOrDefault(totalSearchTimeMs, 0.0), - avgSearchTimePerBucketMs, - exponentialAvgSearchTimePerHourMs - ); - }); - parser.declareString(constructorArg(), JOB_ID); - parser.declareLong(optionalConstructorArg(), SEARCH_COUNT); - parser.declareLong(optionalConstructorArg(), BUCKET_COUNT); - parser.declareDouble(optionalConstructorArg(), TOTAL_SEARCH_TIME_MS); - parser.declareDouble(optionalConstructorArg(), AVG_SEARCH_TIME_PER_BUCKET_MS); - parser.declareDouble(optionalConstructorArg(), EXPONENTIAL_AVG_SEARCH_TIME_PER_HOUR_MS); - return parser; - } - - private final String jobId; - private long searchCount; - private long bucketCount; - private double totalSearchTimeMs; - private Double avgSearchTimePerBucketMs; - private Double exponentialAvgSearchTimePerHourMs; - - public DatafeedTimingStats( - String jobId, - long searchCount, - long bucketCount, - double totalSearchTimeMs, - @Nullable Double avgSearchTimePerBucketMs, - @Nullable Double exponentialAvgSearchTimePerHourMs - ) { - this.jobId = Objects.requireNonNull(jobId); - this.searchCount = searchCount; - this.bucketCount = bucketCount; - this.totalSearchTimeMs = totalSearchTimeMs; - this.avgSearchTimePerBucketMs = avgSearchTimePerBucketMs; - this.exponentialAvgSearchTimePerHourMs = exponentialAvgSearchTimePerHourMs; - } - - public String getJobId() { - return jobId; - } - - public long getSearchCount() { - return searchCount; - } - - public long getBucketCount() { - return bucketCount; - } - - public double getTotalSearchTimeMs() { - return 
totalSearchTimeMs; - } - - public Double getAvgSearchTimePerBucketMs() { - return avgSearchTimePerBucketMs; - } - - public Double getExponentialAvgSearchTimePerHourMs() { - return exponentialAvgSearchTimePerHourMs; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { - builder.startObject(); - builder.field(JOB_ID.getPreferredName(), jobId); - builder.field(SEARCH_COUNT.getPreferredName(), searchCount); - builder.field(BUCKET_COUNT.getPreferredName(), bucketCount); - builder.field(TOTAL_SEARCH_TIME_MS.getPreferredName(), totalSearchTimeMs); - if (avgSearchTimePerBucketMs != null) { - builder.field(AVG_SEARCH_TIME_PER_BUCKET_MS.getPreferredName(), avgSearchTimePerBucketMs); - } - if (exponentialAvgSearchTimePerHourMs != null) { - builder.field(EXPONENTIAL_AVG_SEARCH_TIME_PER_HOUR_MS.getPreferredName(), exponentialAvgSearchTimePerHourMs); - } - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object obj) { - if (this == obj) { - return true; - } - if (obj == null || getClass() != obj.getClass()) { - return false; - } - - DatafeedTimingStats other = (DatafeedTimingStats) obj; - return Objects.equals(this.jobId, other.jobId) - && this.searchCount == other.searchCount - && this.bucketCount == other.bucketCount - && this.totalSearchTimeMs == other.totalSearchTimeMs - && Objects.equals(this.avgSearchTimePerBucketMs, other.avgSearchTimePerBucketMs) - && Objects.equals(this.exponentialAvgSearchTimePerHourMs, other.exponentialAvgSearchTimePerHourMs); - } - - @Override - public int hashCode() { - return Objects.hash( - jobId, - searchCount, - bucketCount, - totalSearchTimeMs, - avgSearchTimePerBucketMs, - exponentialAvgSearchTimePerHourMs - ); - } - - @Override - public String toString() { - return Strings.toString(this); - } - - private static T getOrDefault(@Nullable T value, T defaultValue) { - return value != null ? value : defaultValue; - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/datafeed/DatafeedUpdate.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/datafeed/DatafeedUpdate.java deleted file mode 100644 index 3b4be882a868b..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/datafeed/DatafeedUpdate.java +++ /dev/null @@ -1,455 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ -package org.elasticsearch.client.ml.datafeed; - -import org.elasticsearch.action.support.IndicesOptions; -import org.elasticsearch.common.bytes.BytesArray; -import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.xcontent.XContentHelper; -import org.elasticsearch.core.TimeValue; -import org.elasticsearch.index.query.QueryBuilder; -import org.elasticsearch.search.aggregations.AggregatorFactories; -import org.elasticsearch.search.builder.SearchSourceBuilder; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; -import org.elasticsearch.xcontent.XContentType; -import org.elasticsearch.xcontent.json.JsonXContent; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collections; -import java.util.Comparator; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Objects; - -/** - * A datafeed update contains partial properties to update a {@link DatafeedConfig}. - * The main difference between this class and {@link DatafeedConfig} is that here all - * fields are nullable. - */ -public class DatafeedUpdate implements ToXContentObject { - - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "datafeed_update", - true, - a -> new Builder((String) a[0]) - ); - - static { - PARSER.declareString(ConstructingObjectParser.constructorArg(), DatafeedConfig.ID); - - PARSER.declareStringArray(Builder::setIndices, DatafeedConfig.INDEXES); - PARSER.declareStringArray(Builder::setIndices, DatafeedConfig.INDICES); - PARSER.declareString( - (builder, val) -> builder.setQueryDelay(TimeValue.parseTimeValue(val, DatafeedConfig.QUERY_DELAY.getPreferredName())), - DatafeedConfig.QUERY_DELAY - ); - PARSER.declareString( - (builder, val) -> builder.setFrequency(TimeValue.parseTimeValue(val, DatafeedConfig.FREQUENCY.getPreferredName())), - DatafeedConfig.FREQUENCY - ); - PARSER.declareField(Builder::setQuery, DatafeedUpdate::parseBytes, DatafeedConfig.QUERY, ObjectParser.ValueType.OBJECT); - PARSER.declareField( - Builder::setAggregations, - DatafeedUpdate::parseBytes, - DatafeedConfig.AGGREGATIONS, - ObjectParser.ValueType.OBJECT - ); - PARSER.declareObject(Builder::setScriptFields, (p, c) -> { - List parsedScriptFields = new ArrayList<>(); - while (p.nextToken() != XContentParser.Token.END_OBJECT) { - parsedScriptFields.add(new SearchSourceBuilder.ScriptField(p)); - } - return parsedScriptFields; - }, DatafeedConfig.SCRIPT_FIELDS); - PARSER.declareInt(Builder::setScrollSize, DatafeedConfig.SCROLL_SIZE); - PARSER.declareObject(Builder::setChunkingConfig, ChunkingConfig.PARSER, DatafeedConfig.CHUNKING_CONFIG); - PARSER.declareObject(Builder::setDelayedDataCheckConfig, DelayedDataCheckConfig.PARSER, DatafeedConfig.DELAYED_DATA_CHECK_CONFIG); - PARSER.declareInt(Builder::setMaxEmptySearches, DatafeedConfig.MAX_EMPTY_SEARCHES); - PARSER.declareObject( - Builder::setIndicesOptions, - (p, c) -> IndicesOptions.fromMap(p.map(), new IndicesOptions(IndicesOptions.Option.NONE, IndicesOptions.WildcardStates.NONE)), - DatafeedConfig.INDICES_OPTIONS - ); - PARSER.declareObject(Builder::setRuntimeMappings, (p, c) -> p.map(), SearchSourceBuilder.RUNTIME_MAPPINGS_FIELD); - } - - private static BytesReference 
parseBytes(XContentParser parser) throws IOException { - XContentBuilder contentBuilder = JsonXContent.contentBuilder(); - contentBuilder.generator().copyCurrentStructure(parser); - return BytesReference.bytes(contentBuilder); - } - - private final String id; - private final TimeValue queryDelay; - private final TimeValue frequency; - private final List indices; - private final BytesReference query; - private final BytesReference aggregations; - private final List scriptFields; - private final Integer scrollSize; - private final ChunkingConfig chunkingConfig; - private final DelayedDataCheckConfig delayedDataCheckConfig; - private final Integer maxEmptySearches; - private final IndicesOptions indicesOptions; - private final Map runtimeMappings; - - private DatafeedUpdate( - String id, - TimeValue queryDelay, - TimeValue frequency, - List indices, - BytesReference query, - BytesReference aggregations, - List scriptFields, - Integer scrollSize, - ChunkingConfig chunkingConfig, - DelayedDataCheckConfig delayedDataCheckConfig, - Integer maxEmptySearches, - IndicesOptions indicesOptions, - Map runtimeMappings - ) { - this.id = id; - this.queryDelay = queryDelay; - this.frequency = frequency; - this.indices = indices; - this.query = query; - this.aggregations = aggregations; - this.scriptFields = scriptFields; - this.scrollSize = scrollSize; - this.chunkingConfig = chunkingConfig; - this.delayedDataCheckConfig = delayedDataCheckConfig; - this.maxEmptySearches = maxEmptySearches; - this.indicesOptions = indicesOptions; - this.runtimeMappings = runtimeMappings; - } - - /** - * Get the id of the datafeed to update - */ - public String getId() { - return id; - } - - public Map getRuntimeMappings() { - return runtimeMappings; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(DatafeedConfig.ID.getPreferredName(), id); - if (queryDelay != null) { - builder.field(DatafeedConfig.QUERY_DELAY.getPreferredName(), queryDelay.getStringRep()); - } - if (frequency != null) { - builder.field(DatafeedConfig.FREQUENCY.getPreferredName(), frequency.getStringRep()); - } - addOptionalField(builder, DatafeedConfig.INDICES, indices); - if (query != null) { - builder.field(DatafeedConfig.QUERY.getPreferredName(), asMap(query)); - } - if (aggregations != null) { - builder.field(DatafeedConfig.AGGREGATIONS.getPreferredName(), asMap(aggregations)); - } - if (scriptFields != null) { - builder.startObject(DatafeedConfig.SCRIPT_FIELDS.getPreferredName()); - for (SearchSourceBuilder.ScriptField scriptField : scriptFields) { - scriptField.toXContent(builder, params); - } - builder.endObject(); - } - if (delayedDataCheckConfig != null) { - builder.field(DatafeedConfig.DELAYED_DATA_CHECK_CONFIG.getPreferredName(), delayedDataCheckConfig); - } - addOptionalField(builder, DatafeedConfig.SCROLL_SIZE, scrollSize); - addOptionalField(builder, DatafeedConfig.CHUNKING_CONFIG, chunkingConfig); - addOptionalField(builder, DatafeedConfig.MAX_EMPTY_SEARCHES, maxEmptySearches); - if (indicesOptions != null) { - builder.startObject(DatafeedConfig.INDICES_OPTIONS.getPreferredName()); - indicesOptions.toXContent(builder, params); - builder.endObject(); - } - addOptionalField(builder, SearchSourceBuilder.RUNTIME_MAPPINGS_FIELD, runtimeMappings); - builder.endObject(); - return builder; - } - - private void addOptionalField(XContentBuilder builder, ParseField field, Object value) throws IOException { - if (value != null) { - 
builder.field(field.getPreferredName(), value); - } - } - - public TimeValue getQueryDelay() { - return queryDelay; - } - - public TimeValue getFrequency() { - return frequency; - } - - public List getIndices() { - return indices; - } - - public Integer getScrollSize() { - return scrollSize; - } - - public BytesReference getQuery() { - return query; - } - - public BytesReference getAggregations() { - return aggregations; - } - - public List getScriptFields() { - return scriptFields == null ? Collections.emptyList() : scriptFields; - } - - public ChunkingConfig getChunkingConfig() { - return chunkingConfig; - } - - public DelayedDataCheckConfig getDelayedDataCheckConfig() { - return delayedDataCheckConfig; - } - - public Integer getMaxEmptySearches() { - return maxEmptySearches; - } - - public IndicesOptions getIndicesOptions() { - return indicesOptions; - } - - private static Map asMap(BytesReference bytesReference) { - return bytesReference == null ? null : XContentHelper.convertToMap(bytesReference, true, XContentType.JSON).v2(); - } - - /** - * The lists of indices and types are compared for equality but they are not - * sorted first so this test could fail simply because the indices and types - * lists are in different orders. - * - * Also note this could be a heavy operation when a query or aggregations - * are set as we need to convert the bytes references into maps to correctly - * compare them. - */ - @Override - public boolean equals(Object other) { - if (this == other) { - return true; - } - - if (other == null || getClass() != other.getClass()) { - return false; - } - - DatafeedUpdate that = (DatafeedUpdate) other; - - return Objects.equals(this.id, that.id) - && Objects.equals(this.frequency, that.frequency) - && Objects.equals(this.queryDelay, that.queryDelay) - && Objects.equals(this.indices, that.indices) - && Objects.equals(asMap(this.query), asMap(that.query)) - && Objects.equals(this.scrollSize, that.scrollSize) - && Objects.equals(asMap(this.aggregations), asMap(that.aggregations)) - && Objects.equals(this.delayedDataCheckConfig, that.delayedDataCheckConfig) - && Objects.equals(this.scriptFields, that.scriptFields) - && Objects.equals(this.chunkingConfig, that.chunkingConfig) - && Objects.equals(this.maxEmptySearches, that.maxEmptySearches) - && Objects.equals(this.indicesOptions, that.indicesOptions) - && Objects.equals(this.runtimeMappings, that.runtimeMappings); - } - - /** - * Note this could be a heavy operation when a query or aggregations - * are set as we need to convert the bytes references into maps to - * compute a stable hash code. 
- */ - @Override - public int hashCode() { - return Objects.hash( - id, - frequency, - queryDelay, - indices, - asMap(query), - scrollSize, - asMap(aggregations), - scriptFields, - chunkingConfig, - delayedDataCheckConfig, - maxEmptySearches, - indicesOptions, - runtimeMappings - ); - } - - public static Builder builder(String id) { - return new Builder(id); - } - - public static class Builder { - - private String id; - private TimeValue queryDelay; - private TimeValue frequency; - private List indices; - private BytesReference query; - private BytesReference aggregations; - private List scriptFields; - private Integer scrollSize; - private ChunkingConfig chunkingConfig; - private DelayedDataCheckConfig delayedDataCheckConfig; - private Integer maxEmptySearches; - private IndicesOptions indicesOptions; - private Map runtimeMappings; - - public Builder(String id) { - this.id = Objects.requireNonNull(id, DatafeedConfig.ID.getPreferredName()); - } - - public Builder(DatafeedUpdate config) { - this.id = config.id; - this.queryDelay = config.queryDelay; - this.frequency = config.frequency; - this.indices = config.indices; - this.query = config.query; - this.aggregations = config.aggregations; - this.scriptFields = config.scriptFields; - this.scrollSize = config.scrollSize; - this.chunkingConfig = config.chunkingConfig; - this.delayedDataCheckConfig = config.delayedDataCheckConfig; - this.maxEmptySearches = config.maxEmptySearches; - this.indicesOptions = config.indicesOptions; - this.runtimeMappings = config.runtimeMappings != null ? new HashMap<>(config.runtimeMappings) : null; - } - - public Builder setIndices(List indices) { - this.indices = indices; - return this; - } - - public Builder setIndices(String... indices) { - return setIndices(Arrays.asList(indices)); - } - - public Builder setQueryDelay(TimeValue queryDelay) { - this.queryDelay = queryDelay; - return this; - } - - public Builder setFrequency(TimeValue frequency) { - this.frequency = frequency; - return this; - } - - private Builder setQuery(BytesReference query) { - this.query = query; - return this; - } - - public Builder setQuery(String queryAsJson) { - this.query = queryAsJson == null ? null : new BytesArray(queryAsJson); - return this; - } - - public Builder setQuery(QueryBuilder query) throws IOException { - this.query = query == null ? null : xContentToBytes(query); - return this; - } - - private Builder setAggregations(BytesReference aggregations) { - this.aggregations = aggregations; - return this; - } - - public Builder setAggregations(String aggsAsJson) { - this.aggregations = aggsAsJson == null ? null : new BytesArray(aggsAsJson); - return this; - } - - public Builder setAggregations(AggregatorFactories.Builder aggregations) throws IOException { - this.aggregations = aggregations == null ? 
null : xContentToBytes(aggregations); - return this; - } - - public Builder setScriptFields(List scriptFields) { - List sorted = new ArrayList<>(scriptFields); - sorted.sort(Comparator.comparing(SearchSourceBuilder.ScriptField::fieldName)); - this.scriptFields = sorted; - return this; - } - - public Builder setScrollSize(int scrollSize) { - this.scrollSize = scrollSize; - return this; - } - - public Builder setChunkingConfig(ChunkingConfig chunkingConfig) { - this.chunkingConfig = chunkingConfig; - return this; - } - - public Builder setDelayedDataCheckConfig(DelayedDataCheckConfig delayedDataCheckConfig) { - this.delayedDataCheckConfig = delayedDataCheckConfig; - return this; - } - - public Builder setMaxEmptySearches(int maxEmptySearches) { - this.maxEmptySearches = maxEmptySearches; - return this; - } - - public Builder setIndicesOptions(IndicesOptions indicesOptions) { - this.indicesOptions = indicesOptions; - return this; - } - - public Builder setRuntimeMappings(Map runtimeMappings) { - this.runtimeMappings = runtimeMappings; - return this; - } - - public DatafeedUpdate build() { - return new DatafeedUpdate( - id, - queryDelay, - frequency, - indices, - query, - aggregations, - scriptFields, - scrollSize, - chunkingConfig, - delayedDataCheckConfig, - maxEmptySearches, - indicesOptions, - runtimeMappings - ); - } - - private static BytesReference xContentToBytes(ToXContentObject object) throws IOException { - try (XContentBuilder builder = JsonXContent.contentBuilder()) { - object.toXContent(builder, ToXContentObject.EMPTY_PARAMS); - return BytesReference.bytes(builder); - } - } - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/datafeed/DelayedDataCheckConfig.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/datafeed/DelayedDataCheckConfig.java deleted file mode 100644 index 4c55662f8b833..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/datafeed/DelayedDataCheckConfig.java +++ /dev/null @@ -1,119 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.datafeed; - -import org.elasticsearch.core.Nullable; -import org.elasticsearch.core.TimeValue; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Objects; - -/** - * The configuration object containing the delayed data check settings. - * - * See {@link DelayedDataCheckConfig#enabledDelayedDataCheckConfig(TimeValue)} for creating a new - * enabled datacheck with the given check_window - * - * See {@link DelayedDataCheckConfig#disabledDelayedDataCheckConfig()} for creating a config for disabling - * delayed data checking. 
- */ -public class DelayedDataCheckConfig implements ToXContentObject { - - public static final ParseField ENABLED = new ParseField("enabled"); - public static final ParseField CHECK_WINDOW = new ParseField("check_window"); - - // These parsers follow the pattern that metadata is parsed leniently (to allow for enhancements), whilst config is parsed strictly - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "delayed_data_check_config", - true, - a -> new DelayedDataCheckConfig((Boolean) a[0], (TimeValue) a[1]) - ); - static { - PARSER.declareBoolean(ConstructingObjectParser.constructorArg(), ENABLED); - PARSER.declareString( - ConstructingObjectParser.optionalConstructorArg(), - text -> TimeValue.parseTimeValue(text, CHECK_WINDOW.getPreferredName()), - CHECK_WINDOW - ); - } - - /** - * This creates a new DelayedDataCheckConfig that has a check_window of the passed `timeValue` - * - * We query the index to the latest finalized bucket from this TimeValue in the past looking to see if any data has been indexed - * since the data was read with the Datafeed. - * - * The window must be larger than the {@link org.elasticsearch.client.ml.job.config.AnalysisConfig#bucketSpan}, less than - * 24 hours, and span less than 10,000x buckets. - * - * - * @param timeValue The time length in the past from the latest finalized bucket to look for latent data. - * If `null` is provided, the appropriate window is calculated when it is used - **/ - public static DelayedDataCheckConfig enabledDelayedDataCheckConfig(TimeValue timeValue) { - return new DelayedDataCheckConfig(true, timeValue); - } - - /** - * This creates a new DelayedDataCheckConfig that disables the data check. - */ - public static DelayedDataCheckConfig disabledDelayedDataCheckConfig() { - return new DelayedDataCheckConfig(false, null); - } - - private final boolean enabled; - private final TimeValue checkWindow; - - DelayedDataCheckConfig(Boolean enabled, TimeValue checkWindow) { - this.enabled = enabled; - this.checkWindow = checkWindow; - } - - public boolean isEnabled() { - return enabled; - } - - @Nullable - public TimeValue getCheckWindow() { - return checkWindow; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(ENABLED.getPreferredName(), enabled); - if (checkWindow != null) { - builder.field(CHECK_WINDOW.getPreferredName(), checkWindow.getStringRep()); - } - builder.endObject(); - return builder; - } - - @Override - public int hashCode() { - return Objects.hash(enabled, checkWindow); - } - - @Override - public boolean equals(Object obj) { - if (this == obj) { - return true; - } - if (obj == null || getClass() != obj.getClass()) { - return false; - } - - DelayedDataCheckConfig other = (DelayedDataCheckConfig) obj; - return Objects.equals(this.enabled, other.enabled) && Objects.equals(this.checkWindow, other.checkWindow); - } - -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/Classification.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/Classification.java deleted file mode 100644 index b1fe4a5d1b87c..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/Classification.java +++ /dev/null @@ -1,550 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. 
Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.dataframe; - -import org.elasticsearch.client.ml.inference.NamedXContentObjectHelper; -import org.elasticsearch.client.ml.inference.preprocessing.PreProcessor; -import org.elasticsearch.common.Strings; -import org.elasticsearch.core.Nullable; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.List; -import java.util.Locale; -import java.util.Objects; - -public class Classification implements DataFrameAnalysis { - - public static Classification fromXContent(XContentParser parser) { - return PARSER.apply(parser, null); - } - - public static Builder builder(String dependentVariable) { - return new Builder(dependentVariable); - } - - public static final ParseField NAME = new ParseField("classification"); - - static final ParseField DEPENDENT_VARIABLE = new ParseField("dependent_variable"); - static final ParseField LAMBDA = new ParseField("lambda"); - static final ParseField GAMMA = new ParseField("gamma"); - static final ParseField ETA = new ParseField("eta"); - static final ParseField MAX_TREES = new ParseField("max_trees"); - static final ParseField FEATURE_BAG_FRACTION = new ParseField("feature_bag_fraction"); - static final ParseField NUM_TOP_FEATURE_IMPORTANCE_VALUES = new ParseField("num_top_feature_importance_values"); - static final ParseField PREDICTION_FIELD_NAME = new ParseField("prediction_field_name"); - static final ParseField TRAINING_PERCENT = new ParseField("training_percent"); - static final ParseField CLASS_ASSIGNMENT_OBJECTIVE = new ParseField("class_assignment_objective"); - static final ParseField NUM_TOP_CLASSES = new ParseField("num_top_classes"); - static final ParseField RANDOMIZE_SEED = new ParseField("randomize_seed"); - static final ParseField FEATURE_PROCESSORS = new ParseField("feature_processors"); - static final ParseField ALPHA = new ParseField("alpha"); - static final ParseField ETA_GROWTH_RATE_PER_TREE = new ParseField("eta_growth_rate_per_tree"); - static final ParseField SOFT_TREE_DEPTH_LIMIT = new ParseField("soft_tree_depth_limit"); - static final ParseField SOFT_TREE_DEPTH_TOLERANCE = new ParseField("soft_tree_depth_tolerance"); - static final ParseField DOWNSAMPLE_FACTOR = new ParseField("downsample_factor"); - static final ParseField MAX_OPTIMIZATION_ROUNDS_PER_HYPERPARAMETER = new ParseField("max_optimization_rounds_per_hyperparameter"); - static final ParseField EARLY_STOPPING_ENABLED = new ParseField("early_stopping_enabled"); - - @SuppressWarnings("unchecked") - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - NAME.getPreferredName(), - true, - a -> new Classification( - (String) a[0], - (Double) a[1], - (Double) a[2], - (Double) a[3], - (Integer) a[4], - (Double) a[5], - (Integer) a[6], - (String) a[7], - (Double) a[8], - (Integer) a[9], - (Long) a[10], - (ClassAssignmentObjective) a[11], - (List) a[12], - (Double) a[13], - (Double) a[14], - (Double) a[15], - (Double) a[16], - (Double) a[17], - (Integer) a[18], - (Boolean) a[19] - ) - ); - - static { - PARSER.declareString(ConstructingObjectParser.constructorArg(), DEPENDENT_VARIABLE); - 
PARSER.declareDouble(ConstructingObjectParser.optionalConstructorArg(), LAMBDA); - PARSER.declareDouble(ConstructingObjectParser.optionalConstructorArg(), GAMMA); - PARSER.declareDouble(ConstructingObjectParser.optionalConstructorArg(), ETA); - PARSER.declareInt(ConstructingObjectParser.optionalConstructorArg(), MAX_TREES); - PARSER.declareDouble(ConstructingObjectParser.optionalConstructorArg(), FEATURE_BAG_FRACTION); - PARSER.declareInt(ConstructingObjectParser.optionalConstructorArg(), NUM_TOP_FEATURE_IMPORTANCE_VALUES); - PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), PREDICTION_FIELD_NAME); - PARSER.declareDouble(ConstructingObjectParser.optionalConstructorArg(), TRAINING_PERCENT); - PARSER.declareInt(ConstructingObjectParser.optionalConstructorArg(), NUM_TOP_CLASSES); - PARSER.declareLong(ConstructingObjectParser.optionalConstructorArg(), RANDOMIZE_SEED); - PARSER.declareString( - ConstructingObjectParser.optionalConstructorArg(), - ClassAssignmentObjective::fromString, - CLASS_ASSIGNMENT_OBJECTIVE - ); - PARSER.declareNamedObjects( - ConstructingObjectParser.optionalConstructorArg(), - (p, c, n) -> p.namedObject(PreProcessor.class, n, c), - (classification) -> {}, - FEATURE_PROCESSORS - ); - PARSER.declareDouble(ConstructingObjectParser.optionalConstructorArg(), ALPHA); - PARSER.declareDouble(ConstructingObjectParser.optionalConstructorArg(), ETA_GROWTH_RATE_PER_TREE); - PARSER.declareDouble(ConstructingObjectParser.optionalConstructorArg(), SOFT_TREE_DEPTH_LIMIT); - PARSER.declareDouble(ConstructingObjectParser.optionalConstructorArg(), SOFT_TREE_DEPTH_TOLERANCE); - PARSER.declareDouble(ConstructingObjectParser.optionalConstructorArg(), DOWNSAMPLE_FACTOR); - PARSER.declareInt(ConstructingObjectParser.optionalConstructorArg(), MAX_OPTIMIZATION_ROUNDS_PER_HYPERPARAMETER); - PARSER.declareBoolean(ConstructingObjectParser.optionalConstructorArg(), EARLY_STOPPING_ENABLED); - } - - private final String dependentVariable; - private final Double lambda; - private final Double gamma; - private final Double eta; - private final Integer maxTrees; - private final Double featureBagFraction; - private final Integer numTopFeatureImportanceValues; - private final String predictionFieldName; - private final Double trainingPercent; - private final ClassAssignmentObjective classAssignmentObjective; - private final Integer numTopClasses; - private final Long randomizeSeed; - private final List featureProcessors; - private final Double alpha; - private final Double etaGrowthRatePerTree; - private final Double softTreeDepthLimit; - private final Double softTreeDepthTolerance; - private final Double downsampleFactor; - private final Integer maxOptimizationRoundsPerHyperparameter; - private final Boolean earlyStoppingEnabled; - - private Classification( - String dependentVariable, - @Nullable Double lambda, - @Nullable Double gamma, - @Nullable Double eta, - @Nullable Integer maxTrees, - @Nullable Double featureBagFraction, - @Nullable Integer numTopFeatureImportanceValues, - @Nullable String predictionFieldName, - @Nullable Double trainingPercent, - @Nullable Integer numTopClasses, - @Nullable Long randomizeSeed, - @Nullable ClassAssignmentObjective classAssignmentObjective, - @Nullable List featureProcessors, - @Nullable Double alpha, - @Nullable Double etaGrowthRatePerTree, - @Nullable Double softTreeDepthLimit, - @Nullable Double softTreeDepthTolerance, - @Nullable Double downsampleFactor, - @Nullable Integer maxOptimizationRoundsPerHyperparameter, - @Nullable Boolean 
earlyStoppingEnabled - ) { - this.dependentVariable = Objects.requireNonNull(dependentVariable); - this.lambda = lambda; - this.gamma = gamma; - this.eta = eta; - this.maxTrees = maxTrees; - this.featureBagFraction = featureBagFraction; - this.numTopFeatureImportanceValues = numTopFeatureImportanceValues; - this.predictionFieldName = predictionFieldName; - this.trainingPercent = trainingPercent; - this.classAssignmentObjective = classAssignmentObjective; - this.numTopClasses = numTopClasses; - this.randomizeSeed = randomizeSeed; - this.featureProcessors = featureProcessors; - this.alpha = alpha; - this.etaGrowthRatePerTree = etaGrowthRatePerTree; - this.softTreeDepthLimit = softTreeDepthLimit; - this.softTreeDepthTolerance = softTreeDepthTolerance; - this.downsampleFactor = downsampleFactor; - this.maxOptimizationRoundsPerHyperparameter = maxOptimizationRoundsPerHyperparameter; - this.earlyStoppingEnabled = earlyStoppingEnabled; - } - - @Override - public String getName() { - return NAME.getPreferredName(); - } - - public String getDependentVariable() { - return dependentVariable; - } - - public Double getLambda() { - return lambda; - } - - public Double getGamma() { - return gamma; - } - - public Double getEta() { - return eta; - } - - public Integer getMaxTrees() { - return maxTrees; - } - - public Double getFeatureBagFraction() { - return featureBagFraction; - } - - public Integer getNumTopFeatureImportanceValues() { - return numTopFeatureImportanceValues; - } - - public String getPredictionFieldName() { - return predictionFieldName; - } - - public Double getTrainingPercent() { - return trainingPercent; - } - - public Long getRandomizeSeed() { - return randomizeSeed; - } - - public ClassAssignmentObjective getClassAssignmentObjective() { - return classAssignmentObjective; - } - - public Integer getNumTopClasses() { - return numTopClasses; - } - - public List getFeatureProcessors() { - return featureProcessors; - } - - public Double getAlpha() { - return alpha; - } - - public Double getEtaGrowthRatePerTree() { - return etaGrowthRatePerTree; - } - - public Double getSoftTreeDepthLimit() { - return softTreeDepthLimit; - } - - public Double getSoftTreeDepthTolerance() { - return softTreeDepthTolerance; - } - - public Double getDownsampleFactor() { - return downsampleFactor; - } - - public Integer getMaxOptimizationRoundsPerHyperparameter() { - return maxOptimizationRoundsPerHyperparameter; - } - - public Boolean getEarlyStoppingEnable() { - return earlyStoppingEnabled; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(DEPENDENT_VARIABLE.getPreferredName(), dependentVariable); - if (lambda != null) { - builder.field(LAMBDA.getPreferredName(), lambda); - } - if (gamma != null) { - builder.field(GAMMA.getPreferredName(), gamma); - } - if (eta != null) { - builder.field(ETA.getPreferredName(), eta); - } - if (maxTrees != null) { - builder.field(MAX_TREES.getPreferredName(), maxTrees); - } - if (featureBagFraction != null) { - builder.field(FEATURE_BAG_FRACTION.getPreferredName(), featureBagFraction); - } - if (numTopFeatureImportanceValues != null) { - builder.field(NUM_TOP_FEATURE_IMPORTANCE_VALUES.getPreferredName(), numTopFeatureImportanceValues); - } - if (predictionFieldName != null) { - builder.field(PREDICTION_FIELD_NAME.getPreferredName(), predictionFieldName); - } - if (trainingPercent != null) { - builder.field(TRAINING_PERCENT.getPreferredName(), trainingPercent); - } - if 
(randomizeSeed != null) { - builder.field(RANDOMIZE_SEED.getPreferredName(), randomizeSeed); - } - if (classAssignmentObjective != null) { - builder.field(CLASS_ASSIGNMENT_OBJECTIVE.getPreferredName(), classAssignmentObjective); - } - if (numTopClasses != null) { - builder.field(NUM_TOP_CLASSES.getPreferredName(), numTopClasses); - } - if (featureProcessors != null) { - NamedXContentObjectHelper.writeNamedObjects(builder, params, true, FEATURE_PROCESSORS.getPreferredName(), featureProcessors); - } - if (alpha != null) { - builder.field(ALPHA.getPreferredName(), alpha); - } - if (etaGrowthRatePerTree != null) { - builder.field(ETA_GROWTH_RATE_PER_TREE.getPreferredName(), etaGrowthRatePerTree); - } - if (softTreeDepthLimit != null) { - builder.field(SOFT_TREE_DEPTH_LIMIT.getPreferredName(), softTreeDepthLimit); - } - if (softTreeDepthTolerance != null) { - builder.field(SOFT_TREE_DEPTH_TOLERANCE.getPreferredName(), softTreeDepthTolerance); - } - if (downsampleFactor != null) { - builder.field(DOWNSAMPLE_FACTOR.getPreferredName(), downsampleFactor); - } - if (maxOptimizationRoundsPerHyperparameter != null) { - builder.field(MAX_OPTIMIZATION_ROUNDS_PER_HYPERPARAMETER.getPreferredName(), maxOptimizationRoundsPerHyperparameter); - } - if (earlyStoppingEnabled != null) { - builder.field(EARLY_STOPPING_ENABLED.getPreferredName(), earlyStoppingEnabled); - } - builder.endObject(); - return builder; - } - - @Override - public int hashCode() { - return Objects.hash( - dependentVariable, - lambda, - gamma, - eta, - maxTrees, - featureBagFraction, - numTopFeatureImportanceValues, - predictionFieldName, - trainingPercent, - randomizeSeed, - numTopClasses, - classAssignmentObjective, - featureProcessors, - alpha, - etaGrowthRatePerTree, - softTreeDepthLimit, - softTreeDepthTolerance, - downsampleFactor, - maxOptimizationRoundsPerHyperparameter, - earlyStoppingEnabled - ); - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - Classification that = (Classification) o; - return Objects.equals(dependentVariable, that.dependentVariable) - && Objects.equals(lambda, that.lambda) - && Objects.equals(gamma, that.gamma) - && Objects.equals(eta, that.eta) - && Objects.equals(maxTrees, that.maxTrees) - && Objects.equals(featureBagFraction, that.featureBagFraction) - && Objects.equals(numTopFeatureImportanceValues, that.numTopFeatureImportanceValues) - && Objects.equals(predictionFieldName, that.predictionFieldName) - && Objects.equals(trainingPercent, that.trainingPercent) - && Objects.equals(randomizeSeed, that.randomizeSeed) - && Objects.equals(numTopClasses, that.numTopClasses) - && Objects.equals(classAssignmentObjective, that.classAssignmentObjective) - && Objects.equals(featureProcessors, that.featureProcessors) - && Objects.equals(alpha, that.alpha) - && Objects.equals(etaGrowthRatePerTree, that.etaGrowthRatePerTree) - && Objects.equals(softTreeDepthLimit, that.softTreeDepthLimit) - && Objects.equals(softTreeDepthTolerance, that.softTreeDepthTolerance) - && Objects.equals(downsampleFactor, that.downsampleFactor) - && Objects.equals(maxOptimizationRoundsPerHyperparameter, that.maxOptimizationRoundsPerHyperparameter) - && Objects.equals(earlyStoppingEnabled, that.earlyStoppingEnabled); - } - - @Override - public String toString() { - return Strings.toString(this); - } - - public enum ClassAssignmentObjective { - MAXIMIZE_ACCURACY, - MAXIMIZE_MINIMUM_RECALL; - - public static ClassAssignmentObjective fromString(String 
value) { - return ClassAssignmentObjective.valueOf(value.toUpperCase(Locale.ROOT)); - } - - @Override - public String toString() { - return name().toLowerCase(Locale.ROOT); - } - } - - public static class Builder { - private String dependentVariable; - private Double lambda; - private Double gamma; - private Double eta; - private Integer maxTrees; - private Double featureBagFraction; - private Integer numTopFeatureImportanceValues; - private String predictionFieldName; - private Double trainingPercent; - private Integer numTopClasses; - private Long randomizeSeed; - private ClassAssignmentObjective classAssignmentObjective; - private List featureProcessors; - private Double alpha; - private Double etaGrowthRatePerTree; - private Double softTreeDepthLimit; - private Double softTreeDepthTolerance; - private Double downsampleFactor; - private Integer maxOptimizationRoundsPerHyperparameter; - private Boolean earlyStoppingEnabled; - - private Builder(String dependentVariable) { - this.dependentVariable = Objects.requireNonNull(dependentVariable); - } - - public Builder setLambda(Double lambda) { - this.lambda = lambda; - return this; - } - - public Builder setGamma(Double gamma) { - this.gamma = gamma; - return this; - } - - public Builder setEta(Double eta) { - this.eta = eta; - return this; - } - - public Builder setMaxTrees(Integer maxTrees) { - this.maxTrees = maxTrees; - return this; - } - - public Builder setFeatureBagFraction(Double featureBagFraction) { - this.featureBagFraction = featureBagFraction; - return this; - } - - public Builder setNumTopFeatureImportanceValues(Integer numTopFeatureImportanceValues) { - this.numTopFeatureImportanceValues = numTopFeatureImportanceValues; - return this; - } - - public Builder setPredictionFieldName(String predictionFieldName) { - this.predictionFieldName = predictionFieldName; - return this; - } - - public Builder setTrainingPercent(Double trainingPercent) { - this.trainingPercent = trainingPercent; - return this; - } - - public Builder setRandomizeSeed(Long randomizeSeed) { - this.randomizeSeed = randomizeSeed; - return this; - } - - public Builder setNumTopClasses(Integer numTopClasses) { - this.numTopClasses = numTopClasses; - return this; - } - - public Builder setClassAssignmentObjective(ClassAssignmentObjective classAssignmentObjective) { - this.classAssignmentObjective = classAssignmentObjective; - return this; - } - - public Builder setFeatureProcessors(List featureProcessors) { - this.featureProcessors = featureProcessors; - return this; - } - - public Builder setAlpha(Double alpha) { - this.alpha = alpha; - return this; - } - - public Builder setEtaGrowthRatePerTree(Double etaGrowthRatePerTree) { - this.etaGrowthRatePerTree = etaGrowthRatePerTree; - return this; - } - - public Builder setSoftTreeDepthLimit(Double softTreeDepthLimit) { - this.softTreeDepthLimit = softTreeDepthLimit; - return this; - } - - public Builder setSoftTreeDepthTolerance(Double softTreeDepthTolerance) { - this.softTreeDepthTolerance = softTreeDepthTolerance; - return this; - } - - public Builder setDownsampleFactor(Double downsampleFactor) { - this.downsampleFactor = downsampleFactor; - return this; - } - - public Builder setMaxOptimizationRoundsPerHyperparameter(Integer maxOptimizationRoundsPerHyperparameter) { - this.maxOptimizationRoundsPerHyperparameter = maxOptimizationRoundsPerHyperparameter; - return this; - } - - public Builder setEarlyStoppingEnabled(Boolean earlyStoppingEnabled) { - this.earlyStoppingEnabled = earlyStoppingEnabled; - return this; - } - 
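        // A hedged usage sketch of this builder, placed here before build():
        // field and hyperparameter values are hypothetical, and the static
        // Classification.builder(String) factory is assumed to mirror the
        // Builder(String) constructor above (the sibling Regression class
        // defines exactly such a factory).
        //
        //   Classification analysis = Classification.builder("species")
        //       .setEta(0.05)
        //       .setMaxTrees(500)
        //       .setNumTopClasses(2)
        //       .setTrainingPercent(80.0)
        //       .setClassAssignmentObjective(
        //           Classification.ClassAssignmentObjective.MAXIMIZE_ACCURACY)
        //       .build();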
- public Classification build() { - return new Classification( - dependentVariable, - lambda, - gamma, - eta, - maxTrees, - featureBagFraction, - numTopFeatureImportanceValues, - predictionFieldName, - trainingPercent, - numTopClasses, - randomizeSeed, - classAssignmentObjective, - featureProcessors, - alpha, - etaGrowthRatePerTree, - softTreeDepthLimit, - softTreeDepthTolerance, - downsampleFactor, - maxOptimizationRoundsPerHyperparameter, - earlyStoppingEnabled - ); - } - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/DataFrameAnalysis.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/DataFrameAnalysis.java deleted file mode 100644 index e7c13da72880a..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/DataFrameAnalysis.java +++ /dev/null @@ -1,16 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ - -package org.elasticsearch.client.ml.dataframe; - -import org.elasticsearch.xcontent.ToXContentObject; - -public interface DataFrameAnalysis extends ToXContentObject { - - String getName(); -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/DataFrameAnalyticsConfig.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/DataFrameAnalyticsConfig.java deleted file mode 100644 index 53c92d792fe20..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/DataFrameAnalyticsConfig.java +++ /dev/null @@ -1,341 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ - -package org.elasticsearch.client.ml.dataframe; - -import org.elasticsearch.Version; -import org.elasticsearch.client.common.TimeUtil; -import org.elasticsearch.common.Strings; -import org.elasticsearch.common.unit.ByteSizeValue; -import org.elasticsearch.common.xcontent.XContentParserUtils; -import org.elasticsearch.core.Nullable; -import org.elasticsearch.search.fetch.subphase.FetchSourceContext; -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ObjectParser.ValueType; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.time.Instant; -import java.util.Objects; - -public class DataFrameAnalyticsConfig implements ToXContentObject { - - public static DataFrameAnalyticsConfig fromXContent(XContentParser parser) { - return PARSER.apply(parser, null).build(); - } - - public static Builder builder() { - return new Builder(); - } - - static final ParseField ID = new ParseField("id"); - static final ParseField DESCRIPTION = new ParseField("description"); - static final ParseField SOURCE = new ParseField("source"); - static final ParseField DEST = new ParseField("dest"); - static final ParseField ANALYSIS = new ParseField("analysis"); - static final ParseField ANALYZED_FIELDS = new ParseField("analyzed_fields"); - static final ParseField MODEL_MEMORY_LIMIT = new ParseField("model_memory_limit"); - static final ParseField CREATE_TIME = new ParseField("create_time"); - static final ParseField VERSION = new ParseField("version"); - static final ParseField ALLOW_LAZY_START = new ParseField("allow_lazy_start"); - static final ParseField MAX_NUM_THREADS = new ParseField("max_num_threads"); - - private static final ObjectParser PARSER = new ObjectParser<>("data_frame_analytics_config", true, Builder::new); - - static { - PARSER.declareString(Builder::setId, ID); - PARSER.declareString(Builder::setDescription, DESCRIPTION); - PARSER.declareObject(Builder::setSource, (p, c) -> DataFrameAnalyticsSource.fromXContent(p), SOURCE); - PARSER.declareObject(Builder::setDest, (p, c) -> DataFrameAnalyticsDest.fromXContent(p), DEST); - PARSER.declareObject(Builder::setAnalysis, (p, c) -> parseAnalysis(p), ANALYSIS); - PARSER.declareField( - Builder::setAnalyzedFields, - (p, c) -> FetchSourceContext.fromXContent(p), - ANALYZED_FIELDS, - ValueType.OBJECT_ARRAY_BOOLEAN_OR_STRING - ); - PARSER.declareField( - Builder::setModelMemoryLimit, - (p, c) -> ByteSizeValue.parseBytesSizeValue(p.text(), MODEL_MEMORY_LIMIT.getPreferredName()), - MODEL_MEMORY_LIMIT, - ValueType.VALUE - ); - PARSER.declareField( - Builder::setCreateTime, - p -> TimeUtil.parseTimeFieldToInstant(p, CREATE_TIME.getPreferredName()), - CREATE_TIME, - ValueType.VALUE - ); - PARSER.declareString(Builder::setVersion, Version::fromString, VERSION); - PARSER.declareBoolean(Builder::setAllowLazyStart, ALLOW_LAZY_START); - PARSER.declareInt(Builder::setMaxNumThreads, MAX_NUM_THREADS); - } - - private static DataFrameAnalysis parseAnalysis(XContentParser parser) throws IOException { - XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.currentToken(), parser); - XContentParserUtils.ensureExpectedToken(XContentParser.Token.FIELD_NAME, parser.nextToken(), parser); - DataFrameAnalysis analysis = parser.namedObject(DataFrameAnalysis.class, parser.currentName(), true); - 
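            // At this point the parser sits on the single field inside the
            // "analysis" wrapper object; that field name selects the
            // registered NamedXContent parser ("classification", "regression"
            // or "outlier_detection", per MlDataFrameAnalysisNamedXContentProvider).
            // A sketch of the JSON shape this method consumes (values are
            // hypothetical):
            //
            //   "analysis": {
            //     "classification": { "dependent_variable": "label" }
            //   }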
XContentParserUtils.ensureExpectedToken(XContentParser.Token.END_OBJECT, parser.nextToken(), parser); - return analysis; - } - - private final String id; - private final String description; - private final DataFrameAnalyticsSource source; - private final DataFrameAnalyticsDest dest; - private final DataFrameAnalysis analysis; - private final FetchSourceContext analyzedFields; - private final ByteSizeValue modelMemoryLimit; - private final Instant createTime; - private final Version version; - private final Boolean allowLazyStart; - private final Integer maxNumThreads; - - private DataFrameAnalyticsConfig( - @Nullable String id, - @Nullable String description, - @Nullable DataFrameAnalyticsSource source, - @Nullable DataFrameAnalyticsDest dest, - @Nullable DataFrameAnalysis analysis, - @Nullable FetchSourceContext analyzedFields, - @Nullable ByteSizeValue modelMemoryLimit, - @Nullable Instant createTime, - @Nullable Version version, - @Nullable Boolean allowLazyStart, - @Nullable Integer maxNumThreads - ) { - this.id = id; - this.description = description; - this.source = source; - this.dest = dest; - this.analysis = analysis; - this.analyzedFields = analyzedFields; - this.modelMemoryLimit = modelMemoryLimit; - this.createTime = createTime == null ? null : Instant.ofEpochMilli(createTime.toEpochMilli()); - ; - this.version = version; - this.allowLazyStart = allowLazyStart; - this.maxNumThreads = maxNumThreads; - } - - public String getId() { - return id; - } - - public String getDescription() { - return description; - } - - public DataFrameAnalyticsSource getSource() { - return source; - } - - public DataFrameAnalyticsDest getDest() { - return dest; - } - - public DataFrameAnalysis getAnalysis() { - return analysis; - } - - public FetchSourceContext getAnalyzedFields() { - return analyzedFields; - } - - public ByteSizeValue getModelMemoryLimit() { - return modelMemoryLimit; - } - - public Instant getCreateTime() { - return createTime; - } - - public Version getVersion() { - return version; - } - - public Boolean getAllowLazyStart() { - return allowLazyStart; - } - - public Integer getMaxNumThreads() { - return maxNumThreads; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - if (id != null) { - builder.field(ID.getPreferredName(), id); - } - if (description != null) { - builder.field(DESCRIPTION.getPreferredName(), description); - } - if (source != null) { - builder.field(SOURCE.getPreferredName(), source); - } - if (dest != null) { - builder.field(DEST.getPreferredName(), dest); - } - if (analysis != null) { - builder.startObject(ANALYSIS.getPreferredName()).field(analysis.getName(), analysis).endObject(); - } - if (analyzedFields != null) { - builder.field(ANALYZED_FIELDS.getPreferredName(), analyzedFields); - } - if (modelMemoryLimit != null) { - builder.field(MODEL_MEMORY_LIMIT.getPreferredName(), modelMemoryLimit.getStringRep()); - } - if (createTime != null) { - builder.timeField(CREATE_TIME.getPreferredName(), CREATE_TIME.getPreferredName() + "_string", createTime.toEpochMilli()); - } - if (version != null) { - builder.field(VERSION.getPreferredName(), version); - } - if (allowLazyStart != null) { - builder.field(ALLOW_LAZY_START.getPreferredName(), allowLazyStart); - } - if (maxNumThreads != null) { - builder.field(MAX_NUM_THREADS.getPreferredName(), maxNumThreads); - } - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object o) { - if (o == this) return true; 
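        // Reference equality above is a cheap fast path; otherwise fall
        // through to the null/class check and field-by-field comparison below.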
- if (o == null || getClass() != o.getClass()) return false; - - DataFrameAnalyticsConfig other = (DataFrameAnalyticsConfig) o; - return Objects.equals(id, other.id) - && Objects.equals(description, other.description) - && Objects.equals(source, other.source) - && Objects.equals(dest, other.dest) - && Objects.equals(analysis, other.analysis) - && Objects.equals(analyzedFields, other.analyzedFields) - && Objects.equals(modelMemoryLimit, other.modelMemoryLimit) - && Objects.equals(createTime, other.createTime) - && Objects.equals(version, other.version) - && Objects.equals(allowLazyStart, other.allowLazyStart) - && Objects.equals(maxNumThreads, other.maxNumThreads); - } - - @Override - public int hashCode() { - return Objects.hash( - id, - description, - source, - dest, - analysis, - analyzedFields, - modelMemoryLimit, - createTime, - version, - allowLazyStart, - maxNumThreads - ); - } - - @Override - public String toString() { - return Strings.toString(this); - } - - public static class Builder { - - private String id; - private String description; - private DataFrameAnalyticsSource source; - private DataFrameAnalyticsDest dest; - private DataFrameAnalysis analysis; - private FetchSourceContext analyzedFields; - private ByteSizeValue modelMemoryLimit; - private Instant createTime; - private Version version; - private Boolean allowLazyStart; - private Integer maxNumThreads; - - private Builder() {} - - public Builder setId(String id) { - this.id = Objects.requireNonNull(id); - return this; - } - - public Builder setDescription(String description) { - this.description = description; - return this; - } - - public Builder setSource(DataFrameAnalyticsSource source) { - this.source = Objects.requireNonNull(source); - return this; - } - - public Builder setDest(DataFrameAnalyticsDest dest) { - this.dest = Objects.requireNonNull(dest); - return this; - } - - public Builder setAnalysis(DataFrameAnalysis analysis) { - this.analysis = Objects.requireNonNull(analysis); - return this; - } - - public Builder setAnalyzedFields(FetchSourceContext fields) { - this.analyzedFields = fields; - return this; - } - - public Builder setModelMemoryLimit(ByteSizeValue modelMemoryLimit) { - this.modelMemoryLimit = modelMemoryLimit; - return this; - } - - Builder setCreateTime(Instant createTime) { - this.createTime = createTime; - return this; - } - - Builder setVersion(Version version) { - this.version = version; - return this; - } - - public Builder setAllowLazyStart(Boolean allowLazyStart) { - this.allowLazyStart = allowLazyStart; - return this; - } - - public Builder setMaxNumThreads(Integer maxNumThreads) { - this.maxNumThreads = maxNumThreads; - return this; - } - - public DataFrameAnalyticsConfig build() { - return new DataFrameAnalyticsConfig( - id, - description, - source, - dest, - analysis, - analyzedFields, - modelMemoryLimit, - createTime, - version, - allowLazyStart, - maxNumThreads - ); - } - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/DataFrameAnalyticsConfigUpdate.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/DataFrameAnalyticsConfigUpdate.java deleted file mode 100644 index 4dccee1019ce1..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/DataFrameAnalyticsConfigUpdate.java +++ /dev/null @@ -1,174 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. 
Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.dataframe; - -import org.elasticsearch.common.unit.ByteSizeValue; -import org.elasticsearch.core.Nullable; -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Objects; - -import static org.elasticsearch.xcontent.ObjectParser.ValueType.VALUE; - -public class DataFrameAnalyticsConfigUpdate implements ToXContentObject { - - public static DataFrameAnalyticsConfigUpdate fromXContent(XContentParser parser) { - return PARSER.apply(parser, null).build(); - } - - public static Builder builder() { - return new Builder(); - } - - public static final ObjectParser PARSER = new ObjectParser<>("data_frame_analytics_config_update", true, Builder::new); - - static { - PARSER.declareString(Builder::setId, DataFrameAnalyticsConfig.ID); - PARSER.declareStringOrNull(Builder::setDescription, DataFrameAnalyticsConfig.DESCRIPTION); - PARSER.declareField( - Builder::setModelMemoryLimit, - (p, c) -> ByteSizeValue.parseBytesSizeValue(p.text(), DataFrameAnalyticsConfig.MODEL_MEMORY_LIMIT.getPreferredName()), - DataFrameAnalyticsConfig.MODEL_MEMORY_LIMIT, - VALUE - ); - PARSER.declareBoolean(Builder::setAllowLazyStart, DataFrameAnalyticsConfig.ALLOW_LAZY_START); - PARSER.declareInt(Builder::setMaxNumThreads, DataFrameAnalyticsConfig.MAX_NUM_THREADS); - } - - private final String id; - private final String description; - private final ByteSizeValue modelMemoryLimit; - private final Boolean allowLazyStart; - private final Integer maxNumThreads; - - private DataFrameAnalyticsConfigUpdate( - String id, - @Nullable String description, - @Nullable ByteSizeValue modelMemoryLimit, - @Nullable Boolean allowLazyStart, - @Nullable Integer maxNumThreads - ) { - this.id = id; - this.description = description; - this.modelMemoryLimit = modelMemoryLimit; - this.allowLazyStart = allowLazyStart; - this.maxNumThreads = maxNumThreads; - } - - public String getId() { - return id; - } - - public String getDescription() { - return description; - } - - public ByteSizeValue getModelMemoryLimit() { - return modelMemoryLimit; - } - - public Boolean isAllowLazyStart() { - return allowLazyStart; - } - - public Integer getMaxNumThreads() { - return maxNumThreads; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(DataFrameAnalyticsConfig.ID.getPreferredName(), id); - if (description != null) { - builder.field(DataFrameAnalyticsConfig.DESCRIPTION.getPreferredName(), description); - } - if (modelMemoryLimit != null) { - builder.field(DataFrameAnalyticsConfig.MODEL_MEMORY_LIMIT.getPreferredName(), modelMemoryLimit.getStringRep()); - } - if (allowLazyStart != null) { - builder.field(DataFrameAnalyticsConfig.ALLOW_LAZY_START.getPreferredName(), allowLazyStart); - } - if (maxNumThreads != null) { - builder.field(DataFrameAnalyticsConfig.MAX_NUM_THREADS.getPreferredName(), maxNumThreads); - } - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object other) { - if (this == other) { - return true; - } - - if (other instanceof DataFrameAnalyticsConfigUpdate == 
false) { - return false; - } - - DataFrameAnalyticsConfigUpdate that = (DataFrameAnalyticsConfigUpdate) other; - - return Objects.equals(this.id, that.id) - && Objects.equals(this.description, that.description) - && Objects.equals(this.modelMemoryLimit, that.modelMemoryLimit) - && Objects.equals(this.allowLazyStart, that.allowLazyStart) - && Objects.equals(this.maxNumThreads, that.maxNumThreads); - } - - @Override - public int hashCode() { - return Objects.hash(id, description, modelMemoryLimit, allowLazyStart, maxNumThreads); - } - - public static class Builder { - - private String id; - private String description; - private ByteSizeValue modelMemoryLimit; - private Boolean allowLazyStart; - private Integer maxNumThreads; - - private Builder() {} - - public String getId() { - return id; - } - - public Builder setId(String id) { - this.id = id; - return this; - } - - public Builder setDescription(String description) { - this.description = description; - return this; - } - - public Builder setModelMemoryLimit(ByteSizeValue modelMemoryLimit) { - this.modelMemoryLimit = modelMemoryLimit; - return this; - } - - public Builder setAllowLazyStart(Boolean allowLazyStart) { - this.allowLazyStart = allowLazyStart; - return this; - } - - public Builder setMaxNumThreads(Integer maxNumThreads) { - this.maxNumThreads = maxNumThreads; - return this; - } - - public DataFrameAnalyticsConfigUpdate build() { - return new DataFrameAnalyticsConfigUpdate(id, description, modelMemoryLimit, allowLazyStart, maxNumThreads); - } - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/DataFrameAnalyticsDest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/DataFrameAnalyticsDest.java deleted file mode 100644 index fe576411f131b..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/DataFrameAnalyticsDest.java +++ /dev/null @@ -1,111 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ - -package org.elasticsearch.client.ml.dataframe; - -import org.elasticsearch.common.Strings; -import org.elasticsearch.core.Nullable; -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Objects; - -import static java.util.Objects.requireNonNull; - -public class DataFrameAnalyticsDest implements ToXContentObject { - - public static DataFrameAnalyticsDest fromXContent(XContentParser parser) { - return PARSER.apply(parser, null).build(); - } - - public static Builder builder() { - return new Builder(); - } - - private static final ParseField INDEX = new ParseField("index"); - private static final ParseField RESULTS_FIELD = new ParseField("results_field"); - - private static final ObjectParser PARSER = new ObjectParser<>("data_frame_analytics_dest", true, Builder::new); - - static { - PARSER.declareString(Builder::setIndex, INDEX); - PARSER.declareString(Builder::setResultsField, RESULTS_FIELD); - } - - private final String index; - private final String resultsField; - - private DataFrameAnalyticsDest(String index, @Nullable String resultsField) { - this.index = requireNonNull(index); - this.resultsField = resultsField; - } - - public String getIndex() { - return index; - } - - public String getResultsField() { - return resultsField; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(INDEX.getPreferredName(), index); - if (resultsField != null) { - builder.field(RESULTS_FIELD.getPreferredName(), resultsField); - } - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object o) { - if (o == this) return true; - if (o == null || getClass() != o.getClass()) return false; - - DataFrameAnalyticsDest other = (DataFrameAnalyticsDest) o; - return Objects.equals(index, other.index) && Objects.equals(resultsField, other.resultsField); - } - - @Override - public int hashCode() { - return Objects.hash(index, resultsField); - } - - @Override - public String toString() { - return Strings.toString(this); - } - - public static class Builder { - - private String index; - private String resultsField; - - private Builder() {} - - public Builder setIndex(String index) { - this.index = index; - return this; - } - - public Builder setResultsField(String resultsField) { - this.resultsField = resultsField; - return this; - } - - public DataFrameAnalyticsDest build() { - return new DataFrameAnalyticsDest(index, resultsField); - } - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/DataFrameAnalyticsSource.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/DataFrameAnalyticsSource.java deleted file mode 100644 index da9cf7aa15b44..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/DataFrameAnalyticsSource.java +++ /dev/null @@ -1,165 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ - -package org.elasticsearch.client.ml.dataframe; - -import org.elasticsearch.common.Strings; -import org.elasticsearch.core.Nullable; -import org.elasticsearch.search.builder.SearchSourceBuilder; -import org.elasticsearch.search.fetch.subphase.FetchSourceContext; -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Arrays; -import java.util.List; -import java.util.Map; -import java.util.Objects; - -public class DataFrameAnalyticsSource implements ToXContentObject { - - public static DataFrameAnalyticsSource fromXContent(XContentParser parser) { - return PARSER.apply(parser, null).build(); - } - - public static Builder builder() { - return new Builder(); - } - - private static final ParseField INDEX = new ParseField("index"); - private static final ParseField QUERY = new ParseField("query"); - public static final ParseField _SOURCE = new ParseField("_source"); - - private static final ObjectParser PARSER = new ObjectParser<>("data_frame_analytics_source", true, Builder::new); - - static { - PARSER.declareStringArray(Builder::setIndex, INDEX); - PARSER.declareObject(Builder::setQueryConfig, (p, c) -> QueryConfig.fromXContent(p), QUERY); - PARSER.declareField( - Builder::setSourceFiltering, - (p, c) -> FetchSourceContext.fromXContent(p), - _SOURCE, - ObjectParser.ValueType.OBJECT_ARRAY_BOOLEAN_OR_STRING - ); - PARSER.declareObject(Builder::setRuntimeMappings, (p, c) -> p.map(), SearchSourceBuilder.RUNTIME_MAPPINGS_FIELD); - } - - private final String[] index; - private final QueryConfig queryConfig; - private final FetchSourceContext sourceFiltering; - private final Map runtimeMappings; - - private DataFrameAnalyticsSource( - String[] index, - @Nullable QueryConfig queryConfig, - @Nullable FetchSourceContext sourceFiltering, - @Nullable Map runtimeMappings - ) { - this.index = Objects.requireNonNull(index); - this.queryConfig = queryConfig; - this.sourceFiltering = sourceFiltering; - this.runtimeMappings = runtimeMappings; - } - - public String[] getIndex() { - return index; - } - - public QueryConfig getQueryConfig() { - return queryConfig; - } - - public FetchSourceContext getSourceFiltering() { - return sourceFiltering; - } - - public Map getRuntimeMappings() { - return runtimeMappings; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(INDEX.getPreferredName(), index); - if (queryConfig != null) { - builder.field(QUERY.getPreferredName(), queryConfig.getQuery()); - } - if (sourceFiltering != null) { - builder.field(_SOURCE.getPreferredName(), sourceFiltering); - } - if (runtimeMappings != null) { - builder.field(SearchSourceBuilder.RUNTIME_MAPPINGS_FIELD.getPreferredName(), runtimeMappings); - } - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object o) { - if (o == this) return true; - if (o == null || getClass() != o.getClass()) return false; - - DataFrameAnalyticsSource other = (DataFrameAnalyticsSource) o; - return Arrays.equals(index, other.index) - && Objects.equals(queryConfig, other.queryConfig) - && Objects.equals(sourceFiltering, other.sourceFiltering) - && Objects.equals(runtimeMappings, other.runtimeMappings); - } - - @Override - public int hashCode() { - return 
Objects.hash(Arrays.asList(index), queryConfig, sourceFiltering, runtimeMappings); - } - - @Override - public String toString() { - return Strings.toString(this); - } - - public static class Builder { - - private String[] index; - private QueryConfig queryConfig; - private FetchSourceContext sourceFiltering; - private Map runtimeMappings; - - private Builder() {} - - public Builder setIndex(String... index) { - this.index = index; - return this; - } - - public Builder setIndex(List index) { - this.index = index.toArray(new String[0]); - return this; - } - - public Builder setQueryConfig(QueryConfig queryConfig) { - this.queryConfig = queryConfig; - return this; - } - - public Builder setSourceFiltering(FetchSourceContext sourceFiltering) { - this.sourceFiltering = sourceFiltering; - return this; - } - - public Builder setRuntimeMappings(Map runtimeMappings) { - this.runtimeMappings = runtimeMappings; - return this; - } - - public DataFrameAnalyticsSource build() { - return new DataFrameAnalyticsSource(index, queryConfig, sourceFiltering, runtimeMappings); - } - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/DataFrameAnalyticsState.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/DataFrameAnalyticsState.java deleted file mode 100644 index 157ebe614f761..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/DataFrameAnalyticsState.java +++ /dev/null @@ -1,29 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ - -package org.elasticsearch.client.ml.dataframe; - -import java.util.Locale; - -public enum DataFrameAnalyticsState { - STARTED, - REINDEXING, - ANALYZING, - STOPPING, - STOPPED, - STARTING, - FAILED; - - public static DataFrameAnalyticsState fromString(String name) { - return valueOf(name.trim().toUpperCase(Locale.ROOT)); - } - - public String value() { - return name().toLowerCase(Locale.ROOT); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/DataFrameAnalyticsStats.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/DataFrameAnalyticsStats.java deleted file mode 100644 index 75eb216aed402..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/DataFrameAnalyticsStats.java +++ /dev/null @@ -1,188 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ - -package org.elasticsearch.client.ml.dataframe; - -import org.elasticsearch.client.ml.NodeAttributes; -import org.elasticsearch.client.ml.dataframe.stats.AnalysisStats; -import org.elasticsearch.client.ml.dataframe.stats.common.DataCounts; -import org.elasticsearch.client.ml.dataframe.stats.common.MemoryUsage; -import org.elasticsearch.common.inject.internal.ToStringBuilder; -import org.elasticsearch.common.xcontent.XContentParserUtils; -import org.elasticsearch.core.Nullable; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.List; -import java.util.Objects; - -import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; -import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; - -public class DataFrameAnalyticsStats { - - public static DataFrameAnalyticsStats fromXContent(XContentParser parser) throws IOException { - return PARSER.parse(parser, null); - } - - static final ParseField ID = new ParseField("id"); - static final ParseField STATE = new ParseField("state"); - static final ParseField FAILURE_REASON = new ParseField("failure_reason"); - static final ParseField PROGRESS = new ParseField("progress"); - static final ParseField DATA_COUNTS = new ParseField("data_counts"); - static final ParseField MEMORY_USAGE = new ParseField("memory_usage"); - static final ParseField ANALYSIS_STATS = new ParseField("analysis_stats"); - static final ParseField NODE = new ParseField("node"); - static final ParseField ASSIGNMENT_EXPLANATION = new ParseField("assignment_explanation"); - - @SuppressWarnings("unchecked") - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "data_frame_analytics_stats", - true, - args -> new DataFrameAnalyticsStats( - (String) args[0], - (DataFrameAnalyticsState) args[1], - (String) args[2], - (List) args[3], - (DataCounts) args[4], - (MemoryUsage) args[5], - (AnalysisStats) args[6], - (NodeAttributes) args[7], - (String) args[8] - ) - ); - - static { - PARSER.declareString(constructorArg(), ID); - PARSER.declareString(constructorArg(), DataFrameAnalyticsState::fromString, STATE); - PARSER.declareString(optionalConstructorArg(), FAILURE_REASON); - PARSER.declareObjectArray(optionalConstructorArg(), PhaseProgress.PARSER, PROGRESS); - PARSER.declareObject(optionalConstructorArg(), DataCounts.PARSER, DATA_COUNTS); - PARSER.declareObject(optionalConstructorArg(), MemoryUsage.PARSER, MEMORY_USAGE); - PARSER.declareObject(optionalConstructorArg(), (p, c) -> parseAnalysisStats(p), ANALYSIS_STATS); - PARSER.declareObject(optionalConstructorArg(), NodeAttributes.PARSER, NODE); - PARSER.declareString(optionalConstructorArg(), ASSIGNMENT_EXPLANATION); - } - - private static AnalysisStats parseAnalysisStats(XContentParser parser) throws IOException { - XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.currentToken(), parser); - XContentParserUtils.ensureExpectedToken(XContentParser.Token.FIELD_NAME, parser.nextToken(), parser); - AnalysisStats analysisStats = parser.namedObject(AnalysisStats.class, parser.currentName(), true); - XContentParserUtils.ensureExpectedToken(XContentParser.Token.END_OBJECT, parser.nextToken(), parser); - return analysisStats; - } - - private final String id; - private final DataFrameAnalyticsState state; - private final String failureReason; - private final List progress; - 
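    // Per-phase progress of the analytics job; the element type is
    // PhaseProgress (parsed above via PhaseProgress.PARSER), so the original
    // declaration reads List<PhaseProgress>.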
private final DataCounts dataCounts; - private final MemoryUsage memoryUsage; - private final AnalysisStats analysisStats; - private final NodeAttributes node; - private final String assignmentExplanation; - - public DataFrameAnalyticsStats( - String id, - DataFrameAnalyticsState state, - @Nullable String failureReason, - @Nullable List progress, - @Nullable DataCounts dataCounts, - @Nullable MemoryUsage memoryUsage, - @Nullable AnalysisStats analysisStats, - @Nullable NodeAttributes node, - @Nullable String assignmentExplanation - ) { - this.id = id; - this.state = state; - this.failureReason = failureReason; - this.progress = progress; - this.dataCounts = dataCounts; - this.memoryUsage = memoryUsage; - this.analysisStats = analysisStats; - this.node = node; - this.assignmentExplanation = assignmentExplanation; - } - - public String getId() { - return id; - } - - public DataFrameAnalyticsState getState() { - return state; - } - - public String getFailureReason() { - return failureReason; - } - - public List getProgress() { - return progress; - } - - @Nullable - public DataCounts getDataCounts() { - return dataCounts; - } - - @Nullable - public MemoryUsage getMemoryUsage() { - return memoryUsage; - } - - @Nullable - public AnalysisStats getAnalysisStats() { - return analysisStats; - } - - public NodeAttributes getNode() { - return node; - } - - public String getAssignmentExplanation() { - return assignmentExplanation; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - - DataFrameAnalyticsStats other = (DataFrameAnalyticsStats) o; - return Objects.equals(id, other.id) - && Objects.equals(state, other.state) - && Objects.equals(failureReason, other.failureReason) - && Objects.equals(progress, other.progress) - && Objects.equals(dataCounts, other.dataCounts) - && Objects.equals(memoryUsage, other.memoryUsage) - && Objects.equals(analysisStats, other.analysisStats) - && Objects.equals(node, other.node) - && Objects.equals(assignmentExplanation, other.assignmentExplanation); - } - - @Override - public int hashCode() { - return Objects.hash(id, state, failureReason, progress, dataCounts, memoryUsage, analysisStats, node, assignmentExplanation); - } - - @Override - public String toString() { - return new ToStringBuilder(getClass()).add("id", id) - .add("state", state) - .add("failureReason", failureReason) - .add("progress", progress) - .add("dataCounts", dataCounts) - .add("memoryUsage", memoryUsage) - .add("analysisStats", analysisStats) - .add("node", node) - .add("assignmentExplanation", assignmentExplanation) - .toString(); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/MlDataFrameAnalysisNamedXContentProvider.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/MlDataFrameAnalysisNamedXContentProvider.java deleted file mode 100644 index 562409b53df8d..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/MlDataFrameAnalysisNamedXContentProvider.java +++ /dev/null @@ -1,26 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ -package org.elasticsearch.client.ml.dataframe; - -import org.elasticsearch.plugins.spi.NamedXContentProvider; -import org.elasticsearch.xcontent.NamedXContentRegistry; - -import java.util.Arrays; -import java.util.List; - -public class MlDataFrameAnalysisNamedXContentProvider implements NamedXContentProvider { - - @Override - public List getNamedXContentParsers() { - return Arrays.asList( - new NamedXContentRegistry.Entry(DataFrameAnalysis.class, OutlierDetection.NAME, (p, c) -> OutlierDetection.fromXContent(p)), - new NamedXContentRegistry.Entry(DataFrameAnalysis.class, Regression.NAME, (p, c) -> Regression.fromXContent(p)), - new NamedXContentRegistry.Entry(DataFrameAnalysis.class, Classification.NAME, (p, c) -> Classification.fromXContent(p)) - ); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/OutlierDetection.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/OutlierDetection.java deleted file mode 100644 index d514de3558db6..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/OutlierDetection.java +++ /dev/null @@ -1,253 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ - -package org.elasticsearch.client.ml.dataframe; - -import org.elasticsearch.common.Strings; -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Locale; -import java.util.Objects; - -public class OutlierDetection implements DataFrameAnalysis { - - public static OutlierDetection fromXContent(XContentParser parser) { - return PARSER.apply(parser, null).build(); - } - - public static OutlierDetection createDefault() { - return builder().build(); - } - - public static Builder builder() { - return new Builder(); - } - - public static final ParseField NAME = new ParseField("outlier_detection"); - static final ParseField N_NEIGHBORS = new ParseField("n_neighbors"); - static final ParseField METHOD = new ParseField("method"); - public static final ParseField FEATURE_INFLUENCE_THRESHOLD = new ParseField("feature_influence_threshold"); - static final ParseField COMPUTE_FEATURE_INFLUENCE = new ParseField("compute_feature_influence"); - static final ParseField OUTLIER_FRACTION = new ParseField("outlier_fraction"); - static final ParseField STANDARDIZATION_ENABLED = new ParseField("standardization_enabled"); - - private static final ObjectParser PARSER = new ObjectParser<>(NAME.getPreferredName(), true, Builder::new); - - static { - PARSER.declareInt(Builder::setNNeighbors, N_NEIGHBORS); - PARSER.declareString(Builder::setMethod, Method::fromString, METHOD); - PARSER.declareDouble(Builder::setFeatureInfluenceThreshold, FEATURE_INFLUENCE_THRESHOLD); - PARSER.declareBoolean(Builder::setComputeFeatureInfluence, COMPUTE_FEATURE_INFLUENCE); - PARSER.declareDouble(Builder::setOutlierFraction, OUTLIER_FRACTION); - PARSER.declareBoolean(Builder::setStandardizationEnabled, STANDARDIZATION_ENABLED); - } - - /** - * The number of neighbors. Leave unspecified for dynamic detection. 
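     * When unset, the number of neighbors is typically varied across the
     * ensemble rather than fixed to a single value.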
- */ - private final Integer nNeighbors; - - /** - * The method. Leave unspecified for a dynamic mixture of methods. - */ - private final Method method; - - /** - * The min outlier score required to calculate feature influence. Defaults to 0.1. - */ - private final Double featureInfluenceThreshold; - - /** - * Whether to compute feature influence or not. Defaults to true. - */ - private final Boolean computeFeatureInfluence; - - /** - * The proportion of data assumed to be outlying prior to outlier detection. Defaults to 0.05. - */ - private final Double outlierFraction; - - /** - * Whether to perform standardization. - */ - private final Boolean standardizationEnabled; - - private OutlierDetection( - Integer nNeighbors, - Method method, - Double featureInfluenceThreshold, - Boolean computeFeatureInfluence, - Double outlierFraction, - Boolean standardizationEnabled - ) { - this.nNeighbors = nNeighbors; - this.method = method; - this.featureInfluenceThreshold = featureInfluenceThreshold; - this.computeFeatureInfluence = computeFeatureInfluence; - this.outlierFraction = outlierFraction; - this.standardizationEnabled = standardizationEnabled; - } - - @Override - public String getName() { - return NAME.getPreferredName(); - } - - public Integer getNNeighbors() { - return nNeighbors; - } - - public Method getMethod() { - return method; - } - - public Double getFeatureInfluenceThreshold() { - return featureInfluenceThreshold; - } - - public Boolean getComputeFeatureInfluence() { - return computeFeatureInfluence; - } - - public Double getOutlierFraction() { - return outlierFraction; - } - - public Boolean getStandardizationEnabled() { - return standardizationEnabled; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - if (nNeighbors != null) { - builder.field(N_NEIGHBORS.getPreferredName(), nNeighbors); - } - if (method != null) { - builder.field(METHOD.getPreferredName(), method); - } - if (featureInfluenceThreshold != null) { - builder.field(FEATURE_INFLUENCE_THRESHOLD.getPreferredName(), featureInfluenceThreshold); - } - if (computeFeatureInfluence != null) { - builder.field(COMPUTE_FEATURE_INFLUENCE.getPreferredName(), computeFeatureInfluence); - } - if (outlierFraction != null) { - builder.field(OUTLIER_FRACTION.getPreferredName(), outlierFraction); - } - if (standardizationEnabled != null) { - builder.field(STANDARDIZATION_ENABLED.getPreferredName(), standardizationEnabled); - } - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - - OutlierDetection other = (OutlierDetection) o; - return Objects.equals(nNeighbors, other.nNeighbors) - && Objects.equals(method, other.method) - && Objects.equals(featureInfluenceThreshold, other.featureInfluenceThreshold) - && Objects.equals(computeFeatureInfluence, other.computeFeatureInfluence) - && Objects.equals(outlierFraction, other.outlierFraction) - && Objects.equals(standardizationEnabled, other.standardizationEnabled); - } - - @Override - public int hashCode() { - return Objects.hash( - nNeighbors, - method, - featureInfluenceThreshold, - computeFeatureInfluence, - outlierFraction, - standardizationEnabled - ); - } - - @Override - public String toString() { - return Strings.toString(this); - } - - public enum Method { - LOF, - LDOF, - DISTANCE_KTH_NN, - DISTANCE_KNN; - - public static Method fromString(String value) { - return 
Method.valueOf(value.toUpperCase(Locale.ROOT)); - } - - @Override - public String toString() { - return name().toLowerCase(Locale.ROOT); - } - } - - public static class Builder { - - private Integer nNeighbors; - private Method method; - private Double featureInfluenceThreshold; - private Boolean computeFeatureInfluence; - private Double outlierFraction; - private Boolean standardizationEnabled; - - private Builder() {} - - public Builder setNNeighbors(Integer nNeighborsValue) { - this.nNeighbors = nNeighborsValue; - return this; - } - - public Builder setMethod(Method method) { - this.method = method; - return this; - } - - public Builder setFeatureInfluenceThreshold(Double featureInfluenceThreshold) { - this.featureInfluenceThreshold = featureInfluenceThreshold; - return this; - } - - public Builder setComputeFeatureInfluence(Boolean computeFeatureInfluence) { - this.computeFeatureInfluence = computeFeatureInfluence; - return this; - } - - public Builder setOutlierFraction(Double outlierFraction) { - this.outlierFraction = outlierFraction; - return this; - } - - public Builder setStandardizationEnabled(Boolean standardizationEnabled) { - this.standardizationEnabled = standardizationEnabled; - return this; - } - - public OutlierDetection build() { - return new OutlierDetection( - nNeighbors, - method, - featureInfluenceThreshold, - computeFeatureInfluence, - outlierFraction, - standardizationEnabled - ); - } - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/PhaseProgress.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/PhaseProgress.java deleted file mode 100644 index f8d629586d2e7..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/PhaseProgress.java +++ /dev/null @@ -1,82 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ -package org.elasticsearch.client.ml.dataframe; - -import org.elasticsearch.common.inject.internal.ToStringBuilder; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Objects; - -/** - * A class that describes a phase and its progress as a percentage - */ -public class PhaseProgress implements ToXContentObject { - - static final ParseField PHASE = new ParseField("phase"); - static final ParseField PROGRESS_PERCENT = new ParseField("progress_percent"); - - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "phase_progress", - true, - a -> new PhaseProgress((String) a[0], (int) a[1]) - ); - - static { - PARSER.declareString(ConstructingObjectParser.constructorArg(), PHASE); - PARSER.declareInt(ConstructingObjectParser.constructorArg(), PROGRESS_PERCENT); - } - - private final String phase; - private final int progressPercent; - - public PhaseProgress(String phase, int progressPercent) { - this.phase = Objects.requireNonNull(phase); - this.progressPercent = progressPercent; - } - - public String getPhase() { - return phase; - } - - public int getProgressPercent() { - return progressPercent; - } - - @Override - public int hashCode() { - return Objects.hash(phase, progressPercent); - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - PhaseProgress that = (PhaseProgress) o; - return Objects.equals(phase, that.phase) && progressPercent == that.progressPercent; - } - - @Override - public String toString() { - return new ToStringBuilder(getClass()).add(PHASE.getPreferredName(), phase) - .add(PROGRESS_PERCENT.getPreferredName(), progressPercent) - .toString(); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(PhaseProgress.PHASE.getPreferredName(), phase); - builder.field(PhaseProgress.PROGRESS_PERCENT.getPreferredName(), progressPercent); - builder.endObject(); - return builder; - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/QueryConfig.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/QueryConfig.java deleted file mode 100644 index 73dd0d82b2221..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/QueryConfig.java +++ /dev/null @@ -1,71 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ - -package org.elasticsearch.client.ml.dataframe; - -import org.elasticsearch.common.Strings; -import org.elasticsearch.index.query.AbstractQueryBuilder; -import org.elasticsearch.index.query.QueryBuilder; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Objects; - -import static java.util.Objects.requireNonNull; - -/** - * Object for encapsulating the desired Query for a DataFrameAnalysis - */ -public class QueryConfig implements ToXContentObject { - - public static QueryConfig fromXContent(XContentParser parser) throws IOException { - QueryBuilder query = AbstractQueryBuilder.parseInnerQueryBuilder(parser); - return new QueryConfig(query); - } - - private final QueryBuilder query; - - public QueryConfig(QueryBuilder query) { - this.query = requireNonNull(query); - } - - public QueryConfig(QueryConfig queryConfig) { - this(requireNonNull(queryConfig).query); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - query.toXContent(builder, params); - return builder; - } - - public QueryBuilder getQuery() { - return query; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - - QueryConfig other = (QueryConfig) o; - return Objects.equals(query, other.query); - } - - @Override - public int hashCode() { - return Objects.hash(query); - } - - @Override - public String toString() { - return Strings.toString(this); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/Regression.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/Regression.java deleted file mode 100644 index 04f61d09305c2..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/Regression.java +++ /dev/null @@ -1,549 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ -package org.elasticsearch.client.ml.dataframe; - -import org.elasticsearch.client.ml.inference.NamedXContentObjectHelper; -import org.elasticsearch.client.ml.inference.preprocessing.PreProcessor; -import org.elasticsearch.common.Strings; -import org.elasticsearch.core.Nullable; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.List; -import java.util.Locale; -import java.util.Objects; - -import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; - -public class Regression implements DataFrameAnalysis { - - public static Regression fromXContent(XContentParser parser) { - return PARSER.apply(parser, null); - } - - public static Builder builder(String dependentVariable) { - return new Builder(dependentVariable); - } - - public static final ParseField NAME = new ParseField("regression"); - - static final ParseField DEPENDENT_VARIABLE = new ParseField("dependent_variable"); - static final ParseField LAMBDA = new ParseField("lambda"); - static final ParseField GAMMA = new ParseField("gamma"); - static final ParseField ETA = new ParseField("eta"); - static final ParseField MAX_TREES = new ParseField("max_trees"); - static final ParseField FEATURE_BAG_FRACTION = new ParseField("feature_bag_fraction"); - static final ParseField NUM_TOP_FEATURE_IMPORTANCE_VALUES = new ParseField("num_top_feature_importance_values"); - static final ParseField PREDICTION_FIELD_NAME = new ParseField("prediction_field_name"); - static final ParseField TRAINING_PERCENT = new ParseField("training_percent"); - static final ParseField RANDOMIZE_SEED = new ParseField("randomize_seed"); - static final ParseField LOSS_FUNCTION = new ParseField("loss_function"); - static final ParseField LOSS_FUNCTION_PARAMETER = new ParseField("loss_function_parameter"); - static final ParseField FEATURE_PROCESSORS = new ParseField("feature_processors"); - static final ParseField ALPHA = new ParseField("alpha"); - static final ParseField ETA_GROWTH_RATE_PER_TREE = new ParseField("eta_growth_rate_per_tree"); - static final ParseField SOFT_TREE_DEPTH_LIMIT = new ParseField("soft_tree_depth_limit"); - static final ParseField SOFT_TREE_DEPTH_TOLERANCE = new ParseField("soft_tree_depth_tolerance"); - static final ParseField DOWNSAMPLE_FACTOR = new ParseField("downsample_factor"); - static final ParseField MAX_OPTIMIZATION_ROUNDS_PER_HYPERPARAMETER = new ParseField("max_optimization_rounds_per_hyperparameter"); - static final ParseField EARLY_STOPPING_ENABLED = new ParseField("early_stopping_enabled"); - - @SuppressWarnings("unchecked") - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - NAME.getPreferredName(), - true, - a -> new Regression( - (String) a[0], - (Double) a[1], - (Double) a[2], - (Double) a[3], - (Integer) a[4], - (Double) a[5], - (Integer) a[6], - (String) a[7], - (Double) a[8], - (Long) a[9], - (LossFunction) a[10], - (Double) a[11], - (List) a[12], - (Double) a[13], - (Double) a[14], - (Double) a[15], - (Double) a[16], - (Double) a[17], - (Integer) a[18], - (Boolean) a[19] - ) - ); - - static { - PARSER.declareString(ConstructingObjectParser.constructorArg(), DEPENDENT_VARIABLE); - PARSER.declareDouble(ConstructingObjectParser.optionalConstructorArg(), LAMBDA); - PARSER.declareDouble(ConstructingObjectParser.optionalConstructorArg(), GAMMA); - 
PARSER.declareDouble(ConstructingObjectParser.optionalConstructorArg(), ETA); - PARSER.declareInt(ConstructingObjectParser.optionalConstructorArg(), MAX_TREES); - PARSER.declareDouble(ConstructingObjectParser.optionalConstructorArg(), FEATURE_BAG_FRACTION); - PARSER.declareInt(ConstructingObjectParser.optionalConstructorArg(), NUM_TOP_FEATURE_IMPORTANCE_VALUES); - PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), PREDICTION_FIELD_NAME); - PARSER.declareDouble(ConstructingObjectParser.optionalConstructorArg(), TRAINING_PERCENT); - PARSER.declareLong(ConstructingObjectParser.optionalConstructorArg(), RANDOMIZE_SEED); - PARSER.declareString(optionalConstructorArg(), LossFunction::fromString, LOSS_FUNCTION); - PARSER.declareDouble(ConstructingObjectParser.optionalConstructorArg(), LOSS_FUNCTION_PARAMETER); - PARSER.declareNamedObjects( - ConstructingObjectParser.optionalConstructorArg(), - (p, c, n) -> p.namedObject(PreProcessor.class, n, c), - (regression) -> {}, - FEATURE_PROCESSORS - ); - PARSER.declareDouble(ConstructingObjectParser.optionalConstructorArg(), ALPHA); - PARSER.declareDouble(ConstructingObjectParser.optionalConstructorArg(), ETA_GROWTH_RATE_PER_TREE); - PARSER.declareDouble(ConstructingObjectParser.optionalConstructorArg(), SOFT_TREE_DEPTH_LIMIT); - PARSER.declareDouble(ConstructingObjectParser.optionalConstructorArg(), SOFT_TREE_DEPTH_TOLERANCE); - PARSER.declareDouble(ConstructingObjectParser.optionalConstructorArg(), DOWNSAMPLE_FACTOR); - PARSER.declareInt(ConstructingObjectParser.optionalConstructorArg(), MAX_OPTIMIZATION_ROUNDS_PER_HYPERPARAMETER); - PARSER.declareBoolean(ConstructingObjectParser.optionalConstructorArg(), EARLY_STOPPING_ENABLED); - } - - private final String dependentVariable; - private final Double lambda; - private final Double gamma; - private final Double eta; - private final Integer maxTrees; - private final Double featureBagFraction; - private final Integer numTopFeatureImportanceValues; - private final String predictionFieldName; - private final Double trainingPercent; - private final Long randomizeSeed; - private final LossFunction lossFunction; - private final Double lossFunctionParameter; - private final List featureProcessors; - private final Double alpha; - private final Double etaGrowthRatePerTree; - private final Double softTreeDepthLimit; - private final Double softTreeDepthTolerance; - private final Double downsampleFactor; - private final Integer maxOptimizationRoundsPerHyperparameter; - private final Boolean earlyStoppingEnabled; - - private Regression( - String dependentVariable, - @Nullable Double lambda, - @Nullable Double gamma, - @Nullable Double eta, - @Nullable Integer maxTrees, - @Nullable Double featureBagFraction, - @Nullable Integer numTopFeatureImportanceValues, - @Nullable String predictionFieldName, - @Nullable Double trainingPercent, - @Nullable Long randomizeSeed, - @Nullable LossFunction lossFunction, - @Nullable Double lossFunctionParameter, - @Nullable List featureProcessors, - @Nullable Double alpha, - @Nullable Double etaGrowthRatePerTree, - @Nullable Double softTreeDepthLimit, - @Nullable Double softTreeDepthTolerance, - @Nullable Double downsampleFactor, - @Nullable Integer maxOptimizationRoundsPerHyperparameter, - @Nullable Boolean earlyStoppingEnabled - ) { - this.dependentVariable = Objects.requireNonNull(dependentVariable); - this.lambda = lambda; - this.gamma = gamma; - this.eta = eta; - this.maxTrees = maxTrees; - this.featureBagFraction = featureBagFraction; - 
this.numTopFeatureImportanceValues = numTopFeatureImportanceValues; - this.predictionFieldName = predictionFieldName; - this.trainingPercent = trainingPercent; - this.randomizeSeed = randomizeSeed; - this.lossFunction = lossFunction; - this.lossFunctionParameter = lossFunctionParameter; - this.featureProcessors = featureProcessors; - this.alpha = alpha; - this.etaGrowthRatePerTree = etaGrowthRatePerTree; - this.softTreeDepthLimit = softTreeDepthLimit; - this.softTreeDepthTolerance = softTreeDepthTolerance; - this.downsampleFactor = downsampleFactor; - this.maxOptimizationRoundsPerHyperparameter = maxOptimizationRoundsPerHyperparameter; - this.earlyStoppingEnabled = earlyStoppingEnabled; - } - - @Override - public String getName() { - return NAME.getPreferredName(); - } - - public String getDependentVariable() { - return dependentVariable; - } - - public Double getLambda() { - return lambda; - } - - public Double getGamma() { - return gamma; - } - - public Double getEta() { - return eta; - } - - public Integer getMaxTrees() { - return maxTrees; - } - - public Double getFeatureBagFraction() { - return featureBagFraction; - } - - public Integer getNumTopFeatureImportanceValues() { - return numTopFeatureImportanceValues; - } - - public String getPredictionFieldName() { - return predictionFieldName; - } - - public Double getTrainingPercent() { - return trainingPercent; - } - - public Long getRandomizeSeed() { - return randomizeSeed; - } - - public LossFunction getLossFunction() { - return lossFunction; - } - - public Double getLossFunctionParameter() { - return lossFunctionParameter; - } - - public List getFeatureProcessors() { - return featureProcessors; - } - - public Double getAlpha() { - return alpha; - } - - public Double getEtaGrowthRatePerTree() { - return etaGrowthRatePerTree; - } - - public Double getSoftTreeDepthLimit() { - return softTreeDepthLimit; - } - - public Double getSoftTreeDepthTolerance() { - return softTreeDepthTolerance; - } - - public Double getDownsampleFactor() { - return downsampleFactor; - } - - public Integer getMaxOptimizationRoundsPerHyperparameter() { - return maxOptimizationRoundsPerHyperparameter; - } - - public Boolean getEarlyStoppingEnabled() { - return earlyStoppingEnabled; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(DEPENDENT_VARIABLE.getPreferredName(), dependentVariable); - if (lambda != null) { - builder.field(LAMBDA.getPreferredName(), lambda); - } - if (gamma != null) { - builder.field(GAMMA.getPreferredName(), gamma); - } - if (eta != null) { - builder.field(ETA.getPreferredName(), eta); - } - if (maxTrees != null) { - builder.field(MAX_TREES.getPreferredName(), maxTrees); - } - if (featureBagFraction != null) { - builder.field(FEATURE_BAG_FRACTION.getPreferredName(), featureBagFraction); - } - if (numTopFeatureImportanceValues != null) { - builder.field(NUM_TOP_FEATURE_IMPORTANCE_VALUES.getPreferredName(), numTopFeatureImportanceValues); - } - if (predictionFieldName != null) { - builder.field(PREDICTION_FIELD_NAME.getPreferredName(), predictionFieldName); - } - if (trainingPercent != null) { - builder.field(TRAINING_PERCENT.getPreferredName(), trainingPercent); - } - if (randomizeSeed != null) { - builder.field(RANDOMIZE_SEED.getPreferredName(), randomizeSeed); - } - if (lossFunction != null) { - builder.field(LOSS_FUNCTION.getPreferredName(), lossFunction); - } - if (lossFunctionParameter != null) { - 
builder.field(LOSS_FUNCTION_PARAMETER.getPreferredName(), lossFunctionParameter); - } - if (featureProcessors != null) { - NamedXContentObjectHelper.writeNamedObjects(builder, params, true, FEATURE_PROCESSORS.getPreferredName(), featureProcessors); - } - if (alpha != null) { - builder.field(ALPHA.getPreferredName(), alpha); - } - if (etaGrowthRatePerTree != null) { - builder.field(ETA_GROWTH_RATE_PER_TREE.getPreferredName(), etaGrowthRatePerTree); - } - if (softTreeDepthLimit != null) { - builder.field(SOFT_TREE_DEPTH_LIMIT.getPreferredName(), softTreeDepthLimit); - } - if (softTreeDepthTolerance != null) { - builder.field(SOFT_TREE_DEPTH_TOLERANCE.getPreferredName(), softTreeDepthTolerance); - } - if (downsampleFactor != null) { - builder.field(DOWNSAMPLE_FACTOR.getPreferredName(), downsampleFactor); - } - if (maxOptimizationRoundsPerHyperparameter != null) { - builder.field(MAX_OPTIMIZATION_ROUNDS_PER_HYPERPARAMETER.getPreferredName(), maxOptimizationRoundsPerHyperparameter); - } - if (earlyStoppingEnabled != null) { - builder.field(EARLY_STOPPING_ENABLED.getPreferredName(), earlyStoppingEnabled); - } - builder.endObject(); - return builder; - } - - @Override - public int hashCode() { - return Objects.hash( - dependentVariable, - lambda, - gamma, - eta, - maxTrees, - featureBagFraction, - numTopFeatureImportanceValues, - predictionFieldName, - trainingPercent, - randomizeSeed, - lossFunction, - lossFunctionParameter, - featureProcessors, - alpha, - etaGrowthRatePerTree, - softTreeDepthLimit, - softTreeDepthTolerance, - downsampleFactor, - maxOptimizationRoundsPerHyperparameter, - earlyStoppingEnabled - ); - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - Regression that = (Regression) o; - return Objects.equals(dependentVariable, that.dependentVariable) - && Objects.equals(lambda, that.lambda) - && Objects.equals(gamma, that.gamma) - && Objects.equals(eta, that.eta) - && Objects.equals(maxTrees, that.maxTrees) - && Objects.equals(featureBagFraction, that.featureBagFraction) - && Objects.equals(numTopFeatureImportanceValues, that.numTopFeatureImportanceValues) - && Objects.equals(predictionFieldName, that.predictionFieldName) - && Objects.equals(trainingPercent, that.trainingPercent) - && Objects.equals(randomizeSeed, that.randomizeSeed) - && Objects.equals(lossFunction, that.lossFunction) - && Objects.equals(lossFunctionParameter, that.lossFunctionParameter) - && Objects.equals(featureProcessors, that.featureProcessors) - && Objects.equals(alpha, that.alpha) - && Objects.equals(etaGrowthRatePerTree, that.etaGrowthRatePerTree) - && Objects.equals(softTreeDepthLimit, that.softTreeDepthLimit) - && Objects.equals(softTreeDepthTolerance, that.softTreeDepthTolerance) - && Objects.equals(downsampleFactor, that.downsampleFactor) - && Objects.equals(maxOptimizationRoundsPerHyperparameter, that.maxOptimizationRoundsPerHyperparameter) - && Objects.equals(earlyStoppingEnabled, that.earlyStoppingEnabled); - } - - @Override - public String toString() { - return Strings.toString(this); - } - - public static class Builder { - private String dependentVariable; - private Double lambda; - private Double gamma; - private Double eta; - private Integer maxTrees; - private Double featureBagFraction; - private Integer numTopFeatureImportanceValues; - private String predictionFieldName; - private Double trainingPercent; - private Long randomizeSeed; - private LossFunction lossFunction; - private Double 
lossFunctionParameter; - private List featureProcessors; - private Double alpha; - private Double etaGrowthRatePerTree; - private Double softTreeDepthLimit; - private Double softTreeDepthTolerance; - private Double downsampleFactor; - private Integer maxOptimizationRoundsPerHyperparameter; - private Boolean earlyStoppingEnabled; - - private Builder(String dependentVariable) { - this.dependentVariable = Objects.requireNonNull(dependentVariable); - } - - public Builder setLambda(Double lambda) { - this.lambda = lambda; - return this; - } - - public Builder setGamma(Double gamma) { - this.gamma = gamma; - return this; - } - - public Builder setEta(Double eta) { - this.eta = eta; - return this; - } - - public Builder setMaxTrees(Integer maxTrees) { - this.maxTrees = maxTrees; - return this; - } - - public Builder setFeatureBagFraction(Double featureBagFraction) { - this.featureBagFraction = featureBagFraction; - return this; - } - - public Builder setNumTopFeatureImportanceValues(Integer numTopFeatureImportanceValues) { - this.numTopFeatureImportanceValues = numTopFeatureImportanceValues; - return this; - } - - public Builder setPredictionFieldName(String predictionFieldName) { - this.predictionFieldName = predictionFieldName; - return this; - } - - public Builder setTrainingPercent(Double trainingPercent) { - this.trainingPercent = trainingPercent; - return this; - } - - public Builder setRandomizeSeed(Long randomizeSeed) { - this.randomizeSeed = randomizeSeed; - return this; - } - - public Builder setLossFunction(LossFunction lossFunction) { - this.lossFunction = lossFunction; - return this; - } - - public Builder setLossFunctionParameter(Double lossFunctionParameter) { - this.lossFunctionParameter = lossFunctionParameter; - return this; - } - - public Builder setFeatureProcessors(List featureProcessors) { - this.featureProcessors = featureProcessors; - return this; - } - - public Builder setAlpha(Double alpha) { - this.alpha = alpha; - return this; - } - - public Builder setEtaGrowthRatePerTree(Double etaGrowthRatePerTree) { - this.etaGrowthRatePerTree = etaGrowthRatePerTree; - return this; - } - - public Builder setSoftTreeDepthLimit(Double softTreeDepthLimit) { - this.softTreeDepthLimit = softTreeDepthLimit; - return this; - } - - public Builder setSoftTreeDepthTolerance(Double softTreeDepthTolerance) { - this.softTreeDepthTolerance = softTreeDepthTolerance; - return this; - } - - public Builder setDownsampleFactor(Double downsampleFactor) { - this.downsampleFactor = downsampleFactor; - return this; - } - - public Builder setMaxOptimizationRoundsPerHyperparameter(Integer maxOptimizationRoundsPerHyperparameter) { - this.maxOptimizationRoundsPerHyperparameter = maxOptimizationRoundsPerHyperparameter; - return this; - } - - public Builder setEarlyStoppingEnabled(Boolean earlyStoppingEnabled) { - this.earlyStoppingEnabled = earlyStoppingEnabled; - return this; - } - - public Regression build() { - return new Regression( - dependentVariable, - lambda, - gamma, - eta, - maxTrees, - featureBagFraction, - numTopFeatureImportanceValues, - predictionFieldName, - trainingPercent, - randomizeSeed, - lossFunction, - lossFunctionParameter, - featureProcessors, - alpha, - etaGrowthRatePerTree, - softTreeDepthLimit, - softTreeDepthTolerance, - downsampleFactor, - maxOptimizationRoundsPerHyperparameter, - earlyStoppingEnabled - ); - } - } - - public enum LossFunction { - MSE, - MSLE, - HUBER; - - private static LossFunction fromString(String value) { - return 
LossFunction.valueOf(value.toUpperCase(Locale.ROOT)); - } - - @Override - public String toString() { - return name().toLowerCase(Locale.ROOT); - } - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/Evaluation.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/Evaluation.java deleted file mode 100644 index e5f3189a5920f..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/Evaluation.java +++ /dev/null @@ -1,21 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.dataframe.evaluation; - -import org.elasticsearch.xcontent.ToXContentObject; - -/** - * Defines an evaluation - */ -public interface Evaluation extends ToXContentObject { - - /** - * Returns the evaluation name - */ - String getName(); -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/EvaluationMetric.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/EvaluationMetric.java deleted file mode 100644 index daa1051a92b9b..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/EvaluationMetric.java +++ /dev/null @@ -1,32 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.dataframe.evaluation; - -import org.elasticsearch.xcontent.ToXContentObject; - -/** - * Defines an evaluation metric - */ -public interface EvaluationMetric extends ToXContentObject { - - /** - * Returns the name of the metric - */ - String getName(); - - /** - * The result of an evaluation metric - */ - interface Result extends ToXContentObject { - - /** - * Returns the name of the metric - */ - String getMetricName(); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/MlEvaluationNamedXContentProvider.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/MlEvaluationNamedXContentProvider.java deleted file mode 100644 index da1d66785f386..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/MlEvaluationNamedXContentProvider.java +++ /dev/null @@ -1,216 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
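Every hyperparameter of the removed Regression analysis above is optional; only the dependent variable is required, which is why instances are created through the fluent builder. A minimal sketch of the intended usage (field name and parameter values are illustrative):

    Regression regression = Regression.builder("price")
        .setTrainingPercent(80.0)
        .setMaxTrees(500)
        .setLossFunction(Regression.LossFunction.MSE)
        .build();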
- */ -package org.elasticsearch.client.ml.dataframe.evaluation; - -import org.elasticsearch.client.ml.dataframe.evaluation.classification.AccuracyMetric; -import org.elasticsearch.client.ml.dataframe.evaluation.classification.AucRocMetric; -import org.elasticsearch.client.ml.dataframe.evaluation.classification.Classification; -import org.elasticsearch.client.ml.dataframe.evaluation.classification.MulticlassConfusionMatrixMetric; -import org.elasticsearch.client.ml.dataframe.evaluation.classification.PrecisionMetric; -import org.elasticsearch.client.ml.dataframe.evaluation.classification.RecallMetric; -import org.elasticsearch.client.ml.dataframe.evaluation.common.AucRocResult; -import org.elasticsearch.client.ml.dataframe.evaluation.outlierdetection.ConfusionMatrixMetric; -import org.elasticsearch.client.ml.dataframe.evaluation.outlierdetection.OutlierDetection; -import org.elasticsearch.client.ml.dataframe.evaluation.regression.HuberMetric; -import org.elasticsearch.client.ml.dataframe.evaluation.regression.MeanSquaredErrorMetric; -import org.elasticsearch.client.ml.dataframe.evaluation.regression.MeanSquaredLogarithmicErrorMetric; -import org.elasticsearch.client.ml.dataframe.evaluation.regression.RSquaredMetric; -import org.elasticsearch.client.ml.dataframe.evaluation.regression.Regression; -import org.elasticsearch.plugins.spi.NamedXContentProvider; -import org.elasticsearch.xcontent.NamedXContentRegistry; -import org.elasticsearch.xcontent.ParseField; - -import java.util.Arrays; -import java.util.List; - -public class MlEvaluationNamedXContentProvider implements NamedXContentProvider { - - /** - * Constructs the name under which a metric (or metric result) is registered. - * The name is prefixed with evaluation name so that registered names are unique. - * - * @param evaluationName name of the evaluation - * @param metricName name of the metric - * @return name appropriate for registering a metric (or metric result) in {@link NamedXContentRegistry} - */ - public static String registeredMetricName(String evaluationName, String metricName) { - return evaluationName + "." 
+ metricName; - } - - @Override - public List getNamedXContentParsers() { - return Arrays.asList( - // Evaluations - new NamedXContentRegistry.Entry(Evaluation.class, new ParseField(OutlierDetection.NAME), OutlierDetection::fromXContent), - new NamedXContentRegistry.Entry(Evaluation.class, new ParseField(Classification.NAME), Classification::fromXContent), - new NamedXContentRegistry.Entry(Evaluation.class, new ParseField(Regression.NAME), Regression::fromXContent), - // Evaluation metrics - new NamedXContentRegistry.Entry( - EvaluationMetric.class, - new ParseField( - registeredMetricName( - OutlierDetection.NAME, - org.elasticsearch.client.ml.dataframe.evaluation.outlierdetection.AucRocMetric.NAME - ) - ), - org.elasticsearch.client.ml.dataframe.evaluation.outlierdetection.AucRocMetric::fromXContent - ), - new NamedXContentRegistry.Entry( - EvaluationMetric.class, - new ParseField( - registeredMetricName( - OutlierDetection.NAME, - org.elasticsearch.client.ml.dataframe.evaluation.outlierdetection.PrecisionMetric.NAME - ) - ), - org.elasticsearch.client.ml.dataframe.evaluation.outlierdetection.PrecisionMetric::fromXContent - ), - new NamedXContentRegistry.Entry( - EvaluationMetric.class, - new ParseField( - registeredMetricName( - OutlierDetection.NAME, - org.elasticsearch.client.ml.dataframe.evaluation.outlierdetection.RecallMetric.NAME - ) - ), - org.elasticsearch.client.ml.dataframe.evaluation.outlierdetection.RecallMetric::fromXContent - ), - new NamedXContentRegistry.Entry( - EvaluationMetric.class, - new ParseField(registeredMetricName(OutlierDetection.NAME, ConfusionMatrixMetric.NAME)), - ConfusionMatrixMetric::fromXContent - ), - new NamedXContentRegistry.Entry( - EvaluationMetric.class, - new ParseField(registeredMetricName(Classification.NAME, AucRocMetric.NAME)), - AucRocMetric::fromXContent - ), - new NamedXContentRegistry.Entry( - EvaluationMetric.class, - new ParseField(registeredMetricName(Classification.NAME, AccuracyMetric.NAME)), - AccuracyMetric::fromXContent - ), - new NamedXContentRegistry.Entry( - EvaluationMetric.class, - new ParseField(registeredMetricName(Classification.NAME, PrecisionMetric.NAME)), - PrecisionMetric::fromXContent - ), - new NamedXContentRegistry.Entry( - EvaluationMetric.class, - new ParseField(registeredMetricName(Classification.NAME, RecallMetric.NAME)), - RecallMetric::fromXContent - ), - new NamedXContentRegistry.Entry( - EvaluationMetric.class, - new ParseField(registeredMetricName(Classification.NAME, MulticlassConfusionMatrixMetric.NAME)), - MulticlassConfusionMatrixMetric::fromXContent - ), - new NamedXContentRegistry.Entry( - EvaluationMetric.class, - new ParseField(registeredMetricName(Regression.NAME, MeanSquaredErrorMetric.NAME)), - MeanSquaredErrorMetric::fromXContent - ), - new NamedXContentRegistry.Entry( - EvaluationMetric.class, - new ParseField(registeredMetricName(Regression.NAME, MeanSquaredLogarithmicErrorMetric.NAME)), - MeanSquaredLogarithmicErrorMetric::fromXContent - ), - new NamedXContentRegistry.Entry( - EvaluationMetric.class, - new ParseField(registeredMetricName(Regression.NAME, HuberMetric.NAME)), - HuberMetric::fromXContent - ), - new NamedXContentRegistry.Entry( - EvaluationMetric.class, - new ParseField(registeredMetricName(Regression.NAME, RSquaredMetric.NAME)), - RSquaredMetric::fromXContent - ), - // Evaluation metrics results - new NamedXContentRegistry.Entry( - EvaluationMetric.Result.class, - new ParseField( - registeredMetricName( - OutlierDetection.NAME, - 
org.elasticsearch.client.ml.dataframe.evaluation.outlierdetection.AucRocMetric.NAME - ) - ), - AucRocResult::fromXContent - ), - new NamedXContentRegistry.Entry( - EvaluationMetric.Result.class, - new ParseField( - registeredMetricName( - OutlierDetection.NAME, - org.elasticsearch.client.ml.dataframe.evaluation.outlierdetection.PrecisionMetric.NAME - ) - ), - org.elasticsearch.client.ml.dataframe.evaluation.outlierdetection.PrecisionMetric.Result::fromXContent - ), - new NamedXContentRegistry.Entry( - EvaluationMetric.Result.class, - new ParseField( - registeredMetricName( - OutlierDetection.NAME, - org.elasticsearch.client.ml.dataframe.evaluation.outlierdetection.RecallMetric.NAME - ) - ), - org.elasticsearch.client.ml.dataframe.evaluation.outlierdetection.RecallMetric.Result::fromXContent - ), - new NamedXContentRegistry.Entry( - EvaluationMetric.Result.class, - new ParseField(registeredMetricName(OutlierDetection.NAME, ConfusionMatrixMetric.NAME)), - ConfusionMatrixMetric.Result::fromXContent - ), - new NamedXContentRegistry.Entry( - EvaluationMetric.Result.class, - new ParseField(registeredMetricName(Classification.NAME, AucRocMetric.NAME)), - AucRocResult::fromXContent - ), - new NamedXContentRegistry.Entry( - EvaluationMetric.Result.class, - new ParseField(registeredMetricName(Classification.NAME, AccuracyMetric.NAME)), - AccuracyMetric.Result::fromXContent - ), - new NamedXContentRegistry.Entry( - EvaluationMetric.Result.class, - new ParseField(registeredMetricName(Classification.NAME, PrecisionMetric.NAME)), - PrecisionMetric.Result::fromXContent - ), - new NamedXContentRegistry.Entry( - EvaluationMetric.Result.class, - new ParseField(registeredMetricName(Classification.NAME, RecallMetric.NAME)), - RecallMetric.Result::fromXContent - ), - new NamedXContentRegistry.Entry( - EvaluationMetric.Result.class, - new ParseField(registeredMetricName(Classification.NAME, MulticlassConfusionMatrixMetric.NAME)), - MulticlassConfusionMatrixMetric.Result::fromXContent - ), - new NamedXContentRegistry.Entry( - EvaluationMetric.Result.class, - new ParseField(registeredMetricName(Regression.NAME, MeanSquaredErrorMetric.NAME)), - MeanSquaredErrorMetric.Result::fromXContent - ), - new NamedXContentRegistry.Entry( - EvaluationMetric.Result.class, - new ParseField(registeredMetricName(Regression.NAME, MeanSquaredLogarithmicErrorMetric.NAME)), - MeanSquaredLogarithmicErrorMetric.Result::fromXContent - ), - new NamedXContentRegistry.Entry( - EvaluationMetric.Result.class, - new ParseField(registeredMetricName(Regression.NAME, HuberMetric.NAME)), - HuberMetric.Result::fromXContent - ), - new NamedXContentRegistry.Entry( - EvaluationMetric.Result.class, - new ParseField(registeredMetricName(Regression.NAME, RSquaredMetric.NAME)), - RSquaredMetric.Result::fromXContent - ) - ); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/classification/AccuracyMetric.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/classification/AccuracyMetric.java deleted file mode 100644 index f95b8a0b77344..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/classification/AccuracyMetric.java +++ /dev/null @@ -1,148 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. 
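As a concrete reading of registeredMetricName above: the two names are joined with a dot, so registeredMetricName(Classification.NAME, PrecisionMetric.NAME) yields "classification.precision", which keeps the classification and outlier-detection precision metrics distinct in the registry.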
Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-package org.elasticsearch.client.ml.dataframe.evaluation.classification;
-
-import org.elasticsearch.client.ml.dataframe.evaluation.EvaluationMetric;
-import org.elasticsearch.xcontent.ConstructingObjectParser;
-import org.elasticsearch.xcontent.ObjectParser;
-import org.elasticsearch.xcontent.ParseField;
-import org.elasticsearch.xcontent.ToXContent;
-import org.elasticsearch.xcontent.XContentBuilder;
-import org.elasticsearch.xcontent.XContentParser;
-
-import java.io.IOException;
-import java.util.Collections;
-import java.util.List;
-import java.util.Objects;
-
-import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg;
-
-/**
- * {@link AccuracyMetric} is a metric that answers the following two questions:
- *
- *   1. What is the fraction of documents for which predicted class equals the actual class?
- *
- *      equation: overall_accuracy = 1/n * Σ(y == y')
- *      where: n  = total number of documents
- *             y  = document's actual class
- *             y' = document's predicted class
- *
- *   2. For any given class X, what is the fraction of documents for which either
- *      a) both actual and predicted class are equal to X (true positives)
- *      or
- *      b) both actual and predicted class are not equal to X (true negatives)
- *
- *      equation: accuracy(X) = 1/n * (TP(X) + TN(X))
- *      where: X     = class being examined
- *             n     = total number of documents
- *             TP(X) = number of true positives wrt X
- *             TN(X) = number of true negatives wrt X
- */
-public class AccuracyMetric implements EvaluationMetric {
-
-    public static final String NAME = "accuracy";
-
-    private static final ObjectParser<AccuracyMetric, Void> PARSER = new ObjectParser<>(NAME, true, AccuracyMetric::new);
-
-    public static AccuracyMetric fromXContent(XContentParser parser) {
-        return PARSER.apply(parser, null);
-    }
-
-    public AccuracyMetric() {}
-
-    @Override
-    public String getName() {
-        return NAME;
-    }
-
-    @Override
-    public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException {
-        builder.startObject();
-        builder.endObject();
-        return builder;
-    }
-
-    @Override
-    public boolean equals(Object o) {
-        if (this == o) return true;
-        if (o == null || getClass() != o.getClass()) return false;
-        return true;
-    }
-
-    @Override
-    public int hashCode() {
-        return Objects.hashCode(NAME);
-    }
-
-    public static class Result implements EvaluationMetric.Result {
-
-        private static final ParseField CLASSES = new ParseField("classes");
-        private static final ParseField OVERALL_ACCURACY = new ParseField("overall_accuracy");
-
-        @SuppressWarnings("unchecked")
-        private static final ConstructingObjectParser<Result, Void> PARSER = new ConstructingObjectParser<>(
-            "accuracy_result",
-            true,
-            a -> new Result((List<PerClassSingleValue>) a[0], (double) a[1])
-        );
-
-        static {
-            PARSER.declareObjectArray(constructorArg(), PerClassSingleValue.PARSER, CLASSES);
-            PARSER.declareDouble(constructorArg(), OVERALL_ACCURACY);
-        }
-
-        public static Result fromXContent(XContentParser parser) {
-            return PARSER.apply(parser, null);
-        }
-
-        /** List of per-class results. */
-        private final List<PerClassSingleValue> classes;
-        /** Fraction of documents for which predicted class equals the actual class. */
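As a worked instance of the two accuracy equations above: with n = 10 documents of which 7 are assigned their actual class, overall_accuracy = 7/10 = 0.7; if for class X there are TP(X) = 3 and TN(X) = 5, then accuracy(X) = (3 + 5)/10 = 0.8.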
*/ - private final double overallAccuracy; - - public Result(List classes, double overallAccuracy) { - this.classes = Collections.unmodifiableList(Objects.requireNonNull(classes)); - this.overallAccuracy = overallAccuracy; - } - - @Override - public String getMetricName() { - return NAME; - } - - public List getClasses() { - return classes; - } - - public double getOverallAccuracy() { - return overallAccuracy; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(CLASSES.getPreferredName(), classes); - builder.field(OVERALL_ACCURACY.getPreferredName(), overallAccuracy); - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - Result that = (Result) o; - return Objects.equals(this.classes, that.classes) && this.overallAccuracy == that.overallAccuracy; - } - - @Override - public int hashCode() { - return Objects.hash(classes, overallAccuracy); - } - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/classification/AucRocMetric.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/classification/AucRocMetric.java deleted file mode 100644 index f8a85d7d665b7..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/classification/AucRocMetric.java +++ /dev/null @@ -1,95 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.dataframe.evaluation.classification; - -import org.elasticsearch.client.ml.dataframe.evaluation.EvaluationMetric; -import org.elasticsearch.client.ml.dataframe.evaluation.common.AucRocResult; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContent; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Objects; - -import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; -import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; - -/** - * Area under the curve (AUC) of the receiver operating characteristic (ROC). - * The ROC curve is a plot of the TPR (true positive rate) against - * the FPR (false positive rate) over a varying threshold. 
- */
-public class AucRocMetric implements EvaluationMetric {
-
-    public static final String NAME = AucRocResult.NAME;
-
-    public static final ParseField CLASS_NAME = new ParseField("class_name");
-    public static final ParseField INCLUDE_CURVE = new ParseField("include_curve");
-
-    public static final ConstructingObjectParser<AucRocMetric, Void> PARSER = new ConstructingObjectParser<>(
-        NAME,
-        true,
-        args -> new AucRocMetric((String) args[0], (Boolean) args[1])
-    );
-
-    static {
-        PARSER.declareString(constructorArg(), CLASS_NAME);
-        PARSER.declareBoolean(optionalConstructorArg(), INCLUDE_CURVE);
-    }
-
-    public static AucRocMetric fromXContent(XContentParser parser) {
-        return PARSER.apply(parser, null);
-    }
-
-    public static AucRocMetric forClass(String className) {
-        return new AucRocMetric(className, false);
-    }
-
-    public static AucRocMetric forClassWithCurve(String className) {
-        return new AucRocMetric(className, true);
-    }
-
-    private final String className;
-    private final Boolean includeCurve;
-
-    public AucRocMetric(String className, Boolean includeCurve) {
-        this.className = Objects.requireNonNull(className);
-        this.includeCurve = includeCurve;
-    }
-
-    @Override
-    public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException {
-        builder.startObject();
-        builder.field(CLASS_NAME.getPreferredName(), className);
-        if (includeCurve != null) {
-            builder.field(INCLUDE_CURVE.getPreferredName(), includeCurve);
-        }
-        builder.endObject();
-        return builder;
-    }
-
-    @Override
-    public String getName() {
-        return NAME;
-    }
-
-    @Override
-    public boolean equals(Object o) {
-        if (this == o) return true;
-        if (o == null || getClass() != o.getClass()) return false;
-        AucRocMetric that = (AucRocMetric) o;
-        return Objects.equals(className, that.className) && Objects.equals(includeCurve, that.includeCurve);
-    }
-
-    @Override
-    public int hashCode() {
-        return Objects.hash(className, includeCurve);
-    }
-}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/classification/Classification.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/classification/Classification.java
deleted file mode 100644
index 7f394ff30a046..0000000000000
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/classification/Classification.java
+++ /dev/null
@@ -1,148 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
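The two factory methods above differ only in whether the ROC curve points are requested alongside the score; a minimal sketch (class name is illustrative):

    AucRocMetric metric = AucRocMetric.forClassWithCurve("dog");
    // renders as {"class_name":"dog","include_curve":true}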
- */
-package org.elasticsearch.client.ml.dataframe.evaluation.classification;
-
-import org.elasticsearch.client.ml.dataframe.evaluation.Evaluation;
-import org.elasticsearch.client.ml.dataframe.evaluation.EvaluationMetric;
-import org.elasticsearch.core.Nullable;
-import org.elasticsearch.xcontent.ConstructingObjectParser;
-import org.elasticsearch.xcontent.ParseField;
-import org.elasticsearch.xcontent.XContentBuilder;
-import org.elasticsearch.xcontent.XContentParser;
-
-import java.io.IOException;
-import java.util.Arrays;
-import java.util.Comparator;
-import java.util.List;
-import java.util.Objects;
-
-import static org.elasticsearch.client.ml.dataframe.evaluation.MlEvaluationNamedXContentProvider.registeredMetricName;
-import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg;
-import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg;
-
-/**
- * Evaluation of classification results.
- */
-public class Classification implements Evaluation {
-
-    public static final String NAME = "classification";
-
-    private static final ParseField ACTUAL_FIELD = new ParseField("actual_field");
-    private static final ParseField PREDICTED_FIELD = new ParseField("predicted_field");
-    private static final ParseField TOP_CLASSES_FIELD = new ParseField("top_classes_field");
-
-    private static final ParseField METRICS = new ParseField("metrics");
-
-    @SuppressWarnings("unchecked")
-    public static final ConstructingObjectParser<Classification, Void> PARSER = new ConstructingObjectParser<>(
-        NAME,
-        true,
-        a -> new Classification((String) a[0], (String) a[1], (String) a[2], (List<EvaluationMetric>) a[3])
-    );
-
-    static {
-        PARSER.declareString(constructorArg(), ACTUAL_FIELD);
-        PARSER.declareString(optionalConstructorArg(), PREDICTED_FIELD);
-        PARSER.declareString(optionalConstructorArg(), TOP_CLASSES_FIELD);
-        PARSER.declareNamedObjects(
-            optionalConstructorArg(),
-            (p, c, n) -> p.namedObject(EvaluationMetric.class, registeredMetricName(NAME, n), c),
-            METRICS
-        );
-    }
-
-    public static Classification fromXContent(XContentParser parser) {
-        return PARSER.apply(parser, null);
-    }
-
-    /**
-     * The field containing the actual value
-     */
-    private final String actualField;
-
-    /**
-     * The field containing the predicted value
-     */
-    private final String predictedField;
-
-    /**
-     * The field containing the array of top classes
-     */
-    private final String topClassesField;
-
-    /**
-     * The list of metrics to calculate
-     */
-    private final List<EvaluationMetric> metrics;
-
-    public Classification(String actualField, String predictedField, String topClassesField) {
-        this(actualField, predictedField, topClassesField, (List<EvaluationMetric>) null);
-    }
-
-    public Classification(String actualField, String predictedField, String topClassesField, EvaluationMetric... metrics) {
-        this(actualField, predictedField, topClassesField, Arrays.asList(metrics));
-    }
-
-    public Classification(
-        String actualField,
-        @Nullable String predictedField,
-        @Nullable String topClassesField,
-        @Nullable List<EvaluationMetric> metrics
-    ) {
-        this.actualField = Objects.requireNonNull(actualField);
-        this.predictedField = predictedField;
-        this.topClassesField = topClassesField;
-        if (metrics != null) {
-            metrics.sort(Comparator.comparing(EvaluationMetric::getName));
-        }
-        this.metrics = metrics;
-    }
-
-    @Override
-    public String getName() {
-        return NAME;
-    }
-
-    @Override
-    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
-        builder.startObject();
-        builder.field(ACTUAL_FIELD.getPreferredName(), actualField);
-        if (predictedField != null) {
-            builder.field(PREDICTED_FIELD.getPreferredName(), predictedField);
-        }
-        if (topClassesField != null) {
-            builder.field(TOP_CLASSES_FIELD.getPreferredName(), topClassesField);
-        }
-        if (metrics != null) {
-            builder.startObject(METRICS.getPreferredName());
-            for (EvaluationMetric metric : metrics) {
-                builder.field(metric.getName(), metric);
-            }
-            builder.endObject();
-        }
-
-        builder.endObject();
-        return builder;
-    }
-
-    @Override
-    public boolean equals(Object o) {
-        if (this == o) return true;
-        if (o == null || getClass() != o.getClass()) return false;
-        Classification that = (Classification) o;
-        return Objects.equals(that.actualField, this.actualField)
-            && Objects.equals(that.predictedField, this.predictedField)
-            && Objects.equals(that.topClassesField, this.topClassesField)
-            && Objects.equals(that.metrics, this.metrics);
-    }
-
-    @Override
-    public int hashCode() {
-        return Objects.hash(actualField, predictedField, topClassesField, metrics);
-    }
-}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/classification/MulticlassConfusionMatrixMetric.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/classification/MulticlassConfusionMatrixMetric.java
deleted file mode 100644
index ae55246c11dc5..0000000000000
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/classification/MulticlassConfusionMatrixMetric.java
+++ /dev/null
@@ -1,289 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-package org.elasticsearch.client.ml.dataframe.evaluation.classification;
-
-import org.elasticsearch.client.ml.dataframe.evaluation.EvaluationMetric;
-import org.elasticsearch.common.Strings;
-import org.elasticsearch.core.Nullable;
-import org.elasticsearch.xcontent.ConstructingObjectParser;
-import org.elasticsearch.xcontent.ParseField;
-import org.elasticsearch.xcontent.ToXContentObject;
-import org.elasticsearch.xcontent.XContentBuilder;
-import org.elasticsearch.xcontent.XContentParser;
-
-import java.io.IOException;
-import java.util.Collections;
-import java.util.List;
-import java.util.Objects;
-
-import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg;
-
-/**
- * Calculates the multiclass confusion matrix.
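Tying the Classification pieces together, a minimal sketch of configuring the evaluation with explicit metrics (field names are illustrative):

    Classification evaluation = new Classification(
        "animal_class",               // actual_field
        "ml.animal_class_prediction", // predicted_field
        "ml.top_classes",             // top_classes_field
        new AccuracyMetric(),
        new MulticlassConfusionMatrixMetric(3)
    );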
- */
-public class MulticlassConfusionMatrixMetric implements EvaluationMetric {
-
-    public static final String NAME = "multiclass_confusion_matrix";
-
-    public static final ParseField SIZE = new ParseField("size");
-
-    private static final ConstructingObjectParser<MulticlassConfusionMatrixMetric, Void> PARSER = createParser();
-
-    private static ConstructingObjectParser<MulticlassConfusionMatrixMetric, Void> createParser() {
-        ConstructingObjectParser<MulticlassConfusionMatrixMetric, Void> parser = new ConstructingObjectParser<>(
-            NAME,
-            true,
-            args -> new MulticlassConfusionMatrixMetric((Integer) args[0])
-        );
-        parser.declareInt(optionalConstructorArg(), SIZE);
-        return parser;
-    }
-
-    public static MulticlassConfusionMatrixMetric fromXContent(XContentParser parser) {
-        return PARSER.apply(parser, null);
-    }
-
-    private final Integer size;
-
-    public MulticlassConfusionMatrixMetric() {
-        this(null);
-    }
-
-    public MulticlassConfusionMatrixMetric(@Nullable Integer size) {
-        this.size = size;
-    }
-
-    @Override
-    public String getName() {
-        return NAME;
-    }
-
-    @Override
-    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
-        builder.startObject();
-        if (size != null) {
-            builder.field(SIZE.getPreferredName(), size);
-        }
-        builder.endObject();
-        return builder;
-    }
-
-    @Override
-    public boolean equals(Object o) {
-        if (this == o) return true;
-        if (o == null || getClass() != o.getClass()) return false;
-        MulticlassConfusionMatrixMetric that = (MulticlassConfusionMatrixMetric) o;
-        return Objects.equals(this.size, that.size);
-    }
-
-    @Override
-    public int hashCode() {
-        return Objects.hash(size);
-    }
-
-    public static class Result implements EvaluationMetric.Result {
-
-        private static final ParseField CONFUSION_MATRIX = new ParseField("confusion_matrix");
-        private static final ParseField OTHER_ACTUAL_CLASS_COUNT = new ParseField("other_actual_class_count");
-
-        @SuppressWarnings("unchecked")
-        private static final ConstructingObjectParser<Result, Void> PARSER = new ConstructingObjectParser<>(
-            "multiclass_confusion_matrix_result",
-            true,
-            a -> new Result((List<ActualClass>) a[0], (Long) a[1])
-        );
-
-        static {
-            PARSER.declareObjectArray(optionalConstructorArg(), ActualClass.PARSER, CONFUSION_MATRIX);
-            PARSER.declareLong(optionalConstructorArg(), OTHER_ACTUAL_CLASS_COUNT);
-        }
-
-        public static Result fromXContent(XContentParser parser) {
-            return PARSER.apply(parser, null);
-        }
-
-        private final List<ActualClass> confusionMatrix;
-        private final Long otherActualClassCount;
-
-        public Result(@Nullable List<ActualClass> confusionMatrix, @Nullable Long otherActualClassCount) {
-            this.confusionMatrix = confusionMatrix != null ? Collections.unmodifiableList(Objects.requireNonNull(confusionMatrix)) : null;
-            this.otherActualClassCount = otherActualClassCount;
-        }
-
-        @Override
-        public String getMetricName() {
-            return NAME;
-        }
-
-        public List<ActualClass> getConfusionMatrix() {
-            return confusionMatrix;
-        }
-
-        public Long getOtherActualClassCount() {
-            return otherActualClassCount;
-        }
-
-        @Override
-        public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
-            builder.startObject();
-            if (confusionMatrix != null) {
-                builder.field(CONFUSION_MATRIX.getPreferredName(), confusionMatrix);
-            }
-            if (otherActualClassCount != null) {
-                builder.field(OTHER_ACTUAL_CLASS_COUNT.getPreferredName(), otherActualClassCount);
-            }
-            builder.endObject();
-            return builder;
-        }
-
-        @Override
-        public boolean equals(Object o) {
-            if (this == o) return true;
-            if (o == null || getClass() != o.getClass()) return false;
-            Result that = (Result) o;
-            return Objects.equals(this.confusionMatrix, that.confusionMatrix)
-                && Objects.equals(this.otherActualClassCount, that.otherActualClassCount);
-        }
-
-        @Override
-        public int hashCode() {
-            return Objects.hash(confusionMatrix, otherActualClassCount);
-        }
-    }
-
-    public static class ActualClass implements ToXContentObject {
-
-        private static final ParseField ACTUAL_CLASS = new ParseField("actual_class");
-        private static final ParseField ACTUAL_CLASS_DOC_COUNT = new ParseField("actual_class_doc_count");
-        private static final ParseField PREDICTED_CLASSES = new ParseField("predicted_classes");
-        private static final ParseField OTHER_PREDICTED_CLASS_DOC_COUNT = new ParseField("other_predicted_class_doc_count");
-
-        @SuppressWarnings("unchecked")
-        private static final ConstructingObjectParser<ActualClass, Void> PARSER = new ConstructingObjectParser<>(
-            "multiclass_confusion_matrix_actual_class",
-            true,
-            a -> new ActualClass((String) a[0], (Long) a[1], (List<PredictedClass>) a[2], (Long) a[3])
-        );
-
-        static {
-            PARSER.declareString(optionalConstructorArg(), ACTUAL_CLASS);
-            PARSER.declareLong(optionalConstructorArg(), ACTUAL_CLASS_DOC_COUNT);
-            PARSER.declareObjectArray(optionalConstructorArg(), PredictedClass.PARSER, PREDICTED_CLASSES);
-            PARSER.declareLong(optionalConstructorArg(), OTHER_PREDICTED_CLASS_DOC_COUNT);
-        }
-
-        private final String actualClass;
-        private final Long actualClassDocCount;
-        private final List<PredictedClass> predictedClasses;
-        private final Long otherPredictedClassDocCount;
-
-        public ActualClass(
-            @Nullable String actualClass,
-            @Nullable Long actualClassDocCount,
-            @Nullable List<PredictedClass> predictedClasses,
-            @Nullable Long otherPredictedClassDocCount
-        ) {
-            this.actualClass = actualClass;
-            this.actualClassDocCount = actualClassDocCount;
-            this.predictedClasses = predictedClasses != null ? Collections.unmodifiableList(predictedClasses) : null;
-            this.otherPredictedClassDocCount = otherPredictedClassDocCount;
-        }
-
-        @Override
-        public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
-            builder.startObject();
-            if (actualClass != null) {
-                builder.field(ACTUAL_CLASS.getPreferredName(), actualClass);
-            }
-            if (actualClassDocCount != null) {
-                builder.field(ACTUAL_CLASS_DOC_COUNT.getPreferredName(), actualClassDocCount);
-            }
-            if (predictedClasses != null) {
-                builder.field(PREDICTED_CLASSES.getPreferredName(), predictedClasses);
-            }
-            if (otherPredictedClassDocCount != null) {
-                builder.field(OTHER_PREDICTED_CLASS_DOC_COUNT.getPreferredName(), otherPredictedClassDocCount);
-            }
-            builder.endObject();
-            return builder;
-        }
-
-        @Override
-        public boolean equals(Object o) {
-            if (this == o) return true;
-            if (o == null || getClass() != o.getClass()) return false;
-            ActualClass that = (ActualClass) o;
-            return Objects.equals(this.actualClass, that.actualClass)
-                && Objects.equals(this.actualClassDocCount, that.actualClassDocCount)
-                && Objects.equals(this.predictedClasses, that.predictedClasses)
-                && Objects.equals(this.otherPredictedClassDocCount, that.otherPredictedClassDocCount);
-        }
-
-        @Override
-        public int hashCode() {
-            return Objects.hash(actualClass, actualClassDocCount, predictedClasses, otherPredictedClassDocCount);
-        }
-
-        @Override
-        public String toString() {
-            return Strings.toString(this);
-        }
-    }
-
-    public static class PredictedClass implements ToXContentObject {
-
-        private static final ParseField PREDICTED_CLASS = new ParseField("predicted_class");
-        private static final ParseField COUNT = new ParseField("count");
-
-        @SuppressWarnings("unchecked")
-        private static final ConstructingObjectParser<PredictedClass, Void> PARSER = new ConstructingObjectParser<>(
-            "multiclass_confusion_matrix_predicted_class",
-            true,
-            a -> new PredictedClass((String) a[0], (Long) a[1])
-        );
-
-        static {
-            PARSER.declareString(optionalConstructorArg(), PREDICTED_CLASS);
-            PARSER.declareLong(optionalConstructorArg(), COUNT);
-        }
-
-        private final String predictedClass;
-        private final Long count;
-
-        public PredictedClass(@Nullable String predictedClass, @Nullable Long count) {
-            this.predictedClass = predictedClass;
-            this.count = count;
-        }
-
-        @Override
-        public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
-            builder.startObject();
-            if (predictedClass != null) {
-                builder.field(PREDICTED_CLASS.getPreferredName(), predictedClass);
-            }
-            if (count != null) {
-                builder.field(COUNT.getPreferredName(), count);
-            }
-            builder.endObject();
-            return builder;
-        }
-
-        @Override
-        public boolean equals(Object o) {
-            if (this == o) return true;
-            if (o == null || getClass() != o.getClass()) return false;
-            PredictedClass that = (PredictedClass) o;
-            return Objects.equals(this.predictedClass, that.predictedClass) && Objects.equals(this.count, that.count);
-        }
-
-        @Override
-        public int hashCode() {
-            return Objects.hash(predictedClass, count);
-        }
-    }
-}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/classification/PerClassSingleValue.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/classification/PerClassSingleValue.java
deleted file mode 100644
index 703468b5ec282..0000000000000
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/classification/PerClassSingleValue.java
+++ /dev/null
@@ -1,72 +0,0 @@
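The nested Result above serializes to one row per actual class; for a two-class problem the shape is roughly the following (values illustrative):

    {"confusion_matrix": [
       {"actual_class": "cat", "actual_class_doc_count": 12,
        "predicted_classes": [{"predicted_class": "cat", "count": 10}, {"predicted_class": "dog", "count": 2}],
        "other_predicted_class_doc_count": 0},
       {"actual_class": "dog", ...}],
     "other_actual_class_count": 0}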
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-package org.elasticsearch.client.ml.dataframe.evaluation.classification;
-
-import org.elasticsearch.xcontent.ConstructingObjectParser;
-import org.elasticsearch.xcontent.ParseField;
-import org.elasticsearch.xcontent.ToXContentObject;
-import org.elasticsearch.xcontent.XContentBuilder;
-
-import java.io.IOException;
-import java.util.Objects;
-
-import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg;
-
-public class PerClassSingleValue implements ToXContentObject {
-    private static final ParseField CLASS_NAME = new ParseField("class_name");
-    private static final ParseField VALUE = new ParseField("value");
-
-    public static final ConstructingObjectParser<PerClassSingleValue, Void> PARSER = new ConstructingObjectParser<>(
-        "per_class_result",
-        true,
-        a -> new PerClassSingleValue((String) a[0], (double) a[1])
-    );
-
-    static {
-        PARSER.declareString(constructorArg(), CLASS_NAME);
-        PARSER.declareDouble(constructorArg(), VALUE);
-    }
-
-    private final String className;
-    private final double value;
-
-    public PerClassSingleValue(String className, double value) {
-        this.className = Objects.requireNonNull(className);
-        this.value = value;
-    }
-
-    public String getClassName() {
-        return className;
-    }
-
-    public double getValue() {
-        return value;
-    }
-
-    @Override
-    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
-        builder.startObject();
-        builder.field(CLASS_NAME.getPreferredName(), className);
-        builder.field(VALUE.getPreferredName(), value);
-        builder.endObject();
-        return builder;
-    }
-
-    @Override
-    public boolean equals(Object o) {
-        if (this == o) return true;
-        if (o == null || getClass() != o.getClass()) return false;
-        PerClassSingleValue that = (PerClassSingleValue) o;
-        return Objects.equals(this.className, that.className) && this.value == that.value;
-    }
-
-    @Override
-    public int hashCode() {
-        return Objects.hash(className, value);
-    }
-}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/classification/PrecisionMetric.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/classification/PrecisionMetric.java
deleted file mode 100644
index 168eeed66d67d..0000000000000
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/classification/PrecisionMetric.java
+++ /dev/null
@@ -1,135 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */ -package org.elasticsearch.client.ml.dataframe.evaluation.classification; - -import org.elasticsearch.client.ml.dataframe.evaluation.EvaluationMetric; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Collections; -import java.util.List; -import java.util.Objects; - -import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; - -/** - * {@link PrecisionMetric} is a metric that answers the question: - * "What fraction of documents classified as X actually belongs to X?" - * for any given class X - * - * equation: precision(X) = TP(X) / (TP(X) + FP(X)) - * where: TP(X) - number of true positives wrt X - * FP(X) - number of false positives wrt X - */ -public class PrecisionMetric implements EvaluationMetric { - - public static final String NAME = "precision"; - - private static final ObjectParser PARSER = new ObjectParser<>(NAME, true, PrecisionMetric::new); - - public static PrecisionMetric fromXContent(XContentParser parser) { - return PARSER.apply(parser, null); - } - - public PrecisionMetric() {} - - @Override - public String getName() { - return NAME; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - return true; - } - - @Override - public int hashCode() { - return Objects.hashCode(NAME); - } - - public static class Result implements EvaluationMetric.Result { - - private static final ParseField CLASSES = new ParseField("classes"); - private static final ParseField AVG_PRECISION = new ParseField("avg_precision"); - - @SuppressWarnings("unchecked") - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "precision_result", - true, - a -> new Result((List) a[0], (double) a[1]) - ); - - static { - PARSER.declareObjectArray(constructorArg(), PerClassSingleValue.PARSER, CLASSES); - PARSER.declareDouble(constructorArg(), AVG_PRECISION); - } - - public static Result fromXContent(XContentParser parser) { - return PARSER.apply(parser, null); - } - - /** List of per-class results. */ - private final List classes; - /** Average of per-class precisions. 
*/ - private final double avgPrecision; - - public Result(List classes, double avgPrecision) { - this.classes = Collections.unmodifiableList(Objects.requireNonNull(classes)); - this.avgPrecision = avgPrecision; - } - - @Override - public String getMetricName() { - return NAME; - } - - public List getClasses() { - return classes; - } - - public double getAvgPrecision() { - return avgPrecision; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(CLASSES.getPreferredName(), classes); - builder.field(AVG_PRECISION.getPreferredName(), avgPrecision); - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - Result that = (Result) o; - return Objects.equals(this.classes, that.classes) && this.avgPrecision == that.avgPrecision; - } - - @Override - public int hashCode() { - return Objects.hash(classes, avgPrecision); - } - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/classification/RecallMetric.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/classification/RecallMetric.java deleted file mode 100644 index 689d441944e7a..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/classification/RecallMetric.java +++ /dev/null @@ -1,135 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.dataframe.evaluation.classification; - -import org.elasticsearch.client.ml.dataframe.evaluation.EvaluationMetric; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Collections; -import java.util.List; -import java.util.Objects; - -import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; - -/** - * {@link RecallMetric} is a metric that answers the question: - * "What fraction of documents belonging to X have been predicted as X by the classifier?" 
- * for any given class X - * - * equation: recall(X) = TP(X) / (TP(X) + FN(X)) - * where: TP(X) - number of true positives wrt X - * FN(X) - number of false negatives wrt X - */ -public class RecallMetric implements EvaluationMetric { - - public static final String NAME = "recall"; - - private static final ObjectParser PARSER = new ObjectParser<>(NAME, true, RecallMetric::new); - - public static RecallMetric fromXContent(XContentParser parser) { - return PARSER.apply(parser, null); - } - - public RecallMetric() {} - - @Override - public String getName() { - return NAME; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - return true; - } - - @Override - public int hashCode() { - return Objects.hashCode(NAME); - } - - public static class Result implements EvaluationMetric.Result { - - private static final ParseField CLASSES = new ParseField("classes"); - private static final ParseField AVG_RECALL = new ParseField("avg_recall"); - - @SuppressWarnings("unchecked") - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "recall_result", - true, - a -> new Result((List) a[0], (double) a[1]) - ); - - static { - PARSER.declareObjectArray(constructorArg(), PerClassSingleValue.PARSER, CLASSES); - PARSER.declareDouble(constructorArg(), AVG_RECALL); - } - - public static Result fromXContent(XContentParser parser) { - return PARSER.apply(parser, null); - } - - /** List of per-class results. */ - private final List classes; - /** Average of per-class recalls. */ - private final double avgRecall; - - public Result(List classes, double avgRecall) { - this.classes = Collections.unmodifiableList(Objects.requireNonNull(classes)); - this.avgRecall = avgRecall; - } - - @Override - public String getMetricName() { - return NAME; - } - - public List getClasses() { - return classes; - } - - public double getAvgRecall() { - return avgRecall; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(CLASSES.getPreferredName(), classes); - builder.field(AVG_RECALL.getPreferredName(), avgRecall); - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - Result that = (Result) o; - return Objects.equals(this.classes, that.classes) && this.avgRecall == that.avgRecall; - } - - @Override - public int hashCode() { - return Objects.hash(classes, avgRecall); - } - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/common/AucRocPoint.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/common/AucRocPoint.java deleted file mode 100644 index 08e5122181269..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/common/AucRocPoint.java +++ /dev/null @@ -1,92 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. 
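// Editor's sketch (not part of the patch): a self-contained worked example of the
// precision and recall equations in the two Javadocs above; the per-class counts are
// invented.
public final class PrecisionRecallExample {
    public static void main(String[] args) {
        long tp = 40; // documents of class X predicted as X
        long fp = 10; // documents of other classes predicted as X
        long fn = 5;  // documents of class X predicted as something else
        double precision = (double) tp / (tp + fp); // TP(X) / (TP(X) + FP(X)) = 0.800
        double recall = (double) tp / (tp + fn);    // TP(X) / (TP(X) + FN(X)) ≈ 0.889
        System.out.printf("precision=%.3f recall=%.3f%n", precision, recall);
    }
}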
Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.dataframe.evaluation.common; - -import org.elasticsearch.common.Strings; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Objects; - -import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; - -public class AucRocPoint implements ToXContentObject { - - public static AucRocPoint fromXContent(XContentParser parser) { - return PARSER.apply(parser, null); - } - - private static final ParseField TPR = new ParseField("tpr"); - private static final ParseField FPR = new ParseField("fpr"); - private static final ParseField THRESHOLD = new ParseField("threshold"); - - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "auc_roc_point", - true, - args -> new AucRocPoint((double) args[0], (double) args[1], (double) args[2]) - ); - - static { - PARSER.declareDouble(constructorArg(), TPR); - PARSER.declareDouble(constructorArg(), FPR); - PARSER.declareDouble(constructorArg(), THRESHOLD); - } - - private final double tpr; - private final double fpr; - private final double threshold; - - public AucRocPoint(double tpr, double fpr, double threshold) { - this.tpr = tpr; - this.fpr = fpr; - this.threshold = threshold; - } - - public double getTruePositiveRate() { - return tpr; - } - - public double getFalsePositiveRate() { - return fpr; - } - - public double getThreshold() { - return threshold; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - return builder.startObject() - .field(TPR.getPreferredName(), tpr) - .field(FPR.getPreferredName(), fpr) - .field(THRESHOLD.getPreferredName(), threshold) - .endObject(); - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - AucRocPoint that = (AucRocPoint) o; - return tpr == that.tpr && fpr == that.fpr && threshold == that.threshold; - } - - @Override - public int hashCode() { - return Objects.hash(tpr, fpr, threshold); - } - - @Override - public String toString() { - return Strings.toString(this); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/common/AucRocResult.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/common/AucRocResult.java deleted file mode 100644 index d661115b67291..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/common/AucRocResult.java +++ /dev/null @@ -1,99 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
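// Editor's sketch (not part of the patch): how one ROC point, as modelled by AucRocPoint
// above, can be derived at a given threshold; the scores and labels are invented.
double threshold = 0.5;
double[] scores = { 0.9, 0.7, 0.4, 0.2 };        // predicted probabilities
boolean[] actual = { true, false, true, false }; // ground-truth labels
long tp = 0, fp = 0, tn = 0, fn = 0;
for (int i = 0; i < scores.length; i++) {
    boolean predictedPositive = scores[i] >= threshold;
    if (predictedPositive && actual[i]) tp++;
    else if (predictedPositive) fp++;
    else if (actual[i]) fn++;
    else tn++;
}
double tpr = (double) tp / (tp + fn); // true positive rate = 0.5
double fpr = (double) fp / (fp + tn); // false positive rate = 0.5
AucRocPoint point = new AucRocPoint(tpr, fpr, threshold);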
- */ -package org.elasticsearch.client.ml.dataframe.evaluation.common; - -import org.elasticsearch.client.ml.dataframe.evaluation.EvaluationMetric; -import org.elasticsearch.common.Strings; -import org.elasticsearch.core.Nullable; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContent; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Collections; -import java.util.List; -import java.util.Objects; - -import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; -import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; - -public class AucRocResult implements EvaluationMetric.Result { - - public static final String NAME = "auc_roc"; - - public static AucRocResult fromXContent(XContentParser parser) { - return PARSER.apply(parser, null); - } - - private static final ParseField VALUE = new ParseField("value"); - private static final ParseField CURVE = new ParseField("curve"); - - @SuppressWarnings("unchecked") - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - NAME, - true, - args -> new AucRocResult((double) args[0], (List) args[1]) - ); - - static { - PARSER.declareDouble(constructorArg(), VALUE); - PARSER.declareObjectArray(optionalConstructorArg(), (p, c) -> AucRocPoint.fromXContent(p), CURVE); - } - - private final double value; - private final List curve; - - public AucRocResult(double value, @Nullable List curve) { - this.value = value; - this.curve = curve; - } - - @Override - public String getMetricName() { - return NAME; - } - - public double getValue() { - return value; - } - - public List getCurve() { - return curve == null ? null : Collections.unmodifiableList(curve); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { - builder.startObject(); - builder.field(VALUE.getPreferredName(), value); - if (curve != null && curve.isEmpty() == false) { - builder.field(CURVE.getPreferredName(), curve); - } - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - AucRocResult that = (AucRocResult) o; - return value == that.value && Objects.equals(curve, that.curve); - } - - @Override - public int hashCode() { - return Objects.hash(value, curve); - } - - @Override - public String toString() { - return Strings.toString(this); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/outlierdetection/AbstractConfusionMatrixMetric.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/outlierdetection/AbstractConfusionMatrixMetric.java deleted file mode 100644 index e39af0d143c4b..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/outlierdetection/AbstractConfusionMatrixMetric.java +++ /dev/null @@ -1,33 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
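// Editor's sketch (not part of the patch): one common way to obtain the scalar "value"
// carried by AucRocResult above is the trapezoidal rule over the ROC curve points,
// ordered by ascending fpr; the three points here are invented. This is an assumption
// about how such a value can be computed, not necessarily the server's implementation.
double[][] curve = { { 0.0, 0.0 }, { 0.25, 0.5 }, { 1.0, 1.0 } }; // each point: { fpr, tpr }
double auc = 0.0;
for (int i = 1; i < curve.length; i++) {
    double dx = curve[i][0] - curve[i - 1][0];         // width along the fpr axis
    auc += dx * (curve[i][1] + curve[i - 1][1]) / 2.0; // trapezoid area
}
// auc = 0.25 * 0.25 + 0.75 * 0.75 = 0.625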
- */ -package org.elasticsearch.client.ml.dataframe.evaluation.outlierdetection; - -import org.elasticsearch.client.ml.dataframe.evaluation.EvaluationMetric; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContent; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.List; -import java.util.Objects; - -abstract class AbstractConfusionMatrixMetric implements EvaluationMetric { - - protected static final ParseField AT = new ParseField("at"); - - protected final double[] thresholds; - - protected AbstractConfusionMatrixMetric(List at) { - this.thresholds = Objects.requireNonNull(at).stream().mapToDouble(Double::doubleValue).toArray(); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { - return builder.startObject().field(AT.getPreferredName(), thresholds).endObject(); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/outlierdetection/AucRocMetric.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/outlierdetection/AucRocMetric.java deleted file mode 100644 index 7c8ea07ab6c77..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/outlierdetection/AucRocMetric.java +++ /dev/null @@ -1,85 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.dataframe.evaluation.outlierdetection; - -import org.elasticsearch.client.ml.dataframe.evaluation.EvaluationMetric; -import org.elasticsearch.client.ml.dataframe.evaluation.common.AucRocResult; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Objects; - -import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; - -/** - * Area under the curve (AUC) of the receiver operating characteristic (ROC). - * The ROC curve is a plot of the TPR (true positive rate) against - * the FPR (false positive rate) over a varying threshold. 
- */ -public class AucRocMetric implements EvaluationMetric { - - public static final String NAME = AucRocResult.NAME; - - public static final ParseField INCLUDE_CURVE = new ParseField("include_curve"); - - @SuppressWarnings("unchecked") - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - NAME, - true, - args -> new AucRocMetric((Boolean) args[0]) - ); - - static { - PARSER.declareBoolean(optionalConstructorArg(), INCLUDE_CURVE); - } - - public static AucRocMetric fromXContent(XContentParser parser) { - return PARSER.apply(parser, null); - } - - public static AucRocMetric withCurve() { - return new AucRocMetric(true); - } - - private final Boolean includeCurve; - - public AucRocMetric(Boolean includeCurve) { - this.includeCurve = includeCurve; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - if (includeCurve != null) { - builder.field(INCLUDE_CURVE.getPreferredName(), includeCurve); - } - builder.endObject(); - return builder; - } - - @Override - public String getName() { - return NAME; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - AucRocMetric that = (AucRocMetric) o; - return Objects.equals(includeCurve, that.includeCurve); - } - - @Override - public int hashCode() { - return Objects.hash(includeCurve); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/outlierdetection/ConfusionMatrixMetric.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/outlierdetection/ConfusionMatrixMetric.java deleted file mode 100644 index 0d4617baeb56f..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/outlierdetection/ConfusionMatrixMetric.java +++ /dev/null @@ -1,198 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
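// Editor's sketch (not part of the patch): constructing the metric deleted above.
AucRocMetric withCurve = AucRocMetric.withCurve(); // serializes as {"include_curve":true}
AucRocMetric plain = new AucRocMetric(null);       // serializes as {} (the flag is omitted)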
- */ -package org.elasticsearch.client.ml.dataframe.evaluation.outlierdetection; - -import org.elasticsearch.client.ml.dataframe.evaluation.EvaluationMetric; -import org.elasticsearch.common.Strings; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContent; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Arrays; -import java.util.LinkedHashMap; -import java.util.List; -import java.util.Map; -import java.util.Objects; - -import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; - -public class ConfusionMatrixMetric extends AbstractConfusionMatrixMetric { - - public static final String NAME = "confusion_matrix"; - - @SuppressWarnings("unchecked") - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - NAME, - args -> new ConfusionMatrixMetric((List) args[0]) - ); - - static { - PARSER.declareDoubleArray(constructorArg(), AT); - } - - public static ConfusionMatrixMetric fromXContent(XContentParser parser) { - return PARSER.apply(parser, null); - } - - public static ConfusionMatrixMetric at(Double... at) { - return new ConfusionMatrixMetric(Arrays.asList(at)); - } - - public ConfusionMatrixMetric(List at) { - super(at); - } - - @Override - public String getName() { - return NAME; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - ConfusionMatrixMetric that = (ConfusionMatrixMetric) o; - return Arrays.equals(thresholds, that.thresholds); - } - - @Override - public int hashCode() { - return Arrays.hashCode(thresholds); - } - - public static class Result implements EvaluationMetric.Result { - - public static Result fromXContent(XContentParser parser) throws IOException { - return new Result(parser.map(LinkedHashMap::new, ConfusionMatrix::fromXContent)); - } - - private final Map results; - - public Result(Map results) { - this.results = Objects.requireNonNull(results); - } - - @Override - public String getMetricName() { - return NAME; - } - - public ConfusionMatrix getScoreByThreshold(String threshold) { - return results.get(threshold); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { - return builder.map(results); - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - Result that = (Result) o; - return Objects.equals(results, that.results); - } - - @Override - public int hashCode() { - return Objects.hash(results); - } - - @Override - public String toString() { - return Strings.toString(this); - } - } - - public static final class ConfusionMatrix implements ToXContentObject { - - public static ConfusionMatrix fromXContent(XContentParser parser) { - return PARSER.apply(parser, null); - } - - private static final ParseField TP = new ParseField("tp"); - private static final ParseField FP = new ParseField("fp"); - private static final ParseField TN = new ParseField("tn"); - private static final ParseField FN = new ParseField("fn"); - - @SuppressWarnings("unchecked") - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "confusion_matrix", - true, - args -> new ConfusionMatrix((long) args[0], 
(long) args[1], (long) args[2], (long) args[3]) - ); - - static { - PARSER.declareLong(constructorArg(), TP); - PARSER.declareLong(constructorArg(), FP); - PARSER.declareLong(constructorArg(), TN); - PARSER.declareLong(constructorArg(), FN); - } - - private final long tp; - private final long fp; - private final long tn; - private final long fn; - - public ConfusionMatrix(long tp, long fp, long tn, long fn) { - this.tp = tp; - this.fp = fp; - this.tn = tn; - this.fn = fn; - } - - public long getTruePositives() { - return tp; - } - - public long getFalsePositives() { - return fp; - } - - public long getTrueNegatives() { - return tn; - } - - public long getFalseNegatives() { - return fn; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - return builder.startObject() - .field(TP.getPreferredName(), tp) - .field(FP.getPreferredName(), fp) - .field(TN.getPreferredName(), tn) - .field(FN.getPreferredName(), fn) - .endObject(); - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - ConfusionMatrix that = (ConfusionMatrix) o; - return tp == that.tp && fp == that.fp && tn == that.tn && fn == that.fn; - } - - @Override - public int hashCode() { - return Objects.hash(tp, fp, tn, fn); - } - - @Override - public String toString() { - return Strings.toString(this); - } - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/outlierdetection/OutlierDetection.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/outlierdetection/OutlierDetection.java deleted file mode 100644 index 7372e85d0bf05..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/outlierdetection/OutlierDetection.java +++ /dev/null @@ -1,131 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.dataframe.evaluation.outlierdetection; - -import org.elasticsearch.client.ml.dataframe.evaluation.Evaluation; -import org.elasticsearch.client.ml.dataframe.evaluation.EvaluationMetric; -import org.elasticsearch.core.Nullable; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContent; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Arrays; -import java.util.Comparator; -import java.util.List; -import java.util.Objects; - -import static org.elasticsearch.client.ml.dataframe.evaluation.MlEvaluationNamedXContentProvider.registeredMetricName; -import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; -import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; - -/** - * Evaluation of outlier detection results. 
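// Editor's sketch (not part of the patch): ConfusionMatrixMetric.at(...), deleted above,
// requests one confusion matrix per threshold; the Result keys matrices by the threshold
// rendered as a string. The threshold values here are invented.
ConfusionMatrixMetric metric = ConfusionMatrixMetric.at(0.25, 0.5, 0.75);
// A parsed Result then exposes e.g. result.getScoreByThreshold("0.5"),
// returning a ConfusionMatrix with its tp/fp/tn/fn counts.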
- */ -public class OutlierDetection implements Evaluation { - - public static final String NAME = "outlier_detection"; - - private static final ParseField ACTUAL_FIELD = new ParseField("actual_field"); - private static final ParseField PREDICTED_PROBABILITY_FIELD = new ParseField("predicted_probability_field"); - private static final ParseField METRICS = new ParseField("metrics"); - - @SuppressWarnings("unchecked") - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - NAME, - true, - args -> new OutlierDetection((String) args[0], (String) args[1], (List) args[2]) - ); - - static { - PARSER.declareString(constructorArg(), ACTUAL_FIELD); - PARSER.declareString(constructorArg(), PREDICTED_PROBABILITY_FIELD); - PARSER.declareNamedObjects( - optionalConstructorArg(), - (p, c, n) -> p.namedObject(EvaluationMetric.class, registeredMetricName(NAME, n), null), - METRICS - ); - } - - public static OutlierDetection fromXContent(XContentParser parser) { - return PARSER.apply(parser, null); - } - - /** - * The field where the actual class is marked up. - * The value of this field is assumed to either be 1 or 0, or true or false. - */ - private final String actualField; - - /** - * The field of the predicted probability in [0.0, 1.0]. - */ - private final String predictedProbabilityField; - - /** - * The list of metrics to calculate - */ - private final List metrics; - - public OutlierDetection(String actualField, String predictedField) { - this(actualField, predictedField, (List) null); - } - - public OutlierDetection(String actualField, String predictedProbabilityField, EvaluationMetric... metric) { - this(actualField, predictedProbabilityField, Arrays.asList(metric)); - } - - public OutlierDetection(String actualField, String predictedProbabilityField, @Nullable List metrics) { - this.actualField = Objects.requireNonNull(actualField); - this.predictedProbabilityField = Objects.requireNonNull(predictedProbabilityField); - if (metrics != null) { - metrics.sort(Comparator.comparing(EvaluationMetric::getName)); - } - this.metrics = metrics; - } - - @Override - public String getName() { - return NAME; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { - builder.startObject(); - builder.field(ACTUAL_FIELD.getPreferredName(), actualField); - builder.field(PREDICTED_PROBABILITY_FIELD.getPreferredName(), predictedProbabilityField); - - if (metrics != null) { - builder.startObject(METRICS.getPreferredName()); - for (EvaluationMetric metric : metrics) { - builder.field(metric.getName(), metric); - } - builder.endObject(); - } - - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - OutlierDetection that = (OutlierDetection) o; - return Objects.equals(actualField, that.actualField) - && Objects.equals(predictedProbabilityField, that.predictedProbabilityField) - && Objects.equals(metrics, that.metrics); - } - - @Override - public int hashCode() { - return Objects.hash(actualField, predictedProbabilityField, metrics); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/outlierdetection/PrecisionMetric.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/outlierdetection/PrecisionMetric.java deleted file mode 100644 index 4f992615d79af..0000000000000 --- 
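// Editor's sketch (not part of the patch): assembling the outlier detection evaluation
// deleted above; the field names are invented, and the metrics are the outlier detection
// variants deleted further below in this patch.
OutlierDetection evaluation = new OutlierDetection(
    "is_outlier",       // actual_field holding 1/0 or true/false
    "ml.outlier_score", // predicted_probability_field in [0.0, 1.0]
    AucRocMetric.withCurve(),
    PrecisionMetric.at(0.25, 0.5, 0.75),
    RecallMetric.at(0.25, 0.5, 0.75)
);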
a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/outlierdetection/PrecisionMetric.java +++ /dev/null @@ -1,114 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.dataframe.evaluation.outlierdetection; - -import org.elasticsearch.client.ml.dataframe.evaluation.EvaluationMetric; -import org.elasticsearch.common.Strings; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ToXContent; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Arrays; -import java.util.LinkedHashMap; -import java.util.List; -import java.util.Map; -import java.util.Objects; - -import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; - -public class PrecisionMetric extends AbstractConfusionMatrixMetric { - - public static final String NAME = "precision"; - - @SuppressWarnings("unchecked") - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - NAME, - args -> new PrecisionMetric((List) args[0]) - ); - - static { - PARSER.declareDoubleArray(constructorArg(), AT); - } - - public static PrecisionMetric fromXContent(XContentParser parser) { - return PARSER.apply(parser, null); - } - - public static PrecisionMetric at(Double... at) { - return new PrecisionMetric(Arrays.asList(at)); - } - - public PrecisionMetric(List at) { - super(at); - } - - @Override - public String getName() { - return NAME; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - PrecisionMetric that = (PrecisionMetric) o; - return Arrays.equals(thresholds, that.thresholds); - } - - @Override - public int hashCode() { - return Arrays.hashCode(thresholds); - } - - public static class Result implements EvaluationMetric.Result { - - public static Result fromXContent(XContentParser parser) throws IOException { - return new Result(parser.map(LinkedHashMap::new, p -> p.doubleValue())); - } - - private final Map results; - - public Result(Map results) { - this.results = Objects.requireNonNull(results); - } - - @Override - public String getMetricName() { - return NAME; - } - - public Double getScoreByThreshold(String threshold) { - return results.get(threshold); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { - return builder.map(results); - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - Result that = (Result) o; - return Objects.equals(results, that.results); - } - - @Override - public int hashCode() { - return Objects.hash(results); - } - - @Override - public String toString() { - return Strings.toString(this); - } - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/outlierdetection/RecallMetric.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/outlierdetection/RecallMetric.java deleted file mode 100644 index 
531c62f825722..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/outlierdetection/RecallMetric.java +++ /dev/null @@ -1,114 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.dataframe.evaluation.outlierdetection; - -import org.elasticsearch.client.ml.dataframe.evaluation.EvaluationMetric; -import org.elasticsearch.common.Strings; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ToXContent; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Arrays; -import java.util.LinkedHashMap; -import java.util.List; -import java.util.Map; -import java.util.Objects; - -import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; - -public class RecallMetric extends AbstractConfusionMatrixMetric { - - public static final String NAME = "recall"; - - @SuppressWarnings("unchecked") - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - NAME, - args -> new RecallMetric((List) args[0]) - ); - - static { - PARSER.declareDoubleArray(constructorArg(), AT); - } - - public static RecallMetric fromXContent(XContentParser parser) { - return PARSER.apply(parser, null); - } - - public static RecallMetric at(Double... at) { - return new RecallMetric(Arrays.asList(at)); - } - - public RecallMetric(List at) { - super(at); - } - - @Override - public String getName() { - return NAME; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - RecallMetric that = (RecallMetric) o; - return Arrays.equals(thresholds, that.thresholds); - } - - @Override - public int hashCode() { - return Arrays.hashCode(thresholds); - } - - public static class Result implements EvaluationMetric.Result { - - public static Result fromXContent(XContentParser parser) throws IOException { - return new Result(parser.map(LinkedHashMap::new, p -> p.doubleValue())); - } - - private final Map results; - - public Result(Map results) { - this.results = Objects.requireNonNull(results); - } - - @Override - public String getMetricName() { - return NAME; - } - - public Double getScoreByThreshold(String threshold) { - return results.get(threshold); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { - return builder.map(results); - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - Result that = (Result) o; - return Objects.equals(results, that.results); - } - - @Override - public int hashCode() { - return Objects.hash(results); - } - - @Override - public String toString() { - return Strings.toString(this); - } - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/regression/HuberMetric.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/regression/HuberMetric.java deleted file mode 100644 index 
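// Editor's sketch (not part of the patch): the thresholded precision and recall that the
// two outlier detection metrics above report for each "at" value; scores and labels are
// invented. A document counts as a predicted outlier when its score meets the threshold.
double at = 0.5;
double[] scores = { 0.9, 0.8, 0.4, 0.1 };
boolean[] isOutlier = { true, false, true, false };
long tp = 0, fp = 0, fn = 0;
for (int i = 0; i < scores.length; i++) {
    boolean predictedOutlier = scores[i] >= at;
    if (predictedOutlier && isOutlier[i]) tp++;
    else if (predictedOutlier) fp++;
    else if (isOutlier[i]) fn++;
}
double precisionAt = (double) tp / (tp + fp); // keyed as "0.5" in PrecisionMetric.Result: 0.5
double recallAt = (double) tp / (tp + fn);    // keyed as "0.5" in RecallMetric.Result: 0.5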
038c659324da4..0000000000000
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/regression/HuberMetric.java
+++ /dev/null
@@ -1,138 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-package org.elasticsearch.client.ml.dataframe.evaluation.regression;
-
-import org.elasticsearch.client.ml.dataframe.Regression.LossFunction;
-import org.elasticsearch.client.ml.dataframe.evaluation.EvaluationMetric;
-import org.elasticsearch.core.Nullable;
-import org.elasticsearch.xcontent.ConstructingObjectParser;
-import org.elasticsearch.xcontent.ParseField;
-import org.elasticsearch.xcontent.XContentBuilder;
-import org.elasticsearch.xcontent.XContentParser;
-
-import java.io.IOException;
-import java.util.Objects;
-
-import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg;
-import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg;
-
-/**
- * Calculates the pseudo Huber loss function.
- *
- * equation: huber = 1/n * Σ(δ^2 * (sqrt(1 + a^2 / δ^2) - 1))
- * where: a = y - y´
- *        δ - parameter that controls the steepness
- */
-public class HuberMetric implements EvaluationMetric {
-
-    public static final String NAME = LossFunction.HUBER.toString();
-
-    public static final ParseField DELTA = new ParseField("delta");
-
-    private static final ConstructingObjectParser<HuberMetric, Void> PARSER = new ConstructingObjectParser<>(
-        NAME,
-        true,
-        args -> new HuberMetric((Double) args[0])
-    );
-
-    static {
-        PARSER.declareDouble(optionalConstructorArg(), DELTA);
-    }
-
-    public static HuberMetric fromXContent(XContentParser parser) {
-        return PARSER.apply(parser, null);
-    }
-
-    private final Double delta;
-
-    public HuberMetric(@Nullable Double delta) {
-        this.delta = delta;
-    }
-
-    @Override
-    public String getName() {
-        return NAME;
-    }
-
-    @Override
-    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
-        builder.startObject();
-        if (delta != null) {
-            builder.field(DELTA.getPreferredName(), delta);
-        }
-        builder.endObject();
-        return builder;
-    }
-
-    @Override
-    public boolean equals(Object o) {
-        if (this == o) return true;
-        if (o == null || getClass() != o.getClass()) return false;
-        HuberMetric that = (HuberMetric) o;
-        return Objects.equals(this.delta, that.delta);
-    }
-
-    @Override
-    public int hashCode() {
-        return Objects.hash(delta);
-    }
-
-    public static class Result implements EvaluationMetric.Result {
-
-        public static final ParseField VALUE = new ParseField("value");
-        private final double value;
-
-        public static Result fromXContent(XContentParser parser) {
-            return PARSER.apply(parser, null);
-        }
-
-        private static final ConstructingObjectParser<Result, Void> PARSER = new ConstructingObjectParser<>(
-            NAME + "_result",
-            true,
-            args -> new Result((double) args[0])
-        );
-
-        static {
-            PARSER.declareDouble(constructorArg(), VALUE);
-        }
-
-        public Result(double value) {
-            this.value = value;
-        }
-
-        @Override
-        public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
-            builder.startObject();
-            builder.field(VALUE.getPreferredName(), value);
-            builder.endObject();
-            return builder;
-        }
-
-        public double getValue() {
-            return value;
-        }
-
-
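// Editor's sketch (not part of the patch): the pseudo-Huber loss from the Javadoc above,
// huber = 1/n * Σ(δ² * (sqrt(1 + a²/δ²) - 1)); the data points are invented. For small
// |a| it behaves like a²/2, and for large |a| approximately like δ * |a|.
double delta = 1.0;
double[] y = { 3.0, -0.5, 2.0 };    // actual values
double[] yHat = { 2.5, 0.0, 2.0 };  // predicted values
double sum = 0.0;
for (int i = 0; i < y.length; i++) {
    double a = y[i] - yHat[i];
    sum += delta * delta * (Math.sqrt(1.0 + (a * a) / (delta * delta)) - 1.0);
}
double huber = sum / y.length;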
@Override - public String getMetricName() { - return NAME; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - Result that = (Result) o; - return this.value == that.value; - } - - @Override - public int hashCode() { - return Double.hashCode(value); - } - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/regression/MeanSquaredErrorMetric.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/regression/MeanSquaredErrorMetric.java deleted file mode 100644 index 4c593dc75db4e..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/regression/MeanSquaredErrorMetric.java +++ /dev/null @@ -1,118 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.dataframe.evaluation.regression; - -import org.elasticsearch.client.ml.dataframe.Regression.LossFunction; -import org.elasticsearch.client.ml.dataframe.evaluation.EvaluationMetric; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Objects; - -import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; - -/** - * Calculates the mean squared error between two known numerical fields. 
- * - * equation: mse = 1/n * Σ(y - y´)^2 - */ -public class MeanSquaredErrorMetric implements EvaluationMetric { - - public static final String NAME = LossFunction.MSE.toString(); - - private static final ObjectParser PARSER = new ObjectParser<>(NAME, true, MeanSquaredErrorMetric::new); - - public static MeanSquaredErrorMetric fromXContent(XContentParser parser) { - return PARSER.apply(parser, null); - } - - public MeanSquaredErrorMetric() {} - - @Override - public String getName() { - return NAME; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - return true; - } - - @Override - public int hashCode() { - // create static hash code from name as there are currently no unique fields per class instance - return Objects.hashCode(NAME); - } - - public static class Result implements EvaluationMetric.Result { - - public static final ParseField VALUE = new ParseField("value"); - private final double value; - - public static Result fromXContent(XContentParser parser) { - return PARSER.apply(parser, null); - } - - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - NAME + "_result", - true, - args -> new Result((double) args[0]) - ); - - static { - PARSER.declareDouble(constructorArg(), VALUE); - } - - public Result(double value) { - this.value = value; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(VALUE.getPreferredName(), value); - builder.endObject(); - return builder; - } - - public double getValue() { - return value; - } - - @Override - public String getMetricName() { - return NAME; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - Result that = (Result) o; - return this.value == that.value; - } - - @Override - public int hashCode() { - return Double.hashCode(value); - } - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/regression/MeanSquaredLogarithmicErrorMetric.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/regression/MeanSquaredLogarithmicErrorMetric.java deleted file mode 100644 index 676ee74cb3f83..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/regression/MeanSquaredLogarithmicErrorMetric.java +++ /dev/null @@ -1,137 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
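// Editor's sketch (not part of the patch): the mse equation from the Javadoc above,
// mse = 1/n * Σ(y - y´)²; the values are invented.
double[] y = { 3.0, -0.5, 2.0 };    // actual values
double[] yHat = { 2.5, 0.0, 2.0 };  // predicted values
double sum = 0.0;
for (int i = 0; i < y.length; i++) {
    double diff = y[i] - yHat[i];
    sum += diff * diff;
}
double mse = sum / y.length; // (0.25 + 0.25 + 0.0) / 3 ≈ 0.167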
- */
-package org.elasticsearch.client.ml.dataframe.evaluation.regression;
-
-import org.elasticsearch.client.ml.dataframe.Regression.LossFunction;
-import org.elasticsearch.client.ml.dataframe.evaluation.EvaluationMetric;
-import org.elasticsearch.core.Nullable;
-import org.elasticsearch.xcontent.ConstructingObjectParser;
-import org.elasticsearch.xcontent.ParseField;
-import org.elasticsearch.xcontent.XContentBuilder;
-import org.elasticsearch.xcontent.XContentParser;
-
-import java.io.IOException;
-import java.util.Objects;
-
-import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg;
-import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg;
-
-/**
- * Calculates the mean squared logarithmic error between two known numerical fields.
- *
- * equation: msle = 1/n * Σ(log(y + offset) - log(y´ + offset))^2
- * where offset is used to make sure the argument to the log function is always positive
- */
-public class MeanSquaredLogarithmicErrorMetric implements EvaluationMetric {
-
-    public static final String NAME = LossFunction.MSLE.toString();
-
-    public static final ParseField OFFSET = new ParseField("offset");
-
-    private static final ConstructingObjectParser<MeanSquaredLogarithmicErrorMetric, Void> PARSER = new ConstructingObjectParser<>(
-        NAME,
-        true,
-        args -> new MeanSquaredLogarithmicErrorMetric((Double) args[0])
-    );
-
-    static {
-        PARSER.declareDouble(optionalConstructorArg(), OFFSET);
-    }
-
-    public static MeanSquaredLogarithmicErrorMetric fromXContent(XContentParser parser) {
-        return PARSER.apply(parser, null);
-    }
-
-    private final Double offset;
-
-    public MeanSquaredLogarithmicErrorMetric(@Nullable Double offset) {
-        this.offset = offset;
-    }
-
-    @Override
-    public String getName() {
-        return NAME;
-    }
-
-    @Override
-    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
-        builder.startObject();
-        if (offset != null) {
-            builder.field(OFFSET.getPreferredName(), offset);
-        }
-        builder.endObject();
-        return builder;
-    }
-
-    @Override
-    public boolean equals(Object o) {
-        if (this == o) return true;
-        if (o == null || getClass() != o.getClass()) return false;
-        MeanSquaredLogarithmicErrorMetric that = (MeanSquaredLogarithmicErrorMetric) o;
-        return Objects.equals(this.offset, that.offset);
-    }
-
-    @Override
-    public int hashCode() {
-        return Objects.hash(offset);
-    }
-
-    public static class Result implements EvaluationMetric.Result {
-
-        public static final ParseField VALUE = new ParseField("value");
-        private final double value;
-
-        public static Result fromXContent(XContentParser parser) {
-            return PARSER.apply(parser, null);
-        }
-
-        private static final ConstructingObjectParser<Result, Void> PARSER = new ConstructingObjectParser<>(
-            NAME + "_result",
-            true,
-            args -> new Result((double) args[0])
-        );
-
-        static {
-            PARSER.declareDouble(constructorArg(), VALUE);
-        }
-
-        public Result(double value) {
-            this.value = value;
-        }
-
-        @Override
-        public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
-            builder.startObject();
-            builder.field(VALUE.getPreferredName(), value);
-            builder.endObject();
-            return builder;
-        }
-
-        public double getValue() {
-            return value;
-        }
-
-        @Override
-        public String getMetricName() {
-            return NAME;
-        }
-
-        @Override
-        public boolean equals(Object o) {
-            if (this == o) return true;
-            if (o == null || getClass() != o.getClass()) return false;
-            Result that = (Result) o;
-            return this.value == that.value;
-        }
-
-        @Override
-        public int hashCode() {
-            return Double.hashCode(value);
-        }
-    }
-}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/regression/RSquaredMetric.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/regression/RSquaredMetric.java
deleted file mode 100644
index 496a3d55c0e51..0000000000000
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/regression/RSquaredMetric.java
+++ /dev/null
@@ -1,120 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-package org.elasticsearch.client.ml.dataframe.evaluation.regression;
-
-import org.elasticsearch.client.ml.dataframe.evaluation.EvaluationMetric;
-import org.elasticsearch.xcontent.ConstructingObjectParser;
-import org.elasticsearch.xcontent.ObjectParser;
-import org.elasticsearch.xcontent.ParseField;
-import org.elasticsearch.xcontent.XContentBuilder;
-import org.elasticsearch.xcontent.XContentParser;
-
-import java.io.IOException;
-import java.util.Objects;
-
-import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg;
-
-/**
- * Calculates R-Squared between two known numerical fields.
- *
- * equation: r_squared = 1 - SSres/SStot
- * such that,
- * SSres = Σ(y - y´)^2
- * SStot = Σ(y - y_mean)^2
- */
-public class RSquaredMetric implements EvaluationMetric {
-
-    public static final String NAME = "r_squared";
-
-    private static final ObjectParser<RSquaredMetric, Void> PARSER = new ObjectParser<>(NAME, true, RSquaredMetric::new);
-
-    public static RSquaredMetric fromXContent(XContentParser parser) {
-        return PARSER.apply(parser, null);
-    }
-
-    public RSquaredMetric() {}
-
-    @Override
-    public String getName() {
-        return NAME;
-    }
-
-    @Override
-    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
-        builder.startObject();
-        builder.endObject();
-        return builder;
-    }
-
-    @Override
-    public boolean equals(Object o) {
-        if (this == o) return true;
-        if (o == null || getClass() != o.getClass()) return false;
-        return true;
-    }
-
-    @Override
-    public int hashCode() {
-        // create static hash code from name as there are currently no unique fields per class instance
-        return Objects.hashCode(NAME);
-    }
-
-    public static class Result implements EvaluationMetric.Result {
-
-        public static final ParseField VALUE = new ParseField("value");
-        private final double value;
-
-        public static Result fromXContent(XContentParser parser) {
-            return PARSER.apply(parser, null);
-        }
-
-        private static final ConstructingObjectParser<Result, Void> PARSER = new ConstructingObjectParser<>(
-            NAME + "_result",
-            true,
-            args -> new Result((double) args[0])
-        );
-
-        static {
-            PARSER.declareDouble(constructorArg(), VALUE);
-        }
-
-        public Result(double value) {
-            this.value = value;
-        }
-
-        @Override
-        public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
-            builder.startObject();
-            builder.field(VALUE.getPreferredName(), value);
-            builder.endObject();
-            return builder;
-        }
-
-        public double getValue() {
-            return value;
-        }
-
-        @Override
-        public String getMetricName() {
-            return NAME;
-        }
-
-        @Override
-        public boolean equals(Object o) {
-            if (this == o) return true;
-            if (o == null || getClass() != o.getClass()) return false;
-            Result that =
(Result) o; - return this.value == that.value; - } - - @Override - public int hashCode() { - return Double.hashCode(value); - } - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/regression/Regression.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/regression/Regression.java deleted file mode 100644 index 622013957281e..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/evaluation/regression/Regression.java +++ /dev/null @@ -1,132 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.dataframe.evaluation.regression; - -import org.elasticsearch.client.ml.dataframe.evaluation.Evaluation; -import org.elasticsearch.client.ml.dataframe.evaluation.EvaluationMetric; -import org.elasticsearch.core.Nullable; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContent; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Arrays; -import java.util.Comparator; -import java.util.List; -import java.util.Objects; - -import static org.elasticsearch.client.ml.dataframe.evaluation.MlEvaluationNamedXContentProvider.registeredMetricName; -import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; -import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; - -/** - * Evaluation of regression results. - */ -public class Regression implements Evaluation { - - public static final String NAME = "regression"; - - private static final ParseField ACTUAL_FIELD = new ParseField("actual_field"); - private static final ParseField PREDICTED_FIELD = new ParseField("predicted_field"); - private static final ParseField METRICS = new ParseField("metrics"); - - @SuppressWarnings("unchecked") - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - NAME, - true, - a -> new Regression((String) a[0], (String) a[1], (List) a[2]) - ); - - static { - PARSER.declareString(constructorArg(), ACTUAL_FIELD); - PARSER.declareString(constructorArg(), PREDICTED_FIELD); - PARSER.declareNamedObjects( - optionalConstructorArg(), - (p, c, n) -> p.namedObject(EvaluationMetric.class, registeredMetricName(NAME, n), c), - METRICS - ); - } - - public static Regression fromXContent(XContentParser parser) { - return PARSER.apply(parser, null); - } - - /** - * The field containing the actual value - * The value of this field is assumed to be numeric - */ - private final String actualField; - - /** - * The field containing the predicted value - * The value of this field is assumed to be numeric - */ - private final String predictedField; - - /** - * The list of metrics to calculate - */ - private final List metrics; - - public Regression(String actualField, String predictedField) { - this(actualField, predictedField, (List) null); - } - - public Regression(String actualField, String predictedField, EvaluationMetric... 
metrics) { - this(actualField, predictedField, Arrays.asList(metrics)); - } - - public Regression(String actualField, String predictedField, @Nullable List metrics) { - this.actualField = Objects.requireNonNull(actualField); - this.predictedField = Objects.requireNonNull(predictedField); - if (metrics != null) { - metrics.sort(Comparator.comparing(EvaluationMetric::getName)); - } - this.metrics = metrics; - } - - @Override - public String getName() { - return NAME; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { - builder.startObject(); - builder.field(ACTUAL_FIELD.getPreferredName(), actualField); - builder.field(PREDICTED_FIELD.getPreferredName(), predictedField); - - if (metrics != null) { - builder.startObject(METRICS.getPreferredName()); - for (EvaluationMetric metric : metrics) { - builder.field(metric.getName(), metric); - } - builder.endObject(); - } - - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - Regression that = (Regression) o; - return Objects.equals(that.actualField, this.actualField) - && Objects.equals(that.predictedField, this.predictedField) - && Objects.equals(that.metrics, this.metrics); - } - - @Override - public int hashCode() { - return Objects.hash(actualField, predictedField, metrics); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/explain/FieldSelection.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/explain/FieldSelection.java deleted file mode 100644 index e6a0362e3c0ca..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/explain/FieldSelection.java +++ /dev/null @@ -1,161 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
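For reference, the Regression evaluation and its RSquaredMetric removed above were driven from the client side roughly as follows. This is a minimal sketch, not taken from this patch: the field names "actual_price" and "predicted_price" are hypothetical, and rendering goes through the stock XContent helpers.

    import org.elasticsearch.client.ml.dataframe.evaluation.regression.RSquaredMetric;
    import org.elasticsearch.client.ml.dataframe.evaluation.regression.Regression;
    import org.elasticsearch.common.Strings;
    import org.elasticsearch.xcontent.ToXContent;
    import org.elasticsearch.xcontent.XContentBuilder;
    import org.elasticsearch.xcontent.XContentFactory;

    public class RegressionEvaluationSketch {
        public static void main(String[] args) throws Exception {
            // Hypothetical field names; both fields are assumed to hold numeric values.
            Regression evaluation = new Regression("actual_price", "predicted_price", new RSquaredMetric());

            // RSquaredMetric serializes as an empty object, so this prints:
            // {"actual_field":"actual_price","predicted_field":"predicted_price","metrics":{"r_squared":{}}}
            XContentBuilder builder = evaluation.toXContent(XContentFactory.jsonBuilder(), ToXContent.EMPTY_PARAMS);
            System.out.println(Strings.toString(builder));
        }
    }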
- */ -package org.elasticsearch.client.ml.dataframe.explain; - -import org.elasticsearch.core.Nullable; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Collections; -import java.util.HashSet; -import java.util.List; -import java.util.Locale; -import java.util.Objects; -import java.util.Set; - -public class FieldSelection implements ToXContentObject { - - private static final ParseField NAME = new ParseField("name"); - private static final ParseField MAPPING_TYPES = new ParseField("mapping_types"); - private static final ParseField IS_INCLUDED = new ParseField("is_included"); - private static final ParseField IS_REQUIRED = new ParseField("is_required"); - private static final ParseField FEATURE_TYPE = new ParseField("feature_type"); - private static final ParseField REASON = new ParseField("reason"); - - public enum FeatureType { - CATEGORICAL, - NUMERICAL; - - public static FeatureType fromString(String value) { - return FeatureType.valueOf(value.toUpperCase(Locale.ROOT)); - } - - @Override - public String toString() { - return name().toLowerCase(Locale.ROOT); - } - } - - @SuppressWarnings("unchecked") - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "field_selection", - true, - a -> new FieldSelection( - (String) a[0], - new HashSet<>((List) a[1]), - (boolean) a[2], - (boolean) a[3], - (FeatureType) a[4], - (String) a[5] - ) - ); - - static { - PARSER.declareString(ConstructingObjectParser.constructorArg(), NAME); - PARSER.declareStringArray(ConstructingObjectParser.constructorArg(), MAPPING_TYPES); - PARSER.declareBoolean(ConstructingObjectParser.constructorArg(), IS_INCLUDED); - PARSER.declareBoolean(ConstructingObjectParser.constructorArg(), IS_REQUIRED); - PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), FeatureType::fromString, FEATURE_TYPE); - PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), REASON); - } - - private final String name; - private final Set mappingTypes; - private final boolean isIncluded; - private final boolean isRequired; - private final FeatureType featureType; - private final String reason; - - public static FieldSelection included(String name, Set mappingTypes, boolean isRequired, FeatureType featureType) { - return new FieldSelection(name, mappingTypes, true, isRequired, featureType, null); - } - - public static FieldSelection excluded(String name, Set mappingTypes, String reason) { - return new FieldSelection(name, mappingTypes, false, false, null, reason); - } - - FieldSelection( - String name, - Set mappingTypes, - boolean isIncluded, - boolean isRequired, - @Nullable FeatureType featureType, - @Nullable String reason - ) { - this.name = Objects.requireNonNull(name); - this.mappingTypes = Collections.unmodifiableSet(mappingTypes); - this.isIncluded = isIncluded; - this.isRequired = isRequired; - this.featureType = featureType; - this.reason = reason; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(NAME.getPreferredName(), name); - builder.field(MAPPING_TYPES.getPreferredName(), mappingTypes); - builder.field(IS_INCLUDED.getPreferredName(), isIncluded); - builder.field(IS_REQUIRED.getPreferredName(), isRequired); - if (featureType != null) { - 
builder.field(FEATURE_TYPE.getPreferredName(), featureType); - } - if (reason != null) { - builder.field(REASON.getPreferredName(), reason); - } - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - FieldSelection that = (FieldSelection) o; - return Objects.equals(name, that.name) - && Objects.equals(mappingTypes, that.mappingTypes) - && isIncluded == that.isIncluded - && isRequired == that.isRequired - && Objects.equals(featureType, that.featureType) - && Objects.equals(reason, that.reason); - } - - @Override - public int hashCode() { - return Objects.hash(name, mappingTypes, isIncluded, isRequired, featureType, reason); - } - - public String getName() { - return name; - } - - public Set getMappingTypes() { - return mappingTypes; - } - - public boolean isIncluded() { - return isIncluded; - } - - public boolean isRequired() { - return isRequired; - } - - @Nullable - public FeatureType getFeatureType() { - return featureType; - } - - @Nullable - public String getReason() { - return reason; - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/explain/MemoryEstimation.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/explain/MemoryEstimation.java deleted file mode 100644 index 54525134853aa..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/explain/MemoryEstimation.java +++ /dev/null @@ -1,96 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
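The FieldSelection class removed above is built through its two factory methods rather than the package-private constructor. A minimal sketch, with illustrative field names and mapping types:

    import org.elasticsearch.client.ml.dataframe.explain.FieldSelection;

    import java.util.Set;

    public class FieldSelectionSketch {
        public static void main(String[] args) {
            // A numeric field the analysis will use (values are illustrative).
            FieldSelection price = FieldSelection.included("price", Set.of("float"), true, FieldSelection.FeatureType.NUMERICAL);

            // A field the analysis skips, together with the reason.
            FieldSelection id = FieldSelection.excluded("id", Set.of("keyword"), "[id] is an id field");

            System.out.println(price.getName() + " included=" + price.isIncluded() + " type=" + price.getFeatureType());
            System.out.println(id.getName() + " excluded because: " + id.getReason());
        }
    }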
- */ -package org.elasticsearch.client.ml.dataframe.explain; - -import org.elasticsearch.common.unit.ByteSizeValue; -import org.elasticsearch.core.Nullable; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Objects; - -import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; - -public class MemoryEstimation implements ToXContentObject { - - public static final ParseField EXPECTED_MEMORY_WITHOUT_DISK = new ParseField("expected_memory_without_disk"); - public static final ParseField EXPECTED_MEMORY_WITH_DISK = new ParseField("expected_memory_with_disk"); - - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "memory_estimation", - true, - a -> new MemoryEstimation((ByteSizeValue) a[0], (ByteSizeValue) a[1]) - ); - - static { - PARSER.declareField( - optionalConstructorArg(), - (p, c) -> ByteSizeValue.parseBytesSizeValue(p.text(), EXPECTED_MEMORY_WITHOUT_DISK.getPreferredName()), - EXPECTED_MEMORY_WITHOUT_DISK, - ObjectParser.ValueType.VALUE - ); - PARSER.declareField( - optionalConstructorArg(), - (p, c) -> ByteSizeValue.parseBytesSizeValue(p.text(), EXPECTED_MEMORY_WITH_DISK.getPreferredName()), - EXPECTED_MEMORY_WITH_DISK, - ObjectParser.ValueType.VALUE - ); - } - - private final ByteSizeValue expectedMemoryWithoutDisk; - private final ByteSizeValue expectedMemoryWithDisk; - - public MemoryEstimation(@Nullable ByteSizeValue expectedMemoryWithoutDisk, @Nullable ByteSizeValue expectedMemoryWithDisk) { - this.expectedMemoryWithoutDisk = expectedMemoryWithoutDisk; - this.expectedMemoryWithDisk = expectedMemoryWithDisk; - } - - public ByteSizeValue getExpectedMemoryWithoutDisk() { - return expectedMemoryWithoutDisk; - } - - public ByteSizeValue getExpectedMemoryWithDisk() { - return expectedMemoryWithDisk; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - if (expectedMemoryWithoutDisk != null) { - builder.field(EXPECTED_MEMORY_WITHOUT_DISK.getPreferredName(), expectedMemoryWithoutDisk.getStringRep()); - } - if (expectedMemoryWithDisk != null) { - builder.field(EXPECTED_MEMORY_WITH_DISK.getPreferredName(), expectedMemoryWithDisk.getStringRep()); - } - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object other) { - if (this == other) { - return true; - } - if (other == null || getClass() != other.getClass()) { - return false; - } - - MemoryEstimation that = (MemoryEstimation) other; - return Objects.equals(expectedMemoryWithoutDisk, that.expectedMemoryWithoutDisk) - && Objects.equals(expectedMemoryWithDisk, that.expectedMemoryWithDisk); - } - - @Override - public int hashCode() { - return Objects.hash(expectedMemoryWithoutDisk, expectedMemoryWithDisk); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/AnalysisStats.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/AnalysisStats.java deleted file mode 100644 index dcd21d6f6b3e1..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/AnalysisStats.java +++ /dev/null @@ -1,18 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. 
under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.dataframe.stats; - -import org.elasticsearch.xcontent.ToXContentObject; - -/** - * Statistics for the data frame analysis - */ -public interface AnalysisStats extends ToXContentObject { - - String getName(); -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/AnalysisStatsNamedXContentProvider.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/AnalysisStatsNamedXContentProvider.java deleted file mode 100644 index 4da0981fa87d3..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/AnalysisStatsNamedXContentProvider.java +++ /dev/null @@ -1,37 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.dataframe.stats; - -import org.elasticsearch.client.ml.dataframe.stats.classification.ClassificationStats; -import org.elasticsearch.client.ml.dataframe.stats.outlierdetection.OutlierDetectionStats; -import org.elasticsearch.client.ml.dataframe.stats.regression.RegressionStats; -import org.elasticsearch.plugins.spi.NamedXContentProvider; -import org.elasticsearch.xcontent.NamedXContentRegistry; - -import java.util.Arrays; -import java.util.List; - -public class AnalysisStatsNamedXContentProvider implements NamedXContentProvider { - - @Override - public List getNamedXContentParsers() { - return Arrays.asList( - new NamedXContentRegistry.Entry( - AnalysisStats.class, - ClassificationStats.NAME, - (p, c) -> ClassificationStats.PARSER.apply(p, null) - ), - new NamedXContentRegistry.Entry( - AnalysisStats.class, - OutlierDetectionStats.NAME, - (p, c) -> OutlierDetectionStats.PARSER.apply(p, null) - ), - new NamedXContentRegistry.Entry(AnalysisStats.class, RegressionStats.NAME, (p, c) -> RegressionStats.PARSER.apply(p, null)) - ); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/classification/ClassificationStats.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/classification/ClassificationStats.java deleted file mode 100644 index e8367ae13c95e..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/classification/ClassificationStats.java +++ /dev/null @@ -1,126 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
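AnalysisStats is the polymorphic interface behind the per-analysis stats blocks, and AnalysisStatsNamedXContentProvider wires the concrete parsers into a NamedXContentRegistry. A minimal sketch of that registration, using only the classes visible in this patch:

    import org.elasticsearch.client.ml.dataframe.stats.AnalysisStatsNamedXContentProvider;
    import org.elasticsearch.xcontent.NamedXContentRegistry;

    public class AnalysisStatsRegistrySketch {
        public static void main(String[] args) {
            // The provider contributes one entry per analysis type (classification_stats,
            // outlier_detection_stats and the regression equivalent), all keyed under AnalysisStats.
            NamedXContentRegistry registry = new NamedXContentRegistry(
                new AnalysisStatsNamedXContentProvider().getNamedXContentParsers()
            );
            // Parsers created with this registry can then resolve AnalysisStats subtrees by name.
        }
    }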
- */ -package org.elasticsearch.client.ml.dataframe.stats.classification; - -import org.elasticsearch.client.common.TimeUtil; -import org.elasticsearch.client.ml.dataframe.stats.AnalysisStats; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContent; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.time.Instant; -import java.util.Objects; - -public class ClassificationStats implements AnalysisStats { - - public static final ParseField NAME = new ParseField("classification_stats"); - - public static final ParseField TIMESTAMP = new ParseField("timestamp"); - public static final ParseField ITERATION = new ParseField("iteration"); - public static final ParseField HYPERPARAMETERS = new ParseField("hyperparameters"); - public static final ParseField TIMING_STATS = new ParseField("timing_stats"); - public static final ParseField VALIDATION_LOSS = new ParseField("validation_loss"); - - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - NAME.getPreferredName(), - true, - a -> new ClassificationStats((Instant) a[0], (Integer) a[1], (Hyperparameters) a[2], (TimingStats) a[3], (ValidationLoss) a[4]) - ); - - static { - PARSER.declareField( - ConstructingObjectParser.constructorArg(), - p -> TimeUtil.parseTimeFieldToInstant(p, TIMESTAMP.getPreferredName()), - TIMESTAMP, - ObjectParser.ValueType.VALUE - ); - PARSER.declareInt(ConstructingObjectParser.optionalConstructorArg(), ITERATION); - PARSER.declareObject(ConstructingObjectParser.constructorArg(), Hyperparameters.PARSER, HYPERPARAMETERS); - PARSER.declareObject(ConstructingObjectParser.constructorArg(), TimingStats.PARSER, TIMING_STATS); - PARSER.declareObject(ConstructingObjectParser.constructorArg(), ValidationLoss.PARSER, VALIDATION_LOSS); - } - - private final Instant timestamp; - private final Integer iteration; - private final Hyperparameters hyperparameters; - private final TimingStats timingStats; - private final ValidationLoss validationLoss; - - public ClassificationStats( - Instant timestamp, - Integer iteration, - Hyperparameters hyperparameters, - TimingStats timingStats, - ValidationLoss validationLoss - ) { - this.timestamp = Instant.ofEpochMilli(Objects.requireNonNull(timestamp).toEpochMilli()); - this.iteration = iteration; - this.hyperparameters = Objects.requireNonNull(hyperparameters); - this.timingStats = Objects.requireNonNull(timingStats); - this.validationLoss = Objects.requireNonNull(validationLoss); - } - - public Instant getTimestamp() { - return timestamp; - } - - public Integer getIteration() { - return iteration; - } - - public Hyperparameters getHyperparameters() { - return hyperparameters; - } - - public TimingStats getTimingStats() { - return timingStats; - } - - public ValidationLoss getValidationLoss() { - return validationLoss; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { - builder.startObject(); - builder.timeField(TIMESTAMP.getPreferredName(), TIMESTAMP.getPreferredName() + "_string", timestamp.toEpochMilli()); - if (iteration != null) { - builder.field(ITERATION.getPreferredName(), iteration); - } - builder.field(HYPERPARAMETERS.getPreferredName(), hyperparameters); - builder.field(TIMING_STATS.getPreferredName(), timingStats); - builder.field(VALIDATION_LOSS.getPreferredName(), validationLoss); - 
builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - ClassificationStats that = (ClassificationStats) o; - return Objects.equals(timestamp, that.timestamp) - && Objects.equals(iteration, that.iteration) - && Objects.equals(hyperparameters, that.hyperparameters) - && Objects.equals(timingStats, that.timingStats) - && Objects.equals(validationLoss, that.validationLoss); - } - - @Override - public int hashCode() { - return Objects.hash(timestamp, iteration, hyperparameters, timingStats, validationLoss); - } - - @Override - public String getName() { - return NAME.getPreferredName(); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/classification/Hyperparameters.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/classification/Hyperparameters.java deleted file mode 100644 index c136928aeb76f..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/classification/Hyperparameters.java +++ /dev/null @@ -1,285 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.dataframe.stats.classification; - -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Objects; - -import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; - -public class Hyperparameters implements ToXContentObject { - - public static final ParseField CLASS_ASSIGNMENT_OBJECTIVE = new ParseField("class_assignment_objective"); - public static final ParseField ALPHA = new ParseField("alpha"); - public static final ParseField DOWNSAMPLE_FACTOR = new ParseField("downsample_factor"); - public static final ParseField ETA = new ParseField("eta"); - public static final ParseField ETA_GROWTH_RATE_PER_TREE = new ParseField("eta_growth_rate_per_tree"); - public static final ParseField FEATURE_BAG_FRACTION = new ParseField("feature_bag_fraction"); - public static final ParseField GAMMA = new ParseField("gamma"); - public static final ParseField LAMBDA = new ParseField("lambda"); - public static final ParseField MAX_ATTEMPTS_TO_ADD_TREE = new ParseField("max_attempts_to_add_tree"); - public static final ParseField MAX_OPTIMIZATION_ROUNDS_PER_HYPERPARAMETER = new ParseField( - "max_optimization_rounds_per_hyperparameter" - ); - public static final ParseField MAX_TREES = new ParseField("max_trees"); - public static final ParseField NUM_FOLDS = new ParseField("num_folds"); - public static final ParseField NUM_SPLITS_PER_FEATURE = new ParseField("num_splits_per_feature"); - public static final ParseField SOFT_TREE_DEPTH_LIMIT = new ParseField("soft_tree_depth_limit"); - public static final ParseField SOFT_TREE_DEPTH_TOLERANCE = new ParseField("soft_tree_depth_tolerance"); - - public static ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "classification_hyperparameters", - true, - a -> new 
Hyperparameters( - (String) a[0], - (Double) a[1], - (Double) a[2], - (Double) a[3], - (Double) a[4], - (Double) a[5], - (Double) a[6], - (Double) a[7], - (Integer) a[8], - (Integer) a[9], - (Integer) a[10], - (Integer) a[11], - (Integer) a[12], - (Double) a[13], - (Double) a[14] - ) - ); - - static { - PARSER.declareString(optionalConstructorArg(), CLASS_ASSIGNMENT_OBJECTIVE); - PARSER.declareDouble(optionalConstructorArg(), ALPHA); - PARSER.declareDouble(optionalConstructorArg(), DOWNSAMPLE_FACTOR); - PARSER.declareDouble(optionalConstructorArg(), ETA); - PARSER.declareDouble(optionalConstructorArg(), ETA_GROWTH_RATE_PER_TREE); - PARSER.declareDouble(optionalConstructorArg(), FEATURE_BAG_FRACTION); - PARSER.declareDouble(optionalConstructorArg(), GAMMA); - PARSER.declareDouble(optionalConstructorArg(), LAMBDA); - PARSER.declareInt(optionalConstructorArg(), MAX_ATTEMPTS_TO_ADD_TREE); - PARSER.declareInt(optionalConstructorArg(), MAX_OPTIMIZATION_ROUNDS_PER_HYPERPARAMETER); - PARSER.declareInt(optionalConstructorArg(), MAX_TREES); - PARSER.declareInt(optionalConstructorArg(), NUM_FOLDS); - PARSER.declareInt(optionalConstructorArg(), NUM_SPLITS_PER_FEATURE); - PARSER.declareDouble(optionalConstructorArg(), SOFT_TREE_DEPTH_LIMIT); - PARSER.declareDouble(optionalConstructorArg(), SOFT_TREE_DEPTH_TOLERANCE); - } - - private final String classAssignmentObjective; - private final Double alpha; - private final Double downsampleFactor; - private final Double eta; - private final Double etaGrowthRatePerTree; - private final Double featureBagFraction; - private final Double gamma; - private final Double lambda; - private final Integer maxAttemptsToAddTree; - private final Integer maxOptimizationRoundsPerHyperparameter; - private final Integer maxTrees; - private final Integer numFolds; - private final Integer numSplitsPerFeature; - private final Double softTreeDepthLimit; - private final Double softTreeDepthTolerance; - - public Hyperparameters( - String classAssignmentObjective, - Double alpha, - Double downsampleFactor, - Double eta, - Double etaGrowthRatePerTree, - Double featureBagFraction, - Double gamma, - Double lambda, - Integer maxAttemptsToAddTree, - Integer maxOptimizationRoundsPerHyperparameter, - Integer maxTrees, - Integer numFolds, - Integer numSplitsPerFeature, - Double softTreeDepthLimit, - Double softTreeDepthTolerance - ) { - this.classAssignmentObjective = classAssignmentObjective; - this.alpha = alpha; - this.downsampleFactor = downsampleFactor; - this.eta = eta; - this.etaGrowthRatePerTree = etaGrowthRatePerTree; - this.featureBagFraction = featureBagFraction; - this.gamma = gamma; - this.lambda = lambda; - this.maxAttemptsToAddTree = maxAttemptsToAddTree; - this.maxOptimizationRoundsPerHyperparameter = maxOptimizationRoundsPerHyperparameter; - this.maxTrees = maxTrees; - this.numFolds = numFolds; - this.numSplitsPerFeature = numSplitsPerFeature; - this.softTreeDepthLimit = softTreeDepthLimit; - this.softTreeDepthTolerance = softTreeDepthTolerance; - } - - public String getClassAssignmentObjective() { - return classAssignmentObjective; - } - - public Double getAlpha() { - return alpha; - } - - public Double getDownsampleFactor() { - return downsampleFactor; - } - - public Double getEta() { - return eta; - } - - public Double getEtaGrowthRatePerTree() { - return etaGrowthRatePerTree; - } - - public Double getFeatureBagFraction() { - return featureBagFraction; - } - - public Double getGamma() { - return gamma; - } - - public Double getLambda() { - return lambda; - } - - public 
Integer getMaxAttemptsToAddTree() { - return maxAttemptsToAddTree; - } - - public Integer getMaxOptimizationRoundsPerHyperparameter() { - return maxOptimizationRoundsPerHyperparameter; - } - - public Integer getMaxTrees() { - return maxTrees; - } - - public Integer getNumFolds() { - return numFolds; - } - - public Integer getNumSplitsPerFeature() { - return numSplitsPerFeature; - } - - public Double getSoftTreeDepthLimit() { - return softTreeDepthLimit; - } - - public Double getSoftTreeDepthTolerance() { - return softTreeDepthTolerance; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - if (classAssignmentObjective != null) { - builder.field(CLASS_ASSIGNMENT_OBJECTIVE.getPreferredName(), classAssignmentObjective); - } - if (alpha != null) { - builder.field(ALPHA.getPreferredName(), alpha); - } - if (downsampleFactor != null) { - builder.field(DOWNSAMPLE_FACTOR.getPreferredName(), downsampleFactor); - } - if (eta != null) { - builder.field(ETA.getPreferredName(), eta); - } - if (etaGrowthRatePerTree != null) { - builder.field(ETA_GROWTH_RATE_PER_TREE.getPreferredName(), etaGrowthRatePerTree); - } - if (featureBagFraction != null) { - builder.field(FEATURE_BAG_FRACTION.getPreferredName(), featureBagFraction); - } - if (gamma != null) { - builder.field(GAMMA.getPreferredName(), gamma); - } - if (lambda != null) { - builder.field(LAMBDA.getPreferredName(), lambda); - } - if (maxAttemptsToAddTree != null) { - builder.field(MAX_ATTEMPTS_TO_ADD_TREE.getPreferredName(), maxAttemptsToAddTree); - } - if (maxOptimizationRoundsPerHyperparameter != null) { - builder.field(MAX_OPTIMIZATION_ROUNDS_PER_HYPERPARAMETER.getPreferredName(), maxOptimizationRoundsPerHyperparameter); - } - if (maxTrees != null) { - builder.field(MAX_TREES.getPreferredName(), maxTrees); - } - if (numFolds != null) { - builder.field(NUM_FOLDS.getPreferredName(), numFolds); - } - if (numSplitsPerFeature != null) { - builder.field(NUM_SPLITS_PER_FEATURE.getPreferredName(), numSplitsPerFeature); - } - if (softTreeDepthLimit != null) { - builder.field(SOFT_TREE_DEPTH_LIMIT.getPreferredName(), softTreeDepthLimit); - } - if (softTreeDepthTolerance != null) { - builder.field(SOFT_TREE_DEPTH_TOLERANCE.getPreferredName(), softTreeDepthTolerance); - } - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - - Hyperparameters that = (Hyperparameters) o; - return Objects.equals(classAssignmentObjective, that.classAssignmentObjective) - && Objects.equals(alpha, that.alpha) - && Objects.equals(downsampleFactor, that.downsampleFactor) - && Objects.equals(eta, that.eta) - && Objects.equals(etaGrowthRatePerTree, that.etaGrowthRatePerTree) - && Objects.equals(featureBagFraction, that.featureBagFraction) - && Objects.equals(gamma, that.gamma) - && Objects.equals(lambda, that.lambda) - && Objects.equals(maxAttemptsToAddTree, that.maxAttemptsToAddTree) - && Objects.equals(maxOptimizationRoundsPerHyperparameter, that.maxOptimizationRoundsPerHyperparameter) - && Objects.equals(maxTrees, that.maxTrees) - && Objects.equals(numFolds, that.numFolds) - && Objects.equals(numSplitsPerFeature, that.numSplitsPerFeature) - && Objects.equals(softTreeDepthLimit, that.softTreeDepthLimit) - && Objects.equals(softTreeDepthTolerance, that.softTreeDepthTolerance); - } - - @Override - public int hashCode() { - return Objects.hash( - 
classAssignmentObjective, - alpha, - downsampleFactor, - eta, - etaGrowthRatePerTree, - featureBagFraction, - gamma, - lambda, - maxAttemptsToAddTree, - maxOptimizationRoundsPerHyperparameter, - maxTrees, - numFolds, - numSplitsPerFeature, - softTreeDepthLimit, - softTreeDepthTolerance - ); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/classification/TimingStats.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/classification/TimingStats.java deleted file mode 100644 index 9afeeeeb3a4f8..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/classification/TimingStats.java +++ /dev/null @@ -1,79 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.dataframe.stats.classification; - -import org.elasticsearch.core.TimeValue; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Objects; - -public class TimingStats implements ToXContentObject { - - public static final ParseField ELAPSED_TIME = new ParseField("elapsed_time"); - public static final ParseField ITERATION_TIME = new ParseField("iteration_time"); - - public static ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "classification_timing_stats", - true, - a -> new TimingStats( - a[0] == null ? null : TimeValue.timeValueMillis((long) a[0]), - a[1] == null ? 
null : TimeValue.timeValueMillis((long) a[1]) - ) - ); - - static { - PARSER.declareLong(ConstructingObjectParser.optionalConstructorArg(), ELAPSED_TIME); - PARSER.declareLong(ConstructingObjectParser.optionalConstructorArg(), ITERATION_TIME); - } - - private final TimeValue elapsedTime; - private final TimeValue iterationTime; - - public TimingStats(TimeValue elapsedTime, TimeValue iterationTime) { - this.elapsedTime = elapsedTime; - this.iterationTime = iterationTime; - } - - public TimeValue getElapsedTime() { - return elapsedTime; - } - - public TimeValue getIterationTime() { - return iterationTime; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - if (elapsedTime != null) { - builder.humanReadableField(ELAPSED_TIME.getPreferredName(), ELAPSED_TIME.getPreferredName() + "_string", elapsedTime); - } - if (iterationTime != null) { - builder.humanReadableField(ITERATION_TIME.getPreferredName(), ITERATION_TIME.getPreferredName() + "_string", iterationTime); - } - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - TimingStats that = (TimingStats) o; - return Objects.equals(elapsedTime, that.elapsedTime) && Objects.equals(iterationTime, that.iterationTime); - } - - @Override - public int hashCode() { - return Objects.hash(elapsedTime, iterationTime); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/classification/ValidationLoss.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/classification/ValidationLoss.java deleted file mode 100644 index ca781c8205300..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/classification/ValidationLoss.java +++ /dev/null @@ -1,78 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
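The classification Hyperparameters class above declares every field optional and its PARSER lenient, so a partial JSON object parses cleanly. A minimal parsing sketch; the three-argument createParser overload and the sample values are assumptions, not part of this patch:

    import org.elasticsearch.client.ml.dataframe.stats.classification.Hyperparameters;
    import org.elasticsearch.xcontent.DeprecationHandler;
    import org.elasticsearch.xcontent.NamedXContentRegistry;
    import org.elasticsearch.xcontent.XContentParser;
    import org.elasticsearch.xcontent.XContentType;

    public class HyperparametersParseSketch {
        public static void main(String[] args) throws Exception {
            // Unknown fields are ignored because the parser is created with ignoreUnknownFields = true.
            String json = "{\"eta\":0.1,\"max_trees\":50,\"not_a_real_field\":true}";
            try (
                XContentParser parser = XContentType.JSON.xContent()
                    .createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, json)
            ) {
                Hyperparameters hyperparameters = Hyperparameters.PARSER.apply(parser, null);
                System.out.println("eta=" + hyperparameters.getEta() + ", max_trees=" + hyperparameters.getMaxTrees());
            }
        }
    }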
- */ -package org.elasticsearch.client.ml.dataframe.stats.classification; - -import org.elasticsearch.client.ml.dataframe.stats.common.FoldValues; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.List; -import java.util.Objects; - -public class ValidationLoss implements ToXContentObject { - - public static final ParseField LOSS_TYPE = new ParseField("loss_type"); - public static final ParseField FOLD_VALUES = new ParseField("fold_values"); - - @SuppressWarnings("unchecked") - public static ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "classification_validation_loss", - true, - a -> new ValidationLoss((String) a[0], (List) a[1]) - ); - - static { - PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), LOSS_TYPE); - PARSER.declareObjectArray(ConstructingObjectParser.optionalConstructorArg(), FoldValues.PARSER, FOLD_VALUES); - } - - private final String lossType; - private final List foldValues; - - public ValidationLoss(String lossType, List values) { - this.lossType = lossType; - this.foldValues = values; - } - - public String getLossType() { - return lossType; - } - - public List getFoldValues() { - return foldValues; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - if (lossType != null) { - builder.field(LOSS_TYPE.getPreferredName(), lossType); - } - if (foldValues != null) { - builder.field(FOLD_VALUES.getPreferredName(), foldValues); - } - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - ValidationLoss that = (ValidationLoss) o; - return Objects.equals(lossType, that.lossType) && Objects.equals(foldValues, that.foldValues); - } - - @Override - public int hashCode() { - return Objects.hash(lossType, foldValues); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/common/DataCounts.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/common/DataCounts.java deleted file mode 100644 index 82c4fccb09c8f..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/common/DataCounts.java +++ /dev/null @@ -1,102 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
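ValidationLoss above carries a loss type plus per-fold loss curves, the latter expressed with the FoldValues class removed just below. A minimal construction sketch; the loss type and the numbers are illustrative:

    import org.elasticsearch.client.ml.dataframe.stats.classification.ValidationLoss;
    import org.elasticsearch.client.ml.dataframe.stats.common.FoldValues;

    import java.util.List;

    public class ValidationLossSketch {
        public static void main(String[] args) {
            // One loss value per forest grown, recorded for cross-validation fold 0 (numbers are made up).
            FoldValues fold0 = new FoldValues(0, new double[] { 0.42, 0.31, 0.27 });
            ValidationLoss loss = new ValidationLoss("binomial_logistic", List.of(fold0));
            System.out.println(loss.getLossType() + " over " + loss.getFoldValues().size() + " fold(s)");
        }
    }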
- */ - -package org.elasticsearch.client.ml.dataframe.stats.common; - -import org.elasticsearch.common.inject.internal.ToStringBuilder; -import org.elasticsearch.core.Nullable; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Objects; - -import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; - -public class DataCounts implements ToXContentObject { - - public static final String TYPE_VALUE = "analytics_data_counts"; - - public static final ParseField TRAINING_DOCS_COUNT = new ParseField("training_docs_count"); - public static final ParseField TEST_DOCS_COUNT = new ParseField("test_docs_count"); - public static final ParseField SKIPPED_DOCS_COUNT = new ParseField("skipped_docs_count"); - - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>(TYPE_VALUE, true, a -> { - Long trainingDocsCount = (Long) a[0]; - Long testDocsCount = (Long) a[1]; - Long skippedDocsCount = (Long) a[2]; - return new DataCounts(getOrDefault(trainingDocsCount, 0L), getOrDefault(testDocsCount, 0L), getOrDefault(skippedDocsCount, 0L)); - }); - - static { - PARSER.declareLong(optionalConstructorArg(), TRAINING_DOCS_COUNT); - PARSER.declareLong(optionalConstructorArg(), TEST_DOCS_COUNT); - PARSER.declareLong(optionalConstructorArg(), SKIPPED_DOCS_COUNT); - } - - private final long trainingDocsCount; - private final long testDocsCount; - private final long skippedDocsCount; - - public DataCounts(long trainingDocsCount, long testDocsCount, long skippedDocsCount) { - this.trainingDocsCount = trainingDocsCount; - this.testDocsCount = testDocsCount; - this.skippedDocsCount = skippedDocsCount; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(TRAINING_DOCS_COUNT.getPreferredName(), trainingDocsCount); - builder.field(TEST_DOCS_COUNT.getPreferredName(), testDocsCount); - builder.field(SKIPPED_DOCS_COUNT.getPreferredName(), skippedDocsCount); - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - DataCounts that = (DataCounts) o; - return trainingDocsCount == that.trainingDocsCount - && testDocsCount == that.testDocsCount - && skippedDocsCount == that.skippedDocsCount; - } - - @Override - public int hashCode() { - return Objects.hash(trainingDocsCount, testDocsCount, skippedDocsCount); - } - - @Override - public String toString() { - return new ToStringBuilder(getClass()).add(TRAINING_DOCS_COUNT.getPreferredName(), trainingDocsCount) - .add(TEST_DOCS_COUNT.getPreferredName(), testDocsCount) - .add(SKIPPED_DOCS_COUNT.getPreferredName(), skippedDocsCount) - .toString(); - } - - public long getTrainingDocsCount() { - return trainingDocsCount; - } - - public long getTestDocsCount() { - return testDocsCount; - } - - public long getSkippedDocsCount() { - return skippedDocsCount; - } - - private static T getOrDefault(@Nullable T value, T defaultValue) { - return value != null ? 
value : defaultValue; - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/common/FoldValues.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/common/FoldValues.java deleted file mode 100644 index d9f9fbc74fe70..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/common/FoldValues.java +++ /dev/null @@ -1,79 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.dataframe.stats.common; - -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Arrays; -import java.util.List; -import java.util.Objects; - -public class FoldValues implements ToXContentObject { - - public static final ParseField FOLD = new ParseField("fold"); - public static final ParseField VALUES = new ParseField("values"); - - @SuppressWarnings("unchecked") - public static ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "fold_values", - true, - a -> new FoldValues((int) a[0], (List) a[1]) - ); - - static { - PARSER.declareInt(ConstructingObjectParser.constructorArg(), FOLD); - PARSER.declareDoubleArray(ConstructingObjectParser.constructorArg(), VALUES); - } - - private final int fold; - private final double[] values; - - private FoldValues(int fold, List values) { - this(fold, values.stream().mapToDouble(Double::doubleValue).toArray()); - } - - public FoldValues(int fold, double[] values) { - this.fold = fold; - this.values = values; - } - - public int getFold() { - return fold; - } - - public double[] getValues() { - return values; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(FOLD.getPreferredName(), fold); - builder.array(VALUES.getPreferredName(), values); - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object o) { - if (o == this) return true; - if (o == null || getClass() != o.getClass()) return false; - - FoldValues other = (FoldValues) o; - return fold == other.fold && Arrays.equals(values, other.values); - } - - @Override - public int hashCode() { - return Objects.hash(fold, Arrays.hashCode(values)); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/common/MemoryUsage.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/common/MemoryUsage.java deleted file mode 100644 index a856df9c3130b..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/common/MemoryUsage.java +++ /dev/null @@ -1,133 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
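DataCounts above is a plain triple of document counts, with zero defaults applied at parse time via getOrDefault. A minimal sketch (the counts are made up):

    import org.elasticsearch.client.ml.dataframe.stats.common.DataCounts;

    public class DataCountsSketch {
        public static void main(String[] args) {
            // An 80/20 train/test split with a handful of skipped documents (illustrative).
            DataCounts counts = new DataCounts(8_000, 2_000, 5);
            // toString goes through the ToStringBuilder shown in the deleted source.
            System.out.println(counts);
        }
    }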
- */ -package org.elasticsearch.client.ml.dataframe.stats.common; - -import org.elasticsearch.client.common.TimeUtil; -import org.elasticsearch.common.inject.internal.ToStringBuilder; -import org.elasticsearch.core.Nullable; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.time.Instant; -import java.util.Locale; -import java.util.Objects; - -public class MemoryUsage implements ToXContentObject { - - static final ParseField TIMESTAMP = new ParseField("timestamp"); - static final ParseField PEAK_USAGE_BYTES = new ParseField("peak_usage_bytes"); - static final ParseField STATUS = new ParseField("status"); - static final ParseField MEMORY_REESTIMATE_BYTES = new ParseField("memory_reestimate_bytes"); - - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "analytics_memory_usage", - true, - a -> new MemoryUsage((Instant) a[0], (long) a[1], (Status) a[2], (Long) a[3]) - ); - - static { - PARSER.declareField( - ConstructingObjectParser.optionalConstructorArg(), - p -> TimeUtil.parseTimeFieldToInstant(p, TIMESTAMP.getPreferredName()), - TIMESTAMP, - ObjectParser.ValueType.VALUE - ); - PARSER.declareLong(ConstructingObjectParser.constructorArg(), PEAK_USAGE_BYTES); - PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), Status::fromString, STATUS); - PARSER.declareLong(ConstructingObjectParser.optionalConstructorArg(), MEMORY_REESTIMATE_BYTES); - } - - @Nullable - private final Instant timestamp; - private final long peakUsageBytes; - private final Status status; - private final Long memoryReestimateBytes; - - public MemoryUsage(@Nullable Instant timestamp, long peakUsageBytes, Status status, @Nullable Long memoryReestimateBytes) { - this.timestamp = timestamp == null ? 
null : Instant.ofEpochMilli(Objects.requireNonNull(timestamp).toEpochMilli()); - this.peakUsageBytes = peakUsageBytes; - this.status = status; - this.memoryReestimateBytes = memoryReestimateBytes; - } - - @Nullable - public Instant getTimestamp() { - return timestamp; - } - - public long getPeakUsageBytes() { - return peakUsageBytes; - } - - public Status getStatus() { - return status; - } - - public Long getMemoryReestimateBytes() { - return memoryReestimateBytes; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - if (timestamp != null) { - builder.timeField(TIMESTAMP.getPreferredName(), TIMESTAMP.getPreferredName() + "_string", timestamp.toEpochMilli()); - } - builder.field(PEAK_USAGE_BYTES.getPreferredName(), peakUsageBytes); - builder.field(STATUS.getPreferredName(), status); - if (memoryReestimateBytes != null) { - builder.field(MEMORY_REESTIMATE_BYTES.getPreferredName(), memoryReestimateBytes); - } - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object o) { - if (o == this) return true; - if (o == null || getClass() != o.getClass()) return false; - - MemoryUsage other = (MemoryUsage) o; - return Objects.equals(timestamp, other.timestamp) - && peakUsageBytes == other.peakUsageBytes - && Objects.equals(status, other.status) - && Objects.equals(memoryReestimateBytes, other.memoryReestimateBytes); - } - - @Override - public int hashCode() { - return Objects.hash(timestamp, peakUsageBytes, status, memoryReestimateBytes); - } - - @Override - public String toString() { - return new ToStringBuilder(getClass()).add(TIMESTAMP.getPreferredName(), timestamp == null ? null : timestamp.getEpochSecond()) - .add(PEAK_USAGE_BYTES.getPreferredName(), peakUsageBytes) - .add(STATUS.getPreferredName(), status) - .add(MEMORY_REESTIMATE_BYTES.getPreferredName(), memoryReestimateBytes) - .toString(); - } - - public enum Status { - OK, - HARD_LIMIT; - - public static Status fromString(String value) { - return valueOf(value.toUpperCase(Locale.ROOT)); - } - - @Override - public String toString() { - return name().toLowerCase(Locale.ROOT); - } - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/outlierdetection/OutlierDetectionStats.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/outlierdetection/OutlierDetectionStats.java deleted file mode 100644 index 8481aecf808a0..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/outlierdetection/OutlierDetectionStats.java +++ /dev/null @@ -1,98 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
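MemoryUsage above allows both a null timestamp and a null memory re-estimate, and each is simply omitted from the rendered JSON. A minimal sketch; the byte count is arbitrary:

    import org.elasticsearch.client.ml.dataframe.stats.common.MemoryUsage;

    import java.time.Instant;

    public class MemoryUsageSketch {
        public static void main(String[] args) {
            // Status serializes in lowercase, i.e. "ok" or "hard_limit".
            MemoryUsage usage = new MemoryUsage(Instant.now(), 503_808L, MemoryUsage.Status.OK, null);
            System.out.println(usage);
        }
    }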
- */ -package org.elasticsearch.client.ml.dataframe.stats.outlierdetection; - -import org.elasticsearch.client.common.TimeUtil; -import org.elasticsearch.client.ml.dataframe.stats.AnalysisStats; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContent; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.time.Instant; -import java.util.Objects; - -public class OutlierDetectionStats implements AnalysisStats { - - public static final ParseField NAME = new ParseField("outlier_detection_stats"); - - public static final ParseField TIMESTAMP = new ParseField("timestamp"); - public static final ParseField PARAMETERS = new ParseField("parameters"); - public static final ParseField TIMING_STATS = new ParseField("timing_stats"); - - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - NAME.getPreferredName(), - true, - a -> new OutlierDetectionStats((Instant) a[0], (Parameters) a[1], (TimingStats) a[2]) - ); - - static { - PARSER.declareField( - ConstructingObjectParser.constructorArg(), - p -> TimeUtil.parseTimeFieldToInstant(p, TIMESTAMP.getPreferredName()), - TIMESTAMP, - ObjectParser.ValueType.VALUE - ); - PARSER.declareObject(ConstructingObjectParser.constructorArg(), Parameters.PARSER, PARAMETERS); - PARSER.declareObject(ConstructingObjectParser.constructorArg(), TimingStats.PARSER, TIMING_STATS); - } - - private final Instant timestamp; - private final Parameters parameters; - private final TimingStats timingStats; - - public OutlierDetectionStats(Instant timestamp, Parameters parameters, TimingStats timingStats) { - this.timestamp = Instant.ofEpochMilli(Objects.requireNonNull(timestamp).toEpochMilli()); - this.parameters = Objects.requireNonNull(parameters); - this.timingStats = Objects.requireNonNull(timingStats); - } - - public Instant getTimestamp() { - return timestamp; - } - - public Parameters getParameters() { - return parameters; - } - - public TimingStats getTimingStats() { - return timingStats; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { - builder.startObject(); - builder.timeField(TIMESTAMP.getPreferredName(), TIMESTAMP.getPreferredName() + "_string", timestamp.toEpochMilli()); - builder.field(PARAMETERS.getPreferredName(), parameters); - builder.field(TIMING_STATS.getPreferredName(), timingStats); - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - OutlierDetectionStats that = (OutlierDetectionStats) o; - return Objects.equals(timestamp, that.timestamp) - && Objects.equals(parameters, that.parameters) - && Objects.equals(timingStats, that.timingStats); - } - - @Override - public int hashCode() { - return Objects.hash(timestamp, parameters, timingStats); - } - - @Override - public String getName() { - return NAME.getPreferredName(); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/outlierdetection/Parameters.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/outlierdetection/Parameters.java deleted file mode 100644 index aef6ad0833d42..0000000000000 --- 
a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/outlierdetection/Parameters.java +++ /dev/null @@ -1,142 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.dataframe.stats.outlierdetection; - -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Objects; - -import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; - -public class Parameters implements ToXContentObject { - - public static final ParseField N_NEIGHBORS = new ParseField("n_neighbors"); - public static final ParseField METHOD = new ParseField("method"); - public static final ParseField FEATURE_INFLUENCE_THRESHOLD = new ParseField("feature_influence_threshold"); - public static final ParseField COMPUTE_FEATURE_INFLUENCE = new ParseField("compute_feature_influence"); - public static final ParseField OUTLIER_FRACTION = new ParseField("outlier_fraction"); - public static final ParseField STANDARDIZATION_ENABLED = new ParseField("standardization_enabled"); - - @SuppressWarnings("unchecked") - public static ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "outlier_detection_parameters", - true, - a -> new Parameters((Integer) a[0], (String) a[1], (Boolean) a[2], (Double) a[3], (Double) a[4], (Boolean) a[5]) - ); - - static { - PARSER.declareInt(optionalConstructorArg(), N_NEIGHBORS); - PARSER.declareString(optionalConstructorArg(), METHOD); - PARSER.declareBoolean(optionalConstructorArg(), COMPUTE_FEATURE_INFLUENCE); - PARSER.declareDouble(optionalConstructorArg(), FEATURE_INFLUENCE_THRESHOLD); - PARSER.declareDouble(optionalConstructorArg(), OUTLIER_FRACTION); - PARSER.declareBoolean(optionalConstructorArg(), STANDARDIZATION_ENABLED); - } - - private final Integer nNeighbors; - private final String method; - private final Boolean computeFeatureInfluence; - private final Double featureInfluenceThreshold; - private final Double outlierFraction; - private final Boolean standardizationEnabled; - - public Parameters( - Integer nNeighbors, - String method, - Boolean computeFeatureInfluence, - Double featureInfluenceThreshold, - Double outlierFraction, - Boolean standardizationEnabled - ) { - this.nNeighbors = nNeighbors; - this.method = method; - this.computeFeatureInfluence = computeFeatureInfluence; - this.featureInfluenceThreshold = featureInfluenceThreshold; - this.outlierFraction = outlierFraction; - this.standardizationEnabled = standardizationEnabled; - } - - public Integer getnNeighbors() { - return nNeighbors; - } - - public String getMethod() { - return method; - } - - public Boolean getComputeFeatureInfluence() { - return computeFeatureInfluence; - } - - public Double getFeatureInfluenceThreshold() { - return featureInfluenceThreshold; - } - - public Double getOutlierFraction() { - return outlierFraction; - } - - public Boolean getStandardizationEnabled() { - return standardizationEnabled; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws 
IOException { - builder.startObject(); - if (nNeighbors != null) { - builder.field(N_NEIGHBORS.getPreferredName(), nNeighbors); - } - if (method != null) { - builder.field(METHOD.getPreferredName(), method); - } - if (computeFeatureInfluence != null) { - builder.field(COMPUTE_FEATURE_INFLUENCE.getPreferredName(), computeFeatureInfluence); - } - if (featureInfluenceThreshold != null) { - builder.field(FEATURE_INFLUENCE_THRESHOLD.getPreferredName(), featureInfluenceThreshold); - } - if (outlierFraction != null) { - builder.field(OUTLIER_FRACTION.getPreferredName(), outlierFraction); - } - if (standardizationEnabled != null) { - builder.field(STANDARDIZATION_ENABLED.getPreferredName(), standardizationEnabled); - } - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - - Parameters that = (Parameters) o; - return Objects.equals(nNeighbors, that.nNeighbors) - && Objects.equals(method, that.method) - && Objects.equals(computeFeatureInfluence, that.computeFeatureInfluence) - && Objects.equals(featureInfluenceThreshold, that.featureInfluenceThreshold) - && Objects.equals(outlierFraction, that.outlierFraction) - && Objects.equals(standardizationEnabled, that.standardizationEnabled); - } - - @Override - public int hashCode() { - return Objects.hash( - nNeighbors, - method, - computeFeatureInfluence, - featureInfluenceThreshold, - outlierFraction, - standardizationEnabled - ); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/outlierdetection/TimingStats.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/outlierdetection/TimingStats.java deleted file mode 100644 index 72d96fa4d71cf..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/outlierdetection/TimingStats.java +++ /dev/null @@ -1,65 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.dataframe.stats.outlierdetection; - -import org.elasticsearch.core.TimeValue; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Objects; - -public class TimingStats implements ToXContentObject { - - public static final ParseField ELAPSED_TIME = new ParseField("elapsed_time"); - - public static ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "outlier_detection_timing_stats", - true, - a -> new TimingStats(a[0] == null ? 
null : TimeValue.timeValueMillis((long) a[0])) - ); - - static { - PARSER.declareLong(ConstructingObjectParser.optionalConstructorArg(), ELAPSED_TIME); - } - - private final TimeValue elapsedTime; - - public TimingStats(TimeValue elapsedTime) { - this.elapsedTime = elapsedTime; - } - - public TimeValue getElapsedTime() { - return elapsedTime; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - if (elapsedTime != null) { - builder.humanReadableField(ELAPSED_TIME.getPreferredName(), ELAPSED_TIME.getPreferredName() + "_string", elapsedTime); - } - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - TimingStats that = (TimingStats) o; - return Objects.equals(elapsedTime, that.elapsedTime); - } - - @Override - public int hashCode() { - return Objects.hash(elapsedTime); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/regression/Hyperparameters.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/regression/Hyperparameters.java deleted file mode 100644 index bd89928f035c8..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/regression/Hyperparameters.java +++ /dev/null @@ -1,270 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ -package org.elasticsearch.client.ml.dataframe.stats.regression; - -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Objects; - -import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; - -public class Hyperparameters implements ToXContentObject { - - public static final ParseField ALPHA = new ParseField("alpha"); - public static final ParseField DOWNSAMPLE_FACTOR = new ParseField("downsample_factor"); - public static final ParseField ETA = new ParseField("eta"); - public static final ParseField ETA_GROWTH_RATE_PER_TREE = new ParseField("eta_growth_rate_per_tree"); - public static final ParseField FEATURE_BAG_FRACTION = new ParseField("feature_bag_fraction"); - public static final ParseField GAMMA = new ParseField("gamma"); - public static final ParseField LAMBDA = new ParseField("lambda"); - public static final ParseField MAX_ATTEMPTS_TO_ADD_TREE = new ParseField("max_attempts_to_add_tree"); - public static final ParseField MAX_OPTIMIZATION_ROUNDS_PER_HYPERPARAMETER = new ParseField( - "max_optimization_rounds_per_hyperparameter" - ); - public static final ParseField MAX_TREES = new ParseField("max_trees"); - public static final ParseField NUM_FOLDS = new ParseField("num_folds"); - public static final ParseField NUM_SPLITS_PER_FEATURE = new ParseField("num_splits_per_feature"); - public static final ParseField SOFT_TREE_DEPTH_LIMIT = new ParseField("soft_tree_depth_limit"); - public static final ParseField SOFT_TREE_DEPTH_TOLERANCE = new ParseField("soft_tree_depth_tolerance"); - - public static ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "regression_hyperparameters", - true, - a -> new Hyperparameters( - (Double) a[0], - (Double) a[1], - (Double) a[2], - (Double) a[3], - (Double) a[4], - (Double) a[5], - (Double) a[6], - (Integer) a[7], - (Integer) a[8], - (Integer) a[9], - (Integer) a[10], - (Integer) a[11], - (Double) a[12], - (Double) a[13] - ) - ); - - static { - PARSER.declareDouble(optionalConstructorArg(), ALPHA); - PARSER.declareDouble(optionalConstructorArg(), DOWNSAMPLE_FACTOR); - PARSER.declareDouble(optionalConstructorArg(), ETA); - PARSER.declareDouble(optionalConstructorArg(), ETA_GROWTH_RATE_PER_TREE); - PARSER.declareDouble(optionalConstructorArg(), FEATURE_BAG_FRACTION); - PARSER.declareDouble(optionalConstructorArg(), GAMMA); - PARSER.declareDouble(optionalConstructorArg(), LAMBDA); - PARSER.declareInt(optionalConstructorArg(), MAX_ATTEMPTS_TO_ADD_TREE); - PARSER.declareInt(optionalConstructorArg(), MAX_OPTIMIZATION_ROUNDS_PER_HYPERPARAMETER); - PARSER.declareInt(optionalConstructorArg(), MAX_TREES); - PARSER.declareInt(optionalConstructorArg(), NUM_FOLDS); - PARSER.declareInt(optionalConstructorArg(), NUM_SPLITS_PER_FEATURE); - PARSER.declareDouble(optionalConstructorArg(), SOFT_TREE_DEPTH_LIMIT); - PARSER.declareDouble(optionalConstructorArg(), SOFT_TREE_DEPTH_TOLERANCE); - } - - private final Double alpha; - private final Double downsampleFactor; - private final Double eta; - private final Double etaGrowthRatePerTree; - private final Double featureBagFraction; - private final Double gamma; - private final Double lambda; - private final Integer maxAttemptsToAddTree; - private final Integer maxOptimizationRoundsPerHyperparameter; - private final Integer maxTrees; - private final Integer numFolds; - 
private final Integer numSplitsPerFeature; - private final Double softTreeDepthLimit; - private final Double softTreeDepthTolerance; - - public Hyperparameters( - Double alpha, - Double downsampleFactor, - Double eta, - Double etaGrowthRatePerTree, - Double featureBagFraction, - Double gamma, - Double lambda, - Integer maxAttemptsToAddTree, - Integer maxOptimizationRoundsPerHyperparameter, - Integer maxTrees, - Integer numFolds, - Integer numSplitsPerFeature, - Double softTreeDepthLimit, - Double softTreeDepthTolerance - ) { - this.alpha = alpha; - this.downsampleFactor = downsampleFactor; - this.eta = eta; - this.etaGrowthRatePerTree = etaGrowthRatePerTree; - this.featureBagFraction = featureBagFraction; - this.gamma = gamma; - this.lambda = lambda; - this.maxAttemptsToAddTree = maxAttemptsToAddTree; - this.maxOptimizationRoundsPerHyperparameter = maxOptimizationRoundsPerHyperparameter; - this.maxTrees = maxTrees; - this.numFolds = numFolds; - this.numSplitsPerFeature = numSplitsPerFeature; - this.softTreeDepthLimit = softTreeDepthLimit; - this.softTreeDepthTolerance = softTreeDepthTolerance; - } - - public Double getAlpha() { - return alpha; - } - - public Double getDownsampleFactor() { - return downsampleFactor; - } - - public Double getEta() { - return eta; - } - - public Double getEtaGrowthRatePerTree() { - return etaGrowthRatePerTree; - } - - public Double getFeatureBagFraction() { - return featureBagFraction; - } - - public Double getGamma() { - return gamma; - } - - public Double getLambda() { - return lambda; - } - - public Integer getMaxAttemptsToAddTree() { - return maxAttemptsToAddTree; - } - - public Integer getMaxOptimizationRoundsPerHyperparameter() { - return maxOptimizationRoundsPerHyperparameter; - } - - public Integer getMaxTrees() { - return maxTrees; - } - - public Integer getNumFolds() { - return numFolds; - } - - public Integer getNumSplitsPerFeature() { - return numSplitsPerFeature; - } - - public Double getSoftTreeDepthLimit() { - return softTreeDepthLimit; - } - - public Double getSoftTreeDepthTolerance() { - return softTreeDepthTolerance; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - if (alpha != null) { - builder.field(ALPHA.getPreferredName(), alpha); - } - if (downsampleFactor != null) { - builder.field(DOWNSAMPLE_FACTOR.getPreferredName(), downsampleFactor); - } - if (eta != null) { - builder.field(ETA.getPreferredName(), eta); - } - if (etaGrowthRatePerTree != null) { - builder.field(ETA_GROWTH_RATE_PER_TREE.getPreferredName(), etaGrowthRatePerTree); - } - if (featureBagFraction != null) { - builder.field(FEATURE_BAG_FRACTION.getPreferredName(), featureBagFraction); - } - if (gamma != null) { - builder.field(GAMMA.getPreferredName(), gamma); - } - if (lambda != null) { - builder.field(LAMBDA.getPreferredName(), lambda); - } - if (maxAttemptsToAddTree != null) { - builder.field(MAX_ATTEMPTS_TO_ADD_TREE.getPreferredName(), maxAttemptsToAddTree); - } - if (maxOptimizationRoundsPerHyperparameter != null) { - builder.field(MAX_OPTIMIZATION_ROUNDS_PER_HYPERPARAMETER.getPreferredName(), maxOptimizationRoundsPerHyperparameter); - } - if (maxTrees != null) { - builder.field(MAX_TREES.getPreferredName(), maxTrees); - } - if (numFolds != null) { - builder.field(NUM_FOLDS.getPreferredName(), numFolds); - } - if (numSplitsPerFeature != null) { - builder.field(NUM_SPLITS_PER_FEATURE.getPreferredName(), numSplitsPerFeature); - } - if (softTreeDepthLimit != null) { - 
builder.field(SOFT_TREE_DEPTH_LIMIT.getPreferredName(), softTreeDepthLimit); - } - if (softTreeDepthTolerance != null) { - builder.field(SOFT_TREE_DEPTH_TOLERANCE.getPreferredName(), softTreeDepthTolerance); - } - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - - Hyperparameters that = (Hyperparameters) o; - return Objects.equals(alpha, that.alpha) - && Objects.equals(downsampleFactor, that.downsampleFactor) - && Objects.equals(eta, that.eta) - && Objects.equals(etaGrowthRatePerTree, that.etaGrowthRatePerTree) - && Objects.equals(featureBagFraction, that.featureBagFraction) - && Objects.equals(gamma, that.gamma) - && Objects.equals(lambda, that.lambda) - && Objects.equals(maxAttemptsToAddTree, that.maxAttemptsToAddTree) - && Objects.equals(maxOptimizationRoundsPerHyperparameter, that.maxOptimizationRoundsPerHyperparameter) - && Objects.equals(maxTrees, that.maxTrees) - && Objects.equals(numFolds, that.numFolds) - && Objects.equals(numSplitsPerFeature, that.numSplitsPerFeature) - && Objects.equals(softTreeDepthLimit, that.softTreeDepthLimit) - && Objects.equals(softTreeDepthTolerance, that.softTreeDepthTolerance); - } - - @Override - public int hashCode() { - return Objects.hash( - alpha, - downsampleFactor, - eta, - etaGrowthRatePerTree, - featureBagFraction, - gamma, - lambda, - maxAttemptsToAddTree, - maxOptimizationRoundsPerHyperparameter, - maxTrees, - numFolds, - numSplitsPerFeature, - softTreeDepthLimit, - softTreeDepthTolerance - ); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/regression/RegressionStats.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/regression/RegressionStats.java deleted file mode 100644 index 8507a2c88f3a9..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/regression/RegressionStats.java +++ /dev/null @@ -1,126 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
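// Parsing sketch for the Hyperparameters class deleted above (not part of the original
// file): with 14 optional constructor arguments, direct construction is unwieldy, so
// callers typically parse it. Assumes an XContentParser `parser` positioned at the
// start of the object; since the ConstructingObjectParser is built with
// ignoreUnknownFields=true, unrecognized fields are skipped rather than failing:
//
//     Hyperparameters hp = Hyperparameters.PARSER.apply(parser, null);
//     Double eta = hp.getEta();   // null if "eta" was absent from the document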
- */ -package org.elasticsearch.client.ml.dataframe.stats.regression; - -import org.elasticsearch.client.common.TimeUtil; -import org.elasticsearch.client.ml.dataframe.stats.AnalysisStats; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContent; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.time.Instant; -import java.util.Objects; - -public class RegressionStats implements AnalysisStats { - - public static final ParseField NAME = new ParseField("regression_stats"); - - public static final ParseField TIMESTAMP = new ParseField("timestamp"); - public static final ParseField ITERATION = new ParseField("iteration"); - public static final ParseField HYPERPARAMETERS = new ParseField("hyperparameters"); - public static final ParseField TIMING_STATS = new ParseField("timing_stats"); - public static final ParseField VALIDATION_LOSS = new ParseField("validation_loss"); - - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - NAME.getPreferredName(), - true, - a -> new RegressionStats((Instant) a[0], (Integer) a[1], (Hyperparameters) a[2], (TimingStats) a[3], (ValidationLoss) a[4]) - ); - - static { - PARSER.declareField( - ConstructingObjectParser.constructorArg(), - p -> TimeUtil.parseTimeFieldToInstant(p, TIMESTAMP.getPreferredName()), - TIMESTAMP, - ObjectParser.ValueType.VALUE - ); - PARSER.declareInt(ConstructingObjectParser.optionalConstructorArg(), ITERATION); - PARSER.declareObject(ConstructingObjectParser.constructorArg(), Hyperparameters.PARSER, HYPERPARAMETERS); - PARSER.declareObject(ConstructingObjectParser.constructorArg(), TimingStats.PARSER, TIMING_STATS); - PARSER.declareObject(ConstructingObjectParser.constructorArg(), ValidationLoss.PARSER, VALIDATION_LOSS); - } - - private final Instant timestamp; - private final Integer iteration; - private final Hyperparameters hyperparameters; - private final TimingStats timingStats; - private final ValidationLoss validationLoss; - - public RegressionStats( - Instant timestamp, - Integer iteration, - Hyperparameters hyperparameters, - TimingStats timingStats, - ValidationLoss validationLoss - ) { - this.timestamp = Instant.ofEpochMilli(Objects.requireNonNull(timestamp).toEpochMilli()); - this.iteration = iteration; - this.hyperparameters = Objects.requireNonNull(hyperparameters); - this.timingStats = Objects.requireNonNull(timingStats); - this.validationLoss = Objects.requireNonNull(validationLoss); - } - - public Instant getTimestamp() { - return timestamp; - } - - public Integer getIteration() { - return iteration; - } - - public Hyperparameters getHyperparameters() { - return hyperparameters; - } - - public TimingStats getTimingStats() { - return timingStats; - } - - public ValidationLoss getValidationLoss() { - return validationLoss; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { - builder.startObject(); - builder.timeField(TIMESTAMP.getPreferredName(), TIMESTAMP.getPreferredName() + "_string", timestamp.toEpochMilli()); - if (iteration != null) { - builder.field(ITERATION.getPreferredName(), iteration); - } - builder.field(HYPERPARAMETERS.getPreferredName(), hyperparameters); - builder.field(TIMING_STATS.getPreferredName(), timingStats); - builder.field(VALIDATION_LOSS.getPreferredName(), validationLoss); - builder.endObject(); - 
return builder; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - RegressionStats that = (RegressionStats) o; - return Objects.equals(timestamp, that.timestamp) - && Objects.equals(iteration, that.iteration) - && Objects.equals(hyperparameters, that.hyperparameters) - && Objects.equals(timingStats, that.timingStats) - && Objects.equals(validationLoss, that.validationLoss); - } - - @Override - public int hashCode() { - return Objects.hash(timestamp, iteration, hyperparameters, timingStats, validationLoss); - } - - @Override - public String getName() { - return NAME.getPreferredName(); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/regression/TimingStats.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/regression/TimingStats.java deleted file mode 100644 index 7a06a2aa3b4d5..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/regression/TimingStats.java +++ /dev/null @@ -1,79 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.dataframe.stats.regression; - -import org.elasticsearch.core.TimeValue; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Objects; - -public class TimingStats implements ToXContentObject { - - public static final ParseField ELAPSED_TIME = new ParseField("elapsed_time"); - public static final ParseField ITERATION_TIME = new ParseField("iteration_time"); - - public static ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "regression_timing_stats", - true, - a -> new TimingStats( - a[0] == null ? null : TimeValue.timeValueMillis((long) a[0]), - a[1] == null ? 
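// Construction sketch for the RegressionStats class deleted above (not part of the
// original file; nested objects are assumed pre-built). Unlike the leaf stats classes,
// the timestamp and nested objects are required (constructorArg / requireNonNull),
// while iteration stays optional:
//
//     RegressionStats stats = new RegressionStats(
//         Instant.now(),      // required; stored truncated to epoch millis
//         42,                 // iteration, may be null
//         hyperparameters,    // required Hyperparameters
//         timingStats,        // required TimingStats
//         validationLoss      // required ValidationLoss
//     );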
null : TimeValue.timeValueMillis((long) a[1]) - ) - ); - - static { - PARSER.declareLong(ConstructingObjectParser.optionalConstructorArg(), ELAPSED_TIME); - PARSER.declareLong(ConstructingObjectParser.optionalConstructorArg(), ITERATION_TIME); - } - - private final TimeValue elapsedTime; - private final TimeValue iterationTime; - - public TimingStats(TimeValue elapsedTime, TimeValue iterationTime) { - this.elapsedTime = elapsedTime; - this.iterationTime = iterationTime; - } - - public TimeValue getElapsedTime() { - return elapsedTime; - } - - public TimeValue getIterationTime() { - return iterationTime; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - if (elapsedTime != null) { - builder.humanReadableField(ELAPSED_TIME.getPreferredName(), ELAPSED_TIME.getPreferredName() + "_string", elapsedTime); - } - if (iterationTime != null) { - builder.humanReadableField(ITERATION_TIME.getPreferredName(), ITERATION_TIME.getPreferredName() + "_string", iterationTime); - } - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - TimingStats that = (TimingStats) o; - return Objects.equals(elapsedTime, that.elapsedTime) && Objects.equals(iterationTime, that.iterationTime); - } - - @Override - public int hashCode() { - return Objects.hash(elapsedTime, iterationTime); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/regression/ValidationLoss.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/regression/ValidationLoss.java deleted file mode 100644 index 2fabaad16ffc5..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/stats/regression/ValidationLoss.java +++ /dev/null @@ -1,78 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ -package org.elasticsearch.client.ml.dataframe.stats.regression; - -import org.elasticsearch.client.ml.dataframe.stats.common.FoldValues; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.List; -import java.util.Objects; - -public class ValidationLoss implements ToXContentObject { - - public static final ParseField LOSS_TYPE = new ParseField("loss_type"); - public static final ParseField FOLD_VALUES = new ParseField("fold_values"); - - @SuppressWarnings("unchecked") - public static ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "regression_validation_loss", - true, - a -> new ValidationLoss((String) a[0], (List) a[1]) - ); - - static { - PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), LOSS_TYPE); - PARSER.declareObjectArray(ConstructingObjectParser.optionalConstructorArg(), FoldValues.PARSER, FOLD_VALUES); - } - - private final String lossType; - private final List foldValues; - - public ValidationLoss(String lossType, List values) { - this.lossType = lossType; - this.foldValues = values; - } - - public String getLossType() { - return lossType; - } - - public List getFoldValues() { - return foldValues; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - if (lossType != null) { - builder.field(LOSS_TYPE.getPreferredName(), lossType); - } - if (foldValues != null) { - builder.field(FOLD_VALUES.getPreferredName(), foldValues); - } - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - ValidationLoss that = (ValidationLoss) o; - return Objects.equals(lossType, that.lossType) && Objects.equals(foldValues, that.foldValues); - } - - @Override - public int hashCode() { - return Objects.hash(lossType, foldValues); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/InferenceToXContentCompressor.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/InferenceToXContentCompressor.java deleted file mode 100644 index af06d177d9bf9..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/InferenceToXContentCompressor.java +++ /dev/null @@ -1,76 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ - -package org.elasticsearch.client.ml.inference; - -import org.elasticsearch.common.bytes.BytesArray; -import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.io.Streams; -import org.elasticsearch.common.io.stream.BytesStreamOutput; -import org.elasticsearch.common.xcontent.XContentHelper; -import org.elasticsearch.core.CheckedFunction; -import org.elasticsearch.xcontent.DeprecationHandler; -import org.elasticsearch.xcontent.NamedXContentRegistry; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentParser; -import org.elasticsearch.xcontent.XContentType; - -import java.io.IOException; -import java.io.InputStream; -import java.io.OutputStream; -import java.nio.charset.StandardCharsets; -import java.util.Base64; -import java.util.zip.GZIPInputStream; -import java.util.zip.GZIPOutputStream; - -/** - * Collection of helper methods. Similar to CompressedXContent, but this utilizes GZIP. - */ -public final class InferenceToXContentCompressor { - private static final int BUFFER_SIZE = 4096; - private static final long MAX_INFLATED_BYTES = 1_000_000_000; // 1 gb maximum - - private InferenceToXContentCompressor() {} - - public static String deflate(T objectToCompress) throws IOException { - BytesReference reference = XContentHelper.toXContent(objectToCompress, XContentType.JSON, false); - return deflate(reference); - } - - public static T inflate( - String compressedString, - CheckedFunction parserFunction, - NamedXContentRegistry xContentRegistry - ) throws IOException { - try ( - XContentParser parser = XContentHelper.createParser( - xContentRegistry, - DeprecationHandler.THROW_UNSUPPORTED_OPERATION, - inflate(compressedString, MAX_INFLATED_BYTES), - XContentType.JSON - ) - ) { - return parserFunction.apply(parser); - } - } - - static BytesReference inflate(String compressedString, long streamSize) throws IOException { - byte[] compressedBytes = Base64.getDecoder().decode(compressedString.getBytes(StandardCharsets.UTF_8)); - InputStream gzipStream = new GZIPInputStream(new BytesArray(compressedBytes).streamInput(), BUFFER_SIZE); - InputStream inflateStream = new SimpleBoundedInputStream(gzipStream, streamSize); - return Streams.readFully(inflateStream); - } - - private static String deflate(BytesReference reference) throws IOException { - BytesStreamOutput out = new BytesStreamOutput(); - try (OutputStream compressedOutput = new GZIPOutputStream(out, BUFFER_SIZE)) { - reference.writeTo(compressedOutput); - } - return new String(Base64.getEncoder().encode(BytesReference.toBytes(out.bytes())), StandardCharsets.UTF_8); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/MlInferenceNamedXContentProvider.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/MlInferenceNamedXContentProvider.java deleted file mode 100644 index 271b882f697e3..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/MlInferenceNamedXContentProvider.java +++ /dev/null @@ -1,101 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
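// Round-trip sketch for the InferenceToXContentCompressor deleted above (not part of
// the original file): deflate renders an object to JSON, GZIP-compresses it, and
// Base64-encodes the bytes; inflate reverses those steps through a bounded stream so a
// hostile blob cannot expand past MAX_INFLATED_BYTES (1 GB). Assumes a
// TrainedModelConfig `config` and a NamedXContentRegistry `registry` that knows the
// ML named objects (see the provider below):
//
//     String blob = InferenceToXContentCompressor.deflate(config);
//     TrainedModelConfig restored =
//         InferenceToXContentCompressor.inflate(blob, TrainedModelConfig::fromXContent, registry);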
- */ -package org.elasticsearch.client.ml.inference; - -import org.elasticsearch.client.ml.inference.preprocessing.CustomWordEmbedding; -import org.elasticsearch.client.ml.inference.preprocessing.FrequencyEncoding; -import org.elasticsearch.client.ml.inference.preprocessing.Multi; -import org.elasticsearch.client.ml.inference.preprocessing.NGram; -import org.elasticsearch.client.ml.inference.preprocessing.OneHotEncoding; -import org.elasticsearch.client.ml.inference.preprocessing.PreProcessor; -import org.elasticsearch.client.ml.inference.preprocessing.TargetMeanEncoding; -import org.elasticsearch.client.ml.inference.trainedmodel.ClassificationConfig; -import org.elasticsearch.client.ml.inference.trainedmodel.IndexLocation; -import org.elasticsearch.client.ml.inference.trainedmodel.InferenceConfig; -import org.elasticsearch.client.ml.inference.trainedmodel.RegressionConfig; -import org.elasticsearch.client.ml.inference.trainedmodel.TrainedModel; -import org.elasticsearch.client.ml.inference.trainedmodel.TrainedModelLocation; -import org.elasticsearch.client.ml.inference.trainedmodel.ensemble.Ensemble; -import org.elasticsearch.client.ml.inference.trainedmodel.ensemble.Exponent; -import org.elasticsearch.client.ml.inference.trainedmodel.ensemble.LogisticRegression; -import org.elasticsearch.client.ml.inference.trainedmodel.ensemble.OutputAggregator; -import org.elasticsearch.client.ml.inference.trainedmodel.ensemble.WeightedMode; -import org.elasticsearch.client.ml.inference.trainedmodel.ensemble.WeightedSum; -import org.elasticsearch.client.ml.inference.trainedmodel.langident.LangIdentNeuralNetwork; -import org.elasticsearch.client.ml.inference.trainedmodel.tree.Tree; -import org.elasticsearch.plugins.spi.NamedXContentProvider; -import org.elasticsearch.xcontent.NamedXContentRegistry; -import org.elasticsearch.xcontent.ParseField; - -import java.util.ArrayList; -import java.util.List; - -public class MlInferenceNamedXContentProvider implements NamedXContentProvider { - - @Override - public List getNamedXContentParsers() { - List namedXContent = new ArrayList<>(); - - // PreProcessing - namedXContent.add( - new NamedXContentRegistry.Entry(PreProcessor.class, new ParseField(OneHotEncoding.NAME), OneHotEncoding::fromXContent) - ); - namedXContent.add( - new NamedXContentRegistry.Entry(PreProcessor.class, new ParseField(TargetMeanEncoding.NAME), TargetMeanEncoding::fromXContent) - ); - namedXContent.add( - new NamedXContentRegistry.Entry(PreProcessor.class, new ParseField(FrequencyEncoding.NAME), FrequencyEncoding::fromXContent) - ); - namedXContent.add( - new NamedXContentRegistry.Entry(PreProcessor.class, new ParseField(CustomWordEmbedding.NAME), CustomWordEmbedding::fromXContent) - ); - namedXContent.add(new NamedXContentRegistry.Entry(PreProcessor.class, new ParseField(NGram.NAME), NGram::fromXContent)); - namedXContent.add(new NamedXContentRegistry.Entry(PreProcessor.class, new ParseField(Multi.NAME), Multi::fromXContent)); - - // Model - namedXContent.add(new NamedXContentRegistry.Entry(TrainedModel.class, new ParseField(Tree.NAME), Tree::fromXContent)); - namedXContent.add(new NamedXContentRegistry.Entry(TrainedModel.class, new ParseField(Ensemble.NAME), Ensemble::fromXContent)); - namedXContent.add( - new NamedXContentRegistry.Entry( - TrainedModel.class, - new ParseField(LangIdentNeuralNetwork.NAME), - LangIdentNeuralNetwork::fromXContent - ) - ); - - // Inference Config - namedXContent.add( - new NamedXContentRegistry.Entry(InferenceConfig.class, ClassificationConfig.NAME, 
ClassificationConfig::fromXContent) - ); - namedXContent.add(new NamedXContentRegistry.Entry(InferenceConfig.class, RegressionConfig.NAME, RegressionConfig::fromXContent)); - - // Aggregating output - namedXContent.add( - new NamedXContentRegistry.Entry(OutputAggregator.class, new ParseField(WeightedMode.NAME), WeightedMode::fromXContent) - ); - namedXContent.add( - new NamedXContentRegistry.Entry(OutputAggregator.class, new ParseField(WeightedSum.NAME), WeightedSum::fromXContent) - ); - namedXContent.add( - new NamedXContentRegistry.Entry( - OutputAggregator.class, - new ParseField(LogisticRegression.NAME), - LogisticRegression::fromXContent - ) - ); - namedXContent.add(new NamedXContentRegistry.Entry(OutputAggregator.class, new ParseField(Exponent.NAME), Exponent::fromXContent)); - - // location - namedXContent.add( - new NamedXContentRegistry.Entry(TrainedModelLocation.class, new ParseField(IndexLocation.INDEX), IndexLocation::fromXContent) - ); - - return namedXContent; - } - -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/NamedXContentObject.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/NamedXContentObject.java deleted file mode 100644 index 1a6eb8afdac24..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/NamedXContentObject.java +++ /dev/null @@ -1,23 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.inference; - -import org.elasticsearch.xcontent.ToXContentObject; - -/** - * Simple interface for XContent Objects that are named. - * - * This affords more general handling when serializing and de-serializing this type of XContent when it is used in a NamedObjects - * parser. - */ -public interface NamedXContentObject extends ToXContentObject { - /** - * @return The name of the XContentObject that is to be serialized - */ - String getName(); -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/NamedXContentObjectHelper.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/NamedXContentObjectHelper.java deleted file mode 100644 index b0c4015e186a0..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/NamedXContentObjectHelper.java +++ /dev/null @@ -1,60 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
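// Wiring sketch for the provider deleted above (not part of the original file): the
// polymorphic fields (trained_model, preprocessors, inference_config, location) are
// resolved by name at parse time, so any parser handling these documents needs a
// registry seeded with the provider's entries:
//
//     NamedXContentRegistry registry = new NamedXContentRegistry(
//         new MlInferenceNamedXContentProvider().getNamedXContentParsers()
//     );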
- */ -package org.elasticsearch.client.ml.inference; - -import org.elasticsearch.xcontent.ToXContent; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.List; - -public final class NamedXContentObjectHelper { - - private NamedXContentObjectHelper() {} - - public static XContentBuilder writeNamedObjects( - XContentBuilder builder, - ToXContent.Params params, - boolean useExplicitOrder, - String namedObjectsName, - List namedObjects - ) throws IOException { - if (useExplicitOrder) { - builder.startArray(namedObjectsName); - } else { - builder.startObject(namedObjectsName); - } - for (NamedXContentObject object : namedObjects) { - if (useExplicitOrder) { - builder.startObject(); - } - builder.field(object.getName(), object, params); - if (useExplicitOrder) { - builder.endObject(); - } - } - if (useExplicitOrder) { - builder.endArray(); - } else { - builder.endObject(); - } - return builder; - } - - public static XContentBuilder writeNamedObject( - XContentBuilder builder, - ToXContent.Params params, - String namedObjectName, - NamedXContentObject namedObject - ) throws IOException { - builder.startObject(namedObjectName); - builder.field(namedObject.getName(), namedObject, params); - builder.endObject(); - return builder; - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/SimpleBoundedInputStream.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/SimpleBoundedInputStream.java deleted file mode 100644 index f0c274d49592a..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/SimpleBoundedInputStream.java +++ /dev/null @@ -1,55 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.inference; - -import java.io.IOException; -import java.io.InputStream; -import java.util.Objects; - -/** - * This is a pared down bounded input stream. - * Only read is specifically enforced. - */ -final class SimpleBoundedInputStream extends InputStream { - - private final InputStream in; - private final long maxBytes; - private long numBytes; - - SimpleBoundedInputStream(InputStream inputStream, long maxBytes) { - this.in = Objects.requireNonNull(inputStream, "inputStream"); - if (maxBytes < 0) { - throw new IllegalArgumentException("[maxBytes] must be greater than or equal to 0"); - } - this.maxBytes = maxBytes; - } - - /** - * A simple wrapper around the injected input stream that restricts the total number of bytes able to be read. - * @return The byte read. -1 on internal stream completion or when maxBytes is exceeded. - * @throws IOException on failure - */ - @Override - public int read() throws IOException { - // We have reached the maximum, signal stream completion. 
- if (numBytes >= maxBytes) { - return -1; - } - numBytes++; - return in.read(); - } - - /** - * Delegates `close` to the wrapped InputStream - * @throws IOException on failure - */ - @Override - public void close() throws IOException { - in.close(); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/TrainedModelConfig.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/TrainedModelConfig.java deleted file mode 100644 index 8defbcfce2e83..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/TrainedModelConfig.java +++ /dev/null @@ -1,504 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.inference; - -import org.elasticsearch.Version; -import org.elasticsearch.client.common.TimeUtil; -import org.elasticsearch.client.ml.inference.trainedmodel.InferenceConfig; -import org.elasticsearch.client.ml.inference.trainedmodel.TrainedModelLocation; -import org.elasticsearch.common.Strings; -import org.elasticsearch.common.unit.ByteSizeValue; -import org.elasticsearch.core.Nullable; -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.time.Instant; -import java.util.Arrays; -import java.util.Collections; -import java.util.List; -import java.util.Map; -import java.util.Objects; - -import static org.elasticsearch.client.ml.inference.NamedXContentObjectHelper.writeNamedObject; - -public class TrainedModelConfig implements ToXContentObject { - - public static final String NAME = "trained_model_config"; - - public static final ParseField MODEL_ID = new ParseField("model_id"); - public static final ParseField MODEL_TYPE = new ParseField("model_type"); - public static final ParseField CREATED_BY = new ParseField("created_by"); - public static final ParseField VERSION = new ParseField("version"); - public static final ParseField DESCRIPTION = new ParseField("description"); - public static final ParseField CREATE_TIME = new ParseField("create_time"); - public static final ParseField DEFINITION = new ParseField("definition"); - public static final ParseField COMPRESSED_DEFINITION = new ParseField("compressed_definition"); - public static final ParseField TAGS = new ParseField("tags"); - public static final ParseField METADATA = new ParseField("metadata"); - public static final ParseField INPUT = new ParseField("input"); - @Deprecated - public static final ParseField ESTIMATED_HEAP_MEMORY_USAGE_BYTES = new ParseField("estimated_heap_memory_usage_bytes"); - public static final ParseField MODEL_SIZE_BYTES = new ParseField("model_size_bytes", "estimated_heap_memory_usage_bytes"); - public static final ParseField ESTIMATED_OPERATIONS = new ParseField("estimated_operations"); - public static final ParseField LICENSE_LEVEL = new ParseField("license_level"); - public static final ParseField DEFAULT_FIELD_MAP = new ParseField("default_field_map"); - public static final ParseField INFERENCE_CONFIG = new 
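// Behavior sketch for the SimpleBoundedInputStream deleted above (not part of the
// original file; the class is package-private, so this assumes same-package access).
// The bound is enforced per read() call: once maxBytes reads have been served, the
// stream reports end-of-stream even if the delegate has more data:
//
//     InputStream bounded = new SimpleBoundedInputStream(
//         new java.io.ByteArrayInputStream(new byte[16]), 8);
//     int n = 0;
//     while (bounded.read() != -1) n++;
//     // n == 8: capped by maxBytes, not by the 16 available bytes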
ParseField("inference_config"); - public static final ParseField LOCATION = new ParseField("location"); - - public static final ObjectParser PARSER = new ObjectParser<>(NAME, true, TrainedModelConfig.Builder::new); - static { - PARSER.declareString(TrainedModelConfig.Builder::setModelId, MODEL_ID); - PARSER.declareString(TrainedModelConfig.Builder::setModelType, MODEL_TYPE); - PARSER.declareString(TrainedModelConfig.Builder::setCreatedBy, CREATED_BY); - PARSER.declareString(TrainedModelConfig.Builder::setVersion, VERSION); - PARSER.declareString(TrainedModelConfig.Builder::setDescription, DESCRIPTION); - PARSER.declareField( - TrainedModelConfig.Builder::setCreateTime, - (p, c) -> TimeUtil.parseTimeFieldToInstant(p, CREATE_TIME.getPreferredName()), - CREATE_TIME, - ObjectParser.ValueType.VALUE - ); - PARSER.declareObject(TrainedModelConfig.Builder::setDefinition, (p, c) -> TrainedModelDefinition.fromXContent(p), DEFINITION); - PARSER.declareString(TrainedModelConfig.Builder::setCompressedDefinition, COMPRESSED_DEFINITION); - PARSER.declareStringArray(TrainedModelConfig.Builder::setTags, TAGS); - PARSER.declareObject(TrainedModelConfig.Builder::setMetadata, (p, c) -> p.map(), METADATA); - PARSER.declareObject(TrainedModelConfig.Builder::setInput, (p, c) -> TrainedModelInput.fromXContent(p), INPUT); - PARSER.declareLong(TrainedModelConfig.Builder::setModelSize, MODEL_SIZE_BYTES); - PARSER.declareLong(TrainedModelConfig.Builder::setEstimatedOperations, ESTIMATED_OPERATIONS); - PARSER.declareString(TrainedModelConfig.Builder::setLicenseLevel, LICENSE_LEVEL); - PARSER.declareObject(TrainedModelConfig.Builder::setDefaultFieldMap, (p, c) -> p.mapStrings(), DEFAULT_FIELD_MAP); - PARSER.declareNamedObject( - TrainedModelConfig.Builder::setInferenceConfig, - (p, c, n) -> p.namedObject(InferenceConfig.class, n, null), - INFERENCE_CONFIG - ); - PARSER.declareNamedObject( - TrainedModelConfig.Builder::setLocation, - (p, c, n) -> p.namedObject(TrainedModelLocation.class, n, null), - LOCATION - ); - } - - public static TrainedModelConfig fromXContent(XContentParser parser) throws IOException { - return PARSER.parse(parser, null).build(); - } - - private final String modelId; - private final TrainedModelType modelType; - private final String createdBy; - private final Version version; - private final String description; - private final Instant createTime; - private final TrainedModelDefinition definition; - private final String compressedDefinition; - private final List tags; - private final Map metadata; - private final TrainedModelInput input; - private final Long modelSize; - private final Long estimatedOperations; - private final String licenseLevel; - private final Map defaultFieldMap; - private final InferenceConfig inferenceConfig; - private final TrainedModelLocation location; - - TrainedModelConfig( - String modelId, - TrainedModelType modelType, - String createdBy, - Version version, - String description, - Instant createTime, - TrainedModelDefinition definition, - String compressedDefinition, - List tags, - Map metadata, - TrainedModelInput input, - Long modelSize, - Long estimatedOperations, - String licenseLevel, - Map defaultFieldMap, - InferenceConfig inferenceConfig, - TrainedModelLocation location - ) { - this.modelId = modelId; - this.modelType = modelType; - this.createdBy = createdBy; - this.version = version; - this.createTime = createTime == null ? 
null : Instant.ofEpochMilli(createTime.toEpochMilli()); - this.definition = definition; - this.compressedDefinition = compressedDefinition; - this.description = description; - this.tags = tags == null ? null : Collections.unmodifiableList(tags); - this.metadata = metadata == null ? null : Collections.unmodifiableMap(metadata); - this.input = input; - this.modelSize = modelSize; - this.estimatedOperations = estimatedOperations; - this.licenseLevel = licenseLevel; - this.defaultFieldMap = defaultFieldMap == null ? null : Collections.unmodifiableMap(defaultFieldMap); - this.inferenceConfig = inferenceConfig; - this.location = location; - } - - public String getModelId() { - return modelId; - } - - public TrainedModelType getModelType() { - return modelType; - } - - public String getCreatedBy() { - return createdBy; - } - - public Version getVersion() { - return version; - } - - public String getDescription() { - return description; - } - - public Instant getCreateTime() { - return createTime; - } - - public List getTags() { - return tags; - } - - public Map getMetadata() { - return metadata; - } - - public TrainedModelDefinition getDefinition() { - return definition; - } - - public String getCompressedDefinition() { - return compressedDefinition; - } - - @Nullable - public TrainedModelLocation getLocation() { - return location; - } - - public TrainedModelInput getInput() { - return input; - } - - /** - * @deprecated use {@link TrainedModelConfig#getModelSize()} instead - * @return the {@link ByteSizeValue} of the model size if available. - */ - @Deprecated - public ByteSizeValue getEstimatedHeapMemory() { - return modelSize == null ? null : new ByteSizeValue(modelSize); - } - - /** - * @deprecated use {@link TrainedModelConfig#getModelSizeBytes()} instead - * @return the model size in bytes if available. - */ - @Deprecated - public Long getEstimatedHeapMemoryBytes() { - return modelSize; - } - - /** - * @return the {@link ByteSizeValue} of the model size if available. - */ - public ByteSizeValue getModelSize() { - return modelSize == null ? null : new ByteSizeValue(modelSize); - } - - /** - * @return the model size in bytes if available. 
- */ - public Long getModelSizeBytes() { - return modelSize; - } - - public String getLicenseLevel() { - return licenseLevel; - } - - public Map getDefaultFieldMap() { - return defaultFieldMap; - } - - public InferenceConfig getInferenceConfig() { - return inferenceConfig; - } - - public static Builder builder() { - return new Builder(); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - if (modelId != null) { - builder.field(MODEL_ID.getPreferredName(), modelId); - } - if (modelType != null) { - builder.field(MODEL_TYPE.getPreferredName(), modelType.toString()); - } - if (createdBy != null) { - builder.field(CREATED_BY.getPreferredName(), createdBy); - } - if (version != null) { - builder.field(VERSION.getPreferredName(), version.toString()); - } - if (description != null) { - builder.field(DESCRIPTION.getPreferredName(), description); - } - if (createTime != null) { - builder.timeField(CREATE_TIME.getPreferredName(), CREATE_TIME.getPreferredName() + "_string", createTime.toEpochMilli()); - } - if (definition != null) { - builder.field(DEFINITION.getPreferredName(), definition); - } - if (tags != null) { - builder.field(TAGS.getPreferredName(), tags); - } - if (metadata != null) { - builder.field(METADATA.getPreferredName(), metadata); - } - if (input != null) { - builder.field(INPUT.getPreferredName(), input); - } - if (modelSize != null) { - builder.field(MODEL_SIZE_BYTES.getPreferredName(), modelSize); - } - if (estimatedOperations != null) { - builder.field(ESTIMATED_OPERATIONS.getPreferredName(), estimatedOperations); - } - if (compressedDefinition != null) { - builder.field(COMPRESSED_DEFINITION.getPreferredName(), compressedDefinition); - } - if (licenseLevel != null) { - builder.field(LICENSE_LEVEL.getPreferredName(), licenseLevel); - } - if (defaultFieldMap != null) { - builder.field(DEFAULT_FIELD_MAP.getPreferredName(), defaultFieldMap); - } - if (inferenceConfig != null) { - writeNamedObject(builder, params, INFERENCE_CONFIG.getPreferredName(), inferenceConfig); - } - if (location != null) { - writeNamedObject(builder, params, LOCATION.getPreferredName(), location); - } - builder.endObject(); - return builder; - } - - @Override - public String toString() { - return Strings.toString(this); - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - TrainedModelConfig that = (TrainedModelConfig) o; - return Objects.equals(modelId, that.modelId) - && Objects.equals(modelType, that.modelType) - && Objects.equals(createdBy, that.createdBy) - && Objects.equals(version, that.version) - && Objects.equals(description, that.description) - && Objects.equals(createTime, that.createTime) - && Objects.equals(definition, that.definition) - && Objects.equals(compressedDefinition, that.compressedDefinition) - && Objects.equals(tags, that.tags) - && Objects.equals(input, that.input) - && Objects.equals(modelSize, that.modelSize) - && Objects.equals(estimatedOperations, that.estimatedOperations) - && Objects.equals(licenseLevel, that.licenseLevel) - && Objects.equals(defaultFieldMap, that.defaultFieldMap) - && Objects.equals(inferenceConfig, that.inferenceConfig) - && Objects.equals(metadata, that.metadata) - && Objects.equals(location, that.location); - } - - @Override - public int hashCode() { - return Objects.hash( - modelId, - modelType, - createdBy, - version, - createTime, - definition, - compressedDefinition, - 
description, - tags, - modelSize, - estimatedOperations, - metadata, - licenseLevel, - input, - inferenceConfig, - defaultFieldMap, - location - ); - } - - public static class Builder { - - private String modelId; - private TrainedModelType modelType; - private String createdBy; - private Version version; - private String description; - private Instant createTime; - private Map metadata; - private List tags; - private TrainedModelDefinition definition; - private String compressedDefinition; - private TrainedModelInput input; - private Long modelSize; - private Long estimatedOperations; - private String licenseLevel; - private Map defaultFieldMap; - private InferenceConfig inferenceConfig; - private TrainedModelLocation location; - - public Builder setModelId(String modelId) { - this.modelId = modelId; - return this; - } - - public Builder setModelType(String modelType) { - this.modelType = TrainedModelType.fromString(modelType); - return this; - } - - public Builder setModelType(TrainedModelType modelType) { - this.modelType = modelType; - return this; - } - - private Builder setCreatedBy(String createdBy) { - this.createdBy = createdBy; - return this; - } - - private Builder setVersion(Version version) { - this.version = version; - return this; - } - - private Builder setVersion(String version) { - return this.setVersion(Version.fromString(version)); - } - - public Builder setDescription(String description) { - this.description = description; - return this; - } - - private Builder setCreateTime(Instant createTime) { - this.createTime = createTime; - return this; - } - - public Builder setTags(List tags) { - this.tags = tags; - return this; - } - - public Builder setTags(String... tags) { - return setTags(Arrays.asList(tags)); - } - - public Builder setMetadata(Map metadata) { - this.metadata = metadata; - return this; - } - - public Builder setDefinition(TrainedModelDefinition.Builder definition) { - this.definition = definition == null ? 
null : definition.build(); - return this; - } - - public Builder setCompressedDefinition(String compressedDefinition) { - this.compressedDefinition = compressedDefinition; - return this; - } - - public Builder setDefinition(TrainedModelDefinition definition) { - this.definition = definition; - return this; - } - - public Builder setLocation(TrainedModelLocation location) { - this.location = location; - return this; - } - - public Builder setInput(TrainedModelInput input) { - this.input = input; - return this; - } - - private Builder setModelSize(Long modelSize) { - this.modelSize = modelSize; - return this; - } - - private Builder setEstimatedOperations(Long estimatedOperations) { - this.estimatedOperations = estimatedOperations; - return this; - } - - private Builder setLicenseLevel(String licenseLevel) { - this.licenseLevel = licenseLevel; - return this; - } - - public Builder setDefaultFieldMap(Map defaultFieldMap) { - this.defaultFieldMap = defaultFieldMap; - return this; - } - - public Builder setInferenceConfig(InferenceConfig inferenceConfig) { - this.inferenceConfig = inferenceConfig; - return this; - } - - public TrainedModelConfig build() { - return new TrainedModelConfig( - modelId, - modelType, - createdBy, - version, - description, - createTime, - definition, - compressedDefinition, - tags, - metadata, - input, - modelSize, - estimatedOperations, - licenseLevel, - defaultFieldMap, - inferenceConfig, - location - ); - } - } - -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/TrainedModelDefinition.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/TrainedModelDefinition.java deleted file mode 100644 index 3ca84bc62cbd5..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/TrainedModelDefinition.java +++ /dev/null @@ -1,119 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
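// Builder sketch for the TrainedModelConfig deleted above (not part of the original
// file; the id and fields are hypothetical). Only client-settable fields expose public
// setters; server-assigned ones (createdBy, version, licenseLevel, modelSize, ...)
// have private setters and are populated only when parsing a server response:
//
//     TrainedModelConfig config = TrainedModelConfig.builder()
//         .setModelId("my-regression-model")
//         .setDescription("example model")
//         .setTags("regression", "demo")
//         .setInput(new TrainedModelInput("feature_1", "feature_2"))
//         .build();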
- */ -package org.elasticsearch.client.ml.inference; - -import org.elasticsearch.client.ml.inference.preprocessing.PreProcessor; -import org.elasticsearch.client.ml.inference.trainedmodel.TrainedModel; -import org.elasticsearch.common.Strings; -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Collections; -import java.util.List; -import java.util.Objects; - -public class TrainedModelDefinition implements ToXContentObject { - - public static final String NAME = "trained_model_doc"; - - public static final ParseField TRAINED_MODEL = new ParseField("trained_model"); - public static final ParseField PREPROCESSORS = new ParseField("preprocessors"); - - public static final ObjectParser PARSER = new ObjectParser<>(NAME, true, TrainedModelDefinition.Builder::new); - static { - PARSER.declareNamedObject( - TrainedModelDefinition.Builder::setTrainedModel, - (p, c, n) -> p.namedObject(TrainedModel.class, n, null), - TRAINED_MODEL - ); - PARSER.declareNamedObjects( - TrainedModelDefinition.Builder::setPreProcessors, - (p, c, n) -> p.namedObject(PreProcessor.class, n, null), - (trainedModelDefBuilder) -> {/* Does not matter client side*/ }, - PREPROCESSORS - ); - } - - public static TrainedModelDefinition.Builder fromXContent(XContentParser parser) throws IOException { - return PARSER.parse(parser, null); - } - - private final TrainedModel trainedModel; - private final List preProcessors; - - TrainedModelDefinition(TrainedModel trainedModel, List preProcessors) { - this.trainedModel = trainedModel; - this.preProcessors = preProcessors == null ? 
Collections.emptyList() : Collections.unmodifiableList(preProcessors); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - NamedXContentObjectHelper.writeNamedObjects( - builder, - params, - false, - TRAINED_MODEL.getPreferredName(), - Collections.singletonList(trainedModel) - ); - NamedXContentObjectHelper.writeNamedObjects(builder, params, true, PREPROCESSORS.getPreferredName(), preProcessors); - builder.endObject(); - return builder; - } - - public TrainedModel getTrainedModel() { - return trainedModel; - } - - public List getPreProcessors() { - return preProcessors; - } - - @Override - public String toString() { - return Strings.toString(this); - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - TrainedModelDefinition that = (TrainedModelDefinition) o; - return Objects.equals(trainedModel, that.trainedModel) && Objects.equals(preProcessors, that.preProcessors); - } - - @Override - public int hashCode() { - return Objects.hash(trainedModel, preProcessors); - } - - public static class Builder { - - private List preProcessors; - private TrainedModel trainedModel; - - public Builder setPreProcessors(List preProcessors) { - this.preProcessors = preProcessors; - return this; - } - - public Builder setTrainedModel(TrainedModel trainedModel) { - this.trainedModel = trainedModel; - return this; - } - - public TrainedModelDefinition build() { - return new TrainedModelDefinition(this.trainedModel, this.preProcessors); - } - } - -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/TrainedModelInput.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/TrainedModelInput.java deleted file mode 100644 index d6e2d0559396c..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/TrainedModelInput.java +++ /dev/null @@ -1,78 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.inference; - -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Arrays; -import java.util.List; -import java.util.Objects; - -public class TrainedModelInput implements ToXContentObject { - - public static final String NAME = "trained_model_config_input"; - public static final ParseField FIELD_NAMES = new ParseField("field_names"); - - @SuppressWarnings("unchecked") - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - NAME, - true, - a -> new TrainedModelInput((List) a[0]) - ); - - static { - PARSER.declareStringArray(ConstructingObjectParser.constructorArg(), FIELD_NAMES); - } - - private final List fieldNames; - - public TrainedModelInput(List fieldNames) { - this.fieldNames = fieldNames; - } - - public TrainedModelInput(String... 
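// Serialization sketch for the TrainedModelDefinition deleted above (not part of the
// original file; `tree` and `encoding` are assumed, pre-built named objects). Note the
// useExplicitOrder flag in NamedXContentObjectHelper: the single trained_model is
// written as a name-keyed object, while preprocessors keep their order as an array of
// single-entry objects:
//
//     TrainedModelDefinition def = new TrainedModelDefinition.Builder()
//         .setTrainedModel(tree)                 // e.g. a Tree
//         .setPreProcessors(List.of(encoding))   // e.g. a OneHotEncoding
//         .build();
//     // -> {"trained_model":{"tree":{...}},"preprocessors":[{"one_hot_encoding":{...}}]}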
fieldNames) { - this(Arrays.asList(fieldNames)); - } - - public static TrainedModelInput fromXContent(XContentParser parser) throws IOException { - return PARSER.parse(parser, null); - } - - public List getFieldNames() { - return fieldNames; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - if (fieldNames != null) { - builder.field(FIELD_NAMES.getPreferredName(), fieldNames); - } - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - TrainedModelInput that = (TrainedModelInput) o; - return Objects.equals(fieldNames, that.fieldNames); - } - - @Override - public int hashCode() { - return Objects.hash(fieldNames); - } - -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/TrainedModelStats.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/TrainedModelStats.java deleted file mode 100644 index bd45805e70603..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/TrainedModelStats.java +++ /dev/null @@ -1,128 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.inference; - -import org.elasticsearch.client.ml.inference.trainedmodel.InferenceStats; -import org.elasticsearch.core.Nullable; -import org.elasticsearch.ingest.IngestStats; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Map; -import java.util.Objects; - -import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; -import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; - -public class TrainedModelStats implements ToXContentObject { - - public static final ParseField MODEL_ID = new ParseField("model_id"); - public static final ParseField PIPELINE_COUNT = new ParseField("pipeline_count"); - public static final ParseField INGEST_STATS = new ParseField("ingest"); - public static final ParseField INFERENCE_STATS = new ParseField("inference_stats"); - - private final String modelId; - private final Map ingestStats; - private final int pipelineCount; - private final InferenceStats inferenceStats; - - @SuppressWarnings("unchecked") - static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "trained_model_stats", - true, - args -> new TrainedModelStats((String) args[0], (Map) args[1], (Integer) args[2], (InferenceStats) args[3]) - ); - - static { - PARSER.declareString(constructorArg(), MODEL_ID); - PARSER.declareObject(optionalConstructorArg(), (p, c) -> p.mapOrdered(), INGEST_STATS); - PARSER.declareInt(constructorArg(), PIPELINE_COUNT); - PARSER.declareObject(optionalConstructorArg(), InferenceStats.PARSER, INFERENCE_STATS); - } - - public static TrainedModelStats fromXContent(XContentParser parser) { - return 
PARSER.apply(parser, null); - } - - public TrainedModelStats(String modelId, Map ingestStats, int pipelineCount, InferenceStats inferenceStats) { - this.modelId = modelId; - this.ingestStats = ingestStats; - this.pipelineCount = pipelineCount; - this.inferenceStats = inferenceStats; - } - - /** - * The model id for which the stats apply - */ - public String getModelId() { - return modelId; - } - - /** - * Ingest level statistics. See {@link IngestStats#toXContent(XContentBuilder, Params)} for fields and format - * If there are no ingest pipelines referencing the model, then the ingest statistics could be null. - */ - @Nullable - public Map getIngestStats() { - return ingestStats; - } - - /** - * The total number of pipelines that reference the trained model - */ - public int getPipelineCount() { - return pipelineCount; - } - - /** - * Inference statistics - */ - public InferenceStats getInferenceStats() { - return inferenceStats; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(MODEL_ID.getPreferredName(), modelId); - builder.field(PIPELINE_COUNT.getPreferredName(), pipelineCount); - if (ingestStats != null) { - builder.field(INGEST_STATS.getPreferredName(), ingestStats); - } - if (inferenceStats != null) { - builder.field(INFERENCE_STATS.getPreferredName(), inferenceStats); - } - builder.endObject(); - return builder; - } - - @Override - public int hashCode() { - return Objects.hash(modelId, ingestStats, pipelineCount, inferenceStats); - } - - @Override - public boolean equals(Object obj) { - if (obj == null) { - return false; - } - if (getClass() != obj.getClass()) { - return false; - } - TrainedModelStats other = (TrainedModelStats) obj; - return Objects.equals(this.modelId, other.modelId) - && Objects.equals(this.ingestStats, other.ingestStats) - && Objects.equals(this.pipelineCount, other.pipelineCount) - && Objects.equals(this.inferenceStats, other.inferenceStats); - } - -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/TrainedModelType.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/TrainedModelType.java deleted file mode 100644 index e34c01d880b87..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/TrainedModelType.java +++ /dev/null @@ -1,26 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ - -package org.elasticsearch.client.ml.inference; - -import java.util.Locale; - -public enum TrainedModelType { - TREE_ENSEMBLE, - LANG_IDENT, - PYTORCH; - - public static TrainedModelType fromString(String name) { - return valueOf(name.trim().toUpperCase(Locale.ROOT)); - } - - @Override - public String toString() { - return name().toLowerCase(Locale.ROOT); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/preprocessing/CustomWordEmbedding.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/preprocessing/CustomWordEmbedding.java deleted file mode 100644 index 3ad8170b3ce9f..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/preprocessing/CustomWordEmbedding.java +++ /dev/null @@ -1,151 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.inference.preprocessing; - -import org.elasticsearch.core.CheckedFunction; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContent; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; -import java.util.Objects; - -/** - * This is a pre-processor that embeds text into a numerical vector. - * - * It calculates a set of features based on script type, ngram hashes, and most common script values. - * - * The features are then concatenated with specific quantization scales and weights into a vector of length 80. 
- *
- * This is a fork and a port of: https://github.com/google/cld3/blob/06f695f1c8ee530104416aab5dcf2d6a1414a56a/src/embedding_network.cc
- */
-public class CustomWordEmbedding implements PreProcessor {
-
-    public static final String NAME = "custom_word_embedding";
-    static final ParseField FIELD = new ParseField("field");
-    static final ParseField DEST_FIELD = new ParseField("dest_field");
-    static final ParseField EMBEDDING_WEIGHTS = new ParseField("embedding_weights");
-    static final ParseField EMBEDDING_QUANT_SCALES = new ParseField("embedding_quant_scales");
-
-    public static final ConstructingObjectParser<CustomWordEmbedding, Void> PARSER = new ConstructingObjectParser<>(
-        NAME,
-        true,
-        a -> new CustomWordEmbedding((short[][]) a[0], (byte[][]) a[1], (String) a[2], (String) a[3])
-    );
-    static {
-        PARSER.declareField(ConstructingObjectParser.constructorArg(), (p, c) -> {
-            List<List<Short>> listOfListOfShorts = parseArrays(EMBEDDING_QUANT_SCALES.getPreferredName(), XContentParser::shortValue, p);
-            short[][] primitiveShorts = new short[listOfListOfShorts.size()][];
-            int i = 0;
-            for (List<Short> shorts : listOfListOfShorts) {
-                short[] innerShorts = new short[shorts.size()];
-                for (int j = 0; j < shorts.size(); j++) {
-                    innerShorts[j] = shorts.get(j);
-                }
-                primitiveShorts[i++] = innerShorts;
-            }
-            return primitiveShorts;
-        }, EMBEDDING_QUANT_SCALES, ObjectParser.ValueType.VALUE_ARRAY);
-        PARSER.declareField(ConstructingObjectParser.constructorArg(), (p, c) -> {
-            List<byte[]> values = new ArrayList<>();
-            while (p.nextToken() != XContentParser.Token.END_ARRAY) {
-                values.add(p.binaryValue());
-            }
-            byte[][] primitiveBytes = new byte[values.size()][];
-            int i = 0;
-            for (byte[] bytes : values) {
-                primitiveBytes[i++] = bytes;
-            }
-            return primitiveBytes;
-        }, EMBEDDING_WEIGHTS, ObjectParser.ValueType.VALUE_ARRAY);
-        PARSER.declareString(ConstructingObjectParser.constructorArg(), FIELD);
-        PARSER.declareString(ConstructingObjectParser.constructorArg(), DEST_FIELD);
-    }
-
-    private static <T> List<List<T>> parseArrays(
-        String fieldName,
-        CheckedFunction<XContentParser, T, IOException> fromParser,
-        XContentParser p
-    ) throws IOException {
-        if (p.currentToken() != XContentParser.Token.START_ARRAY) {
-            throw new IllegalArgumentException("unexpected token [" + p.currentToken() + "] for [" + fieldName + "]");
-        }
-        List<List<T>> values = new ArrayList<>();
-        while (p.nextToken() != XContentParser.Token.END_ARRAY) {
-            if (p.currentToken() != XContentParser.Token.START_ARRAY) {
-                throw new IllegalArgumentException("unexpected token [" + p.currentToken() + "] for [" + fieldName + "]");
-            }
-            List<T> innerList = new ArrayList<>();
-            while (p.nextToken() != XContentParser.Token.END_ARRAY) {
-                if (p.currentToken().isValue() == false) {
-                    throw new IllegalStateException(
-                        "expected non-null value but got [" + p.currentToken() + "] " + "for [" + fieldName + "]"
-                    );
-                }
-                innerList.add(fromParser.apply(p));
-            }
-            values.add(innerList);
-        }
-        return values;
-    }
-
-    public static CustomWordEmbedding fromXContent(XContentParser parser) {
-        return PARSER.apply(parser, null);
-    }
-
-    private final short[][] embeddingsQuantScales;
-    private final byte[][] embeddingsWeights;
-    private final String fieldName;
-    private final String destField;
-
-    CustomWordEmbedding(short[][] embeddingsQuantScales, byte[][] embeddingsWeights, String fieldName, String destField) {
-        this.embeddingsQuantScales = embeddingsQuantScales;
-        this.embeddingsWeights = embeddingsWeights;
-        this.fieldName = fieldName;
-        this.destField = destField;
-    }
-
-    @Override
-    public String getName() {
-        return NAME;
-    }
-
-
@Override - public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { - builder.startObject(); - builder.field(FIELD.getPreferredName(), fieldName); - builder.field(DEST_FIELD.getPreferredName(), destField); - builder.field(EMBEDDING_QUANT_SCALES.getPreferredName(), embeddingsQuantScales); - builder.field(EMBEDDING_WEIGHTS.getPreferredName(), embeddingsWeights); - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - CustomWordEmbedding that = (CustomWordEmbedding) o; - return Objects.equals(fieldName, that.fieldName) - && Objects.equals(destField, that.destField) - && Arrays.deepEquals(embeddingsWeights, that.embeddingsWeights) - && Arrays.deepEquals(embeddingsQuantScales, that.embeddingsQuantScales); - } - - @Override - public int hashCode() { - return Objects.hash(fieldName, destField, Arrays.deepHashCode(embeddingsQuantScales), Arrays.deepHashCode(embeddingsWeights)); - } - -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/preprocessing/FrequencyEncoding.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/preprocessing/FrequencyEncoding.java deleted file mode 100644 index 81d3cfa05cb45..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/preprocessing/FrequencyEncoding.java +++ /dev/null @@ -1,170 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.inference.preprocessing; - -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContent; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Collections; -import java.util.HashMap; -import java.util.Map; -import java.util.Objects; - -/** - * PreProcessor for frequency encoding a set of categorical values for a given field. 
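For reference, a frequency encoding was typically assembled through the nested Builder shown in the deletion below; this is only a sketch, and the field name, feature name, and value/frequency pairs are hypothetical:

    // Hypothetical values; Builder, setFeatureName, addFrequency and build are the API in the deleted class.
    FrequencyEncoding encoding = new FrequencyEncoding.Builder("airline")  // field to encode (hypothetical)
        .setFeatureName("airline_freq")                                    // output feature name (hypothetical)
        .addFrequency("AAL", 0.11)                                         // category -> relative frequency
        .addFrequency("AWE", 0.04)
        .build();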
- */ -public class FrequencyEncoding implements PreProcessor { - - public static final String NAME = "frequency_encoding"; - public static final ParseField FIELD = new ParseField("field"); - public static final ParseField FEATURE_NAME = new ParseField("feature_name"); - public static final ParseField FREQUENCY_MAP = new ParseField("frequency_map"); - public static final ParseField CUSTOM = new ParseField("custom"); - - @SuppressWarnings("unchecked") - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - NAME, - true, - a -> new FrequencyEncoding((String) a[0], (String) a[1], (Map) a[2], (Boolean) a[3]) - ); - static { - PARSER.declareString(ConstructingObjectParser.constructorArg(), FIELD); - PARSER.declareString(ConstructingObjectParser.constructorArg(), FEATURE_NAME); - PARSER.declareObject( - ConstructingObjectParser.constructorArg(), - (p, c) -> p.map(HashMap::new, XContentParser::doubleValue), - FREQUENCY_MAP - ); - PARSER.declareBoolean(ConstructingObjectParser.optionalConstructorArg(), CUSTOM); - } - - public static FrequencyEncoding fromXContent(XContentParser parser) { - return PARSER.apply(parser, null); - } - - private final String field; - private final String featureName; - private final Map frequencyMap; - private final Boolean custom; - - FrequencyEncoding(String field, String featureName, Map frequencyMap, Boolean custom) { - this.field = Objects.requireNonNull(field); - this.featureName = Objects.requireNonNull(featureName); - this.frequencyMap = Collections.unmodifiableMap(Objects.requireNonNull(frequencyMap)); - this.custom = custom; - } - - /** - * @return Field name on which to frequency encode - */ - public String getField() { - return field; - } - - /** - * @return Map of Value: frequency for the frequency encoding - */ - public Map getFrequencyMap() { - return frequencyMap; - } - - /** - * @return The encoded feature name - */ - public String getFeatureName() { - return featureName; - } - - @Override - public String getName() { - return NAME; - } - - public Boolean getCustom() { - return custom; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { - builder.startObject(); - builder.field(FIELD.getPreferredName(), field); - builder.field(FEATURE_NAME.getPreferredName(), featureName); - builder.field(FREQUENCY_MAP.getPreferredName(), frequencyMap); - if (custom != null) { - builder.field(CUSTOM.getPreferredName(), custom); - } - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - FrequencyEncoding that = (FrequencyEncoding) o; - return Objects.equals(field, that.field) - && Objects.equals(featureName, that.featureName) - && Objects.equals(custom, that.custom) - && Objects.equals(frequencyMap, that.frequencyMap); - } - - @Override - public int hashCode() { - return Objects.hash(field, featureName, frequencyMap, custom); - } - - public Builder builder(String fieldName) { - return new Builder(fieldName); - } - - public static class Builder { - - private String field; - private String featureName; - private Map frequencyMap = new HashMap<>(); - private Boolean custom; - - public Builder(String field) { - this.field = field; - } - - public Builder setField(String field) { - this.field = field; - return this; - } - - public Builder setFeatureName(String featureName) { - this.featureName = featureName; - return this; - } - - public 
Builder setFrequencyMap(Map frequencyMap) { - this.frequencyMap = new HashMap<>(frequencyMap); - return this; - } - - public Builder addFrequency(String valueName, double frequency) { - this.frequencyMap.put(valueName, frequency); - return this; - } - - public Builder setCustom(boolean custom) { - this.custom = custom; - return this; - } - - public FrequencyEncoding build() { - return new FrequencyEncoding(field, featureName, frequencyMap, custom); - } - } - -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/preprocessing/Multi.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/preprocessing/Multi.java deleted file mode 100644 index bf9f1aba2c057..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/preprocessing/Multi.java +++ /dev/null @@ -1,110 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ - -package org.elasticsearch.client.ml.inference.preprocessing; - -import org.elasticsearch.client.ml.inference.NamedXContentObjectHelper; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContent; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.List; -import java.util.Objects; - -/** - * Multi-PreProcessor for chaining together multiple processors - */ -public class Multi implements PreProcessor { - - public static final String NAME = "multi_encoding"; - public static final ParseField PROCESSORS = new ParseField("processors"); - public static final ParseField CUSTOM = new ParseField("custom"); - - @SuppressWarnings("unchecked") - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - NAME, - true, - a -> new Multi((List) a[0], (Boolean) a[1]) - ); - static { - PARSER.declareNamedObjects( - ConstructingObjectParser.constructorArg(), - (p, c, n) -> p.namedObject(PreProcessor.class, n, null), - (_unused) -> {/* Does not matter client side*/ }, - PROCESSORS - ); - PARSER.declareBoolean(ConstructingObjectParser.optionalConstructorArg(), CUSTOM); - } - - public static Multi fromXContent(XContentParser parser) { - return PARSER.apply(parser, null); - } - - private final List processors; - private final Boolean custom; - - Multi(List processors, Boolean custom) { - this.processors = Objects.requireNonNull(processors, PROCESSORS.getPreferredName()); - this.custom = custom; - } - - @Override - public String getName() { - return NAME; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { - builder.startObject(); - NamedXContentObjectHelper.writeNamedObjects(builder, params, true, PROCESSORS.getPreferredName(), processors); - if (custom != null) { - builder.field(CUSTOM.getPreferredName(), custom); - } - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - Multi multi = (Multi) o; - return Objects.equals(multi.processors, processors) && 
Objects.equals(custom, multi.custom); - } - - @Override - public int hashCode() { - return Objects.hash(custom, processors); - } - - public static Builder builder(List processors) { - return new Builder(processors); - } - - public static class Builder { - private final List processors; - private Boolean custom; - - public Builder(List processors) { - this.processors = processors; - } - - public Builder setCustom(boolean custom) { - this.custom = custom; - return this; - } - - public Multi build() { - return new Multi(processors, custom); - } - } - -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/preprocessing/NGram.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/preprocessing/NGram.java deleted file mode 100644 index bd831a6bf8d54..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/preprocessing/NGram.java +++ /dev/null @@ -1,224 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.inference.preprocessing; - -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.Collections; -import java.util.List; -import java.util.Objects; -import java.util.function.IntFunction; -import java.util.stream.IntStream; - -/** - * PreProcessor for n-gram encoding a string - */ -public class NGram implements PreProcessor { - - public static final String NAME = "n_gram_encoding"; - public static final ParseField FIELD = new ParseField("field"); - public static final ParseField FEATURE_PREFIX = new ParseField("feature_prefix"); - public static final ParseField NGRAMS = new ParseField("n_grams"); - public static final ParseField START = new ParseField("start"); - public static final ParseField LENGTH = new ParseField("length"); - public static final ParseField CUSTOM = new ParseField("custom"); - - @SuppressWarnings("unchecked") - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser( - NAME, - true, - a -> new NGram((String) a[0], (List) a[1], (Integer) a[2], (Integer) a[3], (Boolean) a[4], (String) a[5]) - ); - static { - PARSER.declareString(ConstructingObjectParser.constructorArg(), FIELD); - PARSER.declareIntArray(ConstructingObjectParser.constructorArg(), NGRAMS); - PARSER.declareInt(ConstructingObjectParser.optionalConstructorArg(), START); - PARSER.declareInt(ConstructingObjectParser.optionalConstructorArg(), LENGTH); - PARSER.declareBoolean(ConstructingObjectParser.optionalConstructorArg(), CUSTOM); - PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), FEATURE_PREFIX); - } - - public static NGram fromXContent(XContentParser parser) { - return PARSER.apply(parser, null); - } - - private final String field; - private final String featurePrefix; - private final List nGrams; - private final Integer start; - private final Integer length; - private final Boolean custom; - - NGram(String field, List nGrams, Integer start, Integer length, Boolean custom, String 
featurePrefix) { - this.field = field; - this.featurePrefix = featurePrefix; - this.nGrams = nGrams; - this.start = start; - this.length = length; - this.custom = custom; - } - - @Override - public String getName() { - return NAME; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - if (field != null) { - builder.field(FIELD.getPreferredName(), field); - } - if (featurePrefix != null) { - builder.field(FEATURE_PREFIX.getPreferredName(), featurePrefix); - } - if (nGrams != null) { - builder.field(NGRAMS.getPreferredName(), nGrams); - } - if (start != null) { - builder.field(START.getPreferredName(), start); - } - if (length != null) { - builder.field(LENGTH.getPreferredName(), length); - } - if (custom != null) { - builder.field(CUSTOM.getPreferredName(), custom); - } - builder.endObject(); - return builder; - } - - public String getField() { - return field; - } - - public String getFeaturePrefix() { - return featurePrefix; - } - - public List getnGrams() { - return nGrams; - } - - public Integer getStart() { - return start; - } - - public Integer getLength() { - return length; - } - - public Boolean getCustom() { - return custom; - } - - public List outputFields() { - return allPossibleNGramOutputFeatureNames(); - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - NGram nGram = (NGram) o; - return Objects.equals(field, nGram.field) - && Objects.equals(featurePrefix, nGram.featurePrefix) - && Objects.equals(nGrams, nGram.nGrams) - && Objects.equals(start, nGram.start) - && Objects.equals(length, nGram.length) - && Objects.equals(custom, nGram.custom); - } - - @Override - public int hashCode() { - return Objects.hash(field, featurePrefix, start, length, custom, nGrams); - } - - private String nGramFeature(int nGram, int pos) { - return featurePrefix + "." 
+ nGram + pos; - } - - private List allPossibleNGramOutputFeatureNames() { - int totalNgrams = 0; - for (int nGram : nGrams) { - totalNgrams += (length - (nGram - 1)); - } - if (totalNgrams <= 0) { - return Collections.emptyList(); - } - List ngramOutputs = new ArrayList<>(totalNgrams); - - for (int nGram : nGrams) { - IntFunction func = i -> nGramFeature(nGram, i); - IntStream.range(0, (length - (nGram - 1))).mapToObj(func).forEach(ngramOutputs::add); - } - return ngramOutputs; - } - - public static Builder builder(String field) { - return new Builder(field); - } - - public static class Builder { - - private String field; - private String featurePrefix; - private List nGrams; - private Integer start; - private Integer length; - private Boolean custom; - - public Builder(String field) { - this.field = field; - } - - public Builder setField(String field) { - this.field = field; - return this; - } - - public Builder setCustom(boolean custom) { - this.custom = custom; - return this; - } - - public Builder setFeaturePrefix(String featurePrefix) { - this.featurePrefix = featurePrefix; - return this; - } - - public Builder setnGrams(List nGrams) { - this.nGrams = nGrams; - return this; - } - - public Builder setStart(Integer start) { - this.start = start; - return this; - } - - public Builder setLength(Integer length) { - this.length = length; - return this; - } - - public Builder setCustom(Boolean custom) { - this.custom = custom; - return this; - } - - public NGram build() { - return new NGram(field, nGrams, start, length, custom, featurePrefix); - } - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/preprocessing/OneHotEncoding.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/preprocessing/OneHotEncoding.java deleted file mode 100644 index 461c62fd54c0d..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/preprocessing/OneHotEncoding.java +++ /dev/null @@ -1,144 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.inference.preprocessing; - -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContent; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Collections; -import java.util.HashMap; -import java.util.Map; -import java.util.Objects; - -/** - * PreProcessor for one hot encoding a set of categorical values for a given field. 
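For reference, a sketch of building a one-hot encoding via the static builder shown in the deletion below; the field and the value-to-column mappings are hypothetical:

    // Hypothetical mappings; builder(...) and addOneHot(...) are the API in the deleted class.
    OneHotEncoding encoding = OneHotEncoding.builder("animal_category")  // field to encode (hypothetical)
        .addOneHot("cat", "animal_category_cat")                         // category -> output column name
        .addOneHot("dog", "animal_category_dog")
        .build();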
- */ -public class OneHotEncoding implements PreProcessor { - - public static final String NAME = "one_hot_encoding"; - public static final ParseField FIELD = new ParseField("field"); - public static final ParseField HOT_MAP = new ParseField("hot_map"); - public static final ParseField CUSTOM = new ParseField("custom"); - - @SuppressWarnings("unchecked") - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - NAME, - true, - a -> new OneHotEncoding((String) a[0], (Map) a[1], (Boolean) a[2]) - ); - static { - PARSER.declareString(ConstructingObjectParser.constructorArg(), FIELD); - PARSER.declareObject(ConstructingObjectParser.constructorArg(), (p, c) -> p.mapStrings(), HOT_MAP); - PARSER.declareBoolean(ConstructingObjectParser.optionalConstructorArg(), CUSTOM); - } - - public static OneHotEncoding fromXContent(XContentParser parser) { - return PARSER.apply(parser, null); - } - - private final String field; - private final Map hotMap; - private final Boolean custom; - - OneHotEncoding(String field, Map hotMap, Boolean custom) { - this.field = Objects.requireNonNull(field); - this.hotMap = Collections.unmodifiableMap(Objects.requireNonNull(hotMap)); - this.custom = custom; - } - - /** - * @return Field name on which to one hot encode - */ - public String getField() { - return field; - } - - /** - * @return Map of Value: ColumnName for the one hot encoding - */ - public Map getHotMap() { - return hotMap; - } - - @Override - public String getName() { - return NAME; - } - - public Boolean getCustom() { - return custom; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { - builder.startObject(); - builder.field(FIELD.getPreferredName(), field); - builder.field(HOT_MAP.getPreferredName(), hotMap); - if (custom != null) { - builder.field(CUSTOM.getPreferredName(), custom); - } - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - OneHotEncoding that = (OneHotEncoding) o; - return Objects.equals(field, that.field) && Objects.equals(hotMap, that.hotMap) && Objects.equals(custom, that.custom); - } - - @Override - public int hashCode() { - return Objects.hash(field, hotMap, custom); - } - - public static Builder builder(String field) { - return new Builder(field); - } - - public static class Builder { - - private String field; - private Map hotMap = new HashMap<>(); - private Boolean custom; - - public Builder(String field) { - this.field = field; - } - - public Builder setField(String field) { - this.field = field; - return this; - } - - public Builder setHotMap(Map hotMap) { - this.hotMap = new HashMap<>(hotMap); - return this; - } - - public Builder addOneHot(String valueName, String oneHotFeatureName) { - this.hotMap.put(valueName, oneHotFeatureName); - return this; - } - - public Builder setCustom(boolean custom) { - this.custom = custom; - return this; - } - - public OneHotEncoding build() { - return new OneHotEncoding(field, hotMap, custom); - } - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/preprocessing/PreProcessor.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/preprocessing/PreProcessor.java deleted file mode 100644 index a9e21874313ad..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/preprocessing/PreProcessor.java +++ 
/dev/null @@ -1,21 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.inference.preprocessing; - -import org.elasticsearch.client.ml.inference.NamedXContentObject; - -/** - * Describes a pre-processor for a defined machine learning model - */ -public interface PreProcessor extends NamedXContentObject { - - /** - * @return The name of the pre-processor - */ - String getName(); -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/preprocessing/TargetMeanEncoding.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/preprocessing/TargetMeanEncoding.java deleted file mode 100644 index 713b2a67ba3de..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/preprocessing/TargetMeanEncoding.java +++ /dev/null @@ -1,192 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.inference.preprocessing; - -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContent; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Collections; -import java.util.HashMap; -import java.util.Map; -import java.util.Objects; - -/** - * PreProcessor for target mean encoding a set of categorical values for a given field. 
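For reference, a sketch of building a target mean encoding; the values are hypothetical. Note that in this class builder(String) is an instance method rather than static, so the nested Builder is constructed directly:

    // Hypothetical values; Builder, setFeatureName, addMeanMapEntry, setDefaultValue and build
    // are the API in the deleted class.
    TargetMeanEncoding encoding = new TargetMeanEncoding.Builder("airline")  // field to encode (hypothetical)
        .setFeatureName("airline_target_mean")                               // output feature name (hypothetical)
        .addMeanMapEntry("AAL", 0.17)                                        // category -> target mean
        .setDefaultValue(0.05)                                               // mean used for unseen categories
        .build();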
- */ -public class TargetMeanEncoding implements PreProcessor { - - public static final String NAME = "target_mean_encoding"; - public static final ParseField FIELD = new ParseField("field"); - public static final ParseField FEATURE_NAME = new ParseField("feature_name"); - public static final ParseField TARGET_MAP = new ParseField("target_map"); - public static final ParseField DEFAULT_VALUE = new ParseField("default_value"); - public static final ParseField CUSTOM = new ParseField("custom"); - - @SuppressWarnings("unchecked") - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - NAME, - true, - a -> new TargetMeanEncoding((String) a[0], (String) a[1], (Map) a[2], (Double) a[3], (Boolean) a[4]) - ); - static { - PARSER.declareString(ConstructingObjectParser.constructorArg(), FIELD); - PARSER.declareString(ConstructingObjectParser.constructorArg(), FEATURE_NAME); - PARSER.declareObject( - ConstructingObjectParser.constructorArg(), - (p, c) -> p.map(HashMap::new, XContentParser::doubleValue), - TARGET_MAP - ); - PARSER.declareDouble(ConstructingObjectParser.constructorArg(), DEFAULT_VALUE); - PARSER.declareBoolean(ConstructingObjectParser.optionalConstructorArg(), CUSTOM); - } - - public static TargetMeanEncoding fromXContent(XContentParser parser) { - return PARSER.apply(parser, null); - } - - private final String field; - private final String featureName; - private final Map meanMap; - private final double defaultValue; - private final Boolean custom; - - TargetMeanEncoding(String field, String featureName, Map meanMap, Double defaultValue, Boolean custom) { - this.field = Objects.requireNonNull(field); - this.featureName = Objects.requireNonNull(featureName); - this.meanMap = Collections.unmodifiableMap(Objects.requireNonNull(meanMap)); - this.defaultValue = Objects.requireNonNull(defaultValue); - this.custom = custom; - } - - /** - * @return Field name on which to target mean encode - */ - public String getField() { - return field; - } - - /** - * @return Map of Value: targetMean for the target mean encoding - */ - public Map getMeanMap() { - return meanMap; - } - - /** - * @return The default value to set when a previously unobserved value is seen - */ - public double getDefaultValue() { - return defaultValue; - } - - /** - * @return The feature name for the encoded value - */ - public String getFeatureName() { - return featureName; - } - - public Boolean getCustom() { - return custom; - } - - @Override - public String getName() { - return NAME; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { - builder.startObject(); - builder.field(FIELD.getPreferredName(), field); - builder.field(FEATURE_NAME.getPreferredName(), featureName); - builder.field(TARGET_MAP.getPreferredName(), meanMap); - builder.field(DEFAULT_VALUE.getPreferredName(), defaultValue); - if (custom != null) { - builder.field(CUSTOM.getPreferredName(), custom); - } - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - TargetMeanEncoding that = (TargetMeanEncoding) o; - return Objects.equals(field, that.field) - && Objects.equals(featureName, that.featureName) - && Objects.equals(meanMap, that.meanMap) - && Objects.equals(defaultValue, that.defaultValue) - && Objects.equals(custom, that.custom); - } - - @Override - public int hashCode() { - return Objects.hash(field, featureName, 
meanMap, defaultValue, custom); - } - - public Builder builder(String fieldName) { - return new Builder(fieldName); - } - - public static class Builder { - - private String field; - private String featureName; - private Map meanMap = new HashMap<>(); - private double defaultValue; - private Boolean custom; - - public Builder(String field) { - this.field = field; - } - - public String getField() { - return field; - } - - public Builder setField(String field) { - this.field = field; - return this; - } - - public Builder setFeatureName(String featureName) { - this.featureName = featureName; - return this; - } - - public Builder setMeanMap(Map meanMap) { - this.meanMap = meanMap; - return this; - } - - public Builder addMeanMapEntry(String valueName, double meanEncoding) { - this.meanMap.put(valueName, meanEncoding); - return this; - } - - public Builder setDefaultValue(double defaultValue) { - this.defaultValue = defaultValue; - return this; - } - - public Builder setCustom(boolean custom) { - this.custom = custom; - return this; - } - - public TargetMeanEncoding build() { - return new TargetMeanEncoding(field, featureName, meanMap, defaultValue, custom); - } - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/results/FeatureImportance.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/results/FeatureImportance.java deleted file mode 100644 index d68dfc88488a7..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/results/FeatureImportance.java +++ /dev/null @@ -1,175 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ - -package org.elasticsearch.client.ml.inference.results; - -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParseException; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Collections; -import java.util.List; -import java.util.Objects; - -import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; -import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; - -public class FeatureImportance implements ToXContentObject { - - public static final String IMPORTANCE = "importance"; - public static final String FEATURE_NAME = "feature_name"; - public static final String CLASSES = "classes"; - - @SuppressWarnings("unchecked") - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "feature_importance", - true, - a -> new FeatureImportance((String) a[0], (Double) a[1], (List) a[2]) - ); - - static { - PARSER.declareString(constructorArg(), new ParseField(FeatureImportance.FEATURE_NAME)); - PARSER.declareDouble(optionalConstructorArg(), new ParseField(FeatureImportance.IMPORTANCE)); - PARSER.declareObjectArray( - optionalConstructorArg(), - (p, c) -> ClassImportance.fromXContent(p), - new ParseField(FeatureImportance.CLASSES) - ); - } - - public static FeatureImportance fromXContent(XContentParser parser) { - return PARSER.apply(parser, null); - } - - private final List classImportance; - private final Double importance; - private final String featureName; - - public FeatureImportance(String featureName, Double importance, List classImportance) { - this.featureName = Objects.requireNonNull(featureName); - this.importance = importance; - this.classImportance = classImportance == null ? 
null : Collections.unmodifiableList(classImportance); - } - - public List getClassImportance() { - return classImportance; - } - - public Double getImportance() { - return importance; - } - - public String getFeatureName() { - return featureName; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(FEATURE_NAME, featureName); - if (importance != null) { - builder.field(IMPORTANCE, importance); - } - if (classImportance != null && classImportance.isEmpty() == false) { - builder.field(CLASSES, classImportance); - } - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object object) { - if (object == this) { - return true; - } - if (object == null || getClass() != object.getClass()) { - return false; - } - FeatureImportance that = (FeatureImportance) object; - return Objects.equals(featureName, that.featureName) - && Objects.equals(importance, that.importance) - && Objects.equals(classImportance, that.classImportance); - } - - @Override - public int hashCode() { - return Objects.hash(featureName, importance, classImportance); - } - - public static class ClassImportance implements ToXContentObject { - - static final String CLASS_NAME = "class_name"; - - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "feature_importance_class_importance", - true, - a -> new ClassImportance(a[0], (Double) a[1]) - ); - - static { - PARSER.declareField(ConstructingObjectParser.constructorArg(), (p, c) -> { - if (p.currentToken() == XContentParser.Token.VALUE_STRING) { - return p.text(); - } else if (p.currentToken() == XContentParser.Token.VALUE_NUMBER) { - return p.numberValue(); - } else if (p.currentToken() == XContentParser.Token.VALUE_BOOLEAN) { - return p.booleanValue(); - } - throw new XContentParseException("Unsupported token [" + p.currentToken() + "]"); - }, new ParseField(CLASS_NAME), ObjectParser.ValueType.VALUE); - PARSER.declareDouble(constructorArg(), new ParseField(FeatureImportance.IMPORTANCE)); - } - - public static ClassImportance fromXContent(XContentParser parser) { - return PARSER.apply(parser, null); - } - - private final Object className; - private final double importance; - - public ClassImportance(Object className, double importance) { - this.className = className; - this.importance = importance; - } - - public Object getClassName() { - return className; - } - - public double getImportance() { - return importance; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(CLASS_NAME, className); - builder.field(IMPORTANCE, importance); - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - ClassImportance that = (ClassImportance) o; - return Double.compare(that.importance, importance) == 0 && Objects.equals(className, that.className); - } - - @Override - public int hashCode() { - return Objects.hash(className, importance); - } - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/results/TopClassEntry.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/results/TopClassEntry.java deleted file mode 100644 index 889677f6dd99f..0000000000000 --- 
a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/results/TopClassEntry.java +++ /dev/null @@ -1,113 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ - -package org.elasticsearch.client.ml.inference.results; - -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContent; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParseException; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Objects; - -import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; - -public class TopClassEntry implements ToXContentObject { - - public static final ParseField CLASS_NAME = new ParseField("class_name"); - public static final ParseField CLASS_PROBABILITY = new ParseField("class_probability"); - public static final ParseField CLASS_SCORE = new ParseField("class_score"); - - public static final String NAME = "top_class"; - - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - NAME, - true, - a -> new TopClassEntry(a[0], (Double) a[1], (Double) a[2]) - ); - - static { - PARSER.declareField(constructorArg(), (p, n) -> { - Object o; - XContentParser.Token token = p.currentToken(); - if (token == XContentParser.Token.VALUE_STRING) { - o = p.text(); - } else if (token == XContentParser.Token.VALUE_BOOLEAN) { - o = p.booleanValue(); - } else if (token == XContentParser.Token.VALUE_NUMBER) { - o = p.doubleValue(); - } else { - throw new XContentParseException( - p.getTokenLocation(), - "[" + NAME + "] failed to parse field [" + CLASS_NAME + "] value [" + token + "] is not a string, boolean or number" - ); - } - return o; - }, CLASS_NAME, ObjectParser.ValueType.VALUE); - PARSER.declareDouble(constructorArg(), CLASS_PROBABILITY); - PARSER.declareDouble(constructorArg(), CLASS_SCORE); - } - - public static TopClassEntry fromXContent(XContentParser parser) throws IOException { - return PARSER.parse(parser, null); - } - - private final Object classification; - private final double probability; - private final double score; - - public TopClassEntry(Object classification, double probability, double score) { - this.classification = Objects.requireNonNull(classification); - this.probability = probability; - this.score = score; - } - - public Object getClassification() { - return classification; - } - - public double getProbability() { - return probability; - } - - public double getScore() { - return score; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { - builder.startObject(); - builder.field(CLASS_NAME.getPreferredName(), classification); - builder.field(CLASS_PROBABILITY.getPreferredName(), probability); - builder.field(CLASS_SCORE.getPreferredName(), score); - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object object) { - if (object == this) { - return true; - } - if (object == null || getClass() != object.getClass()) { - 
return false; - } - TopClassEntry that = (TopClassEntry) object; - return Objects.equals(classification, that.classification) && probability == that.probability && score == that.score; - } - - @Override - public int hashCode() { - return Objects.hash(classification, probability, score); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/ClassificationConfig.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/ClassificationConfig.java deleted file mode 100644 index 1e63677bfd70b..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/ClassificationConfig.java +++ /dev/null @@ -1,119 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.inference.trainedmodel; - -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContent; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Objects; - -import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; - -public class ClassificationConfig implements InferenceConfig { - - public static final ParseField NAME = new ParseField("classification"); - - public static final ParseField RESULTS_FIELD = new ParseField("results_field"); - public static final ParseField NUM_TOP_CLASSES = new ParseField("num_top_classes"); - public static final ParseField TOP_CLASSES_RESULTS_FIELD = new ParseField("top_classes_results_field"); - public static final ParseField NUM_TOP_FEATURE_IMPORTANCE_VALUES = new ParseField("num_top_feature_importance_values"); - - private final Integer numTopClasses; - private final String topClassesResultsField; - private final String resultsField; - private final Integer numTopFeatureImportanceValues; - - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - NAME.getPreferredName(), - true, - args -> new ClassificationConfig((Integer) args[0], (String) args[1], (String) args[2], (Integer) args[3]) - ); - - static { - PARSER.declareInt(optionalConstructorArg(), NUM_TOP_CLASSES); - PARSER.declareString(optionalConstructorArg(), RESULTS_FIELD); - PARSER.declareString(optionalConstructorArg(), TOP_CLASSES_RESULTS_FIELD); - PARSER.declareInt(optionalConstructorArg(), NUM_TOP_FEATURE_IMPORTANCE_VALUES); - } - - public static ClassificationConfig fromXContent(XContentParser parser) { - return PARSER.apply(parser, null); - } - - public ClassificationConfig() { - this(null, null, null, null); - } - - public ClassificationConfig(Integer numTopClasses, String resultsField, String topClassesResultsField, Integer featureImportance) { - this.numTopClasses = numTopClasses; - this.topClassesResultsField = topClassesResultsField; - this.resultsField = resultsField; - this.numTopFeatureImportanceValues = featureImportance; - } - - public Integer getNumTopClasses() { - return numTopClasses; - } - - public String getTopClassesResultsField() { - return topClassesResultsField; - } - - public String getResultsField() 
{ - return resultsField; - } - - public Integer getNumTopFeatureImportanceValues() { - return numTopFeatureImportanceValues; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - ClassificationConfig that = (ClassificationConfig) o; - return Objects.equals(numTopClasses, that.numTopClasses) - && Objects.equals(topClassesResultsField, that.topClassesResultsField) - && Objects.equals(resultsField, that.resultsField) - && Objects.equals(numTopFeatureImportanceValues, that.numTopFeatureImportanceValues); - } - - @Override - public int hashCode() { - return Objects.hash(numTopClasses, topClassesResultsField, resultsField, numTopFeatureImportanceValues); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { - builder.startObject(); - if (numTopClasses != null) { - builder.field(NUM_TOP_CLASSES.getPreferredName(), numTopClasses); - } - if (topClassesResultsField != null) { - builder.field(TOP_CLASSES_RESULTS_FIELD.getPreferredName(), topClassesResultsField); - } - if (resultsField != null) { - builder.field(RESULTS_FIELD.getPreferredName(), resultsField); - } - if (numTopFeatureImportanceValues != null) { - builder.field(NUM_TOP_FEATURE_IMPORTANCE_VALUES.getPreferredName(), numTopFeatureImportanceValues); - } - builder.endObject(); - return builder; - } - - @Override - public String getName() { - return NAME.getPreferredName(); - } - -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/IndexLocation.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/IndexLocation.java deleted file mode 100644 index 44bcac9e67554..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/IndexLocation.java +++ /dev/null @@ -1,77 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */
-
-package org.elasticsearch.client.ml.inference.trainedmodel;
-
-import org.elasticsearch.xcontent.ConstructingObjectParser;
-import org.elasticsearch.xcontent.ParseField;
-import org.elasticsearch.xcontent.XContentBuilder;
-import org.elasticsearch.xcontent.XContentParser;
-
-import java.io.IOException;
-import java.util.Objects;
-
-public class IndexLocation implements TrainedModelLocation {
-
-    public static final String INDEX = "index";
-    private static final ParseField NAME = new ParseField("name");
-
-    private static final ConstructingObjectParser<IndexLocation, Void> PARSER = new ConstructingObjectParser<>(
-        INDEX,
-        true,
-        a -> new IndexLocation((String) a[0])
-    );
-
-    static {
-        PARSER.declareString(ConstructingObjectParser.constructorArg(), NAME);
-    }
-
-    public static IndexLocation fromXContent(XContentParser parser) throws IOException {
-        return PARSER.parse(parser, null);
-    }
-
-    private final String index;
-
-    public IndexLocation(String index) {
-        this.index = Objects.requireNonNull(index);
-    }
-
-    public String getIndex() {
-        return index;
-    }
-
-    @Override
-    public String getName() {
-        return INDEX;
-    }
-
-    @Override
-    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
-        builder.startObject();
-        builder.field(NAME.getPreferredName(), index);
-        builder.endObject();
-        return builder;
-    }
-
-    @Override
-    public boolean equals(Object o) {
-        if (this == o) {
-            return true;
-        }
-        if (o == null || getClass() != o.getClass()) {
-            return false;
-        }
-        IndexLocation that = (IndexLocation) o;
-        return Objects.equals(index, that.index);
-    }
-
-    @Override
-    public int hashCode() {
-        return Objects.hash(index);
-    }
-}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/InferenceConfig.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/InferenceConfig.java
deleted file mode 100644
index 1b444cc14cbe2..0000000000000
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/InferenceConfig.java
+++ /dev/null
@@ -1,14 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-package org.elasticsearch.client.ml.inference.trainedmodel;
-
-import org.elasticsearch.client.ml.inference.NamedXContentObject;
-
-public interface InferenceConfig extends NamedXContentObject {
-
-}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/InferenceStats.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/InferenceStats.java
deleted file mode 100644
index d668f7a2aa6fc..0000000000000
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/InferenceStats.java
+++ /dev/null
@@ -1,160 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */ - -package org.elasticsearch.client.ml.inference.trainedmodel; - -import org.elasticsearch.client.common.TimeUtil; -import org.elasticsearch.core.Nullable; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.time.Instant; -import java.util.Objects; - -public class InferenceStats implements ToXContentObject { - - public static final String NAME = "inference_stats"; - public static final ParseField MISSING_ALL_FIELDS_COUNT = new ParseField("missing_all_fields_count"); - public static final ParseField INFERENCE_COUNT = new ParseField("inference_count"); - public static final ParseField CACHE_MISS_COUNT = new ParseField("cache_miss_count"); - public static final ParseField FAILURE_COUNT = new ParseField("failure_count"); - public static final ParseField TIMESTAMP = new ParseField("timestamp"); - - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - NAME, - true, - a -> new InferenceStats((Long) a[0], (Long) a[1], (Long) a[2], (Long) a[3], (Instant) a[4]) - ); - static { - PARSER.declareLong(ConstructingObjectParser.constructorArg(), MISSING_ALL_FIELDS_COUNT); - PARSER.declareLong(ConstructingObjectParser.constructorArg(), INFERENCE_COUNT); - PARSER.declareLong(ConstructingObjectParser.constructorArg(), FAILURE_COUNT); - PARSER.declareLong(ConstructingObjectParser.optionalConstructorArg(), CACHE_MISS_COUNT); - PARSER.declareField( - ConstructingObjectParser.constructorArg(), - p -> TimeUtil.parseTimeFieldToInstant(p, TIMESTAMP.getPreferredName()), - TIMESTAMP, - ObjectParser.ValueType.VALUE - ); - } - - private final long missingAllFieldsCount; - private final long inferenceCount; - private final long failureCount; - private final long cacheMissCount; - private final Instant timeStamp; - - private InferenceStats(Long missingAllFieldsCount, Long inferenceCount, Long failureCount, Long cacheMissCount, Instant instant) { - this( - unboxOrZero(missingAllFieldsCount), - unboxOrZero(inferenceCount), - unboxOrZero(failureCount), - unboxOrZero(cacheMissCount), - instant - ); - } - - public InferenceStats(long missingAllFieldsCount, long inferenceCount, long failureCount, long cacheMissCount, Instant timeStamp) { - this.missingAllFieldsCount = missingAllFieldsCount; - this.inferenceCount = inferenceCount; - this.failureCount = failureCount; - this.cacheMissCount = cacheMissCount; - this.timeStamp = timeStamp == null - ? Instant.ofEpochMilli(Instant.now().toEpochMilli()) - : Instant.ofEpochMilli(timeStamp.toEpochMilli()); - } - - /** - * How many times this model attempted to infer with all its fields missing - */ - public long getMissingAllFieldsCount() { - return missingAllFieldsCount; - } - - /** - * How many inference calls were made against this model - */ - public long getInferenceCount() { - return inferenceCount; - } - - /** - * How many inference failures occurred. - */ - public long getFailureCount() { - return failureCount; - } - - /** - * How many cache misses occurred when inferring this model - */ - public long getCacheMissCount() { - return cacheMissCount; - } - - /** - * The timestamp of these statistics. 
- */ - public Instant getTimeStamp() { - return timeStamp; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(FAILURE_COUNT.getPreferredName(), failureCount); - builder.field(INFERENCE_COUNT.getPreferredName(), inferenceCount); - builder.field(CACHE_MISS_COUNT.getPreferredName(), cacheMissCount); - builder.field(MISSING_ALL_FIELDS_COUNT.getPreferredName(), missingAllFieldsCount); - builder.timeField(TIMESTAMP.getPreferredName(), TIMESTAMP.getPreferredName() + "_string", timeStamp.toEpochMilli()); - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - InferenceStats that = (InferenceStats) o; - return missingAllFieldsCount == that.missingAllFieldsCount - && inferenceCount == that.inferenceCount - && failureCount == that.failureCount - && cacheMissCount == that.cacheMissCount - && Objects.equals(timeStamp, that.timeStamp); - } - - @Override - public int hashCode() { - return Objects.hash(missingAllFieldsCount, inferenceCount, failureCount, cacheMissCount, timeStamp); - } - - @Override - public String toString() { - return "InferenceStats{" - + "missingAllFieldsCount=" - + missingAllFieldsCount - + ", inferenceCount=" - + inferenceCount - + ", failureCount=" - + failureCount - + ", cacheMissCount=" - + cacheMissCount - + ", timeStamp=" - + timeStamp - + '}'; - } - - private static long unboxOrZero(@Nullable Long value) { - return value == null ? 0L : value; - } - -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/RegressionConfig.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/RegressionConfig.java deleted file mode 100644 index 496cceb4e5a17..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/RegressionConfig.java +++ /dev/null @@ -1,94 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
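
A sketch of parsing the inference_stats object removed above. The XContentParserConfiguration-based parser construction is one overload available in this codebase at the time and is an assumption of the sketch:

    import org.elasticsearch.client.ml.inference.trainedmodel.InferenceStats;
    import org.elasticsearch.xcontent.XContentParser;
    import org.elasticsearch.xcontent.XContentParserConfiguration;
    import org.elasticsearch.xcontent.json.JsonXContent;

    String json = "{\"missing_all_fields_count\":0,\"inference_count\":12,"
        + "\"failure_count\":1,\"timestamp\":1645000000000}";
    try (XContentParser parser = JsonXContent.jsonXContent.createParser(XContentParserConfiguration.EMPTY, json)) {
        InferenceStats stats = InferenceStats.PARSER.apply(parser, null);
        // cache_miss_count is optional; unboxOrZero turns the missing value into 0
        assert stats.getInferenceCount() == 12;
        assert stats.getCacheMissCount() == 0;
    }
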
- */ -package org.elasticsearch.client.ml.inference.trainedmodel; - -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContent; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Objects; - -import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; - -public class RegressionConfig implements InferenceConfig { - - public static final ParseField NAME = new ParseField("regression"); - public static final ParseField RESULTS_FIELD = new ParseField("results_field"); - public static final ParseField NUM_TOP_FEATURE_IMPORTANCE_VALUES = new ParseField("num_top_feature_importance_values"); - - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - NAME.getPreferredName(), - true, - args -> new RegressionConfig((String) args[0], (Integer) args[1]) - ); - - static { - PARSER.declareString(optionalConstructorArg(), RESULTS_FIELD); - PARSER.declareInt(optionalConstructorArg(), NUM_TOP_FEATURE_IMPORTANCE_VALUES); - } - - public static RegressionConfig fromXContent(XContentParser parser) { - return PARSER.apply(parser, null); - } - - private final String resultsField; - private final Integer numTopFeatureImportanceValues; - - public RegressionConfig() { - this(null, null); - } - - public RegressionConfig(String resultsField, Integer numTopFeatureImportanceValues) { - this.resultsField = resultsField; - this.numTopFeatureImportanceValues = numTopFeatureImportanceValues; - } - - public Integer getNumTopFeatureImportanceValues() { - return numTopFeatureImportanceValues; - } - - public String getResultsField() { - return resultsField; - } - - @Override - public String getName() { - return NAME.getPreferredName(); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { - builder.startObject(); - if (resultsField != null) { - builder.field(RESULTS_FIELD.getPreferredName(), resultsField); - } - if (numTopFeatureImportanceValues != null) { - builder.field(NUM_TOP_FEATURE_IMPORTANCE_VALUES.getPreferredName(), numTopFeatureImportanceValues); - } - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - RegressionConfig that = (RegressionConfig) o; - return Objects.equals(this.resultsField, that.resultsField) - && Objects.equals(this.numTopFeatureImportanceValues, that.numTopFeatureImportanceValues); - } - - @Override - public int hashCode() { - return Objects.hash(resultsField, numTopFeatureImportanceValues); - } - -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/TargetType.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/TargetType.java deleted file mode 100644 index 4f5ce1aecadcc..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/TargetType.java +++ /dev/null @@ -1,29 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. 
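
The RegressionConfig deleted above is simpler: both fields are optional and omitted from the XContent output when null. A short sketch using only the API shown:

    RegressionConfig config = new RegressionConfig("predicted_value", 5);
    assert "regression".equals(config.getName());
    // Serializes as:
    // {"results_field":"predicted_value","num_top_feature_importance_values":5}
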
Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.inference.trainedmodel; - -import org.elasticsearch.xcontent.ParseField; - -import java.util.Locale; - -public enum TargetType { - - REGRESSION, - CLASSIFICATION; - - public static final ParseField TARGET_TYPE = new ParseField("target_type"); - - public static TargetType fromString(String name) { - return valueOf(name.trim().toUpperCase(Locale.ROOT)); - } - - @Override - public String toString() { - return name().toLowerCase(Locale.ROOT); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/TrainedModel.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/TrainedModel.java deleted file mode 100644 index 76d5538708f52..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/TrainedModel.java +++ /dev/null @@ -1,25 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.inference.trainedmodel; - -import org.elasticsearch.client.ml.inference.NamedXContentObject; - -import java.util.List; - -public interface TrainedModel extends NamedXContentObject { - - /** - * @return List of featureNames expected by the model. In the order that they are expected - */ - List getFeatureNames(); - - /** - * @return The name of the model - */ - String getName(); -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/TrainedModelLocation.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/TrainedModelLocation.java deleted file mode 100644 index cb86b0d121c1f..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/TrainedModelLocation.java +++ /dev/null @@ -1,13 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ - -package org.elasticsearch.client.ml.inference.trainedmodel; - -import org.elasticsearch.client.ml.inference.NamedXContentObject; - -public interface TrainedModelLocation extends NamedXContentObject {} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/ensemble/Ensemble.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/ensemble/Ensemble.java deleted file mode 100644 index c19e50b46c824..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/ensemble/Ensemble.java +++ /dev/null @@ -1,198 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. 
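
The TargetType enum above round-trips through lower-case strings; fromString trims and upper-cases, so parsing tolerates padding and mixed case:

    TargetType type = TargetType.fromString(" Classification ");
    assert type == TargetType.CLASSIFICATION;
    assert "classification".equals(type.toString());
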
Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.inference.trainedmodel.ensemble; - -import org.elasticsearch.client.ml.inference.NamedXContentObjectHelper; -import org.elasticsearch.client.ml.inference.trainedmodel.TargetType; -import org.elasticsearch.client.ml.inference.trainedmodel.TrainedModel; -import org.elasticsearch.core.Nullable; -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContent; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Arrays; -import java.util.Collections; -import java.util.List; -import java.util.Objects; - -public class Ensemble implements TrainedModel { - - public static final String NAME = "ensemble"; - public static final ParseField FEATURE_NAMES = new ParseField("feature_names"); - public static final ParseField TRAINED_MODELS = new ParseField("trained_models"); - public static final ParseField AGGREGATE_OUTPUT = new ParseField("aggregate_output"); - public static final ParseField CLASSIFICATION_LABELS = new ParseField("classification_labels"); - public static final ParseField CLASSIFICATION_WEIGHTS = new ParseField("classification_weights"); - - private static final ObjectParser PARSER = new ObjectParser<>(NAME, true, Ensemble.Builder::new); - - static { - PARSER.declareStringArray(Ensemble.Builder::setFeatureNames, FEATURE_NAMES); - PARSER.declareNamedObjects( - Ensemble.Builder::setTrainedModels, - (p, c, n) -> p.namedObject(TrainedModel.class, n, null), - (ensembleBuilder) -> { /* Noop does not matter client side */ }, - TRAINED_MODELS - ); - PARSER.declareNamedObject( - Ensemble.Builder::setOutputAggregator, - (p, c, n) -> p.namedObject(OutputAggregator.class, n, null), - AGGREGATE_OUTPUT - ); - PARSER.declareString(Ensemble.Builder::setTargetType, TargetType.TARGET_TYPE); - PARSER.declareStringArray(Ensemble.Builder::setClassificationLabels, CLASSIFICATION_LABELS); - PARSER.declareDoubleArray(Ensemble.Builder::setClassificationWeights, CLASSIFICATION_WEIGHTS); - } - - public static Ensemble fromXContent(XContentParser parser) { - return PARSER.apply(parser, null).build(); - } - - private final List featureNames; - private final List models; - private final OutputAggregator outputAggregator; - private final TargetType targetType; - private final List classificationLabels; - private final double[] classificationWeights; - - Ensemble( - List featureNames, - List models, - @Nullable OutputAggregator outputAggregator, - TargetType targetType, - @Nullable List classificationLabels, - @Nullable double[] classificationWeights - ) { - this.featureNames = featureNames; - this.models = models; - this.outputAggregator = outputAggregator; - this.targetType = targetType; - this.classificationLabels = classificationLabels; - this.classificationWeights = classificationWeights; - } - - @Override - public List getFeatureNames() { - return featureNames; - } - - @Override - public String getName() { - return NAME; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { - builder.startObject(); - if (featureNames != null && featureNames.isEmpty() == false) { - 
builder.field(FEATURE_NAMES.getPreferredName(), featureNames); - } - if (models != null) { - NamedXContentObjectHelper.writeNamedObjects(builder, params, true, TRAINED_MODELS.getPreferredName(), models); - } - if (outputAggregator != null) { - NamedXContentObjectHelper.writeNamedObjects( - builder, - params, - false, - AGGREGATE_OUTPUT.getPreferredName(), - Collections.singletonList(outputAggregator) - ); - } - if (targetType != null) { - builder.field(TargetType.TARGET_TYPE.getPreferredName(), targetType); - } - if (classificationLabels != null) { - builder.field(CLASSIFICATION_LABELS.getPreferredName(), classificationLabels); - } - if (classificationWeights != null) { - builder.field(CLASSIFICATION_WEIGHTS.getPreferredName(), classificationWeights); - } - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - Ensemble that = (Ensemble) o; - return Objects.equals(featureNames, that.featureNames) - && Objects.equals(models, that.models) - && Objects.equals(targetType, that.targetType) - && Objects.equals(classificationLabels, that.classificationLabels) - && Arrays.equals(classificationWeights, that.classificationWeights) - && Objects.equals(outputAggregator, that.outputAggregator); - } - - @Override - public int hashCode() { - return Objects.hash( - featureNames, - models, - outputAggregator, - classificationLabels, - targetType, - Arrays.hashCode(classificationWeights) - ); - } - - public static Builder builder() { - return new Builder(); - } - - public static class Builder { - private List featureNames = Collections.emptyList(); - private List trainedModels; - private OutputAggregator outputAggregator; - private TargetType targetType; - private List classificationLabels; - private double[] classificationWeights; - - public Builder setFeatureNames(List featureNames) { - this.featureNames = featureNames; - return this; - } - - public Builder setTrainedModels(List trainedModels) { - this.trainedModels = trainedModels; - return this; - } - - public Builder setOutputAggregator(OutputAggregator outputAggregator) { - this.outputAggregator = outputAggregator; - return this; - } - - public Builder setTargetType(TargetType targetType) { - this.targetType = targetType; - return this; - } - - public Builder setClassificationLabels(List classificationLabels) { - this.classificationLabels = classificationLabels; - return this; - } - - public Builder setClassificationWeights(List classificationWeights) { - this.classificationWeights = classificationWeights.stream().mapToDouble(Double::doubleValue).toArray(); - return this; - } - - private void setTargetType(String targetType) { - this.targetType = TargetType.fromString(targetType); - } - - public Ensemble build() { - return new Ensemble(featureNames, trainedModels, outputAggregator, targetType, classificationLabels, classificationWeights); - } - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/ensemble/Exponent.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/ensemble/Exponent.java deleted file mode 100644 index 22fc6f7ef3f55..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/ensemble/Exponent.java +++ /dev/null @@ -1,71 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. 
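
A minimal sketch of assembling the Ensemble removed above from its Builder, using the Tree model and WeightedSum aggregator whose deletions appear further down in this same patch:

    import java.util.Arrays;
    import java.util.Collections;

    Tree tree = Tree.builder()
        .setFeatureNames(Arrays.asList("f0"))
        .setTargetType(TargetType.REGRESSION)
        .build();
    Ensemble ensemble = Ensemble.builder()
        .setFeatureNames(Arrays.asList("f0"))
        .setTrainedModels(Collections.singletonList(tree))
        .setOutputAggregator(new WeightedSum(Arrays.asList(1.0)))
        .setTargetType(TargetType.REGRESSION)
        .build();
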
Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.inference.trainedmodel.ensemble; - -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.List; -import java.util.Objects; - -public class Exponent implements OutputAggregator { - - public static final String NAME = "exponent"; - public static final ParseField WEIGHTS = new ParseField("weights"); - - @SuppressWarnings("unchecked") - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - NAME, - true, - a -> new Exponent((List) a[0]) - ); - static { - PARSER.declareDoubleArray(ConstructingObjectParser.optionalConstructorArg(), WEIGHTS); - } - - public static Exponent fromXContent(XContentParser parser) { - return PARSER.apply(parser, null); - } - - private final List weights; - - public Exponent(List weights) { - this.weights = weights; - } - - @Override - public String getName() { - return NAME; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - if (weights != null) { - builder.field(WEIGHTS.getPreferredName(), weights); - } - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - Exponent that = (Exponent) o; - return Objects.equals(weights, that.weights); - } - - @Override - public int hashCode() { - return Objects.hash(weights); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/ensemble/LogisticRegression.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/ensemble/LogisticRegression.java deleted file mode 100644 index 19693a728d2ee..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/ensemble/LogisticRegression.java +++ /dev/null @@ -1,71 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ -package org.elasticsearch.client.ml.inference.trainedmodel.ensemble; - -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.List; -import java.util.Objects; - -public class LogisticRegression implements OutputAggregator { - - public static final String NAME = "logistic_regression"; - public static final ParseField WEIGHTS = new ParseField("weights"); - - @SuppressWarnings("unchecked") - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - NAME, - true, - a -> new LogisticRegression((List) a[0]) - ); - static { - PARSER.declareDoubleArray(ConstructingObjectParser.optionalConstructorArg(), WEIGHTS); - } - - public static LogisticRegression fromXContent(XContentParser parser) { - return PARSER.apply(parser, null); - } - - private final List weights; - - public LogisticRegression(List weights) { - this.weights = weights; - } - - @Override - public String getName() { - return NAME; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - if (weights != null) { - builder.field(WEIGHTS.getPreferredName(), weights); - } - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - LogisticRegression that = (LogisticRegression) o; - return Objects.equals(weights, that.weights); - } - - @Override - public int hashCode() { - return Objects.hash(weights); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/ensemble/OutputAggregator.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/ensemble/OutputAggregator.java deleted file mode 100644 index 8a0f50d3a8bb6..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/ensemble/OutputAggregator.java +++ /dev/null @@ -1,17 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.inference.trainedmodel.ensemble; - -import org.elasticsearch.client.ml.inference.NamedXContentObject; - -public interface OutputAggregator extends NamedXContentObject { - /** - * @return The name of the output aggregator - */ - String getName(); -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/ensemble/WeightedMode.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/ensemble/WeightedMode.java deleted file mode 100644 index 422dfb0a21fc1..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/ensemble/WeightedMode.java +++ /dev/null @@ -1,77 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. 
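
Exponent above and LogisticRegression here share the same shape: an optional list of per-model weights, typically one per ensemble member. For illustration:

    // A null weights list is allowed and simply omitted from the XContent output.
    LogisticRegression aggregator = new LogisticRegression(Arrays.asList(1.0, 2.0, 0.5));
    assert "logistic_regression".equals(aggregator.getName());
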
Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.inference.trainedmodel.ensemble; - -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContent; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.List; -import java.util.Objects; - -public class WeightedMode implements OutputAggregator { - - public static final String NAME = "weighted_mode"; - public static final ParseField WEIGHTS = new ParseField("weights"); - public static final ParseField NUM_CLASSES = new ParseField("num_classes"); - - @SuppressWarnings("unchecked") - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - NAME, - true, - a -> new WeightedMode((Integer) a[0], (List) a[1]) - ); - static { - PARSER.declareInt(ConstructingObjectParser.constructorArg(), NUM_CLASSES); - PARSER.declareDoubleArray(ConstructingObjectParser.optionalConstructorArg(), WEIGHTS); - } - - public static WeightedMode fromXContent(XContentParser parser) { - return PARSER.apply(parser, null); - } - - private final List weights; - private final int numClasses; - - public WeightedMode(int numClasses, List weights) { - this.weights = weights; - this.numClasses = numClasses; - } - - @Override - public String getName() { - return NAME; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { - builder.startObject(); - if (weights != null) { - builder.field(WEIGHTS.getPreferredName(), weights); - } - builder.field(NUM_CLASSES.getPreferredName(), numClasses); - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - WeightedMode that = (WeightedMode) o; - return Objects.equals(weights, that.weights) && numClasses == that.numClasses; - } - - @Override - public int hashCode() { - return Objects.hash(weights, numClasses); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/ensemble/WeightedSum.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/ensemble/WeightedSum.java deleted file mode 100644 index a36c13b138f78..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/ensemble/WeightedSum.java +++ /dev/null @@ -1,73 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ -package org.elasticsearch.client.ml.inference.trainedmodel.ensemble; - -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContent; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.List; -import java.util.Objects; - -public class WeightedSum implements OutputAggregator { - - public static final String NAME = "weighted_sum"; - public static final ParseField WEIGHTS = new ParseField("weights"); - - @SuppressWarnings("unchecked") - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - NAME, - true, - a -> new WeightedSum((List) a[0]) - ); - - static { - PARSER.declareDoubleArray(ConstructingObjectParser.optionalConstructorArg(), WEIGHTS); - } - - public static WeightedSum fromXContent(XContentParser parser) { - return PARSER.apply(parser, null); - } - - private final List weights; - - public WeightedSum(List weights) { - this.weights = weights; - } - - @Override - public String getName() { - return NAME; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { - builder.startObject(); - if (weights != null) { - builder.field(WEIGHTS.getPreferredName(), weights); - } - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - WeightedSum that = (WeightedSum) o; - return Objects.equals(weights, that.weights); - } - - @Override - public int hashCode() { - return Objects.hash(weights); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/langident/LangIdentNeuralNetwork.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/langident/LangIdentNeuralNetwork.java deleted file mode 100644 index 89f5625331cd7..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/langident/LangIdentNeuralNetwork.java +++ /dev/null @@ -1,94 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ -package org.elasticsearch.client.ml.inference.trainedmodel.langident; - -import org.elasticsearch.client.ml.inference.trainedmodel.TrainedModel; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContent; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Collections; -import java.util.List; -import java.util.Objects; - -import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; - -/** - * Shallow, fully connected, feed forward NN modeled after and ported from https://github.com/google/cld3 - */ -public class LangIdentNeuralNetwork implements TrainedModel { - - public static final String NAME = "lang_ident_neural_network"; - public static final ParseField EMBEDDED_VECTOR_FEATURE_NAME = new ParseField("embedded_vector_feature_name"); - public static final ParseField HIDDEN_LAYER = new ParseField("hidden_layer"); - public static final ParseField SOFTMAX_LAYER = new ParseField("softmax_layer"); - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - NAME, - true, - a -> new LangIdentNeuralNetwork((String) a[0], (LangNetLayer) a[1], (LangNetLayer) a[2]) - ); - - static { - PARSER.declareString(constructorArg(), EMBEDDED_VECTOR_FEATURE_NAME); - PARSER.declareObject(constructorArg(), LangNetLayer.PARSER::apply, HIDDEN_LAYER); - PARSER.declareObject(constructorArg(), LangNetLayer.PARSER::apply, SOFTMAX_LAYER); - } - - public static LangIdentNeuralNetwork fromXContent(XContentParser parser) { - return PARSER.apply(parser, null); - } - - private final LangNetLayer hiddenLayer; - private final LangNetLayer softmaxLayer; - private final String embeddedVectorFeatureName; - - LangIdentNeuralNetwork(String embeddedVectorFeatureName, LangNetLayer hiddenLayer, LangNetLayer softmaxLayer) { - this.embeddedVectorFeatureName = embeddedVectorFeatureName; - this.hiddenLayer = hiddenLayer; - this.softmaxLayer = softmaxLayer; - } - - @Override - public List getFeatureNames() { - return Collections.singletonList(embeddedVectorFeatureName); - } - - @Override - public String getName() { - return NAME; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { - builder.startObject(); - builder.field(EMBEDDED_VECTOR_FEATURE_NAME.getPreferredName(), embeddedVectorFeatureName); - builder.field(HIDDEN_LAYER.getPreferredName(), hiddenLayer); - builder.field(SOFTMAX_LAYER.getPreferredName(), softmaxLayer); - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - LangIdentNeuralNetwork that = (LangIdentNeuralNetwork) o; - return Objects.equals(embeddedVectorFeatureName, that.embeddedVectorFeatureName) - && Objects.equals(hiddenLayer, that.hiddenLayer) - && Objects.equals(softmaxLayer, that.softmaxLayer); - } - - @Override - public int hashCode() { - return Objects.hash(embeddedVectorFeatureName, hiddenLayer, softmaxLayer); - } - -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/langident/LangNetLayer.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/langident/LangNetLayer.java deleted file mode 100644 index 9737a577725f7..0000000000000 --- 
a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/langident/LangNetLayer.java +++ /dev/null @@ -1,111 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.inference.trainedmodel.langident; - -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Arrays; -import java.util.List; -import java.util.Objects; - -import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; - -/** - * Represents a single layer in the compressed Lang Net - */ -public class LangNetLayer implements ToXContentObject { - - public static final ParseField NAME = new ParseField("lang_net_layer"); - - private static final ParseField NUM_ROWS = new ParseField("num_rows"); - private static final ParseField NUM_COLS = new ParseField("num_cols"); - private static final ParseField WEIGHTS = new ParseField("weights"); - private static final ParseField BIAS = new ParseField("bias"); - - @SuppressWarnings("unchecked") - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - NAME.getPreferredName(), - true, - a -> new LangNetLayer((List) a[0], (int) a[1], (int) a[2], (List) a[3]) - ); - - static { - PARSER.declareDoubleArray(constructorArg(), WEIGHTS); - PARSER.declareInt(constructorArg(), NUM_COLS); - PARSER.declareInt(constructorArg(), NUM_ROWS); - PARSER.declareDoubleArray(constructorArg(), BIAS); - } - - private final double[] weights; - private final int weightRows; - private final int weightCols; - private final double[] bias; - - private LangNetLayer(List weights, int numCols, int numRows, List bias) { - this( - weights.stream().mapToDouble(Double::doubleValue).toArray(), - numCols, - numRows, - bias.stream().mapToDouble(Double::doubleValue).toArray() - ); - } - - LangNetLayer(double[] weights, int numCols, int numRows, double[] bias) { - this.weights = weights; - this.weightCols = numCols; - this.weightRows = numRows; - this.bias = bias; - } - - double[] getWeights() { - return weights; - } - - int getWeightRows() { - return weightRows; - } - - int getWeightCols() { - return weightCols; - } - - double[] getBias() { - return bias; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(NUM_COLS.getPreferredName(), weightCols); - builder.field(NUM_ROWS.getPreferredName(), weightRows); - builder.field(WEIGHTS.getPreferredName(), weights); - builder.field(BIAS.getPreferredName(), bias); - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - LangNetLayer that = (LangNetLayer) o; - return Arrays.equals(weights, that.weights) - && Arrays.equals(bias, that.bias) - && Objects.equals(weightCols, that.weightCols) - && Objects.equals(weightRows, that.weightRows); - } - - @Override - public int hashCode() { - return Objects.hash(Arrays.hashCode(weights), 
Arrays.hashCode(bias), weightCols, weightRows); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/tree/Tree.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/tree/Tree.java deleted file mode 100644 index 7d0b633693e7d..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/tree/Tree.java +++ /dev/null @@ -1,231 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.inference.trainedmodel.tree; - -import org.elasticsearch.client.ml.inference.trainedmodel.TargetType; -import org.elasticsearch.client.ml.inference.trainedmodel.TrainedModel; -import org.elasticsearch.common.Strings; -import org.elasticsearch.core.Nullable; -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collections; -import java.util.List; -import java.util.Objects; -import java.util.stream.Collectors; - -public class Tree implements TrainedModel { - - public static final String NAME = "tree"; - - public static final ParseField FEATURE_NAMES = new ParseField("feature_names"); - public static final ParseField TREE_STRUCTURE = new ParseField("tree_structure"); - public static final ParseField CLASSIFICATION_LABELS = new ParseField("classification_labels"); - - private static final ObjectParser PARSER = new ObjectParser<>(NAME, true, Builder::new); - - static { - PARSER.declareStringArray(Builder::setFeatureNames, FEATURE_NAMES); - PARSER.declareObjectArray(Builder::setNodes, (p, c) -> TreeNode.fromXContent(p), TREE_STRUCTURE); - PARSER.declareString(Builder::setTargetType, TargetType.TARGET_TYPE); - PARSER.declareStringArray(Builder::setClassificationLabels, CLASSIFICATION_LABELS); - } - - public static Tree fromXContent(XContentParser parser) { - return PARSER.apply(parser, null).build(); - } - - private final List featureNames; - private final List nodes; - private final TargetType targetType; - private final List classificationLabels; - - Tree(List featureNames, List nodes, TargetType targetType, List classificationLabels) { - this.featureNames = featureNames; - this.nodes = nodes; - this.targetType = targetType; - this.classificationLabels = classificationLabels; - } - - @Override - public String getName() { - return NAME; - } - - @Override - public List getFeatureNames() { - return featureNames; - } - - public List getNodes() { - return nodes; - } - - @Nullable - public List getClassificationLabels() { - return classificationLabels; - } - - public TargetType getTargetType() { - return targetType; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - if (featureNames != null) { - builder.field(FEATURE_NAMES.getPreferredName(), featureNames); - } - if (nodes != null) { - builder.field(TREE_STRUCTURE.getPreferredName(), nodes); - } - if (classificationLabels != null) { - 
builder.field(CLASSIFICATION_LABELS.getPreferredName(), classificationLabels); - } - if (targetType != null) { - builder.field(TargetType.TARGET_TYPE.getPreferredName(), targetType.toString()); - } - builder.endObject(); - return builder; - } - - @Override - public String toString() { - return Strings.toString(this); - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - Tree that = (Tree) o; - return Objects.equals(featureNames, that.featureNames) - && Objects.equals(classificationLabels, that.classificationLabels) - && Objects.equals(targetType, that.targetType) - && Objects.equals(nodes, that.nodes); - } - - @Override - public int hashCode() { - return Objects.hash(featureNames, nodes, targetType, classificationLabels); - } - - public static Builder builder() { - return new Builder(); - } - - public static class Builder { - private List featureNames; - private ArrayList nodes; - private int numNodes; - private TargetType targetType; - private List classificationLabels; - - public Builder() { - nodes = new ArrayList<>(); - // allocate space in the root node and set to a leaf - nodes.add(null); - addLeaf(0, 0.0); - numNodes = 1; - } - - public Builder setFeatureNames(List featureNames) { - this.featureNames = featureNames; - return this; - } - - public Builder addNode(TreeNode.Builder node) { - nodes.add(node); - return this; - } - - public Builder setNodes(List nodes) { - this.nodes = new ArrayList<>(nodes); - return this; - } - - public Builder setNodes(TreeNode.Builder... nodes) { - return setNodes(Arrays.asList(nodes)); - } - - public Builder setTargetType(TargetType targetType) { - this.targetType = targetType; - return this; - } - - public Builder setClassificationLabels(List classificationLabels) { - this.classificationLabels = classificationLabels; - return this; - } - - private void setTargetType(String targetType) { - this.targetType = TargetType.fromString(targetType); - } - - /** - * Add a decision node. Space for the child nodes is allocated - * @param nodeIndex Where to place the node. This is either 0 (root) or an existing child node index - * @param featureIndex The feature index the decision is made on - * @param isDefaultLeft Default left branch if the feature is missing - * @param decisionThreshold The decision threshold - * @return The created node - */ - public TreeNode.Builder addJunction(int nodeIndex, int featureIndex, boolean isDefaultLeft, double decisionThreshold) { - int leftChild = numNodes++; - int rightChild = numNodes++; - nodes.ensureCapacity(nodeIndex + 1); - for (int i = nodes.size(); i < nodeIndex + 1; i++) { - nodes.add(null); - } - - TreeNode.Builder node = TreeNode.builder(nodeIndex) - .setDefaultLeft(isDefaultLeft) - .setLeftChild(leftChild) - .setRightChild(rightChild) - .setSplitFeature(featureIndex) - .setThreshold(decisionThreshold); - nodes.set(nodeIndex, node); - - // allocate space for the child nodes - while (nodes.size() <= rightChild) { - nodes.add(null); - } - - return node; - } - - /** - * Sets the node at {@code nodeIndex} to a leaf node. 
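
To make the node-index bookkeeping described above concrete, a sketch of building a depth-one tree with addJunction and addLeaf, using only the Builder API shown in this class:

    Tree.Builder builder = Tree.builder().setFeatureNames(Arrays.asList("f0"));
    // Root junction on feature 0 at threshold 0.5; this allocates slots for
    // both children and returns the root node's builder.
    TreeNode.Builder root = builder.addJunction(0, 0, true, 0.5);
    builder.addLeaf(root.getLeftChild(), 10.0);
    builder.addLeaf(root.getRightChild(), 20.0);
    Tree tree = builder.setTargetType(TargetType.REGRESSION).build();
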
- * @param nodeIndex The index as allocated by a call to {@link #addJunction(int, int, boolean, double)} - * @param value The prediction value - * @return this - */ - public Builder addLeaf(int nodeIndex, double value) { - for (int i = nodes.size(); i < nodeIndex + 1; i++) { - nodes.add(null); - } - nodes.set(nodeIndex, TreeNode.builder(nodeIndex).setLeafValue(Collections.singletonList(value))); - return this; - } - - public Tree build() { - return new Tree( - featureNames, - nodes.stream().map(TreeNode.Builder::build).collect(Collectors.toList()), - targetType, - classificationLabels - ); - } - } - -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/tree/TreeNode.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/tree/TreeNode.java deleted file mode 100644 index cb7d9a0f8f211..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/tree/TreeNode.java +++ /dev/null @@ -1,286 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.inference.trainedmodel.tree; - -import org.elasticsearch.client.ml.job.config.Operator; -import org.elasticsearch.common.Strings; -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.List; -import java.util.Objects; - -public class TreeNode implements ToXContentObject { - - public static final String NAME = "tree_node"; - - public static final ParseField DECISION_TYPE = new ParseField("decision_type"); - public static final ParseField THRESHOLD = new ParseField("threshold"); - public static final ParseField LEFT_CHILD = new ParseField("left_child"); - public static final ParseField RIGHT_CHILD = new ParseField("right_child"); - public static final ParseField DEFAULT_LEFT = new ParseField("default_left"); - public static final ParseField SPLIT_FEATURE = new ParseField("split_feature"); - public static final ParseField NODE_INDEX = new ParseField("node_index"); - public static final ParseField SPLIT_GAIN = new ParseField("split_gain"); - public static final ParseField LEAF_VALUE = new ParseField("leaf_value"); - public static final ParseField NUMBER_SAMPLES = new ParseField("number_samples"); - - private static final ObjectParser PARSER = new ObjectParser<>(NAME, true, Builder::new); - static { - PARSER.declareDouble(Builder::setThreshold, THRESHOLD); - PARSER.declareField(Builder::setOperator, p -> Operator.fromString(p.text()), DECISION_TYPE, ObjectParser.ValueType.STRING); - PARSER.declareInt(Builder::setLeftChild, LEFT_CHILD); - PARSER.declareInt(Builder::setRightChild, RIGHT_CHILD); - PARSER.declareBoolean(Builder::setDefaultLeft, DEFAULT_LEFT); - PARSER.declareInt(Builder::setSplitFeature, SPLIT_FEATURE); - PARSER.declareInt(Builder::setNodeIndex, NODE_INDEX); - PARSER.declareDouble(Builder::setSplitGain, SPLIT_GAIN); - PARSER.declareDoubleArray(Builder::setLeafValue, LEAF_VALUE); - PARSER.declareLong(Builder::setNumberSamples, 
NUMBER_SAMPLES); - } - - public static Builder fromXContent(XContentParser parser) { - return PARSER.apply(parser, null); - } - - private final Operator operator; - private final Double threshold; - private final Integer splitFeature; - private final int nodeIndex; - private final Double splitGain; - private final List leafValue; - private final Boolean defaultLeft; - private final Integer leftChild; - private final Integer rightChild; - private final Long numberSamples; - - TreeNode( - Operator operator, - Double threshold, - Integer splitFeature, - int nodeIndex, - Double splitGain, - List leafValue, - Boolean defaultLeft, - Integer leftChild, - Integer rightChild, - Long numberSamples - ) { - this.operator = operator; - this.threshold = threshold; - this.splitFeature = splitFeature; - this.nodeIndex = nodeIndex; - this.splitGain = splitGain; - this.leafValue = leafValue; - this.defaultLeft = defaultLeft; - this.leftChild = leftChild; - this.rightChild = rightChild; - this.numberSamples = numberSamples; - } - - public Operator getOperator() { - return operator; - } - - public Double getThreshold() { - return threshold; - } - - public Integer getSplitFeature() { - return splitFeature; - } - - public Integer getNodeIndex() { - return nodeIndex; - } - - public Double getSplitGain() { - return splitGain; - } - - public List getLeafValue() { - return leafValue; - } - - public Boolean isDefaultLeft() { - return defaultLeft; - } - - public Integer getLeftChild() { - return leftChild; - } - - public Integer getRightChild() { - return rightChild; - } - - public Long getNumberSamples() { - return numberSamples; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - addOptionalField(builder, DECISION_TYPE, operator); - addOptionalField(builder, THRESHOLD, threshold); - addOptionalField(builder, SPLIT_FEATURE, splitFeature); - addOptionalField(builder, SPLIT_GAIN, splitGain); - addOptionalField(builder, NODE_INDEX, nodeIndex); - addOptionalField(builder, LEAF_VALUE, leafValue); - addOptionalField(builder, DEFAULT_LEFT, defaultLeft); - addOptionalField(builder, LEFT_CHILD, leftChild); - addOptionalField(builder, RIGHT_CHILD, rightChild); - addOptionalField(builder, NUMBER_SAMPLES, numberSamples); - builder.endObject(); - return builder; - } - - private void addOptionalField(XContentBuilder builder, ParseField field, Object value) throws IOException { - if (value != null) { - builder.field(field.getPreferredName(), value); - } - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - TreeNode that = (TreeNode) o; - return Objects.equals(operator, that.operator) - && Objects.equals(threshold, that.threshold) - && Objects.equals(splitFeature, that.splitFeature) - && Objects.equals(nodeIndex, that.nodeIndex) - && Objects.equals(splitGain, that.splitGain) - && Objects.equals(leafValue, that.leafValue) - && Objects.equals(defaultLeft, that.defaultLeft) - && Objects.equals(leftChild, that.leftChild) - && Objects.equals(rightChild, that.rightChild) - && Objects.equals(numberSamples, that.numberSamples); - } - - @Override - public int hashCode() { - return Objects.hash( - operator, - threshold, - splitFeature, - splitGain, - nodeIndex, - leafValue, - defaultLeft, - leftChild, - rightChild, - numberSamples - ); - } - - @Override - public String toString() { - return Strings.toString(this); - } - - public static Builder builder(int 
nodeIndex) { - return new Builder(nodeIndex); - } - - public static class Builder { - private Operator operator; - private Double threshold; - private Integer splitFeature; - private int nodeIndex; - private Double splitGain; - private List leafValue; - private Boolean defaultLeft; - private Integer leftChild; - private Integer rightChild; - private Long numberSamples; - - public Builder(int nodeIndex) { - this.nodeIndex = nodeIndex; - } - - private Builder() {} - - public Builder setOperator(Operator operator) { - this.operator = operator; - return this; - } - - public Builder setThreshold(Double threshold) { - this.threshold = threshold; - return this; - } - - public Builder setSplitFeature(Integer splitFeature) { - this.splitFeature = splitFeature; - return this; - } - - public Builder setNodeIndex(int nodeIndex) { - this.nodeIndex = nodeIndex; - return this; - } - - public Builder setSplitGain(Double splitGain) { - this.splitGain = splitGain; - return this; - } - - public Builder setLeafValue(List leafValue) { - this.leafValue = leafValue; - return this; - } - - public Builder setDefaultLeft(Boolean defaultLeft) { - this.defaultLeft = defaultLeft; - return this; - } - - public Builder setLeftChild(Integer leftChild) { - this.leftChild = leftChild; - return this; - } - - public Integer getLeftChild() { - return leftChild; - } - - public Builder setRightChild(Integer rightChild) { - this.rightChild = rightChild; - return this; - } - - public Integer getRightChild() { - return rightChild; - } - - public Builder setNumberSamples(Long numberSamples) { - this.numberSamples = numberSamples; - return this; - } - - public TreeNode build() { - return new TreeNode( - operator, - threshold, - splitFeature, - nodeIndex, - splitGain, - leafValue, - defaultLeft, - leftChild, - rightChild, - numberSamples - ); - } - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/AnalysisConfig.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/AnalysisConfig.java deleted file mode 100644 index 09b8ef16eeda4..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/AnalysisConfig.java +++ /dev/null @@ -1,446 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.job.config; - -import org.elasticsearch.core.TimeValue; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.Collections; -import java.util.HashSet; -import java.util.List; -import java.util.Objects; -import java.util.Set; -import java.util.function.Function; - -/** - * Analysis configuration options that describe which fields are - * analyzed and which functions are used to detect anomalies. - *
<p>
- * The configuration can contain multiple detectors, a new anomaly detector will - * be created for each detector configuration. The fields - * bucketSpan, summaryCountFieldName and categorizationFieldName - * apply to all detectors. - *
<p>
- * If a value has not been set it will be null - * Object wrappers are used around integral types & booleans so they can take - * null values. - */ -public class AnalysisConfig implements ToXContentObject { - /** - * Serialisation names - */ - public static final ParseField ANALYSIS_CONFIG = new ParseField("analysis_config"); - public static final ParseField BUCKET_SPAN = new ParseField("bucket_span"); - public static final ParseField CATEGORIZATION_FIELD_NAME = new ParseField("categorization_field_name"); - public static final ParseField CATEGORIZATION_FILTERS = new ParseField("categorization_filters"); - public static final ParseField CATEGORIZATION_ANALYZER = CategorizationAnalyzerConfig.CATEGORIZATION_ANALYZER; - public static final ParseField PER_PARTITION_CATEGORIZATION = new ParseField("per_partition_categorization"); - public static final ParseField LATENCY = new ParseField("latency"); - public static final ParseField SUMMARY_COUNT_FIELD_NAME = new ParseField("summary_count_field_name"); - public static final ParseField DETECTORS = new ParseField("detectors"); - public static final ParseField INFLUENCERS = new ParseField("influencers"); - public static final ParseField MULTIVARIATE_BY_FIELDS = new ParseField("multivariate_by_fields"); - public static final ParseField MODEL_PRUNE_WINDOW = new ParseField("model_prune_window"); - - @SuppressWarnings("unchecked") - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - ANALYSIS_CONFIG.getPreferredName(), - true, - a -> new AnalysisConfig.Builder((List) a[0]) - ); - - static { - PARSER.declareObjectArray(ConstructingObjectParser.constructorArg(), (p, c) -> (Detector.PARSER).apply(p, c).build(), DETECTORS); - PARSER.declareString( - (builder, val) -> builder.setBucketSpan(TimeValue.parseTimeValue(val, BUCKET_SPAN.getPreferredName())), - BUCKET_SPAN - ); - PARSER.declareString(Builder::setCategorizationFieldName, CATEGORIZATION_FIELD_NAME); - PARSER.declareStringArray(Builder::setCategorizationFilters, CATEGORIZATION_FILTERS); - // This one is nasty - the syntax for analyzers takes either names or objects at many levels, hence it's not - // possible to simply declare whether the field is a string or object and a completely custom parser is required - PARSER.declareField( - Builder::setCategorizationAnalyzerConfig, - (p, c) -> CategorizationAnalyzerConfig.buildFromXContentFragment(p), - CATEGORIZATION_ANALYZER, - ObjectParser.ValueType.OBJECT_OR_STRING - ); - PARSER.declareObject( - Builder::setPerPartitionCategorizationConfig, - PerPartitionCategorizationConfig.PARSER, - PER_PARTITION_CATEGORIZATION - ); - PARSER.declareString((builder, val) -> builder.setLatency(TimeValue.parseTimeValue(val, LATENCY.getPreferredName())), LATENCY); - PARSER.declareString(Builder::setSummaryCountFieldName, SUMMARY_COUNT_FIELD_NAME); - PARSER.declareStringArray(Builder::setInfluencers, INFLUENCERS); - PARSER.declareBoolean(Builder::setMultivariateByFields, MULTIVARIATE_BY_FIELDS); - PARSER.declareString( - (builder, val) -> builder.setModelPruneWindow(TimeValue.parseTimeValue(val, MODEL_PRUNE_WINDOW.getPreferredName())), - MODEL_PRUNE_WINDOW - ); - } - - /** - * These values apply to all detectors - */ - private final TimeValue bucketSpan; - private final String categorizationFieldName; - private final List categorizationFilters; - private final CategorizationAnalyzerConfig categorizationAnalyzerConfig; - private final PerPartitionCategorizationConfig perPartitionCategorizationConfig; - private final TimeValue latency; - 
private final String summaryCountFieldName; - private final List detectors; - private final List influencers; - private final Boolean multivariateByFields; - private final TimeValue modelPruneWindow; - - private AnalysisConfig( - TimeValue bucketSpan, - String categorizationFieldName, - List categorizationFilters, - CategorizationAnalyzerConfig categorizationAnalyzerConfig, - PerPartitionCategorizationConfig perPartitionCategorizationConfig, - TimeValue latency, - String summaryCountFieldName, - List detectors, - List influencers, - Boolean multivariateByFields, - TimeValue modelPruneWindow - ) { - this.detectors = Collections.unmodifiableList(detectors); - this.bucketSpan = bucketSpan; - this.latency = latency; - this.categorizationFieldName = categorizationFieldName; - this.categorizationAnalyzerConfig = categorizationAnalyzerConfig; - this.perPartitionCategorizationConfig = perPartitionCategorizationConfig; - this.categorizationFilters = categorizationFilters == null ? null : Collections.unmodifiableList(categorizationFilters); - this.summaryCountFieldName = summaryCountFieldName; - this.influencers = Collections.unmodifiableList(influencers); - this.multivariateByFields = multivariateByFields; - this.modelPruneWindow = modelPruneWindow; - } - - /** - * The analysis bucket span - * - * @return The bucketspan or null if not set - */ - public TimeValue getBucketSpan() { - return bucketSpan; - } - - public String getCategorizationFieldName() { - return categorizationFieldName; - } - - public List getCategorizationFilters() { - return categorizationFilters; - } - - public CategorizationAnalyzerConfig getCategorizationAnalyzerConfig() { - return categorizationAnalyzerConfig; - } - - public PerPartitionCategorizationConfig getPerPartitionCategorizationConfig() { - return perPartitionCategorizationConfig; - } - - /** - * The latency interval during which out-of-order records should be handled. - * - * @return The latency interval or null if not set - */ - public TimeValue getLatency() { - return latency; - } - - /** - * The name of the field that contains counts for pre-summarised input - * - * @return The field name or null if not set - */ - public String getSummaryCountFieldName() { - return summaryCountFieldName; - } - - /** - * The list of analysis detectors. 
In a valid configuration the list should - * contain at least 1 {@link Detector} - * - * @return The Detectors used in this job - */ - public List getDetectors() { - return detectors; - } - - /** - * The list of influence field names - */ - public List getInfluencers() { - return influencers; - } - - public Boolean getMultivariateByFields() { - return multivariateByFields; - } - - public TimeValue getModelPruneWindow() { - return modelPruneWindow; - } - - private static void addIfNotNull(Set fields, String field) { - if (field != null) { - fields.add(field); - } - } - - public List fields() { - return collectNonNullAndNonEmptyDetectorFields(Detector::getFieldName); - } - - private List collectNonNullAndNonEmptyDetectorFields(Function fieldGetter) { - Set fields = new HashSet<>(); - - for (Detector d : getDetectors()) { - addIfNotNull(fields, fieldGetter.apply(d)); - } - - // remove empty strings - fields.remove(""); - - return new ArrayList<>(fields); - } - - public List byFields() { - return collectNonNullAndNonEmptyDetectorFields(Detector::getByFieldName); - } - - public List overFields() { - return collectNonNullAndNonEmptyDetectorFields(Detector::getOverFieldName); - } - - public List partitionFields() { - return collectNonNullAndNonEmptyDetectorFields(Detector::getPartitionFieldName); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - if (bucketSpan != null) { - builder.field(BUCKET_SPAN.getPreferredName(), bucketSpan.getStringRep()); - } - if (categorizationFieldName != null) { - builder.field(CATEGORIZATION_FIELD_NAME.getPreferredName(), categorizationFieldName); - } - if (categorizationFilters != null) { - builder.field(CATEGORIZATION_FILTERS.getPreferredName(), categorizationFilters); - } - if (categorizationAnalyzerConfig != null) { - // This cannot be builder.field(CATEGORIZATION_ANALYZER.getPreferredName(), categorizationAnalyzerConfig, params); - // because that always writes categorizationAnalyzerConfig as an object, and in the case of a global analyzer it - // gets written as a single string. 
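For orientation, a minimal sketch of assembling an AnalysisConfig through this class's Builder (defined further down in this file). The function, field names, and bucket span are illustrative values, not part of this patch:

    import java.util.Collections;
    import java.util.List;

    import org.elasticsearch.client.ml.job.config.AnalysisConfig;
    import org.elasticsearch.client.ml.job.config.Detector;
    import org.elasticsearch.core.TimeValue;

    class AnalysisConfigExample {
        static AnalysisConfig example() {
            // One detector: mean of an illustrative "responsetime" field, split by "airline".
            Detector detector = new Detector.Builder("mean", "responsetime")
                .setByFieldName("airline")
                .build();
            // bucket_span and influencers apply across all detectors in the config.
            return AnalysisConfig.builder(Collections.singletonList(detector))
                .setBucketSpan(TimeValue.timeValueMinutes(15))
                .setInfluencers(List.of("airline"))
                .build();
        }
    }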
- categorizationAnalyzerConfig.toXContent(builder, params); - } - if (perPartitionCategorizationConfig != null) { - builder.field(PER_PARTITION_CATEGORIZATION.getPreferredName(), perPartitionCategorizationConfig); - } - if (latency != null) { - builder.field(LATENCY.getPreferredName(), latency.getStringRep()); - } - if (summaryCountFieldName != null) { - builder.field(SUMMARY_COUNT_FIELD_NAME.getPreferredName(), summaryCountFieldName); - } - builder.startArray(DETECTORS.getPreferredName()); - for (Detector detector : detectors) { - detector.toXContent(builder, params); - } - builder.endArray(); - builder.field(INFLUENCERS.getPreferredName(), influencers); - if (multivariateByFields != null) { - builder.field(MULTIVARIATE_BY_FIELDS.getPreferredName(), multivariateByFields); - } - if (modelPruneWindow != null) { - builder.field(MODEL_PRUNE_WINDOW.getPreferredName(), modelPruneWindow.getStringRep()); - } - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object object) { - if (this == object) { - return true; - } - - if (object == null || getClass() != object.getClass()) { - return false; - } - - AnalysisConfig that = (AnalysisConfig) object; - return Objects.equals(latency, that.latency) - && Objects.equals(bucketSpan, that.bucketSpan) - && Objects.equals(categorizationFieldName, that.categorizationFieldName) - && Objects.equals(categorizationFilters, that.categorizationFilters) - && Objects.equals(categorizationAnalyzerConfig, that.categorizationAnalyzerConfig) - && Objects.equals(perPartitionCategorizationConfig, that.perPartitionCategorizationConfig) - && Objects.equals(summaryCountFieldName, that.summaryCountFieldName) - && Objects.equals(detectors, that.detectors) - && Objects.equals(influencers, that.influencers) - && Objects.equals(multivariateByFields, that.multivariateByFields) - && Objects.equals(modelPruneWindow, that.modelPruneWindow); - } - - @Override - public int hashCode() { - return Objects.hash( - bucketSpan, - categorizationFieldName, - categorizationFilters, - categorizationAnalyzerConfig, - perPartitionCategorizationConfig, - latency, - summaryCountFieldName, - detectors, - influencers, - multivariateByFields, - modelPruneWindow - ); - } - - public static Builder builder(List detectors) { - return new Builder(detectors); - } - - public static class Builder { - - private List detectors; - private TimeValue bucketSpan; - private TimeValue latency; - private String categorizationFieldName; - private List categorizationFilters; - private CategorizationAnalyzerConfig categorizationAnalyzerConfig; - private PerPartitionCategorizationConfig perPartitionCategorizationConfig; - private String summaryCountFieldName; - private List influencers = new ArrayList<>(); - private Boolean multivariateByFields; - private TimeValue modelPruneWindow; - - public Builder(List detectors) { - setDetectors(detectors); - } - - public Builder(AnalysisConfig analysisConfig) { - this.detectors = new ArrayList<>(analysisConfig.detectors); - this.bucketSpan = analysisConfig.bucketSpan; - this.latency = analysisConfig.latency; - this.categorizationFieldName = analysisConfig.categorizationFieldName; - this.categorizationFilters = analysisConfig.categorizationFilters == null - ? 
null - : new ArrayList<>(analysisConfig.categorizationFilters); - this.categorizationAnalyzerConfig = analysisConfig.categorizationAnalyzerConfig; - this.perPartitionCategorizationConfig = analysisConfig.perPartitionCategorizationConfig; - this.summaryCountFieldName = analysisConfig.summaryCountFieldName; - this.influencers = new ArrayList<>(analysisConfig.influencers); - this.multivariateByFields = analysisConfig.multivariateByFields; - this.modelPruneWindow = analysisConfig.modelPruneWindow; - } - - public Builder setDetectors(List detectors) { - Objects.requireNonNull(detectors, "[" + DETECTORS.getPreferredName() + "] must not be null"); - // We always assign sequential IDs to the detectors that are correct for this analysis config - int detectorIndex = 0; - List sequentialIndexDetectors = new ArrayList<>(detectors.size()); - for (Detector origDetector : detectors) { - Detector.Builder builder = new Detector.Builder(origDetector); - builder.setDetectorIndex(detectorIndex++); - sequentialIndexDetectors.add(builder.build()); - } - this.detectors = sequentialIndexDetectors; - return this; - } - - public Builder setDetector(int detectorIndex, Detector detector) { - detectors.set(detectorIndex, detector); - return this; - } - - public Builder setBucketSpan(TimeValue bucketSpan) { - this.bucketSpan = bucketSpan; - return this; - } - - public Builder setLatency(TimeValue latency) { - this.latency = latency; - return this; - } - - public Builder setCategorizationFieldName(String categorizationFieldName) { - this.categorizationFieldName = categorizationFieldName; - return this; - } - - public Builder setCategorizationFilters(List categorizationFilters) { - this.categorizationFilters = categorizationFilters; - return this; - } - - public Builder setCategorizationAnalyzerConfig(CategorizationAnalyzerConfig categorizationAnalyzerConfig) { - this.categorizationAnalyzerConfig = categorizationAnalyzerConfig; - return this; - } - - public Builder setPerPartitionCategorizationConfig(PerPartitionCategorizationConfig perPartitionCategorizationConfig) { - this.perPartitionCategorizationConfig = perPartitionCategorizationConfig; - return this; - } - - public Builder setSummaryCountFieldName(String summaryCountFieldName) { - this.summaryCountFieldName = summaryCountFieldName; - return this; - } - - public Builder setInfluencers(List influencers) { - this.influencers = Objects.requireNonNull(influencers, INFLUENCERS.getPreferredName()); - return this; - } - - public Builder setMultivariateByFields(Boolean multivariateByFields) { - this.multivariateByFields = multivariateByFields; - return this; - } - - public Builder setModelPruneWindow(TimeValue modelPruneWindow) { - this.modelPruneWindow = modelPruneWindow; - return this; - } - - public AnalysisConfig build() { - - return new AnalysisConfig( - bucketSpan, - categorizationFieldName, - categorizationFilters, - categorizationAnalyzerConfig, - perPartitionCategorizationConfig, - latency, - summaryCountFieldName, - detectors, - influencers, - multivariateByFields, - modelPruneWindow - ); - } - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/AnalysisLimits.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/AnalysisLimits.java deleted file mode 100644 index f4172c843dd39..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/AnalysisLimits.java +++ /dev/null @@ -1,131 +0,0 @@ -/* - * Copyright Elasticsearch B.V. 
and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.job.config; - -import org.elasticsearch.common.unit.ByteSizeValue; -import org.elasticsearch.core.Nullable; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Objects; - -/** - * Analysis limits for autodetect. In particular, - * this is a collection of parameters that allow limiting - * the resources used by the job. - */ -public class AnalysisLimits implements ToXContentObject { - - /** - * Serialisation field names - */ - public static final ParseField MODEL_MEMORY_LIMIT = new ParseField("model_memory_limit"); - public static final ParseField CATEGORIZATION_EXAMPLES_LIMIT = new ParseField("categorization_examples_limit"); - - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "analysis_limits", - true, - a -> new AnalysisLimits((Long) a[0], (Long) a[1]) - ); - - static { - PARSER.declareField(ConstructingObjectParser.optionalConstructorArg(), p -> { - if (p.currentToken() == XContentParser.Token.VALUE_STRING) { - return ByteSizeValue.parseBytesSizeValue(p.text(), MODEL_MEMORY_LIMIT.getPreferredName()).getMb(); - } else if (p.currentToken() == XContentParser.Token.VALUE_NUMBER) { - return p.longValue(); - } - throw new IllegalArgumentException("Unsupported token [" + p.currentToken() + "]"); - }, MODEL_MEMORY_LIMIT, ObjectParser.ValueType.VALUE); - PARSER.declareLong(ConstructingObjectParser.optionalConstructorArg(), CATEGORIZATION_EXAMPLES_LIMIT); - } - - /** - * The model memory limit in MiBs. - * It is initialised to null, which implies that the server-side default will be used. - */ - private final Long modelMemoryLimit; - - /** - * It is initialised to null. - * A value of null will result in the server-side default being used. - */ - private final Long categorizationExamplesLimit; - - public AnalysisLimits(Long categorizationExamplesLimit) { - this(null, categorizationExamplesLimit); - } - - public AnalysisLimits(Long modelMemoryLimit, Long categorizationExamplesLimit) { - this.modelMemoryLimit = modelMemoryLimit; - this.categorizationExamplesLimit = categorizationExamplesLimit; - } - - /** - * Maximum size of the model in MB before the anomaly detector - * will drop new samples to prevent the model using any more - * memory. 
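As a minimal sketch of the two-argument constructor above (the 512 MiB and 4-example values are illustrative), note that a null argument leaves the corresponding setting at the server-side default:

    import org.elasticsearch.client.ml.job.config.AnalysisLimits;

    class AnalysisLimitsExample {
        static void example() {
            // Cap the model at 512 MiB and keep at most 4 examples per category.
            AnalysisLimits limits = new AnalysisLimits(512L, 4L);

            // A null argument keeps the server-side default for that setting.
            AnalysisLimits defaults = new AnalysisLimits(null, null);
        }
    }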
- * - * @return The set memory limit or null if not set - */ - @Nullable - public Long getModelMemoryLimit() { - return modelMemoryLimit; - } - - /** - * Gets the limit to the number of examples that are stored per category - * - * @return the limit or null if not set - */ - @Nullable - public Long getCategorizationExamplesLimit() { - return categorizationExamplesLimit; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - if (modelMemoryLimit != null) { - builder.field(MODEL_MEMORY_LIMIT.getPreferredName(), modelMemoryLimit + "mb"); - } - if (categorizationExamplesLimit != null) { - builder.field(CATEGORIZATION_EXAMPLES_LIMIT.getPreferredName(), categorizationExamplesLimit); - } - builder.endObject(); - return builder; - } - - /** - * Overridden equality test - */ - @Override - public boolean equals(Object other) { - if (this == other) { - return true; - } - - if (other instanceof AnalysisLimits == false) { - return false; - } - - AnalysisLimits that = (AnalysisLimits) other; - return Objects.equals(this.modelMemoryLimit, that.modelMemoryLimit) - && Objects.equals(this.categorizationExamplesLimit, that.categorizationExamplesLimit); - } - - @Override - public int hashCode() { - return Objects.hash(modelMemoryLimit, categorizationExamplesLimit); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/CategorizationAnalyzerConfig.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/CategorizationAnalyzerConfig.java deleted file mode 100644 index 8782bacc83f32..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/CategorizationAnalyzerConfig.java +++ /dev/null @@ -1,347 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.job.config; - -import org.elasticsearch.action.admin.indices.analyze.AnalyzeAction; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.rest.action.admin.indices.RestAnalyzeAction; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentFragment; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.Collections; -import java.util.List; -import java.util.Map; -import java.util.Objects; - -/** - * Configuration for the categorization analyzer. - * - * The syntax is a subset of what can be supplied to the {@linkplain RestAnalyzeAction _analyze endpoint}. - * To summarize, the first option is to specify the name of an out-of-the-box analyzer: - * - * "categorization_analyzer" : "standard" - * - * - * The second option is to specify a custom analyzer by combining the char_filters, tokenizer - * and token_filters fields. In turn, each of these can be specified as the name of an out-of-the-box - * one or as an object defining a custom one. 
For example: - * - * "char_filters" : [ - * "html_strip", - * { "type" : "pattern_replace", "pattern": "SQL: .*" } - * ], - * "tokenizer" : "thai", - * "token_filters" : [ - * "lowercase", - * { "type" : "pattern_replace", "pattern": "^[0-9].*" } - * ] - * - */ -public class CategorizationAnalyzerConfig implements ToXContentFragment { - - public static final ParseField CATEGORIZATION_ANALYZER = new ParseField("categorization_analyzer"); - private static final ParseField TOKENIZER = AnalyzeAction.Fields.TOKENIZER; - private static final ParseField TOKEN_FILTERS = AnalyzeAction.Fields.TOKEN_FILTERS; - private static final ParseField CHAR_FILTERS = AnalyzeAction.Fields.CHAR_FILTERS; - - /** - * This method is only used in the unit tests - in production code this config is always parsed as a fragment. - */ - static CategorizationAnalyzerConfig buildFromXContentObject(XContentParser parser) throws IOException { - - if (parser.nextToken() != XContentParser.Token.START_OBJECT) { - throw new IllegalArgumentException("Expected start object but got [" + parser.currentToken() + "]"); - } - if (parser.nextToken() != XContentParser.Token.FIELD_NAME) { - throw new IllegalArgumentException("Expected field name but got [" + parser.currentToken() + "]"); - } - parser.nextToken(); - CategorizationAnalyzerConfig categorizationAnalyzerConfig = buildFromXContentFragment(parser); - parser.nextToken(); - return categorizationAnalyzerConfig; - } - - /** - * Parse a categorization_analyzer configuration. A custom parser is needed due to the - * complexity of the format, with many elements able to be specified as either the name of a built-in - * element or an object containing a custom definition. - */ - static CategorizationAnalyzerConfig buildFromXContentFragment(XContentParser parser) throws IOException { - - CategorizationAnalyzerConfig.Builder builder = new CategorizationAnalyzerConfig.Builder(); - - XContentParser.Token token = parser.currentToken(); - if (token == XContentParser.Token.VALUE_STRING) { - builder.setAnalyzer(parser.text()); - } else if (token != XContentParser.Token.START_OBJECT) { - throw new IllegalArgumentException("[" + CATEGORIZATION_ANALYZER + "] should be analyzer's name or settings [" + token + "]"); - } else { - String currentFieldName = null; - while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { - if (token == XContentParser.Token.FIELD_NAME) { - currentFieldName = parser.currentName(); - } else if (CHAR_FILTERS.match(currentFieldName, parser.getDeprecationHandler()) - && token == XContentParser.Token.START_ARRAY) { - while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { - if (token == XContentParser.Token.VALUE_STRING) { - builder.addCharFilter(parser.text()); - } else if (token == XContentParser.Token.START_OBJECT) { - builder.addCharFilter(parser.map()); - } else { - throw new IllegalArgumentException( - "[" - + currentFieldName - + "] in [" - + CATEGORIZATION_ANALYZER - + "] array element should contain char_filter's name or settings [" - + token - + "]" - ); - } - } - } else if (TOKENIZER.match(currentFieldName, parser.getDeprecationHandler())) { - if (token == XContentParser.Token.VALUE_STRING) { - builder.setTokenizer(parser.text()); - } else if (token == XContentParser.Token.START_OBJECT) { - builder.setTokenizer(parser.map()); - } else { - throw new IllegalArgumentException( - "[" - + currentFieldName - + "] in [" - + CATEGORIZATION_ANALYZER - + "] should be tokenizer's name or settings [" - + token - + "]" - ); - } - } else if 
(TOKEN_FILTERS.match(currentFieldName, parser.getDeprecationHandler()) - && token == XContentParser.Token.START_ARRAY) { - while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { - if (token == XContentParser.Token.VALUE_STRING) { - builder.addTokenFilter(parser.text()); - } else if (token == XContentParser.Token.START_OBJECT) { - builder.addTokenFilter(parser.map()); - } else { - throw new IllegalArgumentException( - "[" - + currentFieldName - + "] in [" - + CATEGORIZATION_ANALYZER - + "] array element should contain token_filter's name or settings [" - + token - + "]" - ); - } - } - } - } - } - - return builder.build(); - } - - /** - * Simple store of either a name of a built-in analyzer element or a custom definition. - */ - public static final class NameOrDefinition implements ToXContentFragment { - - // Exactly one of these two members is not null - public final String name; - public final Settings definition; - - NameOrDefinition(String name) { - this.name = Objects.requireNonNull(name); - this.definition = null; - } - - NameOrDefinition(ParseField field, Map definition) { - this.name = null; - Objects.requireNonNull(definition); - try { - this.definition = Settings.builder().loadFromMap(definition).build(); - } catch (Exception e) { - throw new IllegalArgumentException("Failed to parse [" + definition + "] in [" + field.getPreferredName() + "]", e); - } - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - if (definition == null) { - builder.value(name); - } else { - builder.startObject(); - definition.toXContent(builder, params); - builder.endObject(); - } - return builder; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - NameOrDefinition that = (NameOrDefinition) o; - return Objects.equals(name, that.name) && Objects.equals(definition, that.definition); - } - - @Override - public int hashCode() { - return Objects.hash(name, definition); - } - - @Override - public String toString() { - if (definition == null) { - return name; - } else { - return definition.toDelimitedString(';'); - } - } - } - - private final String analyzer; - private final List charFilters; - private final NameOrDefinition tokenizer; - private final List tokenFilters; - - private CategorizationAnalyzerConfig( - String analyzer, - List charFilters, - NameOrDefinition tokenizer, - List tokenFilters - ) { - this.analyzer = analyzer; - this.charFilters = Collections.unmodifiableList(charFilters); - this.tokenizer = tokenizer; - this.tokenFilters = Collections.unmodifiableList(tokenFilters); - } - - public String getAnalyzer() { - return analyzer; - } - - public List getCharFilters() { - return charFilters; - } - - public NameOrDefinition getTokenizer() { - return tokenizer; - } - - public List getTokenFilters() { - return tokenFilters; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - if (analyzer != null) { - builder.field(CATEGORIZATION_ANALYZER.getPreferredName(), analyzer); - } else { - builder.startObject(CATEGORIZATION_ANALYZER.getPreferredName()); - if (charFilters.isEmpty() == false) { - builder.startArray(CHAR_FILTERS.getPreferredName()); - for (NameOrDefinition charFilter : charFilters) { - charFilter.toXContent(builder, params); - } - builder.endArray(); - } - if (tokenizer != null) { - builder.field(TOKENIZER.getPreferredName(), tokenizer); - } - if 
(tokenFilters.isEmpty() == false) { - builder.startArray(TOKEN_FILTERS.getPreferredName()); - for (NameOrDefinition tokenFilter : tokenFilters) { - tokenFilter.toXContent(builder, params); - } - builder.endArray(); - } - builder.endObject(); - } - return builder; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - CategorizationAnalyzerConfig that = (CategorizationAnalyzerConfig) o; - return Objects.equals(analyzer, that.analyzer) - && Objects.equals(charFilters, that.charFilters) - && Objects.equals(tokenizer, that.tokenizer) - && Objects.equals(tokenFilters, that.tokenFilters); - } - - @Override - public int hashCode() { - return Objects.hash(analyzer, charFilters, tokenizer, tokenFilters); - } - - public static class Builder { - - private String analyzer; - private List charFilters = new ArrayList<>(); - private NameOrDefinition tokenizer; - private List tokenFilters = new ArrayList<>(); - - public Builder() {} - - public Builder(CategorizationAnalyzerConfig categorizationAnalyzerConfig) { - this.analyzer = categorizationAnalyzerConfig.analyzer; - this.charFilters = new ArrayList<>(categorizationAnalyzerConfig.charFilters); - this.tokenizer = categorizationAnalyzerConfig.tokenizer; - this.tokenFilters = new ArrayList<>(categorizationAnalyzerConfig.tokenFilters); - } - - public Builder setAnalyzer(String analyzer) { - this.analyzer = analyzer; - return this; - } - - public Builder addCharFilter(String charFilter) { - this.charFilters.add(new NameOrDefinition(charFilter)); - return this; - } - - public Builder addCharFilter(Map charFilter) { - this.charFilters.add(new NameOrDefinition(CHAR_FILTERS, charFilter)); - return this; - } - - public Builder setTokenizer(String tokenizer) { - this.tokenizer = new NameOrDefinition(tokenizer); - return this; - } - - public Builder setTokenizer(Map tokenizer) { - this.tokenizer = new NameOrDefinition(TOKENIZER, tokenizer); - return this; - } - - public Builder addTokenFilter(String tokenFilter) { - this.tokenFilters.add(new NameOrDefinition(tokenFilter)); - return this; - } - - public Builder addTokenFilter(Map tokenFilter) { - this.tokenFilters.add(new NameOrDefinition(TOKEN_FILTERS, tokenFilter)); - return this; - } - - /** - * Create a config - */ - public CategorizationAnalyzerConfig build() { - return new CategorizationAnalyzerConfig(analyzer, charFilters, tokenizer, tokenFilters); - } - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/DataDescription.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/DataDescription.java deleted file mode 100644 index d460cf9bd81a4..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/DataDescription.java +++ /dev/null @@ -1,176 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
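A hedged sketch of the CategorizationAnalyzerConfig.Builder above, expressing the custom-analyzer JSON example from that class's Javadoc (component names and patterns copied from that example):

    import java.util.Map;

    import org.elasticsearch.client.ml.job.config.CategorizationAnalyzerConfig;

    class CategorizationAnalyzerExample {
        static CategorizationAnalyzerConfig example() {
            return new CategorizationAnalyzerConfig.Builder()
                // Built-in components can be referenced by name ...
                .addCharFilter("html_strip")
                // ... or defined inline as a settings map.
                .addCharFilter(Map.<String, Object>of("type", "pattern_replace", "pattern", "SQL: .*"))
                .setTokenizer("thai")
                .addTokenFilter("lowercase")
                .addTokenFilter(Map.<String, Object>of("type", "pattern_replace", "pattern", "^[0-9].*"))
                .build();
        }
    }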
- */ -package org.elasticsearch.client.ml.job.config; - -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Locale; -import java.util.Objects; - -/** - * Describes the format of the data used in the job and how it should - * be interpreted by the ML job. - *
- * {@link #getTimeField()} is the name of the field containing the timestamp and - * {@link #getTimeFormat()} is the format code for the date string in as described by - * {@link java.time.format.DateTimeFormatter}. - */ -public class DataDescription implements ToXContentObject { - /** - * Enum of the acceptable data formats. - */ - public enum DataFormat { - XCONTENT; - - /** - * Case-insensitive from string method. - * Works with either XCONTENT, XContent, etc. - * - * @param value String representation - * @return The data format - */ - public static DataFormat forString(String value) { - return DataFormat.valueOf(value.toUpperCase(Locale.ROOT)); - } - - @Override - public String toString() { - return name().toLowerCase(Locale.ROOT); - } - } - - private static final ParseField DATA_DESCRIPTION_FIELD = new ParseField("data_description"); - private static final ParseField TIME_FIELD_NAME_FIELD = new ParseField("time_field"); - private static final ParseField TIME_FORMAT_FIELD = new ParseField("time_format"); - - /** - * Special time format string for epoch times (seconds) - */ - public static final String EPOCH = "epoch"; - - /** - * Special time format string for epoch times (milli-seconds) - */ - public static final String EPOCH_MS = "epoch_ms"; - - /** - * By default autodetect expects the timestamp in a field with this name - */ - public static final String DEFAULT_TIME_FIELD = "time"; - - private final String timeFieldName; - private final String timeFormat; - - public static final ObjectParser PARSER = new ObjectParser<>( - DATA_DESCRIPTION_FIELD.getPreferredName(), - true, - Builder::new - ); - - static { - PARSER.declareString(Builder::setTimeField, TIME_FIELD_NAME_FIELD); - PARSER.declareString(Builder::setTimeFormat, TIME_FORMAT_FIELD); - } - - public DataDescription(String timeFieldName, String timeFormat) { - this.timeFieldName = timeFieldName; - this.timeFormat = timeFormat; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(TIME_FIELD_NAME_FIELD.getPreferredName(), timeFieldName); - builder.field(TIME_FORMAT_FIELD.getPreferredName(), timeFormat); - builder.endObject(); - return builder; - } - - /** - * The format of the data to be processed. - * Always {@link DataDescription.DataFormat#XCONTENT} - * - * @return The data format - */ - public DataFormat getFormat() { - return DataFormat.XCONTENT; - } - - /** - * The name of the field containing the timestamp - * - * @return A String if set or null - */ - public String getTimeField() { - return timeFieldName; - } - - /** - * Either {@value #EPOCH}, {@value #EPOCH_MS} or a SimpleDateTime format string. - * If not set (is null or an empty string) or set to - * {@value #EPOCH_MS} (the default) then the date is assumed to be in - * milliseconds from the epoch. 
- * - * @return A String if set or null - */ - public String getTimeFormat() { - return timeFormat; - } - - /** - * Overridden equality test - */ - @Override - public boolean equals(Object other) { - if (this == other) { - return true; - } - - if (other instanceof DataDescription == false) { - return false; - } - - DataDescription that = (DataDescription) other; - - return Objects.equals(this.timeFieldName, that.timeFieldName) && Objects.equals(this.timeFormat, that.timeFormat); - } - - @Override - public int hashCode() { - return Objects.hash(timeFieldName, timeFormat); - } - - public static class Builder { - - private String timeFieldName = DEFAULT_TIME_FIELD; - private String timeFormat = EPOCH_MS; - - public Builder setFormat(DataFormat format) { - Objects.requireNonNull(format); - return this; - } - - public Builder setTimeField(String fieldName) { - timeFieldName = Objects.requireNonNull(fieldName); - return this; - } - - public Builder setTimeFormat(String format) { - timeFormat = Objects.requireNonNull(format); - return this; - } - - public DataDescription build() { - return new DataDescription(timeFieldName, timeFormat); - } - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/DefaultDetectorDescription.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/DefaultDetectorDescription.java deleted file mode 100644 index 66ea72f928d54..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/DefaultDetectorDescription.java +++ /dev/null @@ -1,83 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
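A minimal sketch of the DataDescription constructor above ("timestamp" is an illustrative field name):

    import org.elasticsearch.client.ml.job.config.DataDescription;

    class DataDescriptionExample {
        static DataDescription example() {
            // Timestamps arrive in an illustrative "timestamp" field as epoch milliseconds.
            return new DataDescription("timestamp", DataDescription.EPOCH_MS);
        }
    }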
- */ -package org.elasticsearch.client.ml.job.config; - -import org.elasticsearch.common.Strings; - -public final class DefaultDetectorDescription { - private static final String BY_TOKEN = " by "; - private static final String OVER_TOKEN = " over "; - - private static final String USE_NULL_OPTION = " usenull="; - private static final String PARTITION_FIELD_OPTION = " partitionfield="; - private static final String EXCLUDE_FREQUENT_OPTION = " excludefrequent="; - - private DefaultDetectorDescription() {} - - /** - * Returns the default description for the given {@code detector} - * - * @param detector the {@code Detector} for which a default description is requested - * @return the default description - */ - public static String of(Detector detector) { - StringBuilder sb = new StringBuilder(); - appendOn(detector, sb); - return sb.toString(); - } - - /** - * Appends to the given {@code StringBuilder} the default description - * for the given {@code detector} - * - * @param detector the {@code Detector} for which a default description is requested - * @param sb the {@code StringBuilder} to append to - */ - public static void appendOn(Detector detector, StringBuilder sb) { - if (isNotNullOrEmpty(detector.getFunction().getFullName())) { - sb.append(detector.getFunction()); - if (isNotNullOrEmpty(detector.getFieldName())) { - sb.append('(').append(quoteField(detector.getFieldName())).append(')'); - } - } else if (isNotNullOrEmpty(detector.getFieldName())) { - sb.append(quoteField(detector.getFieldName())); - } - - if (isNotNullOrEmpty(detector.getByFieldName())) { - sb.append(BY_TOKEN).append(quoteField(detector.getByFieldName())); - } - - if (isNotNullOrEmpty(detector.getOverFieldName())) { - sb.append(OVER_TOKEN).append(quoteField(detector.getOverFieldName())); - } - - if (detector.isUseNull()) { - sb.append(USE_NULL_OPTION).append(detector.isUseNull()); - } - - if (isNotNullOrEmpty(detector.getPartitionFieldName())) { - sb.append(PARTITION_FIELD_OPTION).append(quoteField(detector.getPartitionFieldName())); - } - - if (detector.getExcludeFrequent() != null) { - sb.append(EXCLUDE_FREQUENT_OPTION).append(detector.getExcludeFrequent()); - } - } - - private static String quoteField(String field) { - if (field.matches("\\w*")) { - return field; - } else { - return "\"" + field.replace("\\", "\\\\").replace("\"", "\\\"") + "\""; - } - } - - private static boolean isNotNullOrEmpty(String arg) { - return Strings.isNullOrEmpty(arg) == false; - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/DetectionRule.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/DetectionRule.java deleted file mode 100644 index e23cad0c024aa..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/DetectionRule.java +++ /dev/null @@ -1,142 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
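To make the token handling in DefaultDetectorDescription above concrete, a hedged sketch (field names illustrative): a detector with a function, an analysis field, and a 'by' field yields the description shown in the comment.

    import org.elasticsearch.client.ml.job.config.DefaultDetectorDescription;
    import org.elasticsearch.client.ml.job.config.Detector;

    class DetectorDescriptionExample {
        static String example() {
            Detector detector = new Detector.Builder("mean", "responsetime")
                .setByFieldName("airline")
                .build();
            // Plain word-character field names need no quoting,
            // so this yields: mean(responsetime) by airline
            return DefaultDetectorDescription.of(detector);
        }
    }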
- */ -package org.elasticsearch.client.ml.job.config; - -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Arrays; -import java.util.Collections; -import java.util.EnumSet; -import java.util.List; -import java.util.Objects; - -public class DetectionRule implements ToXContentObject { - - public static final ParseField DETECTION_RULE_FIELD = new ParseField("detection_rule"); - public static final ParseField ACTIONS_FIELD = new ParseField("actions"); - public static final ParseField SCOPE_FIELD = new ParseField("scope"); - public static final ParseField CONDITIONS_FIELD = new ParseField("conditions"); - - public static final ObjectParser PARSER = new ObjectParser<>( - DETECTION_RULE_FIELD.getPreferredName(), - true, - Builder::new - ); - - static { - PARSER.declareStringArray(Builder::setActions, ACTIONS_FIELD); - PARSER.declareObject(Builder::setScope, RuleScope.parser(), SCOPE_FIELD); - PARSER.declareObjectArray(Builder::setConditions, RuleCondition.PARSER, CONDITIONS_FIELD); - } - - private final EnumSet actions; - private final RuleScope scope; - private final List conditions; - - private DetectionRule(EnumSet actions, RuleScope scope, List conditions) { - this.actions = Objects.requireNonNull(actions); - this.scope = Objects.requireNonNull(scope); - this.conditions = Collections.unmodifiableList(conditions); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(ACTIONS_FIELD.getPreferredName(), actions); - if (scope.isEmpty() == false) { - builder.field(SCOPE_FIELD.getPreferredName(), scope); - } - if (conditions.isEmpty() == false) { - builder.field(CONDITIONS_FIELD.getPreferredName(), conditions); - } - builder.endObject(); - return builder; - } - - public EnumSet getActions() { - return actions; - } - - public RuleScope getScope() { - return scope; - } - - public List getConditions() { - return conditions; - } - - @Override - public boolean equals(Object obj) { - if (this == obj) { - return true; - } - - if (obj instanceof DetectionRule == false) { - return false; - } - - DetectionRule other = (DetectionRule) obj; - return Objects.equals(actions, other.actions) && Objects.equals(scope, other.scope) && Objects.equals(conditions, other.conditions); - } - - @Override - public int hashCode() { - return Objects.hash(actions, scope, conditions); - } - - public static class Builder { - private EnumSet actions = EnumSet.of(RuleAction.SKIP_RESULT); - private RuleScope scope = new RuleScope(); - private List conditions = Collections.emptyList(); - - public Builder(RuleScope.Builder scope) { - this.scope = scope.build(); - } - - public Builder(List conditions) { - this.conditions = Objects.requireNonNull(conditions); - } - - Builder() {} - - public Builder setActions(List actions) { - this.actions.clear(); - actions.stream().map(RuleAction::fromString).forEach(this.actions::add); - return this; - } - - public Builder setActions(EnumSet actions) { - this.actions = Objects.requireNonNull(actions, ACTIONS_FIELD.getPreferredName()); - return this; - } - - public Builder setActions(RuleAction... 
actions) { - this.actions.clear(); - Arrays.stream(actions).forEach(this.actions::add); - return this; - } - - public Builder setScope(RuleScope scope) { - this.scope = Objects.requireNonNull(scope); - return this; - } - - public Builder setConditions(List conditions) { - this.conditions = Objects.requireNonNull(conditions); - return this; - } - - public DetectionRule build() { - return new DetectionRule(actions, scope, conditions); - } - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/Detector.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/Detector.java deleted file mode 100644 index f20d67a238008..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/Detector.java +++ /dev/null @@ -1,377 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.job.config; - -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.Collections; -import java.util.List; -import java.util.Locale; -import java.util.Objects; - -/** - * Defines the fields and functions used in the analysis. A combination of field_name, - * by_field_name and over_field_name can be used depending on the specific - * function chosen. For more information see the - * create anomaly detection - * jobs API and detector functions. - */ -public class Detector implements ToXContentObject { - - public enum ExcludeFrequent { - ALL, - NONE, - BY, - OVER; - - /** - * Case-insensitive from string method. - * Works with either ALL, All, etc. 
- * - * @param value String representation - * @return The data format - */ - public static ExcludeFrequent forString(String value) { - return valueOf(value.toUpperCase(Locale.ROOT)); - } - - @Override - public String toString() { - return name().toLowerCase(Locale.ROOT); - } - } - - public static final ParseField DETECTOR_DESCRIPTION_FIELD = new ParseField("detector_description"); - public static final ParseField FUNCTION_FIELD = new ParseField("function"); - public static final ParseField FIELD_NAME_FIELD = new ParseField("field_name"); - public static final ParseField BY_FIELD_NAME_FIELD = new ParseField("by_field_name"); - public static final ParseField OVER_FIELD_NAME_FIELD = new ParseField("over_field_name"); - public static final ParseField PARTITION_FIELD_NAME_FIELD = new ParseField("partition_field_name"); - public static final ParseField USE_NULL_FIELD = new ParseField("use_null"); - public static final ParseField EXCLUDE_FREQUENT_FIELD = new ParseField("exclude_frequent"); - public static final ParseField CUSTOM_RULES_FIELD = new ParseField("custom_rules"); - public static final ParseField DETECTOR_INDEX = new ParseField("detector_index"); - - public static final ObjectParser PARSER = new ObjectParser<>("detector", true, Builder::new); - - static { - PARSER.declareString(Builder::setDetectorDescription, DETECTOR_DESCRIPTION_FIELD); - PARSER.declareString(Builder::setFunction, FUNCTION_FIELD); - PARSER.declareString(Builder::setFieldName, FIELD_NAME_FIELD); - PARSER.declareString(Builder::setByFieldName, BY_FIELD_NAME_FIELD); - PARSER.declareString(Builder::setOverFieldName, OVER_FIELD_NAME_FIELD); - PARSER.declareString(Builder::setPartitionFieldName, PARTITION_FIELD_NAME_FIELD); - PARSER.declareBoolean(Builder::setUseNull, USE_NULL_FIELD); - PARSER.declareString(Builder::setExcludeFrequent, ExcludeFrequent::forString, EXCLUDE_FREQUENT_FIELD); - PARSER.declareObjectArray(Builder::setRules, (p, c) -> DetectionRule.PARSER.apply(p, c).build(), CUSTOM_RULES_FIELD); - PARSER.declareInt(Builder::setDetectorIndex, DETECTOR_INDEX); - } - - private final String detectorDescription; - private final DetectorFunction function; - private final String fieldName; - private final String byFieldName; - private final String overFieldName; - private final String partitionFieldName; - private final boolean useNull; - private final ExcludeFrequent excludeFrequent; - private final List rules; - private final int detectorIndex; - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(DETECTOR_DESCRIPTION_FIELD.getPreferredName(), detectorDescription); - builder.field(FUNCTION_FIELD.getPreferredName(), function); - if (fieldName != null) { - builder.field(FIELD_NAME_FIELD.getPreferredName(), fieldName); - } - if (byFieldName != null) { - builder.field(BY_FIELD_NAME_FIELD.getPreferredName(), byFieldName); - } - if (overFieldName != null) { - builder.field(OVER_FIELD_NAME_FIELD.getPreferredName(), overFieldName); - } - if (partitionFieldName != null) { - builder.field(PARTITION_FIELD_NAME_FIELD.getPreferredName(), partitionFieldName); - } - if (useNull) { - builder.field(USE_NULL_FIELD.getPreferredName(), useNull); - } - if (excludeFrequent != null) { - builder.field(EXCLUDE_FREQUENT_FIELD.getPreferredName(), excludeFrequent); - } - if (rules.isEmpty() == false) { - builder.field(CUSTOM_RULES_FIELD.getPreferredName(), rules); - } - // negative means unknown - if (detectorIndex >= 0) { - 
builder.field(DETECTOR_INDEX.getPreferredName(), detectorIndex); - } - builder.endObject(); - return builder; - } - - private Detector( - String detectorDescription, - DetectorFunction function, - String fieldName, - String byFieldName, - String overFieldName, - String partitionFieldName, - boolean useNull, - ExcludeFrequent excludeFrequent, - List rules, - int detectorIndex - ) { - this.function = function; - this.fieldName = fieldName; - this.byFieldName = byFieldName; - this.overFieldName = overFieldName; - this.partitionFieldName = partitionFieldName; - this.useNull = useNull; - this.excludeFrequent = excludeFrequent; - this.rules = Collections.unmodifiableList(rules); - this.detectorDescription = detectorDescription != null ? detectorDescription : DefaultDetectorDescription.of(this); - this.detectorIndex = detectorIndex; - } - - public String getDetectorDescription() { - return detectorDescription; - } - - /** - * The analysis function used e.g. count, rare, min etc. - * - * @return The function or null if not set - */ - public DetectorFunction getFunction() { - return function; - } - - /** - * The Analysis field - * - * @return The field to analyse - */ - public String getFieldName() { - return fieldName; - } - - /** - * The 'by' field or null if not set. - * - * @return The 'by' field - */ - public String getByFieldName() { - return byFieldName; - } - - /** - * The 'over' field or null if not set. - * - * @return The 'over' field - */ - public String getOverFieldName() { - return overFieldName; - } - - /** - * Segments the analysis along another field to have completely - * independent baselines for each instance of partitionfield - * - * @return The Partition Field - */ - public String getPartitionFieldName() { - return partitionFieldName; - } - - /** - * Where there isn't a value for the 'by' or 'over' field should a new - * series be used as the 'null' series. 
- * - * @return true if the 'null' series should be created - */ - public boolean isUseNull() { - return useNull; - } - - /** - * Excludes frequently-occurring metrics from the analysis; - * can apply to 'by' field, 'over' field, or both - * - * @return the value that the user set - */ - public ExcludeFrequent getExcludeFrequent() { - return excludeFrequent; - } - - public List getRules() { - return rules; - } - - /** - * @return the detector index or a negative number if unknown - */ - public int getDetectorIndex() { - return detectorIndex; - } - - @Override - public boolean equals(Object other) { - if (this == other) { - return true; - } - - if (other instanceof Detector == false) { - return false; - } - - Detector that = (Detector) other; - - return Objects.equals(this.detectorDescription, that.detectorDescription) - && Objects.equals(this.function, that.function) - && Objects.equals(this.fieldName, that.fieldName) - && Objects.equals(this.byFieldName, that.byFieldName) - && Objects.equals(this.overFieldName, that.overFieldName) - && Objects.equals(this.partitionFieldName, that.partitionFieldName) - && Objects.equals(this.useNull, that.useNull) - && Objects.equals(this.excludeFrequent, that.excludeFrequent) - && Objects.equals(this.rules, that.rules) - && this.detectorIndex == that.detectorIndex; - } - - @Override - public int hashCode() { - return Objects.hash( - detectorDescription, - function, - fieldName, - byFieldName, - overFieldName, - partitionFieldName, - useNull, - excludeFrequent, - rules, - detectorIndex - ); - } - - public static Builder builder() { - return new Builder(); - } - - public static class Builder { - - private String detectorDescription; - private DetectorFunction function; - private String fieldName; - private String byFieldName; - private String overFieldName; - private String partitionFieldName; - private boolean useNull = false; - private ExcludeFrequent excludeFrequent; - private List rules = Collections.emptyList(); - // negative means unknown - private int detectorIndex = -1; - - public Builder() {} - - public Builder(Detector detector) { - detectorDescription = detector.detectorDescription; - function = detector.function; - fieldName = detector.fieldName; - byFieldName = detector.byFieldName; - overFieldName = detector.overFieldName; - partitionFieldName = detector.partitionFieldName; - useNull = detector.useNull; - excludeFrequent = detector.excludeFrequent; - rules = new ArrayList<>(detector.rules); - detectorIndex = detector.detectorIndex; - } - - public Builder(String function, String fieldName) { - this(DetectorFunction.fromString(function), fieldName); - } - - public Builder(DetectorFunction function, String fieldName) { - this.function = function; - this.fieldName = fieldName; - } - - public Builder setDetectorDescription(String detectorDescription) { - this.detectorDescription = detectorDescription; - return this; - } - - public Builder setFunction(String function) { - this.function = DetectorFunction.fromString(function); - return this; - } - - public Builder setFieldName(String fieldName) { - this.fieldName = fieldName; - return this; - } - - public Builder setByFieldName(String byFieldName) { - this.byFieldName = byFieldName; - return this; - } - - public Builder setOverFieldName(String overFieldName) { - this.overFieldName = overFieldName; - return this; - } - - public Builder setPartitionFieldName(String partitionFieldName) { - this.partitionFieldName = partitionFieldName; - return this; - } - - public Builder setUseNull(boolean useNull) { - 
this.useNull = useNull; - return this; - } - - public Builder setExcludeFrequent(ExcludeFrequent excludeFrequent) { - this.excludeFrequent = excludeFrequent; - return this; - } - - public Builder setRules(List rules) { - this.rules = rules; - return this; - } - - public Builder setDetectorIndex(int detectorIndex) { - this.detectorIndex = detectorIndex; - return this; - } - - public Detector build() { - return new Detector( - detectorDescription, - function, - fieldName, - byFieldName, - overFieldName, - partitionFieldName, - useNull, - excludeFrequent, - rules, - detectorIndex - ); - } - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/DetectorFunction.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/DetectorFunction.java deleted file mode 100644 index c33ffffd34f1a..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/DetectorFunction.java +++ /dev/null @@ -1,84 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.job.config; - -import java.util.Arrays; -import java.util.Collections; -import java.util.Locale; -import java.util.Set; -import java.util.stream.Collectors; - -public enum DetectorFunction { - - COUNT, - LOW_COUNT, - HIGH_COUNT, - NON_ZERO_COUNT("nzc"), - LOW_NON_ZERO_COUNT("low_nzc"), - HIGH_NON_ZERO_COUNT("high_nzc"), - DISTINCT_COUNT("dc"), - LOW_DISTINCT_COUNT("low_dc"), - HIGH_DISTINCT_COUNT("high_dc"), - RARE, - FREQ_RARE, - INFO_CONTENT, - LOW_INFO_CONTENT, - HIGH_INFO_CONTENT, - METRIC, - MEAN, - LOW_MEAN, - HIGH_MEAN, - AVG, - LOW_AVG, - HIGH_AVG, - MEDIAN, - LOW_MEDIAN, - HIGH_MEDIAN, - MIN, - MAX, - SUM, - LOW_SUM, - HIGH_SUM, - NON_NULL_SUM, - LOW_NON_NULL_SUM, - HIGH_NON_NULL_SUM, - VARP, - LOW_VARP, - HIGH_VARP, - TIME_OF_DAY, - TIME_OF_WEEK, - LAT_LONG; - - private Set shortcuts; - - DetectorFunction() { - shortcuts = Collections.emptySet(); - } - - DetectorFunction(String... shortcuts) { - this.shortcuts = Arrays.stream(shortcuts).collect(Collectors.toSet()); - } - - public String getFullName() { - return name().toLowerCase(Locale.ROOT); - } - - @Override - public String toString() { - return getFullName(); - } - - public static DetectorFunction fromString(String op) { - for (DetectorFunction function : values()) { - if (function.getFullName().equals(op) || function.shortcuts.contains(op)) { - return function; - } - } - throw new IllegalArgumentException("Unknown detector function [" + op + "]"); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/FilterRef.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/FilterRef.java deleted file mode 100644 index e9c0fbece98c3..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/FilterRef.java +++ /dev/null @@ -1,93 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. 
Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.job.config; - -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Locale; -import java.util.Objects; - -public class FilterRef implements ToXContentObject { - - public static final ParseField FILTER_REF_FIELD = new ParseField("filter_ref"); - public static final ParseField FILTER_ID = new ParseField("filter_id"); - public static final ParseField FILTER_TYPE = new ParseField("filter_type"); - - public enum FilterType { - INCLUDE, - EXCLUDE; - - public static FilterType fromString(String value) { - return valueOf(value.toUpperCase(Locale.ROOT)); - } - - @Override - public String toString() { - return name().toLowerCase(Locale.ROOT); - } - } - - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - FILTER_REF_FIELD.getPreferredName(), - true, - a -> new FilterRef((String) a[0], (FilterType) a[1]) - ); - - static { - PARSER.declareString(ConstructingObjectParser.constructorArg(), FILTER_ID); - PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), FilterType::fromString, FILTER_TYPE); - } - - private final String filterId; - private final FilterType filterType; - - public FilterRef(String filterId, FilterType filterType) { - this.filterId = Objects.requireNonNull(filterId); - this.filterType = filterType == null ? FilterType.INCLUDE : filterType; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(FILTER_ID.getPreferredName(), filterId); - builder.field(FILTER_TYPE.getPreferredName(), filterType); - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object obj) { - if (this == obj) { - return true; - } - - if (obj instanceof FilterRef == false) { - return false; - } - - FilterRef other = (FilterRef) obj; - return Objects.equals(filterId, other.filterId) && Objects.equals(filterType, other.filterType); - } - - @Override - public int hashCode() { - return Objects.hash(filterId, filterType); - } - - public String getFilterId() { - return filterId; - } - - public FilterType getFilterType() { - return filterType; - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/Job.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/Job.java deleted file mode 100644 index cfea39be07735..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/Job.java +++ /dev/null @@ -1,627 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
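A minimal sketch of the FilterRef constructor above ("safe_domains" is an illustrative filter id); as the constructor shows, a null type falls back to INCLUDE:

    import org.elasticsearch.client.ml.job.config.FilterRef;

    class FilterRefExample {
        static void example() {
            // Reference an illustrative "safe_domains" filter as an include list.
            FilterRef include = new FilterRef("safe_domains", FilterRef.FilterType.INCLUDE);

            // A null type defaults to INCLUDE.
            FilterRef defaulted = new FilterRef("safe_domains", null);
        }
    }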
- */ -package org.elasticsearch.client.ml.job.config; - -import org.elasticsearch.client.common.TimeUtil; -import org.elasticsearch.common.Strings; -import org.elasticsearch.core.TimeValue; -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ObjectParser.ValueType; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.Collections; -import java.util.Date; -import java.util.LinkedHashMap; -import java.util.List; -import java.util.Map; -import java.util.Objects; - -/** - * This class represents a configured and created Job. The creation time is set - * to the time the object was constructed and the finished time and last - * data time fields are {@code null} until the job has seen some data or it is - * finished respectively. - */ -public class Job implements ToXContentObject { - - public static final String ANOMALY_DETECTOR_JOB_TYPE = "anomaly_detector"; - - /* - * Field names used in serialization - */ - public static final ParseField ID = new ParseField("job_id"); - public static final ParseField JOB_TYPE = new ParseField("job_type"); - public static final ParseField GROUPS = new ParseField("groups"); - public static final ParseField ANALYSIS_CONFIG = AnalysisConfig.ANALYSIS_CONFIG; - public static final ParseField ANALYSIS_LIMITS = new ParseField("analysis_limits"); - public static final ParseField CREATE_TIME = new ParseField("create_time"); - public static final ParseField CUSTOM_SETTINGS = new ParseField("custom_settings"); - public static final ParseField DATA_DESCRIPTION = new ParseField("data_description"); - public static final ParseField DESCRIPTION = new ParseField("description"); - public static final ParseField FINISHED_TIME = new ParseField("finished_time"); - public static final ParseField MODEL_PLOT_CONFIG = new ParseField("model_plot_config"); - public static final ParseField RENORMALIZATION_WINDOW_DAYS = new ParseField("renormalization_window_days"); - public static final ParseField BACKGROUND_PERSIST_INTERVAL = new ParseField("background_persist_interval"); - public static final ParseField MODEL_SNAPSHOT_RETENTION_DAYS = new ParseField("model_snapshot_retention_days"); - public static final ParseField DAILY_MODEL_SNAPSHOT_RETENTION_AFTER_DAYS = new ParseField("daily_model_snapshot_retention_after_days"); - public static final ParseField RESULTS_RETENTION_DAYS = new ParseField("results_retention_days"); - public static final ParseField MODEL_SNAPSHOT_ID = new ParseField("model_snapshot_id"); - public static final ParseField RESULTS_INDEX_NAME = new ParseField("results_index_name"); - public static final ParseField DELETING = new ParseField("deleting"); - public static final ParseField ALLOW_LAZY_OPEN = new ParseField("allow_lazy_open"); - - public static final ObjectParser PARSER = new ObjectParser<>("job_details", true, Builder::new); - - static { - PARSER.declareString(Builder::setId, ID); - PARSER.declareString(Builder::setJobType, JOB_TYPE); - PARSER.declareStringArray(Builder::setGroups, GROUPS); - PARSER.declareStringOrNull(Builder::setDescription, DESCRIPTION); - PARSER.declareField( - Builder::setCreateTime, - (p) -> TimeUtil.parseTimeField(p, CREATE_TIME.getPreferredName()), - CREATE_TIME, - ValueType.VALUE - ); - PARSER.declareField( - Builder::setFinishedTime, - (p) -> TimeUtil.parseTimeField(p, FINISHED_TIME.getPreferredName()), - FINISHED_TIME, - 
ValueType.VALUE - ); - PARSER.declareObject(Builder::setAnalysisConfig, AnalysisConfig.PARSER, ANALYSIS_CONFIG); - PARSER.declareObject(Builder::setAnalysisLimits, AnalysisLimits.PARSER, ANALYSIS_LIMITS); - PARSER.declareObject(Builder::setDataDescription, DataDescription.PARSER, DATA_DESCRIPTION); - PARSER.declareObject(Builder::setModelPlotConfig, ModelPlotConfig.PARSER, MODEL_PLOT_CONFIG); - PARSER.declareLong(Builder::setRenormalizationWindowDays, RENORMALIZATION_WINDOW_DAYS); - PARSER.declareString( - (builder, val) -> builder.setBackgroundPersistInterval( - TimeValue.parseTimeValue(val, BACKGROUND_PERSIST_INTERVAL.getPreferredName()) - ), - BACKGROUND_PERSIST_INTERVAL - ); - PARSER.declareLong(Builder::setResultsRetentionDays, RESULTS_RETENTION_DAYS); - PARSER.declareLong(Builder::setModelSnapshotRetentionDays, MODEL_SNAPSHOT_RETENTION_DAYS); - PARSER.declareLong(Builder::setDailyModelSnapshotRetentionAfterDays, DAILY_MODEL_SNAPSHOT_RETENTION_AFTER_DAYS); - PARSER.declareField(Builder::setCustomSettings, (p, c) -> p.mapOrdered(), CUSTOM_SETTINGS, ValueType.OBJECT); - PARSER.declareStringOrNull(Builder::setModelSnapshotId, MODEL_SNAPSHOT_ID); - PARSER.declareString(Builder::setResultsIndexName, RESULTS_INDEX_NAME); - PARSER.declareBoolean(Builder::setDeleting, DELETING); - PARSER.declareBoolean(Builder::setAllowLazyOpen, ALLOW_LAZY_OPEN); - } - - private final String jobId; - private final String jobType; - - private final List groups; - private final String description; - private final Date createTime; - private final Date finishedTime; - private final AnalysisConfig analysisConfig; - private final AnalysisLimits analysisLimits; - private final DataDescription dataDescription; - private final ModelPlotConfig modelPlotConfig; - private final Long renormalizationWindowDays; - private final TimeValue backgroundPersistInterval; - private final Long modelSnapshotRetentionDays; - private final Long dailyModelSnapshotRetentionAfterDays; - private final Long resultsRetentionDays; - private final Map customSettings; - private final String modelSnapshotId; - private final String resultsIndexName; - private final Boolean deleting; - private final Boolean allowLazyOpen; - - private Job( - String jobId, - String jobType, - List groups, - String description, - Date createTime, - Date finishedTime, - AnalysisConfig analysisConfig, - AnalysisLimits analysisLimits, - DataDescription dataDescription, - ModelPlotConfig modelPlotConfig, - Long renormalizationWindowDays, - TimeValue backgroundPersistInterval, - Long modelSnapshotRetentionDays, - Long dailyModelSnapshotRetentionAfterDays, - Long resultsRetentionDays, - Map customSettings, - String modelSnapshotId, - String resultsIndexName, - Boolean deleting, - Boolean allowLazyOpen - ) { - - this.jobId = jobId; - this.jobType = jobType; - this.groups = Collections.unmodifiableList(groups); - this.description = description; - this.createTime = createTime; - this.finishedTime = finishedTime; - this.analysisConfig = analysisConfig; - this.analysisLimits = analysisLimits; - this.dataDescription = dataDescription; - this.modelPlotConfig = modelPlotConfig; - this.renormalizationWindowDays = renormalizationWindowDays; - this.backgroundPersistInterval = backgroundPersistInterval; - this.modelSnapshotRetentionDays = modelSnapshotRetentionDays; - this.dailyModelSnapshotRetentionAfterDays = dailyModelSnapshotRetentionAfterDays; - this.resultsRetentionDays = resultsRetentionDays; - this.customSettings = customSettings == null ? 
null : Collections.unmodifiableMap(customSettings); - this.modelSnapshotId = modelSnapshotId; - this.resultsIndexName = resultsIndexName; - this.deleting = deleting; - this.allowLazyOpen = allowLazyOpen; - } - - /** - * Return the Job Id. - * - * @return The job Id string - */ - public String getId() { - return jobId; - } - - public String getJobType() { - return jobType; - } - - public List getGroups() { - return groups; - } - - /** - * Private version of getResultsIndexName so that a job can be built from another - * job and pass index name validation - * - * @return The job's index name, minus prefix - */ - private String getResultsIndexNameNoPrefix() { - return resultsIndexName; - } - - /** - * The job description - * - * @return job description - */ - public String getDescription() { - return description; - } - - /** - * The Job creation time. This name is preferred when serialising to the - * REST API. - * - * @return The date the job was created - */ - public Date getCreateTime() { - return createTime; - } - - /** - * The time the job was finished or null if not finished. - * - * @return The date the job was last retired or null - */ - public Date getFinishedTime() { - return finishedTime; - } - - /** - * The analysis configuration object - * - * @return The AnalysisConfig - */ - public AnalysisConfig getAnalysisConfig() { - return analysisConfig; - } - - /** - * The analysis options object - * - * @return The AnalysisLimits - */ - public AnalysisLimits getAnalysisLimits() { - return analysisLimits; - } - - public ModelPlotConfig getModelPlotConfig() { - return modelPlotConfig; - } - - /** - * If not set the input data is assumed to be csv with a '_time' field in - * epoch format. - * - * @return A DataDescription or null - * @see DataDescription - */ - public DataDescription getDataDescription() { - return dataDescription; - } - - /** - * The duration of the renormalization window in days - * - * @return renormalization window in days - */ - public Long getRenormalizationWindowDays() { - return renormalizationWindowDays; - } - - /** - * The background persistence interval - * - * @return background persistence interval - */ - public TimeValue getBackgroundPersistInterval() { - return backgroundPersistInterval; - } - - public Long getModelSnapshotRetentionDays() { - return modelSnapshotRetentionDays; - } - - public Long getDailyModelSnapshotRetentionAfterDays() { - return dailyModelSnapshotRetentionAfterDays; - } - - public Long getResultsRetentionDays() { - return resultsRetentionDays; - } - - public Map getCustomSettings() { - return customSettings; - } - - public String getModelSnapshotId() { - return modelSnapshotId; - } - - public Boolean getDeleting() { - return deleting; - } - - public Boolean getAllowLazyOpen() { - return allowLazyOpen; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - final String humanReadableSuffix = "_string"; - - builder.field(ID.getPreferredName(), jobId); - builder.field(JOB_TYPE.getPreferredName(), jobType); - - if (groups.isEmpty() == false) { - builder.field(GROUPS.getPreferredName(), groups); - } - if (description != null) { - builder.field(DESCRIPTION.getPreferredName(), description); - } - if (createTime != null) { - builder.timeField(CREATE_TIME.getPreferredName(), CREATE_TIME.getPreferredName() + humanReadableSuffix, createTime.getTime()); - } - if (finishedTime != null) { - builder.timeField( - FINISHED_TIME.getPreferredName(), - 
FINISHED_TIME.getPreferredName() + humanReadableSuffix, - finishedTime.getTime() - ); - } - builder.field(ANALYSIS_CONFIG.getPreferredName(), analysisConfig, params); - if (analysisLimits != null) { - builder.field(ANALYSIS_LIMITS.getPreferredName(), analysisLimits, params); - } - if (dataDescription != null) { - builder.field(DATA_DESCRIPTION.getPreferredName(), dataDescription, params); - } - if (modelPlotConfig != null) { - builder.field(MODEL_PLOT_CONFIG.getPreferredName(), modelPlotConfig, params); - } - if (renormalizationWindowDays != null) { - builder.field(RENORMALIZATION_WINDOW_DAYS.getPreferredName(), renormalizationWindowDays); - } - if (backgroundPersistInterval != null) { - builder.field(BACKGROUND_PERSIST_INTERVAL.getPreferredName(), backgroundPersistInterval.getStringRep()); - } - if (modelSnapshotRetentionDays != null) { - builder.field(MODEL_SNAPSHOT_RETENTION_DAYS.getPreferredName(), modelSnapshotRetentionDays); - } - if (dailyModelSnapshotRetentionAfterDays != null) { - builder.field(DAILY_MODEL_SNAPSHOT_RETENTION_AFTER_DAYS.getPreferredName(), dailyModelSnapshotRetentionAfterDays); - } - if (resultsRetentionDays != null) { - builder.field(RESULTS_RETENTION_DAYS.getPreferredName(), resultsRetentionDays); - } - if (customSettings != null) { - builder.field(CUSTOM_SETTINGS.getPreferredName(), customSettings); - } - if (modelSnapshotId != null) { - builder.field(MODEL_SNAPSHOT_ID.getPreferredName(), modelSnapshotId); - } - if (resultsIndexName != null) { - builder.field(RESULTS_INDEX_NAME.getPreferredName(), resultsIndexName); - } - if (deleting != null) { - builder.field(DELETING.getPreferredName(), deleting); - } - if (allowLazyOpen != null) { - builder.field(ALLOW_LAZY_OPEN.getPreferredName(), allowLazyOpen); - } - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object other) { - if (this == other) { - return true; - } - - if (other == null || getClass() != other.getClass()) { - return false; - } - - Job that = (Job) other; - return Objects.equals(this.jobId, that.jobId) - && Objects.equals(this.jobType, that.jobType) - && Objects.equals(this.groups, that.groups) - && Objects.equals(this.description, that.description) - && Objects.equals(this.createTime, that.createTime) - && Objects.equals(this.finishedTime, that.finishedTime) - && Objects.equals(this.analysisConfig, that.analysisConfig) - && Objects.equals(this.analysisLimits, that.analysisLimits) - && Objects.equals(this.dataDescription, that.dataDescription) - && Objects.equals(this.modelPlotConfig, that.modelPlotConfig) - && Objects.equals(this.renormalizationWindowDays, that.renormalizationWindowDays) - && Objects.equals(this.backgroundPersistInterval, that.backgroundPersistInterval) - && Objects.equals(this.modelSnapshotRetentionDays, that.modelSnapshotRetentionDays) - && Objects.equals(this.dailyModelSnapshotRetentionAfterDays, that.dailyModelSnapshotRetentionAfterDays) - && Objects.equals(this.resultsRetentionDays, that.resultsRetentionDays) - && Objects.equals(this.customSettings, that.customSettings) - && Objects.equals(this.modelSnapshotId, that.modelSnapshotId) - && Objects.equals(this.resultsIndexName, that.resultsIndexName) - && Objects.equals(this.deleting, that.deleting) - && Objects.equals(this.allowLazyOpen, that.allowLazyOpen); - } - - @Override - public int hashCode() { - return Objects.hash( - jobId, - jobType, - groups, - description, - createTime, - finishedTime, - analysisConfig, - analysisLimits, - dataDescription, - modelPlotConfig, - renormalizationWindowDays, 
- backgroundPersistInterval, - modelSnapshotRetentionDays, - dailyModelSnapshotRetentionAfterDays, - resultsRetentionDays, - customSettings, - modelSnapshotId, - resultsIndexName, - deleting, - allowLazyOpen - ); - } - - @Override - public final String toString() { - return Strings.toString(this); - } - - public static Builder builder(String id) { - return new Builder(id); - } - - public static class Builder { - - private String id; - private String jobType = ANOMALY_DETECTOR_JOB_TYPE; - private List groups = Collections.emptyList(); - private String description; - private AnalysisConfig analysisConfig; - private AnalysisLimits analysisLimits; - private DataDescription dataDescription; - private Date createTime; - private Date finishedTime; - private ModelPlotConfig modelPlotConfig; - private Long renormalizationWindowDays; - private TimeValue backgroundPersistInterval; - private Long modelSnapshotRetentionDays; - private Long dailyModelSnapshotRetentionAfterDays; - private Long resultsRetentionDays; - private Map customSettings; - private String modelSnapshotId; - private String resultsIndexName; - private Boolean deleting; - private Boolean allowLazyOpen; - - private Builder() {} - - public Builder(String id) { - this.id = id; - } - - public Builder(Job job) { - this.id = job.getId(); - this.jobType = job.getJobType(); - this.groups = new ArrayList<>(job.getGroups()); - this.description = job.getDescription(); - this.analysisConfig = job.getAnalysisConfig(); - this.analysisLimits = job.getAnalysisLimits(); - this.dataDescription = job.getDataDescription(); - this.createTime = job.getCreateTime(); - this.finishedTime = job.getFinishedTime(); - this.modelPlotConfig = job.getModelPlotConfig(); - this.renormalizationWindowDays = job.getRenormalizationWindowDays(); - this.backgroundPersistInterval = job.getBackgroundPersistInterval(); - this.modelSnapshotRetentionDays = job.getModelSnapshotRetentionDays(); - this.dailyModelSnapshotRetentionAfterDays = job.getDailyModelSnapshotRetentionAfterDays(); - this.resultsRetentionDays = job.getResultsRetentionDays(); - this.customSettings = job.getCustomSettings() == null ? null : new LinkedHashMap<>(job.getCustomSettings()); - this.modelSnapshotId = job.getModelSnapshotId(); - this.resultsIndexName = job.getResultsIndexNameNoPrefix(); - this.deleting = job.getDeleting(); - this.allowLazyOpen = job.getAllowLazyOpen(); - } - - public Builder setId(String id) { - this.id = id; - return this; - } - - public String getId() { - return id; - } - - public Builder setJobType(String jobType) { - this.jobType = jobType; - return this; - } - - public Builder setGroups(List groups) { - this.groups = groups == null ? 
Collections.emptyList() : groups; - return this; - } - - public Builder setCustomSettings(Map customSettings) { - this.customSettings = customSettings; - return this; - } - - public Builder setDescription(String description) { - this.description = description; - return this; - } - - public Builder setAnalysisConfig(AnalysisConfig.Builder configBuilder) { - analysisConfig = Objects.requireNonNull(configBuilder, ANALYSIS_CONFIG.getPreferredName()).build(); - return this; - } - - public Builder setAnalysisLimits(AnalysisLimits analysisLimits) { - this.analysisLimits = Objects.requireNonNull(analysisLimits, ANALYSIS_LIMITS.getPreferredName()); - return this; - } - - Builder setCreateTime(Date createTime) { - this.createTime = createTime; - return this; - } - - Builder setFinishedTime(Date finishedTime) { - this.finishedTime = finishedTime; - return this; - } - - public Builder setDataDescription(DataDescription.Builder descriptionBuilder) { - dataDescription = Objects.requireNonNull(descriptionBuilder, DATA_DESCRIPTION.getPreferredName()).build(); - return this; - } - - public Builder setModelPlotConfig(ModelPlotConfig modelPlotConfig) { - this.modelPlotConfig = modelPlotConfig; - return this; - } - - public Builder setBackgroundPersistInterval(TimeValue backgroundPersistInterval) { - this.backgroundPersistInterval = backgroundPersistInterval; - return this; - } - - public Builder setRenormalizationWindowDays(Long renormalizationWindowDays) { - this.renormalizationWindowDays = renormalizationWindowDays; - return this; - } - - public Builder setModelSnapshotRetentionDays(Long modelSnapshotRetentionDays) { - this.modelSnapshotRetentionDays = modelSnapshotRetentionDays; - return this; - } - - public Builder setDailyModelSnapshotRetentionAfterDays(Long dailyModelSnapshotRetentionAfterDays) { - this.dailyModelSnapshotRetentionAfterDays = dailyModelSnapshotRetentionAfterDays; - return this; - } - - public Builder setResultsRetentionDays(Long resultsRetentionDays) { - this.resultsRetentionDays = resultsRetentionDays; - return this; - } - - public Builder setModelSnapshotId(String modelSnapshotId) { - this.modelSnapshotId = modelSnapshotId; - return this; - } - - public Builder setResultsIndexName(String resultsIndexName) { - this.resultsIndexName = resultsIndexName; - return this; - } - - Builder setDeleting(Boolean deleting) { - this.deleting = deleting; - return this; - } - - Builder setAllowLazyOpen(Boolean allowLazyOpen) { - this.allowLazyOpen = allowLazyOpen; - return this; - } - - /** - * Builds a job. 
- * - * @return The job - */ - public Job build() { - Objects.requireNonNull(id, "[" + ID.getPreferredName() + "] must not be null"); - Objects.requireNonNull(jobType, "[" + JOB_TYPE.getPreferredName() + "] must not be null"); - return new Job( - id, - jobType, - groups, - description, - createTime, - finishedTime, - analysisConfig, - analysisLimits, - dataDescription, - modelPlotConfig, - renormalizationWindowDays, - backgroundPersistInterval, - modelSnapshotRetentionDays, - dailyModelSnapshotRetentionAfterDays, - resultsRetentionDays, - customSettings, - modelSnapshotId, - resultsIndexName, - deleting, - allowLazyOpen - ); - } - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/JobState.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/JobState.java deleted file mode 100644 index 1a248ef137d53..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/JobState.java +++ /dev/null @@ -1,32 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.job.config; - -import java.util.Locale; - -/** - * Jobs whether running or complete are in one of these states. - * When a job is created it is initialised in the state closed - * i.e. it is not running. - */ -public enum JobState { - - CLOSING, - CLOSED, - OPENED, - FAILED, - OPENING; - - public static JobState fromString(String name) { - return valueOf(name.trim().toUpperCase(Locale.ROOT)); - } - - public String value() { - return name().toLowerCase(Locale.ROOT); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/JobUpdate.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/JobUpdate.java deleted file mode 100644 index f0d70a2509a39..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/JobUpdate.java +++ /dev/null @@ -1,588 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
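For context, a minimal sketch of assembling a Job with the builder removed above (the two nested builders are assumed to be prepared elsewhere; build() rejects a null id or job type):

    Job job = Job.builder("my-anomaly-job")            // job_id is required
        .setDescription("example job")
        .setAnalysisConfig(analysisConfigBuilder)      // assumed pre-built AnalysisConfig.Builder
        .setDataDescription(dataDescriptionBuilder)    // assumed pre-built DataDescription.Builder
        .build();                                      // jobType defaults to "anomaly_detector"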
- */ -package org.elasticsearch.client.ml.job.config; - -import org.elasticsearch.core.Nullable; -import org.elasticsearch.core.TimeValue; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.List; -import java.util.Map; -import java.util.Objects; - -/** - * POJO for updating an existing Machine Learning {@link Job} - */ -public class JobUpdate implements ToXContentObject { - public static final ParseField DETECTORS = new ParseField("detectors"); - - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "job_update", - true, - args -> new Builder((String) args[0]) - ); - - static { - PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), Job.ID); - PARSER.declareStringArray(Builder::setGroups, Job.GROUPS); - PARSER.declareStringOrNull(Builder::setDescription, Job.DESCRIPTION); - PARSER.declareObjectArray(Builder::setDetectorUpdates, DetectorUpdate.PARSER, DETECTORS); - PARSER.declareObject(Builder::setModelPlotConfig, ModelPlotConfig.PARSER, Job.MODEL_PLOT_CONFIG); - PARSER.declareObject(Builder::setAnalysisLimits, AnalysisLimits.PARSER, Job.ANALYSIS_LIMITS); - PARSER.declareString( - (builder, val) -> builder.setBackgroundPersistInterval( - TimeValue.parseTimeValue(val, Job.BACKGROUND_PERSIST_INTERVAL.getPreferredName()) - ), - Job.BACKGROUND_PERSIST_INTERVAL - ); - PARSER.declareLong(Builder::setRenormalizationWindowDays, Job.RENORMALIZATION_WINDOW_DAYS); - PARSER.declareLong(Builder::setResultsRetentionDays, Job.RESULTS_RETENTION_DAYS); - PARSER.declareLong(Builder::setModelSnapshotRetentionDays, Job.MODEL_SNAPSHOT_RETENTION_DAYS); - PARSER.declareLong(Builder::setDailyModelSnapshotRetentionAfterDays, Job.DAILY_MODEL_SNAPSHOT_RETENTION_AFTER_DAYS); - PARSER.declareStringArray(Builder::setCategorizationFilters, AnalysisConfig.CATEGORIZATION_FILTERS); - PARSER.declareObject( - Builder::setPerPartitionCategorizationConfig, - PerPartitionCategorizationConfig.PARSER, - AnalysisConfig.PER_PARTITION_CATEGORIZATION - ); - PARSER.declareField(Builder::setCustomSettings, (p, c) -> p.map(), Job.CUSTOM_SETTINGS, ObjectParser.ValueType.OBJECT); - PARSER.declareBoolean(Builder::setAllowLazyOpen, Job.ALLOW_LAZY_OPEN); - PARSER.declareString( - (builder, val) -> builder.setModelPruneWindow( - TimeValue.parseTimeValue(val, AnalysisConfig.MODEL_PRUNE_WINDOW.getPreferredName()) - ), - AnalysisConfig.MODEL_PRUNE_WINDOW - ); - } - - private final String jobId; - private final List groups; - private final String description; - private final List detectorUpdates; - private final ModelPlotConfig modelPlotConfig; - private final AnalysisLimits analysisLimits; - private final Long renormalizationWindowDays; - private final TimeValue backgroundPersistInterval; - private final Long modelSnapshotRetentionDays; - private final Long dailyModelSnapshotRetentionAfterDays; - private final Long resultsRetentionDays; - private final List categorizationFilters; - private final PerPartitionCategorizationConfig perPartitionCategorizationConfig; - private final Map customSettings; - private final Boolean allowLazyOpen; - private final TimeValue modelPruneWindow; - - private JobUpdate( - String jobId, - @Nullable List groups, - @Nullable String description, - @Nullable List detectorUpdates, - @Nullable ModelPlotConfig 
modelPlotConfig, - @Nullable AnalysisLimits analysisLimits, - @Nullable TimeValue backgroundPersistInterval, - @Nullable Long renormalizationWindowDays, - @Nullable Long resultsRetentionDays, - @Nullable Long modelSnapshotRetentionDays, - @Nullable Long dailyModelSnapshotRetentionAfterDays, - @Nullable List categorizationFilters, - @Nullable PerPartitionCategorizationConfig perPartitionCategorizationConfig, - @Nullable Map customSettings, - @Nullable Boolean allowLazyOpen, - @Nullable TimeValue modelPruneWindow - ) { - this.jobId = jobId; - this.groups = groups; - this.description = description; - this.detectorUpdates = detectorUpdates; - this.modelPlotConfig = modelPlotConfig; - this.analysisLimits = analysisLimits; - this.renormalizationWindowDays = renormalizationWindowDays; - this.backgroundPersistInterval = backgroundPersistInterval; - this.modelSnapshotRetentionDays = modelSnapshotRetentionDays; - this.dailyModelSnapshotRetentionAfterDays = dailyModelSnapshotRetentionAfterDays; - this.resultsRetentionDays = resultsRetentionDays; - this.categorizationFilters = categorizationFilters; - this.perPartitionCategorizationConfig = perPartitionCategorizationConfig; - this.customSettings = customSettings; - this.allowLazyOpen = allowLazyOpen; - this.modelPruneWindow = modelPruneWindow; - } - - public String getJobId() { - return jobId; - } - - public List getGroups() { - return groups; - } - - public String getDescription() { - return description; - } - - public List getDetectorUpdates() { - return detectorUpdates; - } - - public ModelPlotConfig getModelPlotConfig() { - return modelPlotConfig; - } - - public AnalysisLimits getAnalysisLimits() { - return analysisLimits; - } - - public Long getRenormalizationWindowDays() { - return renormalizationWindowDays; - } - - public TimeValue getBackgroundPersistInterval() { - return backgroundPersistInterval; - } - - public Long getModelSnapshotRetentionDays() { - return modelSnapshotRetentionDays; - } - - public Long getResultsRetentionDays() { - return resultsRetentionDays; - } - - public List getCategorizationFilters() { - return categorizationFilters; - } - - public PerPartitionCategorizationConfig getPerPartitionCategorizationConfig() { - return perPartitionCategorizationConfig; - } - - public Map getCustomSettings() { - return customSettings; - } - - public Boolean getAllowLazyOpen() { - return allowLazyOpen; - } - - public TimeValue getModelPruneWindow() { - return modelPruneWindow; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(Job.ID.getPreferredName(), jobId); - if (groups != null) { - builder.field(Job.GROUPS.getPreferredName(), groups); - } - if (description != null) { - builder.field(Job.DESCRIPTION.getPreferredName(), description); - } - if (detectorUpdates != null) { - builder.field(DETECTORS.getPreferredName(), detectorUpdates); - } - if (modelPlotConfig != null) { - builder.field(Job.MODEL_PLOT_CONFIG.getPreferredName(), modelPlotConfig); - } - if (analysisLimits != null) { - builder.field(Job.ANALYSIS_LIMITS.getPreferredName(), analysisLimits); - } - if (renormalizationWindowDays != null) { - builder.field(Job.RENORMALIZATION_WINDOW_DAYS.getPreferredName(), renormalizationWindowDays); - } - if (backgroundPersistInterval != null) { - builder.field(Job.BACKGROUND_PERSIST_INTERVAL.getPreferredName(), backgroundPersistInterval); - } - if (modelSnapshotRetentionDays != null) { - 
builder.field(Job.MODEL_SNAPSHOT_RETENTION_DAYS.getPreferredName(), modelSnapshotRetentionDays); - } - if (dailyModelSnapshotRetentionAfterDays != null) { - builder.field(Job.DAILY_MODEL_SNAPSHOT_RETENTION_AFTER_DAYS.getPreferredName(), dailyModelSnapshotRetentionAfterDays); - } - if (resultsRetentionDays != null) { - builder.field(Job.RESULTS_RETENTION_DAYS.getPreferredName(), resultsRetentionDays); - } - if (categorizationFilters != null) { - builder.field(AnalysisConfig.CATEGORIZATION_FILTERS.getPreferredName(), categorizationFilters); - } - if (perPartitionCategorizationConfig != null) { - builder.field(AnalysisConfig.PER_PARTITION_CATEGORIZATION.getPreferredName(), perPartitionCategorizationConfig); - } - if (customSettings != null) { - builder.field(Job.CUSTOM_SETTINGS.getPreferredName(), customSettings); - } - if (allowLazyOpen != null) { - builder.field(Job.ALLOW_LAZY_OPEN.getPreferredName(), allowLazyOpen); - } - if (modelPruneWindow != null) { - builder.field(AnalysisConfig.MODEL_PRUNE_WINDOW.getPreferredName(), modelPruneWindow); - } - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object other) { - if (this == other) { - return true; - } - - if (other == null || getClass() != other.getClass()) { - return false; - } - - JobUpdate that = (JobUpdate) other; - - return Objects.equals(this.jobId, that.jobId) - && Objects.equals(this.groups, that.groups) - && Objects.equals(this.description, that.description) - && Objects.equals(this.detectorUpdates, that.detectorUpdates) - && Objects.equals(this.modelPlotConfig, that.modelPlotConfig) - && Objects.equals(this.analysisLimits, that.analysisLimits) - && Objects.equals(this.renormalizationWindowDays, that.renormalizationWindowDays) - && Objects.equals(this.backgroundPersistInterval, that.backgroundPersistInterval) - && Objects.equals(this.modelSnapshotRetentionDays, that.modelSnapshotRetentionDays) - && Objects.equals(this.dailyModelSnapshotRetentionAfterDays, that.dailyModelSnapshotRetentionAfterDays) - && Objects.equals(this.resultsRetentionDays, that.resultsRetentionDays) - && Objects.equals(this.categorizationFilters, that.categorizationFilters) - && Objects.equals(this.perPartitionCategorizationConfig, that.perPartitionCategorizationConfig) - && Objects.equals(this.customSettings, that.customSettings) - && Objects.equals(this.allowLazyOpen, that.allowLazyOpen) - && Objects.equals(this.modelPruneWindow, that.modelPruneWindow); - } - - @Override - public int hashCode() { - return Objects.hash( - jobId, - groups, - description, - detectorUpdates, - modelPlotConfig, - analysisLimits, - renormalizationWindowDays, - backgroundPersistInterval, - modelSnapshotRetentionDays, - dailyModelSnapshotRetentionAfterDays, - resultsRetentionDays, - categorizationFilters, - perPartitionCategorizationConfig, - customSettings, - allowLazyOpen, - modelPruneWindow - ); - } - - public static class DetectorUpdate implements ToXContentObject { - @SuppressWarnings("unchecked") - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "detector_update", - true, - a -> new DetectorUpdate((int) a[0], (String) a[1], (List) a[2]) - ); - - static { - PARSER.declareInt(ConstructingObjectParser.optionalConstructorArg(), Detector.DETECTOR_INDEX); - PARSER.declareStringOrNull(ConstructingObjectParser.optionalConstructorArg(), Job.DESCRIPTION); - PARSER.declareObjectArray( - ConstructingObjectParser.optionalConstructorArg(), - (parser, parseFieldMatcher) -> DetectionRule.PARSER.apply(parser, 
parseFieldMatcher).build(), - Detector.CUSTOM_RULES_FIELD - ); - } - - private final int detectorIndex; - private final String description; - private final List rules; - - /** - * A detector update to apply to the Machine Learning Job - * - * @param detectorIndex The identifier of the detector to update. - * @param description The new description for the detector. - * @param rules The new list of rules for the detector. - */ - public DetectorUpdate(int detectorIndex, String description, List rules) { - this.detectorIndex = detectorIndex; - this.description = description; - this.rules = rules; - } - - public int getDetectorIndex() { - return detectorIndex; - } - - public String getDescription() { - return description; - } - - public List getRules() { - return rules; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - - builder.field(Detector.DETECTOR_INDEX.getPreferredName(), detectorIndex); - if (description != null) { - builder.field(Job.DESCRIPTION.getPreferredName(), description); - } - if (rules != null) { - builder.field(Detector.CUSTOM_RULES_FIELD.getPreferredName(), rules); - } - builder.endObject(); - - return builder; - } - - @Override - public int hashCode() { - return Objects.hash(detectorIndex, description, rules); - } - - @Override - public boolean equals(Object other) { - if (this == other) { - return true; - } - - if (other == null || getClass() != other.getClass()) { - return false; - } - - DetectorUpdate that = (DetectorUpdate) other; - return this.detectorIndex == that.detectorIndex - && Objects.equals(this.description, that.description) - && Objects.equals(this.rules, that.rules); - } - } - - public static class Builder { - - private final String jobId; - private List groups; - private String description; - private List detectorUpdates; - private ModelPlotConfig modelPlotConfig; - private AnalysisLimits analysisLimits; - private Long renormalizationWindowDays; - private TimeValue backgroundPersistInterval; - private Long modelSnapshotRetentionDays; - private Long dailyModelSnapshotRetentionAfterDays; - private Long resultsRetentionDays; - private List categorizationFilters; - private PerPartitionCategorizationConfig perPartitionCategorizationConfig; - private Map customSettings; - private Boolean allowLazyOpen; - private TimeValue modelPruneWindow; - - /** - * New {@link JobUpdate.Builder} object for the existing job - * - * @param jobId non-null `jobId` for referencing an exising {@link Job} - */ - public Builder(String jobId) { - this.jobId = jobId; - } - - /** - * Set the job groups - * - * Updates the {@link Job#groups} setting - * - * @param groups A list of group names - */ - public Builder setGroups(List groups) { - this.groups = groups; - return this; - } - - /** - * Set the job description - * - * Updates the {@link Job#description} setting - * - * @param description the desired Machine Learning job description - */ - public Builder setDescription(String description) { - this.description = description; - return this; - } - - /** - * The detector updates to apply to the job - * - * Updates the {@link AnalysisConfig#detectors} setting - * - * @param detectorUpdates list of {@link JobUpdate.DetectorUpdate} objects - */ - public Builder setDetectorUpdates(List detectorUpdates) { - this.detectorUpdates = detectorUpdates; - return this; - } - - /** - * Enables/disables the model plot config setting through {@link ModelPlotConfig#enabled} - * - * Updates the {@link 
Job#modelPlotConfig} setting - * - * @param modelPlotConfig {@link ModelPlotConfig} object with updated fields - */ - public Builder setModelPlotConfig(ModelPlotConfig modelPlotConfig) { - this.modelPlotConfig = modelPlotConfig; - return this; - } - - /** - * Sets new {@link AnalysisLimits} for the {@link Job} - * - * Updates the {@link Job#analysisLimits} setting - * - * @param analysisLimits Updates to {@link AnalysisLimits} - */ - public Builder setAnalysisLimits(AnalysisLimits analysisLimits) { - this.analysisLimits = analysisLimits; - return this; - } - - /** - * Advanced configuration option. The period over which adjustments to the score are applied, as new data is seen - * - * Updates the {@link Job#renormalizationWindowDays} setting - * - * @param renormalizationWindowDays number of renormalization window days - */ - public Builder setRenormalizationWindowDays(Long renormalizationWindowDays) { - this.renormalizationWindowDays = renormalizationWindowDays; - return this; - } - - /** - * Advanced configuration option. The time between each periodic persistence of the model - * - * Updates the {@link Job#backgroundPersistInterval} setting - * - * @param backgroundPersistInterval the time between background persistence - */ - public Builder setBackgroundPersistInterval(TimeValue backgroundPersistInterval) { - this.backgroundPersistInterval = backgroundPersistInterval; - return this; - } - - /** - * The time in days that model snapshots are retained for the job. - * - * Updates the {@link Job#modelSnapshotRetentionDays} setting - * - * @param modelSnapshotRetentionDays number of days to keep a model snapshot - */ - public Builder setModelSnapshotRetentionDays(Long modelSnapshotRetentionDays) { - this.modelSnapshotRetentionDays = modelSnapshotRetentionDays; - return this; - } - - /** - * The time in days after which only one model snapshot per day is retained for the job. - * - * Updates the {@link Job#dailyModelSnapshotRetentionAfterDays} setting - * - * @param dailyModelSnapshotRetentionAfterDays number of days to keep a model snapshot - */ - public Builder setDailyModelSnapshotRetentionAfterDays(Long dailyModelSnapshotRetentionAfterDays) { - this.dailyModelSnapshotRetentionAfterDays = dailyModelSnapshotRetentionAfterDays; - return this; - } - - /** - * Advanced configuration option. The number of days for which job results are retained - * - * Updates the {@link Job#resultsRetentionDays} setting - * - * @param resultsRetentionDays number of days to keep results. - */ - public Builder setResultsRetentionDays(Long resultsRetentionDays) { - this.resultsRetentionDays = resultsRetentionDays; - return this; - } - - /** - * Sets the categorization filters on the {@link Job} - * - * Updates the {@link AnalysisConfig#categorizationFilters} setting. - * Requires {@link AnalysisConfig#categorizationFieldName} to have been set on the existing Job. - * - * @param categorizationFilters list of categorization filters for the Job's {@link AnalysisConfig} - */ - public Builder setCategorizationFilters(List categorizationFilters) { - this.categorizationFilters = categorizationFilters; - return this; - } - - /** - * Sets the per-partition categorization options on the {@link Job} - * - * Updates the {@link AnalysisConfig#perPartitionCategorizationConfig} setting. - * Requires {@link AnalysisConfig#perPartitionCategorizationConfig} to have been set on the existing Job. 
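Since every field of the update is optional, a JobUpdate is typically built sparsely; a minimal sketch using the setters documented here (values are illustrative):

    JobUpdate update = new JobUpdate.Builder("my-anomaly-job")
        .setDescription("tightened retention")
        .setResultsRetentionDays(30L)
        .setAllowLazyOpen(true)
        .build();
    // Fields left unset stay null and are omitted from the serialized update body.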
- * - * @param perPartitionCategorizationConfig per-partition categorization options for the Job's {@link AnalysisConfig} - */ - public Builder setPerPartitionCategorizationConfig(PerPartitionCategorizationConfig perPartitionCategorizationConfig) { - this.perPartitionCategorizationConfig = perPartitionCategorizationConfig; - return this; - } - - /** - * Contains custom meta data about the job. - * - * Updates the {@link Job#customSettings} setting - * - * @param customSettings custom settings map for the job - */ - public Builder setCustomSettings(Map customSettings) { - this.customSettings = customSettings; - return this; - } - - public Builder setAllowLazyOpen(boolean allowLazyOpen) { - this.allowLazyOpen = allowLazyOpen; - return this; - } - - public Builder setModelPruneWindow(TimeValue modelPruneWindow) { - this.modelPruneWindow = modelPruneWindow; - return this; - } - - public JobUpdate build() { - return new JobUpdate( - jobId, - groups, - description, - detectorUpdates, - modelPlotConfig, - analysisLimits, - backgroundPersistInterval, - renormalizationWindowDays, - resultsRetentionDays, - modelSnapshotRetentionDays, - dailyModelSnapshotRetentionAfterDays, - categorizationFilters, - perPartitionCategorizationConfig, - customSettings, - allowLazyOpen, - modelPruneWindow - ); - } - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/MlFilter.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/MlFilter.java deleted file mode 100644 index 7079ff69ea106..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/MlFilter.java +++ /dev/null @@ -1,170 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.job.config; - -import org.elasticsearch.core.Nullable; -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Arrays; -import java.util.Collections; -import java.util.List; -import java.util.Objects; -import java.util.SortedSet; -import java.util.TreeSet; - -/** - * An MlFilter Object - * - * A filter contains a list of strings. - * It can be used by one or more jobs. - * - * Specifically, filters are referenced in the custom_rules property of detector configuration objects. 
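A minimal sketch of creating such a filter with the builder defined below (identifiers are illustrative):

    MlFilter filter = MlFilter.builder("safe_domains")            // filter_id is required
        .setDescription("domains that never indicate an anomaly")
        .setItems("*.example.com", "internal.example.org")        // a * wildcard may start or end an item
        .build();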
- */ -public class MlFilter implements ToXContentObject { - - public static final ParseField TYPE = new ParseField("type"); - public static final ParseField ID = new ParseField("filter_id"); - public static final ParseField DESCRIPTION = new ParseField("description"); - public static final ParseField ITEMS = new ParseField("items"); - - // For QueryPage - public static final ParseField RESULTS_FIELD = new ParseField("filters"); - - public static final ObjectParser PARSER = new ObjectParser<>(TYPE.getPreferredName(), true, Builder::new); - - static { - PARSER.declareString((builder, s) -> {}, TYPE); - PARSER.declareString(Builder::setId, ID); - PARSER.declareStringOrNull(Builder::setDescription, DESCRIPTION); - PARSER.declareStringArray(Builder::setItems, ITEMS); - } - - private final String id; - private final String description; - private final SortedSet items; - - private MlFilter(String id, String description, SortedSet items) { - this.id = Objects.requireNonNull(id); - this.description = description; - this.items = Collections.unmodifiableSortedSet(items); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(ID.getPreferredName(), id); - if (description != null) { - builder.field(DESCRIPTION.getPreferredName(), description); - } - builder.field(ITEMS.getPreferredName(), items); - // Don't include TYPE as it's fixed - builder.endObject(); - return builder; - } - - public String getId() { - return id; - } - - public String getDescription() { - return description; - } - - public SortedSet getItems() { - return items; - } - - @Override - public boolean equals(Object obj) { - if (obj == this) { - return true; - } - - if (obj instanceof MlFilter == false) { - return false; - } - - MlFilter other = (MlFilter) obj; - return id.equals(other.id) && Objects.equals(description, other.description) && items.equals(other.items); - } - - @Override - public int hashCode() { - return Objects.hash(id, description, items); - } - - /** - * Creates a new Builder object for creating an MlFilter object - * @param filterId The ID of the filter to create - */ - public static Builder builder(String filterId) { - return new Builder().setId(filterId); - } - - public static class Builder { - - private String id; - private String description; - private SortedSet items = new TreeSet<>(); - - private Builder() {} - - /** - * Set the ID of the filter - * @param id The id desired - */ - public Builder setId(String id) { - this.id = Objects.requireNonNull(id); - return this; - } - - @Nullable - public String getId() { - return id; - } - - /** - * Set the description of the filter - * @param description The description desired - */ - public Builder setDescription(String description) { - this.description = description; - return this; - } - - public Builder setItems(SortedSet items) { - this.items = Objects.requireNonNull(items); - return this; - } - - public Builder setItems(List items) { - this.items = new TreeSet<>(items); - return this; - } - - /** - * The items of the filter. - * - * A wildcard * can be used at the beginning or the end of an item. Up to 10000 items are allowed in each filter. - * - * @param items String list of items to be applied in the filter - */ - public Builder setItems(String... 
items) { - setItems(Arrays.asList(items)); - return this; - } - - public MlFilter build() { - return new MlFilter(id, description, items); - } - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/ModelPlotConfig.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/ModelPlotConfig.java deleted file mode 100644 index 4581409d64989..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/ModelPlotConfig.java +++ /dev/null @@ -1,93 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.job.config; - -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Objects; - -public class ModelPlotConfig implements ToXContentObject { - - private static final ParseField TYPE_FIELD = new ParseField("model_plot_config"); - private static final ParseField ENABLED_FIELD = new ParseField("enabled"); - private static final ParseField TERMS_FIELD = new ParseField("terms"); - private static final ParseField ANNOTATIONS_ENABLED_FIELD = new ParseField("annotations_enabled"); - - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - TYPE_FIELD.getPreferredName(), - true, - a -> new ModelPlotConfig((boolean) a[0], (String) a[1], (Boolean) a[2]) - ); - - static { - PARSER.declareBoolean(ConstructingObjectParser.constructorArg(), ENABLED_FIELD); - PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), TERMS_FIELD); - PARSER.declareBoolean(ConstructingObjectParser.optionalConstructorArg(), ANNOTATIONS_ENABLED_FIELD); - } - - private final boolean enabled; - private final String terms; - private final Boolean annotationsEnabled; - - public ModelPlotConfig(boolean enabled, String terms, Boolean annotationsEnabled) { - this.enabled = enabled; - this.terms = terms; - this.annotationsEnabled = annotationsEnabled; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(ENABLED_FIELD.getPreferredName(), enabled); - if (terms != null) { - builder.field(TERMS_FIELD.getPreferredName(), terms); - } - if (annotationsEnabled != null) { - builder.field(ANNOTATIONS_ENABLED_FIELD.getPreferredName(), annotationsEnabled); - } - builder.endObject(); - return builder; - } - - public boolean isEnabled() { - return enabled; - } - - public String getTerms() { - return this.terms; - } - - public Boolean annotationsEnabled() { - return annotationsEnabled; - } - - @Override - public boolean equals(Object other) { - if (this == other) { - return true; - } - - if (other instanceof ModelPlotConfig == false) { - return false; - } - - ModelPlotConfig that = (ModelPlotConfig) other; - return this.enabled == that.enabled - && Objects.equals(this.terms, that.terms) - && Objects.equals(this.annotationsEnabled, that.annotationsEnabled); - } - - @Override - public int hashCode() { - return Objects.hash(enabled, terms, annotationsEnabled); - } -} diff 
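The ModelPlotConfig class removed above requires only the enabled flag; terms and annotationsEnabled may be null and are then left out of the serialized object. A minimal sketch (the terms value and its comma-separated interpretation are assumed usage, not confirmed by this patch):

    ModelPlotConfig modelPlot = new ModelPlotConfig(true, "AAL,JZA", null);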
--git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/Operator.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/Operator.java
deleted file mode 100644
index 3d7ac2af70a66..0000000000000
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/Operator.java
+++ /dev/null
@@ -1,60 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
- */
-package org.elasticsearch.client.ml.job.config;
-
-import org.elasticsearch.xcontent.ParseField;
-
-import java.util.Locale;
-
-/**
- * Enum representing logical comparisons on doubles
- */
-public enum Operator {
-    GT {
-        @Override
-        public boolean test(double lhs, double rhs) {
-            return Double.compare(lhs, rhs) > 0;
-        }
-    },
-    GTE {
-        @Override
-        public boolean test(double lhs, double rhs) {
-            return Double.compare(lhs, rhs) >= 0;
-        }
-    },
-    LT {
-        @Override
-        public boolean test(double lhs, double rhs) {
-            return Double.compare(lhs, rhs) < 0;
-        }
-    },
-    LTE {
-        @Override
-        public boolean test(double lhs, double rhs) {
-            return Double.compare(lhs, rhs) <= 0;
-        }
-    };
-    // EQ was considered but given the oddity of such a
-    // condition and the fact that it would be a numerically
-    // unstable condition, it was rejected.
-
-    public static final ParseField OPERATOR_FIELD = new ParseField("operator");
-
-    public boolean test(double lhs, double rhs) {
-        return false;
-    }
-
-    public static Operator fromString(String name) {
-        return valueOf(name.trim().toUpperCase(Locale.ROOT));
-    }
-
-    @Override
-    public String toString() {
-        return name().toLowerCase(Locale.ROOT);
-    }
-}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/PerPartitionCategorizationConfig.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/PerPartitionCategorizationConfig.java
deleted file mode 100644
index 86cd40f45c601..0000000000000
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/PerPartitionCategorizationConfig.java
+++ /dev/null
@@ -1,86 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0 and the Server Side Public License, v 1; you may not use this file except
- * in compliance with, at your election, the Elastic License 2.0 or the Server
- * Side Public License, v 1.
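The Operator constants above delegate to Double.compare, so the comparison is a total order; a minimal sketch of the resulting behavior (illustrative code, not part of this patch):

    assert Operator.GT.test(2.0, 1.0);
    assert Operator.LTE.test(5.0, 5.0);
    assert Operator.fromString(" gte ") == Operator.GTE;   // input is trimmed and upper-cased
    // Double.compare is total: NaN sorts above positive infinity.
    assert Operator.GT.test(Double.NaN, Double.POSITIVE_INFINITY);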
- */ - -package org.elasticsearch.client.ml.job.config; - -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContent; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Objects; - -public class PerPartitionCategorizationConfig implements ToXContentObject { - - public static final ParseField TYPE_FIELD = new ParseField("per_partition_categorization"); - public static final ParseField ENABLED_FIELD = new ParseField("enabled"); - public static final ParseField STOP_ON_WARN = new ParseField("stop_on_warn"); - - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - TYPE_FIELD.getPreferredName(), - true, - a -> new PerPartitionCategorizationConfig((boolean) a[0], (Boolean) a[1]) - ); - - static { - PARSER.declareBoolean(ConstructingObjectParser.constructorArg(), ENABLED_FIELD); - PARSER.declareBoolean(ConstructingObjectParser.optionalConstructorArg(), STOP_ON_WARN); - } - - private final boolean enabled; - private final boolean stopOnWarn; - - public PerPartitionCategorizationConfig() { - this(false, null); - } - - public PerPartitionCategorizationConfig(boolean enabled, Boolean stopOnWarn) { - this.enabled = enabled; - this.stopOnWarn = (stopOnWarn == null) ? false : stopOnWarn; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { - builder.startObject(); - builder.field(ENABLED_FIELD.getPreferredName(), enabled); - if (enabled) { - builder.field(STOP_ON_WARN.getPreferredName(), stopOnWarn); - } - builder.endObject(); - return builder; - } - - public boolean isEnabled() { - return enabled; - } - - public boolean isStopOnWarn() { - return stopOnWarn; - } - - @Override - public boolean equals(Object other) { - if (this == other) { - return true; - } - - if (other instanceof PerPartitionCategorizationConfig == false) { - return false; - } - - PerPartitionCategorizationConfig that = (PerPartitionCategorizationConfig) other; - return this.enabled == that.enabled && this.stopOnWarn == that.stopOnWarn; - } - - @Override - public int hashCode() { - return Objects.hash(enabled, stopOnWarn); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/RuleAction.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/RuleAction.java deleted file mode 100644 index 21d9c99134660..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/RuleAction.java +++ /dev/null @@ -1,30 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.job.config; - -import java.util.Locale; - -public enum RuleAction { - SKIP_RESULT, - SKIP_MODEL_UPDATE; - - /** - * Case-insensitive from string method. 
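In the class above, a null stopOnWarn defaults to false, and stop_on_warn is only serialized while the feature is enabled; a minimal sketch:

    PerPartitionCategorizationConfig cfg = new PerPartitionCategorizationConfig(true, null);
    assert cfg.isEnabled() && cfg.isStopOnWarn() == false;   // null stopOnWarn defaults to false
    // The no-arg constructor is equivalent to (false, null) and serializes just {"enabled": false}.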
- * - * @param value String representation - * @return The rule action - */ - public static RuleAction fromString(String value) { - return RuleAction.valueOf(value.toUpperCase(Locale.ROOT)); - } - - @Override - public String toString() { - return name().toLowerCase(Locale.ROOT); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/RuleCondition.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/RuleCondition.java deleted file mode 100644 index d7f9054c23485..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/RuleCondition.java +++ /dev/null @@ -1,108 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.job.config; - -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Locale; -import java.util.Objects; - -public class RuleCondition implements ToXContentObject { - - public static final ParseField RULE_CONDITION_FIELD = new ParseField("rule_condition"); - - public static final ParseField APPLIES_TO_FIELD = new ParseField("applies_to"); - public static final ParseField VALUE_FIELD = new ParseField("value"); - - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - RULE_CONDITION_FIELD.getPreferredName(), - true, - a -> new RuleCondition((AppliesTo) a[0], (Operator) a[1], (double) a[2]) - ); - - static { - PARSER.declareString(ConstructingObjectParser.constructorArg(), AppliesTo::fromString, APPLIES_TO_FIELD); - PARSER.declareString(ConstructingObjectParser.constructorArg(), Operator::fromString, Operator.OPERATOR_FIELD); - PARSER.declareDouble(ConstructingObjectParser.constructorArg(), VALUE_FIELD); - } - - private final AppliesTo appliesTo; - private final Operator operator; - private final double value; - - public RuleCondition(AppliesTo appliesTo, Operator operator, double value) { - this.appliesTo = appliesTo; - this.operator = operator; - this.value = value; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(APPLIES_TO_FIELD.getPreferredName(), appliesTo); - builder.field(Operator.OPERATOR_FIELD.getPreferredName(), operator); - builder.field(VALUE_FIELD.getPreferredName(), value); - builder.endObject(); - return builder; - } - - public AppliesTo getAppliesTo() { - return appliesTo; - } - - public Operator getOperator() { - return operator; - } - - public double getValue() { - return value; - } - - @Override - public boolean equals(Object obj) { - if (this == obj) { - return true; - } - - if (obj instanceof RuleCondition == false) { - return false; - } - - RuleCondition other = (RuleCondition) obj; - return appliesTo == other.appliesTo && operator == other.operator && value == other.value; - } - - @Override - public int hashCode() { - return Objects.hash(appliesTo, operator, value); - } - - public static RuleCondition createTime(Operator operator, long epochSeconds) { - return new 
RuleCondition(AppliesTo.TIME, operator, epochSeconds); - } - - public enum AppliesTo { - ACTUAL, - TYPICAL, - DIFF_FROM_TYPICAL, - TIME; - - public static AppliesTo fromString(String value) { - return valueOf(value.toUpperCase(Locale.ROOT)); - } - - @Override - public String toString() { - return name().toLowerCase(Locale.ROOT); - } - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/RuleScope.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/RuleScope.java deleted file mode 100644 index 352d240943a9c..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/config/RuleScope.java +++ /dev/null @@ -1,123 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.job.config; - -import org.elasticsearch.common.Strings; -import org.elasticsearch.xcontent.ContextParser; -import org.elasticsearch.xcontent.DeprecationHandler; -import org.elasticsearch.xcontent.NamedXContentRegistry; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentFactory; -import org.elasticsearch.xcontent.XContentParser; - -import java.io.IOException; -import java.util.Collections; -import java.util.HashMap; -import java.util.Map; -import java.util.Objects; -import java.util.Set; -import java.util.stream.Collectors; - -public class RuleScope implements ToXContentObject { - - public static ContextParser parser() { - return (p, c) -> { - Map unparsedScope = p.map(); - if (unparsedScope.isEmpty()) { - return new RuleScope(); - } - Map scope = new HashMap<>(); - for (Map.Entry entry : unparsedScope.entrySet()) { - try (XContentBuilder builder = XContentFactory.jsonBuilder()) { - @SuppressWarnings("unchecked") - Map value = (Map) entry.getValue(); - builder.map(value); - try ( - XContentParser scopeParser = XContentFactory.xContent(builder.contentType()) - .createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.IGNORE_DEPRECATIONS, Strings.toString(builder)) - ) { - scope.put(entry.getKey(), FilterRef.PARSER.parse(scopeParser, null)); - } - } - } - return new RuleScope(scope); - }; - } - - private final Map scope; - - public RuleScope() { - scope = Collections.emptyMap(); - } - - public RuleScope(Map scope) { - this.scope = Collections.unmodifiableMap(scope); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - return builder.map(scope); - } - - public boolean isEmpty() { - return scope.isEmpty(); - } - - public Set getReferencedFilters() { - return scope.values().stream().map(FilterRef::getFilterId).collect(Collectors.toSet()); - } - - @Override - public boolean equals(Object obj) { - if (this == obj) { - return true; - } - - if (obj instanceof RuleScope == false) { - return false; - } - - RuleScope other = (RuleScope) obj; - return Objects.equals(scope, other.scope); - } - - @Override - public int hashCode() { - return Objects.hash(scope); - } - - public static Builder builder() { - return new Builder(); - } - - public static class Builder { - - private Map scope = new HashMap<>(); - - public Builder() {} - - public 
Builder(RuleScope otherScope) { - scope = new HashMap<>(otherScope.scope); - } - - public Builder exclude(String field, String filterId) { - scope.put(field, new FilterRef(filterId, FilterRef.FilterType.EXCLUDE)); - return this; - } - - public Builder include(String field, String filterId) { - scope.put(field, new FilterRef(filterId, FilterRef.FilterType.INCLUDE)); - return this; - } - - public RuleScope build() { - return new RuleScope(scope); - } - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/process/DataCounts.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/process/DataCounts.java deleted file mode 100644 index 20cc5db284302..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/process/DataCounts.java +++ /dev/null @@ -1,480 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.job.process; - -import org.elasticsearch.client.common.TimeUtil; -import org.elasticsearch.client.ml.job.config.Job; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ObjectParser.ValueType; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.time.Instant; -import java.util.Date; -import java.util.Objects; - -/** - * Job processed record counts. - *
<p>
- * The getInput... methods return the actual number of - * fields/records sent the API including invalid records. - * The getProcessed... methods are the number sent to the - * Engine. - *
<p>
- * The inputRecordCount field is calculated so it - * should not be set in deserialization but it should be serialised - * so the field is visible. - */ -public class DataCounts implements ToXContentObject { - - public static final ParseField PROCESSED_RECORD_COUNT = new ParseField("processed_record_count"); - public static final ParseField PROCESSED_FIELD_COUNT = new ParseField("processed_field_count"); - public static final ParseField INPUT_BYTES = new ParseField("input_bytes"); - public static final ParseField INPUT_RECORD_COUNT = new ParseField("input_record_count"); - public static final ParseField INPUT_FIELD_COUNT = new ParseField("input_field_count"); - public static final ParseField INVALID_DATE_COUNT = new ParseField("invalid_date_count"); - public static final ParseField MISSING_FIELD_COUNT = new ParseField("missing_field_count"); - public static final ParseField OUT_OF_ORDER_TIME_COUNT = new ParseField("out_of_order_timestamp_count"); - public static final ParseField EMPTY_BUCKET_COUNT = new ParseField("empty_bucket_count"); - public static final ParseField SPARSE_BUCKET_COUNT = new ParseField("sparse_bucket_count"); - public static final ParseField BUCKET_COUNT = new ParseField("bucket_count"); - public static final ParseField EARLIEST_RECORD_TIME = new ParseField("earliest_record_timestamp"); - public static final ParseField LATEST_RECORD_TIME = new ParseField("latest_record_timestamp"); - public static final ParseField LAST_DATA_TIME = new ParseField("last_data_time"); - public static final ParseField LATEST_EMPTY_BUCKET_TIME = new ParseField("latest_empty_bucket_timestamp"); - public static final ParseField LATEST_SPARSE_BUCKET_TIME = new ParseField("latest_sparse_bucket_timestamp"); - public static final ParseField LOG_TIME = new ParseField("log_time"); - - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "data_counts", - true, - a -> new DataCounts( - (String) a[0], - (long) a[1], - (long) a[2], - (long) a[3], - (long) a[4], - (long) a[5], - (long) a[6], - (long) a[7], - (long) a[8], - (long) a[9], - (long) a[10], - (Date) a[11], - (Date) a[12], - (Date) a[13], - (Date) a[14], - (Date) a[15], - (Instant) a[16] - ) - ); - - static { - PARSER.declareString(ConstructingObjectParser.constructorArg(), Job.ID); - PARSER.declareLong(ConstructingObjectParser.constructorArg(), PROCESSED_RECORD_COUNT); - PARSER.declareLong(ConstructingObjectParser.constructorArg(), PROCESSED_FIELD_COUNT); - PARSER.declareLong(ConstructingObjectParser.constructorArg(), INPUT_BYTES); - PARSER.declareLong(ConstructingObjectParser.constructorArg(), INPUT_FIELD_COUNT); - PARSER.declareLong(ConstructingObjectParser.constructorArg(), INVALID_DATE_COUNT); - PARSER.declareLong(ConstructingObjectParser.constructorArg(), MISSING_FIELD_COUNT); - PARSER.declareLong(ConstructingObjectParser.constructorArg(), OUT_OF_ORDER_TIME_COUNT); - PARSER.declareLong(ConstructingObjectParser.constructorArg(), EMPTY_BUCKET_COUNT); - PARSER.declareLong(ConstructingObjectParser.constructorArg(), SPARSE_BUCKET_COUNT); - PARSER.declareLong(ConstructingObjectParser.constructorArg(), BUCKET_COUNT); - PARSER.declareField( - ConstructingObjectParser.optionalConstructorArg(), - (p) -> TimeUtil.parseTimeField(p, EARLIEST_RECORD_TIME.getPreferredName()), - EARLIEST_RECORD_TIME, - ValueType.VALUE - ); - PARSER.declareField( - ConstructingObjectParser.optionalConstructorArg(), - (p) -> TimeUtil.parseTimeField(p, LATEST_RECORD_TIME.getPreferredName()), - LATEST_RECORD_TIME, - ValueType.VALUE - ); - 
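The remaining declarations below register the optional time fields. Once the static block completes, the parser is driven in the usual xcontent fashion; a hypothetical sketch with invented values (imports from org.elasticsearch.xcontent assumed, Void parse context):

    // Inside a method that throws IOException. All ten required long fields
    // plus job_id must be present; the time fields may be omitted.
    String json = "{\"job_id\":\"job-1\",\"processed_record_count\":95,"
        + "\"processed_field_count\":190,\"input_bytes\":1024,\"input_field_count\":200,"
        + "\"invalid_date_count\":3,\"missing_field_count\":0,"
        + "\"out_of_order_timestamp_count\":2,\"empty_bucket_count\":0,"
        + "\"sparse_bucket_count\":0,\"bucket_count\":10}";
    try (XContentParser p = JsonXContent.jsonXContent.createParser(
            NamedXContentRegistry.EMPTY, DeprecationHandler.IGNORE_DEPRECATIONS, json)) {
        DataCounts counts = DataCounts.PARSER.apply(p, null);
        assert counts.getProcessedRecordCount() == 95;
    }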
PARSER.declareField( - ConstructingObjectParser.optionalConstructorArg(), - (p) -> TimeUtil.parseTimeField(p, LAST_DATA_TIME.getPreferredName()), - LAST_DATA_TIME, - ValueType.VALUE - ); - PARSER.declareField( - ConstructingObjectParser.optionalConstructorArg(), - (p) -> TimeUtil.parseTimeField(p, LATEST_EMPTY_BUCKET_TIME.getPreferredName()), - LATEST_EMPTY_BUCKET_TIME, - ValueType.VALUE - ); - PARSER.declareField( - ConstructingObjectParser.optionalConstructorArg(), - (p) -> TimeUtil.parseTimeField(p, LATEST_SPARSE_BUCKET_TIME.getPreferredName()), - LATEST_SPARSE_BUCKET_TIME, - ValueType.VALUE - ); - PARSER.declareField( - ConstructingObjectParser.optionalConstructorArg(), - p -> TimeUtil.parseTimeFieldToInstant(p, LOG_TIME.getPreferredName()), - LOG_TIME, - ValueType.VALUE - ); - } - - private final String jobId; - private long processedRecordCount; - private long processedFieldCount; - private long inputBytes; - private long inputFieldCount; - private long invalidDateCount; - private long missingFieldCount; - private long outOfOrderTimeStampCount; - private long emptyBucketCount; - private long sparseBucketCount; - private long bucketCount; - private Date earliestRecordTimeStamp; - private Date latestRecordTimeStamp; - private Date lastDataTimeStamp; - private Date latestEmptyBucketTimeStamp; - private Date latestSparseBucketTimeStamp; - private Instant logTime; - - public DataCounts( - String jobId, - long processedRecordCount, - long processedFieldCount, - long inputBytes, - long inputFieldCount, - long invalidDateCount, - long missingFieldCount, - long outOfOrderTimeStampCount, - long emptyBucketCount, - long sparseBucketCount, - long bucketCount, - Date earliestRecordTimeStamp, - Date latestRecordTimeStamp, - Date lastDataTimeStamp, - Date latestEmptyBucketTimeStamp, - Date latestSparseBucketTimeStamp, - Instant logTime - ) { - this.jobId = jobId; - this.processedRecordCount = processedRecordCount; - this.processedFieldCount = processedFieldCount; - this.inputBytes = inputBytes; - this.inputFieldCount = inputFieldCount; - this.invalidDateCount = invalidDateCount; - this.missingFieldCount = missingFieldCount; - this.outOfOrderTimeStampCount = outOfOrderTimeStampCount; - this.emptyBucketCount = emptyBucketCount; - this.sparseBucketCount = sparseBucketCount; - this.bucketCount = bucketCount; - this.latestRecordTimeStamp = latestRecordTimeStamp; - this.earliestRecordTimeStamp = earliestRecordTimeStamp; - this.lastDataTimeStamp = lastDataTimeStamp; - this.latestEmptyBucketTimeStamp = latestEmptyBucketTimeStamp; - this.latestSparseBucketTimeStamp = latestSparseBucketTimeStamp; - this.logTime = logTime == null ? null : Instant.ofEpochMilli(logTime.toEpochMilli()); - } - - DataCounts(String jobId) { - this.jobId = jobId; - } - - public String getJobId() { - return jobId; - } - - /** - * Number of records processed by this job. - * This value is the number of records sent passed on to - * the engine i.e. {@linkplain #getInputRecordCount()} minus - * records with bad dates or out of order - * - * @return Number of records processed by this job {@code long} - */ - public long getProcessedRecordCount() { - return processedRecordCount; - } - - /** - * Number of data points (processed record count * the number - * of analysed fields) processed by this job. This count does - * not include the time field. 
- * - * @return Number of data points processed by this job {@code long} - */ - public long getProcessedFieldCount() { - return processedFieldCount; - } - - /** - * Total number of input records read. - * This = processed record count + date parse error records count - * + out of order record count. - *
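With the invented figures from the parsing sketch above (95 processed, 3 invalid dates, 2 out of order), the derived total works out as:

    // Mirrors the getInputRecordCount() body below; never read from JSON.
    long inputRecordCount = 95 + 2 + 3; // processed + out-of-order + invalid-date == 100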
<p>
- * Records with missing fields are counted as they are still written. - * - * @return Total number of input records read {@code long} - */ - public long getInputRecordCount() { - return processedRecordCount + outOfOrderTimeStampCount + invalidDateCount; - } - - /** - * The total number of bytes sent to this job. - * This value includes the bytes from any records - * that have been discarded for any reason - * e.g. because the date cannot be read - * - * @return Volume in bytes - */ - public long getInputBytes() { - return inputBytes; - } - - /** - * The total number of fields sent to the job - * including fields that aren't analysed. - * - * @return The total number of fields sent to the job - */ - public long getInputFieldCount() { - return inputFieldCount; - } - - /** - * The number of records with an invalid date field that could - * not be parsed or converted to epoch time. - * - * @return The number of records with an invalid date field - */ - public long getInvalidDateCount() { - return invalidDateCount; - } - - /** - * The number of missing fields that had been - * configured for analysis. - * - * @return The number of missing fields - */ - public long getMissingFieldCount() { - return missingFieldCount; - } - - /** - * The number of records with a timestamp that is - * before the time of the latest record. Records should - * be in ascending chronological order - * - * @return The number of records with a timestamp that is before the time of the latest record - */ - public long getOutOfOrderTimeStampCount() { - return outOfOrderTimeStampCount; - } - - /** - * The number of buckets with no records in it. Used to measure general data fitness and/or - * configuration problems (bucket span). - * - * @return Number of empty buckets processed by this job {@code long} - */ - public long getEmptyBucketCount() { - return emptyBucketCount; - } - - /** - * The number of buckets with few records compared to the overall counts. - * Used to measure general data fitness and/or configuration problems (bucket span). - * - * @return Number of sparse buckets processed by this job {@code long} - */ - public long getSparseBucketCount() { - return sparseBucketCount; - } - - /** - * The number of buckets overall. - * - * @return Number of buckets processed by this job {@code long} - */ - public long getBucketCount() { - return bucketCount; - } - - /** - * The time of the first record seen. - * - * @return The first record time - */ - public Date getEarliestRecordTimeStamp() { - return earliestRecordTimeStamp; - } - - /** - * The time of the latest record seen. - * - * @return Latest record time - */ - public Date getLatestRecordTimeStamp() { - return latestRecordTimeStamp; - } - - /** - * The wall clock time the latest record was seen. - * - * @return Wall clock time of the lastest record - */ - public Date getLastDataTimeStamp() { - return lastDataTimeStamp; - } - - /** - * The time of the latest empty bucket seen. - * - * @return Latest empty bucket time - */ - public Date getLatestEmptyBucketTimeStamp() { - return latestEmptyBucketTimeStamp; - } - - /** - * The time of the latest sparse bucket seen. - * - * @return Latest sparse bucket time - */ - public Date getLatestSparseBucketTimeStamp() { - return latestSparseBucketTimeStamp; - } - - /** - * The wall clock time at the point when this instance was created. 
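The toXContent implementation below pairs every timestamp with a derived "_string" variant via XContentBuilder.timeField. Roughly, for an invented epoch value (the exact rendering depends on the builder's date transformer):

    // With builder.humanReadable(true) both fields are written; with the
    // default of false only the raw epoch-millis field appears.
    builder.timeField("earliest_record_timestamp",
        "earliest_record_timestamp_string", 1546300800000L);
    // -> "earliest_record_timestamp": 1546300800000
    // -> "earliest_record_timestamp_string": an ISO-8601 rendering of 2019-01-01T00:00:00Z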
- * - * @return The wall clock time - */ - public Instant getLogTime() { - return logTime; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - - builder.field(Job.ID.getPreferredName(), jobId); - builder.field(PROCESSED_RECORD_COUNT.getPreferredName(), processedRecordCount); - builder.field(PROCESSED_FIELD_COUNT.getPreferredName(), processedFieldCount); - builder.field(INPUT_BYTES.getPreferredName(), inputBytes); - builder.field(INPUT_FIELD_COUNT.getPreferredName(), inputFieldCount); - builder.field(INVALID_DATE_COUNT.getPreferredName(), invalidDateCount); - builder.field(MISSING_FIELD_COUNT.getPreferredName(), missingFieldCount); - builder.field(OUT_OF_ORDER_TIME_COUNT.getPreferredName(), outOfOrderTimeStampCount); - builder.field(EMPTY_BUCKET_COUNT.getPreferredName(), emptyBucketCount); - builder.field(SPARSE_BUCKET_COUNT.getPreferredName(), sparseBucketCount); - builder.field(BUCKET_COUNT.getPreferredName(), bucketCount); - if (earliestRecordTimeStamp != null) { - builder.timeField( - EARLIEST_RECORD_TIME.getPreferredName(), - EARLIEST_RECORD_TIME.getPreferredName() + "_string", - earliestRecordTimeStamp.getTime() - ); - } - if (latestRecordTimeStamp != null) { - builder.timeField( - LATEST_RECORD_TIME.getPreferredName(), - LATEST_RECORD_TIME.getPreferredName() + "_string", - latestRecordTimeStamp.getTime() - ); - } - if (lastDataTimeStamp != null) { - builder.timeField( - LAST_DATA_TIME.getPreferredName(), - LAST_DATA_TIME.getPreferredName() + "_string", - lastDataTimeStamp.getTime() - ); - } - if (latestEmptyBucketTimeStamp != null) { - builder.timeField( - LATEST_EMPTY_BUCKET_TIME.getPreferredName(), - LATEST_EMPTY_BUCKET_TIME.getPreferredName() + "_string", - latestEmptyBucketTimeStamp.getTime() - ); - } - if (latestSparseBucketTimeStamp != null) { - builder.timeField( - LATEST_SPARSE_BUCKET_TIME.getPreferredName(), - LATEST_SPARSE_BUCKET_TIME.getPreferredName() + "_string", - latestSparseBucketTimeStamp.getTime() - ); - } - builder.field(INPUT_RECORD_COUNT.getPreferredName(), getInputRecordCount()); - if (logTime != null) { - builder.timeField(LOG_TIME.getPreferredName(), LOG_TIME.getPreferredName() + "_string", logTime.toEpochMilli()); - } - - builder.endObject(); - return builder; - } - - /** - * Equality test - */ - @Override - public boolean equals(Object other) { - if (this == other) { - return true; - } - - if (other == null || getClass() != other.getClass()) { - return false; - } - - DataCounts that = (DataCounts) other; - - return Objects.equals(this.jobId, that.jobId) - && this.processedRecordCount == that.processedRecordCount - && this.processedFieldCount == that.processedFieldCount - && this.inputBytes == that.inputBytes - && this.inputFieldCount == that.inputFieldCount - && this.invalidDateCount == that.invalidDateCount - && this.missingFieldCount == that.missingFieldCount - && this.outOfOrderTimeStampCount == that.outOfOrderTimeStampCount - && this.emptyBucketCount == that.emptyBucketCount - && this.sparseBucketCount == that.sparseBucketCount - && this.bucketCount == that.bucketCount - && Objects.equals(this.latestRecordTimeStamp, that.latestRecordTimeStamp) - && Objects.equals(this.earliestRecordTimeStamp, that.earliestRecordTimeStamp) - && Objects.equals(this.lastDataTimeStamp, that.lastDataTimeStamp) - && Objects.equals(this.latestEmptyBucketTimeStamp, that.latestEmptyBucketTimeStamp) - && Objects.equals(this.latestSparseBucketTimeStamp, that.latestSparseBucketTimeStamp) - && 
Objects.equals(this.logTime, that.logTime); - } - - @Override - public int hashCode() { - return Objects.hash( - jobId, - processedRecordCount, - processedFieldCount, - inputBytes, - inputFieldCount, - invalidDateCount, - missingFieldCount, - outOfOrderTimeStampCount, - lastDataTimeStamp, - emptyBucketCount, - sparseBucketCount, - bucketCount, - latestRecordTimeStamp, - earliestRecordTimeStamp, - latestEmptyBucketTimeStamp, - latestSparseBucketTimeStamp, - logTime - ); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/process/ModelSizeStats.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/process/ModelSizeStats.java deleted file mode 100644 index db3a3fa011738..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/process/ModelSizeStats.java +++ /dev/null @@ -1,606 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.job.process; - -import org.elasticsearch.client.common.TimeUtil; -import org.elasticsearch.client.ml.job.config.Job; -import org.elasticsearch.client.ml.job.results.Result; -import org.elasticsearch.core.Nullable; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ObjectParser.ValueType; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Date; -import java.util.Locale; -import java.util.Objects; - -/** - * Provide access to the C++ model size stats for the Java process. 
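A minimal sketch of building one of these by hand through the Builder declared near the bottom of the class (all field values invented; unset counts default to zero):

    // memory/categorization status and log_time get defaults in the Builder constructor.
    ModelSizeStats stats = new ModelSizeStats.Builder("job-1")
        .setModelBytes(123_456L)
        .setPeakModelBytes(150_000L)
        .setMemoryStatus(ModelSizeStats.MemoryStatus.SOFT_LIMIT)
        .build();
    assert stats.getMemoryStatus() == ModelSizeStats.MemoryStatus.SOFT_LIMIT;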
- */ -public class ModelSizeStats implements ToXContentObject { - - /** - * Result type - */ - public static final String RESULT_TYPE_VALUE = "model_size_stats"; - public static final ParseField RESULT_TYPE_FIELD = new ParseField(RESULT_TYPE_VALUE); - - /** - * Field Names - */ - public static final ParseField MODEL_BYTES_FIELD = new ParseField("model_bytes"); - public static final ParseField PEAK_MODEL_BYTES_FIELD = new ParseField("peak_model_bytes"); - public static final ParseField MODEL_BYTES_EXCEEDED_FIELD = new ParseField("model_bytes_exceeded"); - public static final ParseField MODEL_BYTES_MEMORY_LIMIT_FIELD = new ParseField("model_bytes_memory_limit"); - public static final ParseField TOTAL_BY_FIELD_COUNT_FIELD = new ParseField("total_by_field_count"); - public static final ParseField TOTAL_OVER_FIELD_COUNT_FIELD = new ParseField("total_over_field_count"); - public static final ParseField TOTAL_PARTITION_FIELD_COUNT_FIELD = new ParseField("total_partition_field_count"); - public static final ParseField BUCKET_ALLOCATION_FAILURES_COUNT_FIELD = new ParseField("bucket_allocation_failures_count"); - public static final ParseField MEMORY_STATUS_FIELD = new ParseField("memory_status"); - public static final ParseField ASSIGNMENT_MEMORY_BASIS_FIELD = new ParseField("assignment_memory_basis"); - public static final ParseField CATEGORIZED_DOC_COUNT_FIELD = new ParseField("categorized_doc_count"); - public static final ParseField TOTAL_CATEGORY_COUNT_FIELD = new ParseField("total_category_count"); - public static final ParseField FREQUENT_CATEGORY_COUNT_FIELD = new ParseField("frequent_category_count"); - public static final ParseField RARE_CATEGORY_COUNT_FIELD = new ParseField("rare_category_count"); - public static final ParseField DEAD_CATEGORY_COUNT_FIELD = new ParseField("dead_category_count"); - public static final ParseField FAILED_CATEGORY_COUNT_FIELD = new ParseField("failed_category_count"); - public static final ParseField CATEGORIZATION_STATUS_FIELD = new ParseField("categorization_status"); - public static final ParseField LOG_TIME_FIELD = new ParseField("log_time"); - public static final ParseField TIMESTAMP_FIELD = new ParseField("timestamp"); - - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - RESULT_TYPE_VALUE, - true, - a -> new Builder((String) a[0]) - ); - - static { - PARSER.declareString(ConstructingObjectParser.constructorArg(), Job.ID); - PARSER.declareLong(Builder::setModelBytes, MODEL_BYTES_FIELD); - PARSER.declareLong(Builder::setPeakModelBytes, PEAK_MODEL_BYTES_FIELD); - PARSER.declareLong(Builder::setModelBytesExceeded, MODEL_BYTES_EXCEEDED_FIELD); - PARSER.declareLong(Builder::setModelBytesMemoryLimit, MODEL_BYTES_MEMORY_LIMIT_FIELD); - PARSER.declareLong(Builder::setBucketAllocationFailuresCount, BUCKET_ALLOCATION_FAILURES_COUNT_FIELD); - PARSER.declareLong(Builder::setTotalByFieldCount, TOTAL_BY_FIELD_COUNT_FIELD); - PARSER.declareLong(Builder::setTotalOverFieldCount, TOTAL_OVER_FIELD_COUNT_FIELD); - PARSER.declareLong(Builder::setTotalPartitionFieldCount, TOTAL_PARTITION_FIELD_COUNT_FIELD); - PARSER.declareField(Builder::setMemoryStatus, p -> MemoryStatus.fromString(p.text()), MEMORY_STATUS_FIELD, ValueType.STRING); - PARSER.declareField( - Builder::setAssignmentMemoryBasis, - p -> AssignmentMemoryBasis.fromString(p.text()), - ASSIGNMENT_MEMORY_BASIS_FIELD, - ValueType.STRING - ); - PARSER.declareLong(Builder::setCategorizedDocCount, CATEGORIZED_DOC_COUNT_FIELD); - PARSER.declareLong(Builder::setTotalCategoryCount, 
TOTAL_CATEGORY_COUNT_FIELD); - PARSER.declareLong(Builder::setFrequentCategoryCount, FREQUENT_CATEGORY_COUNT_FIELD); - PARSER.declareLong(Builder::setRareCategoryCount, RARE_CATEGORY_COUNT_FIELD); - PARSER.declareLong(Builder::setDeadCategoryCount, DEAD_CATEGORY_COUNT_FIELD); - PARSER.declareLong(Builder::setFailedCategoryCount, FAILED_CATEGORY_COUNT_FIELD); - PARSER.declareField( - Builder::setCategorizationStatus, - p -> CategorizationStatus.fromString(p.text()), - CATEGORIZATION_STATUS_FIELD, - ValueType.STRING - ); - PARSER.declareField( - Builder::setLogTime, - (p) -> TimeUtil.parseTimeField(p, LOG_TIME_FIELD.getPreferredName()), - LOG_TIME_FIELD, - ValueType.VALUE - ); - PARSER.declareField( - Builder::setTimestamp, - (p) -> TimeUtil.parseTimeField(p, TIMESTAMP_FIELD.getPreferredName()), - TIMESTAMP_FIELD, - ValueType.VALUE - ); - } - - /** - * The status of the memory monitored by the ResourceMonitor. OK is default, - * SOFT_LIMIT means that the models have done some aggressive pruning to - * keep the memory below the limit, and HARD_LIMIT means that samples have - * been dropped - */ - public enum MemoryStatus { - OK, - SOFT_LIMIT, - HARD_LIMIT; - - public static MemoryStatus fromString(String statusName) { - return valueOf(statusName.trim().toUpperCase(Locale.ROOT)); - } - - @Override - public String toString() { - return name().toLowerCase(Locale.ROOT); - } - } - - /** - * Where will we get the memory requirement from when assigning this job to - * a node? There are three possibilities: - * 1. The job's model_memory_limit - * 2. The current model memory, i.e. what's reported in model_bytes of this object - * 3. The peak model memory, i.e. what's reported in peak_model_bytes of this object - * The field storing this enum can also be null, which means the - * assignment code will decide on the fly - this was the old behaviour prior - * to 7.11. - */ - public enum AssignmentMemoryBasis { - MODEL_MEMORY_LIMIT, - CURRENT_MODEL_BYTES, - PEAK_MODEL_BYTES; - - public static AssignmentMemoryBasis fromString(String statusName) { - return valueOf(statusName.trim().toUpperCase(Locale.ROOT)); - } - - @Override - public String toString() { - return name().toLowerCase(Locale.ROOT); - } - } - - /** - * The status of categorization for a job. 
OK is default, WARN - * means that inappropriate numbers of categories are being found - */ - public enum CategorizationStatus { - OK, - WARN; - - public static CategorizationStatus fromString(String statusName) { - return valueOf(statusName.trim().toUpperCase(Locale.ROOT)); - } - - @Override - public String toString() { - return name().toLowerCase(Locale.ROOT); - } - } - - private final String jobId; - private final long modelBytes; - private final Long peakModelBytes; - private final Long modelBytesExceeded; - private final Long modelBytesMemoryLimit; - private final long totalByFieldCount; - private final long totalOverFieldCount; - private final long totalPartitionFieldCount; - private final long bucketAllocationFailuresCount; - private final MemoryStatus memoryStatus; - private final AssignmentMemoryBasis assignmentMemoryBasis; - private final long categorizedDocCount; - private final long totalCategoryCount; - private final long frequentCategoryCount; - private final long rareCategoryCount; - private final long deadCategoryCount; - private final long failedCategoryCount; - private final CategorizationStatus categorizationStatus; - private final Date timestamp; - private final Date logTime; - - private ModelSizeStats( - String jobId, - long modelBytes, - Long peakModelBytes, - Long modelBytesExceeded, - Long modelBytesMemoryLimit, - long totalByFieldCount, - long totalOverFieldCount, - long totalPartitionFieldCount, - long bucketAllocationFailuresCount, - MemoryStatus memoryStatus, - AssignmentMemoryBasis assignmentMemoryBasis, - long categorizedDocCount, - long totalCategoryCount, - long frequentCategoryCount, - long rareCategoryCount, - long deadCategoryCount, - long failedCategoryCount, - CategorizationStatus categorizationStatus, - Date timestamp, - Date logTime - ) { - this.jobId = jobId; - this.modelBytes = modelBytes; - this.peakModelBytes = peakModelBytes; - this.modelBytesExceeded = modelBytesExceeded; - this.modelBytesMemoryLimit = modelBytesMemoryLimit; - this.totalByFieldCount = totalByFieldCount; - this.totalOverFieldCount = totalOverFieldCount; - this.totalPartitionFieldCount = totalPartitionFieldCount; - this.bucketAllocationFailuresCount = bucketAllocationFailuresCount; - this.memoryStatus = memoryStatus; - this.assignmentMemoryBasis = assignmentMemoryBasis; - this.categorizedDocCount = categorizedDocCount; - this.totalCategoryCount = totalCategoryCount; - this.frequentCategoryCount = frequentCategoryCount; - this.rareCategoryCount = rareCategoryCount; - this.deadCategoryCount = deadCategoryCount; - this.failedCategoryCount = failedCategoryCount; - this.categorizationStatus = categorizationStatus; - this.timestamp = timestamp; - this.logTime = logTime; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - - builder.field(Job.ID.getPreferredName(), jobId); - builder.field(Result.RESULT_TYPE.getPreferredName(), RESULT_TYPE_VALUE); - builder.field(MODEL_BYTES_FIELD.getPreferredName(), modelBytes); - if (peakModelBytes != null) { - builder.field(PEAK_MODEL_BYTES_FIELD.getPreferredName(), peakModelBytes); - } - if (modelBytesExceeded != null) { - builder.field(MODEL_BYTES_EXCEEDED_FIELD.getPreferredName(), modelBytesExceeded); - } - if (modelBytesMemoryLimit != null) { - builder.field(MODEL_BYTES_MEMORY_LIMIT_FIELD.getPreferredName(), modelBytesMemoryLimit); - } - builder.field(TOTAL_BY_FIELD_COUNT_FIELD.getPreferredName(), totalByFieldCount); - 
builder.field(TOTAL_OVER_FIELD_COUNT_FIELD.getPreferredName(), totalOverFieldCount); - builder.field(TOTAL_PARTITION_FIELD_COUNT_FIELD.getPreferredName(), totalPartitionFieldCount); - builder.field(BUCKET_ALLOCATION_FAILURES_COUNT_FIELD.getPreferredName(), bucketAllocationFailuresCount); - builder.field(MEMORY_STATUS_FIELD.getPreferredName(), memoryStatus); - if (assignmentMemoryBasis != null) { - builder.field(ASSIGNMENT_MEMORY_BASIS_FIELD.getPreferredName(), assignmentMemoryBasis); - } - builder.field(CATEGORIZED_DOC_COUNT_FIELD.getPreferredName(), categorizedDocCount); - builder.field(TOTAL_CATEGORY_COUNT_FIELD.getPreferredName(), totalCategoryCount); - builder.field(FREQUENT_CATEGORY_COUNT_FIELD.getPreferredName(), frequentCategoryCount); - builder.field(RARE_CATEGORY_COUNT_FIELD.getPreferredName(), rareCategoryCount); - builder.field(DEAD_CATEGORY_COUNT_FIELD.getPreferredName(), deadCategoryCount); - builder.field(FAILED_CATEGORY_COUNT_FIELD.getPreferredName(), failedCategoryCount); - builder.field(CATEGORIZATION_STATUS_FIELD.getPreferredName(), categorizationStatus); - builder.timeField(LOG_TIME_FIELD.getPreferredName(), LOG_TIME_FIELD.getPreferredName() + "_string", logTime.getTime()); - if (timestamp != null) { - builder.timeField(TIMESTAMP_FIELD.getPreferredName(), TIMESTAMP_FIELD.getPreferredName() + "_string", timestamp.getTime()); - } - - builder.endObject(); - return builder; - } - - public String getJobId() { - return jobId; - } - - public long getModelBytes() { - return modelBytes; - } - - public Long getPeakModelBytes() { - return peakModelBytes; - } - - public Long getModelBytesExceeded() { - return modelBytesExceeded; - } - - public Long getModelBytesMemoryLimit() { - return modelBytesMemoryLimit; - } - - public long getTotalByFieldCount() { - return totalByFieldCount; - } - - public long getTotalPartitionFieldCount() { - return totalPartitionFieldCount; - } - - public long getTotalOverFieldCount() { - return totalOverFieldCount; - } - - public long getBucketAllocationFailuresCount() { - return bucketAllocationFailuresCount; - } - - public MemoryStatus getMemoryStatus() { - return memoryStatus; - } - - @Nullable - public AssignmentMemoryBasis getAssignmentMemoryBasis() { - return assignmentMemoryBasis; - } - - public long getCategorizedDocCount() { - return categorizedDocCount; - } - - public long getTotalCategoryCount() { - return totalCategoryCount; - } - - public long getFrequentCategoryCount() { - return frequentCategoryCount; - } - - public long getRareCategoryCount() { - return rareCategoryCount; - } - - public long getDeadCategoryCount() { - return deadCategoryCount; - } - - public long getFailedCategoryCount() { - return failedCategoryCount; - } - - public CategorizationStatus getCategorizationStatus() { - return categorizationStatus; - } - - /** - * The timestamp of the last processed record when this instance was created. - * - * @return The record time - */ - public Date getTimestamp() { - return timestamp; - } - - /** - * The wall clock time at the point when this instance was created. 
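One wrinkle worth noting: unlike DataCounts, the PARSER above yields a Builder rather than a finished object, so a hypothetical caller has to finish the job itself (parser constructed as in the DataCounts sketch earlier):

    ModelSizeStats stats = ModelSizeStats.PARSER.apply(p, null).build();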
- * - * @return The wall clock time - */ - public Date getLogTime() { - return logTime; - } - - @Override - public int hashCode() { - return Objects.hash( - jobId, - modelBytes, - peakModelBytes, - modelBytesExceeded, - modelBytesMemoryLimit, - totalByFieldCount, - totalOverFieldCount, - totalPartitionFieldCount, - this.bucketAllocationFailuresCount, - memoryStatus, - assignmentMemoryBasis, - categorizedDocCount, - totalCategoryCount, - frequentCategoryCount, - rareCategoryCount, - deadCategoryCount, - failedCategoryCount, - categorizationStatus, - timestamp, - logTime - ); - } - - /** - * Compare all the fields. - */ - @Override - public boolean equals(Object other) { - if (this == other) { - return true; - } - - if (other == null || getClass() != other.getClass()) { - return false; - } - - ModelSizeStats that = (ModelSizeStats) other; - - return this.modelBytes == that.modelBytes - && Objects.equals(this.peakModelBytes, that.peakModelBytes) - && Objects.equals(this.modelBytesExceeded, that.modelBytesExceeded) - && Objects.equals(this.modelBytesMemoryLimit, that.modelBytesMemoryLimit) - && this.totalByFieldCount == that.totalByFieldCount - && this.totalOverFieldCount == that.totalOverFieldCount - && this.totalPartitionFieldCount == that.totalPartitionFieldCount - && this.bucketAllocationFailuresCount == that.bucketAllocationFailuresCount - && Objects.equals(this.memoryStatus, that.memoryStatus) - && Objects.equals(this.assignmentMemoryBasis, that.assignmentMemoryBasis) - && this.categorizedDocCount == that.categorizedDocCount - && this.totalCategoryCount == that.totalCategoryCount - && this.frequentCategoryCount == that.frequentCategoryCount - && this.rareCategoryCount == that.rareCategoryCount - && this.deadCategoryCount == that.deadCategoryCount - && this.failedCategoryCount == that.failedCategoryCount - && Objects.equals(this.categorizationStatus, that.categorizationStatus) - && Objects.equals(this.timestamp, that.timestamp) - && Objects.equals(this.logTime, that.logTime) - && Objects.equals(this.jobId, that.jobId); - } - - public static class Builder { - - private final String jobId; - private long modelBytes; - private Long peakModelBytes; - private Long modelBytesExceeded; - private Long modelBytesMemoryLimit; - private long totalByFieldCount; - private long totalOverFieldCount; - private long totalPartitionFieldCount; - private long bucketAllocationFailuresCount; - private MemoryStatus memoryStatus; - private AssignmentMemoryBasis assignmentMemoryBasis; - private long categorizedDocCount; - private long totalCategoryCount; - private long frequentCategoryCount; - private long rareCategoryCount; - private long deadCategoryCount; - private long failedCategoryCount; - private CategorizationStatus categorizationStatus; - private Date timestamp; - private Date logTime; - - public Builder(String jobId) { - this.jobId = jobId; - memoryStatus = MemoryStatus.OK; - categorizationStatus = CategorizationStatus.OK; - logTime = new Date(); - } - - public Builder(ModelSizeStats modelSizeStats) { - this.jobId = modelSizeStats.jobId; - this.modelBytes = modelSizeStats.modelBytes; - this.peakModelBytes = modelSizeStats.peakModelBytes; - this.modelBytesExceeded = modelSizeStats.modelBytesExceeded; - this.modelBytesMemoryLimit = modelSizeStats.modelBytesMemoryLimit; - this.totalByFieldCount = modelSizeStats.totalByFieldCount; - this.totalOverFieldCount = modelSizeStats.totalOverFieldCount; - this.totalPartitionFieldCount = modelSizeStats.totalPartitionFieldCount; - this.bucketAllocationFailuresCount = 
modelSizeStats.bucketAllocationFailuresCount; - this.memoryStatus = modelSizeStats.memoryStatus; - this.assignmentMemoryBasis = modelSizeStats.assignmentMemoryBasis; - this.categorizedDocCount = modelSizeStats.categorizedDocCount; - this.totalCategoryCount = modelSizeStats.totalCategoryCount; - this.frequentCategoryCount = modelSizeStats.frequentCategoryCount; - this.rareCategoryCount = modelSizeStats.rareCategoryCount; - this.deadCategoryCount = modelSizeStats.deadCategoryCount; - this.failedCategoryCount = modelSizeStats.failedCategoryCount; - this.categorizationStatus = modelSizeStats.categorizationStatus; - this.timestamp = modelSizeStats.timestamp; - this.logTime = modelSizeStats.logTime; - } - - public Builder setModelBytes(long modelBytes) { - this.modelBytes = modelBytes; - return this; - } - - public Builder setPeakModelBytes(long peakModelBytes) { - this.peakModelBytes = peakModelBytes; - return this; - } - - public Builder setModelBytesExceeded(long modelBytesExceeded) { - this.modelBytesExceeded = modelBytesExceeded; - return this; - } - - public Builder setModelBytesMemoryLimit(long modelBytesMemoryLimit) { - this.modelBytesMemoryLimit = modelBytesMemoryLimit; - return this; - } - - public Builder setTotalByFieldCount(long totalByFieldCount) { - this.totalByFieldCount = totalByFieldCount; - return this; - } - - public Builder setTotalPartitionFieldCount(long totalPartitionFieldCount) { - this.totalPartitionFieldCount = totalPartitionFieldCount; - return this; - } - - public Builder setTotalOverFieldCount(long totalOverFieldCount) { - this.totalOverFieldCount = totalOverFieldCount; - return this; - } - - public Builder setBucketAllocationFailuresCount(long bucketAllocationFailuresCount) { - this.bucketAllocationFailuresCount = bucketAllocationFailuresCount; - return this; - } - - public Builder setMemoryStatus(MemoryStatus memoryStatus) { - Objects.requireNonNull(memoryStatus, "[" + MEMORY_STATUS_FIELD.getPreferredName() + "] must not be null"); - this.memoryStatus = memoryStatus; - return this; - } - - public Builder setAssignmentMemoryBasis(AssignmentMemoryBasis assignmentMemoryBasis) { - this.assignmentMemoryBasis = assignmentMemoryBasis; - return this; - } - - public Builder setCategorizedDocCount(long categorizedDocCount) { - this.categorizedDocCount = categorizedDocCount; - return this; - } - - public Builder setTotalCategoryCount(long totalCategoryCount) { - this.totalCategoryCount = totalCategoryCount; - return this; - } - - public Builder setFrequentCategoryCount(long frequentCategoryCount) { - this.frequentCategoryCount = frequentCategoryCount; - return this; - } - - public Builder setRareCategoryCount(long rareCategoryCount) { - this.rareCategoryCount = rareCategoryCount; - return this; - } - - public Builder setDeadCategoryCount(long deadCategoryCount) { - this.deadCategoryCount = deadCategoryCount; - return this; - } - - public Builder setFailedCategoryCount(long failedCategoryCount) { - this.failedCategoryCount = failedCategoryCount; - return this; - } - - public Builder setCategorizationStatus(CategorizationStatus categorizationStatus) { - Objects.requireNonNull(categorizationStatus, "[" + CATEGORIZATION_STATUS_FIELD.getPreferredName() + "] must not be null"); - this.categorizationStatus = categorizationStatus; - return this; - } - - public Builder setTimestamp(Date timestamp) { - this.timestamp = timestamp; - return this; - } - - public Builder setLogTime(Date logTime) { - this.logTime = logTime; - return this; - } - - public ModelSizeStats build() { - return 
new ModelSizeStats( - jobId, - modelBytes, - peakModelBytes, - modelBytesExceeded, - modelBytesMemoryLimit, - totalByFieldCount, - totalOverFieldCount, - totalPartitionFieldCount, - bucketAllocationFailuresCount, - memoryStatus, - assignmentMemoryBasis, - categorizedDocCount, - totalCategoryCount, - frequentCategoryCount, - rareCategoryCount, - deadCategoryCount, - failedCategoryCount, - categorizationStatus, - timestamp, - logTime - ); - } - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/process/ModelSnapshot.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/process/ModelSnapshot.java deleted file mode 100644 index e21b8f512a143..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/process/ModelSnapshot.java +++ /dev/null @@ -1,361 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.job.process; - -import org.elasticsearch.Version; -import org.elasticsearch.client.common.TimeUtil; -import org.elasticsearch.client.ml.job.config.Job; -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ObjectParser.ValueType; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Date; -import java.util.Objects; - -/** - * ModelSnapshot Result POJO - */ -public class ModelSnapshot implements ToXContentObject { - /** - * Field Names - */ - public static final ParseField TIMESTAMP = new ParseField("timestamp"); - public static final ParseField DESCRIPTION = new ParseField("description"); - public static final ParseField SNAPSHOT_DOC_COUNT = new ParseField("snapshot_doc_count"); - public static final ParseField LATEST_RECORD_TIME = new ParseField("latest_record_time_stamp"); - public static final ParseField LATEST_RESULT_TIME = new ParseField("latest_result_time_stamp"); - public static final ParseField QUANTILES = new ParseField("quantiles"); - public static final ParseField RETAIN = new ParseField("retain"); - public static final ParseField MIN_VERSION = new ParseField("min_version"); - public static final ParseField SNAPSHOT_ID = new ParseField("snapshot_id"); - - public static final ObjectParser PARSER = new ObjectParser<>("model_snapshot", true, Builder::new); - - static { - PARSER.declareString(Builder::setJobId, Job.ID); - PARSER.declareString(Builder::setMinVersion, MIN_VERSION); - PARSER.declareField( - Builder::setTimestamp, - (p) -> TimeUtil.parseTimeField(p, TIMESTAMP.getPreferredName()), - TIMESTAMP, - ValueType.VALUE - ); - PARSER.declareString(Builder::setDescription, DESCRIPTION); - PARSER.declareString(Builder::setSnapshotId, SNAPSHOT_ID); - PARSER.declareInt(Builder::setSnapshotDocCount, SNAPSHOT_DOC_COUNT); - PARSER.declareObject(Builder::setModelSizeStats, ModelSizeStats.PARSER, ModelSizeStats.RESULT_TYPE_FIELD); - PARSER.declareField( - Builder::setLatestRecordTimeStamp, - (p) -> TimeUtil.parseTimeField(p, LATEST_RECORD_TIME.getPreferredName()), - LATEST_RECORD_TIME, - ValueType.VALUE - ); - PARSER.declareField( - Builder::setLatestResultTimeStamp, - (p) -> 
TimeUtil.parseTimeField(p, LATEST_RESULT_TIME.getPreferredName()), - LATEST_RESULT_TIME, - ValueType.VALUE - ); - PARSER.declareObject(Builder::setQuantiles, Quantiles.PARSER, QUANTILES); - PARSER.declareBoolean(Builder::setRetain, RETAIN); - } - - private final String jobId; - - /** - * The minimum version a node should have to be able - * to read this model snapshot. - */ - private final Version minVersion; - - private final Date timestamp; - private final String description; - private final String snapshotId; - private final int snapshotDocCount; - private final ModelSizeStats modelSizeStats; - private final Date latestRecordTimeStamp; - private final Date latestResultTimeStamp; - private final Quantiles quantiles; - private final boolean retain; - - private ModelSnapshot( - String jobId, - Version minVersion, - Date timestamp, - String description, - String snapshotId, - int snapshotDocCount, - ModelSizeStats modelSizeStats, - Date latestRecordTimeStamp, - Date latestResultTimeStamp, - Quantiles quantiles, - boolean retain - ) { - this.jobId = jobId; - this.minVersion = minVersion; - this.timestamp = timestamp; - this.description = description; - this.snapshotId = snapshotId; - this.snapshotDocCount = snapshotDocCount; - this.modelSizeStats = modelSizeStats; - this.latestRecordTimeStamp = latestRecordTimeStamp; - this.latestResultTimeStamp = latestResultTimeStamp; - this.quantiles = quantiles; - this.retain = retain; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(Job.ID.getPreferredName(), jobId); - builder.field(MIN_VERSION.getPreferredName(), minVersion); - if (timestamp != null) { - builder.timeField(TIMESTAMP.getPreferredName(), TIMESTAMP.getPreferredName() + "_string", timestamp.getTime()); - } - if (description != null) { - builder.field(DESCRIPTION.getPreferredName(), description); - } - if (snapshotId != null) { - builder.field(SNAPSHOT_ID.getPreferredName(), snapshotId); - } - builder.field(SNAPSHOT_DOC_COUNT.getPreferredName(), snapshotDocCount); - if (modelSizeStats != null) { - builder.field(ModelSizeStats.RESULT_TYPE_FIELD.getPreferredName(), modelSizeStats); - } - if (latestRecordTimeStamp != null) { - builder.timeField( - LATEST_RECORD_TIME.getPreferredName(), - LATEST_RECORD_TIME.getPreferredName() + "_string", - latestRecordTimeStamp.getTime() - ); - } - if (latestResultTimeStamp != null) { - builder.timeField( - LATEST_RESULT_TIME.getPreferredName(), - LATEST_RESULT_TIME.getPreferredName() + "_string", - latestResultTimeStamp.getTime() - ); - } - if (quantiles != null) { - builder.field(QUANTILES.getPreferredName(), quantiles); - } - builder.field(RETAIN.getPreferredName(), retain); - builder.endObject(); - return builder; - } - - public String getJobId() { - return jobId; - } - - public Version getMinVersion() { - return minVersion; - } - - public Date getTimestamp() { - return timestamp; - } - - public String getDescription() { - return description; - } - - public String getSnapshotId() { - return snapshotId; - } - - public int getSnapshotDocCount() { - return snapshotDocCount; - } - - public ModelSizeStats getModelSizeStats() { - return modelSizeStats; - } - - public Quantiles getQuantiles() { - return quantiles; - } - - public boolean getRetain() { - return retain; - } - - public Date getLatestRecordTimeStamp() { - return latestRecordTimeStamp; - } - - public Date getLatestResultTimeStamp() { - return latestResultTimeStamp; - } - - @Override - public int 
hashCode() { - return Objects.hash( - jobId, - minVersion, - timestamp, - description, - snapshotId, - quantiles, - snapshotDocCount, - modelSizeStats, - latestRecordTimeStamp, - latestResultTimeStamp, - retain - ); - } - - /** - * Compare all the fields. - */ - @Override - public boolean equals(Object other) { - if (this == other) { - return true; - } - - if (other == null || getClass() != other.getClass()) { - return false; - } - - ModelSnapshot that = (ModelSnapshot) other; - - return Objects.equals(this.jobId, that.jobId) - && Objects.equals(this.minVersion, that.minVersion) - && Objects.equals(this.timestamp, that.timestamp) - && Objects.equals(this.description, that.description) - && Objects.equals(this.snapshotId, that.snapshotId) - && this.snapshotDocCount == that.snapshotDocCount - && Objects.equals(this.modelSizeStats, that.modelSizeStats) - && Objects.equals(this.quantiles, that.quantiles) - && Objects.equals(this.latestRecordTimeStamp, that.latestRecordTimeStamp) - && Objects.equals(this.latestResultTimeStamp, that.latestResultTimeStamp) - && this.retain == that.retain; - } - - public static class Builder { - private String jobId; - - // Stored snapshot documents created prior to 6.3.0 will have no value for min_version. - private Version minVersion = Version.fromString("6.3.0"); - - private Date timestamp; - private String description; - private String snapshotId; - private int snapshotDocCount; - private ModelSizeStats modelSizeStats; - private Date latestRecordTimeStamp; - private Date latestResultTimeStamp; - private Quantiles quantiles; - private boolean retain; - - public Builder() {} - - public Builder(String jobId) { - this.jobId = jobId; - } - - public Builder(ModelSnapshot modelSnapshot) { - this.jobId = modelSnapshot.jobId; - this.timestamp = modelSnapshot.timestamp; - this.description = modelSnapshot.description; - this.snapshotId = modelSnapshot.snapshotId; - this.snapshotDocCount = modelSnapshot.snapshotDocCount; - this.modelSizeStats = modelSnapshot.modelSizeStats; - this.latestRecordTimeStamp = modelSnapshot.latestRecordTimeStamp; - this.latestResultTimeStamp = modelSnapshot.latestResultTimeStamp; - this.quantiles = modelSnapshot.quantiles; - this.retain = modelSnapshot.retain; - this.minVersion = modelSnapshot.minVersion; - } - - public Builder setJobId(String jobId) { - this.jobId = jobId; - return this; - } - - Builder setMinVersion(Version minVersion) { - this.minVersion = minVersion; - return this; - } - - Builder setMinVersion(String minVersion) { - this.minVersion = Version.fromString(minVersion); - return this; - } - - public Builder setTimestamp(Date timestamp) { - this.timestamp = timestamp; - return this; - } - - public Builder setDescription(String description) { - this.description = description; - return this; - } - - public Builder setSnapshotId(String snapshotId) { - this.snapshotId = snapshotId; - return this; - } - - public Builder setSnapshotDocCount(int snapshotDocCount) { - this.snapshotDocCount = snapshotDocCount; - return this; - } - - public Builder setModelSizeStats(ModelSizeStats.Builder modelSizeStats) { - this.modelSizeStats = modelSizeStats.build(); - return this; - } - - public Builder setModelSizeStats(ModelSizeStats modelSizeStats) { - this.modelSizeStats = modelSizeStats; - return this; - } - - public Builder setLatestRecordTimeStamp(Date latestRecordTimeStamp) { - this.latestRecordTimeStamp = latestRecordTimeStamp; - return this; - } - - public Builder setLatestResultTimeStamp(Date latestResultTimeStamp) { - 
this.latestResultTimeStamp = latestResultTimeStamp; - return this; - } - - public Builder setQuantiles(Quantiles quantiles) { - this.quantiles = quantiles; - return this; - } - - public Builder setRetain(boolean value) { - this.retain = value; - return this; - } - - public ModelSnapshot build() { - return new ModelSnapshot( - jobId, - minVersion, - timestamp, - description, - snapshotId, - snapshotDocCount, - modelSizeStats, - latestRecordTimeStamp, - latestResultTimeStamp, - quantiles, - retain - ); - } - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/process/Quantiles.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/process/Quantiles.java deleted file mode 100644 index 968447bcfa4dd..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/process/Quantiles.java +++ /dev/null @@ -1,104 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.job.process; - -import org.elasticsearch.client.ml.job.config.Job; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ObjectParser.ValueType; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Date; -import java.util.Objects; - -/** - * Quantiles Result POJO - */ -public class Quantiles implements ToXContentObject { - - /** - * Field Names - */ - public static final ParseField TIMESTAMP = new ParseField("timestamp"); - public static final ParseField QUANTILE_STATE = new ParseField("quantile_state"); - - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "quantiles", - true, - a -> new Quantiles((String) a[0], (Date) a[1], (String) a[2]) - ); - - static { - PARSER.declareString(ConstructingObjectParser.constructorArg(), Job.ID); - PARSER.declareField(ConstructingObjectParser.optionalConstructorArg(), p -> new Date(p.longValue()), TIMESTAMP, ValueType.LONG); - PARSER.declareString(ConstructingObjectParser.constructorArg(), QUANTILE_STATE); - } - - private final String jobId; - private final Date timestamp; - private final String quantileState; - - public Quantiles(String jobId, Date timestamp, String quantileState) { - this.jobId = jobId; - this.timestamp = Objects.requireNonNull(timestamp); - this.quantileState = Objects.requireNonNull(quantileState); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(Job.ID.getPreferredName(), jobId); - if (timestamp != null) { - builder.field(TIMESTAMP.getPreferredName(), timestamp.getTime()); - } - if (quantileState != null) { - builder.field(QUANTILE_STATE.getPreferredName(), quantileState); - } - builder.endObject(); - return builder; - } - - public String getJobId() { - return jobId; - } - - public Date getTimestamp() { - return timestamp; - } - - public String getQuantileState() { - return quantileState; - } - - @Override - public int hashCode() { - return Objects.hash(jobId, timestamp, quantileState); - } - - /** - * Compare all the fields. 
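A small sketch of constructing Quantiles directly (invented values). Note the mismatch visible above: the parser marks timestamp as an optional constructor argument, yet the constructor rejects null, so a document missing that field would fail to parse:

    // Both timestamp and quantileState must be non-null here.
    Quantiles q = new Quantiles("job-1", new Date(1546300800000L), "<opaque model state>");
    assert "job-1".equals(q.getJobId());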
- */ - @Override - public boolean equals(Object other) { - if (this == other) { - return true; - } - - if (other == null || getClass() != other.getClass()) { - return false; - } - - Quantiles that = (Quantiles) other; - - return Objects.equals(this.jobId, that.jobId) - && Objects.equals(this.timestamp, that.timestamp) - && Objects.equals(this.quantileState, that.quantileState); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/process/TimingStats.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/process/TimingStats.java deleted file mode 100644 index 60ed9252affde..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/process/TimingStats.java +++ /dev/null @@ -1,200 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.job.process; - -import org.elasticsearch.client.ml.job.config.Job; -import org.elasticsearch.common.Strings; -import org.elasticsearch.core.Nullable; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContent; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Objects; - -import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; -import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; - -/** - * Stats that give more insight into timing of various operations performed as part of anomaly detection job. 
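As the defaulting logic below shows, a payload carrying nothing but job_id still parses: bucket_count and the total processing time fall back to zero via getOrDefault, while the remaining statistics stay null. A hypothetical check (parser built as in the DataCounts sketch, over {"job_id":"job-1"}):

    TimingStats ts = TimingStats.PARSER.apply(p, null);
    assert ts.getBucketCount() == 0L;
    assert ts.getTotalBucketProcessingTimeMs() == 0.0;
    assert ts.getMinBucketProcessingTimeMs() == null;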
- */ -public class TimingStats implements ToXContentObject { - - public static final ParseField BUCKET_COUNT = new ParseField("bucket_count"); - public static final ParseField TOTAL_BUCKET_PROCESSING_TIME_MS = new ParseField("total_bucket_processing_time_ms"); - public static final ParseField MIN_BUCKET_PROCESSING_TIME_MS = new ParseField("minimum_bucket_processing_time_ms"); - public static final ParseField MAX_BUCKET_PROCESSING_TIME_MS = new ParseField("maximum_bucket_processing_time_ms"); - public static final ParseField AVG_BUCKET_PROCESSING_TIME_MS = new ParseField("average_bucket_processing_time_ms"); - public static final ParseField EXPONENTIAL_AVG_BUCKET_PROCESSING_TIME_MS = new ParseField( - "exponential_average_bucket_processing_time_ms" - ); - public static final ParseField EXPONENTIAL_AVG_BUCKET_PROCESSING_TIME_PER_HOUR_MS = new ParseField( - "exponential_average_bucket_processing_time_per_hour_ms" - ); - - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>("timing_stats", true, args -> { - String jobId = (String) args[0]; - Long bucketCount = (Long) args[1]; - Double totalBucketProcessingTimeMs = (Double) args[2]; - Double minBucketProcessingTimeMs = (Double) args[3]; - Double maxBucketProcessingTimeMs = (Double) args[4]; - Double avgBucketProcessingTimeMs = (Double) args[5]; - Double exponentialAvgBucketProcessingTimeMs = (Double) args[6]; - Double exponentialAvgBucketProcessingTimePerHourMs = (Double) args[7]; - return new TimingStats( - jobId, - getOrDefault(bucketCount, 0L), - getOrDefault(totalBucketProcessingTimeMs, 0.0), - minBucketProcessingTimeMs, - maxBucketProcessingTimeMs, - avgBucketProcessingTimeMs, - exponentialAvgBucketProcessingTimeMs, - exponentialAvgBucketProcessingTimePerHourMs - ); - }); - - static { - PARSER.declareString(constructorArg(), Job.ID); - PARSER.declareLong(optionalConstructorArg(), BUCKET_COUNT); - PARSER.declareDouble(optionalConstructorArg(), TOTAL_BUCKET_PROCESSING_TIME_MS); - PARSER.declareDouble(optionalConstructorArg(), MIN_BUCKET_PROCESSING_TIME_MS); - PARSER.declareDouble(optionalConstructorArg(), MAX_BUCKET_PROCESSING_TIME_MS); - PARSER.declareDouble(optionalConstructorArg(), AVG_BUCKET_PROCESSING_TIME_MS); - PARSER.declareDouble(optionalConstructorArg(), EXPONENTIAL_AVG_BUCKET_PROCESSING_TIME_MS); - PARSER.declareDouble(optionalConstructorArg(), EXPONENTIAL_AVG_BUCKET_PROCESSING_TIME_PER_HOUR_MS); - } - - private final String jobId; - private long bucketCount; - private double totalBucketProcessingTimeMs; - private Double minBucketProcessingTimeMs; - private Double maxBucketProcessingTimeMs; - private Double avgBucketProcessingTimeMs; - private Double exponentialAvgBucketProcessingTimeMs; - private Double exponentialAvgBucketProcessingTimePerHourMs; - - public TimingStats( - String jobId, - long bucketCount, - double totalBucketProcessingTimeMs, - @Nullable Double minBucketProcessingTimeMs, - @Nullable Double maxBucketProcessingTimeMs, - @Nullable Double avgBucketProcessingTimeMs, - @Nullable Double exponentialAvgBucketProcessingTimeMs, - @Nullable Double exponentialAvgBucketProcessingTimePerHourMs - ) { - this.jobId = jobId; - this.bucketCount = bucketCount; - this.totalBucketProcessingTimeMs = totalBucketProcessingTimeMs; - this.minBucketProcessingTimeMs = minBucketProcessingTimeMs; - this.maxBucketProcessingTimeMs = maxBucketProcessingTimeMs; - this.avgBucketProcessingTimeMs = avgBucketProcessingTimeMs; - this.exponentialAvgBucketProcessingTimeMs = exponentialAvgBucketProcessingTimeMs; - 
this.exponentialAvgBucketProcessingTimePerHourMs = exponentialAvgBucketProcessingTimePerHourMs; - } - - public String getJobId() { - return jobId; - } - - public long getBucketCount() { - return bucketCount; - } - - public double getTotalBucketProcessingTimeMs() { - return totalBucketProcessingTimeMs; - } - - public Double getMinBucketProcessingTimeMs() { - return minBucketProcessingTimeMs; - } - - public Double getMaxBucketProcessingTimeMs() { - return maxBucketProcessingTimeMs; - } - - public Double getAvgBucketProcessingTimeMs() { - return avgBucketProcessingTimeMs; - } - - public Double getExponentialAvgBucketProcessingTimeMs() { - return exponentialAvgBucketProcessingTimeMs; - } - - public Double getExponentialAvgBucketProcessingTimePerHourMs() { - return exponentialAvgBucketProcessingTimePerHourMs; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { - builder.startObject(); - builder.field(Job.ID.getPreferredName(), jobId); - builder.field(BUCKET_COUNT.getPreferredName(), bucketCount); - builder.field(TOTAL_BUCKET_PROCESSING_TIME_MS.getPreferredName(), totalBucketProcessingTimeMs); - if (minBucketProcessingTimeMs != null) { - builder.field(MIN_BUCKET_PROCESSING_TIME_MS.getPreferredName(), minBucketProcessingTimeMs); - } - if (maxBucketProcessingTimeMs != null) { - builder.field(MAX_BUCKET_PROCESSING_TIME_MS.getPreferredName(), maxBucketProcessingTimeMs); - } - if (avgBucketProcessingTimeMs != null) { - builder.field(AVG_BUCKET_PROCESSING_TIME_MS.getPreferredName(), avgBucketProcessingTimeMs); - } - if (exponentialAvgBucketProcessingTimeMs != null) { - builder.field(EXPONENTIAL_AVG_BUCKET_PROCESSING_TIME_MS.getPreferredName(), exponentialAvgBucketProcessingTimeMs); - } - if (exponentialAvgBucketProcessingTimePerHourMs != null) { - builder.field( - EXPONENTIAL_AVG_BUCKET_PROCESSING_TIME_PER_HOUR_MS.getPreferredName(), - exponentialAvgBucketProcessingTimePerHourMs - ); - } - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object o) { - if (o == this) return true; - if (o == null || getClass() != o.getClass()) return false; - TimingStats that = (TimingStats) o; - return Objects.equals(this.jobId, that.jobId) - && this.bucketCount == that.bucketCount - && this.totalBucketProcessingTimeMs == that.totalBucketProcessingTimeMs - && Objects.equals(this.minBucketProcessingTimeMs, that.minBucketProcessingTimeMs) - && Objects.equals(this.maxBucketProcessingTimeMs, that.maxBucketProcessingTimeMs) - && Objects.equals(this.avgBucketProcessingTimeMs, that.avgBucketProcessingTimeMs) - && Objects.equals(this.exponentialAvgBucketProcessingTimeMs, that.exponentialAvgBucketProcessingTimeMs) - && Objects.equals(this.exponentialAvgBucketProcessingTimePerHourMs, that.exponentialAvgBucketProcessingTimePerHourMs); - } - - @Override - public int hashCode() { - return Objects.hash( - jobId, - bucketCount, - totalBucketProcessingTimeMs, - minBucketProcessingTimeMs, - maxBucketProcessingTimeMs, - avgBucketProcessingTimeMs, - exponentialAvgBucketProcessingTimeMs, - exponentialAvgBucketProcessingTimePerHourMs - ); - } - - @Override - public String toString() { - return Strings.toString(this); - } - - private static T getOrDefault(@Nullable T value, T defaultValue) { - return value != null ? 
value : defaultValue; - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/results/AnomalyCause.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/results/AnomalyCause.java deleted file mode 100644 index 5be75c52b19a6..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/results/AnomalyCause.java +++ /dev/null @@ -1,322 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.job.results; - -import org.elasticsearch.client.ml.job.config.DetectorFunction; -import org.elasticsearch.common.geo.GeoPoint; -import org.elasticsearch.core.Nullable; -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Collections; -import java.util.List; -import java.util.Objects; - -/** - * Anomaly Cause POJO. - * Used as a nested level inside population anomaly records. - */ -public class AnomalyCause implements ToXContentObject { - - public static final ParseField ANOMALY_CAUSE = new ParseField("anomaly_cause"); - - /** - * Result fields - */ - public static final ParseField PROBABILITY = new ParseField("probability"); - public static final ParseField OVER_FIELD_NAME = new ParseField("over_field_name"); - public static final ParseField OVER_FIELD_VALUE = new ParseField("over_field_value"); - public static final ParseField BY_FIELD_NAME = new ParseField("by_field_name"); - public static final ParseField BY_FIELD_VALUE = new ParseField("by_field_value"); - public static final ParseField CORRELATED_BY_FIELD_VALUE = new ParseField("correlated_by_field_value"); - public static final ParseField PARTITION_FIELD_NAME = new ParseField("partition_field_name"); - public static final ParseField PARTITION_FIELD_VALUE = new ParseField("partition_field_value"); - public static final ParseField FUNCTION = new ParseField("function"); - public static final ParseField FUNCTION_DESCRIPTION = new ParseField("function_description"); - public static final ParseField TYPICAL = new ParseField("typical"); - public static final ParseField ACTUAL = new ParseField("actual"); - public static final ParseField INFLUENCERS = new ParseField("influencers"); - - /** - * Metric Results - */ - public static final ParseField FIELD_NAME = new ParseField("field_name"); - - public static final ObjectParser PARSER = new ObjectParser<>( - ANOMALY_CAUSE.getPreferredName(), - true, - AnomalyCause::new - ); - - static { - PARSER.declareDouble(AnomalyCause::setProbability, PROBABILITY); - PARSER.declareString(AnomalyCause::setByFieldName, BY_FIELD_NAME); - PARSER.declareString(AnomalyCause::setByFieldValue, BY_FIELD_VALUE); - PARSER.declareString(AnomalyCause::setCorrelatedByFieldValue, CORRELATED_BY_FIELD_VALUE); - PARSER.declareString(AnomalyCause::setPartitionFieldName, PARTITION_FIELD_NAME); - PARSER.declareString(AnomalyCause::setPartitionFieldValue, PARTITION_FIELD_VALUE); - PARSER.declareString(AnomalyCause::setFunction, FUNCTION); - PARSER.declareString(AnomalyCause::setFunctionDescription, FUNCTION_DESCRIPTION); - 
PARSER.declareDoubleArray(AnomalyCause::setTypical, TYPICAL); - PARSER.declareDoubleArray(AnomalyCause::setActual, ACTUAL); - PARSER.declareString(AnomalyCause::setFieldName, FIELD_NAME); - PARSER.declareString(AnomalyCause::setOverFieldName, OVER_FIELD_NAME); - PARSER.declareString(AnomalyCause::setOverFieldValue, OVER_FIELD_VALUE); - PARSER.declareObjectArray(AnomalyCause::setInfluencers, Influence.PARSER, INFLUENCERS); - } - - private double probability; - private String byFieldName; - private String byFieldValue; - private String correlatedByFieldValue; - private String partitionFieldName; - private String partitionFieldValue; - private String function; - private String functionDescription; - private List typical; - private List actual; - private String fieldName; - private String overFieldName; - private String overFieldValue; - - private List influencers; - - AnomalyCause() {} - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(PROBABILITY.getPreferredName(), probability); - if (byFieldName != null) { - builder.field(BY_FIELD_NAME.getPreferredName(), byFieldName); - } - if (byFieldValue != null) { - builder.field(BY_FIELD_VALUE.getPreferredName(), byFieldValue); - } - if (correlatedByFieldValue != null) { - builder.field(CORRELATED_BY_FIELD_VALUE.getPreferredName(), correlatedByFieldValue); - } - if (partitionFieldName != null) { - builder.field(PARTITION_FIELD_NAME.getPreferredName(), partitionFieldName); - } - if (partitionFieldValue != null) { - builder.field(PARTITION_FIELD_VALUE.getPreferredName(), partitionFieldValue); - } - if (function != null) { - builder.field(FUNCTION.getPreferredName(), function); - } - if (functionDescription != null) { - builder.field(FUNCTION_DESCRIPTION.getPreferredName(), functionDescription); - } - if (typical != null) { - builder.field(TYPICAL.getPreferredName(), typical); - } - if (actual != null) { - builder.field(ACTUAL.getPreferredName(), actual); - } - if (fieldName != null) { - builder.field(FIELD_NAME.getPreferredName(), fieldName); - } - if (overFieldName != null) { - builder.field(OVER_FIELD_NAME.getPreferredName(), overFieldName); - } - if (overFieldValue != null) { - builder.field(OVER_FIELD_VALUE.getPreferredName(), overFieldValue); - } - if (influencers != null) { - builder.field(INFLUENCERS.getPreferredName(), influencers); - } - builder.endObject(); - return builder; - } - - public double getProbability() { - return probability; - } - - void setProbability(double value) { - probability = value; - } - - public String getByFieldName() { - return byFieldName; - } - - void setByFieldName(String value) { - byFieldName = value; - } - - public String getByFieldValue() { - return byFieldValue; - } - - void setByFieldValue(String value) { - byFieldValue = value; - } - - public String getCorrelatedByFieldValue() { - return correlatedByFieldValue; - } - - void setCorrelatedByFieldValue(String value) { - correlatedByFieldValue = value; - } - - public String getPartitionFieldName() { - return partitionFieldName; - } - - void setPartitionFieldName(String field) { - partitionFieldName = field; - } - - public String getPartitionFieldValue() { - return partitionFieldValue; - } - - void setPartitionFieldValue(String value) { - partitionFieldValue = value; - } - - public String getFunction() { - return function; - } - - void setFunction(String name) { - function = name; - } - - public String getFunctionDescription() { - return functionDescription; - } - 
- void setFunctionDescription(String functionDescription) { - this.functionDescription = functionDescription; - } - - public List getTypical() { - return typical; - } - - void setTypical(List typical) { - this.typical = Collections.unmodifiableList(typical); - } - - public List getActual() { - return actual; - } - - void setActual(List actual) { - this.actual = Collections.unmodifiableList(actual); - } - - public String getFieldName() { - return fieldName; - } - - void setFieldName(String field) { - fieldName = field; - } - - public String getOverFieldName() { - return overFieldName; - } - - void setOverFieldName(String name) { - overFieldName = name; - } - - public String getOverFieldValue() { - return overFieldValue; - } - - void setOverFieldValue(String value) { - overFieldValue = value; - } - - public List getInfluencers() { - return influencers; - } - - void setInfluencers(List influencers) { - this.influencers = Collections.unmodifiableList(influencers); - } - - @Nullable - public GeoPoint getTypicalGeoPoint() { - if (DetectorFunction.LAT_LONG.getFullName().equals(function) == false || typical == null) { - return null; - } - if (typical.size() == 2) { - return new GeoPoint(typical.get(0), typical.get(1)); - } - return null; - } - - @Nullable - public GeoPoint getActualGeoPoint() { - if (DetectorFunction.LAT_LONG.getFullName().equals(function) == false || actual == null) { - return null; - } - if (actual.size() == 2) { - return new GeoPoint(actual.get(0), actual.get(1)); - } - return null; - } - - @Override - public int hashCode() { - return Objects.hash( - probability, - actual, - typical, - byFieldName, - byFieldValue, - correlatedByFieldValue, - fieldName, - function, - functionDescription, - overFieldName, - overFieldValue, - partitionFieldName, - partitionFieldValue, - influencers - ); - } - - @Override - public boolean equals(Object other) { - if (this == other) { - return true; - } - - if (other == null || getClass() != other.getClass()) { - return false; - } - - AnomalyCause that = (AnomalyCause) other; - - return this.probability == that.probability - && Objects.equals(this.typical, that.typical) - && Objects.equals(this.actual, that.actual) - && Objects.equals(this.function, that.function) - && Objects.equals(this.functionDescription, that.functionDescription) - && Objects.equals(this.fieldName, that.fieldName) - && Objects.equals(this.byFieldName, that.byFieldName) - && Objects.equals(this.byFieldValue, that.byFieldValue) - && Objects.equals(this.correlatedByFieldValue, that.correlatedByFieldValue) - && Objects.equals(this.partitionFieldName, that.partitionFieldName) - && Objects.equals(this.partitionFieldValue, that.partitionFieldValue) - && Objects.equals(this.overFieldName, that.overFieldName) - && Objects.equals(this.overFieldValue, that.overFieldValue) - && Objects.equals(this.influencers, that.influencers); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/results/AnomalyRecord.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/results/AnomalyRecord.java deleted file mode 100644 index f3a93703a0275..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/results/AnomalyRecord.java +++ /dev/null @@ -1,476 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. 
Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.job.results; - -import org.elasticsearch.client.common.TimeUtil; -import org.elasticsearch.client.ml.job.config.DetectorFunction; -import org.elasticsearch.client.ml.job.config.Job; -import org.elasticsearch.common.geo.GeoPoint; -import org.elasticsearch.core.Nullable; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ObjectParser.ValueType; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Collections; -import java.util.Date; -import java.util.List; -import java.util.Objects; - -/** - * Anomaly Record POJO. - * Uses the object wrappers Boolean and Double so null values - * can be returned if the members have not been set. - */ -public class AnomalyRecord implements ToXContentObject { - - /** - * Result type - */ - public static final String RESULT_TYPE_VALUE = "record"; - - /** - * Result fields (all detector types) - */ - public static final ParseField PROBABILITY = new ParseField("probability"); - public static final ParseField MULTI_BUCKET_IMPACT = new ParseField("multi_bucket_impact"); - public static final ParseField DETECTOR_INDEX = new ParseField("detector_index"); - public static final ParseField BY_FIELD_NAME = new ParseField("by_field_name"); - public static final ParseField BY_FIELD_VALUE = new ParseField("by_field_value"); - public static final ParseField CORRELATED_BY_FIELD_VALUE = new ParseField("correlated_by_field_value"); - public static final ParseField PARTITION_FIELD_NAME = new ParseField("partition_field_name"); - public static final ParseField PARTITION_FIELD_VALUE = new ParseField("partition_field_value"); - public static final ParseField FUNCTION = new ParseField("function"); - public static final ParseField FUNCTION_DESCRIPTION = new ParseField("function_description"); - public static final ParseField TYPICAL = new ParseField("typical"); - public static final ParseField ACTUAL = new ParseField("actual"); - public static final ParseField INFLUENCERS = new ParseField("influencers"); - public static final ParseField BUCKET_SPAN = new ParseField("bucket_span"); - - // Used for QueryPage - public static final ParseField RESULTS_FIELD = new ParseField("records"); - - /** - * Metric Results (including population metrics) - */ - public static final ParseField FIELD_NAME = new ParseField("field_name"); - - /** - * Population results - */ - public static final ParseField OVER_FIELD_NAME = new ParseField("over_field_name"); - public static final ParseField OVER_FIELD_VALUE = new ParseField("over_field_value"); - public static final ParseField CAUSES = new ParseField("causes"); - - /** - * Normalization - */ - public static final ParseField RECORD_SCORE = new ParseField("record_score"); - public static final ParseField INITIAL_RECORD_SCORE = new ParseField("initial_record_score"); - - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - RESULT_TYPE_VALUE, - true, - a -> new AnomalyRecord((String) a[0], (Date) a[1], (long) a[2]) - ); - - static { - PARSER.declareString(ConstructingObjectParser.constructorArg(), Job.ID); - PARSER.declareField( - 
ConstructingObjectParser.constructorArg(), - (p) -> TimeUtil.parseTimeField(p, Result.TIMESTAMP.getPreferredName()), - Result.TIMESTAMP, - ValueType.VALUE - ); - PARSER.declareLong(ConstructingObjectParser.constructorArg(), BUCKET_SPAN); - PARSER.declareString((anomalyRecord, s) -> {}, Result.RESULT_TYPE); - PARSER.declareDouble(AnomalyRecord::setProbability, PROBABILITY); - PARSER.declareDouble(AnomalyRecord::setMultiBucketImpact, MULTI_BUCKET_IMPACT); - PARSER.declareDouble(AnomalyRecord::setRecordScore, RECORD_SCORE); - PARSER.declareDouble(AnomalyRecord::setInitialRecordScore, INITIAL_RECORD_SCORE); - PARSER.declareInt(AnomalyRecord::setDetectorIndex, DETECTOR_INDEX); - PARSER.declareBoolean(AnomalyRecord::setInterim, Result.IS_INTERIM); - PARSER.declareString(AnomalyRecord::setByFieldName, BY_FIELD_NAME); - PARSER.declareString(AnomalyRecord::setByFieldValue, BY_FIELD_VALUE); - PARSER.declareString(AnomalyRecord::setCorrelatedByFieldValue, CORRELATED_BY_FIELD_VALUE); - PARSER.declareString(AnomalyRecord::setPartitionFieldName, PARTITION_FIELD_NAME); - PARSER.declareString(AnomalyRecord::setPartitionFieldValue, PARTITION_FIELD_VALUE); - PARSER.declareString(AnomalyRecord::setFunction, FUNCTION); - PARSER.declareString(AnomalyRecord::setFunctionDescription, FUNCTION_DESCRIPTION); - PARSER.declareDoubleArray(AnomalyRecord::setTypical, TYPICAL); - PARSER.declareDoubleArray(AnomalyRecord::setActual, ACTUAL); - PARSER.declareString(AnomalyRecord::setFieldName, FIELD_NAME); - PARSER.declareString(AnomalyRecord::setOverFieldName, OVER_FIELD_NAME); - PARSER.declareString(AnomalyRecord::setOverFieldValue, OVER_FIELD_VALUE); - PARSER.declareObjectArray(AnomalyRecord::setCauses, AnomalyCause.PARSER, CAUSES); - PARSER.declareObjectArray(AnomalyRecord::setInfluencers, Influence.PARSER, INFLUENCERS); - } - - private final String jobId; - private int detectorIndex; - private double probability; - private Double multiBucketImpact; - private String byFieldName; - private String byFieldValue; - private String correlatedByFieldValue; - private String partitionFieldName; - private String partitionFieldValue; - private String function; - private String functionDescription; - private List typical; - private List actual; - private boolean isInterim; - - private String fieldName; - - private String overFieldName; - private String overFieldValue; - private List causes; - - private double recordScore; - - private double initialRecordScore; - - private final Date timestamp; - private final long bucketSpan; - - private List influences; - - AnomalyRecord(String jobId, Date timestamp, long bucketSpan) { - this.jobId = jobId; - this.timestamp = Objects.requireNonNull(timestamp); - this.bucketSpan = bucketSpan; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(Job.ID.getPreferredName(), jobId); - builder.field(Result.RESULT_TYPE.getPreferredName(), RESULT_TYPE_VALUE); - builder.field(PROBABILITY.getPreferredName(), probability); - if (multiBucketImpact != null) { - builder.field(MULTI_BUCKET_IMPACT.getPreferredName(), multiBucketImpact); - } - builder.field(RECORD_SCORE.getPreferredName(), recordScore); - builder.field(INITIAL_RECORD_SCORE.getPreferredName(), initialRecordScore); - builder.field(BUCKET_SPAN.getPreferredName(), bucketSpan); - builder.field(DETECTOR_INDEX.getPreferredName(), detectorIndex); - builder.field(Result.IS_INTERIM.getPreferredName(), isInterim); - 
builder.timeField(Result.TIMESTAMP.getPreferredName(), Result.TIMESTAMP.getPreferredName() + "_string", timestamp.getTime()); - if (byFieldName != null) { - builder.field(BY_FIELD_NAME.getPreferredName(), byFieldName); - } - if (byFieldValue != null) { - builder.field(BY_FIELD_VALUE.getPreferredName(), byFieldValue); - } - if (correlatedByFieldValue != null) { - builder.field(CORRELATED_BY_FIELD_VALUE.getPreferredName(), correlatedByFieldValue); - } - if (partitionFieldName != null) { - builder.field(PARTITION_FIELD_NAME.getPreferredName(), partitionFieldName); - } - if (partitionFieldValue != null) { - builder.field(PARTITION_FIELD_VALUE.getPreferredName(), partitionFieldValue); - } - if (function != null) { - builder.field(FUNCTION.getPreferredName(), function); - } - if (functionDescription != null) { - builder.field(FUNCTION_DESCRIPTION.getPreferredName(), functionDescription); - } - if (typical != null) { - builder.field(TYPICAL.getPreferredName(), typical); - } - if (actual != null) { - builder.field(ACTUAL.getPreferredName(), actual); - } - if (fieldName != null) { - builder.field(FIELD_NAME.getPreferredName(), fieldName); - } - if (overFieldName != null) { - builder.field(OVER_FIELD_NAME.getPreferredName(), overFieldName); - } - if (overFieldValue != null) { - builder.field(OVER_FIELD_VALUE.getPreferredName(), overFieldValue); - } - if (causes != null) { - builder.field(CAUSES.getPreferredName(), causes); - } - if (influences != null) { - builder.field(INFLUENCERS.getPreferredName(), influences); - } - builder.endObject(); - return builder; - } - - public String getJobId() { - return this.jobId; - } - - public int getDetectorIndex() { - return detectorIndex; - } - - void setDetectorIndex(int detectorIndex) { - this.detectorIndex = detectorIndex; - } - - public double getRecordScore() { - return recordScore; - } - - void setRecordScore(double recordScore) { - this.recordScore = recordScore; - } - - public double getInitialRecordScore() { - return initialRecordScore; - } - - void setInitialRecordScore(double initialRecordScore) { - this.initialRecordScore = initialRecordScore; - } - - public Date getTimestamp() { - return timestamp; - } - - /** - * Bucketspan expressed in seconds - */ - public long getBucketSpan() { - return bucketSpan; - } - - public double getProbability() { - return probability; - } - - void setProbability(double value) { - probability = value; - } - - public double getMultiBucketImpact() { - return multiBucketImpact; - } - - void setMultiBucketImpact(double value) { - multiBucketImpact = value; - } - - public String getByFieldName() { - return byFieldName; - } - - void setByFieldName(String value) { - byFieldName = value; - } - - public String getByFieldValue() { - return byFieldValue; - } - - void setByFieldValue(String value) { - byFieldValue = value; - } - - public String getCorrelatedByFieldValue() { - return correlatedByFieldValue; - } - - void setCorrelatedByFieldValue(String value) { - correlatedByFieldValue = value; - } - - public String getPartitionFieldName() { - return partitionFieldName; - } - - void setPartitionFieldName(String field) { - partitionFieldName = field; - } - - public String getPartitionFieldValue() { - return partitionFieldValue; - } - - void setPartitionFieldValue(String value) { - partitionFieldValue = value; - } - - public String getFunction() { - return function; - } - - void setFunction(String name) { - function = name; - } - - public String getFunctionDescription() { - return functionDescription; - } - - void 
setFunctionDescription(String functionDescription) { - this.functionDescription = functionDescription; - } - - public List getTypical() { - return typical; - } - - void setTypical(List typical) { - this.typical = Collections.unmodifiableList(typical); - } - - public List getActual() { - return actual; - } - - void setActual(List actual) { - this.actual = Collections.unmodifiableList(actual); - } - - public boolean isInterim() { - return isInterim; - } - - void setInterim(boolean interim) { - this.isInterim = interim; - } - - public String getFieldName() { - return fieldName; - } - - void setFieldName(String field) { - fieldName = field; - } - - public String getOverFieldName() { - return overFieldName; - } - - void setOverFieldName(String name) { - overFieldName = name; - } - - public String getOverFieldValue() { - return overFieldValue; - } - - void setOverFieldValue(String value) { - overFieldValue = value; - } - - public List getCauses() { - return causes; - } - - void setCauses(List causes) { - this.causes = Collections.unmodifiableList(causes); - } - - public List getInfluencers() { - return influences; - } - - void setInfluencers(List influencers) { - this.influences = Collections.unmodifiableList(influencers); - } - - @Nullable - public GeoPoint getTypicalGeoPoint() { - if (DetectorFunction.LAT_LONG.getFullName().equals(function) == false || typical == null) { - return null; - } - if (typical.size() == 2) { - return new GeoPoint(typical.get(0), typical.get(1)); - } - return null; - } - - @Nullable - public GeoPoint getActualGeoPoint() { - if (DetectorFunction.LAT_LONG.getFullName().equals(function) == false || actual == null) { - return null; - } - if (actual.size() == 2) { - return new GeoPoint(actual.get(0), actual.get(1)); - } - return null; - } - - @Override - public int hashCode() { - return Objects.hash( - jobId, - detectorIndex, - bucketSpan, - probability, - multiBucketImpact, - recordScore, - initialRecordScore, - typical, - actual, - function, - functionDescription, - fieldName, - byFieldName, - byFieldValue, - correlatedByFieldValue, - partitionFieldName, - partitionFieldValue, - overFieldName, - overFieldValue, - timestamp, - isInterim, - causes, - influences, - jobId - ); - } - - @Override - public boolean equals(Object other) { - if (this == other) { - return true; - } - - if (other == null || getClass() != other.getClass()) { - return false; - } - - AnomalyRecord that = (AnomalyRecord) other; - - return Objects.equals(this.jobId, that.jobId) - && this.detectorIndex == that.detectorIndex - && this.bucketSpan == that.bucketSpan - && this.probability == that.probability - && Objects.equals(this.multiBucketImpact, that.multiBucketImpact) - && this.recordScore == that.recordScore - && this.initialRecordScore == that.initialRecordScore - && Objects.deepEquals(this.typical, that.typical) - && Objects.deepEquals(this.actual, that.actual) - && Objects.equals(this.function, that.function) - && Objects.equals(this.functionDescription, that.functionDescription) - && Objects.equals(this.fieldName, that.fieldName) - && Objects.equals(this.byFieldName, that.byFieldName) - && Objects.equals(this.byFieldValue, that.byFieldValue) - && Objects.equals(this.correlatedByFieldValue, that.correlatedByFieldValue) - && Objects.equals(this.partitionFieldName, that.partitionFieldName) - && Objects.equals(this.partitionFieldValue, that.partitionFieldValue) - && Objects.equals(this.overFieldName, that.overFieldName) - && Objects.equals(this.overFieldValue, that.overFieldValue) - && 
Objects.equals(this.timestamp, that.timestamp) - && Objects.equals(this.isInterim, that.isInterim) - && Objects.equals(this.causes, that.causes) - && Objects.equals(this.influences, that.influences); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/results/Bucket.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/results/Bucket.java deleted file mode 100644 index 8d74effaac390..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/results/Bucket.java +++ /dev/null @@ -1,249 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.job.results; - -import org.elasticsearch.client.common.TimeUtil; -import org.elasticsearch.client.ml.job.config.Job; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ObjectParser.ValueType; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.Collections; -import java.util.Date; -import java.util.List; -import java.util.Objects; - -/** - * Bucket Result POJO - */ -public class Bucket implements ToXContentObject { - - public static final ParseField ANOMALY_SCORE = new ParseField("anomaly_score"); - public static final ParseField INITIAL_ANOMALY_SCORE = new ParseField("initial_anomaly_score"); - public static final ParseField EVENT_COUNT = new ParseField("event_count"); - public static final ParseField RECORDS = new ParseField("records"); - public static final ParseField BUCKET_INFLUENCERS = new ParseField("bucket_influencers"); - public static final ParseField BUCKET_SPAN = new ParseField("bucket_span"); - public static final ParseField PROCESSING_TIME_MS = new ParseField("processing_time_ms"); - public static final ParseField SCHEDULED_EVENTS = new ParseField("scheduled_events"); - - // Used for QueryPage - public static final ParseField RESULTS_FIELD = new ParseField("buckets"); - - /** - * Result type - */ - public static final String RESULT_TYPE_VALUE = "bucket"; - public static final ParseField RESULT_TYPE_FIELD = new ParseField(RESULT_TYPE_VALUE); - - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - RESULT_TYPE_VALUE, - true, - a -> new Bucket((String) a[0], (Date) a[1], (long) a[2]) - ); - - static { - PARSER.declareString(ConstructingObjectParser.constructorArg(), Job.ID); - PARSER.declareField( - ConstructingObjectParser.constructorArg(), - (p) -> TimeUtil.parseTimeField(p, Result.TIMESTAMP.getPreferredName()), - Result.TIMESTAMP, - ValueType.VALUE - ); - PARSER.declareLong(ConstructingObjectParser.constructorArg(), BUCKET_SPAN); - PARSER.declareDouble(Bucket::setAnomalyScore, ANOMALY_SCORE); - PARSER.declareDouble(Bucket::setInitialAnomalyScore, INITIAL_ANOMALY_SCORE); - PARSER.declareBoolean(Bucket::setInterim, Result.IS_INTERIM); - PARSER.declareLong(Bucket::setEventCount, EVENT_COUNT); - PARSER.declareObjectArray(Bucket::setRecords, AnomalyRecord.PARSER, RECORDS); - PARSER.declareObjectArray(Bucket::setBucketInfluencers, BucketInfluencer.PARSER, 
BUCKET_INFLUENCERS); - PARSER.declareLong(Bucket::setProcessingTimeMs, PROCESSING_TIME_MS); - PARSER.declareString((bucket, s) -> {}, Result.RESULT_TYPE); - PARSER.declareStringArray(Bucket::setScheduledEvents, SCHEDULED_EVENTS); - } - - private final String jobId; - private final Date timestamp; - private final long bucketSpan; - private double anomalyScore; - private double initialAnomalyScore; - private List records = new ArrayList<>(); - private long eventCount; - private boolean isInterim; - private List bucketInfluencers = new ArrayList<>(); // Can't use emptyList as might be appended to - private long processingTimeMs; - private List scheduledEvents = Collections.emptyList(); - - Bucket(String jobId, Date timestamp, long bucketSpan) { - this.jobId = jobId; - this.timestamp = Objects.requireNonNull(timestamp); - this.bucketSpan = bucketSpan; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(Job.ID.getPreferredName(), jobId); - builder.timeField(Result.TIMESTAMP.getPreferredName(), Result.TIMESTAMP.getPreferredName() + "_string", timestamp.getTime()); - builder.field(ANOMALY_SCORE.getPreferredName(), anomalyScore); - builder.field(BUCKET_SPAN.getPreferredName(), bucketSpan); - builder.field(INITIAL_ANOMALY_SCORE.getPreferredName(), initialAnomalyScore); - if (records.isEmpty() == false) { - builder.field(RECORDS.getPreferredName(), records); - } - builder.field(EVENT_COUNT.getPreferredName(), eventCount); - builder.field(Result.IS_INTERIM.getPreferredName(), isInterim); - builder.field(BUCKET_INFLUENCERS.getPreferredName(), bucketInfluencers); - builder.field(PROCESSING_TIME_MS.getPreferredName(), processingTimeMs); - if (scheduledEvents.isEmpty() == false) { - builder.field(SCHEDULED_EVENTS.getPreferredName(), scheduledEvents); - } - builder.field(Result.RESULT_TYPE.getPreferredName(), RESULT_TYPE_VALUE); - builder.endObject(); - return builder; - } - - public String getJobId() { - return jobId; - } - - public Date getTimestamp() { - return timestamp; - } - - /** - * Bucketspan expressed in seconds - */ - public long getBucketSpan() { - return bucketSpan; - } - - public double getAnomalyScore() { - return anomalyScore; - } - - void setAnomalyScore(double anomalyScore) { - this.anomalyScore = anomalyScore; - } - - public double getInitialAnomalyScore() { - return initialAnomalyScore; - } - - void setInitialAnomalyScore(double initialAnomalyScore) { - this.initialAnomalyScore = initialAnomalyScore; - } - - /** - * Get all the anomaly records associated with this bucket. - * The records are not part of the bucket document. They will - * only be present when the bucket was retrieved and expanded - * to contain the associated records. - * - * @return the anomaly records for the bucket IF the bucket was expanded. - */ - public List getRecords() { - return records; - } - - void setRecords(List records) { - this.records = Collections.unmodifiableList(records); - } - - /** - * The number of records (events) actually processed in this bucket. 
- */ - public long getEventCount() { - return eventCount; - } - - void setEventCount(long value) { - eventCount = value; - } - - public boolean isInterim() { - return isInterim; - } - - void setInterim(boolean interim) { - this.isInterim = interim; - } - - public long getProcessingTimeMs() { - return processingTimeMs; - } - - void setProcessingTimeMs(long timeMs) { - processingTimeMs = timeMs; - } - - public List getBucketInfluencers() { - return bucketInfluencers; - } - - void setBucketInfluencers(List bucketInfluencers) { - this.bucketInfluencers = Collections.unmodifiableList(bucketInfluencers); - } - - public List getScheduledEvents() { - return scheduledEvents; - } - - void setScheduledEvents(List scheduledEvents) { - this.scheduledEvents = Collections.unmodifiableList(scheduledEvents); - } - - @Override - public int hashCode() { - return Objects.hash( - jobId, - timestamp, - eventCount, - initialAnomalyScore, - anomalyScore, - records, - isInterim, - bucketSpan, - bucketInfluencers, - processingTimeMs, - scheduledEvents - ); - } - - /** - * Compare all the fields and embedded anomaly records (if any) - */ - @Override - public boolean equals(Object other) { - if (this == other) { - return true; - } - - if (other == null || getClass() != other.getClass()) { - return false; - } - - Bucket that = (Bucket) other; - - return Objects.equals(this.jobId, that.jobId) - && Objects.equals(this.timestamp, that.timestamp) - && (this.eventCount == that.eventCount) - && (this.bucketSpan == that.bucketSpan) - && (this.anomalyScore == that.anomalyScore) - && (this.initialAnomalyScore == that.initialAnomalyScore) - && Objects.equals(this.records, that.records) - && Objects.equals(this.isInterim, that.isInterim) - && Objects.equals(this.bucketInfluencers, that.bucketInfluencers) - && (this.processingTimeMs == that.processingTimeMs) - && Objects.equals(this.scheduledEvents, that.scheduledEvents); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/results/BucketInfluencer.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/results/BucketInfluencer.java deleted file mode 100644 index 62df14ce4e817..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/results/BucketInfluencer.java +++ /dev/null @@ -1,196 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ -package org.elasticsearch.client.ml.job.results; - -import org.elasticsearch.client.common.TimeUtil; -import org.elasticsearch.client.ml.job.config.Job; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ObjectParser.ValueType; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Date; -import java.util.Objects; - -public class BucketInfluencer implements ToXContentObject { - - /** - * Result type - */ - public static final String RESULT_TYPE_VALUE = "bucket_influencer"; - public static final ParseField RESULT_TYPE_FIELD = new ParseField(RESULT_TYPE_VALUE); - - /** - * Field names - */ - public static final ParseField INFLUENCER_FIELD_NAME = new ParseField("influencer_field_name"); - public static final ParseField INITIAL_ANOMALY_SCORE = new ParseField("initial_anomaly_score"); - public static final ParseField ANOMALY_SCORE = new ParseField("anomaly_score"); - public static final ParseField RAW_ANOMALY_SCORE = new ParseField("raw_anomaly_score"); - public static final ParseField PROBABILITY = new ParseField("probability"); - public static final ParseField BUCKET_SPAN = new ParseField("bucket_span"); - - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - RESULT_TYPE_FIELD.getPreferredName(), - true, - a -> new BucketInfluencer((String) a[0], (Date) a[1], (long) a[2]) - ); - - static { - PARSER.declareString(ConstructingObjectParser.constructorArg(), Job.ID); - PARSER.declareField( - ConstructingObjectParser.constructorArg(), - (p) -> TimeUtil.parseTimeField(p, Result.TIMESTAMP.getPreferredName()), - Result.TIMESTAMP, - ValueType.VALUE - ); - PARSER.declareLong(ConstructingObjectParser.constructorArg(), BUCKET_SPAN); - PARSER.declareString((bucketInfluencer, s) -> {}, Result.RESULT_TYPE); - PARSER.declareString(BucketInfluencer::setInfluencerFieldName, INFLUENCER_FIELD_NAME); - PARSER.declareDouble(BucketInfluencer::setInitialAnomalyScore, INITIAL_ANOMALY_SCORE); - PARSER.declareDouble(BucketInfluencer::setAnomalyScore, ANOMALY_SCORE); - PARSER.declareDouble(BucketInfluencer::setRawAnomalyScore, RAW_ANOMALY_SCORE); - PARSER.declareDouble(BucketInfluencer::setProbability, PROBABILITY); - PARSER.declareBoolean(BucketInfluencer::setIsInterim, Result.IS_INTERIM); - } - - private final String jobId; - private String influenceField; - private double initialAnomalyScore; - private double anomalyScore; - private double rawAnomalyScore; - private double probability; - private boolean isInterim; - private final Date timestamp; - private final long bucketSpan; - - BucketInfluencer(String jobId, Date timestamp, long bucketSpan) { - this.jobId = jobId; - this.timestamp = Objects.requireNonNull(timestamp); - this.bucketSpan = bucketSpan; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(Job.ID.getPreferredName(), jobId); - builder.field(Result.RESULT_TYPE.getPreferredName(), RESULT_TYPE_VALUE); - if (influenceField != null) { - builder.field(INFLUENCER_FIELD_NAME.getPreferredName(), influenceField); - } - builder.field(INITIAL_ANOMALY_SCORE.getPreferredName(), initialAnomalyScore); - builder.field(ANOMALY_SCORE.getPreferredName(), anomalyScore); - builder.field(RAW_ANOMALY_SCORE.getPreferredName(), rawAnomalyScore); - builder.field(PROBABILITY.getPreferredName(), 
probability); - builder.timeField(Result.TIMESTAMP.getPreferredName(), Result.TIMESTAMP.getPreferredName() + "_string", timestamp.getTime()); - builder.field(BUCKET_SPAN.getPreferredName(), bucketSpan); - builder.field(Result.IS_INTERIM.getPreferredName(), isInterim); - builder.endObject(); - return builder; - } - - public String getJobId() { - return jobId; - } - - public double getProbability() { - return probability; - } - - void setProbability(double probability) { - this.probability = probability; - } - - public String getInfluencerFieldName() { - return influenceField; - } - - void setInfluencerFieldName(String fieldName) { - this.influenceField = fieldName; - } - - public double getInitialAnomalyScore() { - return initialAnomalyScore; - } - - void setInitialAnomalyScore(double influenceScore) { - this.initialAnomalyScore = influenceScore; - } - - public double getAnomalyScore() { - return anomalyScore; - } - - void setAnomalyScore(double score) { - anomalyScore = score; - } - - public double getRawAnomalyScore() { - return rawAnomalyScore; - } - - void setRawAnomalyScore(double score) { - rawAnomalyScore = score; - } - - void setIsInterim(boolean isInterim) { - this.isInterim = isInterim; - } - - public boolean isInterim() { - return isInterim; - } - - public Date getTimestamp() { - return timestamp; - } - - @Override - public int hashCode() { - return Objects.hash( - influenceField, - initialAnomalyScore, - anomalyScore, - rawAnomalyScore, - probability, - isInterim, - timestamp, - jobId, - bucketSpan - ); - } - - @Override - public boolean equals(Object obj) { - if (this == obj) { - return true; - } - - if (obj == null) { - return false; - } - - if (getClass() != obj.getClass()) { - return false; - } - - BucketInfluencer other = (BucketInfluencer) obj; - - return Objects.equals(influenceField, other.influenceField) - && Double.compare(initialAnomalyScore, other.initialAnomalyScore) == 0 - && Double.compare(anomalyScore, other.anomalyScore) == 0 - && Double.compare(rawAnomalyScore, other.rawAnomalyScore) == 0 - && Double.compare(probability, other.probability) == 0 - && Objects.equals(isInterim, other.isInterim) - && Objects.equals(timestamp, other.timestamp) - && Objects.equals(jobId, other.jobId) - && bucketSpan == other.bucketSpan; - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/results/CategoryDefinition.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/results/CategoryDefinition.java deleted file mode 100644 index 4b204d7279c38..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/results/CategoryDefinition.java +++ /dev/null @@ -1,232 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ -package org.elasticsearch.client.ml.job.results; - -import org.elasticsearch.client.ml.job.config.Job; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.Collection; -import java.util.Collections; -import java.util.List; -import java.util.Objects; -import java.util.Set; -import java.util.TreeSet; - -public class CategoryDefinition implements ToXContentObject { - - public static final ParseField TYPE = new ParseField("category_definition"); - - public static final ParseField CATEGORY_ID = new ParseField("category_id"); - public static final ParseField PARTITION_FIELD_NAME = new ParseField("partition_field_name"); - public static final ParseField PARTITION_FIELD_VALUE = new ParseField("partition_field_value"); - public static final ParseField TERMS = new ParseField("terms"); - public static final ParseField REGEX = new ParseField("regex"); - public static final ParseField MAX_MATCHING_LENGTH = new ParseField("max_matching_length"); - public static final ParseField EXAMPLES = new ParseField("examples"); - public static final ParseField GROK_PATTERN = new ParseField("grok_pattern"); - public static final ParseField NUM_MATCHES = new ParseField("num_matches"); - public static final ParseField PREFERRED_TO_CATEGORIES = new ParseField("preferred_to_categories"); - - // Used for QueryPage - public static final ParseField RESULTS_FIELD = new ParseField("categories"); - - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - TYPE.getPreferredName(), - true, - a -> new CategoryDefinition((String) a[0]) - ); - - static { - PARSER.declareString(ConstructingObjectParser.constructorArg(), Job.ID); - PARSER.declareLong(CategoryDefinition::setCategoryId, CATEGORY_ID); - PARSER.declareString(CategoryDefinition::setPartitionFieldName, PARTITION_FIELD_NAME); - PARSER.declareString(CategoryDefinition::setPartitionFieldValue, PARTITION_FIELD_VALUE); - PARSER.declareString(CategoryDefinition::setTerms, TERMS); - PARSER.declareString(CategoryDefinition::setRegex, REGEX); - PARSER.declareLong(CategoryDefinition::setMaxMatchingLength, MAX_MATCHING_LENGTH); - PARSER.declareStringArray(CategoryDefinition::setExamples, EXAMPLES); - PARSER.declareString(CategoryDefinition::setGrokPattern, GROK_PATTERN); - PARSER.declareLong(CategoryDefinition::setNumMatches, NUM_MATCHES); - PARSER.declareLongArray(CategoryDefinition::setPreferredToCategories, PREFERRED_TO_CATEGORIES); - } - - private final String jobId; - private long categoryId = 0L; - private String partitionFieldName; - private String partitionFieldValue; - private String terms = ""; - private String regex = ""; - private long maxMatchingLength = 0L; - private final Set examples = new TreeSet<>(); - private String grokPattern; - private long numMatches = 0L; - private List preferredToCategories; - - CategoryDefinition(String jobId) { - this.jobId = jobId; - } - - public String getJobId() { - return jobId; - } - - public long getCategoryId() { - return categoryId; - } - - void setCategoryId(long categoryId) { - this.categoryId = categoryId; - } - - public String getPartitionFieldName() { - return partitionFieldName; - } - - public void setPartitionFieldName(String partitionFieldName) { - this.partitionFieldName = partitionFieldName; - } - - public String getPartitionFieldValue() { - return 
partitionFieldValue; - } - - public void setPartitionFieldValue(String partitionFieldValue) { - this.partitionFieldValue = partitionFieldValue; - } - - public String getTerms() { - return terms; - } - - void setTerms(String terms) { - this.terms = terms; - } - - public String getRegex() { - return regex; - } - - void setRegex(String regex) { - this.regex = regex; - } - - public long getMaxMatchingLength() { - return maxMatchingLength; - } - - void setMaxMatchingLength(long maxMatchingLength) { - this.maxMatchingLength = maxMatchingLength; - } - - public List getExamples() { - return new ArrayList<>(examples); - } - - void setExamples(Collection examples) { - this.examples.clear(); - this.examples.addAll(examples); - } - - void addExample(String example) { - examples.add(example); - } - - public String getGrokPattern() { - return grokPattern; - } - - void setGrokPattern(String grokPattern) { - this.grokPattern = grokPattern; - } - - public long getNumMatches() { - return numMatches; - } - - public void setNumMatches(long numMatches) { - this.numMatches = numMatches; - } - - public List getPreferredToCategories() { - return preferredToCategories; - } - - public void setPreferredToCategories(List preferredToCategories) { - this.preferredToCategories = Collections.unmodifiableList(preferredToCategories); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(Job.ID.getPreferredName(), jobId); - builder.field(CATEGORY_ID.getPreferredName(), categoryId); - if (partitionFieldName != null) { - builder.field(PARTITION_FIELD_NAME.getPreferredName(), partitionFieldName); - } - if (partitionFieldValue != null) { - builder.field(PARTITION_FIELD_VALUE.getPreferredName(), partitionFieldValue); - } - builder.field(TERMS.getPreferredName(), terms); - builder.field(REGEX.getPreferredName(), regex); - builder.field(MAX_MATCHING_LENGTH.getPreferredName(), maxMatchingLength); - builder.field(EXAMPLES.getPreferredName(), examples); - if (grokPattern != null) { - builder.field(GROK_PATTERN.getPreferredName(), grokPattern); - } - builder.field(NUM_MATCHES.getPreferredName(), numMatches); - if (preferredToCategories != null && (preferredToCategories.isEmpty() == false)) { - builder.field(PREFERRED_TO_CATEGORIES.getPreferredName(), preferredToCategories); - } - builder.endObject(); - return builder; - } - - @Override - public boolean equals(Object other) { - if (this == other) { - return true; - } - if (other == null || getClass() != other.getClass()) { - return false; - } - CategoryDefinition that = (CategoryDefinition) other; - return Objects.equals(this.jobId, that.jobId) - && Objects.equals(this.categoryId, that.categoryId) - && Objects.equals(this.partitionFieldName, that.partitionFieldName) - && Objects.equals(this.partitionFieldValue, that.partitionFieldValue) - && Objects.equals(this.terms, that.terms) - && Objects.equals(this.regex, that.regex) - && Objects.equals(this.maxMatchingLength, that.maxMatchingLength) - && Objects.equals(this.examples, that.examples) - && Objects.equals(this.preferredToCategories, that.preferredToCategories) - && Objects.equals(this.numMatches, that.numMatches) - && Objects.equals(this.grokPattern, that.grokPattern); - } - - @Override - public int hashCode() { - return Objects.hash( - jobId, - categoryId, - partitionFieldName, - partitionFieldValue, - terms, - regex, - maxMatchingLength, - examples, - preferredToCategories, - numMatches, - grokPattern - ); - } -} diff --git 
a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/results/Influence.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/results/Influence.java deleted file mode 100644 index 0969b5983c75e..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/results/Influence.java +++ /dev/null @@ -1,91 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.job.results; - -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Collections; -import java.util.List; -import java.util.Objects; - -/** - * Influence field name and list of influence field values/score pairs - */ -public class Influence implements ToXContentObject { - - /** - * Note all X-Content serialized field names are "influencer" not "influence" - */ - public static final ParseField INFLUENCER = new ParseField("influencer"); - public static final ParseField INFLUENCER_FIELD_NAME = new ParseField("influencer_field_name"); - public static final ParseField INFLUENCER_FIELD_VALUES = new ParseField("influencer_field_values"); - - @SuppressWarnings("unchecked") - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - INFLUENCER.getPreferredName(), - true, - a -> new Influence((String) a[0], (List) a[1]) - ); - - static { - PARSER.declareString(ConstructingObjectParser.constructorArg(), INFLUENCER_FIELD_NAME); - PARSER.declareStringArray(ConstructingObjectParser.constructorArg(), INFLUENCER_FIELD_VALUES); - } - - private String field; - private List fieldValues; - - Influence(String field, List fieldValues) { - this.field = field; - this.fieldValues = Collections.unmodifiableList(fieldValues); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(INFLUENCER_FIELD_NAME.getPreferredName(), field); - builder.field(INFLUENCER_FIELD_VALUES.getPreferredName(), fieldValues); - builder.endObject(); - return builder; - } - - public String getInfluencerFieldName() { - return field; - } - - public List getInfluencerFieldValues() { - return fieldValues; - } - - @Override - public int hashCode() { - return Objects.hash(field, fieldValues); - } - - @Override - public boolean equals(Object obj) { - if (this == obj) { - return true; - } - - if (obj == null) { - return false; - } - - if (getClass() != obj.getClass()) { - return false; - } - - Influence other = (Influence) obj; - return Objects.equals(field, other.field) && Objects.equals(fieldValues, other.fieldValues); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/results/Influencer.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/results/Influencer.java deleted file mode 100644 index 46c7516b9853a..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/results/Influencer.java +++ /dev/null @@ -1,190 +0,0 @@ -/* - * Copyright Elasticsearch B.V. 
and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.job.results; - -import org.elasticsearch.client.common.TimeUtil; -import org.elasticsearch.client.ml.job.config.Job; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ObjectParser.ValueType; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Date; -import java.util.Objects; - -public class Influencer implements ToXContentObject { - - /** - * Result type - */ - public static final String RESULT_TYPE_VALUE = "influencer"; - public static final ParseField RESULT_TYPE_FIELD = new ParseField(RESULT_TYPE_VALUE); - - /* - * Field names - */ - public static final ParseField PROBABILITY = new ParseField("probability"); - public static final ParseField BUCKET_SPAN = new ParseField("bucket_span"); - public static final ParseField INFLUENCER_FIELD_NAME = new ParseField("influencer_field_name"); - public static final ParseField INFLUENCER_FIELD_VALUE = new ParseField("influencer_field_value"); - public static final ParseField INITIAL_INFLUENCER_SCORE = new ParseField("initial_influencer_score"); - public static final ParseField INFLUENCER_SCORE = new ParseField("influencer_score"); - - // Used for QueryPage - public static final ParseField RESULTS_FIELD = new ParseField("influencers"); - - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - RESULT_TYPE_FIELD.getPreferredName(), - true, - a -> new Influencer((String) a[0], (String) a[1], (String) a[2], (Date) a[3], (long) a[4]) - ); - - static { - PARSER.declareString(ConstructingObjectParser.constructorArg(), Job.ID); - PARSER.declareString(ConstructingObjectParser.constructorArg(), INFLUENCER_FIELD_NAME); - PARSER.declareString(ConstructingObjectParser.constructorArg(), INFLUENCER_FIELD_VALUE); - PARSER.declareField( - ConstructingObjectParser.constructorArg(), - (p) -> TimeUtil.parseTimeField(p, Result.TIMESTAMP.getPreferredName()), - Result.TIMESTAMP, - ValueType.VALUE - ); - PARSER.declareLong(ConstructingObjectParser.constructorArg(), BUCKET_SPAN); - PARSER.declareString((influencer, s) -> {}, Result.RESULT_TYPE); - PARSER.declareDouble(Influencer::setProbability, PROBABILITY); - PARSER.declareDouble(Influencer::setInfluencerScore, INFLUENCER_SCORE); - PARSER.declareDouble(Influencer::setInitialInfluencerScore, INITIAL_INFLUENCER_SCORE); - PARSER.declareBoolean(Influencer::setInterim, Result.IS_INTERIM); - } - - private final String jobId; - private final Date timestamp; - private final long bucketSpan; - private String influenceField; - private String influenceValue; - private double probability; - private double initialInfluencerScore; - private double influencerScore; - private boolean isInterim; - - Influencer(String jobId, String fieldName, String fieldValue, Date timestamp, long bucketSpan) { - this.jobId = jobId; - influenceField = fieldName; - influenceValue = fieldValue; - this.timestamp = Objects.requireNonNull(timestamp); - this.bucketSpan = bucketSpan; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws 
IOException { - builder.startObject(); - builder.field(Job.ID.getPreferredName(), jobId); - builder.field(Result.RESULT_TYPE.getPreferredName(), RESULT_TYPE_VALUE); - builder.field(INFLUENCER_FIELD_NAME.getPreferredName(), influenceField); - builder.field(INFLUENCER_FIELD_VALUE.getPreferredName(), influenceValue); - builder.field(INFLUENCER_SCORE.getPreferredName(), influencerScore); - builder.field(INITIAL_INFLUENCER_SCORE.getPreferredName(), initialInfluencerScore); - builder.field(PROBABILITY.getPreferredName(), probability); - builder.field(BUCKET_SPAN.getPreferredName(), bucketSpan); - builder.field(Result.IS_INTERIM.getPreferredName(), isInterim); - builder.timeField(Result.TIMESTAMP.getPreferredName(), Result.TIMESTAMP.getPreferredName() + "_string", timestamp.getTime()); - builder.endObject(); - return builder; - } - - public String getJobId() { - return jobId; - } - - public double getProbability() { - return probability; - } - - void setProbability(double probability) { - this.probability = probability; - } - - public Date getTimestamp() { - return timestamp; - } - - public String getInfluencerFieldName() { - return influenceField; - } - - public String getInfluencerFieldValue() { - return influenceValue; - } - - public double getInitialInfluencerScore() { - return initialInfluencerScore; - } - - void setInitialInfluencerScore(double score) { - initialInfluencerScore = score; - } - - public double getInfluencerScore() { - return influencerScore; - } - - void setInfluencerScore(double score) { - influencerScore = score; - } - - public boolean isInterim() { - return isInterim; - } - - void setInterim(boolean value) { - isInterim = value; - } - - @Override - public int hashCode() { - return Objects.hash( - jobId, - timestamp, - influenceField, - influenceValue, - initialInfluencerScore, - influencerScore, - probability, - isInterim, - bucketSpan - ); - } - - @Override - public boolean equals(Object obj) { - if (this == obj) { - return true; - } - - if (obj == null) { - return false; - } - - if (getClass() != obj.getClass()) { - return false; - } - - Influencer other = (Influencer) obj; - return Objects.equals(jobId, other.jobId) - && Objects.equals(timestamp, other.timestamp) - && Objects.equals(influenceField, other.influenceField) - && Objects.equals(influenceValue, other.influenceValue) - && Double.compare(initialInfluencerScore, other.initialInfluencerScore) == 0 - && Double.compare(influencerScore, other.influencerScore) == 0 - && Double.compare(probability, other.probability) == 0 - && (isInterim == other.isInterim) - && (bucketSpan == other.bucketSpan); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/results/OverallBucket.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/results/OverallBucket.java deleted file mode 100644 index 9a6bb40682e6f..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/results/OverallBucket.java +++ /dev/null @@ -1,206 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
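The Influencer parser above follows the two-phase ConstructingObjectParser convention used throughout these client classes: required values are positional constructor arguments, optional values are applied through setters after construction. A minimal stand-alone sketch of the same convention, with an illustrative Score class that is not part of the client:

    import org.elasticsearch.xcontent.ConstructingObjectParser;
    import org.elasticsearch.xcontent.ParseField;

    public class Score {
        private static final ParseField NAME = new ParseField("name");
        private static final ParseField VALUE = new ParseField("value");

        // Lenient parser: unknown fields are ignored, "name" is a required
        // constructor argument, "value" is optional and set after construction.
        public static final ConstructingObjectParser<Score, Void> PARSER = new ConstructingObjectParser<>(
            "score",
            true,
            a -> new Score((String) a[0])
        );

        static {
            PARSER.declareString(ConstructingObjectParser.constructorArg(), NAME);
            PARSER.declareDouble(Score::setValue, VALUE);
        }

        private final String name;
        private double value;

        Score(String name) {
            this.name = name;
        }

        void setValue(double value) {
            this.value = value;
        }
    }
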
- */ -package org.elasticsearch.client.ml.job.results; - -import org.elasticsearch.client.common.TimeUtil; -import org.elasticsearch.client.ml.job.config.Job; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Collections; -import java.util.Date; -import java.util.List; -import java.util.Objects; - -/** - * Overall Bucket Result POJO - */ -public class OverallBucket implements ToXContentObject { - - public static final ParseField OVERALL_SCORE = new ParseField("overall_score"); - public static final ParseField BUCKET_SPAN = new ParseField("bucket_span"); - public static final ParseField JOBS = new ParseField("jobs"); - - // Used for QueryPage - public static final ParseField RESULTS_FIELD = new ParseField("overall_buckets"); - - /** - * Result type - */ - public static final String RESULT_TYPE_VALUE = "overall_bucket"; - - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - RESULT_TYPE_VALUE, - true, - a -> new OverallBucket((Date) a[0], (long) a[1], (double) a[2], (boolean) a[3]) - ); - - static { - PARSER.declareField( - ConstructingObjectParser.constructorArg(), - (p) -> TimeUtil.parseTimeField(p, Result.TIMESTAMP.getPreferredName()), - Result.TIMESTAMP, - ObjectParser.ValueType.VALUE - ); - PARSER.declareLong(ConstructingObjectParser.constructorArg(), BUCKET_SPAN); - PARSER.declareDouble(ConstructingObjectParser.constructorArg(), OVERALL_SCORE); - PARSER.declareBoolean(ConstructingObjectParser.constructorArg(), Result.IS_INTERIM); - PARSER.declareObjectArray(OverallBucket::setJobs, JobInfo.PARSER, JOBS); - } - - private final Date timestamp; - private final long bucketSpan; - private final double overallScore; - private final boolean isInterim; - private List jobs = Collections.emptyList(); - - OverallBucket(Date timestamp, long bucketSpan, double overallScore, boolean isInterim) { - this.timestamp = Objects.requireNonNull(timestamp); - this.bucketSpan = bucketSpan; - this.overallScore = overallScore; - this.isInterim = isInterim; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.timeField(Result.TIMESTAMP.getPreferredName(), Result.TIMESTAMP.getPreferredName() + "_string", timestamp.getTime()); - builder.field(BUCKET_SPAN.getPreferredName(), bucketSpan); - builder.field(OVERALL_SCORE.getPreferredName(), overallScore); - builder.field(JOBS.getPreferredName(), jobs); - builder.field(Result.IS_INTERIM.getPreferredName(), isInterim); - builder.field(Result.RESULT_TYPE.getPreferredName(), RESULT_TYPE_VALUE); - builder.endObject(); - return builder; - } - - public Date getTimestamp() { - return timestamp; - } - - /** - * Bucketspan expressed in seconds - */ - public long getBucketSpan() { - return bucketSpan; - } - - public double getOverallScore() { - return overallScore; - } - - public List getJobs() { - return jobs; - } - - void setJobs(List jobs) { - this.jobs = Collections.unmodifiableList(jobs); - } - - public boolean isInterim() { - return isInterim; - } - - @Override - public int hashCode() { - return Objects.hash(timestamp, bucketSpan, overallScore, jobs, isInterim); - } - - /** - * Compare all the fields and embedded anomaly records (if any) - */ - @Override - public boolean equals(Object 
other) { - if (this == other) { - return true; - } - - if (other == null || getClass() != other.getClass()) { - return false; - } - - OverallBucket that = (OverallBucket) other; - - return Objects.equals(this.timestamp, that.timestamp) - && this.bucketSpan == that.bucketSpan - && this.overallScore == that.overallScore - && Objects.equals(this.jobs, that.jobs) - && this.isInterim == that.isInterim; - } - - public static class JobInfo implements ToXContentObject, Comparable { - - private static final ParseField MAX_ANOMALY_SCORE = new ParseField("max_anomaly_score"); - - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "job_info", - true, - a -> new JobInfo((String) a[0], (double) a[1]) - ); - - static { - PARSER.declareString(ConstructingObjectParser.constructorArg(), Job.ID); - PARSER.declareDouble(ConstructingObjectParser.constructorArg(), MAX_ANOMALY_SCORE); - } - - private final String jobId; - private final double maxAnomalyScore; - - JobInfo(String jobId, double maxAnomalyScore) { - this.jobId = Objects.requireNonNull(jobId); - this.maxAnomalyScore = maxAnomalyScore; - } - - public String getJobId() { - return jobId; - } - - public double getMaxAnomalyScore() { - return maxAnomalyScore; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(Job.ID.getPreferredName(), jobId); - builder.field(MAX_ANOMALY_SCORE.getPreferredName(), maxAnomalyScore); - builder.endObject(); - return builder; - } - - @Override - public int hashCode() { - return Objects.hash(jobId, maxAnomalyScore); - } - - @Override - public boolean equals(Object other) { - if (this == other) { - return true; - } - if (other == null || getClass() != other.getClass()) { - return false; - } - JobInfo that = (JobInfo) other; - return Objects.equals(this.jobId, that.jobId) && this.maxAnomalyScore == that.maxAnomalyScore; - } - - @Override - public int compareTo(JobInfo other) { - int result = this.jobId.compareTo(other.jobId); - if (result == 0) { - result = Double.compare(this.maxAnomalyScore, other.maxAnomalyScore); - } - return result; - } - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/results/Result.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/results/Result.java deleted file mode 100644 index 6f5408bb2ae0a..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/results/Result.java +++ /dev/null @@ -1,25 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.job.results; - -import org.elasticsearch.xcontent.ParseField; - -/** - * Contains common attributes for results. 
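JobInfo's compareTo above orders by job id first and uses the max anomaly score only as a tie-breaker. The same ordering can be expressed as a composed Comparator; a JDK-only sketch with an illustrative record:

    import java.util.Comparator;
    import java.util.List;

    record JobScore(String jobId, double maxAnomalyScore) {

        // Job id first, score as the tie-breaker, matching JobInfo.compareTo.
        static final Comparator<JobScore> ORDER = Comparator.comparing(JobScore::jobId)
            .thenComparingDouble(JobScore::maxAnomalyScore);

        public static void main(String[] args) {
            List<JobScore> sorted = List.of(new JobScore("b", 1.0), new JobScore("a", 2.0))
                .stream()
                .sorted(ORDER)
                .toList();
            System.out.println(sorted); // JobScore[jobId=a, ...] before JobScore[jobId=b, ...]
        }
    }
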
- */ -public final class Result { - - /** - * Serialisation fields - */ - public static final ParseField RESULT_TYPE = new ParseField("result_type"); - public static final ParseField TIMESTAMP = new ParseField("timestamp"); - public static final ParseField IS_INTERIM = new ParseField("is_interim"); - - private Result() {} -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/stats/ForecastStats.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/stats/ForecastStats.java deleted file mode 100644 index 796cb18f3eb2d..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/stats/ForecastStats.java +++ /dev/null @@ -1,165 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -package org.elasticsearch.client.ml.job.stats; - -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Collections; -import java.util.HashMap; -import java.util.Map; -import java.util.Objects; - -/** - * A class to hold statistics about forecasts. - */ -public class ForecastStats implements ToXContentObject { - - public static final ParseField TOTAL = new ParseField("total"); - public static final ParseField FORECASTED_JOBS = new ParseField("forecasted_jobs"); - public static final ParseField MEMORY_BYTES = new ParseField("memory_bytes"); - public static final ParseField PROCESSING_TIME_MS = new ParseField("processing_time_ms"); - public static final ParseField RECORDS = new ParseField("records"); - public static final ParseField STATUS = new ParseField("status"); - - @SuppressWarnings("unchecked") - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "forecast_stats", - true, - (a) -> { - int i = 0; - long total = (long) a[i++]; - SimpleStats memoryStats = (SimpleStats) a[i++]; - SimpleStats recordStats = (SimpleStats) a[i++]; - SimpleStats runtimeStats = (SimpleStats) a[i++]; - Map statusCounts = (Map) a[i]; - return new ForecastStats(total, memoryStats, recordStats, runtimeStats, statusCounts); - } - ); - - static { - PARSER.declareLong(ConstructingObjectParser.constructorArg(), TOTAL); - PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(), SimpleStats.PARSER, MEMORY_BYTES); - PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(), SimpleStats.PARSER, RECORDS); - PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(), SimpleStats.PARSER, PROCESSING_TIME_MS); - PARSER.declareField(ConstructingObjectParser.optionalConstructorArg(), p -> { - Map counts = new HashMap<>(); - p.map().forEach((key, value) -> counts.put(key, ((Number) value).longValue())); - return counts; - }, STATUS, ObjectParser.ValueType.OBJECT); - } - - private final long total; - private final long forecastedJobs; - private SimpleStats memoryStats; - private SimpleStats recordStats; - private SimpleStats runtimeStats; - private Map statusCounts; - - public ForecastStats( - long total, - SimpleStats memoryStats, - 
SimpleStats recordStats, - SimpleStats runtimeStats, - Map statusCounts - ) { - this.total = total; - this.forecastedJobs = total > 0 ? 1 : 0; - if (total > 0) { - this.memoryStats = Objects.requireNonNull(memoryStats); - this.recordStats = Objects.requireNonNull(recordStats); - this.runtimeStats = Objects.requireNonNull(runtimeStats); - this.statusCounts = Collections.unmodifiableMap(statusCounts); - } - } - - /** - * The number of forecasts currently available for this model. - */ - public long getTotal() { - return total; - } - - /** - * The number of jobs that have at least one forecast. - */ - public long getForecastedJobs() { - return forecastedJobs; - } - - /** - * Statistics about the memory usage: minimum, maximum, average and total. - */ - public SimpleStats getMemoryStats() { - return memoryStats; - } - - /** - * Statistics about the number of forecast records: minimum, maximum, average and total. - */ - public SimpleStats getRecordStats() { - return recordStats; - } - - /** - * Statistics about the forecast runtime in milliseconds: minimum, maximum, average and total - */ - public SimpleStats getRuntimeStats() { - return runtimeStats; - } - - /** - * Counts per forecast status, for example: {"finished" : 2}. - */ - public Map getStatusCounts() { - return statusCounts; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(TOTAL.getPreferredName(), total); - builder.field(FORECASTED_JOBS.getPreferredName(), forecastedJobs); - - if (total > 0) { - builder.field(MEMORY_BYTES.getPreferredName(), memoryStats); - builder.field(RECORDS.getPreferredName(), recordStats); - builder.field(PROCESSING_TIME_MS.getPreferredName(), runtimeStats); - builder.field(STATUS.getPreferredName(), statusCounts); - } - return builder.endObject(); - } - - @Override - public int hashCode() { - return Objects.hash(total, forecastedJobs, memoryStats, recordStats, runtimeStats, statusCounts); - } - - @Override - public boolean equals(Object obj) { - if (this == obj) { - return true; - } - - if (obj == null || getClass() != obj.getClass()) { - return false; - } - - ForecastStats other = (ForecastStats) obj; - return Objects.equals(total, other.total) - && Objects.equals(forecastedJobs, other.forecastedJobs) - && Objects.equals(memoryStats, other.memoryStats) - && Objects.equals(recordStats, other.recordStats) - && Objects.equals(runtimeStats, other.runtimeStats) - && Objects.equals(statusCounts, other.statusCounts); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/stats/JobStats.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/stats/JobStats.java deleted file mode 100644 index abf2a278ba763..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/stats/JobStats.java +++ /dev/null @@ -1,229 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
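The STATUS field in the ForecastStats parser above is read as an arbitrary JSON object whose numeric values are narrowed to longs, since the parser may hand them back as Integer, Long or Double. The conversion in isolation, as a JDK-only sketch:

    import java.util.HashMap;
    import java.util.Map;

    class StatusCounts {
        // Mirrors the STATUS parser: narrow every numeric value through Number.longValue().
        static Map<String, Long> fromRaw(Map<String, Object> raw) {
            Map<String, Long> counts = new HashMap<>();
            raw.forEach((key, value) -> counts.put(key, ((Number) value).longValue()));
            return counts;
        }

        public static void main(String[] args) {
            Map<String, Object> raw = Map.of("finished", 2, "failed", 0L);
            System.out.println(fromRaw(raw)); // {finished=2, failed=0} (order may vary)
        }
    }
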
- */ -package org.elasticsearch.client.ml.job.stats; - -import org.elasticsearch.client.ml.NodeAttributes; -import org.elasticsearch.client.ml.job.config.Job; -import org.elasticsearch.client.ml.job.config.JobState; -import org.elasticsearch.client.ml.job.process.DataCounts; -import org.elasticsearch.client.ml.job.process.ModelSizeStats; -import org.elasticsearch.client.ml.job.process.TimingStats; -import org.elasticsearch.core.Nullable; -import org.elasticsearch.core.TimeValue; -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Objects; - -/** - * Class containing the statistics for a Machine Learning job. - * - */ -public class JobStats implements ToXContentObject { - - private static final ParseField DATA_COUNTS = new ParseField("data_counts"); - private static final ParseField MODEL_SIZE_STATS = new ParseField("model_size_stats"); - private static final ParseField TIMING_STATS = new ParseField("timing_stats"); - private static final ParseField FORECASTS_STATS = new ParseField("forecasts_stats"); - private static final ParseField STATE = new ParseField("state"); - private static final ParseField NODE = new ParseField("node"); - private static final ParseField OPEN_TIME = new ParseField("open_time"); - private static final ParseField ASSIGNMENT_EXPLANATION = new ParseField("assignment_explanation"); - - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>("job_stats", true, (a) -> { - int i = 0; - String jobId = (String) a[i++]; - DataCounts dataCounts = (DataCounts) a[i++]; - JobState jobState = (JobState) a[i++]; - ModelSizeStats.Builder modelSizeStatsBuilder = (ModelSizeStats.Builder) a[i++]; - ModelSizeStats modelSizeStats = modelSizeStatsBuilder == null ? 
null : modelSizeStatsBuilder.build(); - TimingStats timingStats = (TimingStats) a[i++]; - ForecastStats forecastStats = (ForecastStats) a[i++]; - NodeAttributes node = (NodeAttributes) a[i++]; - String assignmentExplanation = (String) a[i++]; - TimeValue openTime = (TimeValue) a[i]; - return new JobStats(jobId, dataCounts, jobState, modelSizeStats, timingStats, forecastStats, node, assignmentExplanation, openTime); - }); - - static { - PARSER.declareString(ConstructingObjectParser.constructorArg(), Job.ID); - PARSER.declareObject(ConstructingObjectParser.constructorArg(), DataCounts.PARSER, DATA_COUNTS); - PARSER.declareField( - ConstructingObjectParser.constructorArg(), - (p) -> JobState.fromString(p.text()), - STATE, - ObjectParser.ValueType.VALUE - ); - PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(), ModelSizeStats.PARSER, MODEL_SIZE_STATS); - PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(), TimingStats.PARSER, TIMING_STATS); - PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(), ForecastStats.PARSER, FORECASTS_STATS); - PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(), NodeAttributes.PARSER, NODE); - PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), ASSIGNMENT_EXPLANATION); - PARSER.declareField( - ConstructingObjectParser.optionalConstructorArg(), - (p, c) -> TimeValue.parseTimeValue(p.textOrNull(), OPEN_TIME.getPreferredName()), - OPEN_TIME, - ObjectParser.ValueType.STRING_OR_NULL - ); - } - - private final String jobId; - private final DataCounts dataCounts; - private final JobState state; - private final ModelSizeStats modelSizeStats; - private final TimingStats timingStats; - private final ForecastStats forecastStats; - private final NodeAttributes node; - private final String assignmentExplanation; - private final TimeValue openTime; - - JobStats( - String jobId, - DataCounts dataCounts, - JobState state, - @Nullable ModelSizeStats modelSizeStats, - @Nullable TimingStats timingStats, - @Nullable ForecastStats forecastStats, - @Nullable NodeAttributes node, - @Nullable String assignmentExplanation, - @Nullable TimeValue openTime - ) { - this.jobId = Objects.requireNonNull(jobId); - this.dataCounts = Objects.requireNonNull(dataCounts); - this.state = Objects.requireNonNull(state); - this.modelSizeStats = modelSizeStats; - this.timingStats = timingStats; - this.forecastStats = forecastStats; - this.node = node; - this.assignmentExplanation = assignmentExplanation; - this.openTime = openTime; - } - - /** - * The jobId referencing the job for these statistics - */ - public String getJobId() { - return jobId; - } - - /** - * An object that describes the number of records processed and any related error counts - * See {@link DataCounts} - */ - public DataCounts getDataCounts() { - return dataCounts; - } - - /** - * An object that provides information about the size and contents of the model. - * See {@link ModelSizeStats} - */ - public ModelSizeStats getModelSizeStats() { - return modelSizeStats; - } - - public TimingStats getTimingStats() { - return timingStats; - } - - /** - * An object that provides statistical information about forecasts of this job. 
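The JobStats parser above unpacks its argument array positionally and has to null-guard the optional ModelSizeStats builder before calling build(). The guard can also be written as a single Optional expression; a sketch with an illustrative Builder interface:

    import java.util.Optional;

    class BuildOrNull {
        interface Builder<T> {
            T build();
        }

        // Same effect as: builder == null ? null : builder.build()
        static <T> T buildOrNull(Builder<T> builder) {
            return Optional.ofNullable(builder).map(Builder::build).orElse(null);
        }
    }
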
- * See {@link ForecastStats} - */ - public ForecastStats getForecastStats() { - return forecastStats; - } - - /** - * The status of the job - * See {@link JobState} - */ - public JobState getState() { - return state; - } - - /** - * For open jobs only, contains information about the node where the job runs - * See {@link NodeAttributes} - */ - public NodeAttributes getNode() { - return node; - } - - /** - * For open jobs only, contains messages relating to the selection of a node to run the job. - */ - public String getAssignmentExplanation() { - return assignmentExplanation; - } - - /** - * For open jobs only, the elapsed time for which the job has been open - */ - public TimeValue getOpenTime() { - return openTime; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(Job.ID.getPreferredName(), jobId); - builder.field(DATA_COUNTS.getPreferredName(), dataCounts); - builder.field(STATE.getPreferredName(), state.toString()); - if (modelSizeStats != null) { - builder.field(MODEL_SIZE_STATS.getPreferredName(), modelSizeStats); - } - if (timingStats != null) { - builder.field(TIMING_STATS.getPreferredName(), timingStats); - } - if (forecastStats != null) { - builder.field(FORECASTS_STATS.getPreferredName(), forecastStats); - } - if (node != null) { - builder.field(NODE.getPreferredName(), node); - } - if (assignmentExplanation != null) { - builder.field(ASSIGNMENT_EXPLANATION.getPreferredName(), assignmentExplanation); - } - if (openTime != null) { - builder.field(OPEN_TIME.getPreferredName(), openTime.getStringRep()); - } - return builder.endObject(); - } - - @Override - public int hashCode() { - return Objects.hash(jobId, dataCounts, modelSizeStats, timingStats, forecastStats, state, node, assignmentExplanation, openTime); - } - - @Override - public boolean equals(Object obj) { - if (this == obj) { - return true; - } - - if (obj == null || getClass() != obj.getClass()) { - return false; - } - - JobStats other = (JobStats) obj; - return Objects.equals(jobId, other.jobId) - && Objects.equals(this.dataCounts, other.dataCounts) - && Objects.equals(this.modelSizeStats, other.modelSizeStats) - && Objects.equals(this.timingStats, other.timingStats) - && Objects.equals(this.forecastStats, other.forecastStats) - && Objects.equals(this.state, other.state) - && Objects.equals(this.node, other.node) - && Objects.equals(this.assignmentExplanation, other.assignmentExplanation) - && Objects.equals(this.openTime, other.openTime); - } -} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/stats/SimpleStats.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/stats/SimpleStats.java deleted file mode 100644 index 01050d93b1a91..0000000000000 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/job/stats/SimpleStats.java +++ /dev/null @@ -1,104 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
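Note how open_time round-trips as a human-readable duration string: the parser reads it with TimeValue.parseTimeValue and toXContent writes it back with getStringRep(). A minimal sketch of that round trip, assuming org.elasticsearch.core.TimeValue is on the classpath:

    import org.elasticsearch.core.TimeValue;

    class OpenTimeRoundTrip {
        public static void main(String[] args) {
            TimeValue openTime = TimeValue.parseTimeValue("15m", "open_time");
            String wireForm = openTime.getStringRep(); // "15m"
            // Parsing the string representation yields an equal TimeValue.
            System.out.println(openTime.equals(TimeValue.parseTimeValue(wireForm, "open_time")));
        }
    }
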
- */ -package org.elasticsearch.client.ml.job.stats; - -import org.elasticsearch.xcontent.ConstructingObjectParser; -import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.Objects; - -/** - * Helper class for min, max, avg and total statistics for a quantity - */ -public class SimpleStats implements ToXContentObject { - - public static final ParseField MIN = new ParseField("min"); - public static final ParseField MAX = new ParseField("max"); - public static final ParseField AVG = new ParseField("avg"); - public static final ParseField TOTAL = new ParseField("total"); - - public static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>("simple_stats", true, (a) -> { - int i = 0; - double total = (double) a[i++]; - double min = (double) a[i++]; - double max = (double) a[i++]; - double avg = (double) a[i++]; - return new SimpleStats(total, min, max, avg); - }); - - static { - PARSER.declareDouble(ConstructingObjectParser.constructorArg(), TOTAL); - PARSER.declareDouble(ConstructingObjectParser.constructorArg(), MIN); - PARSER.declareDouble(ConstructingObjectParser.constructorArg(), MAX); - PARSER.declareDouble(ConstructingObjectParser.constructorArg(), AVG); - } - - private final double total; - private final double min; - private final double max; - private final double avg; - - SimpleStats(double total, double min, double max, double avg) { - this.total = total; - this.min = min; - this.max = max; - this.avg = avg; - } - - public double getMin() { - return min; - } - - public double getMax() { - return max; - } - - public double getAvg() { - return avg; - } - - public double getTotal() { - return total; - } - - @Override - public int hashCode() { - return Objects.hash(total, min, max, avg); - } - - @Override - public boolean equals(Object obj) { - if (this == obj) { - return true; - } - - if (obj == null || getClass() != obj.getClass()) { - return false; - } - - SimpleStats other = (SimpleStats) obj; - return Objects.equals(total, other.total) - && Objects.equals(min, other.min) - && Objects.equals(avg, other.avg) - && Objects.equals(max, other.max); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(MIN.getPreferredName(), min); - builder.field(MAX.getPreferredName(), max); - builder.field(AVG.getPreferredName(), avg); - builder.field(TOTAL.getPreferredName(), total); - builder.endObject(); - return builder; - } -} diff --git a/client/rest-high-level/src/main/resources/META-INF/services/org.elasticsearch.plugins.spi.NamedXContentProvider b/client/rest-high-level/src/main/resources/META-INF/services/org.elasticsearch.plugins.spi.NamedXContentProvider index 9426b3d1bdde7..20b144ef1c562 100644 --- a/client/rest-high-level/src/main/resources/META-INF/services/org.elasticsearch.plugins.spi.NamedXContentProvider +++ b/client/rest-high-level/src/main/resources/META-INF/services/org.elasticsearch.plugins.spi.NamedXContentProvider @@ -1,6 +1,2 @@ org.elasticsearch.client.ilm.IndexLifecycleNamedXContentProvider -org.elasticsearch.client.ml.dataframe.MlDataFrameAnalysisNamedXContentProvider -org.elasticsearch.client.ml.dataframe.evaluation.MlEvaluationNamedXContentProvider -org.elasticsearch.client.ml.dataframe.stats.AnalysisStatsNamedXContentProvider -org.elasticsearch.client.ml.inference.MlInferenceNamedXContentProvider 
org.elasticsearch.client.transform.TransformNamedXContentProvider From ac3d0beaf00e3b9b6f95fc889b6783e627609705 Mon Sep 17 00:00:00 2001 From: Benjamin Trent Date: Tue, 15 Feb 2022 07:06:47 -0500 Subject: [PATCH 07/37] [ML] refactoring internal tokenization logic for NLP (#83835) This simplifies the internal logic used to pass tokenization results around while streamlining building the request sent to the model. This helps lay some of the ground work for windowing as collapsing request building && token results will be required (as a single sequence could result in a batch request). Additionally, many of the intellij warnings are addressed and code is modernized (i.e. taking advantage of records) --- .../deployment/DeploymentManager.java | 19 +- .../ml/inference/nlp/BertRequestBuilder.java | 71 ------- .../ml/inference/nlp/FillMaskProcessor.java | 38 ++-- .../ml/inference/nlp/MPNetRequestBuilder.java | 66 ------- .../xpack/ml/inference/nlp/NerProcessor.java | 49 ++--- .../xpack/ml/inference/nlp/NlpTask.java | 79 ++------ .../inference/nlp/PassThroughProcessor.java | 12 +- .../nlp/TextClassificationProcessor.java | 10 +- .../inference/nlp/TextEmbeddingProcessor.java | 10 +- .../nlp/ZeroShotClassificationProcessor.java | 48 +---- .../tokenizers/BertTokenizationResult.java | 118 ++++++++++++ .../nlp/tokenizers/BertTokenizer.java | 127 +++--------- .../nlp/tokenizers/CharSeqTokenTrieNode.java | 6 +- .../tokenizers/MPNetTokenizationResult.java | 78 ++++++++ .../nlp/tokenizers/MPNetTokenizer.java | 44 ++--- .../nlp/tokenizers/NlpTokenizer.java | 12 +- .../nlp/tokenizers/TokenizationResult.java | 181 +++++++++++------- .../nlp/tokenizers/WordPieceTokenFilter.java | 22 +-- .../results/PyTorchInferenceResult.java | 8 +- ....java => BertTokenizationResultTests.java} | 15 +- .../inference/nlp/FillMaskProcessorTests.java | 24 ++- ...java => MPNetTokenizationResultTests.java} | 15 +- .../ml/inference/nlp/NerProcessorTests.java | 8 +- .../nlp/TextClassificationProcessorTests.java | 2 +- .../ZeroShotClassificationProcessorTests.java | 2 +- .../nlp/tokenizers/BertTokenizerTests.java | 122 ++++++------ .../nlp/tokenizers/MPNetTokenizerTests.java | 18 +- 27 files changed, 542 insertions(+), 662 deletions(-) delete mode 100644 x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/BertRequestBuilder.java delete mode 100644 x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/MPNetRequestBuilder.java create mode 100644 x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/BertTokenizationResult.java create mode 100644 x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/MPNetTokenizationResult.java rename x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/{BertRequestBuilderTests.java => BertTokenizationResultTests.java} (91%) rename x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/{MPNetRequestBuilderTests.java => MPNetTokenizationResultTests.java} (89%) diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/deployment/DeploymentManager.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/deployment/DeploymentManager.java index 34e7f6d0740e0..a8f744d7181e7 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/deployment/DeploymentManager.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/deployment/DeploymentManager.java @@ -29,6 +29,7 @@ import 
org.elasticsearch.xcontent.NamedXContentRegistry; import org.elasticsearch.xcontent.XContentFactory; import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xcontent.XContentParserConfiguration; import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.core.ml.action.GetTrainedModelsAction; import org.elasticsearch.xpack.core.ml.inference.TrainedModelConfig; @@ -201,7 +202,11 @@ Vocabulary parseVocabularyDocLeniently(SearchHit hit) throws IOException { try ( InputStream stream = hit.getSourceRef().streamInput(); XContentParser parser = XContentFactory.xContent(XContentType.JSON) - .createParser(xContentRegistry, LoggingDeprecationHandler.INSTANCE, stream) + .createParser( + XContentParserConfiguration.EMPTY.withRegistry(xContentRegistry) + .withDeprecationHandler(LoggingDeprecationHandler.INSTANCE), + stream + ) ) { return Vocabulary.createParser(true).apply(parser, null); } catch (IOException e) { @@ -374,8 +379,8 @@ protected void doRun() throws Exception { NlpConfig nlpConfig = (NlpConfig) config; NlpTask.Request request = processor.getRequestBuilder(nlpConfig) .buildRequest(text, requestIdStr, nlpConfig.getTokenization().getTruncate()); - logger.debug(() -> "Inference Request " + request.processInput.utf8ToString()); - if (request.tokenization.anyTruncated()) { + logger.debug(() -> "Inference Request " + request.processInput().utf8ToString()); + if (request.tokenization().anyTruncated()) { logger.debug("[{}] [{}] input truncated", modelId, requestId); } processContext.getResultProcessor() @@ -385,14 +390,14 @@ protected void doRun() throws Exception { inferenceResult -> processResult( inferenceResult, processContext, - request.tokenization, + request.tokenization(), processor.getResultProcessor((NlpConfig) config), this ), this::onFailure ) ); - processContext.process.get().writeInferenceRequest(request.processInput); + processContext.process.get().writeInferenceRequest(request.processInput()); } catch (IOException e) { logger.error(new ParameterizedMessage("[{}] error writing to inference process", processContext.task.getModelId()), e); onFailure(ExceptionsHelper.serverError("Error writing to inference process", e)); @@ -448,8 +453,8 @@ class ProcessContext { private volatile Instant startTime; private volatile Integer inferenceThreads; private volatile Integer modelThreads; - private AtomicInteger rejectedExecutionCount = new AtomicInteger(); - private AtomicInteger timeoutCount = new AtomicInteger(); + private final AtomicInteger rejectedExecutionCount = new AtomicInteger(); + private final AtomicInteger timeoutCount = new AtomicInteger(); ProcessContext(TrainedModelDeploymentTask task, ExecutorService executorService) { this.task = Objects.requireNonNull(task); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/BertRequestBuilder.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/BertRequestBuilder.java deleted file mode 100644 index 88a6b6b8739f4..0000000000000 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/BertRequestBuilder.java +++ /dev/null @@ -1,71 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
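The call sites above switch from direct field access (request.processInput) to accessor calls (request.processInput()) because NlpTask.Request becomes a record later in this patch. A sketch of that shape of migration, using stand-in component types rather than the real TokenizationResult and BytesReference:

    import java.util.Objects;

    // Replaces a class with two public final fields and a hand-written
    // constructor, equals and hashCode; callers now use accessor methods.
    record Request(String tokenization, byte[] processInput) {

        // Compact canonical constructor: validation runs before assignment.
        Request {
            Objects.requireNonNull(tokenization);
            Objects.requireNonNull(processInput);
        }
    }
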
- */ - -package org.elasticsearch.xpack.ml.inference.nlp; - -import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentFactory; -import org.elasticsearch.xpack.core.ml.inference.trainedmodel.Tokenization; -import org.elasticsearch.xpack.ml.inference.nlp.tokenizers.NlpTokenizer; -import org.elasticsearch.xpack.ml.inference.nlp.tokenizers.TokenizationResult; - -import java.io.IOException; -import java.util.List; -import java.util.stream.Collectors; - -public class BertRequestBuilder implements NlpTask.RequestBuilder { - - static final String REQUEST_ID = "request_id"; - static final String TOKENS = "tokens"; - static final String ARG1 = "arg_1"; - static final String ARG2 = "arg_2"; - static final String ARG3 = "arg_3"; - - private final NlpTokenizer tokenizer; - - public BertRequestBuilder(NlpTokenizer tokenizer) { - this.tokenizer = tokenizer; - } - - @Override - public NlpTask.Request buildRequest(List inputs, String requestId, Tokenization.Truncate truncate) throws IOException { - if (tokenizer.getPadTokenId().isEmpty()) { - throw new IllegalStateException("The input tokenizer does not have a " + tokenizer.getPadToken() + " token in its vocabulary"); - } - - TokenizationResult tokenization = tokenizer.buildTokenizationResult( - inputs.stream().map(s -> tokenizer.tokenize(s, truncate)).collect(Collectors.toList()) - ); - return buildRequest(tokenization, requestId); - } - - @Override - public NlpTask.Request buildRequest(TokenizationResult tokenization, String requestId) throws IOException { - if (tokenizer.getPadTokenId().isEmpty()) { - throw new IllegalStateException("The input tokenizer does not have a " + tokenizer.getPadToken() + " token in its vocabulary"); - } - return new NlpTask.Request(tokenization, jsonRequest(tokenization, tokenizer.getPadTokenId().getAsInt(), requestId)); - } - - static BytesReference jsonRequest(TokenizationResult tokenization, int padToken, String requestId) throws IOException { - XContentBuilder builder = XContentFactory.jsonBuilder(); - builder.startObject(); - builder.field(REQUEST_ID, requestId); - - NlpTask.RequestBuilder.writePaddedTokens(TOKENS, tokenization, padToken, (tokens, i) -> tokens.getTokenIds()[i], builder); - NlpTask.RequestBuilder.writePaddedTokens(ARG1, tokenization, padToken, (tokens, i) -> 1, builder); - int batchSize = tokenization.getTokenizations().size(); - NlpTask.RequestBuilder.writeNonPaddedArguments(ARG2, batchSize, tokenization.getLongestSequenceLength(), i -> 0, builder); - NlpTask.RequestBuilder.writeNonPaddedArguments(ARG3, batchSize, tokenization.getLongestSequenceLength(), i -> i, builder); - builder.endObject(); - - // BytesReference.bytes closes the builder - return BytesReference.bytes(builder); - } - -} diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/FillMaskProcessor.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/FillMaskProcessor.java index db79d24e35821..2ce2fc1ea471d 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/FillMaskProcessor.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/FillMaskProcessor.java @@ -23,20 +23,14 @@ import java.util.ArrayList; import java.util.List; import java.util.Optional; +import java.util.OptionalInt; import static org.elasticsearch.xpack.core.ml.inference.trainedmodel.InferenceConfig.DEFAULT_RESULTS_FIELD; -public class FillMaskProcessor implements 
NlpTask.Processor { - - private final NlpTokenizer tokenizer; +public class FillMaskProcessor extends NlpTask.Processor { FillMaskProcessor(NlpTokenizer tokenizer, FillMaskConfig config) { - this.tokenizer = tokenizer; - } - - @Override - public void close() { - tokenizer.close(); + super(tokenizer); } @Override @@ -97,7 +91,7 @@ static InferenceResults processResult( int numResults, String resultsField ) { - if (tokenization.getTokenizations().isEmpty() || tokenization.getTokenizations().get(0).getTokenIds().length == 0) { + if (tokenization.isEmpty()) { throw new ElasticsearchStatusException("tokenization is empty", RestStatus.INTERNAL_SERVER_ERROR); } @@ -108,25 +102,20 @@ static InferenceResults processResult( ); } - int maskTokenIndex = -1; int maskTokenId = tokenizer.getMaskTokenId().getAsInt(); - for (int i = 0; i < tokenization.getTokenizations().get(0).getTokenIds().length; i++) { - if (tokenization.getTokenizations().get(0).getTokenIds()[i] == maskTokenId) { - maskTokenIndex = i; - break; - } - } - if (maskTokenIndex == -1) { + OptionalInt maskTokenIndex = tokenization.getTokenization(0).getTokenIndex(maskTokenId); + if (maskTokenIndex.isEmpty()) { throw new ElasticsearchStatusException( - "mask token id [{}] not found in the tokenization {}", + "mask token id [{}] not found in the tokenization", RestStatus.INTERNAL_SERVER_ERROR, - maskTokenId, - List.of(tokenization.getTokenizations().get(0).getTokenIds()) + maskTokenId ); } // TODO - process all results in the batch - double[] normalizedScores = NlpHelpers.convertToProbabilitiesBySoftMax(pyTorchResult.getInferenceResult()[0][maskTokenIndex]); + double[] normalizedScores = NlpHelpers.convertToProbabilitiesBySoftMax( + pyTorchResult.getInferenceResult()[0][maskTokenIndex.getAsInt()] + ); NlpHelpers.ScoreAndIndex[] scoreAndIndices = NlpHelpers.topK( // We need at least one to record the result @@ -142,10 +131,7 @@ static InferenceResults processResult( } return new FillMaskResults( tokenization.getFromVocab(scoreAndIndices[0].index), - tokenization.getTokenizations() - .get(0) - .getInput() - .replace(tokenizer.getMaskToken(), tokenization.getFromVocab(scoreAndIndices[0].index)), + tokenization.getTokenization(0).input().replace(tokenizer.getMaskToken(), tokenization.getFromVocab(scoreAndIndices[0].index)), results, Optional.ofNullable(resultsField).orElse(DEFAULT_RESULTS_FIELD), scoreAndIndices[0].score, diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/MPNetRequestBuilder.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/MPNetRequestBuilder.java deleted file mode 100644 index f8ea5a513aa76..0000000000000 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/MPNetRequestBuilder.java +++ /dev/null @@ -1,66 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
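The jsonRequest methods above pad every tokenized input in a batch out to the longest sequence before writing the tokens and arg_1 arrays, which is what makes a single multi-sequence request rectangular. The padding step on its own, as a JDK-only sketch:

    import java.util.Arrays;
    import java.util.List;

    class PadBatch {
        // Pad each sequence to the longest length in the batch with the pad token id.
        static int[][] pad(List<int[]> batch, int padTokenId) {
            int longest = batch.stream().mapToInt(a -> a.length).max().orElse(0);
            int[][] padded = new int[batch.size()][];
            for (int i = 0; i < batch.size(); i++) {
                int[] row = Arrays.copyOf(batch.get(i), longest); // zero-filled tail
                Arrays.fill(row, batch.get(i).length, longest, padTokenId);
                padded[i] = row;
            }
            return padded;
        }

        public static void main(String[] args) {
            int[][] out = pad(List.of(new int[] { 101, 7592, 102 }, new int[] { 101, 102 }), 0);
            System.out.println(Arrays.deepToString(out)); // [[101, 7592, 102], [101, 102, 0]]
        }
    }
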
- */ - -package org.elasticsearch.xpack.ml.inference.nlp; - -import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentFactory; -import org.elasticsearch.xpack.core.ml.inference.trainedmodel.Tokenization; -import org.elasticsearch.xpack.ml.inference.nlp.tokenizers.NlpTokenizer; -import org.elasticsearch.xpack.ml.inference.nlp.tokenizers.TokenizationResult; - -import java.io.IOException; -import java.util.List; -import java.util.stream.Collectors; - -public class MPNetRequestBuilder implements NlpTask.RequestBuilder { - - static final String REQUEST_ID = "request_id"; - static final String TOKENS = "tokens"; - static final String ARG1 = "arg_1"; - - private final NlpTokenizer tokenizer; - - public MPNetRequestBuilder(NlpTokenizer tokenizer) { - this.tokenizer = tokenizer; - } - - @Override - public NlpTask.Request buildRequest(List inputs, String requestId, Tokenization.Truncate truncate) throws IOException { - if (tokenizer.getPadTokenId().isEmpty()) { - throw new IllegalStateException("The input tokenizer does not have a " + tokenizer.getPadToken() + " token in its vocabulary"); - } - - TokenizationResult tokenization = tokenizer.buildTokenizationResult( - inputs.stream().map(s -> tokenizer.tokenize(s, truncate)).collect(Collectors.toList()) - ); - return buildRequest(tokenization, requestId); - } - - @Override - public NlpTask.Request buildRequest(TokenizationResult tokenization, String requestId) throws IOException { - if (tokenizer.getPadTokenId().isEmpty()) { - throw new IllegalStateException("The input tokenizer does not have a " + tokenizer.getPadToken() + " token in its vocabulary"); - } - return new NlpTask.Request(tokenization, jsonRequest(tokenization, tokenizer.getPadTokenId().getAsInt(), requestId)); - } - - static BytesReference jsonRequest(TokenizationResult tokenization, int padToken, String requestId) throws IOException { - XContentBuilder builder = XContentFactory.jsonBuilder(); - builder.startObject(); - builder.field(REQUEST_ID, requestId); - - NlpTask.RequestBuilder.writePaddedTokens(TOKENS, tokenization, padToken, (tokens, i) -> tokens.getTokenIds()[i], builder); - NlpTask.RequestBuilder.writePaddedTokens(ARG1, tokenization, padToken, (tokens, i) -> 1, builder); - builder.endObject(); - - // BytesReference.bytes closes the builder - return BytesReference.bytes(builder); - } - -} diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/NerProcessor.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/NerProcessor.java index 177bc387ea87a..1aa9ce8e6b0f6 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/NerProcessor.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/NerProcessor.java @@ -32,7 +32,7 @@ import static org.elasticsearch.xpack.core.ml.inference.trainedmodel.InferenceConfig.DEFAULT_RESULTS_FIELD; -public class NerProcessor implements NlpTask.Processor { +public class NerProcessor extends NlpTask.Processor { public enum Entity implements Writeable { NONE, @@ -83,20 +83,14 @@ boolean isBeginning() { private final IobTag[] iobMap; private final String resultsField; private final boolean ignoreCase; - private final NlpTokenizer tokenizer; NerProcessor(NlpTokenizer tokenizer, NerConfig config) { + super(tokenizer); validate(config.getClassificationLabels()); this.iobMap = buildIobMap(config.getClassificationLabels()); this.requestBuilder = 
tokenizer.requestBuilder(); this.resultsField = config.getResultsField(); this.ignoreCase = config.getTokenization().doLowerCase(); - this.tokenizer = tokenizer; - } - - @Override - public void close() { - tokenizer.close(); } /** @@ -188,11 +182,7 @@ static String buildAnnotatedText(String seq, List entiti return annotatedResultBuilder.toString(); } - static class NerResultProcessor implements NlpTask.ResultProcessor { - private final IobTag[] iobMap; - private final String resultsField; - private final boolean ignoreCase; - + record NerResultProcessor(IobTag[] iobMap, String resultsField, boolean ignoreCase) implements NlpTask.ResultProcessor { NerResultProcessor(IobTag[] iobMap, String resultsField, boolean ignoreCase) { this.iobMap = iobMap; this.resultsField = Optional.ofNullable(resultsField).orElse(DEFAULT_RESULTS_FIELD); @@ -201,7 +191,7 @@ static class NerResultProcessor implements NlpTask.ResultProcessor { @Override public InferenceResults processResult(TokenizationResult tokenization, PyTorchInferenceResult pyTorchResult) { - if (tokenization.getTokenizations().isEmpty() || tokenization.getTokenizations().get(0).getTokenIds().length == 0) { + if (tokenization.isEmpty()) { throw new ElasticsearchStatusException("no valid tokenization to build result", RestStatus.INTERNAL_SERVER_ERROR); } // TODO - process all results in the batch @@ -213,18 +203,16 @@ public InferenceResults processResult(TokenizationResult tokenization, PyTorchIn // of maybe (1 + 0) / 2 = 0.5 while before softmax it'd be exp(10 - 5) / normalization // which could easily be close to 1. double[][] normalizedScores = NlpHelpers.convertToProbabilitiesBySoftMax(pyTorchResult.getInferenceResult()[0]); - List taggedTokens = tagTokens(tokenization.getTokenizations().get(0), normalizedScores, iobMap); + List taggedTokens = tagTokens(tokenization.getTokenization(0), normalizedScores, iobMap); List entities = groupTaggedTokens( taggedTokens, - ignoreCase - ? tokenization.getTokenizations().get(0).getInput().toLowerCase(Locale.ROOT) - : tokenization.getTokenizations().get(0).getInput() + ignoreCase ? tokenization.getTokenization(0).input().toLowerCase(Locale.ROOT) : tokenization.getTokenization(0).input() ); return new NerResults( resultsField, - buildAnnotatedText(tokenization.getTokenizations().get(0).getInput(), entities), + buildAnnotatedText(tokenization.getTokenization(0).input(), entities), entities, tokenization.anyTruncated() ); @@ -236,12 +224,12 @@ public InferenceResults processResult(TokenizationResult tokenization, PyTorchIn * in the original input replacing them with a single token that * gets labelled based on the average score of all its sub-tokens. 
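The comment above explains why scores are compared after softmax rather than before: a raw-score gap such as exp(10 - 5) becomes a probability close to 1. A JDK-only sketch of the numerically stable form of that normalization (subtracting the max logit before exponentiating; the production helper is NlpHelpers.convertToProbabilitiesBySoftMax, whose internals are not shown here):

    class Softmax {
        // Stable softmax: shift by the max logit so Math.exp cannot overflow.
        static double[] softmax(double[] logits) {
            double max = Double.NEGATIVE_INFINITY;
            for (double logit : logits) {
                max = Math.max(max, logit);
            }
            double sum = 0.0;
            double[] probs = new double[logits.length];
            for (int i = 0; i < logits.length; i++) {
                probs[i] = Math.exp(logits[i] - max);
                sum += probs[i];
            }
            for (int i = 0; i < probs.length; i++) {
                probs[i] /= sum;
            }
            return probs;
        }

        public static void main(String[] args) {
            double[] probs = softmax(new double[] { 10, 5 });
            System.out.printf("%.3f %.3f%n", probs[0], probs[1]); // ~0.993 0.007
        }
    }
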
*/ - static List tagTokens(TokenizationResult.Tokenization tokenization, double[][] scores, IobTag[] iobMap) { + static List tagTokens(TokenizationResult.Tokens tokenization, double[][] scores, IobTag[] iobMap) { List taggedTokens = new ArrayList<>(); int startTokenIndex = 0; int numSpecialTokens = 0; - while (startTokenIndex < tokenization.getTokenIds().length) { - int inputMapping = tokenization.getTokenMap()[startTokenIndex]; + while (startTokenIndex < tokenization.tokenIds().length) { + int inputMapping = tokenization.tokenIds()[startTokenIndex]; if (inputMapping < 0) { // This token does not map to a token in the input (special tokens) startTokenIndex++; @@ -249,8 +237,7 @@ static List tagTokens(TokenizationResult.Tokenization tokenization, continue; } int endTokenIndex = startTokenIndex; - while (endTokenIndex < tokenization.getTokenMap().length - 1 - && tokenization.getTokenMap()[endTokenIndex + 1] == inputMapping) { + while (endTokenIndex < tokenization.tokenMap().length - 1 && tokenization.tokenMap()[endTokenIndex + 1] == inputMapping) { endTokenIndex++; } double[] avgScores = Arrays.copyOf(scores[startTokenIndex], iobMap.length); @@ -268,7 +255,7 @@ static List tagTokens(TokenizationResult.Tokenization tokenization, int maxScoreIndex = NlpHelpers.argmax(avgScores); double score = avgScores[maxScoreIndex]; taggedTokens.add( - new TaggedToken(tokenization.getTokens().get(startTokenIndex - numSpecialTokens), iobMap[maxScoreIndex], score) + new TaggedToken(tokenization.tokens().get(startTokenIndex - numSpecialTokens), iobMap[maxScoreIndex], score) ); startTokenIndex = endTokenIndex + 1; } @@ -325,17 +312,7 @@ static List groupTaggedTokens(List tokens, return entities; } - static class TaggedToken { - private final DelimitedToken token; - private final IobTag tag; - private final double score; - - TaggedToken(DelimitedToken token, IobTag tag, double score) { - this.token = token; - this.tag = tag; - this.score = score; - } - + record TaggedToken(DelimitedToken token, IobTag tag, double score) { @Override public String toString() { return new StringBuilder("{").append("token:") diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/NlpTask.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/NlpTask.java index 4ee96e78db0a6..43fa0d8a2488d 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/NlpTask.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/NlpTask.java @@ -11,7 +11,6 @@ import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.xcontent.support.XContentMapValues; import org.elasticsearch.core.Releasable; -import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xpack.core.ml.inference.TrainedModelInput; import org.elasticsearch.xpack.core.ml.inference.results.InferenceResults; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.NlpConfig; @@ -46,78 +45,37 @@ public Processor createProcessor() throws ValidationException { } public interface RequestBuilder { - @FunctionalInterface - interface IntToIntFunction { - int applyAsInt(int value); - } - - @FunctionalInterface - interface TokenLookupFunction { - int apply(TokenizationResult.Tokenization tokenization, int index); - } - Request buildRequest(List inputs, String requestId, Tokenization.Truncate truncate) throws IOException; - - Request buildRequest(TokenizationResult tokenizationResult, String requestId) throws IOException; - - static void 
writePaddedTokens( - String fieldName, - TokenizationResult tokenization, - int padToken, - TokenLookupFunction generator, - XContentBuilder builder - ) throws IOException { - builder.startArray(fieldName); - for (var inputTokens : tokenization.getTokenizations()) { - builder.startArray(); - int i = 0; - for (; i < inputTokens.getTokenIds().length; i++) { - builder.value(generator.apply(inputTokens, i)); - } - - for (; i < tokenization.getLongestSequenceLength(); i++) { - builder.value(padToken); - } - builder.endArray(); - } - builder.endArray(); - } - - static void writeNonPaddedArguments( - String fieldName, - int numTokenizations, - int longestSequenceLength, - IntToIntFunction generator, - XContentBuilder builder - ) throws IOException { - builder.startArray(fieldName); - for (int i = 0; i < numTokenizations; i++) { - builder.startArray(); - for (int j = 0; j < longestSequenceLength; j++) { - builder.value(generator.applyAsInt(j)); - } - builder.endArray(); - } - builder.endArray(); - } } public interface ResultProcessor { InferenceResults processResult(TokenizationResult tokenization, PyTorchInferenceResult pyTorchResult); } - public interface Processor extends Releasable { + public abstract static class Processor implements Releasable { + + protected final NlpTokenizer tokenizer; + + public Processor(NlpTokenizer tokenizer) { + this.tokenizer = tokenizer; + } + + @Override + public void close() { + tokenizer.close(); + } + /** * Validate the task input string. * Throws an exception if the inputs fail validation * * @param inputs Text to validate */ - void validateInputs(List inputs); + public abstract void validateInputs(List inputs); - RequestBuilder getRequestBuilder(NlpConfig config); + public abstract RequestBuilder getRequestBuilder(NlpConfig config); - ResultProcessor getResultProcessor(NlpConfig config); + public abstract ResultProcessor getResultProcessor(NlpConfig config); } public static String extractInput(TrainedModelInput input, Map doc) { @@ -133,10 +91,7 @@ public static String extractInput(TrainedModelInput input, Map d throw ExceptionsHelper.badRequestException("Input value [{}] for field [{}] must be a string", inputValue, inputField); } - public static class Request { - public final TokenizationResult tokenization; - public final BytesReference processInput; - + public record Request(TokenizationResult tokenization, BytesReference processInput) { public Request(TokenizationResult tokenization, BytesReference processInput) { this.tokenization = Objects.requireNonNull(tokenization); this.processInput = Objects.requireNonNull(processInput); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/PassThroughProcessor.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/PassThroughProcessor.java index 146967ffb04d4..f4859405d35b9 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/PassThroughProcessor.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/PassThroughProcessor.java @@ -24,21 +24,13 @@ * A NLP processor that directly returns the PyTorch result * without any post-processing */ -public class PassThroughProcessor implements NlpTask.Processor { +public class PassThroughProcessor extends NlpTask.Processor { private final NlpTask.RequestBuilder requestBuilder; - private final NlpTokenizer tokenizer; - private final String resultsField; PassThroughProcessor(NlpTokenizer tokenizer, PassThroughConfig config) { + super(tokenizer); this.requestBuilder 
= tokenizer.requestBuilder(); - this.resultsField = config.getResultsField(); - this.tokenizer = tokenizer; - } - - @Override - public void close() { - tokenizer.close(); } @Override diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/TextClassificationProcessor.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/TextClassificationProcessor.java index 77e994cbd06f0..a3dd5e619e5e6 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/TextClassificationProcessor.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/TextClassificationProcessor.java @@ -27,26 +27,20 @@ import static org.elasticsearch.xpack.core.ml.inference.trainedmodel.InferenceConfig.DEFAULT_RESULTS_FIELD; -public class TextClassificationProcessor implements NlpTask.Processor { +public class TextClassificationProcessor extends NlpTask.Processor { private final NlpTask.RequestBuilder requestBuilder; - private final NlpTokenizer tokenizer; private final String[] classLabels; private final int numTopClasses; TextClassificationProcessor(NlpTokenizer tokenizer, TextClassificationConfig config) { + super(tokenizer); this.requestBuilder = tokenizer.requestBuilder(); List classLabels = config.getClassificationLabels(); this.classLabels = classLabels.toArray(String[]::new); // negative values are a special case of asking for ALL classes. Since we require the output size to equal the classLabel size // This is a nice way of setting the value this.numTopClasses = config.getNumTopClasses() < 0 ? this.classLabels.length : config.getNumTopClasses(); - this.tokenizer = tokenizer; - } - - @Override - public void close() { - tokenizer.close(); } @Override diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/TextEmbeddingProcessor.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/TextEmbeddingProcessor.java index b26355fa9f473..0671235176ad2 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/TextEmbeddingProcessor.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/TextEmbeddingProcessor.java @@ -23,19 +23,13 @@ /** * A NLP processor that returns a single double[] output from the model. 
Assumes that only one tensor is returned via inference **/ -public class TextEmbeddingProcessor implements NlpTask.Processor { +public class TextEmbeddingProcessor extends NlpTask.Processor { private final NlpTask.RequestBuilder requestBuilder; - private final NlpTokenizer tokenizer; TextEmbeddingProcessor(NlpTokenizer tokenizer, TextEmbeddingConfig config) { + super(tokenizer); this.requestBuilder = tokenizer.requestBuilder(); - this.tokenizer = tokenizer; - } - - @Override - public void close() { - tokenizer.close(); } @Override diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/ZeroShotClassificationProcessor.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/ZeroShotClassificationProcessor.java index 699dd0084f1b7..861506606e21f 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/ZeroShotClassificationProcessor.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/ZeroShotClassificationProcessor.java @@ -33,9 +33,8 @@ import static org.elasticsearch.xpack.core.ml.inference.trainedmodel.InferenceConfig.DEFAULT_RESULTS_FIELD; -public class ZeroShotClassificationProcessor implements NlpTask.Processor { +public class ZeroShotClassificationProcessor extends NlpTask.Processor { - private final NlpTokenizer tokenizer; private final int entailmentPos; private final int contraPos; private final String[] labels; @@ -44,7 +43,7 @@ public class ZeroShotClassificationProcessor implements NlpTask.Processor { private final String resultsField; ZeroShotClassificationProcessor(NlpTokenizer tokenizer, ZeroShotClassificationConfig config) { - this.tokenizer = tokenizer; + super(tokenizer); List lowerCased = config.getClassificationLabels() .stream() .map(s -> s.toLowerCase(Locale.ROOT)) @@ -62,11 +61,6 @@ public class ZeroShotClassificationProcessor implements NlpTask.Processor { this.resultsField = config.getResultsField(); } - @Override - public void close() { - tokenizer.close(); - } - @Override public void validateInputs(List inputs) { // nothing to validate @@ -103,51 +97,25 @@ public NlpTask.ResultProcessor getResultProcessor(NlpConfig nlpConfig) { return new ResultProcessor(entailmentPos, contraPos, labelsValue, isMultiLabelValue, resultsFieldValue); } - static class RequestBuilder implements NlpTask.RequestBuilder { - - private final NlpTokenizer tokenizer; - private final String[] labels; - private final String hypothesisTemplate; - - RequestBuilder(NlpTokenizer tokenizer, String[] labels, String hypothesisTemplate) { - this.tokenizer = tokenizer; - this.labels = labels; - this.hypothesisTemplate = hypothesisTemplate; - } + record RequestBuilder(NlpTokenizer tokenizer, String[] labels, String hypothesisTemplate) implements NlpTask.RequestBuilder { @Override public NlpTask.Request buildRequest(List inputs, String requestId, Tokenization.Truncate truncate) throws IOException { if (inputs.size() > 1) { throw ExceptionsHelper.badRequestException("Unable to do zero-shot classification on more than one text input at a time"); } - List tokenizations = new ArrayList<>(labels.length); + List tokenizations = new ArrayList<>(labels.length); for (String label : labels) { tokenizations.add(tokenizer.tokenize(inputs.get(0), LoggerMessageFormat.format(null, hypothesisTemplate, label), truncate)); } TokenizationResult result = tokenizer.buildTokenizationResult(tokenizations); - return buildRequest(result, requestId); - } - - @Override - public NlpTask.Request 
buildRequest(TokenizationResult tokenizationResult, String requestId) throws IOException { - return tokenizer.requestBuilder().buildRequest(tokenizationResult, requestId); + return result.buildRequest(requestId, truncate); } } - static class ResultProcessor implements NlpTask.ResultProcessor { - private final int entailmentPos; - private final int contraPos; - private final String[] labels; - private final boolean isMultiLabel; - private final String resultsField; - - ResultProcessor(int entailmentPos, int contraPos, String[] labels, boolean isMultiLabel, String resultsField) { - this.entailmentPos = entailmentPos; - this.contraPos = contraPos; - this.labels = labels; - this.isMultiLabel = isMultiLabel; - this.resultsField = resultsField; - } + record ResultProcessor(int entailmentPos, int contraPos, String[] labels, boolean isMultiLabel, String resultsField) + implements + NlpTask.ResultProcessor { @Override public InferenceResults processResult(TokenizationResult tokenization, PyTorchInferenceResult pyTorchResult) { diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/BertTokenizationResult.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/BertTokenizationResult.java new file mode 100644 index 0000000000000..87429d2bcf2eb --- /dev/null +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/BertTokenizationResult.java @@ -0,0 +1,118 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.ml.inference.nlp.tokenizers; + +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentFactory; +import org.elasticsearch.xpack.core.ml.inference.trainedmodel.Tokenization; +import org.elasticsearch.xpack.ml.inference.nlp.NlpTask; + +import java.io.IOException; +import java.util.List; +import java.util.function.Function; +import java.util.stream.IntStream; +import java.util.stream.Stream; + +public class BertTokenizationResult extends TokenizationResult { + + static final String REQUEST_ID = "request_id"; + static final String TOKENS = "tokens"; + static final String ARG1 = "arg_1"; + static final String ARG2 = "arg_2"; + static final String ARG3 = "arg_3"; + + public BertTokenizationResult(List vocab, List tokenizations, int padTokenId) { + super(vocab, tokenizations, padTokenId); + } + + @Override + public NlpTask.Request buildRequest(String requestId, Tokenization.Truncate t) throws IOException { + XContentBuilder builder = XContentFactory.jsonBuilder(); + builder.startObject(); + builder.field(REQUEST_ID, requestId); + writePaddedTokens(TOKENS, builder); + writeAttentionMask(ARG1, builder); + writeTokenTypeIds(ARG2, builder); + writePositionIds(ARG3, builder); + builder.endObject(); + + // BytesReference.bytes closes the builder + BytesReference jsonRequest = BytesReference.bytes(builder); + return new NlpTask.Request(this, jsonRequest); + } + + static class BertTokensBuilder implements TokensBuilder { + protected final Stream.Builder tokenIds; + protected final Stream.Builder tokenMap; + protected final boolean withSpecialTokens; + protected final int clsTokenId; + protected final int sepTokenId; + + BertTokensBuilder(boolean withSpecialTokens, int 
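Aside: several helpers in this change (Request, the zero-shot RequestBuilder and ResultProcessor, CharSeqTokenTrieNode, Tokens) collapse into records. A record may implement an interface, and its canonical constructor replaces the hand-written field assignments. A simplified sketch of the pattern, with an invented Scorer interface standing in for NlpTask.ResultProcessor:

    interface Scorer {
        double score(double[] logits);
    }

    // before: final fields + constructor boilerplate; after: one declaration line
    record EntailmentScorer(int entailmentPos, int contraPos) implements Scorer {
        @Override
        public double score(double[] logits) {
            return logits[entailmentPos] - logits[contraPos];
        }
    }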
clsTokenId, int sepTokenId) { + this.withSpecialTokens = withSpecialTokens; + this.clsTokenId = clsTokenId; + this.sepTokenId = sepTokenId; + this.tokenIds = Stream.builder(); + this.tokenMap = Stream.builder(); + } + + @Override + public TokensBuilder addSequence(List wordPieceTokenIds, List tokenPositionMap) { + if (withSpecialTokens) { + tokenIds.add(IntStream.of(clsTokenId)); + tokenMap.add(IntStream.of(SPECIAL_TOKEN_POSITION)); + } + tokenIds.add(wordPieceTokenIds.stream().mapToInt(Integer::valueOf)); + tokenMap.add(tokenPositionMap.stream().mapToInt(Integer::valueOf)); + if (withSpecialTokens) { + tokenIds.add(IntStream.of(sepTokenId)); + tokenMap.add(IntStream.of(SPECIAL_TOKEN_POSITION)); + } + return this; + } + + @Override + public TokensBuilder addSequencePair( + List tokenId1s, + List tokenMap1, + List tokenId2s, + List tokenMap2 + ) { + if (withSpecialTokens) { + tokenIds.add(IntStream.of(clsTokenId)); + tokenMap.add(IntStream.of(SPECIAL_TOKEN_POSITION)); + } + tokenIds.add(tokenId1s.stream().mapToInt(Integer::valueOf)); + tokenMap.add(tokenMap1.stream().mapToInt(Integer::valueOf)); + int previouslyFinalMap = tokenMap1.get(tokenMap1.size() - 1); + if (withSpecialTokens) { + tokenIds.add(IntStream.of(sepTokenId)); + tokenMap.add(IntStream.of(SPECIAL_TOKEN_POSITION)); + } + tokenIds.add(tokenId2s.stream().mapToInt(Integer::valueOf)); + tokenMap.add(tokenMap2.stream().mapToInt(i -> i + previouslyFinalMap)); + if (withSpecialTokens) { + tokenIds.add(IntStream.of(sepTokenId)); + tokenMap.add(IntStream.of(SPECIAL_TOKEN_POSITION)); + } + return this; + } + + @Override + public Tokens build(String input, boolean truncated, List allTokens) { + return new Tokens( + input, + allTokens, + truncated, + tokenIds.build().flatMapToInt(Function.identity()).toArray(), + tokenMap.build().flatMapToInt(Function.identity()).toArray() + ); + } + } +} diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/BertTokenizer.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/BertTokenizer.java index 7ada856f5dd0a..4b9b63ca57f79 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/BertTokenizer.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/BertTokenizer.java @@ -11,7 +11,6 @@ import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.Tokenization; import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; -import org.elasticsearch.xpack.ml.inference.nlp.BertRequestBuilder; import org.elasticsearch.xpack.ml.inference.nlp.NlpTask; import java.io.IOException; @@ -23,10 +22,7 @@ import java.util.Set; import java.util.SortedMap; import java.util.TreeMap; -import java.util.function.Function; import java.util.stream.Collectors; -import java.util.stream.IntStream; -import java.util.stream.Stream; /** * Performs basic tokenization and normalization of input text @@ -49,17 +45,17 @@ public class BertTokenizer implements NlpTokenizer { private static final Set NEVER_SPLIT = Set.of(MASK_TOKEN); private final WordPieceAnalyzer wordPieceAnalyzer; - private final List originalVocab; + protected final List originalVocab; // TODO Not sure this needs to be a sorted map private final SortedMap vocab; protected final boolean withSpecialTokens; private final int maxSequenceLength; - private final NlpTask.RequestBuilder requestBuilder; private final String sepToken; protected final int sepTokenId; private 
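Aside: BertTokensBuilder accumulates a Stream.Builder of IntStream per call and only flattens on build(), avoiding intermediate array copies. A self-contained sketch of the same trick, with invented vocabulary ids (12 = [CLS], 13 = [SEP]):

    import java.util.function.Function;
    import java.util.stream.IntStream;
    import java.util.stream.Stream;

    static int[] flattenExample() {
        Stream.Builder<IntStream> tokenIds = Stream.builder();
        tokenIds.add(IntStream.of(12));      // [CLS]
        tokenIds.add(IntStream.of(0, 1, 3)); // word-piece ids of one sequence
        tokenIds.add(IntStream.of(13));      // [SEP]
        return tokenIds.build().flatMapToInt(Function.identity()).toArray(); // {12, 0, 1, 3, 13}
    }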
final String clsToken; private final int clsTokenId; private final String padToken; + protected final int padTokenId; private final String maskToken; private final String unknownToken; @@ -71,7 +67,6 @@ protected BertTokenizer( boolean doStripAccents, boolean withSpecialTokens, int maxSequenceLength, - Function requestBuilderFactory, Set neverSplit ) { this( @@ -82,7 +77,6 @@ protected BertTokenizer( doStripAccents, withSpecialTokens, maxSequenceLength, - requestBuilderFactory, Sets.union(neverSplit, NEVER_SPLIT), SEPARATOR_TOKEN, CLASS_TOKEN, @@ -100,7 +94,6 @@ protected BertTokenizer( boolean doStripAccents, boolean withSpecialTokens, int maxSequenceLength, - Function requestBuilderFactory, Set neverSplit, String sepToken, String clsToken, @@ -120,13 +113,13 @@ protected BertTokenizer( this.vocab = vocab; this.withSpecialTokens = withSpecialTokens; this.maxSequenceLength = maxSequenceLength; - this.requestBuilder = requestBuilderFactory.apply(this); if (vocab.containsKey(unknownToken) == false) { throw ExceptionsHelper.conflictStatusException("stored vocabulary is missing required [{}] token", unknownToken); } if (vocab.containsKey(padToken) == false) { throw ExceptionsHelper.conflictStatusException("stored vocabulary is missing required [{}] token", padToken); } + this.padTokenId = vocab.get(padToken); if (withSpecialTokens) { Set missingSpecialTokens = Sets.difference(Set.of(sepToken, clsToken), vocab.keySet()); @@ -188,12 +181,12 @@ public String getMaskToken() { } @Override - public TokenizationResult buildTokenizationResult(List tokenizations) { - TokenizationResult tokenizationResult = new TokenizationResult(originalVocab); - for (TokenizationResult.Tokenization tokenization : tokenizations) { - tokenizationResult.addTokenization(tokenization); - } - return tokenizationResult; + public TokenizationResult buildTokenizationResult(List tokenizations) { + return new BertTokenizationResult(originalVocab, tokenizations, vocab.get(this.padToken)); + } + + TokenizationResult.TokensBuilder createTokensBuilder(int clsTokenId, int sepTokenId, boolean withSpecialTokens) { + return new BertTokenizationResult.BertTokensBuilder(withSpecialTokens, clsTokenId, sepTokenId); } /** @@ -208,7 +201,7 @@ public TokenizationResult buildTokenizationResult(List wordPieceTokenIds = innerResult.tokens; List tokenPositionMap = innerResult.tokenPositionMap; @@ -229,21 +222,14 @@ public TokenizationResult.Tokenization tokenize(String seq, Tokenization.Truncat ); } } - BertTokenizationBuilder bertTokenizationBuilder = bertTokenizationBuilder().addTokens( + return createTokensBuilder(clsTokenId, sepTokenId, withSpecialTokens).addSequence( wordPieceTokenIds.stream().map(WordPieceTokenFilter.WordPieceToken::getEncoding).collect(Collectors.toList()), tokenPositionMap - ).addEndTokensIfNecessary(); - return new TokenizationResult.Tokenization( - seq, - innerResult.tokens, - isTruncated, - bertTokenizationBuilder.buildIds(), - bertTokenizationBuilder.buildMap() - ); + ).build(seq, isTruncated, innerResult.tokens); } @Override - public TokenizationResult.Tokenization tokenize(String seq1, String seq2, Tokenization.Truncate truncate) { + public TokenizationResult.Tokens tokenize(String seq1, String seq2, Tokenization.Truncate truncate) { var innerResultSeq1 = innerTokenize(seq1); List wordPieceTokenIdsSeq1 = innerResultSeq1.tokens; List tokenPositionMapSeq1 = innerResultSeq1.tokenPositionMap; @@ -302,28 +288,21 @@ public TokenizationResult.Tokenization tokenize(String seq1, String seq2, Tokeni ); } } - 
BertTokenizationBuilder bertTokenizationBuilder = bertTokenizationBuilder().addTokens( - wordPieceTokenIdsSeq1.stream().map(WordPieceTokenFilter.WordPieceToken::getEncoding).collect(Collectors.toList()), - tokenPositionMapSeq1 - ) - .addTokens( - wordPieceTokenIdsSeq2.stream().map(WordPieceTokenFilter.WordPieceToken::getEncoding).collect(Collectors.toList()), - tokenPositionMapSeq2 - ) - .addEndTokensIfNecessary(); List tokens = new ArrayList<>(innerResultSeq1.tokens); tokens.addAll(innerResultSeq2.tokens); - return new TokenizationResult.Tokenization( - seq1 + seq2, - tokens, - isTruncated, - bertTokenizationBuilder.buildIds(), - bertTokenizationBuilder.buildMap() - ); + return createTokensBuilder(clsTokenId, sepTokenId, withSpecialTokens).addSequencePair( + wordPieceTokenIdsSeq1.stream().map(WordPieceTokenFilter.WordPieceToken::getEncoding).collect(Collectors.toList()), + tokenPositionMapSeq1, + wordPieceTokenIdsSeq2.stream().map(WordPieceTokenFilter.WordPieceToken::getEncoding).collect(Collectors.toList()), + tokenPositionMapSeq2 + ).build(seq1 + seq2, isTruncated, tokens); } - protected BertTokenizationBuilder bertTokenizationBuilder() { - return new BertTokenizationBuilder(); + @Override + public NlpTask.RequestBuilder requestBuilder() { + return (inputs, requestId, truncate) -> buildTokenizationResult( + inputs.stream().map(s -> tokenize(s, truncate)).collect(Collectors.toList()) + ).buildRequest(requestId, truncate); } protected int getNumExtraTokensForSeqPair() { @@ -361,11 +340,6 @@ private static class InnerTokenization { } } - @Override - public NlpTask.RequestBuilder requestBuilder() { - return requestBuilder; - } - public int getMaxSequenceLength() { return maxSequenceLength; } @@ -374,59 +348,16 @@ public static Builder builder(List vocab, Tokenization tokenization) { return new Builder(vocab, tokenization); } - protected class BertTokenizationBuilder { - Stream.Builder tokenIds; - Stream.Builder tokenMap; - int numSeq; - - BertTokenizationBuilder() { - tokenIds = Stream.builder(); - tokenMap = Stream.builder(); - if (withSpecialTokens) { - tokenIds.add(IntStream.of(clsTokenId)); - tokenMap.add(IntStream.of(SPECIAL_TOKEN_POSITION)); - } - } - - BertTokenizationBuilder addTokens(List wordPieceTokenIds, List tokenPositionMap) { - if (numSeq > 0 && withSpecialTokens) { - tokenIds.add(IntStream.of(sepTokenId)); - tokenMap.add(IntStream.of(SPECIAL_TOKEN_POSITION)); - } - tokenIds.add(wordPieceTokenIds.stream().mapToInt(Integer::valueOf)); - tokenMap.add(tokenPositionMap.stream().mapToInt(Integer::valueOf)); - numSeq++; - return this; - } - - BertTokenizationBuilder addEndTokensIfNecessary() { - if (withSpecialTokens) { - tokenIds.add(IntStream.of(sepTokenId)); - tokenMap.add(IntStream.of(SPECIAL_TOKEN_POSITION)); - } - return this; - } - - int[] buildIds() { - return tokenIds.build().flatMapToInt(Function.identity()).toArray(); - } - - int[] buildMap() { - return tokenMap.build().flatMapToInt(Function.identity()).toArray(); - } - } - public static class Builder { protected final List originalVocab; protected final SortedMap vocab; - protected boolean doLowerCase = false; + protected boolean doLowerCase; protected boolean doTokenizeCjKChars = true; - protected boolean withSpecialTokens = true; + protected boolean withSpecialTokens; protected int maxSequenceLength; protected Boolean doStripAccents = null; protected Set neverSplit; - protected Function requestBuilderFactory = BertRequestBuilder::new; protected Builder(List vocab, Tokenization tokenization) { this.originalVocab = 
vocab; @@ -479,11 +410,6 @@ public Builder setWithSpecialTokens(boolean withSpecialTokens) { return this; } - public Builder setRequestBuilderFactory(Function requestBuilderFactory) { - this.requestBuilderFactory = requestBuilderFactory; - return this; - } - public BertTokenizer build() { // if not set strip accents defaults to the value of doLowerCase if (doStripAccents == null) { @@ -502,7 +428,6 @@ public BertTokenizer build() { doStripAccents, withSpecialTokens, maxSequenceLength, - requestBuilderFactory, neverSplit ); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/CharSeqTokenTrieNode.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/CharSeqTokenTrieNode.java index 9253759e41232..7b5514b692285 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/CharSeqTokenTrieNode.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/CharSeqTokenTrieNode.java @@ -16,13 +16,11 @@ import java.util.List; import java.util.Objects; -public class CharSeqTokenTrieNode { +public record CharSeqTokenTrieNode(CharArrayMap children) { public static final CharSeqTokenTrieNode EMPTY = new CharSeqTokenTrieNode(new CharArrayMap<>(0, false)); - private final CharArrayMap children; - - private CharSeqTokenTrieNode(CharArrayMap children) { + public CharSeqTokenTrieNode(CharArrayMap children) { this.children = Objects.requireNonNull(children); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/MPNetTokenizationResult.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/MPNetTokenizationResult.java new file mode 100644 index 0000000000000..44cd29309f648 --- /dev/null +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/MPNetTokenizationResult.java @@ -0,0 +1,78 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */
+
+package org.elasticsearch.xpack.ml.inference.nlp.tokenizers;
+
+import org.elasticsearch.common.bytes.BytesReference;
+import org.elasticsearch.xcontent.XContentBuilder;
+import org.elasticsearch.xcontent.XContentFactory;
+import org.elasticsearch.xpack.core.ml.inference.trainedmodel.Tokenization;
+import org.elasticsearch.xpack.ml.inference.nlp.NlpTask;
+
+import java.io.IOException;
+import java.util.List;
+import java.util.stream.IntStream;
+
+public class MPNetTokenizationResult extends TokenizationResult {
+
+    static final String REQUEST_ID = "request_id";
+    static final String TOKENS = "tokens";
+    static final String ARG1 = "arg_1";
+
+    public MPNetTokenizationResult(List<String> vocab, List<Tokens> tokenizations, int padTokenId) {
+        super(vocab, tokenizations, padTokenId);
+    }
+
+    @Override
+    public NlpTask.Request buildRequest(String requestId, Tokenization.Truncate t) throws IOException {
+        XContentBuilder builder = XContentFactory.jsonBuilder();
+        builder.startObject();
+        builder.field(REQUEST_ID, requestId);
+        writePaddedTokens(TOKENS, builder);
+        writeAttentionMask(ARG1, builder);
+        builder.endObject();
+
+        // BytesReference.bytes closes the builder
+        BytesReference jsonRequest = BytesReference.bytes(builder);
+        return new NlpTask.Request(this, jsonRequest);
+    }
+
+    static class MPNetTokensBuilder extends BertTokenizationResult.BertTokensBuilder {
+
+        MPNetTokensBuilder(boolean withSpecialTokens, int clsTokenId, int sepTokenId) {
+            super(withSpecialTokens, clsTokenId, sepTokenId);
+        }
+
+        @Override
+        public TokensBuilder addSequencePair(
+            List<Integer> tokenId1s,
+            List<Integer> tokenMap1,
+            List<Integer> tokenId2s,
+            List<Integer> tokenMap2
+        ) {
+            if (withSpecialTokens) {
+                tokenIds.add(IntStream.of(clsTokenId));
+                tokenMap.add(IntStream.of(SPECIAL_TOKEN_POSITION));
+            }
+            tokenIds.add(tokenId1s.stream().mapToInt(Integer::valueOf));
+            tokenMap.add(tokenMap1.stream().mapToInt(Integer::valueOf));
+            int previouslyFinalMap = tokenMap1.get(tokenMap1.size() - 1);
+            // MPNet adds two separator tokens between sequence pairs
+            if (withSpecialTokens) {
+                tokenIds.add(IntStream.of(sepTokenId, sepTokenId));
+                tokenMap.add(IntStream.of(SPECIAL_TOKEN_POSITION, SPECIAL_TOKEN_POSITION));
+            }
+            tokenIds.add(tokenId2s.stream().mapToInt(Integer::valueOf));
+            tokenMap.add(tokenMap2.stream().mapToInt(i -> i + previouslyFinalMap));
+            if (withSpecialTokens) {
+                tokenIds.add(IntStream.of(sepTokenId));
+                tokenMap.add(IntStream.of(SPECIAL_TOKEN_POSITION));
+            }
+            return this;
+        }
+    }
+}
diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/MPNetTokenizer.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/MPNetTokenizer.java
index e2468041b8df0..5639cac1aa758 100644
--- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/MPNetTokenizer.java
+++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/MPNetTokenizer.java
@@ -8,7 +8,6 @@
 import org.elasticsearch.common.util.set.Sets;
 import org.elasticsearch.xpack.core.ml.inference.trainedmodel.Tokenization;
-import org.elasticsearch.xpack.ml.inference.nlp.MPNetRequestBuilder;
 import org.elasticsearch.xpack.ml.inference.nlp.NlpTask;
 
 import java.util.Collections;
@@ -16,8 +15,7 @@ import java.util.Set;
 import java.util.SortedMap;
 import java.util.TreeMap;
-import java.util.function.Function;
-import java.util.stream.IntStream;
+import java.util.stream.Collectors;
 
 /**
  * Performs basic tokenization and normalization of input text
@@ -41,7 +39,6 @@ protected MPNetTokenizer(
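Aside: assuming withSpecialTokens is true, and writing CLS/SEP for the class and separator ids, the two builders above lay out a sequence pair as follows. The doubled separator is the only difference, which is also why getNumExtraTokensForSeqPair() returns 4 for MPNet below, versus 3 for the BERT layout:

    // BertTokensBuilder.addSequencePair:  CLS s1... SEP s2... SEP      (3 extra tokens)
    // MPNetTokensBuilder.addSequencePair: CLS s1... SEP SEP s2... SEP  (4 extra tokens)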
boolean doStripAccents, boolean withSpecialTokens, int maxSequenceLength, - Function requestBuilderFactory, Set neverSplit ) { super( @@ -52,7 +49,6 @@ protected MPNetTokenizer( doStripAccents, withSpecialTokens, maxSequenceLength, - requestBuilderFactory, Sets.union(neverSplit, NEVER_SPLIT), SEPARATOR_TOKEN, CLASS_TOKEN, @@ -67,25 +63,20 @@ protected int getNumExtraTokensForSeqPair() { return 4; } - @Override - protected BertTokenizationBuilder bertTokenizationBuilder() { - return new MPNetTokenizationBuilder(); + TokenizationResult.TokensBuilder createTokensBuilder(int clsTokenId, int sepTokenId, boolean withSpecialTokens) { + return new MPNetTokenizationResult.MPNetTokensBuilder(withSpecialTokens, clsTokenId, sepTokenId); } - protected class MPNetTokenizationBuilder extends BertTokenizationBuilder { - - @Override - BertTokenizationBuilder addTokens(List wordPieceTokenIds, List tokenPositionMap) { - if (numSeq > 0 && withSpecialTokens) { - tokenIds.add(IntStream.of(sepTokenId, sepTokenId)); - tokenMap.add(IntStream.of(SPECIAL_TOKEN_POSITION, SPECIAL_TOKEN_POSITION)); - } - tokenIds.add(wordPieceTokenIds.stream().mapToInt(Integer::valueOf)); - tokenMap.add(tokenPositionMap.stream().mapToInt(Integer::valueOf)); - numSeq++; - return this; - } + @Override + public NlpTask.RequestBuilder requestBuilder() { + return (inputs, requestId, truncate) -> buildTokenizationResult( + inputs.stream().map(s -> tokenize(s, truncate)).collect(Collectors.toList()) + ).buildRequest(requestId, truncate); + } + @Override + public TokenizationResult buildTokenizationResult(List tokenizations) { + return new MPNetTokenizationResult(originalVocab, tokenizations, getPadTokenId().orElseThrow()); } public static Builder mpBuilder(List vocab, Tokenization tokenization) { @@ -96,13 +87,12 @@ public static class Builder { protected final List originalVocab; protected final SortedMap vocab; - protected boolean doLowerCase = false; + protected boolean doLowerCase; protected boolean doTokenizeCjKChars = true; - protected boolean withSpecialTokens = true; + protected boolean withSpecialTokens; protected int maxSequenceLength; protected Boolean doStripAccents = null; protected Set neverSplit; - protected Function requestBuilderFactory = MPNetRequestBuilder::new; protected Builder(List vocab, Tokenization tokenization) { this.originalVocab = vocab; @@ -155,11 +145,6 @@ public Builder setWithSpecialTokens(boolean withSpecialTokens) { return this; } - public Builder setRequestBuilderFactory(Function requestBuilderFactory) { - this.requestBuilderFactory = requestBuilderFactory; - return this; - } - public MPNetTokenizer build() { // if not set strip accents defaults to the value of doLowerCase if (doStripAccents == null) { @@ -178,7 +163,6 @@ public MPNetTokenizer build() { doStripAccents, withSpecialTokens, maxSequenceLength, - requestBuilderFactory, neverSplit ); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/NlpTokenizer.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/NlpTokenizer.java index adf303667065b..7eab8dfcf8f50 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/NlpTokenizer.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/NlpTokenizer.java @@ -12,8 +12,6 @@ import org.elasticsearch.xpack.core.ml.inference.trainedmodel.MPNetTokenization; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.Tokenization; import 
org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper;
-import org.elasticsearch.xpack.ml.inference.nlp.BertRequestBuilder;
-import org.elasticsearch.xpack.ml.inference.nlp.MPNetRequestBuilder;
 import org.elasticsearch.xpack.ml.inference.nlp.NlpTask;
 import org.elasticsearch.xpack.ml.inference.nlp.Vocabulary;
@@ -25,11 +23,11 @@ public interface NlpTokenizer extends Releasable {
 
-    TokenizationResult buildTokenizationResult(List<TokenizationResult.Tokenization> tokenizations);
+    TokenizationResult buildTokenizationResult(List<TokenizationResult.Tokens> tokenizations);
 
-    TokenizationResult.Tokenization tokenize(String seq, Tokenization.Truncate truncate);
+    TokenizationResult.Tokens tokenize(String seq, Tokenization.Truncate truncate);
 
-    TokenizationResult.Tokenization tokenize(String seq1, String seq2, Tokenization.Truncate truncate);
+    TokenizationResult.Tokens tokenize(String seq1, String seq2, Tokenization.Truncate truncate);
 
     NlpTask.RequestBuilder requestBuilder();
 
@@ -45,10 +43,10 @@ static NlpTokenizer build(Vocabulary vocabulary, Tokenization params) {
         ExceptionsHelper.requireNonNull(params, TOKENIZATION);
         ExceptionsHelper.requireNonNull(vocabulary, VOCABULARY);
         if (params instanceof BertTokenization) {
-            return BertTokenizer.builder(vocabulary.get(), params).setRequestBuilderFactory(BertRequestBuilder::new).build();
+            return BertTokenizer.builder(vocabulary.get(), params).build();
         }
         if (params instanceof MPNetTokenization) {
-            return MPNetTokenizer.mpBuilder(vocabulary.get(), params).setRequestBuilderFactory(MPNetRequestBuilder::new).build();
+            return MPNetTokenizer.mpBuilder(vocabulary.get(), params).build();
         }
         throw new IllegalArgumentException("unknown tokenization type [" + params.getName() + "]");
     }
diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/TokenizationResult.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/TokenizationResult.java
index c13df493d05ce..30ceb1c437a51 100644
--- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/TokenizationResult.java
+++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/TokenizationResult.java
@@ -7,105 +7,150 @@ package org.elasticsearch.xpack.ml.inference.nlp.tokenizers;
 
-import java.util.ArrayList;
+import org.elasticsearch.xcontent.XContentBuilder;
+import org.elasticsearch.xpack.core.ml.inference.trainedmodel.Tokenization;
+import org.elasticsearch.xpack.ml.inference.nlp.NlpTask;
+
+import java.io.IOException;
 import java.util.List;
+import java.util.OptionalInt;
+import java.util.stream.IntStream;
 
-public class TokenizationResult {
+public abstract class TokenizationResult {
+    public static final int SPECIAL_TOKEN_POSITION = -1;
 
     private final List<String> vocab;
-    private final List<Tokenization> tokenizations = new ArrayList<>();
-    private int maxLength;
+    private final List<Tokens> tokens;
+    private final int maxLength;
+    private final int padTokenId;
 
-    public TokenizationResult(List<String> vocab) {
+    protected TokenizationResult(List<String> vocab, List<Tokens> tokenizations, int padTokenId) {
         this.vocab = vocab;
-        this.maxLength = -1;
+        this.tokens = tokenizations;
+        this.padTokenId = padTokenId;
+        int max = 0;
+        for (Tokens tokenization : tokenizations) {
+            max = Math.max(tokenization.tokenIds.length, max);
+        }
+        this.maxLength = max;
     }
 
-    public boolean anyTruncated() {
-        return tokenizations.stream().anyMatch(Tokenization::isTruncated);
+    List<Tokens> getTokens() {
+        return tokens;
     }
 
     public String getFromVocab(int tokenId) {
         return vocab.get(tokenId);
     }
 
-    public List<Tokenization> getTokenizations() {
-
return tokenizations; + public Tokens getTokenization(int tokenizationIndex) { + return tokens.get(tokenizationIndex); } - public void addTokenization( - String input, - boolean isTruncated, - List tokens, - int[] tokenIds, - int[] tokenMap - ) { - maxLength = Math.max(maxLength, tokenIds.length); - tokenizations.add(new Tokenization(input, tokens, isTruncated, tokenIds, tokenMap)); + public boolean anyTruncated() { + return tokens.stream().anyMatch(Tokens::truncated); } - public void addTokenization(Tokenization tokenization) { - maxLength = Math.max(maxLength, tokenization.tokenIds.length); - tokenizations.add(tokenization); + public boolean isEmpty() { + return this.tokens.isEmpty() || this.tokens.stream().allMatch(t -> t.tokenIds.length == 0); } - public int getLongestSequenceLength() { - return maxLength; + public abstract NlpTask.Request buildRequest(String requestId, Tokenization.Truncate t) throws IOException; + + protected void writePaddedTokens(String fieldName, XContentBuilder builder) throws IOException { + builder.startArray(fieldName); + for (var inputTokens : tokens) { + builder.startArray(); + + // Note, cannot write the array directly as the internal builder code writes start/end array values + for (int t : inputTokens.tokenIds) { + builder.value(t); + } + for (int i = inputTokens.tokenIds.length; i < maxLength; i++) { + builder.value(padTokenId); + } + builder.endArray(); + } + builder.endArray(); } - public static class Tokenization { - - private final String input; - private final List tokens; - private final int[] tokenIds; - private final int[] tokenMap; - private final boolean truncated; - - public Tokenization( - String input, - List tokens, - boolean truncated, - int[] tokenIds, - int[] tokenMap - ) { - assert tokenIds.length == tokenMap.length; - this.input = input; - this.tokens = tokens; - this.tokenIds = tokenIds; - this.tokenMap = tokenMap; - this.truncated = truncated; + protected void writeAttentionMask(String fieldName, XContentBuilder builder) throws IOException { + builder.startArray(fieldName); + for (var inputTokens : tokens) { + builder.startArray(); + // Note, cannot write the array directly as the internal builder code writes start/end array values + for (int ignored : inputTokens.tokenIds) { + builder.value(1); + } + for (int i = inputTokens.tokenIds.length; i < maxLength; i++) { + builder.value(padTokenId); + } + builder.endArray(); } + builder.endArray(); + } - /** - * The integer values of the tokens} - * - * @return A list of token Ids - */ - public int[] getTokenIds() { - return tokenIds; + protected void writeTokenTypeIds(String fieldName, XContentBuilder builder) throws IOException { + builder.startArray(fieldName); + for (int i = 0; i < tokens.size(); i++) { + builder.startArray(); + for (int j = 0; j < maxLength; j++) { + builder.value(0); + } + builder.endArray(); } + builder.endArray(); + } - /** - * Maps the token position to the position in the source text. 
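Aside: a stand-alone restatement (assumed equivalent, not code from this patch) of the padding rule in writePaddedTokens: every row is right-padded with padTokenId up to the longest tokenization in the batch, so the serialized request body is rectangular.

    import java.util.Arrays;
    import java.util.List;

    static int[][] pad(List<int[]> batch, int padTokenId) {
        int maxLength = batch.stream().mapToInt(row -> row.length).max().orElse(0);
        int[][] padded = new int[batch.size()][];
        for (int i = 0; i < batch.size(); i++) {
            int[] row = batch.get(i);
            padded[i] = Arrays.copyOf(row, maxLength);              // copy, zero-filled tail
            Arrays.fill(padded[i], row.length, maxLength, padTokenId); // overwrite tail with pad id
        }
        return padded;
    }

    // pad(List.of(new int[] {12, 0, 13}, new int[] {12, 4, 5, 13}), 0)
    // -> {{12, 0, 13, 0}, {12, 4, 5, 13}}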
- * Source words may be divided into more than one token so more - * than one token can map back to the source token - * - * @return Map of source token to - */ - public int[] getTokenMap() { - return tokenMap; + protected void writePositionIds(String fieldName, XContentBuilder builder) throws IOException { + builder.startArray(fieldName); + for (int i = 0; i < tokens.size(); i++) { + builder.startArray(); + for (int j = 0; j < maxLength; j++) { + builder.value(j); + } + builder.endArray(); } + builder.endArray(); + } - public String getInput() { - return input; - } + public record Tokens(String input, List tokens, boolean truncated, int[] tokenIds, int[] tokenMap) { - public List getTokens() { - return tokens; + public Tokens { + assert tokenIds.length == tokenMap.length; } - public boolean isTruncated() { - return truncated; + public OptionalInt getTokenIndex(int token) { + return IntStream.range(0, tokenIds.length).filter(tokenIndex -> token == tokenIds[tokenIndex]).findFirst(); } } + + interface TokensBuilder { + /** + * Adds tokens to the token builder + * @param tokenIds Token ids without special tokens added + * @param tokenMap Token map without considering special tokens + * @return The builder object + */ + TokensBuilder addSequence(List tokenIds, List tokenMap); + + /** + * Adds an encoded sequence pair to the token builder + * @param tokenId1s Sequence 1 ids + * @param tokenMap1 Sequence 1 token mappings + * @param tokenId2s Sequence 2 ids + * @param tokenMap2 Sequence 2 token map + * @return The builder object + */ + TokensBuilder addSequencePair(List tokenId1s, List tokenMap1, List tokenId2s, List tokenMap2); + + /** + * Builds the token object + * @param input the original sequence input, may be a simple concatenation of a sequence pair + * @param truncated Was this truncated when tokenized + * @param allTokens All the tokens with their values and offsets + * @return A new Tokens object + */ + Tokens build(String input, boolean truncated, List allTokens); + } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/WordPieceTokenFilter.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/WordPieceTokenFilter.java index 2f6934d238736..eef885c5afb76 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/WordPieceTokenFilter.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/WordPieceTokenFilter.java @@ -19,11 +19,12 @@ import java.util.ArrayList; import java.util.LinkedList; import java.util.List; +import java.util.Objects; public final class WordPieceTokenFilter extends TokenFilter { - protected final LinkedList tokens; + private final LinkedList tokens; private final CharTermAttribute termAtt = addAttribute(CharTermAttribute.class); - protected final OffsetAttribute offsetAtt = addAttribute(OffsetAttribute.class); + private final OffsetAttribute offsetAtt = addAttribute(OffsetAttribute.class); private final PositionIncrementAttribute posIncAtt = addAttribute(PositionIncrementAttribute.class); private static final CharSequence CONTINUATION = "##"; @@ -105,15 +106,14 @@ public boolean incrementToken() throws IOException { if (input.incrementToken()) { if (neverSplit.contains(termAtt)) { Integer maybeTokenized = vocabulary.get(termAtt); - if (maybeTokenized == null) { - tokenizedValues.add( - new WordPieceToken(termAtt.toString(), tokenizedUnknown, offsetAtt.startOffset(), offsetAtt.endOffset()) - ); - } else { - 
tokenizedValues.add( - new WordPieceToken(termAtt.toString(), maybeTokenized, offsetAtt.startOffset(), offsetAtt.endOffset()) - ); - } + tokenizedValues.add( + new WordPieceToken( + termAtt.toString(), + Objects.requireNonNullElse(maybeTokenized, tokenizedUnknown), + offsetAtt.startOffset(), + offsetAtt.endOffset() + ) + ); return true; } if (termAtt.length() > maxInputCharsPerWord) { diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/pytorch/results/PyTorchInferenceResult.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/pytorch/results/PyTorchInferenceResult.java index 0ac37ecc633b7..75cb77fb12b05 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/pytorch/results/PyTorchInferenceResult.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/pytorch/results/PyTorchInferenceResult.java @@ -91,14 +91,10 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.field(REQUEST_ID.getPreferredName(), requestId); if (inference != null) { builder.startArray(INFERENCE.getPreferredName()); - for (int i = 0; i < inference.length; i++) { + for (double[][] doubles : inference) { builder.startArray(); for (int j = 0; j < inference[0].length; j++) { - builder.startArray(); - for (int k = 0; k < inference[0][0].length; k++) { - builder.value(inference[i][j][k]); - } - builder.endArray(); + builder.value(doubles[j]); } builder.endArray(); } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/BertRequestBuilderTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/BertTokenizationResultTests.java similarity index 91% rename from x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/BertRequestBuilderTests.java rename to x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/BertTokenizationResultTests.java index a9a3227c43971..2d01ddefa5833 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/BertRequestBuilderTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/BertTokenizationResultTests.java @@ -26,7 +26,7 @@ import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.hasSize; -public class BertRequestBuilderTests extends ESTestCase { +public class BertTokenizationResultTests extends ESTestCase { private BertTokenizer tokenizer; @@ -40,9 +40,9 @@ public void closeIt() { public void testBuildRequest() throws IOException { tokenizer = BertTokenizer.builder(TEST_CASED_VOCAB, new BertTokenization(null, null, 512, null)).build(); - BertRequestBuilder requestBuilder = new BertRequestBuilder(tokenizer); + var requestBuilder = tokenizer.requestBuilder(); NlpTask.Request request = requestBuilder.buildRequest(List.of("Elasticsearch fun"), "request1", Tokenization.Truncate.NONE); - Map jsonDocAsMap = XContentHelper.convertToMap(request.processInput, true, XContentType.JSON).v2(); + Map jsonDocAsMap = XContentHelper.convertToMap(request.processInput(), true, XContentType.JSON).v2(); assertThat(jsonDocAsMap.keySet(), hasSize(5)); assertEquals("request1", jsonDocAsMap.get("request_id")); @@ -52,7 +52,6 @@ public void testBuildRequest() throws IOException { assertEquals(Arrays.asList(0, 1, 2, 3, 4), firstListItemFromMap("arg_3", jsonDocAsMap)); } - @SuppressWarnings("unchecked") private List firstListItemFromMap(String name, Map jsonDocAsMap) { return nthListItemFromMap(name, 0, 
jsonDocAsMap); } @@ -65,7 +64,7 @@ public static List nthListItemFromMap(String name, int n, Map requestBuilder.buildRequest( @@ -81,7 +80,7 @@ public void testInputTooLarge() throws IOException { ); } { - BertRequestBuilder requestBuilder = new BertRequestBuilder(tokenizer); + var requestBuilder = tokenizer.requestBuilder(); // input will become 3 tokens + the Class and Separator token = 5 which is // our max sequence length requestBuilder.buildRequest(Collections.singletonList("Elasticsearch fun"), "request1", Tokenization.Truncate.NONE); @@ -92,13 +91,13 @@ public void testInputTooLarge() throws IOException { public void testBatchWithPadding() throws IOException { tokenizer = BertTokenizer.builder(TEST_CASED_VOCAB, new BertTokenization(null, null, 512, null)).build(); - BertRequestBuilder requestBuilder = new BertRequestBuilder(tokenizer); + var requestBuilder = tokenizer.requestBuilder(); NlpTask.Request request = requestBuilder.buildRequest( List.of("Elasticsearch", "my little red car", "Godzilla day"), "request1", Tokenization.Truncate.NONE ); - Map jsonDocAsMap = XContentHelper.convertToMap(request.processInput, true, XContentType.JSON).v2(); + Map jsonDocAsMap = XContentHelper.convertToMap(request.processInput(), true, XContentType.JSON).v2(); assertThat(jsonDocAsMap.keySet(), hasSize(5)); assertThat((List>) jsonDocAsMap.get("tokens"), hasSize(3)); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/FillMaskProcessorTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/FillMaskProcessorTests.java index fe87963271c93..a4f591ec43b36 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/FillMaskProcessorTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/FillMaskProcessorTests.java @@ -14,13 +14,13 @@ import org.elasticsearch.xpack.core.ml.inference.results.TopClassEntry; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.FillMaskConfig; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.VocabularyConfig; +import org.elasticsearch.xpack.ml.inference.nlp.tokenizers.BertTokenizationResult; import org.elasticsearch.xpack.ml.inference.nlp.tokenizers.BertTokenizer; import org.elasticsearch.xpack.ml.inference.nlp.tokenizers.TokenizationResult; import org.elasticsearch.xpack.ml.inference.nlp.tokenizers.WordPieceTokenFilter; import org.elasticsearch.xpack.ml.inference.pytorch.results.PyTorchInferenceResult; import java.util.Arrays; -import java.util.Collections; import java.util.List; import java.util.OptionalInt; @@ -40,25 +40,28 @@ public void testProcessResults() { { 0, 0, 0, 0, 0, 0, 0 }, // The { 0, 0, 0, 0, 0, 0, 0 }, // capital { 0, 0, 0, 0, 0, 0, 0 }, // of - { 0.01, 0.01, 0.3, 0.1, 0.01, 0.2, 1.2 }, // MASK + { 0.01, 0.01, 0.3, 0.01, 0.2, 1.2, 0.1 }, // MASK { 0, 0, 0, 0, 0, 0, 0 }, // is { 0, 0, 0, 0, 0, 0, 0 } // paris } }; String input = "The capital of " + BertTokenizer.MASK_TOKEN + " is Paris"; - List vocab = Arrays.asList("The", "capital", "of", BertTokenizer.MASK_TOKEN, "is", "Paris", "France"); + List vocab = Arrays.asList("The", "capital", "of", "is", "Paris", "France", BertTokenizer.MASK_TOKEN); List tokens = List.of(); int[] tokenMap = new int[] { 0, 1, 2, 3, 4, 5 }; - int[] tokenIds = new int[] { 0, 1, 2, 3, 4, 5 }; + int[] tokenIds = new int[] { 0, 1, 2, 6, 4, 5 }; - TokenizationResult tokenization = new TokenizationResult(vocab); - tokenization.addTokenization(input, false, tokens, tokenIds, tokenMap); + 
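Aside: the shape of the call path these tests now exercise, with the dedicated BertRequestBuilder/MPNetRequestBuilder classes gone; a fragment assumed to sit inside a test method that declares throws IOException:

    try (BertTokenizer tokenizer = BertTokenizer.builder(TEST_CASED_VOCAB, new BertTokenization(null, null, 512, null)).build()) {
        NlpTask.Request request = tokenizer.requestBuilder()
            .buildRequest(List.of("Elasticsearch fun"), "request1", Tokenization.Truncate.NONE);
        BytesReference body = request.processInput(); // record accessor, formerly the public field processInput
    }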
TokenizationResult tokenization = new BertTokenizationResult( + vocab, + List.of(new TokenizationResult.Tokens(input, tokens, false, tokenIds, tokenMap)), + 0 + ); BertTokenizer tokenizer = mock(BertTokenizer.class); when(tokenizer.getMaskToken()).thenReturn(BertTokenizer.MASK_TOKEN); - when(tokenizer.getMaskTokenId()).thenReturn(OptionalInt.of(3)); + when(tokenizer.getMaskTokenId()).thenReturn(OptionalInt.of(6)); String resultsField = randomAlphaOfLength(10); FillMaskResults result = (FillMaskResults) FillMaskProcessor.processResult( @@ -84,8 +87,11 @@ public void testProcessResults_GivenMissingTokens() { BertTokenizer tokenizer = mock(BertTokenizer.class); when(tokenizer.getMaskToken()).thenReturn("[MASK]"); - TokenizationResult tokenization = new TokenizationResult(Collections.emptyList()); - tokenization.addTokenization("", false, Collections.emptyList(), new int[] {}, new int[] {}); + TokenizationResult tokenization = new BertTokenizationResult( + List.of(), + List.of(new TokenizationResult.Tokens("", List.of(), false, new int[0], new int[0])), + 0 + ); PyTorchInferenceResult pyTorchResult = new PyTorchInferenceResult("1", new double[][][] { { {} } }, 0L, null); expectThrows( diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/MPNetRequestBuilderTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/MPNetTokenizationResultTests.java similarity index 89% rename from x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/MPNetRequestBuilderTests.java rename to x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/MPNetTokenizationResultTests.java index 3779ab43262d9..0a2907d3c67f6 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/MPNetRequestBuilderTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/MPNetTokenizationResultTests.java @@ -26,7 +26,7 @@ import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.hasSize; -public class MPNetRequestBuilderTests extends ESTestCase { +public class MPNetTokenizationResultTests extends ESTestCase { private MPNetTokenizer tokenizer; @After @@ -39,9 +39,9 @@ public void closeIt() { public void testBuildRequest() throws IOException { tokenizer = MPNetTokenizer.mpBuilder(TEST_CASED_VOCAB, new MPNetTokenization(null, null, 512, null)).build(); - MPNetRequestBuilder requestBuilder = new MPNetRequestBuilder(tokenizer); + var requestBuilder = tokenizer.requestBuilder(); NlpTask.Request request = requestBuilder.buildRequest(List.of("Elasticsearch fun"), "request1", Tokenization.Truncate.NONE); - Map jsonDocAsMap = XContentHelper.convertToMap(request.processInput, true, XContentType.JSON).v2(); + Map jsonDocAsMap = XContentHelper.convertToMap(request.processInput(), true, XContentType.JSON).v2(); assertThat(jsonDocAsMap.keySet(), hasSize(3)); assertEquals("request1", jsonDocAsMap.get("request_id")); @@ -49,7 +49,6 @@ public void testBuildRequest() throws IOException { assertEquals(Arrays.asList(1, 1, 1, 1, 1), firstListItemFromMap("arg_1", jsonDocAsMap)); } - @SuppressWarnings("unchecked") private List firstListItemFromMap(String name, Map jsonDocAsMap) { return nthListItemFromMap(name, 0, jsonDocAsMap); } @@ -62,7 +61,7 @@ public static List nthListItemFromMap(String name, int n, Map requestBuilder.buildRequest( @@ -78,7 +77,7 @@ public void testInputTooLarge() throws IOException { ); } { - MPNetRequestBuilder requestBuilder = new 
MPNetRequestBuilder(tokenizer); + var requestBuilder = tokenizer.requestBuilder(); // input will become 3 tokens + the Class and Separator token = 5 which is // our max sequence length requestBuilder.buildRequest(Collections.singletonList("Elasticsearch fun"), "request1", Tokenization.Truncate.NONE); @@ -89,13 +88,13 @@ public void testInputTooLarge() throws IOException { public void testBatchWithPadding() throws IOException { tokenizer = MPNetTokenizer.mpBuilder(TEST_CASED_VOCAB, new MPNetTokenization(null, null, 512, null)).build(); - MPNetRequestBuilder requestBuilder = new MPNetRequestBuilder(tokenizer); + var requestBuilder = tokenizer.requestBuilder(); NlpTask.Request request = requestBuilder.buildRequest( List.of("Elasticsearch", "my little red car", "Godzilla day"), "request1", Tokenization.Truncate.NONE ); - Map jsonDocAsMap = XContentHelper.convertToMap(request.processInput, true, XContentType.JSON).v2(); + Map jsonDocAsMap = XContentHelper.convertToMap(request.processInput(), true, XContentType.JSON).v2(); assertThat(jsonDocAsMap.keySet(), hasSize(3)); assertThat((List>) jsonDocAsMap.get("tokens"), hasSize(3)); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/NerProcessorTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/NerProcessorTests.java index c7c2e3817978c..baafecf85c30a 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/NerProcessorTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/NerProcessorTests.java @@ -182,7 +182,7 @@ public void testGroupTaggedTokens() throws IOException { taggedTokens.add(new NerProcessor.NerResultProcessor.TaggedToken(tokens.get(i++), NerProcessor.IobTag.O, 1.0)); taggedTokens.add(new NerProcessor.NerResultProcessor.TaggedToken(tokens.get(i++), NerProcessor.IobTag.O, 1.0)); taggedTokens.add(new NerProcessor.NerResultProcessor.TaggedToken(tokens.get(i++), NerProcessor.IobTag.O, 1.0)); - taggedTokens.add(new NerProcessor.NerResultProcessor.TaggedToken(tokens.get(i++), NerProcessor.IobTag.B_ORG, 1.0)); + taggedTokens.add(new NerProcessor.NerResultProcessor.TaggedToken(tokens.get(i), NerProcessor.IobTag.B_ORG, 1.0)); List entityGroups = NerProcessor.NerResultProcessor.groupTaggedTokens(taggedTokens, input); assertThat(entityGroups, hasSize(3)); @@ -218,7 +218,7 @@ public void testGroupTaggedTokens_GivenConsecutiveEntities() throws IOException taggedTokens.add(new NerProcessor.NerResultProcessor.TaggedToken(tokens.get(i++), NerProcessor.IobTag.O, 1.0)); taggedTokens.add(new NerProcessor.NerResultProcessor.TaggedToken(tokens.get(i++), NerProcessor.IobTag.O, 1.0)); taggedTokens.add(new NerProcessor.NerResultProcessor.TaggedToken(tokens.get(i++), NerProcessor.IobTag.B_PER, 1.0)); - taggedTokens.add(new NerProcessor.NerResultProcessor.TaggedToken(tokens.get(i++), NerProcessor.IobTag.O, 1.0)); + taggedTokens.add(new NerProcessor.NerResultProcessor.TaggedToken(tokens.get(i), NerProcessor.IobTag.O, 1.0)); List entityGroups = NerProcessor.NerResultProcessor.groupTaggedTokens(taggedTokens, input); assertThat(entityGroups, hasSize(3)); @@ -241,7 +241,7 @@ public void testGroupTaggedTokens_GivenConsecutiveContinuingEntities() throws IO taggedTokens.add(new NerProcessor.NerResultProcessor.TaggedToken(tokens.get(i++), NerProcessor.IobTag.O, 1.0)); taggedTokens.add(new NerProcessor.NerResultProcessor.TaggedToken(tokens.get(i++), NerProcessor.IobTag.B_PER, 1.0)); taggedTokens.add(new 
NerProcessor.NerResultProcessor.TaggedToken(tokens.get(i++), NerProcessor.IobTag.I_PER, 1.0)); - taggedTokens.add(new NerProcessor.NerResultProcessor.TaggedToken(tokens.get(i++), NerProcessor.IobTag.B_ORG, 1.0)); + taggedTokens.add(new NerProcessor.NerResultProcessor.TaggedToken(tokens.get(i), NerProcessor.IobTag.B_ORG, 1.0)); List entityGroups = NerProcessor.NerResultProcessor.groupTaggedTokens(taggedTokens, input); assertThat(entityGroups, hasSize(3)); @@ -272,7 +272,7 @@ public void testEntityContainsPunctuation() throws IOException { taggedTokens.add(new NerProcessor.NerResultProcessor.TaggedToken(tokens.get(i++), NerProcessor.IobTag.I_ORG, 1.0)); taggedTokens.add(new NerProcessor.NerResultProcessor.TaggedToken(tokens.get(i++), NerProcessor.IobTag.I_ORG, 1.0)); taggedTokens.add(new NerProcessor.NerResultProcessor.TaggedToken(tokens.get(i++), NerProcessor.IobTag.I_ORG, 1.0)); - taggedTokens.add(new NerProcessor.NerResultProcessor.TaggedToken(tokens.get(i++), NerProcessor.IobTag.O, 1.0)); + taggedTokens.add(new NerProcessor.NerResultProcessor.TaggedToken(tokens.get(i), NerProcessor.IobTag.O, 1.0)); assertEquals(tokens.size(), taggedTokens.size()); List entityGroups = NerProcessor.NerResultProcessor.groupTaggedTokens(taggedTokens, input); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/TextClassificationProcessorTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/TextClassificationProcessorTests.java index 0f1b03e4bea56..2d57e997c8f5b 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/TextClassificationProcessorTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/TextClassificationProcessorTests.java @@ -71,7 +71,7 @@ public void testBuildRequest() throws IOException { NlpTask.Request request = processor.getRequestBuilder(config) .buildRequest(List.of("Elasticsearch fun"), "request1", Tokenization.Truncate.NONE); - Map jsonDocAsMap = XContentHelper.convertToMap(request.processInput, true, XContentType.JSON).v2(); + Map jsonDocAsMap = XContentHelper.convertToMap(request.processInput(), true, XContentType.JSON).v2(); assertThat(jsonDocAsMap.keySet(), hasSize(5)); assertEquals("request1", jsonDocAsMap.get("request_id")); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/ZeroShotClassificationProcessorTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/ZeroShotClassificationProcessorTests.java index 9fd5bb8f833c2..4f5d614348967 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/ZeroShotClassificationProcessorTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/ZeroShotClassificationProcessorTests.java @@ -50,7 +50,7 @@ public void testBuildRequest() throws IOException { (NlpConfig) new ZeroShotClassificationConfigUpdate.Builder().setLabels(List.of("new", "stuff")).build().apply(config) ).buildRequest(List.of("Elasticsearch fun"), "request1", Tokenization.Truncate.NONE); - Map jsonDocAsMap = XContentHelper.convertToMap(request.processInput, true, XContentType.JSON).v2(); + Map jsonDocAsMap = XContentHelper.convertToMap(request.processInput(), true, XContentType.JSON).v2(); assertThat(jsonDocAsMap.keySet(), hasSize(5)); assertEquals("request1", jsonDocAsMap.get("request_id")); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/BertTokenizerTests.java 
b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/BertTokenizerTests.java index 381df2230532e..63f3af65f667d 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/BertTokenizerTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/BertTokenizerTests.java @@ -47,8 +47,8 @@ public class BertTokenizerTests extends ESTestCase { BertTokenizer.PAD_TOKEN ); - private List tokenStrings(List tokens) { - return tokens.stream().map(WordPieceTokenFilter.WordPieceToken::toString).collect(Collectors.toList()); + private List tokenStrings(List tokens) { + return tokens.stream().map(DelimitedToken::toString).collect(Collectors.toList()); } public void testTokenize() { @@ -58,10 +58,10 @@ public void testTokenize() { new BertTokenization(null, false, null, Tokenization.Truncate.NONE) ).build() ) { - TokenizationResult.Tokenization tokenization = tokenizer.tokenize("Elasticsearch fun", Tokenization.Truncate.NONE); - assertThat(tokenStrings(tokenization.getTokens()), contains("Elastic", "##search", "fun")); - assertArrayEquals(new int[] { 0, 1, 3 }, tokenization.getTokenIds()); - assertArrayEquals(new int[] { 0, 0, 1 }, tokenization.getTokenMap()); + TokenizationResult.Tokens tokenization = tokenizer.tokenize("Elasticsearch fun", Tokenization.Truncate.NONE); + assertThat(tokenStrings(tokenization.tokens()), contains("Elastic", "##search", "fun")); + assertArrayEquals(new int[] { 0, 1, 3 }, tokenization.tokenIds()); + assertArrayEquals(new int[] { 0, 0, 1 }, tokenization.tokenMap()); } } @@ -103,11 +103,11 @@ public void testTokenizeLargeInputTruncation() { ).build() ) { - TokenizationResult.Tokenization tokenization = tokenizer.tokenize( + TokenizationResult.Tokens tokenization = tokenizer.tokenize( "Elasticsearch fun with Pancake and Godzilla", Tokenization.Truncate.FIRST ); - assertArrayEquals(new int[] { 0, 1, 3, 18, 17 }, tokenization.getTokenIds()); + assertArrayEquals(new int[] { 0, 1, 3, 18, 17 }, tokenization.tokenIds()); } try ( @@ -120,16 +120,16 @@ public void testTokenizeLargeInputTruncation() { "Elasticsearch fun with Pancake and Godzilla", Tokenization.Truncate.FIRST ); - assertArrayEquals(new int[] { 12, 0, 1, 3, 13 }, tokenization.getTokenIds()); - assertArrayEquals(new int[] { -1, 0, 0, 1, -1 }, tokenization.getTokenMap()); + assertArrayEquals(new int[] { 12, 0, 1, 3, 13 }, tokenization.tokenIds()); + assertArrayEquals(new int[] { -1, 0, 0, 1, -1 }, tokenization.tokenMap()); } } public void testTokenizeAppendSpecialTokens() { try (BertTokenizer tokenizer = BertTokenizer.builder(TEST_CASED_VOCAB, Tokenization.createDefault()).build()) { - TokenizationResult.Tokenization tokenization = tokenizer.tokenize("Elasticsearch fun", Tokenization.Truncate.NONE); - assertArrayEquals(new int[] { 12, 0, 1, 3, 13 }, tokenization.getTokenIds()); - assertArrayEquals(new int[] { -1, 0, 0, 1, -1 }, tokenization.getTokenMap()); + TokenizationResult.Tokens tokenization = tokenizer.tokenize("Elasticsearch fun", Tokenization.Truncate.NONE); + assertArrayEquals(new int[] { 12, 0, 1, 3, 13 }, tokenization.tokenIds()); + assertArrayEquals(new int[] { -1, 0, 0, 1, -1 }, tokenization.tokenMap()); } } @@ -143,13 +143,13 @@ public void testNeverSplitTokens() { .build() ) { - TokenizationResult.Tokenization tokenization = tokenizer.tokenize( + TokenizationResult.Tokens tokenization = tokenizer.tokenize( "Elasticsearch " + specialToken + " fun", Tokenization.Truncate.NONE ); - 
assertThat(tokenStrings(tokenization.getTokens()), contains("Elastic", "##search", specialToken, "fun")); - assertArrayEquals(new int[] { 0, 1, 15, 3 }, tokenization.getTokenIds()); - assertArrayEquals(new int[] { 0, 0, 1, 2 }, tokenization.getTokenMap()); + assertThat(tokenStrings(tokenization.tokens()), contains("Elastic", "##search", specialToken, "fun")); + assertArrayEquals(new int[] { 0, 1, 15, 3 }, tokenization.tokenIds()); + assertArrayEquals(new int[] { 0, 0, 1, 2 }, tokenization.tokenMap()); } } @@ -161,13 +161,13 @@ public void testDoLowerCase() { ).setDoLowerCase(false).setWithSpecialTokens(false).build() ) { - TokenizationResult.Tokenization tokenization = tokenizer.tokenize("Elasticsearch fun", Tokenization.Truncate.NONE); - assertArrayEquals(new int[] { 3, 2 }, tokenization.getTokenIds()); - assertArrayEquals(new int[] { 0, 1 }, tokenization.getTokenMap()); + TokenizationResult.Tokens tokenization = tokenizer.tokenize("Elasticsearch fun", Tokenization.Truncate.NONE); + assertArrayEquals(new int[] { 3, 2 }, tokenization.tokenIds()); + assertArrayEquals(new int[] { 0, 1 }, tokenization.tokenMap()); tokenization = tokenizer.tokenize("elasticsearch fun", Tokenization.Truncate.NONE); - assertArrayEquals(new int[] { 0, 1, 2 }, tokenization.getTokenIds()); - assertArrayEquals(new int[] { 0, 0, 1 }, tokenization.getTokenMap()); + assertArrayEquals(new int[] { 0, 1, 2 }, tokenization.tokenIds()); + assertArrayEquals(new int[] { 0, 0, 1 }, tokenization.tokenMap()); } try ( @@ -177,9 +177,9 @@ public void testDoLowerCase() { ).setDoLowerCase(true).setWithSpecialTokens(false).build() ) { - TokenizationResult.Tokenization tokenization = tokenizer.tokenize("Elasticsearch fun", Tokenization.Truncate.NONE); - assertArrayEquals(new int[] { 0, 1, 2 }, tokenization.getTokenIds()); - assertArrayEquals(new int[] { 0, 0, 1 }, tokenization.getTokenMap()); + TokenizationResult.Tokens tokenization = tokenizer.tokenize("Elasticsearch fun", Tokenization.Truncate.NONE); + assertArrayEquals(new int[] { 0, 1, 2 }, tokenization.tokenIds()); + assertArrayEquals(new int[] { 0, 0, 1 }, tokenization.tokenMap()); } } @@ -189,14 +189,14 @@ public void testPunctuation() { .setWithSpecialTokens(false) .build() ) { - TokenizationResult.Tokenization tokenization = tokenizer.tokenize("Elasticsearch, fun.", Tokenization.Truncate.NONE); - assertThat(tokenStrings(tokenization.getTokens()), contains("Elastic", "##search", ",", "fun", ".")); - assertArrayEquals(new int[] { 0, 1, 11, 3, 10 }, tokenization.getTokenIds()); - assertArrayEquals(new int[] { 0, 0, 1, 2, 3 }, tokenization.getTokenMap()); + TokenizationResult.Tokens tokenization = tokenizer.tokenize("Elasticsearch, fun.", Tokenization.Truncate.NONE); + assertThat(tokenStrings(tokenization.tokens()), contains("Elastic", "##search", ",", "fun", ".")); + assertArrayEquals(new int[] { 0, 1, 11, 3, 10 }, tokenization.tokenIds()); + assertArrayEquals(new int[] { 0, 0, 1, 2, 3 }, tokenization.tokenMap()); tokenization = tokenizer.tokenize("Elasticsearch, fun [MASK].", Tokenization.Truncate.NONE); - assertArrayEquals(new int[] { 0, 1, 11, 3, 14, 10 }, tokenization.getTokenIds()); - assertArrayEquals(new int[] { 0, 0, 1, 2, 3, 4 }, tokenization.getTokenMap()); + assertArrayEquals(new int[] { 0, 1, 11, 3, 14, 10 }, tokenization.tokenIds()); + assertArrayEquals(new int[] { 0, 0, 1, 2, 3, 4 }, tokenization.tokenMap()); } } @@ -224,20 +224,20 @@ public void testPunctuationWithMask() { ).setWithSpecialTokens(true).setNeverSplit(Set.of("[MASK]")).build() ) { - 
TokenizationResult.Tokenization tokenization = tokenizer.tokenize("This is [MASK]-tastic!", Tokenization.Truncate.NONE); - assertThat(tokenStrings(tokenization.getTokens()), contains("This", "is", "[MASK]", "-", "ta", "##stic", "!")); - assertArrayEquals(new int[] { 0, 1, 2, 3, 4, 6, 7, 8, 9 }, tokenization.getTokenIds()); - assertArrayEquals(new int[] { -1, 0, 1, 2, 3, 4, 4, 5, -1 }, tokenization.getTokenMap()); + TokenizationResult.Tokens tokenization = tokenizer.tokenize("This is [MASK]-tastic!", Tokenization.Truncate.NONE); + assertThat(tokenStrings(tokenization.tokens()), contains("This", "is", "[MASK]", "-", "ta", "##stic", "!")); + assertArrayEquals(new int[] { 0, 1, 2, 3, 4, 6, 7, 8, 9 }, tokenization.tokenIds()); + assertArrayEquals(new int[] { -1, 0, 1, 2, 3, 4, 4, 5, -1 }, tokenization.tokenMap()); tokenization = tokenizer.tokenize("This is sub~[MASK]!", Tokenization.Truncate.NONE); - assertThat(tokenStrings(tokenization.getTokens()), contains("This", "is", "sub", "~", "[MASK]", "!")); - assertArrayEquals(new int[] { 0, 1, 2, 10, 5, 3, 8, 9 }, tokenization.getTokenIds()); - assertArrayEquals(new int[] { -1, 0, 1, 2, 3, 4, 5, -1 }, tokenization.getTokenMap()); + assertThat(tokenStrings(tokenization.tokens()), contains("This", "is", "sub", "~", "[MASK]", "!")); + assertArrayEquals(new int[] { 0, 1, 2, 10, 5, 3, 8, 9 }, tokenization.tokenIds()); + assertArrayEquals(new int[] { -1, 0, 1, 2, 3, 4, 5, -1 }, tokenization.tokenMap()); tokenization = tokenizer.tokenize("This is sub,[MASK].tastic!", Tokenization.Truncate.NONE); - assertThat(tokenStrings(tokenization.getTokens()), contains("This", "is", "sub", ",", "[MASK]", ".", "ta", "##stic", "!")); - assertArrayEquals(new int[] { 0, 1, 2, 10, 11, 3, 12, 6, 7, 8, 9 }, tokenization.getTokenIds()); - assertArrayEquals(new int[] { -1, 0, 1, 2, 3, 4, 5, 6, 6, 7, -1 }, tokenization.getTokenMap()); + assertThat(tokenStrings(tokenization.tokens()), contains("This", "is", "sub", ",", "[MASK]", ".", "ta", "##stic", "!")); + assertArrayEquals(new int[] { 0, 1, 2, 10, 11, 3, 12, 6, 7, 8, 9 }, tokenization.tokenIds()); + assertArrayEquals(new int[] { -1, 0, 1, 2, 3, 4, 5, 6, 6, 7, -1 }, tokenization.tokenMap()); } } @@ -257,23 +257,23 @@ public void testBatchInput() { tokenizer.tokenize("Godzilla Pancake red car day", Tokenization.Truncate.NONE) ) ); - assertThat(tr.getTokenizations(), hasSize(4)); + assertThat(tr.getTokens(), hasSize(4)); - TokenizationResult.Tokenization tokenization = tr.getTokenizations().get(0); - assertArrayEquals(new int[] { 0, 1 }, tokenization.getTokenIds()); - assertArrayEquals(new int[] { 0, 0 }, tokenization.getTokenMap()); + TokenizationResult.Tokens tokenization = tr.getTokenization(0); + assertArrayEquals(new int[] { 0, 1 }, tokenization.tokenIds()); + assertArrayEquals(new int[] { 0, 0 }, tokenization.tokenMap()); - tokenization = tr.getTokenizations().get(1); - assertArrayEquals(new int[] { 4, 5, 6, 7 }, tokenization.getTokenIds()); - assertArrayEquals(new int[] { 0, 1, 2, 3 }, tokenization.getTokenMap()); + tokenization = tr.getTokenization(1); + assertArrayEquals(new int[] { 4, 5, 6, 7 }, tokenization.tokenIds()); + assertArrayEquals(new int[] { 0, 1, 2, 3 }, tokenization.tokenMap()); - tokenization = tr.getTokenizations().get(2); - assertArrayEquals(new int[] { 8, 9, 16 }, tokenization.getTokenIds()); - assertArrayEquals(new int[] { 0, 0, 1 }, tokenization.getTokenMap()); + tokenization = tr.getTokenization(2); + assertArrayEquals(new int[] { 8, 9, 16 }, tokenization.tokenIds()); + assertArrayEquals(new int[] { 
0, 0, 1 }, tokenization.tokenMap()); - tokenization = tr.getTokenizations().get(3); - assertArrayEquals(new int[] { 8, 9, 17, 6, 7, 16 }, tokenization.getTokenIds()); - assertArrayEquals(new int[] { 0, 0, 1, 2, 3, 4 }, tokenization.getTokenMap()); + tokenization = tr.getTokenization(3); + assertArrayEquals(new int[] { 8, 9, 17, 6, 7, 16 }, tokenization.tokenIds()); + assertArrayEquals(new int[] { 0, 0, 1, 2, 3, 4 }, tokenization.tokenMap()); } } @@ -284,13 +284,13 @@ public void testMultiSeqTokenization() { .setWithSpecialTokens(true) .build() ) { - TokenizationResult.Tokenization tokenization = tokenizer.tokenize( + TokenizationResult.Tokens tokenization = tokenizer.tokenize( "Elasticsearch is fun", "Godzilla my little red car", Tokenization.Truncate.NONE ); - var tokenStream = Arrays.stream(tokenization.getTokenIds()).mapToObj(TEST_CASED_VOCAB::get).collect(Collectors.toList()); + var tokenStream = Arrays.stream(tokenization.tokenIds()).mapToObj(TEST_CASED_VOCAB::get).collect(Collectors.toList()); assertThat( tokenStream, contains( @@ -309,7 +309,7 @@ public void testMultiSeqTokenization() { BertTokenizer.SEPARATOR_TOKEN ) ); - assertArrayEquals(new int[] { 12, 0, 1, 2, 3, 13, 8, 9, 4, 5, 6, 7, 13 }, tokenization.getTokenIds()); + assertArrayEquals(new int[] { 12, 0, 1, 2, 3, 13, 8, 9, 4, 5, 6, 7, 13 }, tokenization.tokenIds()); } } @@ -321,13 +321,13 @@ public void testTokenizeLargeInputMultiSequenceTruncation() { ).build() ) { - TokenizationResult.Tokenization tokenization = tokenizer.tokenize( + TokenizationResult.Tokens tokenization = tokenizer.tokenize( "Elasticsearch is fun", "Godzilla my little red car", Tokenization.Truncate.FIRST ); - var tokenStream = Arrays.stream(tokenization.getTokenIds()).mapToObj(TEST_CASED_VOCAB::get).collect(Collectors.toList()); + var tokenStream = Arrays.stream(tokenization.tokenIds()).mapToObj(TEST_CASED_VOCAB::get).collect(Collectors.toList()); assertThat( tokenStream, contains( @@ -359,12 +359,12 @@ public void testTokenizeLargeInputMultiSequenceTruncation() { ).build() ) { - TokenizationResult.Tokenization tokenization = tokenizer.tokenize( + TokenizationResult.Tokens tokenization = tokenizer.tokenize( "Elasticsearch is fun", "Godzilla my little red car", Tokenization.Truncate.SECOND ); - var tokenStream = Arrays.stream(tokenization.getTokenIds()).mapToObj(TEST_CASED_VOCAB::get).collect(Collectors.toList()); + var tokenStream = Arrays.stream(tokenization.tokenIds()).mapToObj(TEST_CASED_VOCAB::get).collect(Collectors.toList()); assertThat( tokenStream, contains( diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/MPNetTokenizerTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/MPNetTokenizerTests.java index 8bec4be872ff0..a2b7b9b364e2c 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/MPNetTokenizerTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/nlp/tokenizers/MPNetTokenizerTests.java @@ -42,8 +42,8 @@ public class MPNetTokenizerTests extends ESTestCase { MPNetTokenizer.PAD_TOKEN ); - private List tokenStrings(List tokens) { - return tokens.stream().map(WordPieceTokenFilter.WordPieceToken::toString).collect(Collectors.toList()); + private List tokenStrings(List tokens) { + return tokens.stream().map(DelimitedToken::toString).collect(Collectors.toList()); } public void testTokenize() { @@ -53,10 +53,10 @@ public void testTokenize() { new MPNetTokenization(null, false, null, 
Tokenization.Truncate.NONE) ).build() ) { - TokenizationResult.Tokenization tokenization = tokenizer.tokenize("Elasticsearch fun", Tokenization.Truncate.NONE); - assertThat(tokenStrings(tokenization.getTokens()), contains("Elastic", "##search", "fun")); - assertArrayEquals(new int[] { 0, 1, 3 }, tokenization.getTokenIds()); - assertArrayEquals(new int[] { 0, 0, 1 }, tokenization.getTokenMap()); + TokenizationResult.Tokens tokenization = tokenizer.tokenize("Elasticsearch fun", Tokenization.Truncate.NONE); + assertThat(tokenStrings(tokenization.tokens()), contains("Elastic", "##search", "fun")); + assertArrayEquals(new int[] { 0, 1, 3 }, tokenization.tokenIds()); + assertArrayEquals(new int[] { 0, 0, 1 }, tokenization.tokenMap()); } } @@ -67,13 +67,13 @@ public void testMultiSeqTokenization() { new MPNetTokenization(null, false, null, Tokenization.Truncate.NONE) ).setDoLowerCase(false).setWithSpecialTokens(true).build() ) { - TokenizationResult.Tokenization tokenization = tokenizer.tokenize( + TokenizationResult.Tokens tokenization = tokenizer.tokenize( "Elasticsearch is fun", "Godzilla my little red car", Tokenization.Truncate.NONE ); - var tokenStream = Arrays.stream(tokenization.getTokenIds()).mapToObj(TEST_CASED_VOCAB::get).collect(Collectors.toList()); + var tokenStream = Arrays.stream(tokenization.tokenIds()).mapToObj(TEST_CASED_VOCAB::get).collect(Collectors.toList()); assertThat( tokenStream, contains( @@ -93,7 +93,7 @@ public void testMultiSeqTokenization() { MPNetTokenizer.SEPARATOR_TOKEN ) ); - assertArrayEquals(new int[] { 12, 0, 1, 2, 3, 13, 13, 8, 9, 4, 5, 6, 7, 13 }, tokenization.getTokenIds()); + assertArrayEquals(new int[] { 12, 0, 1, 2, 3, 13, 13, 8, 9, 4, 5, 6, 7, 13 }, tokenization.tokenIds()); } } From a86f9c5f027242065c91a7b505ac6ade9a13b933 Mon Sep 17 00:00:00 2001 From: Rene Groeschke Date: Tue, 15 Feb 2022 14:06:22 +0100 Subject: [PATCH 08/37] Remove usage of deprecated Provider#forUseAtConfigurationTime (#83932) Remove gradle api usage that has been deprecated with Gradle 7.4 --- .../conventions/VersionPropertiesLoader.java | 2 -- .../conventions/VersionPropertiesPlugin.java | 2 +- .../conventions/info/ParallelDetector.java | 2 +- ...icsearch.authenticated-testclusters.gradle | 2 -- .../src/main/groovy/elasticsearch.ide.gradle | 2 +- .../src/main/groovy/elasticsearch.run.gradle | 6 ++--- .../gradle/internal/InternalBwcGitPlugin.java | 23 ++++++++----------- .../internal/info/GlobalBuildInfoPlugin.java | 2 +- .../internal/test/RestTestBasePlugin.java | 2 +- build.gradle | 4 ++-- client/rest-high-level/build.gradle | 2 -- .../archives/integ-test-zip/build.gradle | 2 +- distribution/build.gradle | 1 - distribution/docker/build.gradle | 2 +- distribution/packages/build.gradle | 2 +- modules/ingest-geoip/build.gradle | 1 - modules/lang-painless/build.gradle | 2 +- modules/repository-azure/build.gradle | 2 +- qa/remote-clusters/build.gradle | 2 +- x-pack/plugin/core/build.gradle | 2 +- x-pack/plugin/eql/qa/correctness/build.gradle | 2 -- x-pack/plugin/ml/build.gradle | 2 +- x-pack/plugin/sql/sql-cli/build.gradle | 2 +- 23 files changed, 28 insertions(+), 43 deletions(-) diff --git a/build-conventions/src/main/java/org/elasticsearch/gradle/internal/conventions/VersionPropertiesLoader.java b/build-conventions/src/main/java/org/elasticsearch/gradle/internal/conventions/VersionPropertiesLoader.java index 1702c03f91177..510a8df411285 100644 --- a/build-conventions/src/main/java/org/elasticsearch/gradle/internal/conventions/VersionPropertiesLoader.java +++ 
b/build-conventions/src/main/java/org/elasticsearch/gradle/internal/conventions/VersionPropertiesLoader.java @@ -43,7 +43,6 @@ protected static void loadBuildSrcVersion(Properties loadedProps, ProviderFactor ); } String qualifier = providers.systemProperty("build.version_qualifier") - .forUseAtConfigurationTime() .getOrElse(""); if (qualifier.isEmpty() == false) { if (qualifier.matches("(alpha|beta|rc)\\d+") == false) { @@ -52,7 +51,6 @@ protected static void loadBuildSrcVersion(Properties loadedProps, ProviderFactor elasticsearch += "-" + qualifier; } final String buildSnapshotSystemProperty = providers.systemProperty("build.snapshot") - .forUseAtConfigurationTime() .getOrElse("true"); switch (buildSnapshotSystemProperty) { case "true": diff --git a/build-conventions/src/main/java/org/elasticsearch/gradle/internal/conventions/VersionPropertiesPlugin.java b/build-conventions/src/main/java/org/elasticsearch/gradle/internal/conventions/VersionPropertiesPlugin.java index 2d56e42e3d951..c4c664df46bc3 100644 --- a/build-conventions/src/main/java/org/elasticsearch/gradle/internal/conventions/VersionPropertiesPlugin.java +++ b/build-conventions/src/main/java/org/elasticsearch/gradle/internal/conventions/VersionPropertiesPlugin.java @@ -32,6 +32,6 @@ public void apply(Project project) { .registerIfAbsent("versions", VersionPropertiesBuildService.class, spec -> { spec.getParameters().getInfoPath().set(infoPath); }); - project.getExtensions().add("versions", serviceProvider.forUseAtConfigurationTime().get().getProperties()); + project.getExtensions().add("versions", serviceProvider.get().getProperties()); } } diff --git a/build-conventions/src/main/java/org/elasticsearch/gradle/internal/conventions/info/ParallelDetector.java b/build-conventions/src/main/java/org/elasticsearch/gradle/internal/conventions/info/ParallelDetector.java index 7359d1728b96c..53b48c0c58935 100644 --- a/build-conventions/src/main/java/org/elasticsearch/gradle/internal/conventions/info/ParallelDetector.java +++ b/build-conventions/src/main/java/org/elasticsearch/gradle/internal/conventions/info/ParallelDetector.java @@ -78,7 +78,7 @@ public static int findDefaultParallel(Project project) { } private static boolean isMac(ProviderFactory providers) { - return providers.systemProperty("os.name").forUseAtConfigurationTime().getOrElse("").startsWith("Mac"); + return providers.systemProperty("os.name").getOrElse("").startsWith("Mac"); } } diff --git a/build-tools-internal/src/main/groovy/elasticsearch.authenticated-testclusters.gradle b/build-tools-internal/src/main/groovy/elasticsearch.authenticated-testclusters.gradle index b52e6ec7f005c..102a838235cb1 100644 --- a/build-tools-internal/src/main/groovy/elasticsearch.authenticated-testclusters.gradle +++ b/build-tools-internal/src/main/groovy/elasticsearch.authenticated-testclusters.gradle @@ -10,10 +10,8 @@ import org.elasticsearch.gradle.testclusters.StandaloneRestIntegTestTask def clusterCredentials = [ username: providers.systemProperty('tests.rest.cluster.username') - .forUseAtConfigurationTime() .getOrElse('test_admin'), password: providers.systemProperty('tests.rest.cluster.password') - .forUseAtConfigurationTime() .getOrElse('x-pack-test-password') ] diff --git a/build-tools-internal/src/main/groovy/elasticsearch.ide.gradle b/build-tools-internal/src/main/groovy/elasticsearch.ide.gradle index 84b35b9a7568c..31638c9ddb1d4 100644 --- a/build-tools-internal/src/main/groovy/elasticsearch.ide.gradle +++ b/build-tools-internal/src/main/groovy/elasticsearch.ide.gradle @@ -69,7 
+69,7 @@ tasks.register('configureIdeCheckstyle') { } // Applying this stuff, particularly the idea-ext plugin, has a cost so avoid it unless we're running in the IDE -if (providers.systemProperty('idea.active').forUseAtConfigurationTime().getOrNull() == 'true') { +if (providers.systemProperty('idea.active').getOrNull() == 'true') { project.apply(plugin: org.jetbrains.gradle.ext.IdeaExtPlugin) tasks.register('configureIdeaGradleJvm') { diff --git a/build-tools-internal/src/main/groovy/elasticsearch.run.gradle b/build-tools-internal/src/main/groovy/elasticsearch.run.gradle index 4eb4cdcdc32d8..b49c302e6ab99 100644 --- a/build-tools-internal/src/main/groovy/elasticsearch.run.gradle +++ b/build-tools-internal/src/main/groovy/elasticsearch.run.gradle @@ -14,9 +14,9 @@ import org.elasticsearch.gradle.testclusters.RunTask // apply plugin: 'elasticsearch.internal-testclusters' testClusters.register("runTask") { - testDistribution = providers.systemProperty('run.distribution').orElse('default').forUseAtConfigurationTime().get() - if (providers.systemProperty('run.distribution').forUseAtConfigurationTime().getOrElse('default') == 'default') { - String licenseType = providers.systemProperty("run.license_type").forUseAtConfigurationTime().getOrElse("basic") + testDistribution = providers.systemProperty('run.distribution').orElse('default').get() + if (providers.systemProperty('run.distribution').getOrElse('default') == 'default') { + String licenseType = providers.systemProperty("run.license_type").getOrElse("basic") if (licenseType == 'trial') { setting 'xpack.ml.enabled', 'true' setting 'xpack.graph.enabled', 'true' diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalBwcGitPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalBwcGitPlugin.java index 1dce3a7092d85..eda600f09004c 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalBwcGitPlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalBwcGitPlugin.java @@ -84,25 +84,20 @@ public void apply(Project project) { String remoteRepo = remote.get(); // for testing only we can override the base remote url String remoteRepoUrl = providerFactory.systemProperty("testRemoteRepo") - .forUseAtConfigurationTime() .getOrElse("https://github.com/" + remoteRepo + "/elasticsearch.git"); addRemote.setCommandLine(asList("git", "remote", "add", remoteRepo, remoteRepoUrl)); }); TaskProvider fetchLatestTaskProvider = tasks.register("fetchLatest", LoggedExec.class, fetchLatest -> { - var gitFetchLatest = project.getProviders() - .systemProperty("tests.bwc.git_fetch_latest") - .forUseAtConfigurationTime() - .orElse("true") - .map(fetchProp -> { - if ("true".equals(fetchProp)) { - return true; - } - if ("false".equals(fetchProp)) { - return false; - } - throw new GradleException("tests.bwc.git_fetch_latest must be [true] or [false] but was [" + fetchProp + "]"); - }); + var gitFetchLatest = project.getProviders().systemProperty("tests.bwc.git_fetch_latest").orElse("true").map(fetchProp -> { + if ("true".equals(fetchProp)) { + return true; + } + if ("false".equals(fetchProp)) { + return false; + } + throw new GradleException("tests.bwc.git_fetch_latest must be [true] or [false] but was [" + fetchProp + "]"); + }); fetchLatest.onlyIf(t -> project.getGradle().getStartParameter().isOffline() == false && gitFetchLatest.get()); fetchLatest.dependsOn(addRemoteTaskProvider); 
fetchLatest.setWorkingDir(gitExtension.getCheckoutDir().get()); diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/info/GlobalBuildInfoPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/info/GlobalBuildInfoPlugin.java index f8c9e2a86261c..be44f003bf036 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/info/GlobalBuildInfoPlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/info/GlobalBuildInfoPlugin.java @@ -269,7 +269,7 @@ private File findRuntimeJavaHome() { } private String findJavaHome(String version) { - Provider javaHomeNames = providers.gradleProperty("org.gradle.java.installations.fromEnv").forUseAtConfigurationTime(); + Provider javaHomeNames = providers.gradleProperty("org.gradle.java.installations.fromEnv"); String javaHomeEnvVar = getJavaHomeEnvVarName(version); // Provide a useful error if we're looking for a Java home version that we haven't told Gradle about yet diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/RestTestBasePlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/RestTestBasePlugin.java index 1d5d0078e771a..20cf4328e4c84 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/RestTestBasePlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/RestTestBasePlugin.java @@ -78,6 +78,6 @@ public void apply(Project project) { } private String systemProperty(String propName) { - return providerFactory.systemProperty(propName).forUseAtConfigurationTime().getOrNull(); + return providerFactory.systemProperty(propName).getOrNull(); } } diff --git a/build.gradle b/build.gradle index 978d2fefee794..120fadf16b31d 100644 --- a/build.gradle +++ b/build.gradle @@ -177,8 +177,8 @@ allprojects { // injecting groovy property variables into all projects project.ext { // for ide hacks... 
- isEclipse = providers.systemProperty("eclipse.launcher").forUseAtConfigurationTime().isPresent() || // Detects gradle launched from Eclipse's IDE - providers.systemProperty("eclipse.application").forUseAtConfigurationTime().isPresent() || // Detects gradle launched from the Eclipse compiler server + isEclipse = providers.systemProperty("eclipse.launcher").isPresent() || // Detects gradle launched from Eclipse's IDE + providers.systemProperty("eclipse.application").isPresent() || // Detects gradle launched from the Eclipse compiler server gradle.startParameter.taskNames.contains('eclipse') || // Detects gradle launched from the command line to do eclipse stuff gradle.startParameter.taskNames.contains('cleanEclipse') } diff --git a/client/rest-high-level/build.gradle b/client/rest-high-level/build.gradle index 1da2f9ae57f6a..288caec3ce183 100644 --- a/client/rest-high-level/build.gradle +++ b/client/rest-high-level/build.gradle @@ -64,11 +64,9 @@ File pkiTrustCert = file("./src/test/resources/org/elasticsearch/client/security def clusterUserNameProvider = providers.systemProperty('tests.rest.cluster.username') .orElse('test_user') - .forUseAtConfigurationTime() def clusterPasswordProvider = providers.systemProperty('tests.rest.cluster.password') .orElse('test-user-password') - .forUseAtConfigurationTime() tasks.named('splitPackagesAudit').configure { // the client package should be owned by the client, but server has some classes there too diff --git a/distribution/archives/integ-test-zip/build.gradle b/distribution/archives/integ-test-zip/build.gradle index f83aaf74fc2a1..e8e2908a5606a 100644 --- a/distribution/archives/integ-test-zip/build.gradle +++ b/distribution/archives/integ-test-zip/build.gradle @@ -43,7 +43,7 @@ tasks.named("integTest").configure { * 2. *One* of these tests is incompatible with that and should be skipped * when running against an external cluster. 
*/ - if (project.providers.systemProperty("tests.rest.cluster").forUseAtConfigurationTime().isPresent()) { + if (project.providers.systemProperty("tests.rest.cluster").isPresent()) { nonInputProperties.systemProperty 'tests.logfile', testClusters.named('integTest').map(c -> c.singleNode().serverLog) } else { systemProperty 'tests.logfile', '--external--' diff --git a/distribution/build.gradle b/distribution/build.gradle index 76549a83d0b0d..8fe7cfa7e2017 100644 --- a/distribution/build.gradle +++ b/distribution/build.gradle @@ -37,7 +37,6 @@ tasks.register("generateDependenciesReport", ConcatFilesTask) { headerLine = "name,version,url,license,sourceURL" target = new File(providers.systemProperty('csv') .orElse("${project.buildDir}/reports/dependencies/es-dependencies.csv") - .forUseAtConfigurationTime() .get() ) // explicitly add our dependency on the JDK diff --git a/distribution/docker/build.gradle b/distribution/docker/build.gradle index a7b0e57210366..a2fa645a63b07 100644 --- a/distribution/docker/build.gradle +++ b/distribution/docker/build.gradle @@ -18,7 +18,7 @@ apply plugin: 'elasticsearch.rest-resources' ext.cloudflareZlibVersion = '1.2.8' -String buildId = providers.systemProperty('build.id').forUseAtConfigurationTime().getOrNull() +String buildId = providers.systemProperty('build.id').getOrNull() boolean useLocalArtifacts = buildId != null && buildId.isBlank() == false repositories { diff --git a/distribution/packages/build.gradle b/distribution/packages/build.gradle index a0c7598352e6c..86715499368f0 100644 --- a/distribution/packages/build.gradle +++ b/distribution/packages/build.gradle @@ -273,7 +273,7 @@ ospackage { signingKeyPassphrase = project.property('signing.password') signingKeyRingFile = project.hasProperty('signing.secretKeyRingFile') ? 
project.file(project.property('signing.secretKeyRingFile')) : - new File(new File(project.providers.systemProperty('user.home').orElse('.gnupg').forUseAtConfigurationTime().get()), 'secring.gpg') + new File(new File(project.providers.systemProperty('user.home').orElse('.gnupg').get()), 'secring.gpg') } // version found on oldest supported distro, centos-6 diff --git a/modules/ingest-geoip/build.gradle b/modules/ingest-geoip/build.gradle index b265a162f07e7..efe677d7ef223 100644 --- a/modules/ingest-geoip/build.gradle +++ b/modules/ingest-geoip/build.gradle @@ -35,7 +35,6 @@ restResources { } def useFixture = providers.environmentVariable("geoip_use_service") - .forUseAtConfigurationTime() .map { s -> Boolean.parseBoolean(s) == false } .getOrElse(true) diff --git a/modules/lang-painless/build.gradle b/modules/lang-painless/build.gradle index ae3a9ee4ca5d7..b5c7bbf484a58 100644 --- a/modules/lang-painless/build.gradle +++ b/modules/lang-painless/build.gradle @@ -181,7 +181,7 @@ tasks.register("generateContextApiSpec", DefaultTestClustersTask) { classpath = sourceSets.doc.runtimeClasspath systemProperty "cluster.uri", "${-> generateContextApiSpecCluster.get().singleNode().getAllHttpSocketURI().get(0)}" systemProperty "jdksrc", providers.systemProperty("jdksrc").getOrNull() - systemProperty "packageSources", providers.systemProperty("packageSources").forUseAtConfigurationTime().getOrNull() + systemProperty "packageSources", providers.systemProperty("packageSources").getOrNull() }.assertNormalExitValue() } } diff --git a/modules/repository-azure/build.gradle b/modules/repository-azure/build.gradle index ee7a0b39b0696..e0c57ea89ff8e 100644 --- a/modules/repository-azure/build.gradle +++ b/modules/repository-azure/build.gradle @@ -358,7 +358,7 @@ testClusters.matching { it.name == "yamlRestTest" }.configureEach { setting 'azure.client.integration_test.endpoint_suffix', azureAddress String firstPartOfSeed = BuildParams.testSeed.tokenize(':').get(0) - def ignoreTestSeed = providers.systemProperty('ignore.tests.seed').forUseAtConfigurationTime().isPresent() ? PropertyNormalization.IGNORE_VALUE : PropertyNormalization.DEFAULT + def ignoreTestSeed = providers.systemProperty('ignore.tests.seed').isPresent() ? 
PropertyNormalization.IGNORE_VALUE : PropertyNormalization.DEFAULT setting 'thread_pool.repository_azure.max', (Math.abs(Long.parseUnsignedLong(firstPartOfSeed, 16) % 10) + 1).toString(), ignoreTestSeed } } diff --git a/qa/remote-clusters/build.gradle b/qa/remote-clusters/build.gradle index 93e1da8c52b9d..df03b37c5a603 100644 --- a/qa/remote-clusters/build.gradle +++ b/qa/remote-clusters/build.gradle @@ -65,7 +65,7 @@ tasks.named("preProcessFixture").configure { dockerCompose { tcpPortsToIgnoreWhenWaiting = [9600, 9601] - if ('default'.equalsIgnoreCase(providers.systemProperty('tests.distribution').forUseAtConfigurationTime().getOrElse('default'))) { + if ('default'.equalsIgnoreCase(providers.systemProperty('tests.distribution').getOrElse('default'))) { useComposeFiles = ['docker-compose.yml'] } else { useComposeFiles = ['docker-compose-oss.yml'] diff --git a/x-pack/plugin/core/build.gradle b/x-pack/plugin/core/build.gradle index 768bbb936ee0f..ce2da68cfefb4 100644 --- a/x-pack/plugin/core/build.gradle +++ b/x-pack/plugin/core/build.gradle @@ -77,7 +77,7 @@ tasks.named("processResources").configure { inputs.properties(expansions) filter("tokens" : expansions, ReplaceTokens.class) } - String licenseKey = providers.systemProperty("license.key").forUseAtConfigurationTime().getOrNull() + String licenseKey = providers.systemProperty("license.key").getOrNull() if (licenseKey != null) { println "Using provided license key from ${licenseKey}" } else if (BuildParams.isSnapshotBuild()) { diff --git a/x-pack/plugin/eql/qa/correctness/build.gradle b/x-pack/plugin/eql/qa/correctness/build.gradle index 681a0acf71483..27f88dc65364e 100644 --- a/x-pack/plugin/eql/qa/correctness/build.gradle +++ b/x-pack/plugin/eql/qa/correctness/build.gradle @@ -15,13 +15,11 @@ dependencies { } File serviceAccountFile = providers.environmentVariable('eql_test_credentials_file') - .forUseAtConfigurationTime() .orElse(providers.systemProperty('eql.test.credentials.file').forUseAtConfigurationTime()) .map { s -> new File(s)} .getOrNull() Boolean preserveData = providers.systemProperty('eql.test.preserve.data') - .forUseAtConfigurationTime() .map { s -> Boolean.parseBoolean(s) } .getOrElse(false) diff --git a/x-pack/plugin/ml/build.gradle b/x-pack/plugin/ml/build.gradle index 0efcce2d23fc6..495e4c1097260 100644 --- a/x-pack/plugin/ml/build.gradle +++ b/x-pack/plugin/ml/build.gradle @@ -16,7 +16,7 @@ repositories { forRepository { ivy { name "ml-cpp" - url providers.systemProperty('build.ml_cpp.repo').forUseAtConfigurationTime().orElse('https://prelert-artifacts.s3.amazonaws.com').get() + url providers.systemProperty('build.ml_cpp.repo').orElse('https://prelert-artifacts.s3.amazonaws.com').get() metadataSources { // no repository metadata, look directly for the artifact artifact() diff --git a/x-pack/plugin/sql/sql-cli/build.gradle b/x-pack/plugin/sql/sql-cli/build.gradle index 426408950b890..46fc6cd554384 100644 --- a/x-pack/plugin/sql/sql-cli/build.gradle +++ b/x-pack/plugin/sql/sql-cli/build.gradle @@ -54,7 +54,7 @@ tasks.register("runcli") { dependsOn "shadowJar" doLast { List command = ["${BuildParams.runtimeJavaHome}/bin/java"] - if ('true'.equals(providers.systemProperty('debug').orElse('false').forUseAtConfigurationTime().get())) { + if ('true'.equals(providers.systemProperty('debug').orElse('false').get())) { command += '-agentlib:jdwp=transport=dt_socket,server=y,suspend=y,address=8000' } command += ['-jar', shadowJar.archiveFile.get().asFile.absolutePath] From efb76d8c162b1c58abb8fe706c3e4583eec01cb2 Mon Sep 17 
00:00:00 2001 From: kkewwei Date: Tue, 15 Feb 2022 22:03:39 +0800 Subject: [PATCH 09/37] Eagerly check keyword field length (#83738) If the UTF8 encoding of a keyword field value is bigger than the max length 32766, Lucene will fail the indexing request and, to roll back the changes, will mark the (possibly partially indexed) document as deleted. This results in deletes, even in an append-only workload, which in turn leads to slower merges, as these will potentially have to fall back to MergeStrategy.DOC instead of MergeStrategy.BULK. To avoid this, we do a preflight check here before indexing the document into Lucene. Closes #80865 --- docs/changelog/83738.yaml | 6 +++++ .../index/mapper/KeywordFieldMapper.java | 24 +++++++++++++++++++ .../index/mapper/KeywordFieldMapperTests.java | 13 ++++++++++ 3 files changed, 43 insertions(+) create mode 100644 docs/changelog/83738.yaml diff --git a/docs/changelog/83738.yaml b/docs/changelog/83738.yaml new file mode 100644 index 0000000000000..e4d661bf08284 --- /dev/null +++ b/docs/changelog/83738.yaml @@ -0,0 +1,6 @@ +pr: 83738 +summary: Check that the utf8 length of a keyword field is not bigger than 32766 in ES, rather than in Lucene. +area: Mapping +type: enhancement +issues: + - 80865 diff --git a/server/src/main/java/org/elasticsearch/index/mapper/KeywordFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/KeywordFieldMapper.java index dd934733d4a3c..bdc9976208d4e 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/KeywordFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/KeywordFieldMapper.java @@ -61,6 +61,7 @@ import java.io.IOException; import java.io.UncheckedIOException; import java.util.ArrayList; +import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.List; @@ -69,6 +70,8 @@ import java.util.Objects; import java.util.function.Supplier; +import static org.apache.lucene.util.ByteBlockPool.BYTE_BLOCK_SIZE; + /** * A field mapper for keywords. This mapper accepts strings and indexes them as-is. */ @@ -905,6 +908,27 @@ private void indexValue(DocumentParserContext context, String value) { // convert to utf8 only once before feeding postings/dv/stored fields final BytesRef binaryValue = new BytesRef(value); + + // If the UTF8 encoding of the field value is bigger than the max length 32766, Lucene will fail the indexing request and, to roll + // back the changes, will mark the (possibly partially indexed) document as deleted. This results in deletes, even in an append-only + // workload, which in turn leads to slower merges, as these will potentially have to fall back to MergeStrategy.DOC instead of + // MergeStrategy.BULK. To avoid this, we do a preflight check here before indexing the document into Lucene. + if (binaryValue.length > BYTE_BLOCK_SIZE - 2) { + byte[] prefix = new byte[30]; + System.arraycopy(binaryValue.bytes, binaryValue.offset, prefix, 0, 30); + String msg = "Document contains at least one immense term in field=\"" + + fieldType().name() + + "\" (whose " + + "UTF8 encoding is longer than the max length " + + (BYTE_BLOCK_SIZE - 2) + + "), all of which were " + + "skipped. Please correct the analyzer to not produce such terms. 
The prefix of the first immense " + + "term is: '" + + Arrays.toString(prefix) + + "...'"; + throw new IllegalArgumentException(msg); + } + if (fieldType.indexOptions() != IndexOptions.NONE || fieldType.stored()) { Field field = new KeywordField(fieldType().name(), binaryValue, fieldType); context.doc().add(field); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/KeywordFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/KeywordFieldMapperTests.java index b061346d0dddb..9286daa30d37d 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/KeywordFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/KeywordFieldMapperTests.java @@ -605,4 +605,17 @@ public void testDimensionInRoutingPath() throws IOException { ); mapper.documentMapper().validate(settings, false); // Doesn't throw } + + public void testKeywordFieldUtf8LongerThan32766() throws Exception { + DocumentMapper mapper = createDocumentMapper(fieldMapping(b -> b.field("type", "keyword"))); + StringBuilder stringBuilder = new StringBuilder(32768); + for (int i = 0; i < 32768; i++) { + stringBuilder.append("a"); + } + MapperParsingException e = expectThrows( + MapperParsingException.class, + () -> mapper.parse(source(b -> b.field("field", stringBuilder.toString()))) + ); + assertThat(e.getCause().getMessage(), containsString("UTF8 encoding is longer than the max length")); + } } From c98a7299ee11d52123e896585048412326b577bb Mon Sep 17 00:00:00 2001 From: David Turner Date: Tue, 15 Feb 2022 15:07:50 +0000 Subject: [PATCH 10/37] Fix LeaderCheckerTests#testFollowerBehaviour (#83897) This test computes the expected message by tracking the different kinds of failures generated by a mock transport service. This tracking counts consecutive failures, so it is reset on success, but it is not explicitly reset when starting the second pass. This was usually fine since we start the second pass with a long sequence of checks that do not fail, which would reset the tracking. Rarely, however, this sequence was too short to contain any success responses, so the counters would not be reset as needed. This commit makes sure we run at least `leaderCheckRetryCount` iterations without a failure to ensure that at least one of them succeeds. 
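To make the bound concrete, consider a self-contained sketch (illustrative only — `retryCount` stands in for `leaderCheckRetryCount`, and the actual change is in the diff below). The test's invariant is that no leader failure is detected, which means a run can contain at most `leaderCheckRetryCount - 1` consecutive failed checks; any window of at least `leaderCheckRetryCount` iterations must therefore contain a successful check, and that success is what resets the failure counters.

    import java.util.Random;

    // Illustrative sketch: why the random iteration count must be bounded
    // below by the retry count rather than by a small constant.
    class WindowBoundDemo {
        public static void main(String[] args) {
            Random random = new Random();
            int retryCount = 3; // stands in for leaderCheckRetryCount

            // Old bound: a window as short as 2 iterations may consist entirely of
            // failed checks, so counters carried over from the first pass survive.
            long tooShort = 2 + random.nextInt(999); // uniform in [2, 1000]

            // New bound: the window spans at least retryCount iterations, so it is
            // guaranteed to contain a success that resets the counters.
            long maxCheckCount = retryCount + random.nextInt(1001 - retryCount); // uniform in [retryCount, 1000]

            System.out.println("old window: " + tooShort + ", new window: " + maxCheckCount);
        }
    }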
Closes #83857 Closes #83924 --- .../cluster/coordination/LeaderCheckerTests.java | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/server/src/test/java/org/elasticsearch/cluster/coordination/LeaderCheckerTests.java b/server/src/test/java/org/elasticsearch/cluster/coordination/LeaderCheckerTests.java index 257203b75364a..f06bf27a9c806 100644 --- a/server/src/test/java/org/elasticsearch/cluster/coordination/LeaderCheckerTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/coordination/LeaderCheckerTests.java @@ -205,7 +205,9 @@ public String toString() { leaderChecker.updateLeader(leader2); { checkCount.set(0); - final long maxCheckCount = randomLongBetween(2, 1000); + // run at least leaderCheckRetryCount iterations to ensure at least one success so that we reset the counters and clear out + // anything left over from the previous run + final long maxCheckCount = randomLongBetween(leaderCheckRetryCount, 1000); logger.info("--> checking again that no failure is detected in {} checks", maxCheckCount); while (checkCount.get() < maxCheckCount) { deterministicTaskQueue.runAllRunnableTasks(); From 4e6f4b0b84f09c404ac2cf7c808ff533ee2c8048 Mon Sep 17 00:00:00 2001 From: Joe Gallo Date: Tue, 15 Feb 2022 10:20:56 -0500 Subject: [PATCH 11/37] MetadataIndexStateService cleanups (#83868) --- .../elasticsearch/cluster/ClusterState.java | 4 ++ .../cluster/block/ClusterBlocks.java | 7 ++++ .../metadata/MetadataIndexStateService.java | 42 ++++++------------- 3 files changed, 24 insertions(+), 29 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/cluster/ClusterState.java b/server/src/main/java/org/elasticsearch/cluster/ClusterState.java index 3fd0f52ddac94..68fe938e63de7 100644 --- a/server/src/main/java/org/elasticsearch/cluster/ClusterState.java +++ b/server/src/main/java/org/elasticsearch/cluster/ClusterState.java @@ -631,6 +631,10 @@ public DiscoveryNodes nodes() { return nodes; } + public Builder routingTable(RoutingTable.Builder routingTableBuilder) { + return routingTable(routingTableBuilder.build()); + } + public Builder routingTable(RoutingTable routingTable) { this.routingTable = routingTable; return this; diff --git a/server/src/main/java/org/elasticsearch/cluster/block/ClusterBlocks.java b/server/src/main/java/org/elasticsearch/cluster/block/ClusterBlocks.java index fbbf6ed2391c4..2e95915661707 100644 --- a/server/src/main/java/org/elasticsearch/cluster/block/ClusterBlocks.java +++ b/server/src/main/java/org/elasticsearch/cluster/block/ClusterBlocks.java @@ -300,6 +300,13 @@ public static Builder builder() { return new Builder(); } + /** + * Convenience method, equivalent to: {@code builder().blocks(blocks)} + */ + public static Builder builder(ClusterBlocks blocks) { + return builder().blocks(blocks); + } + public static class Builder { private final Set global = new HashSet<>(); diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataIndexStateService.java b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataIndexStateService.java index ddd449f9d265d..312bb505f9059 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataIndexStateService.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataIndexStateService.java @@ -225,10 +225,8 @@ public void clusterStateProcessed( ); } // acknowledged maybe be false but some indices may have been correctly - // closed, so - // we maintain a kind of coherency by overriding the shardsAcknowledged - // value - // (see 
ShardsAcknowledgedResponse constructor) + // closed, so we maintain a kind of coherency by overriding the + // shardsAcknowledged value (see ShardsAcknowledgedResponse constructor) boolean shardsAcked = acknowledged ? shardsAcknowledged : false; listener.onResponse( new CloseIndexResponse(acknowledged, shardsAcked, indices) @@ -271,11 +269,9 @@ static ClusterState addIndexClosedBlocks( final Map blockedIndices, final ClusterState currentState ) { - final Metadata.Builder metadata = Metadata.builder(currentState.metadata()); - final Set indicesToClose = new HashSet<>(); for (Index index : indices) { - final IndexMetadata indexMetadata = metadata.getSafe(index); + final IndexMetadata indexMetadata = currentState.metadata().getIndexSafe(index); if (indexMetadata.getState() != IndexMetadata.State.CLOSE) { indicesToClose.add(index); } else { @@ -304,8 +300,7 @@ static ClusterState addIndexClosedBlocks( ); } - final ClusterBlocks.Builder blocks = ClusterBlocks.builder().blocks(currentState.blocks()); - final RoutingTable.Builder routingTable = RoutingTable.builder(currentState.routingTable()); + final ClusterBlocks.Builder blocks = ClusterBlocks.builder(currentState.blocks()); for (Index index : indicesToClose) { ClusterBlock indexBlock = null; @@ -334,7 +329,7 @@ static ClusterState addIndexClosedBlocks( blockedIndices.keySet().stream().map(Object::toString).collect(Collectors.joining(",")) ) ); - return ClusterState.builder(currentState).blocks(blocks).metadata(metadata).routingTable(routingTable.build()).build(); + return ClusterState.builder(currentState).blocks(blocks).build(); } /** @@ -366,8 +361,7 @@ private static Tuple> addIndexBlock( return Tuple.tuple(currentState, Collections.emptyMap()); } - final ClusterBlocks.Builder blocks = ClusterBlocks.builder().blocks(currentState.blocks()); - final RoutingTable.Builder routingTable = RoutingTable.builder(currentState.routingTable()); + final ClusterBlocks.Builder blocks = ClusterBlocks.builder(currentState.blocks()); final Map blockedIndices = new HashMap<>(); for (Index index : indicesToAddBlock) { @@ -405,10 +399,7 @@ private static Tuple> addIndexBlock( block.name, blockedIndices.keySet().stream().map(Object::toString).collect(Collectors.toList()) ); - return Tuple.tuple( - ClusterState.builder(currentState).blocks(blocks).metadata(metadata).routingTable(routingTable.build()).build(), - blockedIndices - ); + return Tuple.tuple(ClusterState.builder(currentState).blocks(blocks).metadata(metadata).build(), blockedIndices); } /** @@ -793,9 +784,8 @@ static Tuple> closeRoutingTable( final Map blockedIndices, final Map verifyResult ) { - final Metadata.Builder metadata = Metadata.builder(currentState.metadata()); - final ClusterBlocks.Builder blocks = ClusterBlocks.builder().blocks(currentState.blocks()); + final ClusterBlocks.Builder blocks = ClusterBlocks.builder(currentState.blocks()); final RoutingTable.Builder routingTable = RoutingTable.builder(currentState.routingTable()); final Set closedIndices = new HashSet<>(); @@ -881,7 +871,7 @@ static Tuple> closeRoutingTable( } logger.info("completed closing of indices {}", closedIndices); return Tuple.tuple( - ClusterState.builder(currentState).blocks(blocks).metadata(metadata).routingTable(routingTable.build()).build(), + ClusterState.builder(currentState).blocks(blocks).metadata(metadata).routingTable(routingTable).build(), closingResults.values() ); } @@ -939,10 +929,7 @@ private static Tuple> finalizeBlock( final Map verifyResult, final APIBlock block ) { - - final Metadata.Builder metadata 
= Metadata.builder(currentState.metadata()); - final ClusterBlocks.Builder blocks = ClusterBlocks.builder().blocks(currentState.blocks()); - final RoutingTable.Builder routingTable = RoutingTable.builder(currentState.routingTable()); + final ClusterBlocks.Builder blocks = ClusterBlocks.builder(currentState.blocks()); final Set effectivelyBlockedIndices = new HashSet<>(); Map blockingResults = new HashMap<>(verifyResult); @@ -995,10 +982,7 @@ private static Tuple> finalizeBlock( } } logger.info("completed adding block {} to indices {}", block.name, effectivelyBlockedIndices); - return Tuple.tuple( - ClusterState.builder(currentState).blocks(blocks).metadata(metadata).routingTable(routingTable.build()).build(), - blockingResults.values() - ); + return Tuple.tuple(ClusterState.builder(currentState).blocks(blocks).build(), blockingResults.values()); } /** @@ -1129,7 +1113,7 @@ private ClusterState openIndices(final Index[] indices, final ClusterState curre }); final Metadata.Builder metadata = Metadata.builder(currentState.metadata()); - final ClusterBlocks.Builder blocks = ClusterBlocks.builder().blocks(currentState.blocks()); + final ClusterBlocks.Builder blocks = ClusterBlocks.builder(currentState.blocks()); final Version minIndexCompatibilityVersion = currentState.getNodes().getMaxNodeVersion().minimumIndexCompatibilityVersion(); for (IndexMetadata indexMetadata : indicesToOpen) { @@ -1168,7 +1152,7 @@ private ClusterState openIndices(final Index[] indices, final ClusterState curre routingTable.addAsFromCloseToOpen(updatedState.metadata().getIndexSafe(previousIndexMetadata.getIndex())); } } - return ClusterState.builder(updatedState).routingTable(routingTable.build()).build(); + return ClusterState.builder(updatedState).routingTable(routingTable).build(); } } From ff2c410c565652cc06f63097256f54042cbf00e9 Mon Sep 17 00:00:00 2001 From: David Turner Date: Tue, 15 Feb 2022 16:36:49 +0000 Subject: [PATCH 12/37] Fix GMSPersStateTests#testDataOnlyNodePersistence (#83900) This test assumes that we call `setLastAcceptedState` at least once in the loop, but very rarely this would not be the case. This fix ensures that we always persist at least one state. Closes #83759 --- .../gateway/GatewayMetaStatePersistedStateTests.java | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/server/src/test/java/org/elasticsearch/gateway/GatewayMetaStatePersistedStateTests.java b/server/src/test/java/org/elasticsearch/gateway/GatewayMetaStatePersistedStateTests.java index aa7ae1804cc2a..1c256f00e2dc1 100644 --- a/server/src/test/java/org/elasticsearch/gateway/GatewayMetaStatePersistedStateTests.java +++ b/server/src/test/java/org/elasticsearch/gateway/GatewayMetaStatePersistedStateTests.java @@ -480,9 +480,11 @@ public void testDataOnlyNodePersistence() throws Exception { // generate a series of updates and check if batching works final String indexName = randomAlphaOfLength(10); long currentTerm = state.term(); + boolean wroteState = false; final int iterations = randomIntBetween(1, 1000); for (int i = 0; i < iterations; i++) { - if (rarely()) { + final boolean mustWriteState = wroteState == false && i == iterations - 1; + if (rarely() && mustWriteState == false) { // bump term currentTerm = currentTerm + (rarely() ? 
randomIntBetween(1, 5) : 0L); persistedState.setCurrentTerm(currentTerm); @@ -496,8 +498,10 @@ public void testDataOnlyNodePersistence() throws Exception { Metadata.builder().coordinationMetadata(createCoordinationMetadata(term)).put(indexMetadata, false).build() ); persistedState.setLastAcceptedState(state); + wroteState = true; } } + assertTrue(wroteState); // must write it at least once assertEquals(currentTerm, persistedState.getCurrentTerm()); assertClusterStateEqual(state, persistedState.getLastAcceptedState()); assertBusy(() -> assertTrue(gateway.allPendingAsyncStatesWritten())); From dae889b3e5c4a31b0849e3bc440ece1d018a9050 Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Tue, 15 Feb 2022 08:56:21 -0800 Subject: [PATCH 13/37] Forbid guava group instead of artifact name (#83546) We disallow including guava on the compile classpaths, but currently this only forbids the main guava artifact. Yet there are other artifacts (e.g. failureaccess) provided by guava. This commit changes the dependency check to use the guava group instead of the artifact name. --- .../elasticsearch.forbidden-dependencies.gradle | 11 +++++------ modules/repository-gcs/build.gradle | 2 +- 2 files changed, 6 insertions(+), 7 deletions(-) diff --git a/build-tools-internal/src/main/groovy/elasticsearch.forbidden-dependencies.gradle b/build-tools-internal/src/main/groovy/elasticsearch.forbidden-dependencies.gradle index 2c20d79fac711..e67cb7846c791 100644 --- a/build-tools-internal/src/main/groovy/elasticsearch.forbidden-dependencies.gradle +++ b/build-tools-internal/src/main/groovy/elasticsearch.forbidden-dependencies.gradle @@ -8,16 +8,15 @@ // we do not want any of these dependencies on the compilation classpath // because they could then be used within Elasticsearch -List<String> FORBIDDEN_DEPENDENCIES = [ - 'guava' +List<String> FORBIDDEN_DEPENDENCY_GROUPS = [ + 'com.google.guava' ] Closure checkDeps = { Configuration configuration -> configuration.resolutionStrategy.eachDependency { - String artifactName = it.target.name - if (FORBIDDEN_DEPENDENCIES.contains(artifactName)) { - throw new GradleException("Dependency '${artifactName}' on configuration '${configuration.name}' is not allowed. " + - "If it is needed as a transitive depenency, try adding it to the runtime classpath") + if (FORBIDDEN_DEPENDENCY_GROUPS.contains(it.target.group)) { + throw new GradleException("Dependency '${it.target.group}:${it.target.name}' on configuration '${configuration.name}' is not allowed. " + + "If it is needed as a transitive dependency, try adding it to the runtime classpath") } } } diff --git a/modules/repository-gcs/build.gradle b/modules/repository-gcs/build.gradle index ff6dbe983d9c4..0f81809b15a20 100644 --- a/modules/repository-gcs/build.gradle +++ b/modules/repository-gcs/build.gradle @@ -30,7 +30,7 @@ dependencies { api 'com.google.cloud:google-cloud-core:1.95.4' api 'com.google.cloud:google-cloud-core-http:1.95.4' runtimeOnly 'com.google.guava:guava:30.1.1-jre' - api 'com.google.guava:failureaccess:1.0.1' + runtimeOnly 'com.google.guava:failureaccess:1.0.1' api "commons-logging:commons-logging:${versions.commonslogging}" api "org.apache.logging.log4j:log4j-1.2-api:${versions.log4j}" api "commons-codec:commons-codec:${versions.commonscodec}" From ff2c410c565652cc06f63097256f54042cbf00e9 Mon Sep 17 00:00:00 2001 From: David Turner Date: Tue, 15 Feb 2022 16:36:49 +0000 Subject: [PATCH 14/37] Fix ZeroBytesReference#indexOf (#83956) This method would claim to find a zero byte even if there are no remaining bytes in the buffer. 
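A minimal standalone illustration of the boundary condition (hypothetical code, not the Elasticsearch class itself — the real fix follows in the diff): with `from == length` there are zero bytes left to search, so no marker can possibly be found, yet the old logic still returned `from`.

    // Hypothetical stand-in for a bytes reference that reads as all zeros.
    class AllZeros {
        final int length;

        AllZeros(int length) {
            this.length = length;
        }

        // Old behaviour: returns 'from' whenever the marker is zero, even
        // when 'from' already points past the last byte of the reference.
        int indexOfBuggy(byte marker, int from) {
            return marker == 0 ? from : -1;
        }

        // Fixed behaviour: an empty remaining range can never contain a match.
        int indexOfFixed(byte marker, int from) {
            return (marker == 0 && from < length) ? from : -1;
        }

        public static void main(String[] args) {
            AllZeros ref = new AllZeros(8);
            System.out.println(ref.indexOfBuggy((byte) 0, 8)); // prints 8: claims a match beyond the end
            System.out.println(ref.indexOfFixed((byte) 0, 8)); // prints -1: nothing left to search
        }
    }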
This commit fixes that. --- .../org/elasticsearch/common/bytes/ZeroBytesReference.java | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/server/src/test/java/org/elasticsearch/common/bytes/ZeroBytesReference.java b/server/src/test/java/org/elasticsearch/common/bytes/ZeroBytesReference.java index 2876220483e2c..5c06e5ed5ebee 100644 --- a/server/src/test/java/org/elasticsearch/common/bytes/ZeroBytesReference.java +++ b/server/src/test/java/org/elasticsearch/common/bytes/ZeroBytesReference.java @@ -20,12 +20,14 @@ public class ZeroBytesReference extends AbstractBytesReference { private final int length; public ZeroBytesReference(int length) { + assert 0 <= length : length; this.length = length; } @Override public int indexOf(byte marker, int from) { - if (marker == 0) { + assert 0 <= from && from <= length : from + " vs " + length; + if (marker == 0 && from < length) { return from; } else { return -1; @@ -34,6 +36,7 @@ public int indexOf(byte marker, int from) { @Override public byte get(int index) { + assert 0 <= index && index < length : index + " vs " + length; return 0; } @@ -44,6 +47,7 @@ public int length() { @Override public BytesReference slice(int from, int length) { + assert from + length <= this.length : from + " and " + length + " vs " + this.length; return new ZeroBytesReference(length); } From e117812ef8062f66fd224a8e849ce3b907462678 Mon Sep 17 00:00:00 2001 From: James Rodewig Date: Tue, 15 Feb 2022 12:19:04 -0500 Subject: [PATCH 15/37] [DOCS] Update migration APIs docs (#83901) * Adds a shared note that the migration APIs are mainly intended for internal use by Kibana's Upgrade Assistant. * Updates the feature migration API docs to use updated "migration" terminology. * Removes some references to major versions from the deprecation API docs. --- .../migration/apis/deprecation.asciidoc | 9 ++-- ...de.asciidoc => feature-migration.asciidoc} | 54 +++++++++++-------- .../apis/shared-migration-apis-tip.asciidoc | 4 ++ docs/reference/migration/migration.asciidoc | 7 ++- docs/reference/redirects.asciidoc | 5 ++ 5 files changed, 50 insertions(+), 29 deletions(-) rename docs/reference/migration/apis/{feature_upgrade.asciidoc => feature-migration.asciidoc} (68%) create mode 100644 docs/reference/migration/apis/shared-migration-apis-tip.asciidoc diff --git a/docs/reference/migration/apis/deprecation.asciidoc b/docs/reference/migration/apis/deprecation.asciidoc index e3ebd57263c2d..fd82bb3e0e6d2 100644 --- a/docs/reference/migration/apis/deprecation.asciidoc +++ b/docs/reference/migration/apis/deprecation.asciidoc @@ -5,14 +5,11 @@ Deprecation info ++++ -IMPORTANT: Use this API to check for deprecated configuration before performing -a major version upgrade. You should run it on the last minor version of the -major version you are upgrading from, as earlier minor versions may not include -all deprecations. +include::{es-repo-dir}/migration/apis/shared-migration-apis-tip.asciidoc[] The deprecation API is to be used to retrieve information about different cluster, node, and index level settings that use deprecated features that will -be removed or changed in the next major version. +be removed or changed in a future version. [[migration-api-request]] ==== {api-request-title} @@ -118,7 +115,7 @@ issue. |======= |warning | You can upgrade directly, but you are using deprecated functionality -which will not be available or behave differently in the next major version. +which will not be available or behave differently in a future version. 
|critical | You cannot upgrade without fixing this problem. |======= diff --git a/docs/reference/migration/apis/feature_upgrade.asciidoc b/docs/reference/migration/apis/feature-migration.asciidoc similarity index 68% rename from docs/reference/migration/apis/feature_upgrade.asciidoc rename to docs/reference/migration/apis/feature-migration.asciidoc index 1f1fc5b2aa239..9cd904f42e084 100644 --- a/docs/reference/migration/apis/feature_upgrade.asciidoc +++ b/docs/reference/migration/apis/feature-migration.asciidoc @@ -1,35 +1,47 @@ [role="xpack"] -[[migration-api-feature-upgrade]] -=== Feature Upgrade APIs +[[feature-migration-api]] +=== Feature migration APIs ++++ -Feature upgrade APIs +Feature migration ++++ -IMPORTANT: Use this API to check for system features that need to be upgraded before -a major version upgrade. You should run it on the last minor version of the -major version you are upgrading from. +include::{es-repo-dir}/migration/apis/shared-migration-apis-tip.asciidoc[] -The feature upgrade APIs are to be used to retrieve information about system features -that have to be upgraded before a cluster can be migrated to the next major version number, -and to trigger an automated system upgrade that might potentially involve downtime for -{es} system features. +Version upgrades sometimes require changes to how features store configuration +information and data in system indices. The feature migration APIs enable you to +see what features require changes, initiate the automatic migration process, and +check migration status. -[[feature-upgrade-api-request]] +Some functionality might be temporarily unavailable during the migration +process. + +[[feature-migration-api-request]] ==== {api-request-title} `GET /migration/system_features` -[[feature-upgrade-api-prereqs]] +`POST /migration/system_features` + +[[feature-migration-api-prereqs]] ==== {api-prereq-title} * If the {es} {security-features} are enabled, you must have the `manage` <> to use this API. -[[feature-upgrade-api-example]] +[[feature-migration-api-desc]] +==== {api-description-title} + +Submit a GET request to the `_migration/system_features` endpoint to see what +features need to be migrated and the status of any migrations that are in +progress. + +Submit a POST request to the endpoint to start the migration process. + +[[feature-migration-api-example]] ==== {api-examples-title} -To see the list of system features needing upgrades, submit a GET request to the -`_migration/system_features` endpoint: +When you submit a GET request to the `_migration/system_features` endpoint, the +response indicates the status of any features that need to be migrated. [source,console] -------------------------------------------------- @@ -120,10 +132,10 @@ Example response: -------------------------------------------------- // TESTRESPONSE[s/"minimum_index_version" : "8.0.0"/"minimum_index_version" : $body.$_path/] -This response tells us that Elasticsearch security needs its internal -indices upgraded before we can upgrade the cluster to 8.0. -To perform the required upgrade, submit a POST request to the same endpoint. +When you submit a POST request to the `_migration/system_features` endpoint to +start the migration process, the response indicates what features will be +migrated. 
[source,console] -------------------------------------------------- @@ -138,13 +150,13 @@ Example response: "accepted" : true, "features" : [ { - "feature_name" : "security" + "feature_name" : "security" <1> } ] } -------------------------------------------------- // TESTRESPONSE[skip: can't actually upgrade system indices in these tests] -This tells us that the security index is being upgraded. To check the -overall status of the upgrade, call the endpoint with GET. +<1> {es} security will be migrated before the cluster is upgraded. +Subsequent GET requests will return the status of the migration process. diff --git a/docs/reference/migration/apis/shared-migration-apis-tip.asciidoc b/docs/reference/migration/apis/shared-migration-apis-tip.asciidoc new file mode 100644 index 0000000000000..6a606ac83354c --- /dev/null +++ b/docs/reference/migration/apis/shared-migration-apis-tip.asciidoc @@ -0,0 +1,4 @@ +TIP: These APIs are designed for indirect use by {kib}'s **Upgrade Assistant**. +We strongly recommend you use the **Upgrade Assistant** to upgrade from +{prev-major-last} to {version}. For upgrade instructions, refer to +{stack-ref}/upgrading-elastic-stack.html[Upgrading to Elastic {version}]. \ No newline at end of file diff --git a/docs/reference/migration/migration.asciidoc b/docs/reference/migration/migration.asciidoc index 88c1631e30903..ffb2ca7a7859d 100644 --- a/docs/reference/migration/migration.asciidoc +++ b/docs/reference/migration/migration.asciidoc @@ -2,9 +2,12 @@ [[migration-api]] == Migration APIs -The migration APIs simplify upgrading {xpack} indices from one version to another. +The migration APIs power {kib}'s **Upgrade Assistant** feature. + +include::apis/shared-migration-apis-tip.asciidoc[] * <> +* <> include::apis/deprecation.asciidoc[] -include::apis/feature_upgrade.asciidoc[] +include::apis/feature-migration.asciidoc[] diff --git a/docs/reference/redirects.asciidoc b/docs/reference/redirects.asciidoc index 7badd5ce5dd45..c8c31ee3dd775 100644 --- a/docs/reference/redirects.asciidoc +++ b/docs/reference/redirects.asciidoc @@ -3,6 +3,11 @@ The following pages have moved or been deleted. +[role="exclude",id="migration-api-feature-upgrade"] +=== Feature upgrade APIs + +Refer to <>. + [role="exclude",id="java-clients"] === Java transport client and security From 11e1d009a4c114e117094374f1be513750140b0f Mon Sep 17 00:00:00 2001 From: Dan Roscigno Date: Tue, 15 Feb 2022 12:29:00 -0500 Subject: [PATCH 16/37] [DOCS] Update links to avoid redirects (#83944) Replaces two links that currently go to the redirect page with the updated links to reduce clicks. --- x-pack/docs/en/security/authentication/saml-guide.asciidoc | 3 ++- x-pack/docs/en/security/authentication/saml-realm.asciidoc | 2 +- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/x-pack/docs/en/security/authentication/saml-guide.asciidoc b/x-pack/docs/en/security/authentication/saml-guide.asciidoc index e4f6bc23c1543..ba19563c75f81 100644 --- a/x-pack/docs/en/security/authentication/saml-guide.asciidoc +++ b/x-pack/docs/en/security/authentication/saml-guide.asciidoc @@ -20,7 +20,8 @@ required in {kib} to activate the SAML authentication provider. NOTE: The SAML support in {kib} is designed on the expectation that it will be the primary (or sole) authentication method for users of that {kib} instance. Once you enable SAML authentication in {kib} it will affect all users who try -to login. The <> section provides more detail about how this works. +to login. 
The <> section provides more detail about how +this works. [[saml-guide-idp]] === The identity provider diff --git a/x-pack/docs/en/security/authentication/saml-realm.asciidoc b/x-pack/docs/en/security/authentication/saml-realm.asciidoc index cd91505f63d32..dba414d192081 100644 --- a/x-pack/docs/en/security/authentication/saml-realm.asciidoc +++ b/x-pack/docs/en/security/authentication/saml-realm.asciidoc @@ -17,4 +17,4 @@ chain. In order to simplify the process of configuring SAML authentication within the Elastic Stack, there is a step-by-step guide to -<>. +<>. From e3deacf54712a787d573038c101254270e097381 Mon Sep 17 00:00:00 2001 From: Tobias Stadler Date: Tue, 15 Feb 2022 18:42:17 +0100 Subject: [PATCH 17/37] [DOCS] Fix typos (#83895) --- .../painless-watcher-context-variables.asciidoc | 2 +- docs/plugins/repository.asciidoc | 2 +- docs/reference/analysis/analyzers/pattern-analyzer.asciidoc | 2 +- .../analysis/tokenfilters/predicate-tokenfilter.asciidoc | 2 +- docs/reference/cat/trainedmodel.asciidoc | 2 +- docs/reference/cluster/stats.asciidoc | 2 +- docs/reference/commands/keystore.asciidoc | 2 +- docs/reference/graph/explore.asciidoc | 2 +- docs/reference/how-to/recipes/scoring.asciidoc | 2 +- docs/reference/migration/migrate_8_0/plugin-changes.asciidoc | 2 +- docs/reference/migration/migrate_8_0/sql-jdbc-changes.asciidoc | 2 +- .../ml/anomaly-detection/ml-configuring-alerts.asciidoc | 2 +- docs/reference/ml/ml-shared.asciidoc | 2 +- docs/reference/modules/discovery/bootstrapping.asciidoc | 2 +- docs/reference/snapshot-restore/apis/put-repo-api.asciidoc | 2 +- docs/reference/sql/limitations.asciidoc | 2 +- 16 files changed, 16 insertions(+), 16 deletions(-) diff --git a/docs/painless/painless-contexts/painless-watcher-context-variables.asciidoc b/docs/painless/painless-contexts/painless-watcher-context-variables.asciidoc index 0a21ae1fd2bdc..c1ef342100143 100644 --- a/docs/painless/painless-contexts/painless-watcher-context-variables.asciidoc +++ b/docs/painless/painless-contexts/painless-watcher-context-variables.asciidoc @@ -9,7 +9,7 @@ The following variables are available in all watcher contexts. The id of the watch. `ctx['id']` (`String`, read-only):: - The server generated unique identifer for the run watch. + The server generated unique identifier for the run watch. `ctx['metadata']` (`Map`, read-only):: Metadata can be added to the top level of the watch definition. This diff --git a/docs/plugins/repository.asciidoc b/docs/plugins/repository.asciidoc index 61716e5f6a059..7355231141791 100644 --- a/docs/plugins/repository.asciidoc +++ b/docs/plugins/repository.asciidoc @@ -6,7 +6,7 @@ functionality in Elasticsearch by adding repositories backed by the cloud or by distributed file systems: [discrete] -==== Offical repository plugins +==== Official repository plugins NOTE: Support for S3, GCS and Azure repositories is now bundled in {es} by default. 
diff --git a/docs/reference/analysis/analyzers/pattern-analyzer.asciidoc b/docs/reference/analysis/analyzers/pattern-analyzer.asciidoc index 92c293795a3d2..e685c17f96865 100644 --- a/docs/reference/analysis/analyzers/pattern-analyzer.asciidoc +++ b/docs/reference/analysis/analyzers/pattern-analyzer.asciidoc @@ -366,7 +366,7 @@ The regex above is easier to understand as: [discrete] === Definition -The `pattern` anlayzer consists of: +The `pattern` analyzer consists of: Tokenizer:: * <> diff --git a/docs/reference/analysis/tokenfilters/predicate-tokenfilter.asciidoc b/docs/reference/analysis/tokenfilters/predicate-tokenfilter.asciidoc index b90350e2bbd13..9a20131a227e7 100644 --- a/docs/reference/analysis/tokenfilters/predicate-tokenfilter.asciidoc +++ b/docs/reference/analysis/tokenfilters/predicate-tokenfilter.asciidoc @@ -44,7 +44,7 @@ The filter produces the following tokens. The API response contains the position and offsets of each output token. Note the `predicate_token_filter` filter does not change the tokens' original -positions or offets. +positions or offsets. .*Response* [%collapsible] diff --git a/docs/reference/cat/trainedmodel.asciidoc b/docs/reference/cat/trainedmodel.asciidoc index a092b8a207bb2..6f305be845fc4 100644 --- a/docs/reference/cat/trainedmodel.asciidoc +++ b/docs/reference/cat/trainedmodel.asciidoc @@ -72,7 +72,7 @@ The estimated heap size to keep the trained model in memory. `id`::: (Default) -Idetifier for the trained model. +Identifier for the trained model. `ingest.count`, `ic`, `ingestCount`::: The total number of documents that are processed by the model. diff --git a/docs/reference/cluster/stats.asciidoc b/docs/reference/cluster/stats.asciidoc index b59b7e34689ec..8ea35d268134d 100644 --- a/docs/reference/cluster/stats.asciidoc +++ b/docs/reference/cluster/stats.asciidoc @@ -1096,7 +1096,7 @@ Total size of all file stores across all selected nodes. `total_in_bytes`:: (integer) -Total size, in bytes, of all file stores across all seleced nodes. +Total size, in bytes, of all file stores across all selected nodes. `free`:: (<>) diff --git a/docs/reference/commands/keystore.asciidoc b/docs/reference/commands/keystore.asciidoc index c56b7dbc6a64e..a8e542cb8e52d 100644 --- a/docs/reference/commands/keystore.asciidoc +++ b/docs/reference/commands/keystore.asciidoc @@ -218,7 +218,7 @@ password. [[show-keystore-value]] ==== Show settings in the keystore -To display the value of a setting in the keystorem use the `show` command: +To display the value of a setting in the keystore use the `show` command: [source,sh] ---------------------------------------------------------------- diff --git a/docs/reference/graph/explore.asciidoc b/docs/reference/graph/explore.asciidoc index 62d50ed23ecea..6536653ae4cd6 100644 --- a/docs/reference/graph/explore.asciidoc +++ b/docs/reference/graph/explore.asciidoc @@ -84,7 +84,7 @@ graph as vertices. For example: field::: Identifies a field in the documents of interest. include::: Identifies the terms of interest that form the starting points from which you want to spider out. You do not have to specify a seed query -if you specify an include clause. The include clause implicitly querys for +if you specify an include clause. The include clause implicitly queries for documents that contain any of the listed terms listed. In addition to specifying a simple array of strings, you can also pass objects with `term` and `boost` values to boost matches on particular terms. 
diff --git a/docs/reference/how-to/recipes/scoring.asciidoc b/docs/reference/how-to/recipes/scoring.asciidoc index 47a3622aabf1f..5c5a8977d34d4 100644 --- a/docs/reference/how-to/recipes/scoring.asciidoc +++ b/docs/reference/how-to/recipes/scoring.asciidoc @@ -192,7 +192,7 @@ While both options would return similar scores, there are trade-offs: <> provides a lot of flexibility, enabling you to combine the text relevance score with static signals as you prefer. On the other hand, the <> only -exposes a couple ways to incorporate static signails into the score. However, +exposes a couple ways to incorporate static signals into the score. However, it relies on the <> and <> fields, which index values in a special way that allows the <> to skip diff --git a/docs/reference/migration/migrate_8_0/plugin-changes.asciidoc b/docs/reference/migration/migrate_8_0/plugin-changes.asciidoc index 4955cfc189504..63a625c89026d 100644 --- a/docs/reference/migration/migrate_8_0/plugin-changes.asciidoc +++ b/docs/reference/migration/migrate_8_0/plugin-changes.asciidoc @@ -13,7 +13,7 @@ TIP: {ess-skip-section} ==== *Details* + In previous versions of {es}, in order to register a snapshot repository -backed by Amazon S3, Google Cloud Storge (GCS) or Microsoft Azure Blob +backed by Amazon S3, Google Cloud Storage (GCS) or Microsoft Azure Blob Storage, you first had to install the corresponding Elasticsearch plugin, for example `repository-s3`. These plugins are now included in {es} by default. diff --git a/docs/reference/migration/migrate_8_0/sql-jdbc-changes.asciidoc b/docs/reference/migration/migrate_8_0/sql-jdbc-changes.asciidoc index 4833f587d1896..0e14d5b4ec0af 100644 --- a/docs/reference/migration/migrate_8_0/sql-jdbc-changes.asciidoc +++ b/docs/reference/migration/migrate_8_0/sql-jdbc-changes.asciidoc @@ -12,7 +12,7 @@ *Details* + To reduce the dependency of the JDBC driver onto Elasticsearch classes, the JDBC driver returns geometry data as strings using the WKT (well-known text) format instead of classes from the `org.elasticsearch.geometry`. -Users can choose the geometry library desired to convert the string represantion into a full-blown objects +Users can choose the geometry library desired to convert the string representation into a full-blown objects either such as the `elasticsearch-geo` library (which returned the object `org.elasticsearch.geo` as before), jts or spatial4j. diff --git a/docs/reference/ml/anomaly-detection/ml-configuring-alerts.asciidoc b/docs/reference/ml/anomaly-detection/ml-configuring-alerts.asciidoc index 3844d5fcd7aed..4bbf9d800e3da 100644 --- a/docs/reference/ml/anomaly-detection/ml-configuring-alerts.asciidoc +++ b/docs/reference/ml/anomaly-detection/ml-configuring-alerts.asciidoc @@ -330,7 +330,7 @@ formatting is based on the {kib} settings. The peak number of bytes of memory ever used by the model. ==== -==== _Data delay has occured_ +==== _Data delay has occurred_ `context.message`:: A preconstructed message for the rule. diff --git a/docs/reference/ml/ml-shared.asciidoc b/docs/reference/ml/ml-shared.asciidoc index d5b0a2fcdaded..6fc55539c439c 100644 --- a/docs/reference/ml/ml-shared.asciidoc +++ b/docs/reference/ml/ml-shared.asciidoc @@ -995,7 +995,7 @@ Tokenize with special tokens. The tokens typically included in MPNet-style token end::inference-config-nlp-tokenization-mpnet-with-special-tokens[] tag::inference-config-nlp-vocabulary[] -The configuration for retreiving the vocabulary of the model. 
The vocabulary is
+The configuration for retrieving the vocabulary of the model. The vocabulary is
 then used at inference time. This information is usually provided automatically
 by storing vocabulary in a known, internally managed index.
 end::inference-config-nlp-vocabulary[]
diff --git a/docs/reference/modules/discovery/bootstrapping.asciidoc b/docs/reference/modules/discovery/bootstrapping.asciidoc
index be1149d54a0b7..a313f05dbf5f4 100644
--- a/docs/reference/modules/discovery/bootstrapping.asciidoc
+++ b/docs/reference/modules/discovery/bootstrapping.asciidoc
@@ -75,7 +75,7 @@ configuration. If each node name is a fully-qualified domain name such as
 `master-a.example.com` then you must use fully-qualified domain names in the
 `cluster.initial_master_nodes` list too; conversely if your node names are bare
 hostnames (without the `.example.com` suffix) then you must use bare hostnames
-in the `cluster.initial_master_nodes` list. If you use a mix of fully-qualifed
+in the `cluster.initial_master_nodes` list. If you use a mix of fully-qualified
 and bare hostnames, or there is some other mismatch between `node.name` and
 `cluster.initial_master_nodes`, then the cluster will not form successfully and
 you will see log messages like the following.
diff --git a/docs/reference/snapshot-restore/apis/put-repo-api.asciidoc b/docs/reference/snapshot-restore/apis/put-repo-api.asciidoc
index 0b77795540a14..4d578b3df489d 100644
--- a/docs/reference/snapshot-restore/apis/put-repo-api.asciidoc
+++ b/docs/reference/snapshot-restore/apis/put-repo-api.asciidoc
@@ -91,7 +91,7 @@ Repository type.
 Other repository types are available through official plugins:
 
-`hfds`:: {plugins}/repository-hdfs.html[Hadoop Distributed File System (HDFS) repository]
+`hdfs`:: {plugins}/repository-hdfs.html[Hadoop Distributed File System (HDFS) repository]
 
 ====
 [[put-snapshot-repo-api-settings-param]]
diff --git a/docs/reference/sql/limitations.asciidoc b/docs/reference/sql/limitations.asciidoc
index 383e5b1a08edd..fae4e97ff6eec 100644
--- a/docs/reference/sql/limitations.asciidoc
+++ b/docs/reference/sql/limitations.asciidoc
@@ -4,7 +4,7 @@
 
 [discrete]
 [[large-parsing-trees]]
-=== Large queries may throw `ParsingExpection`
+=== Large queries may throw `ParsingException`
 
 Extremely large queries can consume too much memory during the parsing phase,
 in which case the {es-sql} engine will abort parsing and throw an error. In
 such cases, consider reducing the query to a smaller size by potentially
From 2bcc03db274502f1531c6aab939aea21b4bd251f Mon Sep 17 00:00:00 2001
From: Claudio Marins
Date: Tue, 15 Feb 2022 15:08:19 -0300
Subject: [PATCH 18/37] [GCE Discovery] Correctly handle large zones with 500 or
 more instances (#83785)

The GCE discovery plugin mishandles zones with 500 or more instances because it
only reads the first page of the instance list API response. This PR fixes that
by following the pagination tokens until every instance has been fetched.
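In outline, the fix replaces the single list call with a loop over page tokens.
A condensed sketch (local names assumed; the actual change to
GceInstancesServiceImpl is in the diff below):

    // Sketch: accumulate instances across every page of the zone listing.
    String nextPageToken = null;
    List<Instance> zoneInstances = new ArrayList<>();
    do {
        InstanceList page = client().instances().list(project, zoneId)
            .setPageToken(nextPageToken) // null requests the first page
            .execute();
        nextPageToken = page.getNextPageToken();
        if (page.isEmpty() == false && page.getItems() != null) {
            zoneInstances.addAll(page.getItems());
        }
    } while (nextPageToken != null);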
Closes #83783 --- docs/changelog/83785.yaml | 6 +++ .../cloud/gce/GceInstancesServiceImpl.java | 19 ++++++---- .../discovery/gce/GceDiscoveryTests.java | 13 +++++++ .../discovery/gce/GceMockUtils.java | 2 +- .../zones/europe-west1-b/instances | 37 +++++++++++++++++++ .../instances%3FpageToken=next-token | 36 ++++++++++++++++++ 6 files changed, 105 insertions(+), 8 deletions(-) create mode 100644 docs/changelog/83785.yaml create mode 100644 plugins/discovery-gce/src/test/resources/org/elasticsearch/discovery/gce/compute/v1/projects/nodeswithpagination/zones/europe-west1-b/instances create mode 100644 plugins/discovery-gce/src/test/resources/org/elasticsearch/discovery/gce/compute/v1/projects/nodeswithpagination/zones/europe-west1-b/instances%3FpageToken=next-token diff --git a/docs/changelog/83785.yaml b/docs/changelog/83785.yaml new file mode 100644 index 0000000000000..db6795c82e93e --- /dev/null +++ b/docs/changelog/83785.yaml @@ -0,0 +1,6 @@ +pr: 83785 +summary: '[GCE Discovery] Correcly handle large zones with 500 or more instances' +area: Distributed +type: bug +issues: + - 83783 diff --git a/plugins/discovery-gce/src/main/java/org/elasticsearch/cloud/gce/GceInstancesServiceImpl.java b/plugins/discovery-gce/src/main/java/org/elasticsearch/cloud/gce/GceInstancesServiceImpl.java index 55f0292285135..5667de257d867 100644 --- a/plugins/discovery-gce/src/main/java/org/elasticsearch/cloud/gce/GceInstancesServiceImpl.java +++ b/plugins/discovery-gce/src/main/java/org/elasticsearch/cloud/gce/GceInstancesServiceImpl.java @@ -69,14 +69,19 @@ public Collection instances() { try { // hack around code messiness in GCE code // TODO: get this fixed - InstanceList instanceList = Access.doPrivilegedIOException(() -> { - Compute.Instances.List list = client().instances().list(project, zoneId); - return list.execute(); + return Access.doPrivilegedIOException(() -> { + String nextPageToken = null; + List zoneInstances = new ArrayList<>(); + do { + Compute.Instances.List list = client().instances().list(project, zoneId).setPageToken(nextPageToken); + InstanceList instanceList = list.execute(); + nextPageToken = instanceList.getNextPageToken(); + if (instanceList.isEmpty() == false && instanceList.getItems() != null) { + zoneInstances.addAll(instanceList.getItems()); + } + } while (nextPageToken != null); + return zoneInstances; }); - // assist type inference - return instanceList.isEmpty() || instanceList.getItems() == null - ? 
Collections.emptyList() - : instanceList.getItems(); } catch (IOException e) { logger.warn((Supplier) () -> new ParameterizedMessage("Problem fetching instance list for zone {}", zoneId), e); logger.debug("Full exception:", e); diff --git a/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/GceDiscoveryTests.java b/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/GceDiscoveryTests.java index a32f54638f8d6..f363b0bd2bc94 100644 --- a/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/GceDiscoveryTests.java +++ b/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/GceDiscoveryTests.java @@ -272,4 +272,17 @@ public void testMetadataServerValues() { List dynamicHosts = buildDynamicNodes(mock, nodeSettings); assertThat(dynamicHosts, hasSize(1)); } + + public void testNodesWithPagination() { + Settings nodeSettings = Settings.builder() + .put(GceInstancesServiceImpl.PROJECT_SETTING.getKey(), projectName) + .put(GceInstancesServiceImpl.ZONE_SETTING.getKey(), "europe-west1-b") + .putList(GceSeedHostsProvider.TAGS_SETTING.getKey(), "elasticsearch", "dev") + .build(); + mock = new GceInstancesServiceMock(nodeSettings); + List dynamicHosts = buildDynamicNodes(mock, nodeSettings); + assertThat(dynamicHosts, hasSize(2)); + assertEquals("10.240.79.59", dynamicHosts.get(0).getAddress()); + assertEquals("10.240.79.60", dynamicHosts.get(1).getAddress()); + } } diff --git a/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/GceMockUtils.java b/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/GceMockUtils.java index 889228ac838a6..f2833fda8a0c5 100644 --- a/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/GceMockUtils.java +++ b/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/GceMockUtils.java @@ -67,7 +67,7 @@ public static String readGoogleApiJsonResponse(String url) throws IOException { private static String readJsonResponse(String url, String urlRoot) throws IOException { // We extract from the url the mock file path we want to use - String mockFileName = Strings.replace(url, urlRoot, ""); + String mockFileName = Strings.replace(url, urlRoot, "").replace("?", "%3F"); URL resource = GceMockUtils.class.getResource(mockFileName); if (resource == null) { diff --git a/plugins/discovery-gce/src/test/resources/org/elasticsearch/discovery/gce/compute/v1/projects/nodeswithpagination/zones/europe-west1-b/instances b/plugins/discovery-gce/src/test/resources/org/elasticsearch/discovery/gce/compute/v1/projects/nodeswithpagination/zones/europe-west1-b/instances new file mode 100644 index 0000000000000..e2fb8b6c21256 --- /dev/null +++ b/plugins/discovery-gce/src/test/resources/org/elasticsearch/discovery/gce/compute/v1/projects/nodeswithpagination/zones/europe-west1-b/instances @@ -0,0 +1,37 @@ +{ + "id": "dummy", + "items":[ + { + "description": "ES Node 1", + "id": "9309873766428965105", + "kind": "compute#instance", + "machineType": "n1-standard-1", + "name": "test1", + "networkInterfaces": [ + { + "accessConfigs": [ + { + "kind": "compute#accessConfig", + "name": "External NAT", + "natIP": "104.155.13.147", + "type": "ONE_TO_ONE_NAT" + } + ], + "name": "nic0", + "network": "default", + "networkIP": "10.240.79.59" + } + ], + "status": "RUNNING", + "tags": { + "fingerprint": "xA6QJb-rGtg=", + "items": [ + "elasticsearch", + "dev" + ] + }, + "zone": "europe-west1-b" + } + ], + "nextPageToken": "next-token" +} diff --git 
a/plugins/discovery-gce/src/test/resources/org/elasticsearch/discovery/gce/compute/v1/projects/nodeswithpagination/zones/europe-west1-b/instances%3FpageToken=next-token b/plugins/discovery-gce/src/test/resources/org/elasticsearch/discovery/gce/compute/v1/projects/nodeswithpagination/zones/europe-west1-b/instances%3FpageToken=next-token new file mode 100644 index 0000000000000..62bd2b2d8f4f8 --- /dev/null +++ b/plugins/discovery-gce/src/test/resources/org/elasticsearch/discovery/gce/compute/v1/projects/nodeswithpagination/zones/europe-west1-b/instances%3FpageToken=next-token @@ -0,0 +1,36 @@ +{ + "id": "dummy", + "items":[ + { + "description": "ES Node 2", + "id": "9309873766428965105", + "kind": "compute#instance", + "machineType": "n1-standard-1", + "name": "test2", + "networkInterfaces": [ + { + "accessConfigs": [ + { + "kind": "compute#accessConfig", + "name": "External NAT", + "natIP": "104.155.13.147", + "type": "ONE_TO_ONE_NAT" + } + ], + "name": "nic0", + "network": "default", + "networkIP": "10.240.79.60" + } + ], + "status": "RUNNING", + "tags": { + "fingerprint": "xA6QJb-rGtg=", + "items": [ + "elasticsearch", + "dev" + ] + }, + "zone": "europe-west1-b" + } + ] +} From d6aba55d3abf60d670398cb57c1af9ec15d9e3ae Mon Sep 17 00:00:00 2001 From: James Baiera Date: Tue, 15 Feb 2022 14:10:10 -0500 Subject: [PATCH 19/37] Simplify LocalExporter cleaner function to fix failing tests (#83812) LocalExporter must be initialized fully before it can be used in the CleanerService to clean up indices. Nothing about its local state is needed for cleaning indices, and I don't think anything about its initialization of monitoring resources is needed in order to delete old indices either. Waiting for initialization can be time consuming, and thus causes some test failures in the cleaner service. By slimming down the required state of the cleaner listener this should clear up some of the test failures surrounding it. 
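In outline (condensed from the diff below), the listener now gates cleanup on
cluster state alone rather than on the exporter's own lifecycle state:

    // Sketch: bail out based on cluster state, without waiting for the
    // exporter's initialization to complete.
    ClusterState clusterState = clusterService.state();
    if (clusterService.localNode() == null
        || clusterState == null
        || clusterState.blocks().hasGlobalBlockWithLevel(ClusterBlockLevel.METADATA_WRITE)) {
        logger.debug("exporter not ready");
        return;
    }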
--- .../exporter/local/LocalExporter.java | 78 +++++++++---------- .../AbstractIndicesCleanerTestCase.java | 8 -- 2 files changed, 39 insertions(+), 47 deletions(-) diff --git a/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/exporter/local/LocalExporter.java b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/exporter/local/LocalExporter.java index d71c9aac6167f..02a0f6b7f0a7f 100644 --- a/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/exporter/local/LocalExporter.java +++ b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/exporter/local/LocalExporter.java @@ -598,64 +598,64 @@ private boolean canUseWatcher() { @Override public void onCleanUpIndices(TimeValue retention) { - if (state.get() != State.RUNNING) { + ClusterState clusterState = clusterService.state(); + if (clusterService.localNode() == null + || clusterState == null + || clusterState.blocks().hasGlobalBlockWithLevel(ClusterBlockLevel.METADATA_WRITE)) { logger.debug("exporter not ready"); return; } - if (clusterService.state().nodes().isLocalNodeElectedMaster()) { + if (clusterState.nodes().isLocalNodeElectedMaster()) { // Reference date time will be compared to index.creation_date settings, // that's why it must be in UTC ZonedDateTime expiration = ZonedDateTime.now(ZoneOffset.UTC).minus(retention.millis(), ChronoUnit.MILLIS); logger.debug("cleaning indices [expiration={}, retention={}]", expiration, retention); - ClusterState clusterState = clusterService.state(); - if (clusterState != null) { - final long expirationTimeMillis = expiration.toInstant().toEpochMilli(); - final long currentTimeMillis = System.currentTimeMillis(); + final long expirationTimeMillis = expiration.toInstant().toEpochMilli(); + final long currentTimeMillis = System.currentTimeMillis(); - // list of index patterns that we clean up - final String[] indexPatterns = new String[] { ".monitoring-*" }; + // list of index patterns that we clean up + final String[] indexPatterns = new String[] { ".monitoring-*" }; - // Get the names of the current monitoring indices - final Set currents = MonitoredSystem.allSystems() - .map(s -> MonitoringTemplateUtils.indexName(dateTimeFormatter, s, currentTimeMillis)) - .collect(Collectors.toSet()); + // Get the names of the current monitoring indices + final Set currents = MonitoredSystem.allSystems() + .map(s -> MonitoringTemplateUtils.indexName(dateTimeFormatter, s, currentTimeMillis)) + .collect(Collectors.toSet()); - // avoid deleting the current alerts index, but feel free to delete older ones - currents.add(MonitoringTemplateRegistry.ALERTS_INDEX_TEMPLATE_NAME); + // avoid deleting the current alerts index, but feel free to delete older ones + currents.add(MonitoringTemplateRegistry.ALERTS_INDEX_TEMPLATE_NAME); - Set indices = new HashSet<>(); - for (ObjectObjectCursor index : clusterState.getMetadata().indices()) { - String indexName = index.key; + Set indices = new HashSet<>(); + for (ObjectObjectCursor index : clusterState.getMetadata().indices()) { + String indexName = index.key; - if (Regex.simpleMatch(indexPatterns, indexName)) { - // Never delete any "current" index (e.g., today's index or the most recent version no timestamp, like alerts) - if (currents.contains(indexName)) { - continue; - } + if (Regex.simpleMatch(indexPatterns, indexName)) { + // Never delete any "current" index (e.g., today's index or the most recent version no timestamp, like alerts) + if (currents.contains(indexName)) { + continue; + } - 
long creationDate = index.value.getCreationDate(); - if (creationDate <= expirationTimeMillis) { - if (logger.isDebugEnabled()) { - logger.debug( - "detected expired index [name={}, created={}, expired={}]", - indexName, - Instant.ofEpochMilli(creationDate).atZone(ZoneOffset.UTC), - expiration - ); - } - indices.add(indexName); + long creationDate = index.value.getCreationDate(); + if (creationDate <= expirationTimeMillis) { + if (logger.isDebugEnabled()) { + logger.debug( + "detected expired index [name={}, created={}, expired={}]", + indexName, + Instant.ofEpochMilli(creationDate).atZone(ZoneOffset.UTC), + expiration + ); } + indices.add(indexName); } } + } - if (indices.isEmpty() == false) { - logger.info("cleaning up [{}] old indices", indices.size()); - deleteIndices(indices); - } else { - logger.debug("no old indices found for clean up"); - } + if (indices.isEmpty() == false) { + logger.info("cleaning up [{}] old indices", indices.size()); + deleteIndices(indices); + } else { + logger.debug("no old indices found for clean up"); } } } diff --git a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/cleaner/AbstractIndicesCleanerTestCase.java b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/cleaner/AbstractIndicesCleanerTestCase.java index a78e7baab75f4..c3fcb7525775e 100644 --- a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/cleaner/AbstractIndicesCleanerTestCase.java +++ b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/cleaner/AbstractIndicesCleanerTestCase.java @@ -14,7 +14,6 @@ import org.elasticsearch.xpack.core.monitoring.exporter.MonitoringTemplateUtils; import org.elasticsearch.xpack.monitoring.exporter.Exporter; import org.elasticsearch.xpack.monitoring.exporter.Exporters; -import org.elasticsearch.xpack.monitoring.exporter.local.LocalExporter; import org.elasticsearch.xpack.monitoring.test.MonitoringIntegTestCase; import org.junit.Before; @@ -23,7 +22,6 @@ import java.util.Locale; import static org.elasticsearch.test.ESIntegTestCase.Scope.TEST; -import static org.hamcrest.Matchers.is; @ClusterScope(scope = TEST, numDataNodes = 0, numClientNodes = 0) public abstract class AbstractIndicesCleanerTestCase extends MonitoringIntegTestCase { @@ -40,7 +38,6 @@ public void setup() { cleanerService.setGlobalRetention(TimeValue.MAX_VALUE); } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/78737") public void testNothingToDelete() throws Exception { CleanerService.Listener listener = getListener(); listener.onCleanUpIndices(days(0)); @@ -107,7 +104,6 @@ public void testIgnoreCurrentTimestampedIndex() throws Exception { assertIndicesCount(1); } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/78862") public void testDeleteIndices() throws Exception { CleanerService.Listener listener = getListener(); @@ -167,10 +163,6 @@ protected CleanerService.Listener getListener() throws Exception { Exporters exporters = internalCluster().getInstance(Exporters.class, internalCluster().getMasterName()); for (Exporter exporter : exporters.getEnabledExporters()) { if (exporter instanceof CleanerService.Listener) { - // Ensure that the exporter is initialized. 
- if (exporter instanceof LocalExporter) { - assertBusy(() -> assertThat(((LocalExporter) exporter).isExporterReady(), is(true))); - } return (CleanerService.Listener) exporter; } } From c1aba1e109aecf376b44155b43dbc7f76cd540ec Mon Sep 17 00:00:00 2001 From: James Rodewig Date: Tue, 15 Feb 2022 15:24:33 -0500 Subject: [PATCH 20/37] [DOCS] Move tip for percolate query example (#83972) Moves a tip for the percolate query to the beginning of the example. --- docs/reference/query-dsl/percolate-query.asciidoc | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/docs/reference/query-dsl/percolate-query.asciidoc b/docs/reference/query-dsl/percolate-query.asciidoc index 684b0b571f149..55f1fd5c705f9 100644 --- a/docs/reference/query-dsl/percolate-query.asciidoc +++ b/docs/reference/query-dsl/percolate-query.asciidoc @@ -10,7 +10,13 @@ contains the document that will be used as query to match with the stored queries. [discrete] -=== Sample Usage +=== Sample usage + +TIP: To provide a simple example, this documentation uses one index, +`my-index-000001`, for both the percolate queries and documents. This setup can +work well when there are just a few percolate queries registered. For heavier +usage, we recommend you store queries and documents in separate indices. For +more details, refer to <>. Create an index with two fields: @@ -118,10 +124,6 @@ The above request will yield the following response: <2> The `_percolator_document_slot` field indicates which document has matched with this query. Useful when percolating multiple document simultaneously. -TIP: To provide a simple example, this documentation uses one index `my-index-000001` for both the percolate queries and documents. -This set-up can work well when there are just a few percolate queries registered. However, with heavier usage it is recommended -to store queries and documents in separate indices. Please see <> for more details. - [discrete] ==== Parameters From 1fe2b0d866feb3e38409298a7e9168d554bee693 Mon Sep 17 00:00:00 2001 From: James Rodewig Date: Tue, 15 Feb 2022 15:56:04 -0500 Subject: [PATCH 21/37] [DOCS] Fix percolate query headings (#83988) Fixes the heading levels for the percolate query doc so the on-page TOC displays correctly. --- docs/reference/query-dsl/percolate-query.asciidoc | 13 ++----------- 1 file changed, 2 insertions(+), 11 deletions(-) diff --git a/docs/reference/query-dsl/percolate-query.asciidoc b/docs/reference/query-dsl/percolate-query.asciidoc index 55f1fd5c705f9..24b951a46ed9d 100644 --- a/docs/reference/query-dsl/percolate-query.asciidoc +++ b/docs/reference/query-dsl/percolate-query.asciidoc @@ -9,8 +9,7 @@ stored in an index. The `percolate` query itself contains the document that will be used as query to match with the stored queries. -[discrete] -=== Sample usage +==== Sample usage TIP: To provide a simple example, this documentation uses one index, `my-index-000001`, for both the percolate queries and documents. This setup can @@ -124,7 +123,6 @@ The above request will yield the following response: <2> The `_percolator_document_slot` field indicates which document has matched with this query. Useful when percolating multiple document simultaneously. -[discrete] ==== Parameters The following parameters are required when percolating a document: @@ -150,7 +148,6 @@ In that case the `document` parameter can be substituted with the following para `preference`:: Optionally, preference to be used to fetch document to percolate. 
`version`:: Optionally, the expected version of the document to be fetched. -[discrete] ==== Percolating in a filter context In case you are not interested in the score, better performance can be expected by wrapping @@ -185,7 +182,6 @@ should be wrapped in a `constant_score` query or a `bool` query's filter clause. Note that the `percolate` query never gets cached by the query cache. -[discrete] ==== Percolating multiple documents The `percolate` query can match multiple documents simultaneously with the indexed percolator queries. @@ -267,14 +263,12 @@ GET /my-index-000001/_search <1> The `_percolator_document_slot` indicates that the first, second and last documents specified in the `percolate` query are matching with this query. -[discrete] ==== Percolating an Existing Document In order to percolate a newly indexed document, the `percolate` query can be used. Based on the response from an index request, the `_id` and other meta information can be used to immediately percolate the newly added document. -[discrete] ===== Example Based on the previous example. @@ -332,14 +326,12 @@ case the search request would fail with a version conflict error. The search response returned is identical as in the previous example. -[discrete] ==== Percolate query and highlighting The `percolate` query is handled in a special way when it comes to highlighting. The queries hits are used to highlight the document that is provided in the `percolate` query. Whereas with regular highlighting the query in the search request is used to highlight the hits. -[discrete] ===== Example This example is based on the mapping of the first example. @@ -557,7 +549,6 @@ The slightly different response: <1> The highlight fields have been prefixed with the document slot they belong to, in order to know which highlight field belongs to what document. -[discrete] ==== Specifying multiple percolate queries It is possible to specify multiple `percolate` queries in a single search request: @@ -643,7 +634,6 @@ The above search request returns a response similar to this: <1> The `_percolator_document_slot_query1` percolator slot field indicates that these matched slots are from the `percolate` query with `_name` parameter set to `query1`. -[discrete] [[how-it-works]] ==== How it Works Under the Hood @@ -691,6 +681,7 @@ a different index configuration, like the number of primary shards. [[percolate-query-notes]] ==== Notes + ===== Allow expensive queries Percolate queries will not be executed if <> is set to false. From a89d4c34cbd8e5b3b3879df3aec632e7494e3c35 Mon Sep 17 00:00:00 2001 From: Igor Motov Date: Tue, 15 Feb 2022 11:48:38 -1000 Subject: [PATCH 22/37] TSDB: Add time series aggs cancellation (#83492) Adds support for low-level cancelling time-series based aggregations before they reach the reduce phase. 
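The mechanism, in outline (condensed from the diff below): callers pass the
time-series searcher a list of cancellation checks as Runnables that throw if
the search should stop, and the searcher runs them every 2048 iterations:

    // Sketch: periodic cancellation checks during the tsid/timestamp walk.
    private static final int CHECK_CANCELLED_SCORER_INTERVAL = 1 << 11;

    private void checkCancelled() {
        for (Runnable r : cancellations) {
            r.run(); // e.g. () -> task.ensureNotCancelled()
        }
    }

    // inside the collection loop:
    if (++seen % CHECK_CANCELLED_SCORER_INTERVAL == 0) {
        checkCancelled();
    }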
Relates to #74660 --- .../search/SearchCancellationIT.java | 135 +++++++++++++++++- .../search/aggregations/AggregationPhase.java | 35 ++++- .../timeseries/TimeSeriesIndexSearcher.java | 18 ++- .../search/query/QueryPhase.java | 2 +- .../TimeSeriesCancellationTests.java | 128 +++++++++++++++++ .../TimeSeriesIndexSearcherTests.java | 3 +- .../aggregations/AggregatorTestCase.java | 4 +- 7 files changed, 315 insertions(+), 10 deletions(-) create mode 100644 server/src/test/java/org/elasticsearch/search/aggregations/timeseries/TimeSeriesCancellationTests.java diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/SearchCancellationIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/SearchCancellationIT.java index 9a800c2656c45..465c394403bef 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/SearchCancellationIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/SearchCancellationIT.java @@ -11,6 +11,7 @@ import org.apache.logging.log4j.LogManager; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.ActionFuture; +import org.elasticsearch.action.DocWriteRequest; import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksResponse; import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse; import org.elasticsearch.action.bulk.BulkRequestBuilder; @@ -28,6 +29,8 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.TimeValue; +import org.elasticsearch.index.IndexMode; +import org.elasticsearch.index.IndexSettings; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.plugins.PluginsService; import org.elasticsearch.rest.RestStatus; @@ -36,13 +39,16 @@ import org.elasticsearch.script.ScriptType; import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.ScriptedMetricAggregationBuilder; +import org.elasticsearch.search.aggregations.timeseries.TimeSeriesAggregationBuilder; import org.elasticsearch.search.lookup.LeafStoredFieldsLookup; import org.elasticsearch.tasks.Task; import org.elasticsearch.tasks.TaskCancelledException; import org.elasticsearch.tasks.TaskInfo; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.transport.TransportService; +import org.junit.BeforeClass; +import java.time.Instant; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; @@ -55,9 +61,12 @@ import java.util.concurrent.atomic.AtomicReference; import java.util.function.Function; +import static org.elasticsearch.index.IndexSettings.TIME_SERIES_END_TIME; +import static org.elasticsearch.index.IndexSettings.TIME_SERIES_START_TIME; import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; import static org.elasticsearch.index.query.QueryBuilders.scriptQuery; import static org.elasticsearch.search.SearchCancellationIT.ScriptedBlockPlugin.SEARCH_BLOCK_SCRIPT_NAME; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertFailures; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; import static org.hamcrest.Matchers.containsString; @@ -69,6 +78,13 @@ @ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.SUITE) public class SearchCancellationIT extends ESIntegTestCase { + private static boolean lowLevelCancellation; + + 
@BeforeClass + public static void init() { + lowLevelCancellation = randomBoolean(); + } + @Override protected Collection> nodePlugins() { return Collections.singleton(ScriptedBlockPlugin.class); @@ -76,7 +92,6 @@ protected Collection> nodePlugins() { @Override protected Settings nodeSettings(int nodeOrdinal, Settings otherSettings) { - boolean lowLevelCancellation = randomBoolean(); logger.info("Using lowLevelCancellation: {}", lowLevelCancellation); return Settings.builder() .put(super.nodeSettings(nodeOrdinal, otherSettings)) @@ -227,7 +242,12 @@ public void testCancellationDuringAggregation() throws Exception { new Script(ScriptType.INLINE, "mockscript", ScriptedBlockPlugin.COMBINE_SCRIPT_NAME, Collections.emptyMap()) ) .reduceScript( - new Script(ScriptType.INLINE, "mockscript", ScriptedBlockPlugin.REDUCE_SCRIPT_NAME, Collections.emptyMap()) + new Script( + ScriptType.INLINE, + "mockscript", + ScriptedBlockPlugin.REDUCE_BLOCK_SCRIPT_NAME, + Collections.emptyMap() + ) ) ) ) @@ -238,6 +258,80 @@ public void testCancellationDuringAggregation() throws Exception { ensureSearchWasCancelled(searchResponse); } + public void testCancellationDuringTimeSeriesAggregation() throws Exception { + List plugins = initBlockFactory(); + int numberOfShards = between(2, 5); + long now = Instant.now().toEpochMilli(); + int numberOfRefreshes = between(1, 5); + int numberOfDocsPerRefresh = numberOfShards * between(1500, 2000) / numberOfRefreshes; + assertAcked( + prepareCreate("test").setSettings( + Settings.builder() + .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, numberOfShards) + .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0) + .put(IndexSettings.MODE.getKey(), IndexMode.TIME_SERIES.name()) + .put(IndexMetadata.INDEX_ROUTING_PATH.getKey(), "dim") + .put(TIME_SERIES_START_TIME.getKey(), now) + .put(TIME_SERIES_END_TIME.getKey(), now + (long) numberOfRefreshes * numberOfDocsPerRefresh + 1) + .build() + ).setMapping(""" + { + "properties": { + "@timestamp": {"type": "date", "format": "epoch_millis"}, + "dim": {"type": "keyword", "time_series_dimension": true} + } + } + """) + ); + + for (int i = 0; i < numberOfRefreshes; i++) { + // Make sure we sometimes have a few segments + BulkRequestBuilder bulkRequestBuilder = client().prepareBulk().setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); + for (int j = 0; j < numberOfDocsPerRefresh; j++) { + bulkRequestBuilder.add( + client().prepareIndex("test") + .setOpType(DocWriteRequest.OpType.CREATE) + .setSource("@timestamp", now + (long) i * numberOfDocsPerRefresh + j, "val", (double) j, "dim", String.valueOf(i)) + ); + } + assertNoFailures(bulkRequestBuilder.get()); + } + + logger.info("Executing search"); + TimeSeriesAggregationBuilder timeSeriesAggregationBuilder = new TimeSeriesAggregationBuilder("test_agg"); + ActionFuture searchResponse = client().prepareSearch("test") + .setQuery(matchAllQuery()) + .addAggregation( + timeSeriesAggregationBuilder.subAggregation( + new ScriptedMetricAggregationBuilder("sub_agg").initScript( + new Script(ScriptType.INLINE, "mockscript", ScriptedBlockPlugin.INIT_SCRIPT_NAME, Collections.emptyMap()) + ) + .mapScript( + new Script(ScriptType.INLINE, "mockscript", ScriptedBlockPlugin.MAP_BLOCK_SCRIPT_NAME, Collections.emptyMap()) + ) + .combineScript( + new Script(ScriptType.INLINE, "mockscript", ScriptedBlockPlugin.COMBINE_SCRIPT_NAME, Collections.emptyMap()) + ) + .reduceScript( + new Script(ScriptType.INLINE, "mockscript", ScriptedBlockPlugin.REDUCE_FAIL_SCRIPT_NAME, Collections.emptyMap()) + ) + ) + ) + .execute(); + 
awaitForBlock(plugins); + cancelSearch(SearchAction.NAME); + disableBlocks(plugins); + + SearchPhaseExecutionException ex = expectThrows(SearchPhaseExecutionException.class, searchResponse::actionGet); + assertThat(ExceptionsHelper.status(ex), equalTo(RestStatus.BAD_REQUEST)); + logger.info("All shards failed with", ex); + if (lowLevelCancellation) { + // Ensure that we cancelled in TimeSeriesIndexSearcher and not in reduce phase + assertThat(ExceptionsHelper.stackTrace(ex), containsString("TimeSeriesIndexSearcher")); + } + + } + public void testCancellationOfScrollSearches() throws Exception { List plugins = initBlockFactory(); @@ -414,8 +508,11 @@ public static class ScriptedBlockPlugin extends MockScriptPlugin { static final String SEARCH_BLOCK_SCRIPT_NAME = "search_block"; static final String INIT_SCRIPT_NAME = "init"; static final String MAP_SCRIPT_NAME = "map"; + static final String MAP_BLOCK_SCRIPT_NAME = "map_block"; static final String COMBINE_SCRIPT_NAME = "combine"; static final String REDUCE_SCRIPT_NAME = "reduce"; + static final String REDUCE_FAIL_SCRIPT_NAME = "reduce_fail"; + static final String REDUCE_BLOCK_SCRIPT_NAME = "reduce_block"; static final String TERM_SCRIPT_NAME = "term"; private final AtomicInteger hits = new AtomicInteger(); @@ -449,10 +546,16 @@ public Map, Object>> pluginScripts() { this::nullScript, MAP_SCRIPT_NAME, this::nullScript, + MAP_BLOCK_SCRIPT_NAME, + this::mapBlockScript, COMBINE_SCRIPT_NAME, this::nullScript, - REDUCE_SCRIPT_NAME, + REDUCE_BLOCK_SCRIPT_NAME, this::blockScript, + REDUCE_SCRIPT_NAME, + this::termScript, + REDUCE_FAIL_SCRIPT_NAME, + this::reduceFailScript, TERM_SCRIPT_NAME, this::termScript ); @@ -474,6 +577,11 @@ private Object searchBlockScript(Map params) { return true; } + private Object reduceFailScript(Map params) { + fail("Shouldn't reach reduce"); + return true; + } + private Object nullScript(Map params) { return null; } @@ -483,7 +591,9 @@ private Object blockScript(Map params) { if (runnable != null) { runnable.run(); } - LogManager.getLogger(SearchCancellationIT.class).info("Blocking in reduce"); + if (shouldBlock.get()) { + LogManager.getLogger(SearchCancellationIT.class).info("Blocking in reduce"); + } hits.incrementAndGet(); try { assertBusy(() -> assertFalse(shouldBlock.get())); @@ -493,6 +603,23 @@ private Object blockScript(Map params) { return 42; } + private Object mapBlockScript(Map params) { + final Runnable runnable = beforeExecution.get(); + if (runnable != null) { + runnable.run(); + } + if (shouldBlock.get()) { + LogManager.getLogger(SearchCancellationIT.class).info("Blocking in map"); + } + hits.incrementAndGet(); + try { + assertBusy(() -> assertFalse(shouldBlock.get())); + } catch (Exception e) { + throw new RuntimeException(e); + } + return 1; + } + private Object termScript(Map params) { return 1; } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/AggregationPhase.java b/server/src/main/java/org/elasticsearch/search/aggregations/AggregationPhase.java index ffcc971eeda7a..ce28ab0499d54 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/AggregationPhase.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/AggregationPhase.java @@ -8,11 +8,14 @@ package org.elasticsearch.search.aggregations; import org.apache.lucene.search.Collector; +import org.elasticsearch.action.search.SearchShardTask; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.search.SearchService; import 
org.elasticsearch.search.aggregations.timeseries.TimeSeriesIndexSearcher; import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.search.profile.query.CollectorResult; import org.elasticsearch.search.profile.query.InternalProfileCollector; +import org.elasticsearch.search.query.QueryPhase; import java.io.IOException; import java.util.ArrayList; @@ -40,7 +43,7 @@ public void preProcess(SearchContext context) { } if (context.aggregations().factories().context() != null && context.aggregations().factories().context().isInSortOrderExecutionRequired()) { - TimeSeriesIndexSearcher searcher = new TimeSeriesIndexSearcher(context.searcher()); + TimeSeriesIndexSearcher searcher = new TimeSeriesIndexSearcher(context.searcher(), getCancellationChecks(context)); try { searcher.search(context.rewrittenQuery(), bucketCollector); } catch (IOException e) { @@ -55,6 +58,36 @@ public void preProcess(SearchContext context) { } } + private List getCancellationChecks(SearchContext context) { + List cancellationChecks = new ArrayList<>(); + if (context.lowLevelCancellation()) { + // This searching doesn't live beyond this phase, so we don't need to remove query cancellation + cancellationChecks.add(() -> { + final SearchShardTask task = context.getTask(); + if (task != null) { + task.ensureNotCancelled(); + } + }); + } + + boolean timeoutSet = context.scrollContext() == null + && context.timeout() != null + && context.timeout().equals(SearchService.NO_TIMEOUT) == false; + + if (timeoutSet) { + final long startTime = context.getRelativeTimeInMillis(); + final long timeout = context.timeout().millis(); + final long maxTime = startTime + timeout; + cancellationChecks.add(() -> { + final long time = context.getRelativeTimeInMillis(); + if (time > maxTime) { + throw new QueryPhase.TimeExceededException(); + } + }); + } + return cancellationChecks; + } + public void execute(SearchContext context) { if (context.aggregations() == null) { context.queryResult().aggregations(null); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/timeseries/TimeSeriesIndexSearcher.java b/server/src/main/java/org/elasticsearch/search/aggregations/timeseries/TimeSeriesIndexSearcher.java index 4837a291df98f..71ccf96fd6bc2 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/timeseries/TimeSeriesIndexSearcher.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/timeseries/TimeSeriesIndexSearcher.java @@ -37,22 +37,29 @@ * TODO: Convert it to use index sort instead of hard-coded tsid and timestamp values */ public class TimeSeriesIndexSearcher { + private static final int CHECK_CANCELLED_SCORER_INTERVAL = 1 << 11; // We need to delegate to the other searcher here as opposed to extending IndexSearcher and inheriting default implementations as the // IndexSearcher would most of the time be a ContextIndexSearcher that has important logic related to e.g. document-level security. 
private final IndexSearcher searcher; + private final List cancellations; - public TimeSeriesIndexSearcher(IndexSearcher searcher) { + public TimeSeriesIndexSearcher(IndexSearcher searcher, List cancellations) { this.searcher = searcher; + this.cancellations = cancellations; } public void search(Query query, BucketCollector bucketCollector) throws IOException { + int seen = 0; query = searcher.rewrite(query); Weight weight = searcher.createWeight(query, bucketCollector.scoreMode(), 1); // Create LeafWalker for each subreader List leafWalkers = new ArrayList<>(); for (LeafReaderContext leaf : searcher.getIndexReader().leaves()) { + if (++seen % CHECK_CANCELLED_SCORER_INTERVAL == 0) { + checkCancelled(); + } LeafBucketCollector leafCollector = bucketCollector.getLeafCollector(leaf); Scorer scorer = weight.scorer(leaf); if (scorer != null) { @@ -76,6 +83,9 @@ protected boolean lessThan(LeafWalker a, LeafWalker b) { // walkers are ordered by timestamp. while (populateQueue(leafWalkers, queue)) { do { + if (++seen % CHECK_CANCELLED_SCORER_INTERVAL == 0) { + checkCancelled(); + } LeafWalker walker = queue.top(); walker.collectCurrent(); if (walker.nextDoc() == DocIdSetIterator.NO_MORE_DOCS || walker.shouldPop()) { @@ -131,6 +141,12 @@ private boolean queueAllHaveTsid(PriorityQueue queue, BytesRef tsid) return true; } + private void checkCancelled() { + for (Runnable r : cancellations) { + r.run(); + } + } + private static class LeafWalker { private final LeafCollector collector; private final Bits liveDocs; diff --git a/server/src/main/java/org/elasticsearch/search/query/QueryPhase.java b/server/src/main/java/org/elasticsearch/search/query/QueryPhase.java index 0b72df78a510f..937378719ff81 100644 --- a/server/src/main/java/org/elasticsearch/search/query/QueryPhase.java +++ b/server/src/main/java/org/elasticsearch/search/query/QueryPhase.java @@ -267,5 +267,5 @@ private static boolean canEarlyTerminate(IndexReader reader, SortAndFormats sort return true; } - static class TimeExceededException extends RuntimeException {} + public static class TimeExceededException extends RuntimeException {} } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/timeseries/TimeSeriesCancellationTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/timeseries/TimeSeriesCancellationTests.java new file mode 100644 index 0000000000000..b66db7736a7ff --- /dev/null +++ b/server/src/test/java/org/elasticsearch/search/aggregations/timeseries/TimeSeriesCancellationTests.java @@ -0,0 +1,128 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ +package org.elasticsearch.search.aggregations.timeseries; + +import org.apache.lucene.document.Document; +import org.apache.lucene.document.NumericDocValuesField; +import org.apache.lucene.document.SortedDocValuesField; +import org.apache.lucene.index.IndexReader; +import org.apache.lucene.index.IndexWriterConfig; +import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.index.RandomIndexWriter; +import org.apache.lucene.search.IndexSearcher; +import org.apache.lucene.search.MatchAllDocsQuery; +import org.apache.lucene.search.ScoreMode; +import org.apache.lucene.search.Sort; +import org.apache.lucene.search.SortField; +import org.apache.lucene.store.Directory; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.cluster.metadata.DataStream; +import org.elasticsearch.core.internal.io.IOUtils; +import org.elasticsearch.index.mapper.TimeSeriesIdFieldMapper; +import org.elasticsearch.search.aggregations.BucketCollector; +import org.elasticsearch.search.aggregations.LeafBucketCollector; +import org.elasticsearch.search.internal.ContextIndexSearcher; +import org.elasticsearch.tasks.TaskCancelledException; +import org.elasticsearch.test.ESTestCase; +import org.junit.AfterClass; +import org.junit.BeforeClass; + +import java.io.IOException; +import java.util.List; +import java.util.concurrent.atomic.AtomicInteger; + +import static org.hamcrest.Matchers.equalTo; + +public class TimeSeriesCancellationTests extends ESTestCase { + + private static Directory dir; + private static IndexReader reader; + + @BeforeClass + public static void setup() throws IOException { + dir = newDirectory(); + IndexWriterConfig iwc = newIndexWriterConfig(); + iwc.setIndexSort( + new Sort( + new SortField(TimeSeriesIdFieldMapper.NAME, SortField.Type.STRING), + new SortField(DataStream.TimestampField.FIXED_TIMESTAMP_FIELD, SortField.Type.LONG) + ) + ); + RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwc); + indexRandomDocuments(iw, randomIntBetween(2048, 4096)); + iw.flush(); + reader = iw.getReader(); + iw.close(); + } + + private static void indexRandomDocuments(RandomIndexWriter w, int numDocs) throws IOException { + for (int i = 1; i <= numDocs; ++i) { + Document doc = new Document(); + String tsid = "tsid" + randomIntBetween(0, 30); + long time = randomNonNegativeLong(); + doc.add(new SortedDocValuesField(TimeSeriesIdFieldMapper.NAME, new BytesRef(tsid))); + doc.add(new NumericDocValuesField(DataStream.TimestampField.FIXED_TIMESTAMP_FIELD, time)); + w.addDocument(doc); + } + } + + @AfterClass + public static void cleanup() throws IOException { + IOUtils.close(reader, dir); + dir = null; + reader = null; + } + + public void testLowLevelCancellationActions() throws IOException { + ContextIndexSearcher searcher = new ContextIndexSearcher( + reader, + IndexSearcher.getDefaultSimilarity(), + IndexSearcher.getDefaultQueryCache(), + IndexSearcher.getDefaultQueryCachingPolicy(), + true + ); + TimeSeriesIndexSearcher timeSeriesIndexSearcher = new TimeSeriesIndexSearcher( + searcher, + List.of(() -> { throw new TaskCancelledException("Cancel"); }) + ); + CountingBucketCollector bc = new CountingBucketCollector(); + expectThrows(TaskCancelledException.class, () -> timeSeriesIndexSearcher.search(new MatchAllDocsQuery(), bc)); + // We count every segment and every record as 1 and break on 2048th iteration counting from 0 + // so we expect to see 2048 - number_of_segments - 1 (-1 is because we check before we collect) + assertThat(bc.count.get(), equalTo(Math.max(0, 2048 - 
reader.leaves().size() - 1))); + } + + public static class CountingBucketCollector extends BucketCollector { + public AtomicInteger count = new AtomicInteger(); + + @Override + public LeafBucketCollector getLeafCollector(LeafReaderContext ctx) throws IOException { + return new LeafBucketCollector() { + @Override + public void collect(int doc, long owningBucketOrd) throws IOException { + count.incrementAndGet(); + } + }; + } + + @Override + public void preCollection() throws IOException { + + } + + @Override + public void postCollection() throws IOException { + + } + + @Override + public ScoreMode scoreMode() { + return ScoreMode.COMPLETE; + } + } +} diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/timeseries/TimeSeriesIndexSearcherTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/timeseries/TimeSeriesIndexSearcherTests.java index 670a6b1f1d31d..7bc5a2522d55b 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/timeseries/TimeSeriesIndexSearcherTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/timeseries/TimeSeriesIndexSearcherTests.java @@ -34,6 +34,7 @@ import java.io.IOException; import java.io.UncheckedIOException; +import java.util.List; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.TimeUnit; @@ -85,7 +86,7 @@ public void testCollectInOrderAcrossSegments() throws IOException, InterruptedEx IndexReader reader = DirectoryReader.open(dir); IndexSearcher searcher = new IndexSearcher(reader); - TimeSeriesIndexSearcher indexSearcher = new TimeSeriesIndexSearcher(searcher); + TimeSeriesIndexSearcher indexSearcher = new TimeSeriesIndexSearcher(searcher, List.of()); BucketCollector collector = new BucketCollector() { diff --git a/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java b/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java index 369d07ab26446..dfdfd267373b5 100644 --- a/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java @@ -578,7 +578,7 @@ private A searchAndReduce( C a = createAggregator(builder, context); a.preCollection(); if (context.isInSortOrderExecutionRequired()) { - new TimeSeriesIndexSearcher(subSearcher).search(rewritten, a); + new TimeSeriesIndexSearcher(subSearcher, List.of()).search(rewritten, a); } else { Weight weight = subSearcher.createWeight(rewritten, ScoreMode.COMPLETE, 1f); subSearcher.search(weight, a); @@ -589,7 +589,7 @@ private A searchAndReduce( } else { root.preCollection(); if (context.isInSortOrderExecutionRequired()) { - new TimeSeriesIndexSearcher(searcher).search(rewritten, MultiBucketCollector.wrap(true, List.of(root))); + new TimeSeriesIndexSearcher(searcher, List.of()).search(rewritten, MultiBucketCollector.wrap(true, List.of(root))); } else { searcher.search(rewritten, MultiBucketCollector.wrap(true, List.of(root))); } From c33da22a77e7b8bd80619395d2e9c676b95ef13f Mon Sep 17 00:00:00 2001 From: James Baiera Date: Tue, 15 Feb 2022 17:00:47 -0500 Subject: [PATCH 23/37] Update YAML Rest tests to check for product header on all responses (#83290) This PR adds assertions to YAML Rest tests to ensure that product headers are always returned in rest responses. 
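In outline, the check applied to every response has the following shape (a
simplified, illustrative sketch of the DoSection.checkElasticProductHeader
method added below; the real version accumulates the unexpected values and
reports them via appendBadHeaders):

    import java.util.List;

    // Sketch only: verify the X-Elastic-Product header is present and correct.
    class ProductHeaderCheckSketch {
        static void assertProductHeader(List<String> productHeaders) {
            if (productHeaders.isEmpty()) {
                throw new AssertionError("Response is missing required X-Elastic-Product response header");
            }
            if (productHeaders.stream().noneMatch("Elasticsearch"::equals)) {
                throw new AssertionError("did not get expected product header [Elasticsearch], found " + productHeaders);
            }
        }
    }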
Additional work has been included to fix a number of misuses of ThreadContext, mostly because of stashing listeners without their accompanying contexts. BWC Rest tests have been disabled for a few cases while the fixes are backported. --- docs/changelog/83290.yaml | 5 ++ .../AbstractAsyncBulkByScrollAction.java | 4 +- .../test/cat.snapshots/10_basic.yml | 4 ++ .../test/snapshot.clone/10_basic.yml | 3 + .../test/snapshot.create/10_basic.yml | 3 + .../test/snapshot.get/10_basic.yml | 5 +- .../20_repository_uuid.yml | 5 ++ .../test/snapshot.restore/10_basic.yml | 3 + .../test/snapshot.status/10_basic.yml | 3 + .../rest-api-spec/test/tsdb/30_snapshot.yml | 4 ++ .../restore/RestoreClusterStateListener.java | 65 +++++++++++-------- .../TransportRestoreSnapshotAction.java | 7 +- .../service/ClusterApplierService.java | 4 +- .../snapshots/SnapshotsService.java | 4 +- .../rest/yaml/ClientYamlTestResponse.java | 15 +++-- .../test/rest/yaml/section/DoSection.java | 26 ++++++++ .../rest/yaml/section/DoSectionTests.java | 1 + .../ccr/action/TransportPutFollowAction.java | 3 +- .../ccr/action/TransportUnfollowAction.java | 9 ++- .../action/TransportXPackUsageAction.java | 53 +++++++-------- .../watcher/WatcherUsageTransportAction.java | 9 ++- 21 files changed, 162 insertions(+), 73 deletions(-) create mode 100644 docs/changelog/83290.yaml diff --git a/docs/changelog/83290.yaml b/docs/changelog/83290.yaml new file mode 100644 index 0000000000000..9b3bb8ef056e5 --- /dev/null +++ b/docs/changelog/83290.yaml @@ -0,0 +1,5 @@ +pr: 83290 +summary: Update YAML Rest tests to check for product header on all responses +area: Infra/REST API +type: enhancement +issues: [] diff --git a/modules/reindex/src/main/java/org/elasticsearch/reindex/AbstractAsyncBulkByScrollAction.java b/modules/reindex/src/main/java/org/elasticsearch/reindex/AbstractAsyncBulkByScrollAction.java index bab93e56b653f..beac9ab88c78c 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/reindex/AbstractAsyncBulkByScrollAction.java +++ b/modules/reindex/src/main/java/org/elasticsearch/reindex/AbstractAsyncBulkByScrollAction.java @@ -593,7 +593,7 @@ protected void finishHim(Exception failure) { */ protected void finishHim(Exception failure, List indexingFailures, List searchFailures, boolean timedOut) { logger.debug("[{}]: finishing without any catastrophic failures", task.getId()); - scrollSource.close(() -> { + scrollSource.close(threadPool.getThreadContext().preserveContext(() -> { if (failure == null) { BulkByScrollResponse response = buildResponse( timeValueNanos(System.nanoTime() - startTime.get()), @@ -605,7 +605,7 @@ protected void finishHim(Exception failure, List indexingFailures, List } else { listener.onFailure(failure); } - }); + })); } /** diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cat.snapshots/10_basic.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cat.snapshots/10_basic.yml index f7d60671c7e88..23860cb412722 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cat.snapshots/10_basic.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/cat.snapshots/10_basic.yml @@ -23,6 +23,10 @@ $/ --- "Test cat snapshots output": + - skip: + version: " - 8.1.99" + reason: "Pause BWC tests until #83290 is backported" + - do: snapshot.create_repository: repository: test_cat_snapshots_1 diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/snapshot.clone/10_basic.yml 
b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/snapshot.clone/10_basic.yml index fb289355e08fb..80e7139cd8df3 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/snapshot.clone/10_basic.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/snapshot.clone/10_basic.yml @@ -1,5 +1,8 @@ --- setup: + - skip: + version: " - 8.1.99" + reason: "Pause BWC tests until #83290 is backported" - do: snapshot.create_repository: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/snapshot.create/10_basic.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/snapshot.create/10_basic.yml index f7c522b712244..e060e7dff5bda 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/snapshot.create/10_basic.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/snapshot.create/10_basic.yml @@ -1,5 +1,8 @@ --- setup: + - skip: + version: " - 8.1.99" + reason: "Pause BWC tests until #83290 is backported" - do: snapshot.create_repository: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/snapshot.get/10_basic.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/snapshot.get/10_basic.yml index b50ece87e9f88..08753e4e732bf 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/snapshot.get/10_basic.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/snapshot.get/10_basic.yml @@ -1,5 +1,8 @@ --- setup: + - skip: + version: " - 8.1.99" + reason: "Pause BWC tests until #83290 is backported" - do: snapshot.create_repository: @@ -61,6 +64,7 @@ setup: --- "Get snapshot info when verbose is false": + - do: indices.create: index: test_index @@ -198,7 +202,6 @@ setup: - skip: version: " - 7.12.99" reason: "Introduced in 7.13.0" - - do: indices.create: index: test_index diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/snapshot.get_repository/20_repository_uuid.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/snapshot.get_repository/20_repository_uuid.yml index 0532d208d0cba..503c6cc7133de 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/snapshot.get_repository/20_repository_uuid.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/snapshot.get_repository/20_repository_uuid.yml @@ -1,4 +1,9 @@ --- +setup: + - skip: + version: " - 8.1.99" + reason: "Pause BWC tests until #83290 is backported" +--- "Get repository returns UUID": - skip: version: " - 7.12.99" diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/snapshot.restore/10_basic.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/snapshot.restore/10_basic.yml index 1ea5b542625e8..e91f38e985e43 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/snapshot.restore/10_basic.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/snapshot.restore/10_basic.yml @@ -1,5 +1,8 @@ --- setup: + - skip: + version: " - 8.1.99" + reason: "Pause BWC tests until #83290 is backported" - do: snapshot.create_repository: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/snapshot.status/10_basic.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/snapshot.status/10_basic.yml index c35f2419bdc91..2c4573ccd58b8 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/snapshot.status/10_basic.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/snapshot.status/10_basic.yml @@ -1,5 +1,8 @@ --- 
setup: + - skip: + version: " - 8.1.99" + reason: "Pause BWC tests until #83290 is backported" - do: snapshot.create_repository: diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/30_snapshot.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/30_snapshot.yml index 104b383ae811f..39c6dd4345bdf 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/30_snapshot.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/tsdb/30_snapshot.yml @@ -1,5 +1,9 @@ --- setup: + - skip: + version: " - 8.1.99" + reason: "Pause BWC tests until #83290 is backported" + - do: snapshot.create_repository: repository: test_repo diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/restore/RestoreClusterStateListener.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/restore/RestoreClusterStateListener.java index 2f3e92d2f55a9..c2931714e72a7 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/restore/RestoreClusterStateListener.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/restore/RestoreClusterStateListener.java @@ -16,10 +16,13 @@ import org.elasticsearch.cluster.RestoreInProgress; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.collect.ImmutableOpenMap; +import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.snapshots.RestoreInfo; import org.elasticsearch.snapshots.RestoreService; +import java.util.function.Supplier; + import static org.elasticsearch.snapshots.RestoreService.restoreInProgress; public class RestoreClusterStateListener implements ClusterStateListener { @@ -29,43 +32,48 @@ public class RestoreClusterStateListener implements ClusterStateListener { private final ClusterService clusterService; private final String uuid; private final ActionListener listener; + private final Supplier contextSupplier; private RestoreClusterStateListener( ClusterService clusterService, RestoreService.RestoreCompletionResponse response, - ActionListener listener + ActionListener listener, + Supplier contextSupplier ) { this.clusterService = clusterService; this.uuid = response.getUuid(); this.listener = listener; + this.contextSupplier = contextSupplier; } @Override public void clusterChanged(ClusterChangedEvent changedEvent) { - final RestoreInProgress.Entry prevEntry = restoreInProgress(changedEvent.previousState(), uuid); - final RestoreInProgress.Entry newEntry = restoreInProgress(changedEvent.state(), uuid); - if (prevEntry == null) { - // When there is a master failure after a restore has been started, this listener might not be registered - // on the current master and as such it might miss some intermediary cluster states due to batching. - // Clean up listener in that case and acknowledge completion of restore operation to client. 
- clusterService.removeListener(this); - listener.onResponse(new RestoreSnapshotResponse((RestoreInfo) null)); - } else if (newEntry == null) { - clusterService.removeListener(this); - ImmutableOpenMap shards = prevEntry.shards(); - assert prevEntry.state().completed() : "expected completed snapshot state but was " + prevEntry.state(); - assert RestoreService.completed(shards) : "expected all restore entries to be completed"; - RestoreInfo ri = new RestoreInfo( - prevEntry.snapshot().getSnapshotId().getName(), - prevEntry.indices(), - shards.size(), - shards.size() - RestoreService.failedShards(shards) - ); - RestoreSnapshotResponse response = new RestoreSnapshotResponse(ri); - logger.debug("restore of [{}] completed", prevEntry.snapshot().getSnapshotId()); - listener.onResponse(response); - } else { - // restore not completed yet, wait for next cluster state update + try (ThreadContext.StoredContext stored = contextSupplier.get()) { + final RestoreInProgress.Entry prevEntry = restoreInProgress(changedEvent.previousState(), uuid); + final RestoreInProgress.Entry newEntry = restoreInProgress(changedEvent.state(), uuid); + if (prevEntry == null) { + // When there is a master failure after a restore has been started, this listener might not be registered + // on the current master and as such it might miss some intermediary cluster states due to batching. + // Clean up listener in that case and acknowledge completion of restore operation to client. + clusterService.removeListener(this); + listener.onResponse(new RestoreSnapshotResponse((RestoreInfo) null)); + } else if (newEntry == null) { + clusterService.removeListener(this); + ImmutableOpenMap shards = prevEntry.shards(); + assert prevEntry.state().completed() : "expected completed snapshot state but was " + prevEntry.state(); + assert RestoreService.completed(shards) : "expected all restore entries to be completed"; + RestoreInfo ri = new RestoreInfo( + prevEntry.snapshot().getSnapshotId().getName(), + prevEntry.indices(), + shards.size(), + shards.size() - RestoreService.failedShards(shards) + ); + RestoreSnapshotResponse response = new RestoreSnapshotResponse(ri); + logger.debug("restore of [{}] completed", prevEntry.snapshot().getSnapshotId()); + listener.onResponse(response); + } else { + // restore not completed yet, wait for next cluster state update + } } } @@ -76,8 +84,11 @@ public void clusterChanged(ClusterChangedEvent changedEvent) { public static void createAndRegisterListener( ClusterService clusterService, RestoreService.RestoreCompletionResponse response, - ActionListener listener + ActionListener listener, + ThreadContext threadContext ) { - clusterService.addListener(new RestoreClusterStateListener(clusterService, response, listener)); + clusterService.addListener( + new RestoreClusterStateListener(clusterService, response, listener, threadContext.newRestorableContext(true)) + ); } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/restore/TransportRestoreSnapshotAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/restore/TransportRestoreSnapshotAction.java index 7b247f1b14a42..73b66fa5d1bb5 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/restore/TransportRestoreSnapshotAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/restore/TransportRestoreSnapshotAction.java @@ -72,7 +72,12 @@ protected void masterOperation( ) { restoreService.restoreSnapshot(request, 
listener.delegateFailure((delegatedListener, restoreCompletionResponse) -> { if (restoreCompletionResponse.getRestoreInfo() == null && request.waitForCompletion()) { - RestoreClusterStateListener.createAndRegisterListener(clusterService, restoreCompletionResponse, delegatedListener); + RestoreClusterStateListener.createAndRegisterListener( + clusterService, + restoreCompletionResponse, + delegatedListener, + threadPool.getThreadContext() + ); } else { delegatedListener.onResponse(new RestoreSnapshotResponse(restoreCompletionResponse.getRestoreInfo())); } diff --git a/server/src/main/java/org/elasticsearch/cluster/service/ClusterApplierService.java b/server/src/main/java/org/elasticsearch/cluster/service/ClusterApplierService.java index 66d5428d5d135..122659c64422e 100644 --- a/server/src/main/java/org/elasticsearch/cluster/service/ClusterApplierService.java +++ b/server/src/main/java/org/elasticsearch/cluster/service/ClusterApplierService.java @@ -213,7 +213,7 @@ public void removeApplier(ClusterStateApplier applier) { } /** - * Add a listener for updated cluster states + * Add a listener for updated cluster states. Listeners are executed in the system thread context. */ public void addListener(ClusterStateListener listener) { clusterStateListeners.add(listener); @@ -222,7 +222,7 @@ public void addListener(ClusterStateListener listener) { /** * Removes a listener for updated cluster states. */ - public void removeListener(ClusterStateListener listener) { + public void removeListener(final ClusterStateListener listener) { clusterStateListeners.remove(listener); } diff --git a/server/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java b/server/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java index 35f88ddef3ea9..a0384b9efcb43 100644 --- a/server/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java +++ b/server/src/main/java/org/elasticsearch/snapshots/SnapshotsService.java @@ -21,6 +21,7 @@ import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotRequest; import org.elasticsearch.action.admin.cluster.snapshots.delete.DeleteSnapshotRequest; import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.ContextPreservingActionListener; import org.elasticsearch.action.support.GroupedActionListener; import org.elasticsearch.action.support.master.TransportMasterNodeAction; import org.elasticsearch.cluster.ClusterChangedEvent; @@ -2957,7 +2958,8 @@ static Map filterDataStreamAliases( * @param listener listener */ private void addListener(Snapshot snapshot, ActionListener> listener) { - snapshotCompletionListeners.computeIfAbsent(snapshot, k -> new CopyOnWriteArrayList<>()).add(listener); + snapshotCompletionListeners.computeIfAbsent(snapshot, k -> new CopyOnWriteArrayList<>()) + .add(ContextPreservingActionListener.wrapPreservingContext(listener, threadPool.getThreadContext())); } @Override diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlTestResponse.java b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlTestResponse.java index bdd8ba9dab1df..86121fa0d7da0 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlTestResponse.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlTestResponse.java @@ -87,13 +87,20 @@ public String getReasonPhrase() { * Get a list of all of the values of all warning headers returned in the response. 
  */
     public List<String> getWarningHeaders() {
-        List<String> warningHeaders = new ArrayList<>();
+        return getHeaders("Warning");
+    }
+
+    /**
+     * Get a list of all the values of a given header returned in the response.
+     */
+    public List<String> getHeaders(String name) {
+        List<String> headers = new ArrayList<>();
         for (Header header : response.getHeaders()) {
-            if (header.getName().equals("Warning")) {
-                warningHeaders.add(header.getValue());
+            if (header.getName().equalsIgnoreCase(name)) {
+                headers.add(header.getValue());
             }
         }
-        return warningHeaders;
+        return headers;
     }

     /**
diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/DoSection.java b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/DoSection.java
index 23a7146561da9..efc53b08fad27 100644
--- a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/DoSection.java
+++ b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/DoSection.java
@@ -367,6 +367,7 @@ public void execute(ClientYamlTestExecutionContext executionContext) throws IOEx
             final String testPath = executionContext.getClientYamlTestCandidate() != null
                 ? executionContext.getClientYamlTestCandidate().getTestPath()
                 : null;
+            checkElasticProductHeader(response.getHeaders("X-elastic-product"));
             checkWarningHeaders(response.getWarningHeaders(), testPath);
         } catch (ClientYamlTestResponseException e) {
             ClientYamlTestResponse restTestResponse = e.getRestTestResponse();
@@ -392,6 +393,31 @@ public void execute(ClientYamlTestExecutionContext executionContext) throws IOEx
         }
     }

+    void checkElasticProductHeader(final List<String> productHeaders) {
+        if (productHeaders.isEmpty()) {
+            fail("Response is missing required X-Elastic-Product response header");
+        }
+        boolean headerPresent = false;
+        final List<String> unexpected = new ArrayList<>();
+        for (String header : productHeaders) {
+            if (header.equals("Elasticsearch")) {
+                headerPresent = true;
+                break;
+            } else {
+                unexpected.add(header);
+            }
+        }
+        if (headerPresent == false) {
+            StringBuilder failureMessage = new StringBuilder();
+            appendBadHeaders(
+                failureMessage,
+                unexpected,
+                "did not get expected product header [Elasticsearch], found header" + (unexpected.size() > 1 ?
"s" : "") + ); + fail(failureMessage.toString()); + } + } + void checkWarningHeaders(final List warningHeaders) { checkWarningHeaders(warningHeaders, null); } diff --git a/test/framework/src/test/java/org/elasticsearch/test/rest/yaml/section/DoSectionTests.java b/test/framework/src/test/java/org/elasticsearch/test/rest/yaml/section/DoSectionTests.java index fdd3451012d5c..b7238588ffe36 100644 --- a/test/framework/src/test/java/org/elasticsearch/test/rest/yaml/section/DoSectionTests.java +++ b/test/framework/src/test/java/org/elasticsearch/test/rest/yaml/section/DoSectionTests.java @@ -605,6 +605,7 @@ public void testNodeSelectorByVersion() throws IOException { doSection.getApiCallSection().getNodeSelector() ) ).thenReturn(mockResponse); + when(mockResponse.getHeaders("X-elastic-product")).thenReturn(List.of("Elasticsearch")); doSection.execute(context); verify(context).callApi( "indices.get_field_mapping", diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportPutFollowAction.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportPutFollowAction.java index 1661585b5062f..e6053ce1ff818 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportPutFollowAction.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportPutFollowAction.java @@ -268,7 +268,8 @@ public void onFailure(Exception e) { assert restoreInfo.failedShards() > 0 : "Should have failed shards"; delegatedListener.onResponse(new PutFollowAction.Response(true, false, false)); } - }) + }), + threadPool.getThreadContext() ); } diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportUnfollowAction.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportUnfollowAction.java index 1f775b97ee4d0..e76154ee5f470 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportUnfollowAction.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportUnfollowAction.java @@ -16,6 +16,7 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.ContextPreservingActionListener; import org.elasticsearch.action.support.GroupedActionListener; import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.action.support.master.AcknowledgedTransportMasterNodeAction; @@ -178,10 +179,16 @@ private void removeRetentionLeaseForShard( ) { logger.trace("{} removing retention lease [{}] while unfollowing leader index", followerShardId, retentionLeaseId); final ThreadContext threadContext = threadPool.getThreadContext(); + // We're about to stash the thread context for this retention lease removal. The listener will be completed while the + // context is stashed. The context needs to be restored in the listener when it is completing or else it is simply wiped. 
+ final ActionListener preservedListener = new ContextPreservingActionListener<>( + threadContext.newRestorableContext(true), + listener + ); try (ThreadContext.StoredContext ignore = threadPool.getThreadContext().stashContext()) { // we have to execute under the system context so that if security is enabled the removal is authorized threadContext.markAsSystemContext(); - CcrRetentionLeases.asyncRemoveRetentionLease(leaderShardId, retentionLeaseId, remoteClient, listener); + CcrRetentionLeases.asyncRemoveRetentionLease(leaderShardId, retentionLeaseId, remoteClient, preservedListener); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/TransportXPackUsageAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/TransportXPackUsageAction.java index 959ffc448f548..6a9d00e62e975 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/TransportXPackUsageAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/TransportXPackUsageAction.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.core.action; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.ActionRunnable; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.master.TransportMasterNodeAction; import org.elasticsearch.client.internal.node.NodeClient; @@ -20,15 +21,9 @@ import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.XPackFeatureSet; -import org.elasticsearch.xpack.core.XPackFeatureSet.Usage; -import org.elasticsearch.xpack.core.common.IteratingActionListener; import java.util.ArrayList; -import java.util.Collections; import java.util.List; -import java.util.concurrent.atomic.AtomicInteger; -import java.util.concurrent.atomic.AtomicReferenceArray; -import java.util.function.BiConsumer; public class TransportXPackUsageAction extends TransportMasterNodeAction { @@ -66,32 +61,28 @@ protected List usageActions() { @Override protected void masterOperation(Task task, XPackUsageRequest request, ClusterState state, ActionListener listener) { - final ActionListener> usageActionListener = listener.delegateFailure( - (l, usages) -> l.onResponse(new XPackUsageResponse(usages)) - ); - final AtomicReferenceArray featureSetUsages = new AtomicReferenceArray<>(usageActions.size()); - final AtomicInteger position = new AtomicInteger(0); - final BiConsumer>> consumer = (featureUsageAction, iteratingListener) -> { - // Since we're executing the actions locally we should create a new request - // to avoid mutating the original request and setting the wrong parent task, - // since it is possible that the parent task gets cancelled and new child tasks are banned. 
- final XPackUsageRequest childRequest = new XPackUsageRequest(); - childRequest.setParentTask(request.getParentTask()); - client.executeLocally(featureUsageAction, childRequest, iteratingListener.delegateFailure((l, usageResponse) -> { - featureSetUsages.set(position.getAndIncrement(), usageResponse.getUsage()); - // the value sent back doesn't matter since our predicate keeps iterating - l.onResponse(Collections.emptyList()); - })); - }; - IteratingActionListener, XPackUsageFeatureAction> iteratingActionListener = - new IteratingActionListener<>(usageActionListener, consumer, usageActions, threadPool.getThreadContext(), (ignore) -> { - final List usageList = new ArrayList<>(featureSetUsages.length()); - for (int i = 0; i < featureSetUsages.length(); i++) { - usageList.add(featureSetUsages.get(i)); + new ActionRunnable<>(listener) { + final List responses = new ArrayList<>(usageActions.size()); + + @Override + protected void doRun() { + if (responses.size() < usageActions().size()) { + final var childRequest = new XPackUsageRequest(); + childRequest.setParentTask(request.getParentTask()); + client.executeLocally( + usageActions.get(responses.size()), + childRequest, + listener.delegateFailure((delegate, response) -> { + responses.add(response.getUsage()); + run(); // XPackUsageFeatureTransportAction always forks to MANAGEMENT so no risk of stack overflow here + }) + ); + } else { + assert responses.size() == usageActions.size() : responses.size() + " vs " + usageActions.size(); + listener.onResponse(new XPackUsageResponse(responses)); } - return usageList; - }, (ignore) -> true); - iteratingActionListener.run(); + } + }.run(); } @Override diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/WatcherUsageTransportAction.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/WatcherUsageTransportAction.java index cf4a178ba85fa..97f47e13abb7d 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/WatcherUsageTransportAction.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/WatcherUsageTransportAction.java @@ -8,6 +8,7 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.ContextPreservingActionListener; import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; @@ -75,6 +76,10 @@ protected void masterOperation( ActionListener listener ) { if (enabled) { + ActionListener preservingListener = ContextPreservingActionListener.wrapPreservingContext( + listener, + client.threadPool().getThreadContext() + ); try (ThreadContext.StoredContext ignore = client.threadPool().getThreadContext().stashWithOrigin(WATCHER_ORIGIN)) { WatcherStatsRequest statsRequest = new WatcherStatsRequest(); statsRequest.includeStats(true); @@ -91,8 +96,8 @@ protected void masterOperation( true, mergedCounters.toNestedMap() ); - listener.onResponse(new XPackUsageFeatureResponse(usage)); - }, listener::onFailure)); + preservingListener.onResponse(new XPackUsageFeatureResponse(usage)); + }, preservingListener::onFailure)); } } else { WatcherFeatureSetUsage usage = new WatcherFeatureSetUsage( From 2aab7cc8680d47880043f119724305f8392cd4ca Mon Sep 17 00:00:00 2001 From: Mark Vieira Date: Tue, 15 Feb 2022 14:40:21 -0800 Subject: [PATCH 24/37] Add CI matrix configuration for snapshot BWC versions (#83990) --- .ci/snapshotBwcVersions 
| 5 +++++ build.gradle | 31 +++++++++++++++++++------------ 2 files changed, 24 insertions(+), 12 deletions(-) create mode 100644 .ci/snapshotBwcVersions diff --git a/.ci/snapshotBwcVersions b/.ci/snapshotBwcVersions new file mode 100644 index 0000000000000..3fdb4a121405a --- /dev/null +++ b/.ci/snapshotBwcVersions @@ -0,0 +1,5 @@ +BWC_VERSION: + - "7.17.1" + - "8.0.1" + - "8.1.0" + - "8.2.0" diff --git a/build.gradle b/build.gradle index 120fadf16b31d..d62c6358e4cd4 100644 --- a/build.gradle +++ b/build.gradle @@ -68,17 +68,28 @@ ext.testArtifact = { p, String name = "test" -> } tasks.register("updateCIBwcVersions") { - doLast { - File yml = file(".ci/bwcVersions") - yml.text = "" - yml << "BWC_VERSION:\n" - BuildParams.bwcVersions.indexCompatible.each { - yml << " - \"$it\"\n" + def writeVersions = { File file, List versions -> + file.text = "" + file << "BWC_VERSION:\n" + versions.each { + file << " - \"$it\"\n" } } + doLast { + writeVersions(file(".ci/bwcVersions"), BuildParams.bwcVersions.indexCompatible) + writeVersions(file(".ci/snapshotBwcVersions"), BuildParams.bwcVersions.unreleasedIndexCompatible) + } } tasks.register("verifyVersions") { + def verifyCiYaml = { File file, List versions -> + String ciYml = file.text + versions.each { + if (ciYml.contains("\"$it\"\n") == false) { + throw new Exception("${file} is outdated, run `./gradlew updateCIBwcVersions` and check in the results") + } + } + } doLast { if (gradle.startParameter.isOffline()) { throw new GradleException("Must run in online mode to verify versions") @@ -94,12 +105,8 @@ tasks.register("verifyVersions") { .collect { Version.fromString(it) } ) } - String ciYml = file(".ci/bwcVersions").text - BuildParams.bwcVersions.indexCompatible.each { - if (ciYml.contains("\"$it\"\n") == false) { - throw new Exception(".ci/bwcVersions is outdated, run `./gradlew updateCIBwcVersions` and check in the results"); - } - } + verifyCiYaml(file(".ci/bwcVersions"), BuildParams.bwcVersions.indexCompatible) + verifyCiYaml(file(".ci/snapshotBwcVersions"), BuildParams.bwcVersions.unreleasedIndexCompatible) // Make sure backport bot config file is up to date JsonNode backportConfig = new ObjectMapper().readTree(file(".backportrc.json")) From e8b34c720da7bb25f9048ef51dd0b8f696862705 Mon Sep 17 00:00:00 2001 From: James Rodewig Date: Tue, 15 Feb 2022 18:20:22 -0500 Subject: [PATCH 25/37] [DOCS] Re-add HTTP proxy setings from #82737 (#84001) Re-adds HTTP proxy settings for the GCS repository type. These settings were added with https://github.com/elastic/elasticsearch/pull/82737. The docs were accidentally removed as part of https://github.com/elastic/elasticsearch/pull/82996. --- .../reference/snapshot-restore/repository-gcs.asciidoc | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/docs/reference/snapshot-restore/repository-gcs.asciidoc b/docs/reference/snapshot-restore/repository-gcs.asciidoc index 3a036e1487972..37dfe2add0b18 100644 --- a/docs/reference/snapshot-restore/repository-gcs.asciidoc +++ b/docs/reference/snapshot-restore/repository-gcs.asciidoc @@ -191,6 +191,16 @@ are marked as `Secure`. can be specified explicitly. For example, it can be used to switch between projects when the same credentials are usable for both the production and the development projects. +`proxy.host`:: + Host name of a proxy to connect to the Google Cloud Storage through. + +`proxy.port`:: + Port of a proxy to connect to the Google Cloud Storage through. + +`proxy.type`:: + Proxy type for the client. 
Supported values are `direct` (no proxy),
+  `http`, and `socks`. Defaults to `direct`.
+
 [[repository-gcs-repository]]
 ==== Repository settings

From 2db116a69722c71f6ef3c452e7bbf554614c1038 Mon Sep 17 00:00:00 2001
From: Ryan Ernst
Date: Tue, 15 Feb 2022 16:22:45 -0800
Subject: [PATCH 26/37] Make action names available in NodeClient (#83919)

The actions available in NodeClient are registered on node startup and
hidden to callers. However, the operator privileges feature needs to verify
that all actions are classified as either operator or non-operator actions.
Currently the test for this uses reflection hacks to make the internal
action objects available.

This commit makes the action names available as a public method on
NodeClient, so that the reflection hacks are no longer necessary. It would
be nice to expose this in a test-specific way, but the code in question
actually serves the action names up in a REST API, so as far as the server
is concerned it is not test code that needs the action names.
---
 .../client/internal/node/NodeClient.java      |  8 ++++++
 .../actions/RestGetActionsAction.java         | 25 +------------------
 2 files changed, 9 insertions(+), 24 deletions(-)

diff --git a/server/src/main/java/org/elasticsearch/client/internal/node/NodeClient.java b/server/src/main/java/org/elasticsearch/client/internal/node/NodeClient.java
index 99ab7b6519a79..4c4bfb4dae799 100644
--- a/server/src/main/java/org/elasticsearch/client/internal/node/NodeClient.java
+++ b/server/src/main/java/org/elasticsearch/client/internal/node/NodeClient.java
@@ -26,6 +26,7 @@ import org.elasticsearch.transport.RemoteClusterService;
 import org.elasticsearch.transport.Transport;

+import java.util.List;
 import java.util.Map;
 import java.util.function.Supplier;

@@ -67,6 +68,13 @@ public void initialize(
         this.namedWriteableRegistry = namedWriteableRegistry;
     }

+    /**
+     * Return the names of all available actions registered with this client.
+     */
+    public List<String> getActionNames() {
+        return actions.keySet().stream().map(ActionType::name).toList();
+    }
+
     @Override
     public void close() {
         // nothing really to do
diff --git a/x-pack/plugin/security/qa/operator-privileges-tests/src/main/java/org/elasticsearch/xpack/security/operator/actions/RestGetActionsAction.java b/x-pack/plugin/security/qa/operator-privileges-tests/src/main/java/org/elasticsearch/xpack/security/operator/actions/RestGetActionsAction.java
index 8dd65407ce81d..76b416bc56c42 100644
--- a/x-pack/plugin/security/qa/operator-privileges-tests/src/main/java/org/elasticsearch/xpack/security/operator/actions/RestGetActionsAction.java
+++ b/x-pack/plugin/security/qa/operator-privileges-tests/src/main/java/org/elasticsearch/xpack/security/operator/actions/RestGetActionsAction.java
@@ -7,22 +7,13 @@

 package org.elasticsearch.xpack.security.operator.actions;

-import org.elasticsearch.ElasticsearchException;
-import org.elasticsearch.action.ActionType;
-import org.elasticsearch.action.support.TransportAction;
 import org.elasticsearch.client.internal.node.NodeClient;
-import org.elasticsearch.core.SuppressForbidden;
 import org.elasticsearch.rest.BaseRestHandler;
 import org.elasticsearch.rest.RestRequest;
 import org.elasticsearch.rest.action.RestToXContentListener;

 import java.io.IOException;
-import java.lang.reflect.Field;
-import java.security.AccessController;
-import java.security.PrivilegedAction;
 import java.util.List;
-import java.util.Map;
-import java.util.stream.Collectors;

 import static org.elasticsearch.rest.RestRequest.Method.GET;

@@ -37,23 +28,9 @@ public String getName() {
         return "test_get_actions";
     }

-    @SuppressForbidden(reason = "Use reflection for testing only")
-    @SuppressWarnings({ "rawtypes", "unchecked" })
     @Override
     protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) throws IOException {
-        final Map<ActionType, TransportAction> actions = AccessController.doPrivileged(
-            (PrivilegedAction<Map<ActionType, TransportAction>>) () -> {
-                try {
-                    final Field actionsField = client.getClass().getDeclaredField("actions");
-                    actionsField.setAccessible(true);
-                    return (Map<ActionType, TransportAction>) actionsField.get(client);
-                } catch (NoSuchFieldException | IllegalAccessException e) {
-                    throw new ElasticsearchException(e);
-                }
-            }
-        );
-
-        final List<String> actionNames = actions.keySet().stream().map(ActionType::name).collect(Collectors.toList());
+        final List<String> actionNames = client.getActionNames();
         return channel -> new RestToXContentListener<>(channel).onResponse(
             (builder, params) -> builder.startObject().field("actions", actionNames).endObject()
         );

From a7e57dfe003efa9b284acbf9dd134d7f5f25efe0 Mon Sep 17 00:00:00 2001
From: Ryan Ernst
Date: Tue, 15 Feb 2022 18:13:16 -0800
Subject: [PATCH 27/37] Use latch to speedup multi feature migration test (#84007)

The multi feature migration test works by having two system index features
that are to be upgraded. The reindexing of the system indices of those
features is done in sequence by the system index migrator, and the test has
an assertBusy that waits for all the migrations to complete. Unfortunately
assertBusy backs off exponentially, so it isn't great for quickly resuming
the test once the underlying assertion becomes true: the condition may
become true shortly after an iteration, and the test then has to wait out a
long backoff.

This commit adds a latch that is counted down in each of the test plugins'
migration hooks, so that the assertBusy does not run until all the indices
have migrated.
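For illustration, the synchronization added here reduces to gating on a
latch instead of polling. A minimal, self-contained sketch (class and
variable names are invented for this example; the real test counts down
inside its four pre/post-migration hooks):

    import java.util.concurrent.CountDownLatch;
    import java.util.concurrent.TimeUnit;

    class LatchGateSketch {
        public static void main(String[] args) throws InterruptedException {
            // One count per migration hook: two features x (pre + post) = 4.
            CountDownLatch hooksCalled = new CountDownLatch(4);
            for (int i = 0; i < 4; i++) {
                final int hook = i;
                new Thread(() -> {
                    System.out.println("hook " + hook + " ran"); // stand-in for hook work
                    hooksCalled.countDown();
                }).start();
            }
            // Wakes up as soon as the last hook fires, with no backoff penalty.
            if (hooksCalled.await(30, TimeUnit.SECONDS) == false) {
                throw new AssertionError("hooks did not all run");
            }
        }
    }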
closes #83953 --- .../migration/MultiFeatureMigrationIT.java | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/modules/reindex/src/internalClusterTest/java/org/elasticsearch/migration/MultiFeatureMigrationIT.java b/modules/reindex/src/internalClusterTest/java/org/elasticsearch/migration/MultiFeatureMigrationIT.java index 2fc6358f5c468..f0838dd571637 100644 --- a/modules/reindex/src/internalClusterTest/java/org/elasticsearch/migration/MultiFeatureMigrationIT.java +++ b/modules/reindex/src/internalClusterTest/java/org/elasticsearch/migration/MultiFeatureMigrationIT.java @@ -37,6 +37,8 @@ import java.util.List; import java.util.Map; import java.util.Set; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicReference; import java.util.function.BiConsumer; import java.util.function.Function; @@ -91,6 +93,8 @@ public void testMultipleFeatureMigration() throws Exception { ensureGreen(); + CountDownLatch hooksCalled = new CountDownLatch(4); + SetOnce preMigrationHookCalled = new SetOnce<>(); SetOnce postMigrationHookCalled = new SetOnce<>(); SetOnce secondPluginPreMigrationHookCalled = new SetOnce<>(); @@ -109,6 +113,7 @@ public void testMultipleFeatureMigration() throws Exception { assertThat(currentResults, nullValue()); preMigrationHookCalled.set(true); + hooksCalled.countDown(); return metadata; }); @@ -125,6 +130,7 @@ public void testMultipleFeatureMigration() throws Exception { assertThat(currentResults, nullValue()); postMigrationHookCalled.set(true); + hooksCalled.countDown(); }); SecondPlugin.preMigrationHook.set(clusterState -> { @@ -145,6 +151,7 @@ public void testMultipleFeatureMigration() throws Exception { assertThat(currentResults.getFeatureStatuses().get(FEATURE_NAME).getException(), nullValue()); secondPluginPreMigrationHookCalled.set(true); + hooksCalled.countDown(); return metadata; }); @@ -165,6 +172,7 @@ public void testMultipleFeatureMigration() throws Exception { assertThat(currentResults.getFeatureStatuses().get(FEATURE_NAME).getException(), nullValue()); secondPluginPostMigrationHookCalled.set(true); + hooksCalled.countDown(); }); PostFeatureUpgradeRequest migrationRequest = new PostFeatureUpgradeRequest(); @@ -177,6 +185,9 @@ public void testMultipleFeatureMigration() throws Exception { .collect(Collectors.toSet()); assertThat(migratingFeatures, hasItems(FEATURE_NAME, SECOND_FEATURE_NAME)); + // wait for all the plugin methods to have been called before assertBusy since that will exponentially backoff + assertThat(hooksCalled.await(30, TimeUnit.SECONDS), is(true)); + GetFeatureUpgradeStatusRequest getStatusRequest = new GetFeatureUpgradeStatusRequest(); assertBusy(() -> { GetFeatureUpgradeStatusResponse statusResponse = client().execute(GetFeatureUpgradeStatusAction.INSTANCE, getStatusRequest) From 74b5bfdb73dda73e99d03ee8ced63dfbe45ae160 Mon Sep 17 00:00:00 2001 From: Yannick Welsch Date: Wed, 16 Feb 2022 08:31:41 +0100 Subject: [PATCH 28/37] Feature usage actions for archive (#83931) Relates #81210 --- .../xcontent/monitor_cluster_stats.json | 5 ++ docs/reference/rest-api/info.asciidoc | 4 + docs/reference/rest-api/usage.asciidoc | 5 ++ .../snapshots/RestoreService.java | 17 +--- .../xpack/core/XPackClientPlugin.java | 5 +- .../elasticsearch/xpack/core/XPackField.java | 2 + .../core/action/XPackInfoFeatureAction.java | 4 +- .../core/action/XPackUsageFeatureAction.java | 4 +- .../core/archive/ArchiveFeatureSetUsage.java | 73 ++++++++++++++++ .../archive/ArchiveFeatureSetUsageTests.java | 
39 +++++++++ .../lucene/bwc/ArchiveLicenseIntegTests.java | 23 +++++ .../bwc/ArchiveInfoTransportAction.java | 44 ++++++++++ .../bwc/ArchiveUsageTransportAction.java | 70 +++++++++++++++ .../xpack/lucene/bwc/OldLuceneVersions.java | 15 +++- .../xpack/security/operator/Constants.java | 2 + .../oldrepos/DocValueOnlyFieldsIT.java | 9 -- .../oldrepos/OldRepositoryAccessIT.java | 87 ++++++++----------- 17 files changed, 331 insertions(+), 77 deletions(-) create mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/archive/ArchiveFeatureSetUsage.java create mode 100644 x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/archive/ArchiveFeatureSetUsageTests.java create mode 100644 x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/ArchiveInfoTransportAction.java create mode 100644 x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/ArchiveUsageTransportAction.java diff --git a/benchmarks/src/main/resources/org/elasticsearch/benchmark/xcontent/monitor_cluster_stats.json b/benchmarks/src/main/resources/org/elasticsearch/benchmark/xcontent/monitor_cluster_stats.json index de460d770d249..eea13dec75ffd 100644 --- a/benchmarks/src/main/resources/org/elasticsearch/benchmark/xcontent/monitor_cluster_stats.json +++ b/benchmarks/src/main/resources/org/elasticsearch/benchmark/xcontent/monitor_cluster_stats.json @@ -1233,6 +1233,11 @@ "total" : 0, "failed" : 0 } + }, + "archive" : { + "available" : false, + "enabled" : true, + "indices_count" : 0 } } } diff --git a/docs/reference/rest-api/info.asciidoc b/docs/reference/rest-api/info.asciidoc index e4d533c8378d3..5292b6e8967cb 100644 --- a/docs/reference/rest-api/info.asciidoc +++ b/docs/reference/rest-api/info.asciidoc @@ -81,6 +81,10 @@ Example response: "available" : true, "enabled" : true }, + "archive" : { + "available" : true, + "enabled" : true + }, "enrich" : { "available" : true, "enabled" : true diff --git a/docs/reference/rest-api/usage.asciidoc b/docs/reference/rest-api/usage.asciidoc index 786a21f576423..13773b02fe417 100644 --- a/docs/reference/rest-api/usage.asciidoc +++ b/docs/reference/rest-api/usage.asciidoc @@ -395,6 +395,11 @@ GET /_xpack/usage "aggregate_metric" : { "available" : true, "enabled" : true + }, + "archive" : { + "available" : true, + "enabled" : true, + "indices_count" : 0 } } ------------------------------------------------------------ diff --git a/server/src/main/java/org/elasticsearch/snapshots/RestoreService.java b/server/src/main/java/org/elasticsearch/snapshots/RestoreService.java index d1b996978aa31..de137cde1f331 100644 --- a/server/src/main/java/org/elasticsearch/snapshots/RestoreService.java +++ b/server/src/main/java/org/elasticsearch/snapshots/RestoreService.java @@ -10,7 +10,6 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.message.ParameterizedMessage; -import org.elasticsearch.Build; import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.StepListener; @@ -978,7 +977,8 @@ static void validateSnapshotRestorable(RestoreSnapshotRequest request, Repositor + "]" ); } - if (skipVersionChecks(repository) == false && snapshotInfo.version().before(Version.CURRENT.minimumIndexCompatibilityVersion())) { + if (ALLOW_BWC_INDICES_SETTING.get(repository.settings()) == false + && snapshotInfo.version().before(Version.CURRENT.minimumIndexCompatibilityVersion())) { throw new SnapshotRestoreException( new 
Snapshot(repository.name(), snapshotInfo.snapshotId()), "the snapshot was created with Elasticsearch version [" @@ -1002,19 +1002,6 @@ static void validateSnapshotRestorable(RestoreSnapshotRequest request, Repositor Setting.Property.NodeScope ); - private static boolean skipVersionChecks(RepositoryMetadata repositoryMetadata) { - if (Build.CURRENT.isSnapshot()) { - return ALLOW_BWC_INDICES_SETTING.get(repositoryMetadata.settings()); - } else { - if (ALLOW_BWC_INDICES_SETTING.exists(repositoryMetadata.settings())) { - throw new IllegalArgumentException( - "Repository setting [" + ALLOW_BWC_INDICES_SETTING.getKey() + "] only allowed in release builds" - ); - } - return false; - } - } - public static boolean failed(SnapshotInfo snapshot, String index) { for (SnapshotShardFailure failure : snapshot.shardFailures()) { if (index.equals(failure.index())) { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackClientPlugin.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackClientPlugin.java index 8d78275266a86..4bd7ce835dcdb 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackClientPlugin.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackClientPlugin.java @@ -36,6 +36,7 @@ import org.elasticsearch.xpack.core.action.XPackUsageAction; import org.elasticsearch.xpack.core.aggregatemetric.AggregateMetricFeatureSetUsage; import org.elasticsearch.xpack.core.analytics.AnalyticsFeatureSetUsage; +import org.elasticsearch.xpack.core.archive.ArchiveFeatureSetUsage; import org.elasticsearch.xpack.core.async.DeleteAsyncResultAction; import org.elasticsearch.xpack.core.ccr.AutoFollowMetadata; import org.elasticsearch.xpack.core.datastreams.DataStreamFeatureSetUsage; @@ -552,7 +553,9 @@ public List getNamedWriteables() { // Data Streams new NamedWriteableRegistry.Entry(XPackFeatureSet.Usage.class, XPackField.DATA_STREAMS, DataStreamFeatureSetUsage::new), // Data Tiers - new NamedWriteableRegistry.Entry(XPackFeatureSet.Usage.class, XPackField.DATA_TIERS, DataTiersFeatureSetUsage::new) + new NamedWriteableRegistry.Entry(XPackFeatureSet.Usage.class, XPackField.DATA_TIERS, DataTiersFeatureSetUsage::new), + // Archive + new NamedWriteableRegistry.Entry(XPackFeatureSet.Usage.class, XPackField.ARCHIVE, ArchiveFeatureSetUsage::new) ) ); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackField.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackField.java index 59343705b9098..dbc100e62ac1e 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackField.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackField.java @@ -71,6 +71,8 @@ public final class XPackField { public static final String AGGREGATE_METRIC = "aggregate_metric"; /** Name constant for the operator privileges feature. */ public static final String OPERATOR_PRIVILEGES = "operator_privileges"; + /** Name constant for the archive feature. 
*/ + public static final String ARCHIVE = "archive"; private XPackField() {} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/XPackInfoFeatureAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/XPackInfoFeatureAction.java index c6c941ef3092d..83e835d4bb6dc 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/XPackInfoFeatureAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/XPackInfoFeatureAction.java @@ -47,6 +47,7 @@ public class XPackInfoFeatureAction extends ActionType public static final XPackInfoFeatureAction DATA_STREAMS = new XPackInfoFeatureAction(XPackField.DATA_STREAMS); public static final XPackInfoFeatureAction DATA_TIERS = new XPackInfoFeatureAction(XPackField.DATA_TIERS); public static final XPackInfoFeatureAction AGGREGATE_METRIC = new XPackInfoFeatureAction(XPackField.AGGREGATE_METRIC); + public static final XPackInfoFeatureAction ARCHIVE = new XPackInfoFeatureAction(XPackField.ARCHIVE); public static final List ALL; static { @@ -74,7 +75,8 @@ public class XPackInfoFeatureAction extends ActionType DATA_STREAMS, SEARCHABLE_SNAPSHOTS, DATA_TIERS, - AGGREGATE_METRIC + AGGREGATE_METRIC, + ARCHIVE ) ); ALL = Collections.unmodifiableList(actions); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/XPackUsageFeatureAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/XPackUsageFeatureAction.java index cd310064ffa0f..bfbac109012e5 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/XPackUsageFeatureAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/XPackUsageFeatureAction.java @@ -44,6 +44,7 @@ public class XPackUsageFeatureAction extends ActionType ALL = List.of( AGGREGATE_METRIC, @@ -66,7 +67,8 @@ public class XPackUsageFeatureAction extends ActionType { + + @Override + protected ArchiveFeatureSetUsage createTestInstance() { + boolean available = randomBoolean(); + return new ArchiveFeatureSetUsage(available, randomIntBetween(0, 100000)); + } + + @Override + protected ArchiveFeatureSetUsage mutateInstance(ArchiveFeatureSetUsage instance) throws IOException { + boolean available = instance.available(); + int numArchiveIndices = instance.getNumberOfArchiveIndices(); + switch (between(0, 1)) { + case 0 -> available = available == false; + case 1 -> numArchiveIndices = randomValueOtherThan(numArchiveIndices, () -> randomIntBetween(0, 100000)); + default -> throw new AssertionError("Illegal randomisation branch"); + } + return new ArchiveFeatureSetUsage(available, numArchiveIndices); + } + + @Override + protected Writeable.Reader instanceReader() { + return ArchiveFeatureSetUsage::new; + } + +} diff --git a/x-pack/plugin/old-lucene-versions/src/internalClusterTest/java/org/elasticsearch/xpack/lucene/bwc/ArchiveLicenseIntegTests.java b/x-pack/plugin/old-lucene-versions/src/internalClusterTest/java/org/elasticsearch/xpack/lucene/bwc/ArchiveLicenseIntegTests.java index 0c37eac048853..4d2c8113c02ba 100644 --- a/x-pack/plugin/old-lucene-versions/src/internalClusterTest/java/org/elasticsearch/xpack/lucene/bwc/ArchiveLicenseIntegTests.java +++ b/x-pack/plugin/old-lucene-versions/src/internalClusterTest/java/org/elasticsearch/xpack/lucene/bwc/ArchiveLicenseIntegTests.java @@ -31,6 +31,7 @@ import org.elasticsearch.license.PostStartTrialResponse; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.plugins.RepositoryPlugin; 
+import org.elasticsearch.protocol.xpack.XPackUsageRequest; import org.elasticsearch.protocol.xpack.license.DeleteLicenseRequest; import org.elasticsearch.repositories.IndexId; import org.elasticsearch.repositories.Repository; @@ -42,6 +43,9 @@ import org.elasticsearch.snapshots.mockstore.MockRepository; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.xcontent.NamedXContentRegistry; +import org.elasticsearch.xpack.core.action.XPackUsageFeatureAction; +import org.elasticsearch.xpack.core.action.XPackUsageFeatureResponse; +import org.elasticsearch.xpack.core.archive.ArchiveFeatureSetUsage; import org.junit.Before; import java.io.IOException; @@ -52,6 +56,7 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.oneOf; @ESIntegTestCase.ClusterScope(supportsDedicatedMasters = false, numClientNodes = 0, scope = ESIntegTestCase.Scope.TEST) @@ -130,6 +135,24 @@ public void createAndRestoreArchive() throws Exception { client().execute(PostStartTrialAction.INSTANCE, request).get(); } + public void testFeatureUsage() throws Exception { + XPackUsageFeatureResponse usage = client().execute(XPackUsageFeatureAction.ARCHIVE, new XPackUsageRequest()).get(); + assertThat(usage.getUsage(), instanceOf(ArchiveFeatureSetUsage.class)); + ArchiveFeatureSetUsage archiveUsage = (ArchiveFeatureSetUsage) usage.getUsage(); + assertEquals(0, archiveUsage.getNumberOfArchiveIndices()); + + final RestoreSnapshotRequest req = new RestoreSnapshotRequest(repoName, snapshotName).indices(indexName).waitForCompletion(true); + + final RestoreSnapshotResponse restoreSnapshotResponse = client().admin().cluster().restoreSnapshot(req).get(); + assertThat(restoreSnapshotResponse.getRestoreInfo().failedShards(), equalTo(0)); + ensureGreen(indexName); + + usage = client().execute(XPackUsageFeatureAction.ARCHIVE, new XPackUsageRequest()).get(); + assertThat(usage.getUsage(), instanceOf(ArchiveFeatureSetUsage.class)); + archiveUsage = (ArchiveFeatureSetUsage) usage.getUsage(); + assertEquals(1, archiveUsage.getNumberOfArchiveIndices()); + } + public void testFailRestoreOnInvalidLicense() throws Exception { assertAcked(client().execute(DeleteLicenseAction.INSTANCE, new DeleteLicenseRequest()).get()); assertAcked(client().execute(PostStartBasicAction.INSTANCE, new PostStartBasicRequest()).get()); diff --git a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/ArchiveInfoTransportAction.java b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/ArchiveInfoTransportAction.java new file mode 100644 index 0000000000000..702559a4810d8 --- /dev/null +++ b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/ArchiveInfoTransportAction.java @@ -0,0 +1,44 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.lucene.bwc; + +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.license.XPackLicenseState; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.core.XPackField; +import org.elasticsearch.xpack.core.action.XPackInfoFeatureAction; +import org.elasticsearch.xpack.core.action.XPackInfoFeatureTransportAction; + +import static org.elasticsearch.xpack.lucene.bwc.OldLuceneVersions.ARCHIVE_FEATURE; + +public class ArchiveInfoTransportAction extends XPackInfoFeatureTransportAction { + + private final XPackLicenseState licenseState; + + @Inject + public ArchiveInfoTransportAction(TransportService transportService, ActionFilters actionFilters, XPackLicenseState licenseState) { + super(XPackInfoFeatureAction.ARCHIVE.name(), transportService, actionFilters); + this.licenseState = licenseState; + } + + @Override + public String name() { + return XPackField.ARCHIVE; + } + + @Override + public boolean available() { + return ARCHIVE_FEATURE.checkWithoutTracking(licenseState); + } + + @Override + public boolean enabled() { + return true; + } +} diff --git a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/ArchiveUsageTransportAction.java b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/ArchiveUsageTransportAction.java new file mode 100644 index 0000000000000..d209db2f9ce37 --- /dev/null +++ b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/ArchiveUsageTransportAction.java @@ -0,0 +1,70 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
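Both transport actions in this patch gate availability on OldLuceneVersions.ARCHIVE_FEATURE, and the usage action additionally counts indices through OldLuceneVersions.isArchiveIndex. Neither member is fully visible here: the later OldLuceneVersions hunk truncates the ARCHIVE_FEATURE declaration after its first argument, and isArchiveIndex is never shown. A hedged sketch of what the two plausibly look like; the license level and the version bound are assumptions, not taken from the patch.

// Sketch only; both members live in OldLuceneVersions and are cut off or absent in this excerpt.
public static final LicensedFeature.Momentary ARCHIVE_FEATURE = LicensedFeature.momentary(
    null,                             // no feature family
    "archive",                        // assumption: mirrors XPackField.ARCHIVE
    License.OperationMode.ENTERPRISE  // assumption: license level required for archive data
);

public static boolean isArchiveIndex(Version version) {
    // assumption: an index counts as "archive" when it predates what this node can read natively
    return version.before(Version.CURRENT.minimumIndexCompatibilityVersion());
}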
+ */ + +package org.elasticsearch.xpack.lucene.bwc; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.metadata.IndexMetadata; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.license.XPackLicenseState; +import org.elasticsearch.protocol.xpack.XPackUsageRequest; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.core.action.XPackUsageFeatureAction; +import org.elasticsearch.xpack.core.action.XPackUsageFeatureResponse; +import org.elasticsearch.xpack.core.action.XPackUsageFeatureTransportAction; +import org.elasticsearch.xpack.core.archive.ArchiveFeatureSetUsage; + +import static org.elasticsearch.xpack.lucene.bwc.OldLuceneVersions.ARCHIVE_FEATURE; + +public class ArchiveUsageTransportAction extends XPackUsageFeatureTransportAction { + + private final XPackLicenseState licenseState; + + @Inject + public ArchiveUsageTransportAction( + TransportService transportService, + ClusterService clusterService, + ThreadPool threadPool, + ActionFilters actionFilters, + IndexNameExpressionResolver indexNameExpressionResolver, + XPackLicenseState licenseState + ) { + super( + XPackUsageFeatureAction.ARCHIVE.name(), + transportService, + clusterService, + threadPool, + actionFilters, + indexNameExpressionResolver + ); + this.licenseState = licenseState; + } + + @Override + protected void masterOperation( + Task task, + XPackUsageRequest request, + ClusterState state, + ActionListener listener + ) { + int numArchiveIndices = 0; + for (IndexMetadata indexMetadata : state.metadata()) { + if (OldLuceneVersions.isArchiveIndex(indexMetadata.getCreationVersion())) { + numArchiveIndices++; + } + } + listener.onResponse( + new XPackUsageFeatureResponse(new ArchiveFeatureSetUsage(ARCHIVE_FEATURE.checkWithoutTracking(licenseState), numArchiveIndices)) + ); + } +} diff --git a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/OldLuceneVersions.java b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/OldLuceneVersions.java index 631de49d4fa1d..69ac9777960de 100644 --- a/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/OldLuceneVersions.java +++ b/x-pack/plugin/old-lucene-versions/src/main/java/org/elasticsearch/xpack/lucene/bwc/OldLuceneVersions.java @@ -12,6 +12,8 @@ import org.apache.lucene.index.SegmentInfos; import org.apache.lucene.util.SetOnce; import org.elasticsearch.Version; +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionResponse; import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; @@ -35,6 +37,7 @@ import org.elasticsearch.license.LicenseUtils; import org.elasticsearch.license.LicensedFeature; import org.elasticsearch.license.XPackLicenseState; +import org.elasticsearch.plugins.ActionPlugin; import org.elasticsearch.plugins.ClusterPlugin; import org.elasticsearch.plugins.IndexStorePlugin; import org.elasticsearch.plugins.Plugin; @@ -45,6 +48,8 @@ import org.elasticsearch.watcher.ResourceWatcherService; import 
org.elasticsearch.xcontent.NamedXContentRegistry; import org.elasticsearch.xpack.core.XPackPlugin; +import org.elasticsearch.xpack.core.action.XPackInfoFeatureAction; +import org.elasticsearch.xpack.core.action.XPackUsageFeatureAction; import org.elasticsearch.xpack.lucene.bwc.codecs.BWCCodec; import java.io.IOException; @@ -56,7 +61,7 @@ import java.util.function.Consumer; import java.util.function.Supplier; -public class OldLuceneVersions extends Plugin implements IndexStorePlugin, ClusterPlugin, RepositoryPlugin { +public class OldLuceneVersions extends Plugin implements IndexStorePlugin, ClusterPlugin, RepositoryPlugin, ActionPlugin { public static final LicensedFeature.Momentary ARCHIVE_FEATURE = LicensedFeature.momentary( null, @@ -95,6 +100,14 @@ public Collection createComponents( return List.of(); } + @Override + public List> getActions() { + return List.of( + new ActionPlugin.ActionHandler<>(XPackUsageFeatureAction.ARCHIVE, ArchiveUsageTransportAction.class), + new ActionPlugin.ActionHandler<>(XPackInfoFeatureAction.ARCHIVE, ArchiveInfoTransportAction.class) + ); + } + // overridable by tests protected XPackLicenseState getLicenseState() { return XPackPlugin.getSharedLicenseState(); diff --git a/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java b/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java index 1cff2e41c26fb..94378f91d0ebc 100644 --- a/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java +++ b/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java @@ -274,6 +274,7 @@ public class Constants { "cluster:monitor/xpack/info", "cluster:monitor/xpack/info/aggregate_metric", "cluster:monitor/xpack/info/analytics", + "cluster:monitor/xpack/info/archive", "cluster:monitor/xpack/info/ccr", "cluster:monitor/xpack/info/data_streams", "cluster:monitor/xpack/info/data_tiers", @@ -329,6 +330,7 @@ public class Constants { "cluster:monitor/xpack/usage", "cluster:monitor/xpack/usage/aggregate_metric", "cluster:monitor/xpack/usage/analytics", + "cluster:monitor/xpack/usage/archive", "cluster:monitor/xpack/usage/ccr", "cluster:monitor/xpack/usage/data_streams", "cluster:monitor/xpack/usage/data_tiers", diff --git a/x-pack/qa/repository-old-versions/src/test/java/org/elasticsearch/oldrepos/DocValueOnlyFieldsIT.java b/x-pack/qa/repository-old-versions/src/test/java/org/elasticsearch/oldrepos/DocValueOnlyFieldsIT.java index 7df801a174e9d..ab1105d989ff1 100644 --- a/x-pack/qa/repository-old-versions/src/test/java/org/elasticsearch/oldrepos/DocValueOnlyFieldsIT.java +++ b/x-pack/qa/repository-old-versions/src/test/java/org/elasticsearch/oldrepos/DocValueOnlyFieldsIT.java @@ -12,7 +12,6 @@ import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; import org.apache.http.HttpHost; -import org.elasticsearch.Build; import org.elasticsearch.Version; import org.elasticsearch.client.Request; import org.elasticsearch.client.RestClient; @@ -65,12 +64,6 @@ protected Settings restClientSettings() { return Settings.builder().put(ThreadContext.PREFIX + ".Authorization", token).build(); } - @Override - public void test() throws IOException { - assumeTrue("feature currently only enabled in snapshot builds", Build.CURRENT.isSnapshot()); - super.test(); - } - @Override protected boolean 
skipSetupSections() { // setup in the YAML file is replaced by the method below @@ -79,8 +72,6 @@ protected boolean skipSetupSections() { @Before public void setupIndex() throws IOException { - assumeTrue("feature currently only enabled in snapshot builds", Build.CURRENT.isSnapshot()); - final boolean afterRestart = Booleans.parseBoolean(System.getProperty("tests.after_restart")); if (afterRestart) { return; diff --git a/x-pack/qa/repository-old-versions/src/test/java/org/elasticsearch/oldrepos/OldRepositoryAccessIT.java b/x-pack/qa/repository-old-versions/src/test/java/org/elasticsearch/oldrepos/OldRepositoryAccessIT.java index 6174c029c47cb..0f77bfb8ee964 100644 --- a/x-pack/qa/repository-old-versions/src/test/java/org/elasticsearch/oldrepos/OldRepositoryAccessIT.java +++ b/x-pack/qa/repository-old-versions/src/test/java/org/elasticsearch/oldrepos/OldRepositoryAccessIT.java @@ -8,7 +8,6 @@ package org.elasticsearch.oldrepos; import org.apache.http.HttpHost; -import org.elasticsearch.Build; import org.elasticsearch.Version; import org.elasticsearch.action.admin.cluster.repositories.put.PutRepositoryRequest; import org.elasticsearch.action.admin.cluster.snapshots.get.GetSnapshotsRequest; @@ -131,11 +130,9 @@ public void runTest(boolean sourceOnlyRepository) throws IOException { } private void afterRestart(String indexName) throws IOException { - if (Build.CURRENT.isSnapshot()) { - ensureGreen("restored_" + indexName); - ensureGreen("mounted_full_copy_" + indexName); - ensureGreen("mounted_shared_cache_" + indexName); - } + ensureGreen("restored_" + indexName); + ensureGreen("mounted_full_copy_" + indexName); + ensureGreen("mounted_shared_cache_" + indexName); } @SuppressWarnings("removal") @@ -207,9 +204,7 @@ private void beforeRestart( if (sourceOnlyRepository) { repoSettingsBuilder.put("delegate_type", "fs"); } - if (Build.CURRENT.isSnapshot()) { - repoSettingsBuilder.put("allow_bwc_indices", true); - } + repoSettingsBuilder.put("allow_bwc_indices", true); ElasticsearchAssertions.assertAcked( client.snapshot() .createRepository( @@ -263,48 +258,42 @@ private void beforeRestart( assertThat(snapshotStatus.getStats().getTotalSize(), greaterThan(0L)); assertThat(snapshotStatus.getStats().getTotalFileCount(), greaterThan(0)); - if (Build.CURRENT.isSnapshot()) { - // restore / mount and check whether searches work - restoreMountAndVerify( - numDocs, - expectedIds, - client, - numberOfShards, - sourceOnlyRepository, - oldVersion, - indexName, - repoName, - snapshotName - ); + // restore / mount and check whether searches work + restoreMountAndVerify( + numDocs, + expectedIds, + client, + numberOfShards, + sourceOnlyRepository, + oldVersion, + indexName, + repoName, + snapshotName + ); - // close indices - assertTrue( - client.indices().close(new CloseIndexRequest("restored_" + indexName), RequestOptions.DEFAULT).isShardsAcknowledged() - ); - assertTrue( - client.indices() - .close(new CloseIndexRequest("mounted_full_copy_" + indexName), RequestOptions.DEFAULT) - .isShardsAcknowledged() - ); - assertTrue( - client.indices() - .close(new CloseIndexRequest("mounted_shared_cache_" + indexName), RequestOptions.DEFAULT) - .isShardsAcknowledged() - ); + // close indices + assertTrue(client.indices().close(new CloseIndexRequest("restored_" + indexName), RequestOptions.DEFAULT).isShardsAcknowledged()); + assertTrue( + client.indices().close(new CloseIndexRequest("mounted_full_copy_" + indexName), RequestOptions.DEFAULT).isShardsAcknowledged() + ); + assertTrue( + client.indices() + .close(new 
CloseIndexRequest("mounted_shared_cache_" + indexName), RequestOptions.DEFAULT) + .isShardsAcknowledged() + ); - // restore / mount again - restoreMountAndVerify( - numDocs, - expectedIds, - client, - numberOfShards, - sourceOnlyRepository, - oldVersion, - indexName, - repoName, - snapshotName - ); - } + // restore / mount again + restoreMountAndVerify( + numDocs, + expectedIds, + client, + numberOfShards, + sourceOnlyRepository, + oldVersion, + indexName, + repoName, + snapshotName + ); } private String getType(Version oldVersion, String id) { From 494da68e50ea7e73079ce03a78b4c8f6d3a6d15e Mon Sep 17 00:00:00 2001 From: Ievgen Degtiarenko Date: Wed, 16 Feb 2022 10:33:47 +0100 Subject: [PATCH 29/37] Optimize spliterator for ImmutableOpenMap (#83899) We know the exact amount of nodes, so we can return a sized spliterator which allows the Stream pipeline to allocate memory more granularly. --- .../org/elasticsearch/common/collect/ImmutableOpenIntMap.java | 2 +- .../java/org/elasticsearch/common/collect/ImmutableOpenMap.java | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/common/collect/ImmutableOpenIntMap.java b/server/src/main/java/org/elasticsearch/common/collect/ImmutableOpenIntMap.java index aa9f5ee41567f..5acc18df3f8a3 100644 --- a/server/src/main/java/org/elasticsearch/common/collect/ImmutableOpenIntMap.java +++ b/server/src/main/java/org/elasticsearch/common/collect/ImmutableOpenIntMap.java @@ -241,7 +241,7 @@ public boolean remove(Object o) { } public Spliterator> spliterator() { - return Spliterators.spliteratorUnknownSize(iterator(), 0); + return Spliterators.spliterator(iterator(), size(), Spliterator.SIZED); } public void forEach(Consumer> action) { diff --git a/server/src/main/java/org/elasticsearch/common/collect/ImmutableOpenMap.java b/server/src/main/java/org/elasticsearch/common/collect/ImmutableOpenMap.java index 20427cc734638..8afef238aae50 100644 --- a/server/src/main/java/org/elasticsearch/common/collect/ImmutableOpenMap.java +++ b/server/src/main/java/org/elasticsearch/common/collect/ImmutableOpenMap.java @@ -209,7 +209,7 @@ public boolean remove(Object o) { } public Spliterator> spliterator() { - return Spliterators.spliteratorUnknownSize(iterator(), 0); + return Spliterators.spliterator(iterator(), size(), Spliterator.SIZED); } public void forEach(Consumer> action) { From d1bd822161572f7e443ef7dcebde43ba8adba299 Mon Sep 17 00:00:00 2001 From: Kevin Lacabane Date: Wed, 16 Feb 2022 10:46:29 +0100 Subject: [PATCH 30/37] [Stack Monitoring] add kibana_stats version alias to -mb template (#83930) * add kibana_stats version alias * increment version number Co-authored-by: James Baiera Co-authored-by: James Baiera --- .../plugin/core/src/main/resources/monitoring-kibana-mb.json | 4 ++++ .../xpack/monitoring/MonitoringTemplateRegistry.java | 2 +- 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/x-pack/plugin/core/src/main/resources/monitoring-kibana-mb.json b/x-pack/plugin/core/src/main/resources/monitoring-kibana-mb.json index e155f74ae0486..262e07d37c5ea 100644 --- a/x-pack/plugin/core/src/main/resources/monitoring-kibana-mb.json +++ b/x-pack/plugin/core/src/main/resources/monitoring-kibana-mb.json @@ -492,6 +492,10 @@ "uuid": { "type": "alias", "path": "service.id" + }, + "version": { + "type": "alias", + "path": "service.version" } } }, diff --git a/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/MonitoringTemplateRegistry.java 
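The rationale in the commit message above ("we know the exact amount of nodes, so we can return a sized spliterator") comes down to Spliterator.SIZED: a spliterator constructed with a known element count reports an exact estimateSize(), which lets downstream stream stages presize their buffers instead of growing them incrementally. A standalone JDK-only illustration of the difference, not Elasticsearch code:

import java.util.List;
import java.util.Spliterator;
import java.util.Spliterators;
import java.util.stream.StreamSupport;

// Contrast an unknown-size spliterator with a sized one over the same elements.
public class SizedSpliteratorDemo {
    public static void main(String[] args) {
        List<String> values = List.of("a", "b", "c");

        Spliterator<String> unknown = Spliterators.spliteratorUnknownSize(values.iterator(), 0);
        System.out.println(unknown.estimateSize()); // Long.MAX_VALUE: size unknown to the pipeline

        Spliterator<String> sized = Spliterators.spliterator(values.iterator(), values.size(), Spliterator.SIZED);
        System.out.println(sized.estimateSize()); // 3: exact, enabling presized allocation

        System.out.println(StreamSupport.stream(sized, false).count()); // 3
    }
}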
b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/MonitoringTemplateRegistry.java index 4e2ed262bece1..c72a7ddfb9f3b 100644 --- a/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/MonitoringTemplateRegistry.java +++ b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/MonitoringTemplateRegistry.java @@ -78,7 +78,7 @@ public class MonitoringTemplateRegistry extends IndexTemplateRegistry { * writes monitoring data in ECS format as of 8.0. These templates define the ECS schema as well as alias fields for the old monitoring * mappings that point to the corresponding ECS fields. */ - public static final int STACK_MONITORING_REGISTRY_VERSION = Version.V_8_0_0.id; + public static final int STACK_MONITORING_REGISTRY_VERSION = Version.V_8_0_0.id + 1; private static final String STACK_MONITORING_REGISTRY_VERSION_VARIABLE = "xpack.stack.monitoring.template.release.version"; private static final String STACK_TEMPLATE_VERSION = "8"; private static final String STACK_TEMPLATE_VERSION_VARIABLE = "xpack.stack.monitoring.template.version"; From e6abd9fe3d26b7233255a9fa7868d0642da57a66 Mon Sep 17 00:00:00 2001 From: Luigi Dell'Aquila Date: Wed, 16 Feb 2022 11:48:49 +0100 Subject: [PATCH 31/37] QL: Add leniency option to SQL CLI (#83795) by default the query behaviour from SQL CLI is strict (ie. non-lenient), so queries that return multi-value fields return an error. We now add an option to allow lenient behaviour (ie. in case of multi-value fields, return the first value). This behaviour can be enabled with the following command: lenient = true --- docs/changelog/83795.yaml | 6 ++ .../xpack/sql/qa/multi_node/CliLenientIT.java | 11 ++++ .../xpack/sql/qa/security/CliLenientIT.java | 28 +++++++++ .../sql/qa/single_node/CliLenientIT.java | 11 ++++ .../xpack/sql/qa/cli/LenientTestCase.java | 46 ++++++++++++++ .../org/elasticsearch/xpack/sql/cli/Cli.java | 6 +- .../cli/command/AbstractServerCliCommand.java | 2 +- .../xpack/sql/cli/command/CliSession.java | 42 ++----------- .../cli/command/CliSessionConfiguration.java | 60 +++++++++++++++++++ .../cli/command/FetchSeparatorCliCommand.java | 4 +- .../sql/cli/command/FetchSizeCliCommand.java | 4 +- .../sql/cli/command/LenientCliCommand.java | 31 ++++++++++ .../cli/command/ServerQueryCliCommand.java | 6 +- .../sql/cli/command/BuiltinCommandTests.java | 33 +++++++--- .../command/ServerQueryCliCommandTests.java | 30 +++++----- .../xpack/sql/client/HttpClient.java | 6 +- .../sql/client/HttpClientRequestTests.java | 2 +- 17 files changed, 255 insertions(+), 73 deletions(-) create mode 100644 docs/changelog/83795.yaml create mode 100644 x-pack/plugin/sql/qa/server/multi-node/src/test/java/org/elasticsearch/xpack/sql/qa/multi_node/CliLenientIT.java create mode 100644 x-pack/plugin/sql/qa/server/security/src/test/java/org/elasticsearch/xpack/sql/qa/security/CliLenientIT.java create mode 100644 x-pack/plugin/sql/qa/server/single-node/src/test/java/org/elasticsearch/xpack/sql/qa/single_node/CliLenientIT.java create mode 100644 x-pack/plugin/sql/qa/server/src/main/java/org/elasticsearch/xpack/sql/qa/cli/LenientTestCase.java create mode 100644 x-pack/plugin/sql/sql-cli/src/main/java/org/elasticsearch/xpack/sql/cli/command/CliSessionConfiguration.java create mode 100644 x-pack/plugin/sql/sql-cli/src/main/java/org/elasticsearch/xpack/sql/cli/command/LenientCliCommand.java diff --git a/docs/changelog/83795.yaml b/docs/changelog/83795.yaml new file mode 100644 index 0000000000000..af5a670918a7a --- /dev/null 
+++ b/docs/changelog/83795.yaml @@ -0,0 +1,6 @@ +pr: 83795 +summary: Add leniency option to SQL CLI +area: SQL +type: enhancement +issues: + - 67436 diff --git a/x-pack/plugin/sql/qa/server/multi-node/src/test/java/org/elasticsearch/xpack/sql/qa/multi_node/CliLenientIT.java b/x-pack/plugin/sql/qa/server/multi-node/src/test/java/org/elasticsearch/xpack/sql/qa/multi_node/CliLenientIT.java new file mode 100644 index 0000000000000..fc4a04570ff67 --- /dev/null +++ b/x-pack/plugin/sql/qa/server/multi-node/src/test/java/org/elasticsearch/xpack/sql/qa/multi_node/CliLenientIT.java @@ -0,0 +1,11 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ +package org.elasticsearch.xpack.sql.qa.multi_node; + +import org.elasticsearch.xpack.sql.qa.cli.LenientTestCase; + +public class CliLenientIT extends LenientTestCase {} diff --git a/x-pack/plugin/sql/qa/server/security/src/test/java/org/elasticsearch/xpack/sql/qa/security/CliLenientIT.java b/x-pack/plugin/sql/qa/server/security/src/test/java/org/elasticsearch/xpack/sql/qa/security/CliLenientIT.java new file mode 100644 index 0000000000000..87e056baa6751 --- /dev/null +++ b/x-pack/plugin/sql/qa/server/security/src/test/java/org/elasticsearch/xpack/sql/qa/security/CliLenientIT.java @@ -0,0 +1,28 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ +package org.elasticsearch.xpack.sql.qa.security; + +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.xpack.sql.qa.cli.EmbeddedCli.SecurityConfig; +import org.elasticsearch.xpack.sql.qa.cli.LenientTestCase; + +public class CliLenientIT extends LenientTestCase { + @Override + protected Settings restClientSettings() { + return RestSqlIT.securitySettings(); + } + + @Override + protected String getProtocol() { + return RestSqlIT.SSL_ENABLED ? "https" : "http"; + } + + @Override + protected SecurityConfig securityConfig() { + return CliSecurityIT.adminSecurityConfig(); + } +} diff --git a/x-pack/plugin/sql/qa/server/single-node/src/test/java/org/elasticsearch/xpack/sql/qa/single_node/CliLenientIT.java b/x-pack/plugin/sql/qa/server/single-node/src/test/java/org/elasticsearch/xpack/sql/qa/single_node/CliLenientIT.java new file mode 100644 index 0000000000000..afcfca0a01ed2 --- /dev/null +++ b/x-pack/plugin/sql/qa/server/single-node/src/test/java/org/elasticsearch/xpack/sql/qa/single_node/CliLenientIT.java @@ -0,0 +1,11 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
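These CliLenientIT subclasses all reuse LenientTestCase, shown next. End to end, the lenient flag set by the new CLI command travels through HttpClient.basicQuery into the multi-value leniency field of the SqlQueryRequest (see the sql-client diff near the end of this patch). A condensed sketch of the programmatic equivalent; import paths are taken from the diffs where visible and otherwise assumed:

import java.sql.SQLException;

import org.elasticsearch.xpack.sql.client.ConnectionConfiguration;
import org.elasticsearch.xpack.sql.client.HttpClient;
import org.elasticsearch.xpack.sql.proto.SqlQueryResponse;

class LenientQueryExample {
    // The three-argument basicQuery overload is added by this patch; passing true asks
    // the server to return the first value of a multi-value field instead of failing.
    // Building the ConnectionConfiguration is assumed to happen elsewhere; Cli.java
    // in this patch constructs HttpClient the same way.
    static SqlQueryResponse firstPage(ConnectionConfiguration cfg) throws SQLException {
        HttpClient client = new HttpClient(cfg);
        return client.basicQuery("SELECT * FROM test", 1000, true);
    }
}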
+ */ +package org.elasticsearch.xpack.sql.qa.single_node; + +import org.elasticsearch.xpack.sql.qa.cli.LenientTestCase; + +public class CliLenientIT extends LenientTestCase {} diff --git a/x-pack/plugin/sql/qa/server/src/main/java/org/elasticsearch/xpack/sql/qa/cli/LenientTestCase.java b/x-pack/plugin/sql/qa/server/src/main/java/org/elasticsearch/xpack/sql/qa/cli/LenientTestCase.java new file mode 100644 index 0000000000000..76f84541e5bb9 --- /dev/null +++ b/x-pack/plugin/sql/qa/server/src/main/java/org/elasticsearch/xpack/sql/qa/cli/LenientTestCase.java @@ -0,0 +1,46 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ +package org.elasticsearch.xpack.sql.qa.cli; + +import org.elasticsearch.test.hamcrest.RegexMatcher; + +import java.io.IOException; + +import static org.hamcrest.Matchers.containsString; + +public abstract class LenientTestCase extends CliIntegrationTestCase { + + public void testLenientCommand() throws IOException { + index("test", body -> body.field("name", "foo").field("tags", new String[] { "bar", "bar" })); + assertEquals("[?1l>[?1000l[?2004llenient set to [90mtrue[0m", command("lenient = true")); + assertThat(command("SELECT * FROM test"), RegexMatcher.matches("\\s*name\\s*\\|\\s*tags\\s*")); + assertThat(readLine(), containsString("----------")); + assertThat(readLine(), RegexMatcher.matches("\\s*foo\\s*\\|\\s*bar\\s*")); + assertEquals("", readLine()); + } + + public void testDefaultNoLenient() throws IOException { + index("test", body -> body.field("name", "foo").field("tags", new String[] { "bar", "bar" })); + assertThat( + command("SELECT * FROM test"), + containsString("Server encountered an error [Arrays (returned by [tags]) are not supported]") + ); + while ("][23;31;1m][0m".equals(readLine()) == false) + ; // clean console to avoid failures on shutdown + } + + public void testExplicitNoLenient() throws IOException { + index("test", body -> body.field("name", "foo").field("tags", new String[] { "bar", "bar" })); + assertEquals("[?1l>[?1000l[?2004llenient set to [90mfalse[0m", command("lenient = false")); + assertThat( + command("SELECT * FROM test"), + containsString("Server encountered an error [Arrays (returned by [tags]) are not supported]") + ); + while ("][23;31;1m][0m".equals(readLine()) == false) + ; // clean console to avoid failures on shutdown + } +} diff --git a/x-pack/plugin/sql/sql-cli/src/main/java/org/elasticsearch/xpack/sql/cli/Cli.java b/x-pack/plugin/sql/sql-cli/src/main/java/org/elasticsearch/xpack/sql/cli/Cli.java index 8ccc079860937..97d5bcc3da927 100644 --- a/x-pack/plugin/sql/sql-cli/src/main/java/org/elasticsearch/xpack/sql/cli/Cli.java +++ b/x-pack/plugin/sql/sql-cli/src/main/java/org/elasticsearch/xpack/sql/cli/Cli.java @@ -19,6 +19,7 @@ import org.elasticsearch.xpack.sql.cli.command.CliSession; import org.elasticsearch.xpack.sql.cli.command.FetchSeparatorCliCommand; import org.elasticsearch.xpack.sql.cli.command.FetchSizeCliCommand; +import org.elasticsearch.xpack.sql.cli.command.LenientCliCommand; import org.elasticsearch.xpack.sql.cli.command.PrintLogoCommand; import org.elasticsearch.xpack.sql.cli.command.ServerInfoCliCommand; import org.elasticsearch.xpack.sql.cli.command.ServerQueryCliCommand; @@ -128,6 +129,7 @@ private void execute(String uri, boolean debug, boolean binary, String keystoreL new PrintLogoCommand(), new 
ClearScreenCliCommand(), new FetchSizeCliCommand(), + new LenientCliCommand(), new FetchSeparatorCliCommand(), new ServerInfoCliCommand(), new ServerQueryCliCommand() @@ -136,7 +138,7 @@ private void execute(String uri, boolean debug, boolean binary, String keystoreL ConnectionBuilder connectionBuilder = new ConnectionBuilder(cliTerminal); ConnectionConfiguration con = connectionBuilder.buildConnection(uri, keystoreLocation, binary); CliSession cliSession = new CliSession(new HttpClient(con)); - cliSession.setDebug(debug); + cliSession.cfg().setDebug(debug); if (checkConnection) { checkConnection(cliSession, cliTerminal, con); } @@ -150,7 +152,7 @@ private void checkConnection(CliSession cliSession, CliTerminal cliTerminal, Con try { cliSession.checkConnection(); } catch (ClientException ex) { - if (cliSession.isDebug()) { + if (cliSession.cfg().isDebug()) { cliTerminal.error("Client Exception", ex.getMessage()); cliTerminal.println(); cliTerminal.printStackTrace(ex); diff --git a/x-pack/plugin/sql/sql-cli/src/main/java/org/elasticsearch/xpack/sql/cli/command/AbstractServerCliCommand.java b/x-pack/plugin/sql/sql-cli/src/main/java/org/elasticsearch/xpack/sql/cli/command/AbstractServerCliCommand.java index a3ede76da53a7..89f8a71ca9f5c 100644 --- a/x-pack/plugin/sql/sql-cli/src/main/java/org/elasticsearch/xpack/sql/cli/command/AbstractServerCliCommand.java +++ b/x-pack/plugin/sql/sql-cli/src/main/java/org/elasticsearch/xpack/sql/cli/command/AbstractServerCliCommand.java @@ -34,7 +34,7 @@ protected void handleExceptionWhileCommunicatingWithServer(CliTerminal terminal, .param(e.getMessage() == null ? e.getClass().getName() : e.getMessage()) .error("]") .ln(); - if (cliSession.isDebug()) { + if (cliSession.cfg().isDebug()) { terminal.printStackTrace(e); } } diff --git a/x-pack/plugin/sql/sql-cli/src/main/java/org/elasticsearch/xpack/sql/cli/command/CliSession.java b/x-pack/plugin/sql/sql-cli/src/main/java/org/elasticsearch/xpack/sql/cli/command/CliSession.java index 34502aab9db3f..b48c4b84cd0cf 100644 --- a/x-pack/plugin/sql/sql-cli/src/main/java/org/elasticsearch/xpack/sql/cli/command/CliSession.java +++ b/x-pack/plugin/sql/sql-cli/src/main/java/org/elasticsearch/xpack/sql/cli/command/CliSession.java @@ -9,7 +9,6 @@ import org.elasticsearch.xpack.sql.client.ClientException; import org.elasticsearch.xpack.sql.client.ClientVersion; import org.elasticsearch.xpack.sql.client.HttpClient; -import org.elasticsearch.xpack.sql.proto.CoreProtocol; import org.elasticsearch.xpack.sql.proto.MainResponse; import org.elasticsearch.xpack.sql.proto.SqlVersion; @@ -20,52 +19,19 @@ */ public class CliSession { private final HttpClient httpClient; - private int fetchSize = CoreProtocol.FETCH_SIZE; - private String fetchSeparator = ""; - private boolean debug; - private boolean binary; + private final CliSessionConfiguration configuration; public CliSession(HttpClient httpClient) { this.httpClient = httpClient; + this.configuration = new CliSessionConfiguration(); } public HttpClient getClient() { return httpClient; } - public void setFetchSize(int fetchSize) { - if (fetchSize <= 0) { - throw new IllegalArgumentException("Must be > 0."); - } - this.fetchSize = fetchSize; - } - - public int getFetchSize() { - return fetchSize; - } - - public void setFetchSeparator(String fetchSeparator) { - this.fetchSeparator = fetchSeparator; - } - - public String getFetchSeparator() { - return fetchSeparator; - } - - public void setDebug(boolean debug) { - this.debug = debug; - } - - public boolean isDebug() { - return debug; - } 
- - public void setBinary(boolean binary) { - this.binary = binary; - } - - public boolean isBinary() { - return binary; + public CliSessionConfiguration cfg() { + return configuration; } public void checkConnection() throws ClientException { diff --git a/x-pack/plugin/sql/sql-cli/src/main/java/org/elasticsearch/xpack/sql/cli/command/CliSessionConfiguration.java b/x-pack/plugin/sql/sql-cli/src/main/java/org/elasticsearch/xpack/sql/cli/command/CliSessionConfiguration.java new file mode 100644 index 0000000000000..4507d36946bde --- /dev/null +++ b/x-pack/plugin/sql/sql-cli/src/main/java/org/elasticsearch/xpack/sql/cli/command/CliSessionConfiguration.java @@ -0,0 +1,60 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.sql.cli.command; + +import org.elasticsearch.xpack.sql.proto.CoreProtocol; + +/** + * Configuration for CLI session + */ +public class CliSessionConfiguration { + private int fetchSize; + private String fetchSeparator = ""; + private boolean debug; + private boolean lenient; + + public CliSessionConfiguration() { + this.fetchSize = CoreProtocol.FETCH_SIZE; + this.lenient = CoreProtocol.FIELD_MULTI_VALUE_LENIENCY; + } + + public void setFetchSize(int fetchSize) { + if (fetchSize <= 0) { + throw new IllegalArgumentException("Must be > 0."); + } + this.fetchSize = fetchSize; + } + + public int getFetchSize() { + return fetchSize; + } + + public void setFetchSeparator(String fetchSeparator) { + this.fetchSeparator = fetchSeparator; + } + + public String getFetchSeparator() { + return fetchSeparator; + } + + public void setDebug(boolean debug) { + this.debug = debug; + } + + public boolean isDebug() { + return debug; + } + + public boolean isLenient() { + return lenient; + } + + public void setLenient(boolean lenient) { + this.lenient = lenient; + } +} diff --git a/x-pack/plugin/sql/sql-cli/src/main/java/org/elasticsearch/xpack/sql/cli/command/FetchSeparatorCliCommand.java b/x-pack/plugin/sql/sql-cli/src/main/java/org/elasticsearch/xpack/sql/cli/command/FetchSeparatorCliCommand.java index bd07a5b9f04e2..efb6c9c054775 100644 --- a/x-pack/plugin/sql/sql-cli/src/main/java/org/elasticsearch/xpack/sql/cli/command/FetchSeparatorCliCommand.java +++ b/x-pack/plugin/sql/sql-cli/src/main/java/org/elasticsearch/xpack/sql/cli/command/FetchSeparatorCliCommand.java @@ -22,8 +22,8 @@ public FetchSeparatorCliCommand() { @Override protected boolean doHandle(CliTerminal terminal, CliSession cliSession, Matcher m, String line) { - cliSession.setFetchSeparator(m.group(1)); - terminal.line().text("fetch separator set to \"").em(cliSession.getFetchSeparator()).text("\"").end(); + cliSession.cfg().setFetchSeparator(m.group(1)); + terminal.line().text("fetch separator set to \"").em(cliSession.cfg().getFetchSeparator()).text("\"").end(); return true; } } diff --git a/x-pack/plugin/sql/sql-cli/src/main/java/org/elasticsearch/xpack/sql/cli/command/FetchSizeCliCommand.java b/x-pack/plugin/sql/sql-cli/src/main/java/org/elasticsearch/xpack/sql/cli/command/FetchSizeCliCommand.java index c4b3f1aeeb0ae..f17b3c469aa2d 100644 --- a/x-pack/plugin/sql/sql-cli/src/main/java/org/elasticsearch/xpack/sql/cli/command/FetchSizeCliCommand.java +++ b/x-pack/plugin/sql/sql-cli/src/main/java/org/elasticsearch/xpack/sql/cli/command/FetchSizeCliCommand.java @@ -23,7 +23,7 @@ public 
FetchSizeCliCommand() { @Override protected boolean doHandle(CliTerminal terminal, CliSession cliSession, Matcher m, String line) { try { - cliSession.setFetchSize(Integer.parseInt(m.group(1))); + cliSession.cfg().setFetchSize(Integer.parseInt(m.group(1))); } catch (NumberFormatException e) { terminal.line().error("Invalid fetch size [").param(m.group(1)).error("]").end(); return true; @@ -31,7 +31,7 @@ protected boolean doHandle(CliTerminal terminal, CliSession cliSession, Matcher terminal.line().error("Invalid fetch size [").param(m.group(1)).error("]. " + e.getMessage()).end(); return true; } - terminal.line().text("fetch size set to ").em(Integer.toString(cliSession.getFetchSize())).end(); + terminal.line().text("fetch size set to ").em(Integer.toString(cliSession.cfg().getFetchSize())).end(); return true; } } diff --git a/x-pack/plugin/sql/sql-cli/src/main/java/org/elasticsearch/xpack/sql/cli/command/LenientCliCommand.java b/x-pack/plugin/sql/sql-cli/src/main/java/org/elasticsearch/xpack/sql/cli/command/LenientCliCommand.java new file mode 100644 index 0000000000000..fd285a35c96e5 --- /dev/null +++ b/x-pack/plugin/sql/sql-cli/src/main/java/org/elasticsearch/xpack/sql/cli/command/LenientCliCommand.java @@ -0,0 +1,31 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ +package org.elasticsearch.xpack.sql.cli.command; + +import org.elasticsearch.xpack.sql.cli.CliTerminal; + +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +/** + * lenient command, enables/disables fields multi-value leniency. + * ie. with lenient = true, in case of array values, return the first value, with no guarantee of consistent results. 
+ * + */ +public class LenientCliCommand extends AbstractCliCommand { + + public LenientCliCommand() { + super(Pattern.compile("lenient *= *(.+)", Pattern.CASE_INSENSITIVE)); + } + + @Override + protected boolean doHandle(CliTerminal terminal, CliSession cliSession, Matcher m, String line) { + cliSession.cfg().setLenient(Boolean.parseBoolean(m.group(1))); + terminal.line().text("lenient set to ").em(Boolean.toString(cliSession.cfg().isLenient())).end(); + return true; + } +} diff --git a/x-pack/plugin/sql/sql-cli/src/main/java/org/elasticsearch/xpack/sql/cli/command/ServerQueryCliCommand.java b/x-pack/plugin/sql/sql-cli/src/main/java/org/elasticsearch/xpack/sql/cli/command/ServerQueryCliCommand.java index 1d929ed7708b4..ae582837b2e9f 100644 --- a/x-pack/plugin/sql/sql-cli/src/main/java/org/elasticsearch/xpack/sql/cli/command/ServerQueryCliCommand.java +++ b/x-pack/plugin/sql/sql-cli/src/main/java/org/elasticsearch/xpack/sql/cli/command/ServerQueryCliCommand.java @@ -26,7 +26,7 @@ protected boolean doHandle(CliTerminal terminal, CliSession cliSession, String l SimpleFormatter formatter; String data; try { - response = cliClient.basicQuery(line, cliSession.getFetchSize()); + response = cliClient.basicQuery(line, cliSession.cfg().getFetchSize(), cliSession.cfg().isLenient()); formatter = new SimpleFormatter(response.columns(), response.rows(), CLI); data = formatter.formatWithHeader(response.columns(), response.rows()); while (true) { @@ -36,8 +36,8 @@ protected boolean doHandle(CliTerminal terminal, CliSession cliSession, String l terminal.flush(); return true; } - if (false == cliSession.getFetchSeparator().equals("")) { - terminal.println(cliSession.getFetchSeparator()); + if (false == cliSession.cfg().getFetchSeparator().equals("")) { + terminal.println(cliSession.cfg().getFetchSeparator()); } response = cliSession.getClient().nextPage(response.cursor()); data = formatter.formatWithoutHeader(response.rows()); diff --git a/x-pack/plugin/sql/sql-cli/src/test/java/org/elasticsearch/xpack/sql/cli/command/BuiltinCommandTests.java b/x-pack/plugin/sql/sql-cli/src/test/java/org/elasticsearch/xpack/sql/cli/command/BuiltinCommandTests.java index 6c935885662a4..0d809f940c820 100644 --- a/x-pack/plugin/sql/sql-cli/src/test/java/org/elasticsearch/xpack/sql/cli/command/BuiltinCommandTests.java +++ b/x-pack/plugin/sql/sql-cli/src/test/java/org/elasticsearch/xpack/sql/cli/command/BuiltinCommandTests.java @@ -44,20 +44,20 @@ public void testFetchSeparator() throws Exception { CliSession cliSession = new CliSession(httpClient); FetchSeparatorCliCommand cliCommand = new FetchSeparatorCliCommand(); assertFalse(cliCommand.handle(testTerminal, cliSession, "fetch")); - assertEquals("", cliSession.getFetchSeparator()); + assertEquals("", cliSession.cfg().getFetchSeparator()); assertTrue(cliCommand.handle(testTerminal, cliSession, "fetch_separator = \"foo\"")); - assertEquals("foo", cliSession.getFetchSeparator()); + assertEquals("foo", cliSession.cfg().getFetchSeparator()); assertEquals("fetch separator set to \"foo\"", testTerminal.toString()); testTerminal.clear(); assertTrue(cliCommand.handle(testTerminal, cliSession, "fetch_separator=\"bar\"")); - assertEquals("bar", cliSession.getFetchSeparator()); + assertEquals("bar", cliSession.cfg().getFetchSeparator()); assertEquals("fetch separator set to \"bar\"", testTerminal.toString()); testTerminal.clear(); assertTrue(cliCommand.handle(testTerminal, cliSession, "fetch separator=\"baz\"")); - assertEquals("baz", cliSession.getFetchSeparator()); + 
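LenientCliCommand and the other command classes in this patch all extend AbstractCliCommand, which the patch never touches and therefore never shows. Its contract, inferred from the subclass constructors and their doHandle overrides, is roughly the sketch below; the whole-line matching behavior and the CliCommand interface shape are assumptions.

import java.util.regex.Matcher;
import java.util.regex.Pattern;

// Sketch of the base class the new command plugs into; inferred, not quoted from source.
public abstract class AbstractCliCommand implements CliCommand {

    protected final Pattern pattern;

    protected AbstractCliCommand(Pattern pattern) {
        this.pattern = pattern;
    }

    @Override
    public boolean handle(CliTerminal terminal, CliSession cliSession, String line) {
        Matcher m = pattern.matcher(line);
        // assumption: the whole line must match before the subclass is consulted
        return m.matches() && doHandle(terminal, cliSession, m, line);
    }

    protected abstract boolean doHandle(CliTerminal terminal, CliSession cliSession, Matcher m, String line);
}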
assertEquals("baz", cliSession.cfg().getFetchSeparator()); assertEquals("fetch separator set to \"baz\"", testTerminal.toString()); verifyNoMoreInteractions(httpClient); } @@ -68,21 +68,21 @@ public void testFetchSize() throws Exception { CliSession cliSession = new CliSession(httpClient); FetchSizeCliCommand cliCommand = new FetchSizeCliCommand(); assertFalse(cliCommand.handle(testTerminal, cliSession, "fetch")); - assertEquals(1000L, cliSession.getFetchSize()); + assertEquals(1000L, cliSession.cfg().getFetchSize()); assertTrue(cliCommand.handle(testTerminal, cliSession, "fetch_size = \"foo\"")); - assertEquals(1000L, cliSession.getFetchSize()); + assertEquals(1000L, cliSession.cfg().getFetchSize()); assertEquals("Invalid fetch size [\"foo\"]", testTerminal.toString()); testTerminal.clear(); assertTrue(cliCommand.handle(testTerminal, cliSession, "fetch_size = 10")); - assertEquals(10L, cliSession.getFetchSize()); + assertEquals(10L, cliSession.cfg().getFetchSize()); assertEquals("fetch size set to 10", testTerminal.toString()); testTerminal.clear(); assertTrue(cliCommand.handle(testTerminal, cliSession, "fetch_size = -10")); - assertEquals(10L, cliSession.getFetchSize()); + assertEquals(10L, cliSession.cfg().getFetchSize()); assertEquals("Invalid fetch size [-10]. Must be > 0.", testTerminal.toString()); verifyNoMoreInteractions(httpClient); } @@ -98,4 +98,21 @@ public void testPrintLogo() throws Exception { verifyNoMoreInteractions(httpClient); } + public void testLenient() { + TestTerminal testTerminal = new TestTerminal(); + HttpClient httpClient = mock(HttpClient.class); + CliSession cliSession = new CliSession(httpClient); + LenientCliCommand cliCommand = new LenientCliCommand(); + assertFalse(cliCommand.handle(testTerminal, cliSession, "lenient")); + assertEquals(false, cliSession.cfg().isLenient()); + assertTrue(cliCommand.handle(testTerminal, cliSession, "lenient = true")); + assertEquals(true, cliSession.cfg().isLenient()); + assertEquals("lenient set to true", testTerminal.toString()); + testTerminal.clear(); + assertTrue(cliCommand.handle(testTerminal, cliSession, "lenient = false")); + assertEquals(false, cliSession.cfg().isLenient()); + assertEquals("lenient set to false", testTerminal.toString()); + testTerminal.clear(); + } + } diff --git a/x-pack/plugin/sql/sql-cli/src/test/java/org/elasticsearch/xpack/sql/cli/command/ServerQueryCliCommandTests.java b/x-pack/plugin/sql/sql-cli/src/test/java/org/elasticsearch/xpack/sql/cli/command/ServerQueryCliCommandTests.java index e005e9f668ff9..bc1eb75bd9a76 100644 --- a/x-pack/plugin/sql/sql-cli/src/test/java/org/elasticsearch/xpack/sql/cli/command/ServerQueryCliCommandTests.java +++ b/x-pack/plugin/sql/sql-cli/src/test/java/org/elasticsearch/xpack/sql/cli/command/ServerQueryCliCommandTests.java @@ -32,11 +32,11 @@ public void testExceptionHandling() throws Exception { TestTerminal testTerminal = new TestTerminal(); HttpClient client = mock(HttpClient.class); CliSession cliSession = new CliSession(client); - when(client.basicQuery("blah", 1000)).thenThrow(new SQLException("test exception")); + when(client.basicQuery("blah", 1000, false)).thenThrow(new SQLException("test exception")); ServerQueryCliCommand cliCommand = new ServerQueryCliCommand(); assertTrue(cliCommand.handle(testTerminal, cliSession, "blah")); assertEquals("Bad request [test exception]\n", testTerminal.toString()); - verify(client, times(1)).basicQuery(eq("blah"), eq(1000)); + verify(client, times(1)).basicQuery(eq("blah"), eq(1000), eq(false)); 
verifyNoMoreInteractions(client); } @@ -44,8 +44,8 @@ public void testOnePageQuery() throws Exception { TestTerminal testTerminal = new TestTerminal(); HttpClient client = mock(HttpClient.class); CliSession cliSession = new CliSession(client); - cliSession.setFetchSize(10); - when(client.basicQuery("test query", 10)).thenReturn(fakeResponse("", true, "foo")); + cliSession.cfg().setFetchSize(10); + when(client.basicQuery("test query", 10, false)).thenReturn(fakeResponse("", true, "foo")); ServerQueryCliCommand cliCommand = new ServerQueryCliCommand(); assertTrue(cliCommand.handle(testTerminal, cliSession, "test query")); assertEquals(""" @@ -53,7 +53,7 @@ public void testOnePageQuery() throws Exception { --------------- foo \s """, testTerminal.toString()); - verify(client, times(1)).basicQuery(eq("test query"), eq(10)); + verify(client, times(1)).basicQuery(eq("test query"), eq(10), eq(false)); verifyNoMoreInteractions(client); } @@ -61,8 +61,8 @@ public void testThreePageQuery() throws Exception { TestTerminal testTerminal = new TestTerminal(); HttpClient client = mock(HttpClient.class); CliSession cliSession = new CliSession(client); - cliSession.setFetchSize(10); - when(client.basicQuery("test query", 10)).thenReturn(fakeResponse("my_cursor1", true, "first")); + cliSession.cfg().setFetchSize(10); + when(client.basicQuery("test query", 10, false)).thenReturn(fakeResponse("my_cursor1", true, "first")); when(client.nextPage("my_cursor1")).thenReturn(fakeResponse("my_cursor2", false, "second")); when(client.nextPage("my_cursor2")).thenReturn(fakeResponse("", false, "third")); ServerQueryCliCommand cliCommand = new ServerQueryCliCommand(); @@ -74,7 +74,7 @@ public void testThreePageQuery() throws Exception { second \s third \s """, testTerminal.toString()); - verify(client, times(1)).basicQuery(eq("test query"), eq(10)); + verify(client, times(1)).basicQuery(eq("test query"), eq(10), eq(false)); verify(client, times(2)).nextPage(any()); verifyNoMoreInteractions(client); } @@ -83,10 +83,10 @@ public void testTwoPageQueryWithSeparator() throws Exception { TestTerminal testTerminal = new TestTerminal(); HttpClient client = mock(HttpClient.class); CliSession cliSession = new CliSession(client); - cliSession.setFetchSize(15); + cliSession.cfg().setFetchSize(15); // Set a separator - cliSession.setFetchSeparator("-----"); - when(client.basicQuery("test query", 15)).thenReturn(fakeResponse("my_cursor1", true, "first")); + cliSession.cfg().setFetchSeparator("-----"); + when(client.basicQuery("test query", 15, false)).thenReturn(fakeResponse("my_cursor1", true, "first")); when(client.nextPage("my_cursor1")).thenReturn(fakeResponse("", false, "second")); ServerQueryCliCommand cliCommand = new ServerQueryCliCommand(); assertTrue(cliCommand.handle(testTerminal, cliSession, "test query")); @@ -97,7 +97,7 @@ public void testTwoPageQueryWithSeparator() throws Exception { ----- second \s """, testTerminal.toString()); - verify(client, times(1)).basicQuery(eq("test query"), eq(15)); + verify(client, times(1)).basicQuery(eq("test query"), eq(15), eq(false)); verify(client, times(1)).nextPage(any()); verifyNoMoreInteractions(client); } @@ -106,8 +106,8 @@ public void testCursorCleanupOnError() throws Exception { TestTerminal testTerminal = new TestTerminal(); HttpClient client = mock(HttpClient.class); CliSession cliSession = new CliSession(client); - cliSession.setFetchSize(15); - when(client.basicQuery("test query", 15)).thenReturn(fakeResponse("my_cursor1", true, "first")); + 
cliSession.cfg().setFetchSize(15); + when(client.basicQuery("test query", 15, false)).thenReturn(fakeResponse("my_cursor1", true, "first")); when(client.nextPage("my_cursor1")).thenThrow(new SQLException("test exception")); when(client.queryClose("my_cursor1", Mode.CLI)).thenReturn(true); ServerQueryCliCommand cliCommand = new ServerQueryCliCommand(); @@ -118,7 +118,7 @@ public void testCursorCleanupOnError() throws Exception { first \s Bad request [test exception] """, testTerminal.toString()); - verify(client, times(1)).basicQuery(eq("test query"), eq(15)); + verify(client, times(1)).basicQuery(eq("test query"), eq(15), eq(false)); verify(client, times(1)).nextPage(any()); verify(client, times(1)).queryClose(eq("my_cursor1"), eq(Mode.CLI)); verifyNoMoreInteractions(client); diff --git a/x-pack/plugin/sql/sql-client/src/main/java/org/elasticsearch/xpack/sql/client/HttpClient.java b/x-pack/plugin/sql/sql-client/src/main/java/org/elasticsearch/xpack/sql/client/HttpClient.java index 8c14a8008540c..d3784b70a00e2 100644 --- a/x-pack/plugin/sql/sql-client/src/main/java/org/elasticsearch/xpack/sql/client/HttpClient.java +++ b/x-pack/plugin/sql/sql-client/src/main/java/org/elasticsearch/xpack/sql/client/HttpClient.java @@ -61,6 +61,10 @@ public MainResponse serverInfo() throws SQLException { } public SqlQueryResponse basicQuery(String query, int fetchSize) throws SQLException { + return basicQuery(query, fetchSize, CoreProtocol.FIELD_MULTI_VALUE_LENIENCY); + } + + public SqlQueryResponse basicQuery(String query, int fetchSize, boolean fieldMultiValueLeniency) throws SQLException { // TODO allow customizing the time zone - this is what session set/reset/get should be about // method called only from CLI SqlQueryRequest sqlRequest = new SqlQueryRequest( @@ -74,7 +78,7 @@ public SqlQueryResponse basicQuery(String query, int fetchSize) throws SQLExcept Boolean.FALSE, null, new RequestInfo(Mode.CLI, ClientVersion.CURRENT), - false, + fieldMultiValueLeniency, false, cfg.binaryCommunication() ); diff --git a/x-pack/plugin/sql/sql-client/src/test/java/org/elasticsearch/xpack/sql/client/HttpClientRequestTests.java b/x-pack/plugin/sql/sql-client/src/test/java/org/elasticsearch/xpack/sql/client/HttpClientRequestTests.java index 6b4648702fb0f..6ff8fc6946856 100644 --- a/x-pack/plugin/sql/sql-client/src/test/java/org/elasticsearch/xpack/sql/client/HttpClientRequestTests.java +++ b/x-pack/plugin/sql/sql-client/src/test/java/org/elasticsearch/xpack/sql/client/HttpClientRequestTests.java @@ -106,7 +106,7 @@ private void assertBinaryRequestForCLI(XContentType xContentType) throws URISynt prepareMockResponse(); try { - httpClient.basicQuery(query, fetchSize); + httpClient.basicQuery(query, fetchSize, randomBoolean()); } catch (SQLException e) { logger.info("Ignored SQLException", e); } From d465ee1be44fcc32783e8daa6eae8b5c87f3e3ff Mon Sep 17 00:00:00 2001 From: Rene Groeschke Date: Wed, 16 Feb 2022 12:10:54 +0100 Subject: [PATCH 32/37] Replace deprecated api in artifact transforms (#84015) `ArtifactAttributes.ARTIFACT_FORMAT` has been deprecated in favor of public available `ArtifactTypeDefinition.ARTIFACT_TYPE_ATTRIBUTE` --- .../InternalDistributionArchiveSetupPlugin.java | 7 ++++--- .../gradle/internal/JdkDownloadPlugin.java | 11 +++++------ .../gradle/internal/test/DistroTestPlugin.java | 3 +-- .../gradle/DistributionDownloadPlugin.java | 12 ++++++------ .../gradle/plugin/PluginBuildPlugin.java | 3 +-- .../gradle/test/YamlRestTestPlugin.java | 7 +++---- .../gradle/testclusters/ElasticsearchNode.java | 7 +++---- 
modules/reindex/build.gradle | 6 +++--- rest-api-spec/build.gradle | 2 +- x-pack/qa/repository-old-versions/build.gradle | 6 +++--- 10 files changed, 30 insertions(+), 34 deletions(-) diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalDistributionArchiveSetupPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalDistributionArchiveSetupPlugin.java index b23fb215bcffc..53b1fec01cd8f 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalDistributionArchiveSetupPlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/InternalDistributionArchiveSetupPlugin.java @@ -23,7 +23,6 @@ import java.io.File; import static org.elasticsearch.gradle.internal.conventions.GUtils.capitalize; -import static org.gradle.api.internal.artifacts.ArtifactAttributes.ARTIFACT_FORMAT; /** * Provides a DSL and common configurations to define different types of @@ -75,12 +74,14 @@ private void registerAndConfigureDistributionArchivesExtension(Project project) sub.getArtifacts().add(DEFAULT_CONFIGURATION_NAME, distributionArchive.getArchiveTask()); var extractedConfiguration = sub.getConfigurations().create(EXTRACTED_CONFIGURATION_NAME); extractedConfiguration.setCanBeResolved(false); - extractedConfiguration.getAttributes().attribute(ARTIFACT_FORMAT, ArtifactTypeDefinition.DIRECTORY_TYPE); + extractedConfiguration.getAttributes() + .attribute(ArtifactTypeDefinition.ARTIFACT_TYPE_ATTRIBUTE, ArtifactTypeDefinition.DIRECTORY_TYPE); sub.getArtifacts().add(EXTRACTED_CONFIGURATION_NAME, distributionArchive.getExpandedDistTask()); // The "composite" configuration is specifically used for resolving transformed artifacts in an included build var compositeConfiguration = sub.getConfigurations().create(COMPOSITE_CONFIGURATION_NAME); compositeConfiguration.setCanBeResolved(false); - compositeConfiguration.getAttributes().attribute(ARTIFACT_FORMAT, ArtifactTypeDefinition.DIRECTORY_TYPE); + compositeConfiguration.getAttributes() + .attribute(ArtifactTypeDefinition.ARTIFACT_TYPE_ATTRIBUTE, ArtifactTypeDefinition.DIRECTORY_TYPE); compositeConfiguration.getAttributes().attribute(Attribute.of("composite", Boolean.class), true); sub.getArtifacts().add(COMPOSITE_CONFIGURATION_NAME, distributionArchive.getArchiveTask()); sub.getTasks().register("extractedAssemble", task -> diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/JdkDownloadPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/JdkDownloadPlugin.java index a1008babb3987..2bc84b36c8fa1 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/JdkDownloadPlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/JdkDownloadPlugin.java @@ -19,7 +19,6 @@ import org.gradle.api.artifacts.repositories.IvyArtifactRepository; import org.gradle.api.artifacts.type.ArtifactTypeDefinition; import org.gradle.api.attributes.Attribute; -import org.gradle.api.internal.artifacts.ArtifactAttributes; import java.util.Arrays; @@ -40,10 +39,10 @@ public void apply(Project project) { project.getDependencies().getArtifactTypes().maybeCreate(ArtifactTypeDefinition.ZIP_TYPE); project.getDependencies().registerTransform(UnzipTransform.class, transformSpec -> { transformSpec.getFrom() - .attribute(ArtifactAttributes.ARTIFACT_FORMAT, ArtifactTypeDefinition.ZIP_TYPE) + .attribute(ArtifactTypeDefinition.ARTIFACT_TYPE_ATTRIBUTE, ArtifactTypeDefinition.ZIP_TYPE) 
.attribute(jdkAttribute, true); transformSpec.getTo() - .attribute(ArtifactAttributes.ARTIFACT_FORMAT, ArtifactTypeDefinition.DIRECTORY_TYPE) + .attribute(ArtifactTypeDefinition.ARTIFACT_TYPE_ATTRIBUTE, ArtifactTypeDefinition.DIRECTORY_TYPE) .attribute(jdkAttribute, true); transformSpec.parameters(parameters -> parameters.setTrimmedPrefixPattern(JDK_TRIMMED_PREFIX)); }); @@ -51,10 +50,10 @@ public void apply(Project project) { ArtifactTypeDefinition tarArtifactTypeDefinition = project.getDependencies().getArtifactTypes().maybeCreate("tar.gz"); project.getDependencies().registerTransform(SymbolicLinkPreservingUntarTransform.class, transformSpec -> { transformSpec.getFrom() - .attribute(ArtifactAttributes.ARTIFACT_FORMAT, tarArtifactTypeDefinition.getName()) + .attribute(ArtifactTypeDefinition.ARTIFACT_TYPE_ATTRIBUTE, tarArtifactTypeDefinition.getName()) .attribute(jdkAttribute, true); transformSpec.getTo() - .attribute(ArtifactAttributes.ARTIFACT_FORMAT, ArtifactTypeDefinition.DIRECTORY_TYPE) + .attribute(ArtifactTypeDefinition.ARTIFACT_TYPE_ATTRIBUTE, ArtifactTypeDefinition.DIRECTORY_TYPE) .attribute(jdkAttribute, true); transformSpec.parameters(parameters -> { parameters.setTrimmedPrefixPattern(JDK_TRIMMED_PREFIX); @@ -65,7 +64,7 @@ public void apply(Project project) { NamedDomainObjectContainer jdksContainer = project.container(Jdk.class, name -> { Configuration configuration = project.getConfigurations().create("jdk_" + name); configuration.setCanBeConsumed(false); - configuration.getAttributes().attribute(ArtifactAttributes.ARTIFACT_FORMAT, ArtifactTypeDefinition.DIRECTORY_TYPE); + configuration.getAttributes().attribute(ArtifactTypeDefinition.ARTIFACT_TYPE_ATTRIBUTE, ArtifactTypeDefinition.DIRECTORY_TYPE); configuration.getAttributes().attribute(jdkAttribute, true); Jdk jdk = new Jdk(name, configuration, project.getObjects()); configuration.defaultDependencies(dependencies -> { diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/DistroTestPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/DistroTestPlugin.java index 16e7328ea98ff..0c916951bcd1c 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/DistroTestPlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/test/DistroTestPlugin.java @@ -35,7 +35,6 @@ import org.gradle.api.artifacts.Configuration; import org.gradle.api.artifacts.dsl.DependencyHandler; import org.gradle.api.artifacts.type.ArtifactTypeDefinition; -import org.gradle.api.internal.artifacts.ArtifactAttributes; import org.gradle.api.plugins.JavaBasePlugin; import org.gradle.api.provider.Provider; import org.gradle.api.specs.Specs; @@ -313,7 +312,7 @@ private static Object convertPath( private static Configuration configureExamplePlugin(Project project) { Configuration examplePlugin = project.getConfigurations().create(EXAMPLE_PLUGIN_CONFIGURATION); - examplePlugin.getAttributes().attribute(ArtifactAttributes.ARTIFACT_FORMAT, ArtifactTypeDefinition.ZIP_TYPE); + examplePlugin.getAttributes().attribute(ArtifactTypeDefinition.ARTIFACT_TYPE_ATTRIBUTE, ArtifactTypeDefinition.ZIP_TYPE); DependencyHandler deps = project.getDependencies(); deps.add(EXAMPLE_PLUGIN_CONFIGURATION, deps.project(Map.of("path", ":plugins:analysis-icu", "configuration", "zip"))); return examplePlugin; diff --git a/build-tools/src/main/java/org/elasticsearch/gradle/DistributionDownloadPlugin.java 
b/build-tools/src/main/java/org/elasticsearch/gradle/DistributionDownloadPlugin.java index 9de5d161116f0..d08dc469e5ba5 100644 --- a/build-tools/src/main/java/org/elasticsearch/gradle/DistributionDownloadPlugin.java +++ b/build-tools/src/main/java/org/elasticsearch/gradle/DistributionDownloadPlugin.java @@ -18,7 +18,6 @@ import org.gradle.api.artifacts.dsl.DependencyHandler; import org.gradle.api.artifacts.repositories.IvyArtifactRepository; import org.gradle.api.artifacts.type.ArtifactTypeDefinition; -import org.gradle.api.internal.artifacts.ArtifactAttributes; import org.gradle.api.model.ObjectFactory; import org.gradle.api.provider.Property; import org.gradle.api.provider.Provider; @@ -61,14 +60,14 @@ public void setDockerAvailability(Provider dockerAvailability) { @Override public void apply(Project project) { project.getDependencies().registerTransform(UnzipTransform.class, transformSpec -> { - transformSpec.getFrom().attribute(ArtifactAttributes.ARTIFACT_FORMAT, ArtifactTypeDefinition.ZIP_TYPE); - transformSpec.getTo().attribute(ArtifactAttributes.ARTIFACT_FORMAT, ArtifactTypeDefinition.DIRECTORY_TYPE); + transformSpec.getFrom().attribute(ArtifactTypeDefinition.ARTIFACT_TYPE_ATTRIBUTE, ArtifactTypeDefinition.ZIP_TYPE); + transformSpec.getTo().attribute(ArtifactTypeDefinition.ARTIFACT_TYPE_ATTRIBUTE, ArtifactTypeDefinition.DIRECTORY_TYPE); }); ArtifactTypeDefinition tarArtifactTypeDefinition = project.getDependencies().getArtifactTypes().maybeCreate("tar.gz"); project.getDependencies().registerTransform(SymbolicLinkPreservingUntarTransform.class, transformSpec -> { - transformSpec.getFrom().attribute(ArtifactAttributes.ARTIFACT_FORMAT, tarArtifactTypeDefinition.getName()); - transformSpec.getTo().attribute(ArtifactAttributes.ARTIFACT_FORMAT, ArtifactTypeDefinition.DIRECTORY_TYPE); + transformSpec.getFrom().attribute(ArtifactTypeDefinition.ARTIFACT_TYPE_ATTRIBUTE, tarArtifactTypeDefinition.getName()); + transformSpec.getTo().attribute(ArtifactTypeDefinition.ARTIFACT_TYPE_ATTRIBUTE, ArtifactTypeDefinition.DIRECTORY_TYPE); }); setupResolutionsContainer(project); @@ -80,7 +79,8 @@ private void setupDistributionContainer(Project project, Property docke distributionsContainer = project.container(ElasticsearchDistribution.class, name -> { Configuration fileConfiguration = project.getConfigurations().create("es_distro_file_" + name); Configuration extractedConfiguration = project.getConfigurations().create(DISTRO_EXTRACTED_CONFIG_PREFIX + name); - extractedConfiguration.getAttributes().attribute(ArtifactAttributes.ARTIFACT_FORMAT, ArtifactTypeDefinition.DIRECTORY_TYPE); + extractedConfiguration.getAttributes() + .attribute(ArtifactTypeDefinition.ARTIFACT_TYPE_ATTRIBUTE, ArtifactTypeDefinition.DIRECTORY_TYPE); return new ElasticsearchDistribution( name, project.getObjects(), diff --git a/build-tools/src/main/java/org/elasticsearch/gradle/plugin/PluginBuildPlugin.java b/build-tools/src/main/java/org/elasticsearch/gradle/plugin/PluginBuildPlugin.java index 2dedd25c007f5..938f5e8c8ad25 100644 --- a/build-tools/src/main/java/org/elasticsearch/gradle/plugin/PluginBuildPlugin.java +++ b/build-tools/src/main/java/org/elasticsearch/gradle/plugin/PluginBuildPlugin.java @@ -32,7 +32,6 @@ import org.gradle.api.artifacts.Configuration; import org.gradle.api.artifacts.type.ArtifactTypeDefinition; import org.gradle.api.file.RegularFile; -import org.gradle.api.internal.artifacts.ArtifactAttributes; import org.gradle.api.plugins.BasePlugin; import org.gradle.api.plugins.JavaPlugin; import 
org.gradle.api.plugins.JavaPluginExtension; @@ -236,7 +235,7 @@ public Object doCall() { // also make the zip available as a configuration (used when depending on this project) Configuration configuration = project.getConfigurations().create("zip"); - configuration.getAttributes().attribute(ArtifactAttributes.ARTIFACT_FORMAT, ArtifactTypeDefinition.ZIP_TYPE); + configuration.getAttributes().attribute(ArtifactTypeDefinition.ARTIFACT_TYPE_ATTRIBUTE, ArtifactTypeDefinition.ZIP_TYPE); project.getArtifacts().add("zip", bundle); return bundle; diff --git a/build-tools/src/main/java/org/elasticsearch/gradle/test/YamlRestTestPlugin.java b/build-tools/src/main/java/org/elasticsearch/gradle/test/YamlRestTestPlugin.java index 59144576333f2..8e86973826830 100644 --- a/build-tools/src/main/java/org/elasticsearch/gradle/test/YamlRestTestPlugin.java +++ b/build-tools/src/main/java/org/elasticsearch/gradle/test/YamlRestTestPlugin.java @@ -24,7 +24,6 @@ import org.gradle.api.artifacts.dsl.DependencyHandler; import org.gradle.api.artifacts.type.ArtifactTypeDefinition; import org.gradle.api.attributes.Attribute; -import org.gradle.api.internal.artifacts.ArtifactAttributes; import org.gradle.api.plugins.JavaBasePlugin; import org.gradle.api.tasks.Copy; import org.gradle.api.tasks.SourceSet; @@ -53,16 +52,16 @@ public void apply(Project project) { project.getDependencies().getArtifactTypes().maybeCreate(ArtifactTypeDefinition.JAR_TYPE); project.getDependencies().registerTransform(UnzipTransform.class, transformSpec -> { transformSpec.getFrom() - .attribute(ArtifactAttributes.ARTIFACT_FORMAT, ArtifactTypeDefinition.JAR_TYPE) + .attribute(ArtifactTypeDefinition.ARTIFACT_TYPE_ATTRIBUTE, ArtifactTypeDefinition.JAR_TYPE) .attribute(restAttribute, true); transformSpec.getTo() - .attribute(ArtifactAttributes.ARTIFACT_FORMAT, ArtifactTypeDefinition.DIRECTORY_TYPE) + .attribute(ArtifactTypeDefinition.ARTIFACT_TYPE_ATTRIBUTE, ArtifactTypeDefinition.DIRECTORY_TYPE) .attribute(restAttribute, true); }); ConfigurationContainer configurations = project.getConfigurations(); Configuration restTestSpecs = configurations.create(REST_TEST_SPECS_CONFIGURATION_NAME); - restTestSpecs.getAttributes().attribute(ArtifactAttributes.ARTIFACT_FORMAT, ArtifactTypeDefinition.DIRECTORY_TYPE); + restTestSpecs.getAttributes().attribute(ArtifactTypeDefinition.ARTIFACT_TYPE_ATTRIBUTE, ArtifactTypeDefinition.DIRECTORY_TYPE); restTestSpecs.getAttributes().attribute(restAttribute, true); TaskProvider copyRestTestSpecs = project.getTasks().register("copyRestTestSpecs", Copy.class, t -> { diff --git a/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/ElasticsearchNode.java b/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/ElasticsearchNode.java index 3ef499d4ef814..a47807ae6d326 100644 --- a/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/ElasticsearchNode.java +++ b/build-tools/src/main/java/org/elasticsearch/gradle/testclusters/ElasticsearchNode.java @@ -37,7 +37,6 @@ import org.gradle.api.file.FileSystemOperations; import org.gradle.api.file.FileTree; import org.gradle.api.file.RegularFile; -import org.gradle.api.internal.artifacts.ArtifactAttributes; import org.gradle.api.internal.file.FileOperations; import org.gradle.api.logging.Logger; import org.gradle.api.logging.Logging; @@ -343,7 +342,7 @@ public void module(Provider module) { private void registerExtractedConfig(Provider pluginProvider) { Dependency pluginDependency = this.project.getDependencies().create(project.files(pluginProvider)); 
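For context on what these registrations do: an artifact transform such as the `UnzipTransform` used throughout these plugins is a Gradle `TransformAction` that converts an artifact from one format attribute to another. A simplified stand-in follows (not the actual Elasticsearch implementation, which for example also has a symlink-preserving tar variant):

```java
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.UncheckedIOException;
import java.nio.file.Files;
import java.util.zip.ZipEntry;
import java.util.zip.ZipInputStream;

import org.gradle.api.artifacts.transform.InputArtifact;
import org.gradle.api.artifacts.transform.TransformAction;
import org.gradle.api.artifacts.transform.TransformOutputs;
import org.gradle.api.artifacts.transform.TransformParameters;
import org.gradle.api.file.FileSystemLocation;
import org.gradle.api.provider.Provider;

// Simplified sketch: extract the input zip into a transform output directory.
public abstract class SketchUnzipTransform implements TransformAction<TransformParameters.None> {

    @InputArtifact
    public abstract Provider<FileSystemLocation> getInputArtifact();

    @Override
    public void transform(TransformOutputs outputs) {
        File zip = getInputArtifact().get().getAsFile();
        File outDir = outputs.dir(zip.getName() + "-expanded");
        try (ZipInputStream in = new ZipInputStream(new FileInputStream(zip))) {
            for (ZipEntry entry; (entry = in.getNextEntry()) != null; ) {
                File target = new File(outDir, entry.getName());
                if (entry.isDirectory()) {
                    target.mkdirs();
                } else {
                    target.getParentFile().mkdirs();
                    Files.copy(in, target.toPath()); // sketch: no zip-slip hardening
                }
            }
        } catch (IOException e) {
            throw new UncheckedIOException(e);
        }
    }
}
```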
Configuration extractedConfig = project.getConfigurations().detachedConfiguration(pluginDependency); - extractedConfig.getAttributes().attribute(ArtifactAttributes.ARTIFACT_FORMAT, ArtifactTypeDefinition.DIRECTORY_TYPE); + extractedConfig.getAttributes().attribute(ArtifactTypeDefinition.ARTIFACT_TYPE_ATTRIBUTE, ArtifactTypeDefinition.DIRECTORY_TYPE); extractedConfig.getAttributes().attribute(bundleAttribute, true); pluginAndModuleConfiguration.from(extractedConfig); } @@ -353,10 +352,10 @@ private void configureArtifactTransforms() { project.getDependencies().getArtifactTypes().maybeCreate(ArtifactTypeDefinition.ZIP_TYPE); project.getDependencies().registerTransform(UnzipTransform.class, transformSpec -> { transformSpec.getFrom() - .attribute(ArtifactAttributes.ARTIFACT_FORMAT, ArtifactTypeDefinition.ZIP_TYPE) + .attribute(ArtifactTypeDefinition.ARTIFACT_TYPE_ATTRIBUTE, ArtifactTypeDefinition.ZIP_TYPE) .attribute(bundleAttribute, true); transformSpec.getTo() - .attribute(ArtifactAttributes.ARTIFACT_FORMAT, ArtifactTypeDefinition.DIRECTORY_TYPE) + .attribute(ArtifactTypeDefinition.ARTIFACT_TYPE_ATTRIBUTE, ArtifactTypeDefinition.DIRECTORY_TYPE) .attribute(bundleAttribute, true); transformSpec.getParameters().setAsFiletreeOutput(true); }); diff --git a/modules/reindex/build.gradle b/modules/reindex/build.gradle index ac839731b7746..91eab7ca54852 100644 --- a/modules/reindex/build.gradle +++ b/modules/reindex/build.gradle @@ -111,8 +111,8 @@ if (Os.isFamily(Os.FAMILY_WINDOWS)) { * To avoid testing against too many old versions, always pick first and last version per major */ project.getDependencies().registerTransform(UnzipTransform.class, transformSpec -> { - transformSpec.getFrom().attribute(ArtifactAttributes.ARTIFACT_FORMAT, ArtifactTypeDefinition.ZIP_TYPE); - transformSpec.getTo().attribute(ArtifactAttributes.ARTIFACT_FORMAT, ArtifactTypeDefinition.DIRECTORY_TYPE); + transformSpec.getFrom().attribute(ArtifactTypeDefinition.ARTIFACT_TYPE_ATTRIBUTE, ArtifactTypeDefinition.ZIP_TYPE); + transformSpec.getTo().attribute(ArtifactTypeDefinition.ARTIFACT_TYPE_ATTRIBUTE, ArtifactTypeDefinition.DIRECTORY_TYPE); }); def versions = ['2', '1', '090'] @@ -123,7 +123,7 @@ if (Os.isFamily(Os.FAMILY_WINDOWS)) { } versions.each { version -> def oldEsDependency = configurations['es' + version] - oldEsDependency.getAttributes().attribute(ArtifactAttributes.ARTIFACT_FORMAT, ArtifactTypeDefinition.DIRECTORY_TYPE); + oldEsDependency.getAttributes().attribute(ArtifactTypeDefinition.ARTIFACT_TYPE_ATTRIBUTE, ArtifactTypeDefinition.DIRECTORY_TYPE); TaskProvider fixture = tasks.register("oldEs${version}Fixture", AntFixture) { dependsOn project.configurations.oldesFixture, jdks.legacy, oldEsDependency executable = "${BuildParams.runtimeJavaHome}/bin/java" diff --git a/rest-api-spec/build.gradle b/rest-api-spec/build.gradle index f49cde09e3eee..8a8630c9846c6 100644 --- a/rest-api-spec/build.gradle +++ b/rest-api-spec/build.gradle @@ -23,7 +23,7 @@ configurations { // easy and efficient basicRestSpecs { attributes { - attribute(org.gradle.api.internal.artifacts.ArtifactAttributes.ARTIFACT_FORMAT, ArtifactTypeDefinition.DIRECTORY_TYPE) + attribute(ArtifactTypeDefinition.ARTIFACT_TYPE_ATTRIBUTE, ArtifactTypeDefinition.DIRECTORY_TYPE) } } } diff --git a/x-pack/qa/repository-old-versions/build.gradle b/x-pack/qa/repository-old-versions/build.gradle index 2581a4e5736ce..fc2e96ec66045 100644 --- a/x-pack/qa/repository-old-versions/build.gradle +++ b/x-pack/qa/repository-old-versions/build.gradle @@ -58,8 +58,8 @@ if 
(Os.isFamily(Os.FAMILY_WINDOWS)) { * To avoid testing against too many old versions, always pick first and last version per major */ project.getDependencies().registerTransform(UnzipTransform.class, transformSpec -> { - transformSpec.getFrom().attribute(ArtifactAttributes.ARTIFACT_FORMAT, ArtifactTypeDefinition.ZIP_TYPE); - transformSpec.getTo().attribute(ArtifactAttributes.ARTIFACT_FORMAT, ArtifactTypeDefinition.DIRECTORY_TYPE); + transformSpec.getFrom().attribute(ArtifactTypeDefinition.ARTIFACT_TYPE_ATTRIBUTE, ArtifactTypeDefinition.ZIP_TYPE); + transformSpec.getTo().attribute(ArtifactTypeDefinition.ARTIFACT_TYPE_ATTRIBUTE, ArtifactTypeDefinition.DIRECTORY_TYPE); }); for (String versionString : ['5.0.0', '5.6.16', '6.0.0', '6.8.20']) { @@ -70,7 +70,7 @@ if (Os.isFamily(Os.FAMILY_WINDOWS)) { String configName = "es${versionNoDots}" def config = configurations.create(configName) - config.getAttributes().attribute(ArtifactAttributes.ARTIFACT_FORMAT, ArtifactTypeDefinition.DIRECTORY_TYPE); + config.getAttributes().attribute(ArtifactTypeDefinition.ARTIFACT_TYPE_ATTRIBUTE, ArtifactTypeDefinition.DIRECTORY_TYPE); dependencies.add(configName, artifact) String repoLocation = "${buildDir}/cluster/shared/repo/${versionNoDots}" From 15e522ec414b482a4f347092128cb22b1113cc07 Mon Sep 17 00:00:00 2001 From: David Roberts Date: Wed, 16 Feb 2022 12:41:19 +0000 Subject: [PATCH 33/37] [ML] Fix license feature test cleanup (#84020) Testing license features can cause the .ml-stats index to be created some time after the tests complete. This can cause the post-test cleanup to fail. This change fixes the problem by waiting for pending tasks in the first stage cleanup, before the main cleanup methods run in the base classes. Fixes #83923 --- .../ml/integration/TestFeatureLicenseTrackingIT.java | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/TestFeatureLicenseTrackingIT.java b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/TestFeatureLicenseTrackingIT.java index 9651115c658ee..ac2416c899028 100644 --- a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/TestFeatureLicenseTrackingIT.java +++ b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/TestFeatureLicenseTrackingIT.java @@ -31,6 +31,7 @@ import org.elasticsearch.xpack.core.ml.job.config.JobState; import org.elasticsearch.xpack.ml.MachineLearning; import org.elasticsearch.xpack.ml.MlSingleNodeTestCase; +import org.elasticsearch.xpack.ml.support.BaseMlIntegTestCase; import org.junit.After; import java.time.ZonedDateTime; @@ -57,7 +58,7 @@ public class TestFeatureLicenseTrackingIT extends MlSingleNodeTestCase { private final Set createdPipelines = new HashSet<>(); @After - public void cleanup() { + public void cleanup() throws Exception { for (String pipeline : createdPipelines) { try { client().execute(DeletePipelineAction.INSTANCE, new DeletePipelineRequest(pipeline)).actionGet(); @@ -65,6 +66,9 @@ public void cleanup() { logger.warn(() -> new ParameterizedMessage("error cleaning up pipeline [{}]", pipeline), ex); } } + // Some of the tests have async side effects. We need to wait for these to complete before continuing + // the cleanup, otherwise unexpected indices may get created during the cleanup process. 
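The fix itself is the single added statement just below; conceptually, such a helper simply polls until outstanding tasks drain. A framework-agnostic sketch of the idea (the real code delegates to `BaseMlIntegTestCase.waitForPendingTasks`, whose task filtering may differ):

```java
import java.util.function.IntSupplier;

// Illustrative only: poll a count of outstanding tasks until it reaches zero
// or a deadline passes. The real helper inspects Elasticsearch's task
// management APIs rather than a generic supplier.
final class PendingTaskWaiter {
    static void waitForPendingTasks(IntSupplier pendingTaskCount, long timeoutMillis) throws InterruptedException {
        long deadline = System.currentTimeMillis() + timeoutMillis;
        while (pendingTaskCount.getAsInt() > 0) {
            if (System.currentTimeMillis() > deadline) {
                throw new AssertionError("timed out waiting for pending tasks");
            }
            Thread.sleep(100); // brief back-off between polls
        }
    }
}
```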
+ BaseMlIntegTestCase.waitForPendingTasks(client()); } public void testFeatureTrackingAnomalyJob() throws Exception { @@ -125,7 +129,7 @@ public void testFeatureTrackingInferenceModelPipeline() throws Exception { .setInferenceConfig(new ClassificationConfig(3)) .setParsedDefinition( new TrainedModelDefinition.Builder().setPreProcessors( - Arrays.asList(new OneHotEncoding("other.categorical", oneHotEncoding, false)) + List.of(new OneHotEncoding("other.categorical", oneHotEncoding, false)) ).setTrainedModel(buildClassification(true)) ) .build(); From e7ca5324a0f6326b579a6f6cc9f8213957ffce43 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Wed, 16 Feb 2022 14:11:42 +0100 Subject: [PATCH 34/37] Move InferenceConfigUpdate under VersionedNamedWriteable (#84022) In #81809 we introduced a mechanism to check serializability of search requests to earlier-version nodes already on the coordinating node. This requires knowledge of the version in which NamedWriteable classes were introduced, which is why we started moving classes that are used inside the search request under the VersionedNamedWriteable interface to make sure future additions implement the method that provides the version information. This change moves InferenceConfigUpdate and its implementing classes under that sub-interface. I have used the versions in which they were first released, based on the pull requests that introduced the classes. --- .../trainedmodel/ClassificationConfigUpdate.java | 7 +++++++ .../inference/trainedmodel/EmptyConfigUpdate.java | 6 ++++++ .../trainedmodel/FillMaskConfigUpdate.java | 7 +++++++ .../trainedmodel/InferenceConfigUpdate.java | 4 ++-- .../ml/inference/trainedmodel/NerConfigUpdate.java | 7 +++++++ .../trainedmodel/PassThroughConfigUpdate.java | 7 +++++++ .../trainedmodel/RegressionConfigUpdate.java | 8 ++++++++ .../inference/trainedmodel/ResultsFieldUpdate.java | 6 ++++++ .../TextClassificationConfigUpdate.java | 13 ++++++++++--- .../trainedmodel/TextEmbeddingConfigUpdate.java | 7 +++++++ .../ZeroShotClassificationConfigUpdate.java | 7 +++++++ 11 files changed, 74 insertions(+), 5 deletions(-) diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ClassificationConfigUpdate.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ClassificationConfigUpdate.java index b295ba4122580..9a26d10702e5e 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ClassificationConfigUpdate.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ClassificationConfigUpdate.java @@ -6,6 +6,7 @@ */ package org.elasticsearch.xpack.core.ml.inference.trainedmodel; +import org.elasticsearch.Version; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xcontent.ObjectParser; @@ -121,6 +122,7 @@ public String getTopClassesResultsField() { return topClassesResultsField; } + @Override public String getResultsField() { return resultsField; } @@ -246,6 +248,11 @@ boolean isNoop(ClassificationConfig originalConfig) { && (predictionFieldType == null || predictionFieldType.equals(originalConfig.getPredictionFieldType())); } + @Override + public Version getMinimalSupportedVersion() { + return Version.V_7_8_0; + } + public static class Builder implements InferenceConfigUpdate.Builder { private Integer numTopClasses; private String topClassesResultsField; diff
--git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/EmptyConfigUpdate.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/EmptyConfigUpdate.java index 9696afe098885..58fb78d53b8b9 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/EmptyConfigUpdate.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/EmptyConfigUpdate.java @@ -65,6 +65,11 @@ public int hashCode() { return EmptyConfigUpdate.class.hashCode(); } + @Override + public Version getMinimalSupportedVersion() { + return Version.V_7_9_0; + } + public static class Builder implements InferenceConfigUpdate.Builder { @Override @@ -72,6 +77,7 @@ public Builder setResultsField(String resultsField) { return this; } + @Override public EmptyConfigUpdate build() { return new EmptyConfigUpdate(); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/FillMaskConfigUpdate.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/FillMaskConfigUpdate.java index 78274da77ab10..da0beaa6785d8 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/FillMaskConfigUpdate.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/FillMaskConfigUpdate.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.core.ml.inference.trainedmodel; +import org.elasticsearch.Version; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xcontent.ObjectParser; @@ -101,6 +102,11 @@ public String getName() { return NAME; } + @Override + public Version getMinimalSupportedVersion() { + return Version.V_8_0_0; + } + @Override public InferenceConfig apply(InferenceConfig originalConfig) { if (originalConfig instanceof FillMaskConfig == false) { @@ -191,6 +197,7 @@ public FillMaskConfigUpdate.Builder setTokenizationUpdate(TokenizationUpdate tok return this; } + @Override public FillMaskConfigUpdate build() { return new FillMaskConfigUpdate(this.numTopClasses, this.resultsField, this.tokenizationUpdate); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/InferenceConfigUpdate.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/InferenceConfigUpdate.java index b00ee7134bac9..30ecac00a3b80 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/InferenceConfigUpdate.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/InferenceConfigUpdate.java @@ -6,7 +6,7 @@ */ package org.elasticsearch.xpack.core.ml.inference.trainedmodel; -import org.elasticsearch.common.io.stream.NamedWriteable; +import org.elasticsearch.common.io.stream.VersionedNamedWriteable; import org.elasticsearch.xpack.core.ml.inference.TrainedModelConfig; import org.elasticsearch.xpack.core.ml.inference.results.WarningInferenceResults; import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; @@ -15,7 +15,7 @@ import java.util.HashSet; import java.util.Set; -public interface InferenceConfigUpdate extends NamedWriteable { +public interface InferenceConfigUpdate extends VersionedNamedWriteable { Set RESERVED_ML_FIELD_NAMES = new HashSet<>( Arrays.asList(WarningInferenceResults.WARNING.getPreferredName(), 
TrainedModelConfig.MODEL_ID.getPreferredName()) ); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/NerConfigUpdate.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/NerConfigUpdate.java index f3dbd03dcbf3b..d52463eb092c6 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/NerConfigUpdate.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/NerConfigUpdate.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.core.ml.inference.trainedmodel; +import org.elasticsearch.Version; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xcontent.ObjectParser; @@ -144,6 +145,11 @@ public int hashCode() { return Objects.hash(resultsField, tokenizationUpdate); } + @Override + public Version getMinimalSupportedVersion() { + return Version.V_8_0_0; + } + public static class Builder implements InferenceConfigUpdate.Builder { private String resultsField; private TokenizationUpdate tokenizationUpdate; @@ -159,6 +165,7 @@ public NerConfigUpdate.Builder setTokenizationUpdate(TokenizationUpdate tokeniza return this; } + @Override public NerConfigUpdate build() { return new NerConfigUpdate(resultsField, tokenizationUpdate); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/PassThroughConfigUpdate.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/PassThroughConfigUpdate.java index a317e3c6c9e89..92bfe1bf9ea79 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/PassThroughConfigUpdate.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/PassThroughConfigUpdate.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.core.ml.inference.trainedmodel; +import org.elasticsearch.Version; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xcontent.ObjectParser; @@ -146,6 +147,11 @@ public int hashCode() { return Objects.hash(resultsField, tokenizationUpdate); } + @Override + public Version getMinimalSupportedVersion() { + return Version.V_8_0_0; + } + public static class Builder implements InferenceConfigUpdate.Builder { private String resultsField; private TokenizationUpdate tokenizationUpdate; @@ -161,6 +167,7 @@ public PassThroughConfigUpdate.Builder setTokenizationUpdate(TokenizationUpdate return this; } + @Override public PassThroughConfigUpdate build() { return new PassThroughConfigUpdate(this.resultsField, tokenizationUpdate); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/RegressionConfigUpdate.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/RegressionConfigUpdate.java index 2400bc1c670a8..c655c391317fb 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/RegressionConfigUpdate.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/RegressionConfigUpdate.java @@ -6,6 +6,7 @@ */ package org.elasticsearch.xpack.core.ml.inference.trainedmodel; +import org.elasticsearch.Version; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; 
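The same few-line addition recurs in each of the config-update classes in this patch, so the shape of the contract is worth seeing in isolation. A minimal sketch (the class name, writeable name, and chosen version here are illustrative, not part of the actual change):

```java
import java.io.IOException;

import org.elasticsearch.Version;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.VersionedNamedWriteable;

// Sketch of a VersionedNamedWriteable: besides the usual name and wire
// serialization, it declares the oldest node version able to read it, which
// lets the coordinating node reject requests bound for too-old nodes early.
public class ExampleConfigUpdate implements VersionedNamedWriteable {
    public static final String NAME = "example_config_update";

    @Override
    public String getWriteableName() {
        return NAME;
    }

    @Override
    public Version getMinimalSupportedVersion() {
        return Version.V_8_0_0; // first release that understands this writeable
    }

    @Override
    public void writeTo(StreamOutput out) throws IOException {
        // write this object's fields to the stream here
    }
}
```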
import org.elasticsearch.xcontent.ObjectParser; @@ -84,6 +85,7 @@ public Integer getNumTopFeatureImportanceValues() { return numTopFeatureImportanceValues; } + @Override public String getResultsField() { return resultsField; } @@ -109,6 +111,11 @@ public String getName() { return NAME.getPreferredName(); } + @Override + public Version getMinimalSupportedVersion() { + return Version.V_7_8_0; + } + @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); @@ -186,6 +193,7 @@ public Builder setNumTopFeatureImportanceValues(Integer numTopFeatureImportanceV return this; } + @Override public RegressionConfigUpdate build() { return new RegressionConfigUpdate(resultsField, numTopFeatureImportanceValues); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ResultsFieldUpdate.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ResultsFieldUpdate.java index 2266fb5ca3e44..0eaf101c25c8e 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ResultsFieldUpdate.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ResultsFieldUpdate.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.core.ml.inference.trainedmodel; +import org.elasticsearch.Version; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; @@ -68,6 +69,11 @@ public String getWriteableName() { return NAME; } + @Override + public Version getMinimalSupportedVersion() { + return Version.V_7_9_0; + } + @Override public void writeTo(StreamOutput out) throws IOException { out.writeString(resultsField); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextClassificationConfigUpdate.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextClassificationConfigUpdate.java index 72a0858117e61..a62f9e3a197fb 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextClassificationConfigUpdate.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextClassificationConfigUpdate.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.core.ml.inference.trainedmodel; +import org.elasticsearch.Version; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xcontent.ObjectParser; @@ -21,10 +22,10 @@ import java.util.Map; import java.util.Objects; +import static org.elasticsearch.xpack.core.ml.inference.trainedmodel.NlpConfig.CLASSIFICATION_LABELS; +import static org.elasticsearch.xpack.core.ml.inference.trainedmodel.NlpConfig.NUM_TOP_CLASSES; +import static org.elasticsearch.xpack.core.ml.inference.trainedmodel.NlpConfig.RESULTS_FIELD; import static org.elasticsearch.xpack.core.ml.inference.trainedmodel.NlpConfig.TOKENIZATION; -import static org.elasticsearch.xpack.core.ml.inference.trainedmodel.TextClassificationConfig.CLASSIFICATION_LABELS; -import static org.elasticsearch.xpack.core.ml.inference.trainedmodel.TextClassificationConfig.NUM_TOP_CLASSES; -import static org.elasticsearch.xpack.core.ml.inference.trainedmodel.TextClassificationConfig.RESULTS_FIELD; public class TextClassificationConfigUpdate extends NlpConfigUpdate 
implements NamedXContentObject { @@ -96,6 +97,11 @@ public String getName() { return NAME; } + @Override + public Version getMinimalSupportedVersion() { + return Version.V_8_0_0; + } + @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); @@ -237,6 +243,7 @@ public TextClassificationConfigUpdate.Builder setTokenizationUpdate(Tokenization return this; } + @Override public TextClassificationConfigUpdate build() { return new TextClassificationConfigUpdate( this.classificationLabels, diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextEmbeddingConfigUpdate.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextEmbeddingConfigUpdate.java index 1ca7d04fb1eee..589b71bd631d0 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextEmbeddingConfigUpdate.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/TextEmbeddingConfigUpdate.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.core.ml.inference.trainedmodel; +import org.elasticsearch.Version; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xcontent.ObjectParser; @@ -95,6 +96,11 @@ public String getName() { return NAME; } + @Override + public Version getMinimalSupportedVersion() { + return Version.V_8_0_0; + } + @Override public InferenceConfig apply(InferenceConfig originalConfig) { if ((resultsField == null || resultsField.equals(originalConfig.getResultsField())) && super.isNoop()) { @@ -160,6 +166,7 @@ public TextEmbeddingConfigUpdate.Builder setTokenizationUpdate(TokenizationUpdat return this; } + @Override public TextEmbeddingConfigUpdate build() { return new TextEmbeddingConfigUpdate(resultsField, tokenizationUpdate); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ZeroShotClassificationConfigUpdate.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ZeroShotClassificationConfigUpdate.java index edfc675fe6ec4..3cf9f8c8f8354 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ZeroShotClassificationConfigUpdate.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/trainedmodel/ZeroShotClassificationConfigUpdate.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.core.ml.inference.trainedmodel; +import org.elasticsearch.Version; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.core.Nullable; @@ -235,8 +236,14 @@ public Builder setTokenizationUpdate(TokenizationUpdate tokenizationUpdate) { return this; } + @Override public ZeroShotClassificationConfigUpdate build() { return new ZeroShotClassificationConfigUpdate(labels, isMultiLabel, resultsField, tokenizationUpdate); } } + + @Override + public Version getMinimalSupportedVersion() { + return Version.V_8_0_0; + } } From b44fcfbb3a8be9ece5d653118db0da14d1db96d3 Mon Sep 17 00:00:00 2001 From: Stuart Tettemer Date: Wed, 16 Feb 2022 07:31:25 -0600 Subject: [PATCH 35/37] Script: Fields API for Dense Vector (#83550) Adds the fields API for `dense_vector` field mapper. Adds a `DenseVector` interface for the value type. 
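As a cut-down illustration of the interface's shape before the full summary below (the shipped `DenseVector.java` appears later in this patch; this sketch is not it), the boxed `List<Number>` overloads can delegate onto the primitive `float[]` paths through `default` methods:

```java
import java.util.List;

// Illustrative subset of the DenseVector contract: the List<Number> overload
// adapts its argument and reuses the float[] implementation.
interface DenseVectorSketch {
    float[] getVector();

    default double dotProduct(float[] queryVector) {
        double sum = 0;
        float[] vector = getVector();
        for (int i = 0; i < queryVector.length; i++) {
            sum += queryVector[i] * vector[i];
        }
        return sum;
    }

    default double dotProduct(List<Number> queryVector) {
        float[] copy = new float[queryVector.size()];
        for (int i = 0; i < copy.length; i++) {
            copy[i] = queryVector.get(i).floatValue();
        }
        return dotProduct(copy);
    }
}
```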
Implemented by: * `KnnDenseVector` which wraps a decoded float array from `VectorValues` * `BinaryDenseVector` which lazily decodes a `BytesRef` from `BinaryDocValues` The vector operations have moved into those implementations from `BinaryDenseVectorScriptDocValues.java` and `KnnDenseVectorScriptDocValues.java`, respectively. The `DenseVector` API is: ``` float getMagnitude(); double dotProduct(float[] | List<Number>); double l1Norm(float[] | List<Number>); double l2Norm(float[] | List<Number>); float[] getVector(); int dims(); boolean isEmpty(); // does the value exist int size(); // 0 if isEmpty(), 1 otherwise Iterator iterator() ``` `dotProduct`, `l1Norm` and `l2Norm` take a `float[]` or a `List<Number>` via a delegating `default` method on the `DenseVector` interface. The `DenseVectorDocValuesField` abstract class contains two getter APIs. It is implemented by `KnnDenseVectorDocValuesField` and `BinaryDenseVectorDocValuesField`. ``` DenseVector get() DenseVector get(DenseVector defaultValue) ``` The `get()` method is included because there isn't a good default dense vector, so that API returns an empty `DenseVector` which throws an `IllegalArgumentException` for all method calls other than `isEmpty()`, `size()` and `iterator()`. The empty dense vector will always be `DenseVector.EMPTY` in case users want to use equality checks. Refs: #79105 --- docs/changelog/83550.yaml | 5 + .../60_knn_and_binary_dv_fields_api.yml | 848 ++++++++++++++++++ .../vectors/query/BinaryDenseVector.java | 141 +++ .../BinaryDenseVectorDocValuesField.java | 70 ++ .../BinaryDenseVectorScriptDocValues.java | 119 --- .../xpack/vectors/query/DenseVector.java | 227 +++++ .../query/DenseVectorDocValuesField.java | 51 ++ .../query/DenseVectorScriptDocValues.java | 90 +- .../query/DocValuesWhitelistExtension.java | 5 +- .../xpack/vectors/query/KnnDenseVector.java | 109 +++ .../query/KnnDenseVectorDocValuesField.java | 79 ++ .../query/KnnDenseVectorScriptDocValues.java | 122 --- .../xpack/vectors/query/ScoreScriptUtils.java | 34 +- .../vectors/query/VectorDVLeafFieldData.java | 34 +- ...xt => org.elasticsearch.xpack.vectors.txt} | 37 + ...BinaryDenseVectorScriptDocValuesTests.java | 66 +- .../query/DenseVectorFunctionTests.java | 74 +- .../xpack/vectors/query/DenseVectorTests.java | 84 ++ .../KnnDenseVectorScriptDocValuesTests.java | 78 +- 19 files changed, 1883 insertions(+), 390 deletions(-) create mode 100644 docs/changelog/83550.yaml create mode 100644 x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/vectors/60_knn_and_binary_dv_fields_api.yml create mode 100644 x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/query/BinaryDenseVector.java create mode 100644 x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/query/BinaryDenseVectorDocValuesField.java delete mode 100644 x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/query/BinaryDenseVectorScriptDocValues.java create mode 100644 x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/query/DenseVector.java create mode 100644 x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/query/DenseVectorDocValuesField.java create mode 100644 x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/query/KnnDenseVector.java create mode 100644 x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/query/KnnDenseVectorDocValuesField.java delete mode 100644 x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/query/KnnDenseVectorScriptDocValues.java rename
x-pack/plugin/vectors/src/main/resources/org/elasticsearch/xpack/vectors/query/{whitelist.txt => org.elasticsearch.xpack.vectors.txt} (52%) create mode 100644 x-pack/plugin/vectors/src/test/java/org/elasticsearch/xpack/vectors/query/DenseVectorTests.java diff --git a/docs/changelog/83550.yaml b/docs/changelog/83550.yaml new file mode 100644 index 0000000000000..51ab72f642fe6 --- /dev/null +++ b/docs/changelog/83550.yaml @@ -0,0 +1,5 @@ +pr: 83550 +summary: "Script: Fields API for Dense Vector" +area: Infra/Scripting +type: enhancement +issues: [] diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/vectors/60_knn_and_binary_dv_fields_api.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/vectors/60_knn_and_binary_dv_fields_api.yml new file mode 100644 index 0000000000000..b583a25738215 --- /dev/null +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/vectors/60_knn_and_binary_dv_fields_api.yml @@ -0,0 +1,848 @@ +--- +"size and isEmpty code works for any vector, including empty": + - skip: + version: " - 8.1.99" + reason: "Fields API for dense vector added in 8.2" + + - do: + indices.create: + index: test-index + body: + mappings: + properties: + bdv: + type: dense_vector + dims: 3 + knn: + type: dense_vector + dims: 3 + index: true + similarity: l2_norm + - do: + bulk: + index: test-index + refresh: true + body: + - '{"index": {"_id": "1"}}' + - '{"bdv": [1, 1, 1], "knn": [1, 1, 1]}' + - '{"index": {"_id": "2"}}' + - '{"bdv": [1, 1, 2], "knn": [1, 1, 2]}' + - '{"index": {"_id": "3"}}' + - '{"bdv": [1, 1, 3], "knn": [1, 1, 3]}' + - '{"index": {"_id": "missing_vector"}}' + - '{}' + + - do: + search: + body: + query: + script_score: + query: { match_all: {} } + script: + source: | + def dv = field(params.field).get(); + if (dv.isEmpty()) { + return dv.size(); + } + return dv.vector[2] * dv.size() + params: + field: bdv + + - match: { hits.hits.0._id: "3" } + - match: { hits.hits.0._score: 3 } + - match: { hits.hits.1._id: "2" } + - match: { hits.hits.1._score: 2 } + - match: { hits.hits.2._id: "1" } + - match: { hits.hits.2._score: 1 } + - match: { hits.hits.3._id: "missing_vector" } + - match: { hits.hits.3._score: 0 } + + - do: + search: + body: + query: + script_score: + query: { match_all: {} } + script: + source: | + def dv = field(params.field).get(); + if (dv.isEmpty()) { + return dv.size(); + } + return dv.vector[2] * dv.size() + params: + field: knn + + - match: { hits.hits.0._id: "3" } + - match: { hits.hits.0._score: 3 } + - match: { hits.hits.1._id: "2" } + - match: { hits.hits.1._score: 2 } + - match: { hits.hits.2._id: "1" } + - match: { hits.hits.2._score: 1 } + - match: { hits.hits.3._id: "missing_vector" } + - match: { hits.hits.3._score: 0 } + +--- +"null can be used for default value": + - skip: + version: " - 8.1.99" + reason: "Fields API for dense vector added in 8.2" + + - do: + indices.create: + index: test-index + body: + mappings: + properties: + bdv: + type: dense_vector + dims: 3 + knn: + type: dense_vector + dims: 3 + index: true + similarity: l2_norm + - do: + bulk: + index: test-index + refresh: true + body: + - '{"index": {"_id": "1"}}' + - '{"bdv": [1, 1, 1], "knn": [1, 1, 1]}' + - '{"index": {"_id": "2"}}' + - '{"bdv": [1, 1, 2], "knn": [1, 1, 2]}' + - '{"index": {"_id": "3"}}' + - '{"bdv": [1, 1, 3], "knn": [1, 1, 3]}' + - '{"index": {"_id": "missing_vector"}}' + - '{}' + + - do: + search: + body: + query: + script_score: + query: { match_all: {} } + script: + source: | + DenseVector dv = 
field(params.field).get(null); + if (dv == null) { + return 1; + } + return dv.vector[2]; + params: + field: bdv + + - match: { hits.hits.0._id: "3" } + - match: { hits.hits.0._score: 3 } + - match: { hits.hits.1._id: "2" } + - match: { hits.hits.1._score: 2 } + - match: { hits.hits.2._id: "1" } + - match: { hits.hits.2._score: 1 } + - match: { hits.hits.3._id: "missing_vector" } + - match: { hits.hits.3._score: 1 } + + - do: + search: + body: + query: + script_score: + query: { match_all: {} } + script: + source: | + DenseVector dv = field(params.field).get(null); + if (dv == null) { + return 1; + } + return dv.vector[2]; + params: + field: knn + + - match: { hits.hits.0._id: "3" } + - match: { hits.hits.0._score: 3 } + - match: { hits.hits.1._id: "2" } + - match: { hits.hits.1._score: 2 } + - match: { hits.hits.2._id: "1" } + - match: { hits.hits.2._score: 1 } + - match: { hits.hits.3._id: "missing_vector" } + - match: { hits.hits.3._score: 1 } + +--- +"empty dense vector throws for vector accesses": + - skip: + version: " - 8.1.99" + reason: "Fields API for dense vector added in 8.2" + + - do: + indices.create: + index: test-index + body: + mappings: + properties: + bdv: + type: dense_vector + dims: 3 + knn: + type: dense_vector + dims: 3 + index: true + similarity: l2_norm + - do: + bulk: + index: test-index + refresh: true + body: + - '{"index": {"_id": "1"}}' + - '{"bdv": [1, 1, 1], "knn": [1, 1, 1]}' + - '{"index": {"_id": "2"}}' + - '{"bdv": [1, 1, 2], "knn": [1, 1, 2]}' + - '{"index": {"_id": "3"}}' + - '{"bdv": [1, 1, 3], "knn": [1, 1, 3]}' + - '{"index": {"_id": "missing_vector"}}' + - '{}' + + - do: + catch: bad_request + search: + body: + query: + script_score: + query: { "bool": { "must_not": { "exists": { "field": "bdv" } } } } + script: + source: | + field(params.field).get().vector[2] + params: + field: bdv + + - match: { error.failed_shards.0.reason.caused_by.type: "illegal_argument_exception" } + - match: { error.failed_shards.0.reason.caused_by.reason: "Dense vector value missing for a field, use isEmpty() to check for a missing vector value" } + + - do: + catch: bad_request + search: + body: + query: + script_score: + query: { "bool": { "must_not": { "exists": { "field": "bdv" } } } } + script: + source: | + field(params.field).get().vector[2] + params: + field: knn + + - match: { error.failed_shards.0.reason.caused_by.type: "illegal_argument_exception" } + - match: { error.failed_shards.0.reason.caused_by.reason: "Dense vector value missing for a field, use isEmpty() to check for a missing vector value" } + + - do: + search: + body: + query: + script_score: + query: { "bool": { "must_not": { "exists": { "field": "bdv" } } } } + script: + source: | + float[] q = new float[1]; + q[0] = 3; + DenseVector dv = field(params.field).get(); + float score = 0; + try { score += dv.magnitude } catch (IllegalArgumentException e) { score += 10; } + try { score += dv.dotProduct(q) } catch (IllegalArgumentException e) { score += 200; } + try { score += dv.l1Norm(q) } catch (IllegalArgumentException e) { score += 3000; } + try { score += dv.l2Norm(q) } catch (IllegalArgumentException e) { score += 40000; } + try { score += dv.vector[0] } catch (IllegalArgumentException e) { score += 500000; } + try { score += dv.dims } catch (IllegalArgumentException e) { score += 6000000; } + return score; + params: + field: bdv + + - match: { hits.hits.0._id: "missing_vector" } + - match: { hits.hits.0._score: 6543210 } + + - do: + search: + body: + query: + script_score: + query: { "bool": { 
"must_not": { "exists": { "field": "bdv" } } } } + script: + source: | + float[] q = new float[1]; + q[0] = 3; + DenseVector dv = field(params.field).get(); + float score = 0; + try { score += dv.magnitude } catch (IllegalArgumentException e) { score += 10; } + try { score += dv.dotProduct(q) } catch (IllegalArgumentException e) { score += 200; } + try { score += dv.l1Norm(q) } catch (IllegalArgumentException e) { score += 3000; } + try { score += dv.l2Norm(q) } catch (IllegalArgumentException e) { score += 40000; } + try { score += dv.cosineSimilarity(q) } catch (IllegalArgumentException e) { score += 200000; } + try { score += dv.vector[0] } catch (IllegalArgumentException e) { score += 500000; } + try { score += dv.dims } catch (IllegalArgumentException e) { score += 6000000; } + return score; + params: + field: knn + + - match: { hits.hits.0._id: "missing_vector" } + - match: { hits.hits.0._score: 6743210 } + +--- +"dot product works on dense vectors": + - skip: + version: " - 8.1.99" + reason: "Fields API for dense vector added in 8.2" + + - do: + indices.create: + index: test-index + body: + mappings: + properties: + bdv: + type: dense_vector + dims: 3 + knn: + type: dense_vector + dims: 3 + index: true + similarity: l2_norm + - do: + bulk: + index: test-index + refresh: true + body: + - '{"index": {"_id": "1"}}' + - '{"bdv": [1, 1, 1], "knn": [1, 1, 1]}' + - '{"index": {"_id": "2"}}' + - '{"bdv": [1, 1, 2], "knn": [1, 1, 2]}' + - '{"index": {"_id": "3"}}' + - '{"bdv": [1, 1, 3], "knn": [1, 1, 3]}' + - '{"index": {"_id": "missing_vector"}}' + - '{}' + + - do: + search: + body: + query: + script_score: + query: { "exists": { "field": "bdv" } } + script: + source: | + field(params.field).get().dotProduct(params.query) + params: + query: [4, 5, 6] + field: bdv + + - match: { hits.hits.0._id: "3" } + - match: { hits.hits.0._score: 27 } + - match: { hits.hits.1._id: "2" } + - match: { hits.hits.1._score: 21 } + - match: { hits.hits.2._id: "1" } + - match: { hits.hits.2._score: 15 } + + - do: + search: + body: + query: + script_score: + query: { "exists": { "field": "bdv" } } + script: + source: | + float[] query = new float[3]; + query[0] = 4; query[1] = 5; query[2] = 6; + field(params.field).get().dotProduct(query) + params: + field: bdv + + - match: { hits.hits.0._id: "3" } + - match: { hits.hits.0._score: 27 } + - match: { hits.hits.1._id: "2" } + - match: { hits.hits.1._score: 21 } + - match: { hits.hits.2._id: "1" } + - match: { hits.hits.2._score: 15 } + + - do: + search: + body: + query: + script_score: + query: { "exists": { "field": "bdv" } } + script: + source: | + field(params.field).get().dotProduct(params.query) + params: + query: [4, 5, 6] + field: knn + + - match: { hits.hits.0._id: "3" } + - match: { hits.hits.0._score: 27 } + - match: { hits.hits.1._id: "2" } + - match: { hits.hits.1._score: 21 } + - match: { hits.hits.2._id: "1" } + - match: { hits.hits.2._score: 15 } + + - do: + search: + body: + query: + script_score: + query: { "exists": { "field": "bdv" } } + script: + source: | + float[] query = new float[3]; + query[0] = 4; query[1] = 5; query[2] = 6; + field(params.field).get().dotProduct(query) + params: + field: knn + + - match: { hits.hits.0._id: "3" } + - match: { hits.hits.0._score: 27 } + - match: { hits.hits.1._id: "2" } + - match: { hits.hits.1._score: 21 } + - match: { hits.hits.2._id: "1" } + - match: { hits.hits.2._score: 15 } + +--- +"iterator over dense vector values": + - skip: + version: " - 8.1.99" + reason: "Fields API for dense vector added in 
8.2" + + - do: + indices.create: + index: test-index + body: + mappings: + properties: + bdv: + type: dense_vector + dims: 3 + knn: + type: dense_vector + dims: 3 + index: true + similarity: l2_norm + - do: + bulk: + index: test-index + refresh: true + body: + - '{"index": {"_id": "1"}}' + - '{"bdv": [1, 1, 1], "knn": [1, 1, 1]}' + - '{"index": {"_id": "2"}}' + - '{"bdv": [1, 1, 2], "knn": [1, 1, 2]}' + - '{"index": {"_id": "3"}}' + - '{"bdv": [1, 1, 3], "knn": [1, 1, 3]}' + - '{"index": {"_id": "missing_vector"}}' + - '{}' + + - do: + catch: bad_request + search: + body: + query: + script_score: + query: { match_all: {} } + script: + source: | + float sum = 0.0f; + for (def v : field(params.field)) { + sum += v; + } + return sum; + params: + field: bdv + + - match: { error.failed_shards.0.reason.caused_by.type: "unsupported_operation_exception" } + - match: { error.failed_shards.0.reason.caused_by.reason: "Cannot iterate over single valued dense_vector field, use get() instead" } + + - do: + catch: bad_request + search: + body: + query: + script_score: + query: { match_all: {} } + script: + source: | + float sum = 0.0f; + for (def v : field(params.field)) { + sum += v; + } + return sum; + params: + field: knn + + - match: { error.failed_shards.0.reason.caused_by.type: "unsupported_operation_exception" } + - match: { error.failed_shards.0.reason.caused_by.reason: "Cannot iterate over single valued dense_vector field, use get() instead"} + +--- +"l1Norm works on dense vectors": + - skip: + version: " - 8.1.99" + reason: "Fields API for dense vector added in 8.2" + + - do: + indices.create: + index: test-index + body: + mappings: + properties: + bdv: + type: dense_vector + dims: 3 + knn: + type: dense_vector + dims: 3 + index: true + similarity: l2_norm + - do: + bulk: + index: test-index + refresh: true + body: + - '{"index": {"_id": "1"}}' + - '{"bdv": [1, 1, 1], "knn": [1, 1, 1]}' + - '{"index": {"_id": "2"}}' + - '{"bdv": [1, 1, 2], "knn": [1, 1, 2]}' + - '{"index": {"_id": "3"}}' + - '{"bdv": [1, 1, 3], "knn": [1, 1, 3]}' + - '{"index": {"_id": "missing_vector"}}' + - '{}' + + - do: + search: + body: + query: + script_score: + query: { "exists": { "field": "bdv" } } + script: + source: | + field(params.field).get().l1Norm(params.query) + params: + query: [4, 5, 6] + field: bdv + + - match: { hits.hits.0._id: "1" } + - match: { hits.hits.0._score: 12 } + + - do: + search: + body: + query: + script_score: + query: { "exists": { "field": "bdv" } } + script: + source: | + float[] query = new float[3]; + query[0] = 4; query[1] = 5; query[2] = 6; + field(params.field).get().l1Norm(query) + params: + field: bdv + + - match: { hits.hits.0._id: "1" } + - match: { hits.hits.0._score: 12 } + + - do: + search: + body: + query: + script_score: + query: { "exists": { "field": "bdv" } } + script: + source: | + field(params.field).get().l1Norm(params.query) + params: + query: [4, 5, 6] + field: knn + + - match: { hits.hits.0._id: "1" } + - match: { hits.hits.0._score: 12 } + + - do: + search: + body: + query: + script_score: + query: { "exists": { "field": "bdv" } } + script: + source: | + float[] query = new float[3]; + query[0] = 4; query[1] = 5; query[2] = 6; + field(params.field).get().l1Norm(query) + params: + field: knn + + - match: { hits.hits.0._id: "1" } + - match: { hits.hits.0._score: 12 } + +--- +"l2Norm works on dense vectors": + - skip: + version: " - 8.1.99" + reason: "Fields API for dense vector added in 8.2" + + - do: + indices.create: + index: test-index + body: + mappings: + 
properties: + bdv: + type: dense_vector + dims: 3 + knn: + type: dense_vector + dims: 3 + index: true + similarity: l2_norm + - do: + bulk: + index: test-index + refresh: true + body: + - '{"index": {"_id": "1"}}' + - '{"bdv": [1, 1, 1], "knn": [1, 1, 1]}' + - '{"index": {"_id": "2"}}' + - '{"bdv": [1, 1, 2], "knn": [1, 1, 2]}' + - '{"index": {"_id": "3"}}' + - '{"bdv": [1, 1, 3], "knn": [1, 1, 3]}' + - '{"index": {"_id": "missing_vector"}}' + - '{}' + + - do: + search: + body: + query: + script_score: + query: { "exists": { "field": "bdv" } } + script: + source: | + (int) field(params.field).get().l2Norm(params.query) + params: + query: [4, 5, 6] + field: bdv + + - match: { hits.hits.0._id: "1" } + - match: { hits.hits.0._score: 7 } + - match: { hits.hits.1._id: "2" } + - match: { hits.hits.1._score: 6 } + - match: { hits.hits.2._id: "3" } + - match: { hits.hits.2._score: 5 } + + - do: + search: + body: + query: + script_score: + query: { "exists": { "field": "bdv" } } + script: + source: | + float[] query = new float[3]; + query[0] = 4; query[1] = 5; query[2] = 6; + (int) field(params.field).get().l2Norm(query) + params: + field: bdv + + - match: { hits.hits.0._id: "1" } + - match: { hits.hits.0._score: 7 } + - match: { hits.hits.1._id: "2" } + - match: { hits.hits.1._score: 6 } + - match: { hits.hits.2._id: "3" } + - match: { hits.hits.2._score: 5 } + + - do: + search: + body: + query: + script_score: + query: { "exists": { "field": "bdv" } } + script: + source: | + (int) field(params.field).get().l2Norm(params.query) + params: + query: [4, 5, 6] + field: knn + + - match: { hits.hits.0._id: "1" } + - match: { hits.hits.0._score: 7 } + - match: { hits.hits.1._id: "2" } + - match: { hits.hits.1._score: 6 } + - match: { hits.hits.2._id: "3" } + - match: { hits.hits.2._score: 5 } + + - do: + search: + body: + query: + script_score: + query: { "exists": { "field": "bdv" } } + script: + source: | + float[] query = new float[3]; + query[0] = 4; query[1] = 5; query[2] = 6; + (int) field(params.field).get().l2Norm(query) + params: + field: knn + + - match: { hits.hits.0._id: "1" } + - match: { hits.hits.0._score: 7 } + - match: { hits.hits.1._id: "2" } + - match: { hits.hits.1._score: 6 } + - match: { hits.hits.2._id: "3" } + - match: { hits.hits.2._score: 5 } + +--- +"cosineSimilarity works on dense vectors": + - skip: + version: " - 8.1.99" + reason: "Fields API for dense vector added in 8.2" + + - do: + indices.create: + index: test-index + body: + mappings: + properties: + bdv: + type: dense_vector + dims: 3 + knn: + type: dense_vector + dims: 3 + index: true + similarity: l2_norm + + - do: + bulk: + index: test-index + refresh: true + body: + - '{"index": {"_id": "1"}}' + - '{"bdv": [1, 1, 1], "knn": [1, 1, 1]}' + - '{"index": {"_id": "2"}}' + - '{"bdv": [1, 1, 2], "knn": [1, 1, 2]}' + - '{"index": {"_id": "3"}}' + - '{"bdv": [1, 1, 3], "knn": [1, 1, 3]}' + - '{"index": {"_id": "missing_vector"}}' + - '{}' + + - do: + search: + body: + query: + script_score: + query: { "exists": { "field": "bdv" } } + script: + source: | + float[] query = new float[3]; + query[0] = 4; query[1] = 5; query[2] = 6; + (int) (field(params.field).get().cosineSimilarity(query) * 100.0f) + params: + field: bdv + + - match: { hits.hits.0._id: "1" } + - match: { hits.hits.0._score: 98 } + - match: { hits.hits.1._id: "2" } + - match: { hits.hits.1._score: 97 } + - match: { hits.hits.2._id: "3" } + - match: { hits.hits.2._score: 92 } + + - do: + search: + body: + query: + script_score: + query: { "exists": { "field": 
"bdv" } } + script: + source: | + (int) (field(params.field).get().cosineSimilarity(params.query) * 100.0f) + params: + query: [4, 5, 6] + field: knn + + - match: { hits.hits.0._id: "1" } + - match: { hits.hits.0._score: 98 } + - match: { hits.hits.1._id: "2" } + - match: { hits.hits.1._score: 97 } + - match: { hits.hits.2._id: "3" } + - match: { hits.hits.2._score: 92 } + + - do: + search: + body: + query: + script_score: + query: { "exists": { "field": "bdv" } } + script: + source: | + (int) (field(params.field).get().cosineSimilarity(params.query) * 100.0f) + params: + query: [4, 5, 6] + field: bdv + + - match: { hits.hits.0._id: "1" } + - match: { hits.hits.0._score: 98 } + - match: { hits.hits.1._id: "2" } + - match: { hits.hits.1._score: 97 } + - match: { hits.hits.2._id: "3" } + - match: { hits.hits.2._score: 92 } + +--- +"query vector of wrong type errors": + - skip: + version: " - 8.0.99" + reason: "Fields API for dense vector added in 8.2" + + - do: + indices.create: + index: test-index + body: + mappings: + properties: + bdv: + type: dense_vector + dims: 3 + knn: + type: dense_vector + dims: 3 + index: true + similarity: l2_norm + - do: + bulk: + index: test-index + refresh: true + body: + - '{"index": {"_id": "1"}}' + - '{"bdv": [1, 1, 1], "knn": [1, 1, 1]}' + + - do: + catch: bad_request + search: + body: + query: + script_score: + query: { "exists": { "field": "bdv" } } + script: + source: | + (int) field(params.field).get().l2Norm(params.query) + params: + query: "one, two, three" + field: bdv + + - match: { error.failed_shards.0.reason.caused_by.type: "illegal_argument_exception" } + - match: { error.failed_shards.0.reason.caused_by.reason: "Cannot use vector [one, two, three] with class [java.lang.String] as query vector" } + + - do: + catch: bad_request + search: + body: + query: + script_score: + query: { "exists": { "field": "bdv" } } + script: + source: | + (int) field(params.field).get().l2Norm(params.query) + params: + query: "one, two, three" + field: knn + + - match: { error.failed_shards.0.reason.caused_by.type: "illegal_argument_exception" } + - match: { error.failed_shards.0.reason.caused_by.reason: "Cannot use vector [one, two, three] with class [java.lang.String] as query vector" } diff --git a/x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/query/BinaryDenseVector.java b/x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/query/BinaryDenseVector.java new file mode 100644 index 0000000000000..785016bed097a --- /dev/null +++ b/x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/query/BinaryDenseVector.java @@ -0,0 +1,141 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.vectors.query; + +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.Version; +import org.elasticsearch.xpack.vectors.mapper.VectorEncoderDecoder; + +import java.nio.ByteBuffer; +import java.util.List; + +public class BinaryDenseVector implements DenseVector { + protected final BytesRef docVector; + protected final int dims; + protected final Version indexVersion; + + protected float[] decodedDocVector; + + public BinaryDenseVector(BytesRef docVector, int dims, Version indexVersion) { + this.docVector = docVector; + this.indexVersion = indexVersion; + this.dims = dims; + } + + @Override + public float[] getVector() { + if (decodedDocVector == null) { + decodedDocVector = new float[dims]; + VectorEncoderDecoder.decodeDenseVector(docVector, decodedDocVector); + } + return decodedDocVector; + } + + @Override + public float getMagnitude() { + return VectorEncoderDecoder.getMagnitude(indexVersion, docVector); + } + + @Override + public double dotProduct(float[] queryVector) { + ByteBuffer byteBuffer = wrap(docVector); + + double dotProduct = 0; + for (float v : queryVector) { + dotProduct += byteBuffer.getFloat() * v; + } + return dotProduct; + } + + @Override + public double dotProduct(List<Number> queryVector) { + ByteBuffer byteBuffer = wrap(docVector); + + double dotProduct = 0; + for (int i = 0; i < queryVector.size(); i++) { + dotProduct += byteBuffer.getFloat() * queryVector.get(i).floatValue(); + } + return dotProduct; + } + + @Override + public double l1Norm(float[] queryVector) { + ByteBuffer byteBuffer = wrap(docVector); + + double l1norm = 0; + for (float v : queryVector) { + l1norm += Math.abs(v - byteBuffer.getFloat()); + } + return l1norm; + } + + @Override + public double l1Norm(List<Number> queryVector) { + ByteBuffer byteBuffer = wrap(docVector); + + double l1norm = 0; + for (int i = 0; i < queryVector.size(); i++) { + l1norm += Math.abs(queryVector.get(i).floatValue() - byteBuffer.getFloat()); + } + return l1norm; + } + + @Override + public double l2Norm(float[] queryVector) { + ByteBuffer byteBuffer = wrap(docVector); + double l2norm = 0; + for (float queryValue : queryVector) { + double diff = byteBuffer.getFloat() - queryValue; + l2norm += diff * diff; + } + return Math.sqrt(l2norm); + } + + @Override + public double l2Norm(List<Number> queryVector) { + ByteBuffer byteBuffer = wrap(docVector); + double l2norm = 0; + for (Number number : queryVector) { + double diff = byteBuffer.getFloat() - number.floatValue(); + l2norm += diff * diff; + } + return Math.sqrt(l2norm); + } + + @Override + public double cosineSimilarity(float[] queryVector, boolean normalizeQueryVector) { + if (normalizeQueryVector) { + return dotProduct(queryVector) / (DenseVector.getMagnitude(queryVector) * getMagnitude()); + } + return dotProduct(queryVector) / getMagnitude(); + } + + @Override + public double cosineSimilarity(List<Number> queryVector) { + return dotProduct(queryVector) / (DenseVector.getMagnitude(queryVector) * getMagnitude()); + } + + @Override + public int size() { + return 1; + } + + @Override + public boolean isEmpty() { + return false; + } + + @Override + public int getDims() { + return dims; + } + + private static ByteBuffer wrap(BytesRef dv) { + return ByteBuffer.wrap(dv.bytes, dv.offset, dv.length); + } +} diff --git a/x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/query/BinaryDenseVectorDocValuesField.java b/x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/query/BinaryDenseVectorDocValuesField.java new file mode
100644 index 0000000000000..ad1d016132547 --- /dev/null +++ b/x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/query/BinaryDenseVectorDocValuesField.java @@ -0,0 +1,70 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.vectors.query; + +import org.apache.lucene.index.BinaryDocValues; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.Version; + +import java.io.IOException; + +public class BinaryDenseVectorDocValuesField extends DenseVectorDocValuesField { + + protected final BinaryDocValues input; + protected final Version indexVersion; + protected final int dims; + protected BytesRef value; + + public BinaryDenseVectorDocValuesField(BinaryDocValues input, String name, int dims, Version indexVersion) { + super(name); + this.input = input; + this.indexVersion = indexVersion; + this.dims = dims; + } + + @Override + public void setNextDocId(int docId) throws IOException { + if (input.advanceExact(docId)) { + value = input.binaryValue(); + } else { + value = null; + } + } + + @Override + public DenseVectorScriptDocValues getScriptDocValues() { + return new DenseVectorScriptDocValues(this, dims); + } + + @Override + public boolean isEmpty() { + return value == null; + } + + @Override + public DenseVector get() { + if (isEmpty()) { + return DenseVector.EMPTY; + } + + return new BinaryDenseVector(value, dims, indexVersion); + } + + @Override + public DenseVector get(DenseVector defaultValue) { + if (isEmpty()) { + return defaultValue; + } + return new BinaryDenseVector(value, dims, indexVersion); + } + + @Override + public DenseVector getInternal() { + return get(null); + } +} diff --git a/x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/query/BinaryDenseVectorScriptDocValues.java b/x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/query/BinaryDenseVectorScriptDocValues.java deleted file mode 100644 index 852b63500a9bf..0000000000000 --- a/x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/query/BinaryDenseVectorScriptDocValues.java +++ /dev/null @@ -1,119 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -package org.elasticsearch.xpack.vectors.query; - -import org.apache.lucene.index.BinaryDocValues; -import org.apache.lucene.util.BytesRef; -import org.elasticsearch.Version; -import org.elasticsearch.xpack.vectors.mapper.VectorEncoderDecoder; - -import java.io.IOException; -import java.nio.ByteBuffer; - -public class BinaryDenseVectorScriptDocValues extends DenseVectorScriptDocValues { - - public static class BinaryDenseVectorSupplier implements DenseVectorSupplier { - - private final BinaryDocValues in; - private BytesRef value; - - public BinaryDenseVectorSupplier(BinaryDocValues in) { - this.in = in; - } - - @Override - public void setNextDocId(int docId) throws IOException { - if (in.advanceExact(docId)) { - value = in.binaryValue(); - } else { - value = null; - } - } - - @Override - public BytesRef getInternal(int index) { - throw new UnsupportedOperationException(); - } - - public BytesRef getInternal() { - return value; - } - - @Override - public int size() { - if (value == null) { - return 0; - } else { - return 1; - } - } - } - - private final BinaryDenseVectorSupplier bdvSupplier; - private final Version indexVersion; - private final float[] vector; - - BinaryDenseVectorScriptDocValues(BinaryDenseVectorSupplier supplier, Version indexVersion, int dims) { - super(supplier, dims); - this.bdvSupplier = supplier; - this.indexVersion = indexVersion; - this.vector = new float[dims]; - } - - @Override - public int size() { - return supplier.size(); - } - - @Override - public float[] getVectorValue() { - VectorEncoderDecoder.decodeDenseVector(bdvSupplier.getInternal(), vector); - return vector; - } - - @Override - public float getMagnitude() { - return VectorEncoderDecoder.getMagnitude(indexVersion, bdvSupplier.getInternal()); - } - - @Override - public double dotProduct(float[] queryVector) { - BytesRef value = bdvSupplier.getInternal(); - ByteBuffer byteBuffer = ByteBuffer.wrap(value.bytes, value.offset, value.length); - - double dotProduct = 0; - for (float queryValue : queryVector) { - dotProduct += queryValue * byteBuffer.getFloat(); - } - return (float) dotProduct; - } - - @Override - public double l1Norm(float[] queryVector) { - BytesRef value = bdvSupplier.getInternal(); - ByteBuffer byteBuffer = ByteBuffer.wrap(value.bytes, value.offset, value.length); - - double l1norm = 0; - for (float queryValue : queryVector) { - l1norm += Math.abs(queryValue - byteBuffer.getFloat()); - } - return l1norm; - } - - @Override - public double l2Norm(float[] queryVector) { - BytesRef value = bdvSupplier.getInternal(); - ByteBuffer byteBuffer = ByteBuffer.wrap(value.bytes, value.offset, value.length); - double l2norm = 0; - for (float queryValue : queryVector) { - double diff = queryValue - byteBuffer.getFloat(); - l2norm += diff * diff; - } - return Math.sqrt(l2norm); - } -} diff --git a/x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/query/DenseVector.java b/x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/query/DenseVector.java new file mode 100644 index 0000000000000..4ffbccbd9e415 --- /dev/null +++ b/x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/query/DenseVector.java @@ -0,0 +1,227 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.vectors.query; + +import java.util.List; + +/** + * DenseVector value type for Painless. + */ +/* dotProduct, l1Norm, l2Norm, cosineSimilarity have three flavors depending on the type of the queryVector + * 1) float[], this is for the ScoreScriptUtils class bindings which have converted a List based query vector into an array + * 2) List, a Painless script will typically use Lists since they are easy to pass as params and have an easy + * literal syntax. Working with Lists directly, instead of converting to a float[], trades off runtime operations against + * memory pressure. Dense vectors may have high dimensionality, up to 2048. Allocating a float[] per doc per script API + * call is prohibitively expensive. + * 3) Object, the whitelisted method for the Painless API. Calls into the float[] or List version based on the + * class of the argument and checks dimensionality. + */ +public interface DenseVector { + float[] getVector(); + + float getMagnitude(); + + double dotProduct(float[] queryVector); + + double dotProduct(List queryVector); + + @SuppressWarnings("unchecked") + default double dotProduct(Object queryVector) { + if (queryVector instanceof float[] array) { + checkDimensions(getDims(), array.length); + return dotProduct(array); + + } else if (queryVector instanceof List list) { + checkDimensions(getDims(), list.size()); + return dotProduct((List) list); + } + + throw new IllegalArgumentException(badQueryVectorType(queryVector)); + } + + double l1Norm(float[] queryVector); + + double l1Norm(List queryVector); + + @SuppressWarnings("unchecked") + default double l1Norm(Object queryVector) { + if (queryVector instanceof float[] array) { + checkDimensions(getDims(), array.length); + return l1Norm(array); + + } else if (queryVector instanceof List list) { + checkDimensions(getDims(), list.size()); + return l1Norm((List) list); + } + + throw new IllegalArgumentException(badQueryVectorType(queryVector)); + } + + double l2Norm(float[] queryVector); + + double l2Norm(List queryVector); + + @SuppressWarnings("unchecked") + default double l2Norm(Object queryVector) { + if (queryVector instanceof float[] array) { + checkDimensions(getDims(), array.length); + return l2Norm(array); + + } else if (queryVector instanceof List list) { + checkDimensions(getDims(), list.size()); + return l2Norm((List) list); + } + + throw new IllegalArgumentException(badQueryVectorType(queryVector)); + } + + /** + * Get the cosine similarity with the un-normalized query vector + */ + default double cosineSimilarity(float[] queryVector) { + return cosineSimilarity(queryVector, true); + } + + /** + * Get the cosine similarity with the query vector + * @param normalizeQueryVector - whether to normalize the query vector; does not change the contents of the passed-in query vector + */ + double cosineSimilarity(float[] queryVector, boolean normalizeQueryVector); + + /** + * Get the cosine similarity with the un-normalized query vector + */ + double cosineSimilarity(List queryVector); + + /** + * Get the cosine similarity with the un-normalized query vector. Handles queryVectors of type float[] and List.
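+ *
+ * A hedged illustration (the variable dv and the values are assumed here, not lines from the
+ * original patch): for a DenseVector over a 3-dimensional document vector, both calls below
+ * produce the same score, dispatching on the runtime type of the argument:
+ *
+ *   double byArray = dv.cosineSimilarity((Object) new float[] { 1f, 2f, 3f });
+ *   double byList = dv.cosineSimilarity((Object) List.of(1.0f, 2.0f, 3.0f));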
+ */ + @SuppressWarnings("unchecked") + default double cosineSimilarity(Object queryVector) { + if (queryVector instanceof float[] array) { + checkDimensions(getDims(), array.length); + return cosineSimilarity(array); + + } else if (queryVector instanceof List list) { + checkDimensions(getDims(), list.size()); + return cosineSimilarity((List) list); + } + + throw new IllegalArgumentException(badQueryVectorType(queryVector)); + } + + boolean isEmpty(); + + int getDims(); + + int size(); + + static float getMagnitude(float[] vector) { + double mag = 0.0f; + for (float elem : vector) { + mag += elem * elem; + } + return (float) Math.sqrt(mag); + } + + static float getMagnitude(List vector) { + double mag = 0.0f; + for (Number number : vector) { + float elem = number.floatValue(); + mag += elem * elem; + } + return (float) Math.sqrt(mag); + } + + static void checkDimensions(int dvDims, int qvDims) { + if (dvDims != qvDims) { + throw new IllegalArgumentException( + "The query vector has a different number of dimensions [" + qvDims + "] than the document vectors [" + dvDims + "]." + ); + } + } + + private static String badQueryVectorType(Object queryVector) { + return "Cannot use vector [" + queryVector + "] with class [" + queryVector.getClass().getName() + "] as query vector"; + } + + DenseVector EMPTY = new DenseVector() { + public static final String MISSING_VECTOR_FIELD_MESSAGE = "Dense vector value missing for a field," + + " use isEmpty() to check for a missing vector value"; + + @Override + public float getMagnitude() { + throw new IllegalArgumentException(MISSING_VECTOR_FIELD_MESSAGE); + } + + @Override + public double dotProduct(float[] queryVector) { + throw new IllegalArgumentException(MISSING_VECTOR_FIELD_MESSAGE); + } + + @Override + public double dotProduct(List queryVector) { + throw new IllegalArgumentException(MISSING_VECTOR_FIELD_MESSAGE); + } + + @Override + public double l1Norm(List queryVector) { + throw new IllegalArgumentException(MISSING_VECTOR_FIELD_MESSAGE); + } + + @Override + public double l1Norm(float[] queryVector) { + throw new IllegalArgumentException(MISSING_VECTOR_FIELD_MESSAGE); + } + + @Override + public double l2Norm(List queryVector) { + throw new IllegalArgumentException(MISSING_VECTOR_FIELD_MESSAGE); + } + + @Override + public double l2Norm(float[] queryVector) { + throw new IllegalArgumentException(MISSING_VECTOR_FIELD_MESSAGE); + } + + @Override + public double cosineSimilarity(float[] queryVector) { + throw new IllegalArgumentException(MISSING_VECTOR_FIELD_MESSAGE); + } + + @Override + public double cosineSimilarity(float[] queryVector, boolean normalizeQueryVector) { + throw new IllegalArgumentException(MISSING_VECTOR_FIELD_MESSAGE); + } + + @Override + public double cosineSimilarity(List queryVector) { + throw new IllegalArgumentException(MISSING_VECTOR_FIELD_MESSAGE); + } + + @Override + public float[] getVector() { + throw new IllegalArgumentException(MISSING_VECTOR_FIELD_MESSAGE); + } + + @Override + public boolean isEmpty() { + return true; + } + + @Override + public int getDims() { + throw new IllegalArgumentException(MISSING_VECTOR_FIELD_MESSAGE); + } + + @Override + public int size() { + return 0; + } + }; +} diff --git a/x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/query/DenseVectorDocValuesField.java b/x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/query/DenseVectorDocValuesField.java new file mode 100644 index 0000000000000..dd4a00fef3af0 --- /dev/null +++ 
b/x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/query/DenseVectorDocValuesField.java @@ -0,0 +1,51 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.vectors.query; + +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.script.field.DocValuesField; + +import java.util.Iterator; + +public abstract class DenseVectorDocValuesField implements DocValuesField, DenseVectorScriptDocValues.DenseVectorSupplier { + protected final String name; + + public DenseVectorDocValuesField(String name) { + this.name = name; + } + + @Override + public String getName() { + return name; + } + + @Override + public int size() { + return isEmpty() ? 0 : 1; + } + + @Override + public BytesRef getInternal(int index) { + throw new UnsupportedOperationException(); + } + + /** + * Get the DenseVector for a document if one exists, DenseVector.EMPTY otherwise + */ + public abstract DenseVector get(); + + public abstract DenseVector get(DenseVector defaultValue); + + public abstract DenseVectorScriptDocValues getScriptDocValues(); + + // DenseVector fields are single valued, so Iterable does not make sense. + @Override + public Iterator iterator() { + throw new UnsupportedOperationException("Cannot iterate over single valued dense_vector field, use get() instead"); + } +} diff --git a/x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/query/DenseVectorScriptDocValues.java b/x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/query/DenseVectorScriptDocValues.java index 650ebca1d5ee5..43d04f5ccde7a 100644 --- a/x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/query/DenseVectorScriptDocValues.java +++ b/x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/query/DenseVectorScriptDocValues.java @@ -10,24 +10,16 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.index.fielddata.ScriptDocValues; -public abstract class DenseVectorScriptDocValues extends ScriptDocValues { - - public interface DenseVectorSupplier extends Supplier { - - @Override - default BytesRef getInternal(int index) { - throw new UnsupportedOperationException(); - } - - T getInternal(); - } +public class DenseVectorScriptDocValues extends ScriptDocValues { public static final String MISSING_VECTOR_FIELD_MESSAGE = "A document doesn't have a value for a vector field!"; private final int dims; + protected final DenseVectorSupplier dvSupplier; - public DenseVectorScriptDocValues(DenseVectorSupplier supplier, int dims) { + public DenseVectorScriptDocValues(DenseVectorSupplier supplier, int dims) { super(supplier); + this.dvSupplier = supplier; this.dims = dims; } @@ -35,60 +27,58 @@ public int dims() { return dims; } + private DenseVector getCheckedVector() { + DenseVector vector = dvSupplier.getInternal(); + if (vector == null) { + throw new IllegalArgumentException(MISSING_VECTOR_FIELD_MESSAGE); + } + return vector; + } + /** * Get dense vector's value as an array of floats */ - public abstract float[] getVectorValue(); + public float[] getVectorValue() { + return getCheckedVector().getVector(); + } /** * Get dense vector's magnitude */ - public abstract float getMagnitude(); + public float getMagnitude() { + return getCheckedVector().getMagnitude(); + } - public abstract double dotProduct(float[] 
queryVector); + public double dotProduct(float[] queryVector) { + return getCheckedVector().dotProduct(queryVector); + } - public abstract double l1Norm(float[] queryVector); + public double l1Norm(float[] queryVector) { + return getCheckedVector().l1Norm(queryVector); + } - public abstract double l2Norm(float[] queryVector); + public double l2Norm(float[] queryVector) { + return getCheckedVector().l2Norm(queryVector); + } @Override public BytesRef get(int index) { throw new UnsupportedOperationException( - "accessing a vector field's value through 'get' or 'value' is not supported!" + "Use 'vectorValue' or 'magnitude' instead!'" + "accessing a vector field's value through 'get' or 'value' is not supported, use 'vectorValue' or 'magnitude' instead." ); } - public static DenseVectorScriptDocValues empty(DenseVectorSupplier supplier, int dims) { - return new DenseVectorScriptDocValues(supplier, dims) { - @Override - public float[] getVectorValue() { - throw new IllegalArgumentException(MISSING_VECTOR_FIELD_MESSAGE); - } - - @Override - public float getMagnitude() { - throw new IllegalArgumentException(MISSING_VECTOR_FIELD_MESSAGE); - } - - @Override - public double dotProduct(float[] queryVector) { - throw new IllegalArgumentException(MISSING_VECTOR_FIELD_MESSAGE); - } - - @Override - public double l1Norm(float[] queryVector) { - throw new IllegalArgumentException(MISSING_VECTOR_FIELD_MESSAGE); - } - - @Override - public double l2Norm(float[] queryVector) { - throw new IllegalArgumentException(MISSING_VECTOR_FIELD_MESSAGE); - } - - @Override - public int size() { - return supplier.size(); - } - }; + @Override + public int size() { + return dvSupplier.getInternal() == null ? 0 : 1; + } + + public interface DenseVectorSupplier extends Supplier { + @Override + default BytesRef getInternal(int index) { + throw new UnsupportedOperationException(); + } + + DenseVector getInternal(); } } diff --git a/x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/query/DocValuesWhitelistExtension.java b/x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/query/DocValuesWhitelistExtension.java index c53d1379dc252..953044c3a5500 100644 --- a/x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/query/DocValuesWhitelistExtension.java +++ b/x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/query/DocValuesWhitelistExtension.java @@ -19,7 +19,10 @@ public class DocValuesWhitelistExtension implements PainlessExtension { - private static final Whitelist WHITELIST = WhitelistLoader.loadFromResourceFiles(DocValuesWhitelistExtension.class, "whitelist.txt"); + private static final Whitelist WHITELIST = WhitelistLoader.loadFromResourceFiles( + DocValuesWhitelistExtension.class, + "org.elasticsearch.xpack.vectors.txt" + ); @Override public Map, List> getContextWhitelists() { diff --git a/x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/query/KnnDenseVector.java b/x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/query/KnnDenseVector.java new file mode 100644 index 0000000000000..1c240892ab2bd --- /dev/null +++ b/x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/query/KnnDenseVector.java @@ -0,0 +1,109 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.vectors.query; + +import org.apache.lucene.util.VectorUtil; + +import java.util.Arrays; +import java.util.List; + +public class KnnDenseVector implements DenseVector { + protected final float[] docVector; + + public KnnDenseVector(float[] docVector) { + this.docVector = docVector; + } + + @Override + public float[] getVector() { + // we need to copy the value, since {@link VectorValues} can reuse + // the underlying array across documents + return Arrays.copyOf(docVector, docVector.length); + } + + @Override + public float getMagnitude() { + return DenseVector.getMagnitude(docVector); + } + + @Override + public double dotProduct(float[] queryVector) { + return VectorUtil.dotProduct(docVector, queryVector); + } + + @Override + public double dotProduct(List queryVector) { + double dotProduct = 0; + for (int i = 0; i < docVector.length; i++) { + dotProduct += docVector[i] * queryVector.get(i).floatValue(); + } + return dotProduct; + } + + @Override + public double l1Norm(float[] queryVector) { + double result = 0.0; + for (int i = 0; i < docVector.length; i++) { + result += Math.abs(docVector[i] - queryVector[i]); + } + return result; + } + + @Override + public double l1Norm(List queryVector) { + double result = 0.0; + for (int i = 0; i < docVector.length; i++) { + result += Math.abs(docVector[i] - queryVector.get(i).floatValue()); + } + return result; + } + + @Override + public double l2Norm(float[] queryVector) { + return Math.sqrt(VectorUtil.squareDistance(docVector, queryVector)); + } + + @Override + public double l2Norm(List queryVector) { + double l2norm = 0; + for (int i = 0; i < docVector.length; i++) { + double diff = docVector[i] - queryVector.get(i).floatValue(); + l2norm += diff * diff; + } + return Math.sqrt(l2norm); + } + + @Override + public double cosineSimilarity(float[] queryVector, boolean normalizeQueryVector) { + if (normalizeQueryVector) { + return dotProduct(queryVector) / (DenseVector.getMagnitude(queryVector) * getMagnitude()); + } + + return dotProduct(queryVector) / getMagnitude(); + } + + @Override + public double cosineSimilarity(List queryVector) { + return dotProduct(queryVector) / (DenseVector.getMagnitude(queryVector) * getMagnitude()); + } + + @Override + public boolean isEmpty() { + return false; + } + + @Override + public int getDims() { + return docVector.length; + } + + @Override + public int size() { + return 1; + } +} diff --git a/x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/query/KnnDenseVectorDocValuesField.java b/x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/query/KnnDenseVectorDocValuesField.java new file mode 100644 index 0000000000000..58b2e60a0fb80 --- /dev/null +++ b/x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/query/KnnDenseVectorDocValuesField.java @@ -0,0 +1,79 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.vectors.query; + +import org.apache.lucene.index.VectorValues; +import org.elasticsearch.core.Nullable; + +import java.io.IOException; + +import static org.apache.lucene.search.DocIdSetIterator.NO_MORE_DOCS; + +public class KnnDenseVectorDocValuesField extends DenseVectorDocValuesField { + protected VectorValues input; // null if no vectors + protected float[] vector; + protected final int dims; + + public KnnDenseVectorDocValuesField(@Nullable VectorValues input, String name, int dims) { + super(name); + this.dims = dims; + this.input = input; + } + + @Override + public void setNextDocId(int docId) throws IOException { + if (input == null) { + return; + } + int currentDoc = input.docID(); + if (currentDoc == NO_MORE_DOCS || docId < currentDoc) { + vector = null; + } else if (docId == currentDoc) { + vector = input.vectorValue(); + } else { + currentDoc = input.advance(docId); + if (currentDoc == docId) { + vector = input.vectorValue(); + } else { + vector = null; + } + } + } + + @Override + public DenseVectorScriptDocValues getScriptDocValues() { + return new DenseVectorScriptDocValues(this, dims); + } + + public boolean isEmpty() { + return vector == null; + } + + @Override + public DenseVector get() { + if (isEmpty()) { + return DenseVector.EMPTY; + } + + return new KnnDenseVector(vector); + } + + @Override + public DenseVector get(DenseVector defaultValue) { + if (isEmpty()) { + return defaultValue; + } + + return new KnnDenseVector(vector); + } + + @Override + public DenseVector getInternal() { + return get(null); + } +} diff --git a/x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/query/KnnDenseVectorScriptDocValues.java b/x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/query/KnnDenseVectorScriptDocValues.java deleted file mode 100644 index fc6f1bdb59906..0000000000000 --- a/x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/query/KnnDenseVectorScriptDocValues.java +++ /dev/null @@ -1,122 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -package org.elasticsearch.xpack.vectors.query; - -import org.apache.lucene.index.VectorValues; -import org.apache.lucene.util.BytesRef; -import org.apache.lucene.util.VectorUtil; - -import java.io.IOException; -import java.util.Arrays; - -import static org.apache.lucene.search.DocIdSetIterator.NO_MORE_DOCS; - -public class KnnDenseVectorScriptDocValues extends DenseVectorScriptDocValues { - - public static class KnnDenseVectorSupplier implements DenseVectorSupplier { - - private final VectorValues in; - private float[] vector; - - public KnnDenseVectorSupplier(VectorValues in) { - this.in = in; - } - - @Override - public void setNextDocId(int docId) throws IOException { - int currentDoc = in.docID(); - if (currentDoc == NO_MORE_DOCS || docId < currentDoc) { - vector = null; - } else if (docId == currentDoc) { - vector = in.vectorValue(); - } else { - currentDoc = in.advance(docId); - if (currentDoc == docId) { - vector = in.vectorValue(); - } else { - vector = null; - } - } - } - - @Override - public BytesRef getInternal(int index) { - throw new UnsupportedOperationException(); - } - - public float[] getInternal() { - return vector; - } - - @Override - public int size() { - if (vector == null) { - return 0; - } else { - return 1; - } - } - } - - private final KnnDenseVectorSupplier kdvSupplier; - - KnnDenseVectorScriptDocValues(KnnDenseVectorSupplier supplier, int dims) { - super(supplier, dims); - this.kdvSupplier = supplier; - } - - private float[] getVectorChecked() { - if (kdvSupplier.getInternal() == null) { - throw new IllegalArgumentException(MISSING_VECTOR_FIELD_MESSAGE); - } - return kdvSupplier.getInternal(); - } - - @Override - public float[] getVectorValue() { - float[] vector = getVectorChecked(); - // we need to copy the value, since {@link VectorValues} can reuse - // the underlying array across documents - return Arrays.copyOf(vector, vector.length); - } - - @Override - public float getMagnitude() { - float[] vector = getVectorChecked(); - double magnitude = 0.0f; - for (float elem : vector) { - magnitude += elem * elem; - } - return (float) Math.sqrt(magnitude); - } - - @Override - public double dotProduct(float[] queryVector) { - return VectorUtil.dotProduct(getVectorChecked(), queryVector); - } - - @Override - public double l1Norm(float[] queryVector) { - float[] vectorValue = getVectorChecked(); - double result = 0.0; - for (int i = 0; i < queryVector.length; i++) { - result += Math.abs(vectorValue[i] - queryVector[i]); - } - return result; - } - - @Override - public double l2Norm(float[] queryVector) { - return Math.sqrt(VectorUtil.squareDistance(getVectorValue(), queryVector)); - } - - @Override - public int size() { - return supplier.size(); - } -} diff --git a/x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/query/ScoreScriptUtils.java b/x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/query/ScoreScriptUtils.java index e97daf4c2f397..24e74e4a93958 100644 --- a/x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/query/ScoreScriptUtils.java +++ b/x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/query/ScoreScriptUtils.java @@ -18,10 +18,10 @@ public class ScoreScriptUtils { public static class DenseVectorFunction { final ScoreScript scoreScript; final float[] queryVector; - final DenseVectorScriptDocValues docValues; + final DenseVectorDocValuesField field; - public DenseVectorFunction(ScoreScript scoreScript, List queryVector, String field) { - this(scoreScript, queryVector, field, 
false); + public DenseVectorFunction(ScoreScript scoreScript, List queryVector, String fieldName) { + this(scoreScript, queryVector, fieldName, false); } /** @@ -31,19 +31,10 @@ public DenseVectorFunction(ScoreScript scoreScript, List queryVector, St * @param queryVector The query vector. * @param normalizeQuery Whether the provided query should be normalized to unit length. */ - public DenseVectorFunction(ScoreScript scoreScript, List queryVector, String field, boolean normalizeQuery) { + public DenseVectorFunction(ScoreScript scoreScript, List queryVector, String fieldName, boolean normalizeQuery) { this.scoreScript = scoreScript; - this.docValues = (DenseVectorScriptDocValues) scoreScript.getDoc().get(field); - - if (docValues.dims() != queryVector.size()) { - throw new IllegalArgumentException( - "The query vector has a different number of dimensions [" - + queryVector.size() - + "] than the document vectors [" - + docValues.dims() - + "]." - ); - } + this.field = (DenseVectorDocValuesField) scoreScript.field(fieldName); + DenseVector.checkDimensions(field.get().getDims(), queryVector.size()); this.queryVector = new float[queryVector.size()]; double queryMagnitude = 0.0; @@ -63,11 +54,11 @@ public DenseVectorFunction(ScoreScript scoreScript, List queryVector, St void setNextVector() { try { - docValues.getSupplier().setNextDocId(scoreScript._getDocId()); + field.setNextDocId(scoreScript._getDocId()); } catch (IOException e) { throw ExceptionsHelper.convertToElastic(e); } - if (docValues.size() == 0) { + if (field.isEmpty()) { throw new IllegalArgumentException("A document doesn't have a value for a vector field!"); } } @@ -82,7 +73,7 @@ public L1Norm(ScoreScript scoreScript, List queryVector, String field) { public double l1norm() { setNextVector(); - return docValues.l1Norm(queryVector); + return field.get().l1Norm(queryVector); } } @@ -95,7 +86,7 @@ public L2Norm(ScoreScript scoreScript, List queryVector, String field) { public double l2norm() { setNextVector(); - return docValues.l2Norm(queryVector); + return field.get().l2Norm(queryVector); } } @@ -108,7 +99,7 @@ public DotProduct(ScoreScript scoreScript, List queryVector, String fiel public double dotProduct() { setNextVector(); - return docValues.dotProduct(queryVector); + return field.get().dotProduct(queryVector); } } @@ -121,7 +112,8 @@ public CosineSimilarity(ScoreScript scoreScript, List queryVector, Strin public double cosineSimilarity() { setNextVector(); - return docValues.dotProduct(queryVector) / docValues.getMagnitude(); + // query vector normalized in constructor + return field.get().cosineSimilarity(queryVector, false); } } } diff --git a/x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/query/VectorDVLeafFieldData.java b/x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/query/VectorDVLeafFieldData.java index 1d8c45e9c60c2..a4789543ded43 100644 --- a/x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/query/VectorDVLeafFieldData.java +++ b/x-pack/plugin/vectors/src/main/java/org/elasticsearch/xpack/vectors/query/VectorDVLeafFieldData.java @@ -15,18 +15,12 @@ import org.elasticsearch.Version; import org.elasticsearch.index.fielddata.LeafFieldData; import org.elasticsearch.index.fielddata.SortedBinaryDocValues; -import org.elasticsearch.script.field.DelegateDocValuesField; import org.elasticsearch.script.field.DocValuesField; -import org.elasticsearch.xpack.vectors.query.BinaryDenseVectorScriptDocValues.BinaryDenseVectorSupplier; -import 
org.elasticsearch.xpack.vectors.query.DenseVectorScriptDocValues.DenseVectorSupplier; -import org.elasticsearch.xpack.vectors.query.KnnDenseVectorScriptDocValues.KnnDenseVectorSupplier; import java.io.IOException; import java.util.Collection; import java.util.Collections; -import static org.elasticsearch.xpack.vectors.query.DenseVectorScriptDocValues.MISSING_VECTOR_FIELD_MESSAGE; - final class VectorDVLeafFieldData implements LeafFieldData { private final LeafReader reader; @@ -63,31 +57,15 @@ public DocValuesField getScriptField(String name) { try { if (indexed) { VectorValues values = reader.getVectorValues(field); - if (values == null || values == VectorValues.EMPTY) { - return new DelegateDocValuesField(DenseVectorScriptDocValues.empty(new DenseVectorSupplier() { - @Override - public float[] getInternal() { - throw new IllegalArgumentException(MISSING_VECTOR_FIELD_MESSAGE); - } - - @Override - public void setNextDocId(int docId) throws IOException { - // do nothing - } - - @Override - public int size() { - return 0; - } - }, dims), name); + if (values == VectorValues.EMPTY) { + // There's no way for KnnDenseVectorDocValuesField to reliably differentiate between VectorValues.EMPTY and + // values that can be iterated through. Since VectorValues.EMPTY throws on docID(), pass a null instead. + values = null; } - return new DelegateDocValuesField(new KnnDenseVectorScriptDocValues(new KnnDenseVectorSupplier(values), dims), name); + return new KnnDenseVectorDocValuesField(values, name, dims); } else { BinaryDocValues values = DocValues.getBinary(reader, field); - return new DelegateDocValuesField( - new BinaryDenseVectorScriptDocValues(new BinaryDenseVectorSupplier(values), indexVersion, dims), - name - ); + return new BinaryDenseVectorDocValuesField(values, name, dims, indexVersion); } } catch (IOException e) { throw new IllegalStateException("Cannot load doc values for vector field!", e); diff --git a/x-pack/plugin/vectors/src/main/resources/org/elasticsearch/xpack/vectors/query/whitelist.txt b/x-pack/plugin/vectors/src/main/resources/org/elasticsearch/xpack/vectors/query/org.elasticsearch.xpack.vectors.txt similarity index 52% rename from x-pack/plugin/vectors/src/main/resources/org/elasticsearch/xpack/vectors/query/whitelist.txt rename to x-pack/plugin/vectors/src/main/resources/org/elasticsearch/xpack/vectors/query/org.elasticsearch.xpack.vectors.txt index 86583d77264a2..bcf989933b04e 100644 --- a/x-pack/plugin/vectors/src/main/resources/org/elasticsearch/xpack/vectors/query/whitelist.txt +++ b/x-pack/plugin/vectors/src/main/resources/org/elasticsearch/xpack/vectors/query/org.elasticsearch.xpack.vectors.txt @@ -11,6 +11,43 @@ class org.elasticsearch.xpack.vectors.query.DenseVectorScriptDocValues { class org.elasticsearch.script.ScoreScript @no_import { } +class org.elasticsearch.xpack.vectors.query.DenseVector { + DenseVector EMPTY + float getMagnitude() + + # handle List and float[] arguments + double dotProduct(Object) + double l1Norm(Object) + double l2Norm(Object) + double cosineSimilarity(Object) + + float[] getVector() + boolean isEmpty() + int getDims() + int size() +} + +# implementation of DenseVector +class org.elasticsearch.xpack.vectors.query.BinaryDenseVector { +} + +# implementation of DenseVector +class org.elasticsearch.xpack.vectors.query.KnnDenseVector { +} + +class org.elasticsearch.xpack.vectors.query.DenseVectorDocValuesField { + DenseVector get() + DenseVector get(DenseVector) +} + +# implementation of DenseVectorDocValuesField +class 
org.elasticsearch.xpack.vectors.query.KnnDenseVectorDocValuesField { +} + +# implementation of DenseVectorDocValuesField +class org.elasticsearch.xpack.vectors.query.BinaryDenseVectorDocValuesField { +} + static_import { double l1norm(org.elasticsearch.script.ScoreScript, List, String) bound_to org.elasticsearch.xpack.vectors.query.ScoreScriptUtils$L1Norm double l2norm(org.elasticsearch.script.ScoreScript, List, String) bound_to org.elasticsearch.xpack.vectors.query.ScoreScriptUtils$L2Norm diff --git a/x-pack/plugin/vectors/src/test/java/org/elasticsearch/xpack/vectors/query/BinaryDenseVectorScriptDocValuesTests.java b/x-pack/plugin/vectors/src/test/java/org/elasticsearch/xpack/vectors/query/BinaryDenseVectorScriptDocValuesTests.java index 2761364e51505..ddd96ba9fd0a7 100644 --- a/x-pack/plugin/vectors/src/test/java/org/elasticsearch/xpack/vectors/query/BinaryDenseVectorScriptDocValuesTests.java +++ b/x-pack/plugin/vectors/src/test/java/org/elasticsearch/xpack/vectors/query/BinaryDenseVectorScriptDocValuesTests.java @@ -12,7 +12,6 @@ import org.elasticsearch.Version; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.vectors.mapper.VectorEncoderDecoder; -import org.elasticsearch.xpack.vectors.query.BinaryDenseVectorScriptDocValues.BinaryDenseVectorSupplier; import java.io.IOException; import java.nio.ByteBuffer; @@ -29,24 +28,56 @@ public void testGetVectorValueAndGetMagnitude() throws IOException { for (Version indexVersion : Arrays.asList(Version.V_7_4_0, Version.CURRENT)) { BinaryDocValues docValues = wrap(vectors, indexVersion); - BinaryDenseVectorSupplier supplier = new BinaryDenseVectorSupplier(docValues); - DenseVectorScriptDocValues scriptDocValues = new BinaryDenseVectorScriptDocValues(supplier, indexVersion, dims); + BinaryDenseVectorDocValuesField field = new BinaryDenseVectorDocValuesField(docValues, "test", dims, indexVersion); + DenseVectorScriptDocValues scriptDocValues = field.getScriptDocValues(); for (int i = 0; i < vectors.length; i++) { - supplier.setNextDocId(i); + field.setNextDocId(i); + assertEquals(1, field.size()); + assertEquals(dims, scriptDocValues.dims()); assertArrayEquals(vectors[i], scriptDocValues.getVectorValue(), 0.0001f); assertEquals(expectedMagnitudes[i], scriptDocValues.getMagnitude(), 0.0001f); } } } + public void testMetadataAndIterator() throws IOException { + int dims = 3; + Version indexVersion = Version.CURRENT; + float[][] vectors = fill(new float[randomIntBetween(1, 5)][dims]); + BinaryDocValues docValues = wrap(vectors, indexVersion); + BinaryDenseVectorDocValuesField field = new BinaryDenseVectorDocValuesField(docValues, "test", dims, indexVersion); + for (int i = 0; i < vectors.length; i++) { + field.setNextDocId(i); + DenseVector dv = field.get(); + assertEquals(1, dv.size()); + assertFalse(dv.isEmpty()); + assertEquals(dims, dv.getDims()); + UnsupportedOperationException e = expectThrows(UnsupportedOperationException.class, field::iterator); + assertEquals("Cannot iterate over single valued dense_vector field, use get() instead", e.getMessage()); + } + field.setNextDocId(vectors.length); + DenseVector dv = field.get(); + assertEquals(dv, DenseVector.EMPTY); + } + + protected float[][] fill(float[][] vectors) { + for (float[] vector : vectors) { + for (int i = 0; i < vector.length; i++) { + vector[i] = randomFloat(); + } + } + return vectors; + } + public void testMissingValues() throws IOException { int dims = 3; float[][] vectors = { { 1, 1, 1 }, { 1, 1, 2 }, { 1, 1, 3 } }; BinaryDocValues docValues = wrap(vectors, 
Version.CURRENT); - BinaryDenseVectorSupplier supplier = new BinaryDenseVectorSupplier(docValues); - DenseVectorScriptDocValues scriptDocValues = new BinaryDenseVectorScriptDocValues(supplier, Version.CURRENT, dims); + BinaryDenseVectorDocValuesField field = new BinaryDenseVectorDocValuesField(docValues, "test", dims, Version.CURRENT); + DenseVectorScriptDocValues scriptDocValues = field.getScriptDocValues(); - supplier.setNextDocId(3); + field.setNextDocId(3); + assertEquals(0, field.size()); Exception e = expectThrows(IllegalArgumentException.class, scriptDocValues::getVectorValue); assertEquals("A document doesn't have a value for a vector field!", e.getMessage()); @@ -58,12 +89,17 @@ public void testGetFunctionIsNotAccessible() throws IOException { int dims = 3; float[][] vectors = { { 1, 1, 1 }, { 1, 1, 2 }, { 1, 1, 3 } }; BinaryDocValues docValues = wrap(vectors, Version.CURRENT); - BinaryDenseVectorSupplier supplier = new BinaryDenseVectorSupplier(docValues); - DenseVectorScriptDocValues scriptDocValues = new BinaryDenseVectorScriptDocValues(supplier, Version.CURRENT, dims); + BinaryDenseVectorDocValuesField field = new BinaryDenseVectorDocValuesField(docValues, "test", dims, Version.CURRENT); + DenseVectorScriptDocValues scriptDocValues = field.getScriptDocValues(); - supplier.setNextDocId(0); + field.setNextDocId(0); Exception e = expectThrows(UnsupportedOperationException.class, () -> scriptDocValues.get(0)); - assertThat(e.getMessage(), containsString("accessing a vector field's value through 'get' or 'value' is not supported!")); + assertThat( + e.getMessage(), + containsString( + "accessing a vector field's value through 'get' or 'value' is not supported, use 'vectorValue' or 'magnitude' instead." + ) + ); } public void testSimilarityFunctions() throws IOException { @@ -73,10 +109,10 @@ public void testSimilarityFunctions() throws IOException { for (Version indexVersion : Arrays.asList(Version.V_7_4_0, Version.CURRENT)) { BinaryDocValues docValues = wrap(new float[][] { docVector }, indexVersion); - BinaryDenseVectorSupplier supplier = new BinaryDenseVectorSupplier(docValues); - DenseVectorScriptDocValues scriptDocValues = new BinaryDenseVectorScriptDocValues(supplier, Version.CURRENT, dims); + BinaryDenseVectorDocValuesField field = new BinaryDenseVectorDocValuesField(docValues, "test", dims, indexVersion); + DenseVectorScriptDocValues scriptDocValues = field.getScriptDocValues(); - supplier.setNextDocId(0); + field.setNextDocId(0); assertEquals( "dotProduct result is not equal to the expected value!", @@ -133,7 +169,7 @@ public long cost() { }; } - private static BytesRef mockEncodeDenseVector(float[] values, Version indexVersion) { + static BytesRef mockEncodeDenseVector(float[] values, Version indexVersion) { byte[] bytes = indexVersion.onOrAfter(Version.V_7_5_0) ? 
new byte[VectorEncoderDecoder.INT_BYTES * values.length + VectorEncoderDecoder.INT_BYTES] : new byte[VectorEncoderDecoder.INT_BYTES * values.length]; diff --git a/x-pack/plugin/vectors/src/test/java/org/elasticsearch/xpack/vectors/query/DenseVectorFunctionTests.java b/x-pack/plugin/vectors/src/test/java/org/elasticsearch/xpack/vectors/query/DenseVectorFunctionTests.java index 0ecd26f08c20c..d40d7e3abd663 100644 --- a/x-pack/plugin/vectors/src/test/java/org/elasticsearch/xpack/vectors/query/DenseVectorFunctionTests.java +++ b/x-pack/plugin/vectors/src/test/java/org/elasticsearch/xpack/vectors/query/DenseVectorFunctionTests.java @@ -7,18 +7,16 @@ package org.elasticsearch.xpack.vectors.query; -import org.apache.lucene.index.BinaryDocValues; import org.elasticsearch.Version; import org.elasticsearch.script.ScoreScript; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.xpack.vectors.query.BinaryDenseVectorScriptDocValues.BinaryDenseVectorSupplier; import org.elasticsearch.xpack.vectors.query.ScoreScriptUtils.CosineSimilarity; import org.elasticsearch.xpack.vectors.query.ScoreScriptUtils.DotProduct; import org.elasticsearch.xpack.vectors.query.ScoreScriptUtils.L1Norm; import org.elasticsearch.xpack.vectors.query.ScoreScriptUtils.L2Norm; +import java.io.IOException; import java.util.Arrays; -import java.util.Collections; import java.util.List; import java.util.function.Supplier; @@ -28,34 +26,72 @@ public class DenseVectorFunctionTests extends ESTestCase { - public void testVectorFunctions() { - String field = "vector"; + public void testVectorClassBindings() throws IOException { + String fieldName = "vector"; int dims = 5; float[] docVector = new float[] { 230.0f, 300.33f, -34.8988f, 15.555f, -200.0f }; List queryVector = Arrays.asList(0.5f, 111.3f, -13.0f, 14.8f, -156.0f); List invalidQueryVector = Arrays.asList(0.5, 111.3); - for (Version indexVersion : Arrays.asList(Version.V_7_4_0, Version.CURRENT)) { - BinaryDocValues docValues = BinaryDenseVectorScriptDocValuesTests.wrap(new float[][] { docVector }, indexVersion); - DenseVectorScriptDocValues scriptDocValues = new BinaryDenseVectorScriptDocValues( - new BinaryDenseVectorSupplier(docValues), - indexVersion, - dims - ); + List fields = List.of( + new BinaryDenseVectorDocValuesField( + BinaryDenseVectorScriptDocValuesTests.wrap(new float[][] { docVector }, Version.V_7_4_0), + "test", + dims, + Version.V_7_4_0 + ), + new BinaryDenseVectorDocValuesField( + BinaryDenseVectorScriptDocValuesTests.wrap(new float[][] { docVector }, Version.CURRENT), + "test", + dims, + Version.CURRENT + ), + new KnnDenseVectorDocValuesField(KnnDenseVectorScriptDocValuesTests.wrap(new float[][] { docVector }), "test", dims) + ); + for (DenseVectorDocValuesField field : fields) { + field.setNextDocId(0); ScoreScript scoreScript = mock(ScoreScript.class); - when(scoreScript.getDoc()).thenReturn(Collections.singletonMap(field, scriptDocValues)); + when(scoreScript.field("vector")).thenAnswer(mock -> field); // Test cosine similarity explicitly, as it must perform special logic on top of the doc values - CosineSimilarity function = new CosineSimilarity(scoreScript, queryVector, field); - assertEquals("cosineSimilarity result is not equal to the expected value!", 0.790, function.cosineSimilarity(), 0.001); + CosineSimilarity function = new CosineSimilarity(scoreScript, queryVector, fieldName); + float cosineSimilarityExpected = 0.790f; + assertEquals( + "cosineSimilarity result is not equal to the expected value!", + cosineSimilarityExpected, + 
function.cosineSimilarity(), + 0.001 + ); + + // Test normalization for cosineSimilarity + float[] queryVectorArray = new float[queryVector.size()]; + for (int i = 0; i < queryVectorArray.length; i++) { + queryVectorArray[i] = queryVector.get(i).floatValue(); + } + assertEquals( + "cosineSimilarity result is not equal to the expected value!", + cosineSimilarityExpected, + field.getInternal().cosineSimilarity(queryVectorArray, true), + 0.001 + ); // Check each function rejects query vectors with the wrong dimension - assertDimensionMismatch(() -> new DotProduct(scoreScript, invalidQueryVector, field)); - assertDimensionMismatch(() -> new CosineSimilarity(scoreScript, invalidQueryVector, field)); - assertDimensionMismatch(() -> new L1Norm(scoreScript, invalidQueryVector, field)); - assertDimensionMismatch(() -> new L2Norm(scoreScript, invalidQueryVector, field)); + assertDimensionMismatch(() -> new DotProduct(scoreScript, invalidQueryVector, fieldName)); + assertDimensionMismatch(() -> new CosineSimilarity(scoreScript, invalidQueryVector, fieldName)); + assertDimensionMismatch(() -> new L1Norm(scoreScript, invalidQueryVector, fieldName)); + assertDimensionMismatch(() -> new L2Norm(scoreScript, invalidQueryVector, fieldName)); + + // Check scripting infrastructure integration + DotProduct dotProduct = new DotProduct(scoreScript, queryVector, fieldName); + assertEquals(65425.6249, dotProduct.dotProduct(), 0.001); + assertEquals(485.1837, new L1Norm(scoreScript, queryVector, fieldName).l1norm(), 0.001); + assertEquals(301.3614, new L2Norm(scoreScript, queryVector, fieldName).l2norm(), 0.001); + when(scoreScript._getDocId()).thenReturn(1); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, dotProduct::dotProduct); + assertEquals("A document doesn't have a value for a vector field!", e.getMessage()); } + } private void assertDimensionMismatch(Supplier supplier) { diff --git a/x-pack/plugin/vectors/src/test/java/org/elasticsearch/xpack/vectors/query/DenseVectorTests.java b/x-pack/plugin/vectors/src/test/java/org/elasticsearch/xpack/vectors/query/DenseVectorTests.java new file mode 100644 index 0000000000000..11078e4964920 --- /dev/null +++ b/x-pack/plugin/vectors/src/test/java/org/elasticsearch/xpack/vectors/query/DenseVectorTests.java @@ -0,0 +1,84 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.vectors.query; + +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.Version; +import org.elasticsearch.test.ESTestCase; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashMap; +import java.util.List; + +import static org.hamcrest.Matchers.containsString; + +public class DenseVectorTests extends ESTestCase { + public void testBadVectorType() { + DenseVector knn = new KnnDenseVector(new float[] { 1.0f, 2.0f, 3.5f }); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> knn.dotProduct(new HashMap<>())); + assertThat(e.getMessage(), containsString("Cannot use vector [")); + assertThat(e.getMessage(), containsString("] with class [java.util.HashMap] as query vector")); + + e = expectThrows(IllegalArgumentException.class, () -> knn.l1Norm(new HashMap<>())); + assertThat(e.getMessage(), containsString("Cannot use vector [")); + assertThat(e.getMessage(), containsString("] with class [java.util.HashMap] as query vector")); + + e = expectThrows(IllegalArgumentException.class, () -> knn.l2Norm(new HashMap<>())); + assertThat(e.getMessage(), containsString("Cannot use vector [")); + assertThat(e.getMessage(), containsString("] with class [java.util.HashMap] as query vector")); + + e = expectThrows(IllegalArgumentException.class, () -> knn.cosineSimilarity(new HashMap<>())); + assertThat(e.getMessage(), containsString("Cannot use vector [")); + assertThat(e.getMessage(), containsString("] with class [java.util.HashMap] as query vector")); + } + + public void testFloatVsListQueryVector() { + int dims = randomIntBetween(1, 16); + float[] docVector = new float[dims]; + float[] arrayQV = new float[dims]; + List listQV = new ArrayList<>(dims); + for (int i = 0; i < docVector.length; i++) { + docVector[i] = randomFloat(); + float q = randomFloat(); + arrayQV[i] = q; + listQV.add(q); + } + + KnnDenseVector knn = new KnnDenseVector(docVector); + assertEquals(knn.dotProduct(arrayQV), knn.dotProduct(listQV), 0.001f); + assertEquals(knn.dotProduct((Object) listQV), knn.dotProduct((Object) arrayQV), 0.001f); + + assertEquals(knn.l1Norm(arrayQV), knn.l1Norm(listQV), 0.001f); + assertEquals(knn.l1Norm((Object) listQV), knn.l1Norm((Object) arrayQV), 0.001f); + + assertEquals(knn.l2Norm(arrayQV), knn.l2Norm(listQV), 0.001f); + assertEquals(knn.l2Norm((Object) listQV), knn.l2Norm((Object) arrayQV), 0.001f); + + assertEquals(knn.cosineSimilarity(arrayQV), knn.cosineSimilarity(listQV), 0.001f); + assertEquals(knn.cosineSimilarity((Object) listQV), knn.cosineSimilarity((Object) arrayQV), 0.001f); + + for (Version indexVersion : Arrays.asList(Version.V_7_4_0, Version.CURRENT)) { + BytesRef value = BinaryDenseVectorScriptDocValuesTests.mockEncodeDenseVector(docVector, indexVersion); + BinaryDenseVector bdv = new BinaryDenseVector(value, dims, indexVersion); + + assertEquals(bdv.dotProduct(arrayQV), bdv.dotProduct(listQV), 0.001f); + assertEquals(bdv.dotProduct((Object) listQV), bdv.dotProduct((Object) arrayQV), 0.001f); + + assertEquals(bdv.l1Norm(arrayQV), bdv.l1Norm(listQV), 0.001f); + assertEquals(bdv.l1Norm((Object) listQV), bdv.l1Norm((Object) arrayQV), 0.001f); + + assertEquals(bdv.l2Norm(arrayQV), bdv.l2Norm(listQV), 0.001f); + assertEquals(bdv.l2Norm((Object) listQV), bdv.l2Norm((Object) arrayQV), 0.001f); + + assertEquals(bdv.cosineSimilarity(arrayQV), bdv.cosineSimilarity(listQV), 0.001f); + assertEquals(bdv.cosineSimilarity((Object) listQV), bdv.cosineSimilarity((Object) arrayQV), 0.001f); + } + } + 
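+    // A hedged sketch, not part of the original patch: the Object overloads also check
+    // dimensions via DenseVector.checkDimensions, so a query vector of the wrong length
+    // is rejected. The cast to Object routes the call through the whitelisted default
+    // method instead of binding statically to the float[] overload.
+    public void testDimensionMismatchSketch() {
+        DenseVector knn = new KnnDenseVector(new float[] { 1.0f, 2.0f, 3.5f });
+        IllegalArgumentException e = expectThrows(
+            IllegalArgumentException.class,
+            () -> knn.dotProduct((Object) new float[] { 1.0f })
+        );
+        assertThat(e.getMessage(), containsString("different number of dimensions"));
+    }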
+} diff --git a/x-pack/plugin/vectors/src/test/java/org/elasticsearch/xpack/vectors/query/KnnDenseVectorScriptDocValuesTests.java b/x-pack/plugin/vectors/src/test/java/org/elasticsearch/xpack/vectors/query/KnnDenseVectorScriptDocValuesTests.java index 7005e4d7bd531..743fc2d8bb63e 100644 --- a/x-pack/plugin/vectors/src/test/java/org/elasticsearch/xpack/vectors/query/KnnDenseVectorScriptDocValuesTests.java +++ b/x-pack/plugin/vectors/src/test/java/org/elasticsearch/xpack/vectors/query/KnnDenseVectorScriptDocValuesTests.java @@ -10,7 +10,6 @@ import org.apache.lucene.index.VectorValues; import org.apache.lucene.util.BytesRef; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.xpack.vectors.query.KnnDenseVectorScriptDocValues.KnnDenseVectorSupplier; import java.io.IOException; @@ -23,22 +22,52 @@ public void testGetVectorValueAndGetMagnitude() throws IOException { float[][] vectors = { { 1, 1, 1 }, { 1, 1, 2 }, { 1, 1, 3 } }; float[] expectedMagnitudes = { 1.7320f, 2.4495f, 3.3166f }; - KnnDenseVectorSupplier supplier = new KnnDenseVectorSupplier(wrap(vectors)); - DenseVectorScriptDocValues scriptDocValues = new KnnDenseVectorScriptDocValues(supplier, dims); + DenseVectorDocValuesField field = new KnnDenseVectorDocValuesField(wrap(vectors), "test", dims); + DenseVectorScriptDocValues scriptDocValues = field.getScriptDocValues(); for (int i = 0; i < vectors.length; i++) { - supplier.setNextDocId(i); + field.setNextDocId(i); + assertEquals(1, field.size()); + assertEquals(dims, scriptDocValues.dims()); assertArrayEquals(vectors[i], scriptDocValues.getVectorValue(), 0.0001f); assertEquals(expectedMagnitudes[i], scriptDocValues.getMagnitude(), 0.0001f); } } + public void testMetadataAndIterator() throws IOException { + int dims = 3; + float[][] vectors = fill(new float[randomIntBetween(1, 5)][dims]); + KnnDenseVectorDocValuesField field = new KnnDenseVectorDocValuesField(wrap(vectors), "test", dims); + for (int i = 0; i < vectors.length; i++) { + field.setNextDocId(i); + DenseVector dv = field.get(); + assertEquals(1, dv.size()); + assertFalse(dv.isEmpty()); + assertEquals(dims, dv.getDims()); + UnsupportedOperationException e = expectThrows(UnsupportedOperationException.class, field::iterator); + assertEquals("Cannot iterate over single valued dense_vector field, use get() instead", e.getMessage()); + } + assertEquals(1, field.size()); + field.setNextDocId(vectors.length); + DenseVector dv = field.get(); + assertEquals(dv, DenseVector.EMPTY); + } + + protected float[][] fill(float[][] vectors) { + for (float[] vector : vectors) { + for (int i = 0; i < vector.length; i++) { + vector[i] = randomFloat(); + } + } + return vectors; + } + public void testMissingValues() throws IOException { int dims = 3; float[][] vectors = { { 1, 1, 1 }, { 1, 1, 2 }, { 1, 1, 3 } }; - KnnDenseVectorSupplier supplier = new KnnDenseVectorSupplier(wrap(vectors)); - DenseVectorScriptDocValues scriptDocValues = new KnnDenseVectorScriptDocValues(supplier, dims); + DenseVectorDocValuesField field = new KnnDenseVectorDocValuesField(wrap(vectors), "test", dims); + DenseVectorScriptDocValues scriptDocValues = field.getScriptDocValues(); - supplier.setNextDocId(3); + field.setNextDocId(3); Exception e = expectThrows(IllegalArgumentException.class, () -> scriptDocValues.getVectorValue()); assertEquals("A document doesn't have a value for a vector field!", e.getMessage()); @@ -49,12 +78,17 @@ public void testMissingValues() throws IOException { public void testGetFunctionIsNotAccessible() throws IOException { int 
dims = 3; float[][] vectors = { { 1, 1, 1 }, { 1, 1, 2 }, { 1, 1, 3 } }; - KnnDenseVectorSupplier supplier = new KnnDenseVectorSupplier(wrap(vectors)); - DenseVectorScriptDocValues scriptDocValues = new KnnDenseVectorScriptDocValues(supplier, dims); + DenseVectorDocValuesField field = new KnnDenseVectorDocValuesField(wrap(vectors), "test", dims); + DenseVectorScriptDocValues scriptDocValues = field.getScriptDocValues(); - supplier.setNextDocId(0); + field.setNextDocId(0); Exception e = expectThrows(UnsupportedOperationException.class, () -> scriptDocValues.get(0)); - assertThat(e.getMessage(), containsString("accessing a vector field's value through 'get' or 'value' is not supported!")); + assertThat( + e.getMessage(), + containsString( + "accessing a vector field's value through 'get' or 'value' is not supported, use 'vectorValue' or 'magnitude' instead." + ) + ); } public void testSimilarityFunctions() throws IOException { @@ -62,16 +96,30 @@ public void testSimilarityFunctions() throws IOException { float[] docVector = new float[] { 230.0f, 300.33f, -34.8988f, 15.555f, -200.0f }; float[] queryVector = new float[] { 0.5f, 111.3f, -13.0f, 14.8f, -156.0f }; - KnnDenseVectorSupplier supplier = new KnnDenseVectorSupplier(wrap(new float[][] { docVector })); - DenseVectorScriptDocValues scriptDocValues = new KnnDenseVectorScriptDocValues(supplier, dims); - supplier.setNextDocId(0); + DenseVectorDocValuesField field = new KnnDenseVectorDocValuesField(wrap(new float[][] { docVector }), "test", dims); + DenseVectorScriptDocValues scriptDocValues = field.getScriptDocValues(); + field.setNextDocId(0); assertEquals("dotProduct result is not equal to the expected value!", 65425.624, scriptDocValues.dotProduct(queryVector), 0.001); assertEquals("l1norm result is not equal to the expected value!", 485.184, scriptDocValues.l1Norm(queryVector), 0.001); assertEquals("l2norm result is not equal to the expected value!", 301.361, scriptDocValues.l2Norm(queryVector), 0.001); } - private static VectorValues wrap(float[][] vectors) { + public void testMissingVectorValues() throws IOException { + int dims = 7; + KnnDenseVectorDocValuesField emptyKnn = new KnnDenseVectorDocValuesField(null, "test", dims); + + emptyKnn.setNextDocId(0); + assertEquals(0, emptyKnn.getScriptDocValues().size()); + assertTrue(emptyKnn.getScriptDocValues().isEmpty()); + assertEquals(DenseVector.EMPTY, emptyKnn.get()); + assertNull(emptyKnn.get(null)); + assertNull(emptyKnn.getInternal()); + UnsupportedOperationException e = expectThrows(UnsupportedOperationException.class, emptyKnn::iterator); + assertEquals("Cannot iterate over single valued dense_vector field, use get() instead", e.getMessage()); + } + + static VectorValues wrap(float[][] vectors) { return new VectorValues() { int index = 0; From 01eba38ee397fdde48c124f74b9157289beebb18 Mon Sep 17 00:00:00 2001 From: Przemko Robakowski Date: Wed, 16 Feb 2022 14:37:45 +0100 Subject: [PATCH 36/37] Fix GeoIpDownloader startup during rolling upgrade (#84000) If a rolling upgrade was performed from a version prior to GeoIPv2 (<`7.14`), the geoip downloader wouldn't be started, so no new databases were downloaded. This is especially troubling in `8.x`, as we no longer provide default databases inside ES, so after the upgrade no geoip enrichment can take place until the downloader is started with a workaround (setting `ingest.geoip.downloader.enabled` to `false` and then back to `true`).
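For reference, a hedged sketch of that manual workaround via the cluster update settings API, using the Java client (the client handle and the Settings import are assumed here, not part of this patch):

    // toggle the dynamic setting off and back on so the persistent task is restarted
    client.admin()
        .cluster()
        .prepareUpdateSettings()
        .setPersistentSettings(Settings.builder().put("ingest.geoip.downloader.enabled", false))
        .get();
    client.admin()
        .cluster()
        .prepareUpdateSettings()
        .setPersistentSettings(Settings.builder().put("ingest.geoip.downloader.enabled", true))
        .get();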
The underlying cause is that the logic used to reduce the number of requests and cluster update listeners at startup was too optimistic about the order of actions and about who can be elected master at what time. This change fixes that, cleans up the logging of ignorable errors, and adds debug logging on start and stop of the task to ease troubleshooting. It also adds a rolling upgrade test to make sure the fix works. --- docs/changelog/84000.yaml | 5 +++ .../ingest/geoip/GeoIpDownloader.java | 9 +++- .../geoip/GeoIpDownloaderTaskExecutor.java | 43 ++++++++++++------- x-pack/qa/rolling-upgrade/build.gradle | 7 +++ .../upgrades/GeoIpUpgradeIT.java | 33 ++++++++++++++ 5 files changed, 81 insertions(+), 16 deletions(-) create mode 100644 docs/changelog/84000.yaml create mode 100644 x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/GeoIpUpgradeIT.java diff --git a/docs/changelog/84000.yaml b/docs/changelog/84000.yaml new file mode 100644 index 0000000000000..b24d357834e21 --- /dev/null +++ b/docs/changelog/84000.yaml @@ -0,0 +1,5 @@ +pr: 84000 +summary: Fix `GeoIpDownloader` startup during rolling upgrade +area: Ingest +type: bug +issues: [] diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpDownloader.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpDownloader.java index 4d1c594ab7b7c..5ec08891981f6 100644 --- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpDownloader.java +++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpDownloader.java @@ -66,9 +66,15 @@ public class GeoIpDownloader extends AllocatedPersistentTask { Property.Dynamic, Property.NodeScope ); + + // for overriding in tests + private static final String DEFAULT_ENDPOINT = System.getProperty( + "ingest.geoip.downloader.endpoint.default", + "https://geoip.elastic.co/v1/database" + ); public static final Setting ENDPOINT_SETTING = Setting.simpleString( "ingest.geoip.downloader.endpoint", - "https://geoip.elastic.co/v1/database", + DEFAULT_ENDPOINT, Property.NodeScope ); @@ -258,6 +264,7 @@ void runDownloader() { try { updateDatabases(); } catch (Exception e) { + stats = stats.failedDownload(); logger.error("exception during geoip databases update", e); } try { diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderTaskExecutor.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderTaskExecutor.java index 16cb86953003e..9d65b17bacc5e 100644 --- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderTaskExecutor.java +++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/GeoIpDownloaderTaskExecutor.java @@ -12,11 +12,13 @@ import org.apache.logging.log4j.Logger; import org.elasticsearch.ResourceAlreadyExistsException; import org.elasticsearch.ResourceNotFoundException; +import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.client.internal.Client; import org.elasticsearch.client.internal.OriginSettingClient; import org.elasticsearch.cluster.ClusterChangedEvent; import org.elasticsearch.cluster.ClusterStateListener; +import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; @@ -29,6 +31,7 @@ import org.elasticsearch.persistent.PersistentTasksService; import org.elasticsearch.tasks.TaskId; import
org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.RemoteTransportException; import java.util.Map; import java.util.concurrent.atomic.AtomicReference; @@ -128,14 +131,18 @@ public void clusterChanged(ClusterChangedEvent event) { // wait for state recovered return; } - // bootstrap downloader after first cluster start + + DiscoveryNode masterNode = event.state().nodes().getMasterNode(); + if (masterNode == null || masterNode.getVersion().before(Version.V_7_14_0)) { + // wait for master to be upgraded so it understands geoip task + return; + } + clusterService.removeListener(this); - if (event.localNodeMaster()) { - if (ENABLED_SETTING.get(event.state().getMetadata().settings(), settings)) { - startTask(() -> clusterService.addListener(this)); - } else { - stopTask(() -> clusterService.addListener(this)); - } + if (ENABLED_SETTING.get(event.state().getMetadata().settings(), settings)) { + startTask(() -> clusterService.addListener(this)); + } else { + stopTask(() -> clusterService.addListener(this)); } } @@ -144,8 +151,9 @@ private void startTask(Runnable onFailure) { GEOIP_DOWNLOADER, GEOIP_DOWNLOADER, new GeoIpTaskParams(), - ActionListener.wrap(r -> {}, e -> { - if (e instanceof ResourceAlreadyExistsException == false) { + ActionListener.wrap(r -> logger.debug("Started geoip downloader task"), e -> { + Throwable t = e instanceof RemoteTransportException ? e.getCause() : e; + if (t instanceof ResourceAlreadyExistsException == false) { logger.error("failed to create geoip downloader task", e); onFailure.run(); } @@ -154,18 +162,23 @@ private void startTask(Runnable onFailure) { } private void stopTask(Runnable onFailure) { - ActionListener> listener = ActionListener.wrap(r -> {}, e -> { - if (e instanceof ResourceNotFoundException == false) { - logger.error("failed to remove geoip downloader task", e); - onFailure.run(); + ActionListener> listener = ActionListener.wrap( + r -> logger.debug("Stopped geoip downloader task"), + e -> { + Throwable t = e instanceof RemoteTransportException ? e.getCause() : e; + if (t instanceof ResourceNotFoundException == false) { + logger.error("failed to remove geoip downloader task", e); + onFailure.run(); + } } - }); + ); persistentTasksService.sendRemoveRequest( GEOIP_DOWNLOADER, ActionListener.runAfter( listener, () -> client.admin().indices().prepareDelete(DATABASES_INDEX).execute(ActionListener.wrap(rr -> {}, e -> { - if (e instanceof ResourceNotFoundException == false) { + Throwable t = e instanceof RemoteTransportException ? 
e.getCause() : e; + if (t instanceof ResourceNotFoundException == false) { logger.warn("failed to remove " + DATABASES_INDEX, e); } })) diff --git a/x-pack/qa/rolling-upgrade/build.gradle b/x-pack/qa/rolling-upgrade/build.gradle index 93a9a99ce3e3f..a6db46c9d0d10 100644 --- a/x-pack/qa/rolling-upgrade/build.gradle +++ b/x-pack/qa/rolling-upgrade/build.gradle @@ -41,6 +41,13 @@ BuildParams.bwcVersions.withWireCompatible { bwcVersion, baseName -> versions = [oldVersion, project.version] numberOfNodes = 3 + systemProperty 'ingest.geoip.downloader.enabled.default', 'true' + //we don't want to hit real service from each test + systemProperty 'ingest.geoip.downloader.endpoint.default', 'http://invalid.endpoint' + if (bwcVersion.onOrAfter('7.14.0')) { + setting 'ingest.geoip.downloader.endpoint', 'http://invalid.endpoint' + } + setting 'repositories.url.allowed_urls', 'http://snapshot.test*' setting 'path.repo', "['${buildDir}/cluster/shared/repo/${baseName}', '${searchableSnapshotRepository}']" setting 'xpack.license.self_generated.type', 'trial' diff --git a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/GeoIpUpgradeIT.java b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/GeoIpUpgradeIT.java new file mode 100644 index 0000000000000..3dedd041d6465 --- /dev/null +++ b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/GeoIpUpgradeIT.java @@ -0,0 +1,33 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.upgrades; + +import org.apache.http.util.EntityUtils; +import org.elasticsearch.client.Request; +import org.elasticsearch.client.Response; +import org.hamcrest.Matchers; + +import java.nio.charset.StandardCharsets; + +public class GeoIpUpgradeIT extends AbstractUpgradeTestCase { + + public void testGeoIpDownloader() throws Exception { + if (CLUSTER_TYPE == ClusterType.UPGRADED) { + assertBusy(() -> { + Response response = client().performRequest(new Request("GET", "_cat/tasks")); + String tasks = EntityUtils.toString(response.getEntity(), StandardCharsets.UTF_8); + assertThat(tasks, Matchers.containsString("geoip-downloader")); + }); + assertBusy(() -> { + Response response = client().performRequest(new Request("GET", "_ingest/geoip/stats")); + String tasks = EntityUtils.toString(response.getEntity(), StandardCharsets.UTF_8); + assertThat(tasks, Matchers.containsString("failed_downloads\":1")); + }); + } + } +} From 6b904d79cb18a809268cd079a04210a960750685 Mon Sep 17 00:00:00 2001 From: Artem Prigoda Date: Wed, 16 Feb 2022 15:52:20 +0100 Subject: [PATCH 37/37] [docs] Mention JDK 17 in the Contributing docs (#84018) ES 8+ requires JDK 17 to be built --- CONTRIBUTING.md | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index fc9935ca69794..287b28c5718e1 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -112,11 +112,11 @@ Contributing to the Elasticsearch codebase **Repository:** [https://github.com/elastic/elasticsearch](https://github.com/elastic/elasticsearch) -JDK 16 is required to build Elasticsearch. You must have a JDK 16 installation +JDK 17 is required to build Elasticsearch. You must have a JDK 17 installation with the environment variable `JAVA_HOME` referencing the path to Java home for -your JDK 16 installation. 
By default, tests use the same runtime as `JAVA_HOME`. +your JDK 17 installation. By default, tests use the same runtime as `JAVA_HOME`. However, since Elasticsearch supports JDK 11, the build supports compiling with -JDK 16 and testing on a JDK 11 runtime; to do this, set `RUNTIME_JAVA_HOME` +JDK 17 and testing on a JDK 11 runtime; to do this, set `RUNTIME_JAVA_HOME` pointing to the Java home of a JDK 11 installation. Note that this mechanism can be used to test against other JDKs as well, this is not only limited to JDK 11. @@ -151,9 +151,9 @@ and then run `curl` in another window like this: ### Importing the project into IntelliJ IDEA The minimum IntelliJ IDEA version required to import the Elasticsearch project is 2020.1 -Elasticsearch builds using Java 16. When importing into IntelliJ you will need +Elasticsearch builds using Java 17. When importing into IntelliJ you will need to define an appropriate SDK. The convention is that **this SDK should be named -"16"** so that the project import will detect it automatically. For more details +"17"** so that the project import will detect it automatically. For more details on defining an SDK in IntelliJ please refer to [their documentation](https://www.jetbrains.com/help/idea/sdk.html#define-sdk). SDK definitions are global, so you can add the JDK from any project, or after project import. Importing with a missing JDK will still work, IntelliJ will