Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

[ML] Make ml internal indices hidden #52423

Merged
merged 10 commits into from
Feb 19, 2020
Original file line number Diff line number Diff line change
Expand Up @@ -256,12 +256,13 @@ public Iterator<Setting<?>> settings() {
Setting.Property.Dynamic,
Setting.Property.IndexScope);

public static final String SETTING_INDEX_HIDDEN = "index.hidden";
/**
* Whether the index is considered hidden or not. A hidden index will not be resolved in
* normal wildcard searches unless explicitly allowed
*/
public static final Setting<Boolean> INDEX_HIDDEN_SETTING =
Setting.boolSetting("index.hidden", false, Property.IndexScope, Property.Final);
Setting.boolSetting(SETTING_INDEX_HIDDEN, false, Property.IndexScope, Property.Final);

/**
* an internal index format description, allowing us to find out if this index is upgraded or needs upgrading
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -119,9 +119,6 @@ public class MetaDataCreateIndexService {
*/
private static final CharacterRunAutomaton DOT_INDICES_EXCLUSIONS = new CharacterRunAutomaton(Regex.simpleMatchToAutomaton(
".watch-history-*",
".ml-anomalies-*",
".ml-notifications-*",
".ml-annotations*",
".data-frame-notifications-*",
".transform-notifications-*"
));
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -618,9 +618,6 @@ public void testIndexNameExclusionsList() {
// this test case should be removed when DOT_INDICES_EXCLUSIONS is empty
List<String> excludedNames = Arrays.asList(
".watch-history-" + randomAlphaOfLength(5).toLowerCase(Locale.ROOT),
".ml-anomalies-" + randomAlphaOfLength(5).toLowerCase(Locale.ROOT),
".ml-notifications-" + randomAlphaOfLength(5).toLowerCase(Locale.ROOT),
".ml-annotations-" + randomAlphaOfLength(5).toLowerCase(Locale.ROOT),
".data-frame-notifications-" + randomAlphaOfLength(5).toLowerCase(Locale.ROOT),
".transform-notifications-" + randomAlphaOfLength(5).toLowerCase(Locale.ROOT)
);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,7 @@
import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse;
import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder;
import org.elasticsearch.action.admin.indices.get.GetIndexResponse;
import org.elasticsearch.action.support.IndicesOptions;
import org.elasticsearch.client.Client;
import org.elasticsearch.client.Requests;
import org.elasticsearch.cluster.ClusterName;
Expand Down Expand Up @@ -124,13 +125,20 @@ public void setUp() throws Exception {
public void tearDown() throws Exception {
logger.trace("[{}#{}]: cleaning up after test", getTestClass().getSimpleName(), getTestName());
super.tearDown();
assertAcked(client().admin().indices().prepareDelete("*").get());
assertAcked(
client().admin().indices().prepareDelete("*")
.setIndicesOptions(IndicesOptions.LENIENT_EXPAND_OPEN_CLOSED_HIDDEN)
.get());
MetaData metaData = client().admin().cluster().prepareState().get().getState().getMetaData();
assertThat("test leaves persistent cluster metadata behind: " + metaData.persistentSettings().keySet(),
metaData.persistentSettings().size(), equalTo(0));
assertThat("test leaves transient cluster metadata behind: " + metaData.transientSettings().keySet(),
metaData.transientSettings().size(), equalTo(0));
GetIndexResponse indices = client().admin().indices().prepareGetIndex().addIndices("*").get();
GetIndexResponse indices =
client().admin().indices().prepareGetIndex()
.setIndicesOptions(IndicesOptions.LENIENT_EXPAND_OPEN_CLOSED_HIDDEN)
.addIndices("*")
.get();
assertThat("test leaves indices that were not deleted: " + Strings.arrayToCommaDelimitedString(indices.indices()),
indices.indices(), equalTo(Strings.EMPTY_ARRAY));
if (resetNodeAfterTest()) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -572,9 +572,9 @@ private void wipeCluster() throws Exception {
protected static void wipeAllIndices() throws IOException {
boolean includeHidden = minimumNodeVersion().onOrAfter(Version.V_7_7_0);
try {
final Request deleteReq = new Request("DELETE", "*");
deleteReq.addParameter("expand_wildcards", "open,closed" + (includeHidden ? ",hidden" : ""));
final Response response = adminClient().performRequest(deleteReq);
final Request deleteRequest = new Request("DELETE", "*");
deleteRequest.addParameter("expand_wildcards", "open,closed" + (includeHidden ? ",hidden" : ""));
final Response response = adminClient().performRequest(deleteRequest);
try (InputStream is = response.getEntity().getContent()) {
assertTrue((boolean) XContentHelper.convertToMap(XContentType.JSON.xContent(), is, true).get("acknowledged"));
}
Expand Down Expand Up @@ -706,6 +706,13 @@ private void wipeRollupJobs() throws IOException {
}
}

protected void refreshAllIndices() throws IOException {
    // Only clusters where every node is on 7.7+ understand the "hidden" wildcard
    // expansion; on mixed/older clusters we must not send it or the request fails.
    final boolean expandHidden = minimumNodeVersion().onOrAfter(Version.V_7_7_0);
    final String wildcards = expandHidden ? "open,closed,hidden" : "open,closed";
    final Request refresh = new Request("POST", "/_refresh");
    refresh.addParameter("expand_wildcards", wildcards);
    client().performRequest(refresh);
}

private void waitForPendingRollupTasks() throws Exception {
    // Block until every rollup job task has completed; tasks that are not
    // rollup jobs are deliberately excluded from the wait condition.
    waitForPendingTasks(adminClient(), name -> !name.startsWith("xpack/rollup/job"));
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -59,11 +59,13 @@ public static void createAnnotationsIndexIfNecessary(Settings settings, Client c
// Create the annotations index if it doesn't exist already.
if (mlLookup.containsKey(INDEX_NAME) == false) {

CreateIndexRequest createIndexRequest = new CreateIndexRequest(INDEX_NAME);
createIndexRequest.mapping(annotationsMapping());
createIndexRequest.settings(Settings.builder()
.put(IndexMetaData.SETTING_AUTO_EXPAND_REPLICAS, "0-1")
.put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, "1"));
CreateIndexRequest createIndexRequest =
new CreateIndexRequest(INDEX_NAME)
.mapping(annotationsMapping())
.settings(Settings.builder()
.put(IndexMetaData.SETTING_AUTO_EXPAND_REPLICAS, "0-1")
.put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, "1")
.put(IndexMetaData.SETTING_INDEX_HIDDEN, true));

executeAsyncWithOrigin(client.threadPool().getThreadContext(), ML_ORIGIN, createIndexRequest,
ActionListener.<CreateIndexResponse>wrap(
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -9,11 +9,9 @@ public final class AnomalyDetectorsIndexFields {

public static final String CONFIG_INDEX = ".ml-config";

public static final String RESULTS_INDEX_PREFIX = ".ml-anomalies-";

public static final String STATE_INDEX_PREFIX = ".ml-state";
public static final String STATE_INDEX_PATTERN = STATE_INDEX_PREFIX + "*";

public static final String RESULTS_INDEX_PREFIX = ".ml-anomalies-";
public static final String RESULTS_INDEX_DEFAULT = "shared";

private AnomalyDetectorsIndexFields() {}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,8 @@
"auto_expand_replicas" : "0-1",
"query" : {
"default_field" : "all_field_values"
}
},
"hidden": true
}
},
"mappings": ${xpack.ml.anomalydetection.results.mappings}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,8 @@
],
"settings" : {
"index" : {
"auto_expand_replicas" : "0-1"
"auto_expand_replicas" : "0-1",
"hidden": true
}
},
"mappings" : {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,8 @@
"settings" : {
"index" : {
"number_of_shards" : "1",
"auto_expand_replicas" : "0-1"
"auto_expand_replicas" : "0-1",
"hidden": true
}
},
"mappings" : {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -127,7 +127,7 @@ public void testMiniFarequoteWithDatafeeder() throws Exception {
client().performRequest(airlineData2);

// Ensure all data is searchable
client().performRequest(new Request("POST", "/_refresh"));
refreshAllIndices();

String jobId = "mini-farequote-with-data-feeder-job";
createFarequoteJob(jobId);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,7 @@
import org.elasticsearch.action.bulk.BulkRequestBuilder;
import org.elasticsearch.action.bulk.BulkResponse;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.action.support.IndicesOptions;
import org.elasticsearch.action.support.WriteRequest;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.xpack.core.ml.datafeed.DatafeedConfig;
Expand Down Expand Up @@ -79,7 +80,7 @@ public void setUpData() {
public void tearDownData() {
    // Standard ML cleanup first (jobs, datafeeds, etc.), then remove the test
    // data index and refresh so no stale documents leak into the next test.
    cleanUp();
    client().admin().indices().prepareDelete(DATA_INDEX).get();
    // Hidden indices (e.g. the ML internal indices) must be expanded explicitly,
    // otherwise the "*" wildcard would skip them.
    client().admin().indices()
        .prepareRefresh("*")
        .setIndicesOptions(IndicesOptions.LENIENT_EXPAND_OPEN_CLOSED_HIDDEN)
        .get();
}

public void testBasicCategorization() throws Exception {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -747,7 +747,7 @@ public void testLookbackWithoutPermissions() throws Exception {
assertThat(jobStatsResponseAsString, containsString("\"processed_record_count\":0"));

// There should be a notification saying that there was a problem extracting data
client().performRequest(new Request("POST", "/_refresh"));
refreshAllIndices();
Response notificationsResponse = client().performRequest(
new Request("GET", NotificationsIndex.NOTIFICATIONS_INDEX + "/_search?size=1000&q=job_id:" + jobId));
String notificationsResponseAsString = EntityUtils.toString(notificationsResponse.getEntity());
Expand Down Expand Up @@ -954,7 +954,7 @@ public void testLookbackWithoutPermissionsAndRollup() throws Exception {
startDatafeedAndWaitUntilStopped(datafeedId, BASIC_AUTH_VALUE_ML_ADMIN_WITH_SOME_DATA_ACCESS);
waitUntilJobIsClosed(jobId);
// There should be a notification saying that there was a problem extracting data
client().performRequest(new Request("POST", "/_refresh"));
refreshAllIndices();
Response notificationsResponse = client().performRequest(
new Request("GET", NotificationsIndex.NOTIFICATIONS_INDEX + "/_search?size=1000&q=job_id:" + jobId));
String notificationsResponseAsString = EntityUtils.toString(notificationsResponse.getEntity());
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,7 @@
import org.elasticsearch.action.bulk.BulkResponse;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.support.IndicesOptions;
import org.elasticsearch.action.support.WriteRequest;
import org.elasticsearch.action.update.UpdateAction;
import org.elasticsearch.action.update.UpdateRequest;
Expand Down Expand Up @@ -161,7 +162,7 @@ public void testDeleteExpiredData() throws Exception {
}

// Refresh to ensure the snapshot timestamp updates are visible
client().admin().indices().prepareRefresh("*").get();
client().admin().indices().prepareRefresh("*").setIndicesOptions(IndicesOptions.LENIENT_EXPAND_OPEN_CLOSED_HIDDEN).get();

// We need to wait a second to ensure the second time around model snapshots will have a different ID (it depends on epoch seconds)
// FIXME it would be better to wait for something concrete instead of wait for time to elapse
Expand Down Expand Up @@ -292,6 +293,6 @@ private void retainAllSnapshots(String jobId) throws Exception {
client().execute(UpdateModelSnapshotAction.INSTANCE, request).get();
}
// We need to refresh to ensure the updates are visible
client().admin().indices().prepareRefresh("*").get();
client().admin().indices().prepareRefresh("*").setIndicesOptions(IndicesOptions.LENIENT_EXPAND_OPEN_CLOSED_HIDDEN).get();
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -231,7 +231,7 @@ public void testCreateJobsWithIndexNameOption() throws Exception {
jobId1, "1236", 1));
client().performRequest(createResultRequest);

client().performRequest(new Request("POST", "/_refresh"));
refreshAllIndices();

responseAsString = EntityUtils.toString(client().performRequest(
new Request("GET", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId1 + "/results/buckets")).getEntity());
Expand All @@ -256,7 +256,7 @@ public void testCreateJobsWithIndexNameOption() throws Exception {
jobId2, "1236", 1));
client().performRequest(createResultRequest);

client().performRequest(new Request("POST", "/_refresh"));
refreshAllIndices();

responseAsString = EntityUtils.toString(client().performRequest(
new Request("GET", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId2 + "/results/buckets")).getEntity());
Expand All @@ -278,7 +278,7 @@ public void testCreateJobsWithIndexNameOption() throws Exception {
new Request("GET", "/_cat/indices/" + AnomalyDetectorsIndexFields.RESULTS_INDEX_PREFIX + "*")).getEntity());
assertThat(responseAsString, containsString(AnomalyDetectorsIndexFields.RESULTS_INDEX_PREFIX + "custom-" + indexName));

client().performRequest(new Request("POST", "/_refresh"));
refreshAllIndices();

responseAsString = EntityUtils.toString(client().performRequest(
new Request("GET", AnomalyDetectorsIndexFields.RESULTS_INDEX_PREFIX + "custom-" + indexName + "/_count")).getEntity());
Expand All @@ -289,7 +289,7 @@ public void testCreateJobsWithIndexNameOption() throws Exception {
responseAsString = EntityUtils.toString(client().performRequest(new Request("GET", "/_aliases")).getEntity());
assertThat(responseAsString, not(containsString(AnomalyDetectorsIndex.jobResultsAliasedName(jobId2))));

client().performRequest(new Request("POST", "/_refresh"));
refreshAllIndices();
responseAsString = EntityUtils.toString(client().performRequest(
new Request("GET", "/_cat/indices/" + AnomalyDetectorsIndexFields.RESULTS_INDEX_PREFIX + "*")).getEntity());
assertThat(responseAsString, not(containsString(AnomalyDetectorsIndexFields.RESULTS_INDEX_PREFIX + "custom-" + indexName)));
Expand Down Expand Up @@ -670,7 +670,7 @@ public void testMultiIndexDelete() throws Exception {
createDoc3.setEntity(createDoc0.getEntity());
client().performRequest(createDoc3);

client().performRequest(new Request("POST", "/_refresh"));
refreshAllIndices();

// check for the documents
assertThat(EntityUtils.toString(client().performRequest(new Request("GET", indexName+ "/_count")).getEntity()),
Expand All @@ -683,7 +683,7 @@ public void testMultiIndexDelete() throws Exception {
// Delete
client().performRequest(new Request("DELETE", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId));

client().performRequest(new Request("POST", "/_refresh"));
refreshAllIndices();

// check that the indices still exist but are empty
String indicesAfterDelete = EntityUtils.toString(client().performRequest(
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@
package org.elasticsearch.xpack.ml.integration;

import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksRequest;
import org.elasticsearch.action.support.IndicesOptions;
import org.elasticsearch.client.Client;
import org.elasticsearch.cluster.ClusterModule;
import org.elasticsearch.cluster.ClusterState;
Expand Down Expand Up @@ -148,7 +149,7 @@ protected DeleteExpiredDataAction.Response deleteExpiredData() throws Exception
new DeleteExpiredDataAction.Request()).get();

// We need to refresh to ensure the deletion is visible
client().admin().indices().prepareRefresh("*").get();
client().admin().indices().prepareRefresh("*").setIndicesOptions(IndicesOptions.LENIENT_EXPAND_OPEN_CLOSED_HIDDEN).get();

return response;
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -938,7 +938,6 @@ public List<NamedXContentRegistry.Entry> getNamedXContent() {
public Collection<SystemIndexDescriptor> getSystemIndexDescriptors() {
return List.of(
new SystemIndexDescriptor(MlMetaIndex.INDEX_NAME, "Contains scheduling and anomaly tracking metadata"),
new SystemIndexDescriptor(AnomalyDetectorsIndexFields.STATE_INDEX_PATTERN, "Contains ML model state"),
new SystemIndexDescriptor(AnomalyDetectorsIndexFields.CONFIG_INDEX, "Contains ML configuration data"),
new SystemIndexDescriptor(InferenceIndexConstants.INDEX_PATTERN, "Contains ML model configuration and statistics")
);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@
package org.elasticsearch.xpack.ml.integration;

import com.carrotsearch.hppc.cursors.ObjectObjectCursor;
import org.elasticsearch.action.support.IndicesOptions;
import org.elasticsearch.cluster.metadata.AliasMetaData;
import org.elasticsearch.common.collect.ImmutableOpenMap;
import org.elasticsearch.common.settings.Settings;
Expand Down Expand Up @@ -73,7 +74,10 @@ private boolean annotationsIndexExists() {
private int numberOfAnnotationsAliases() {
int count = 0;
ImmutableOpenMap<String, List<AliasMetaData>> aliases = client().admin().indices()
.prepareGetAliases(AnnotationIndex.READ_ALIAS_NAME, AnnotationIndex.WRITE_ALIAS_NAME).get().getAliases();
.prepareGetAliases(AnnotationIndex.READ_ALIAS_NAME, AnnotationIndex.WRITE_ALIAS_NAME)
.setIndicesOptions(IndicesOptions.LENIENT_EXPAND_OPEN_CLOSED_HIDDEN)
.get()
.getAliases();
if (aliases != null) {
for (ObjectObjectCursor<String, List<AliasMetaData>> entry : aliases) {
count += entry.value.size();
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -165,7 +165,7 @@ public void testJobAutoClose() throws Exception {
indexRequest = new IndexRequest("data");
indexRequest.source("time", 1407083600L);
client().index(indexRequest).get();
refresh();
refresh("*", ".ml-*");

Job.Builder job = createScheduledJob("job_id");
PutJobAction.Request putJobRequest = new PutJobAction.Request(job);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@

import org.elasticsearch.action.ActionFuture;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.support.IndicesOptions;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.cluster.node.DiscoveryNode;
Expand Down Expand Up @@ -489,6 +490,7 @@ private void run(String jobId, CheckedRunnable<Exception> disrupt) throws Except
// are what we expect them to be:
private static DataCounts getDataCountsFromIndex(String jobId) {
SearchResponse searchResponse = client().prepareSearch()
.setIndicesOptions(IndicesOptions.LENIENT_EXPAND_OPEN_CLOSED_HIDDEN)
.setQuery(QueryBuilders.idsQuery().addIds(DataCounts.documentId(jobId)))
.get();
if (searchResponse.getHits().getTotalHits().value != 1) {
Expand Down
Loading