Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account-related emails.

Already on GitHub? Sign in to your account

[7.x][ML] Rename df-analytics _id_copy to ml__id_copy (#43754) #43783

Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -91,7 +91,10 @@ public void testOutlierDetectionWithFewDocuments() throws Exception {
assertThat(destDoc.get(field), equalTo(sourceDoc.get(field)));
}
assertThat(destDoc.containsKey("ml"), is(true));

@SuppressWarnings("unchecked")
Map<String, Object> resultsObject = (Map<String, Object>) destDoc.get("ml");

assertThat(resultsObject.containsKey("outlier_score"), is(true));
double outlierScore = (double) resultsObject.get("outlier_score");
assertThat(outlierScore, allOf(greaterThanOrEqualTo(0.0), lessThanOrEqualTo(100.0)));
Expand Down Expand Up @@ -209,7 +212,10 @@ public void testOutlierDetectionWithMoreFieldsThanDocValueFieldLimit() throws Ex
assertThat(destDoc.get(field), equalTo(sourceDoc.get(field)));
}
assertThat(destDoc.containsKey("ml"), is(true));

@SuppressWarnings("unchecked")
Map<String, Object> resultsObject = (Map<String, Object>) destDoc.get("ml");

assertThat(resultsObject.containsKey("outlier_score"), is(true));
double outlierScore = (double) resultsObject.get("outlier_score");
assertThat(outlierScore, allOf(greaterThanOrEqualTo(0.0), lessThanOrEqualTo(100.0)));
Expand Down

This file was deleted.

Original file line number Diff line number Diff line change
Expand Up @@ -42,7 +42,16 @@
/**
* {@link DataFrameAnalyticsIndex} class encapsulates logic for creating destination index based on source index metadata.
*/
final class DataFrameAnalyticsIndex {
public final class DataFrameAnalyticsIndex {

public static final String ID_COPY = "ml__id_copy";

// Metadata fields
static final String CREATION_DATE_MILLIS = "creation_date_in_millis";
static final String VERSION = "version";
static final String CREATED = "created";
static final String CREATED_BY = "created_by";
static final String ANALYTICS = "analytics";

private static final String PROPERTIES = "properties";
private static final String META = "_meta";
Expand Down Expand Up @@ -122,7 +131,7 @@ private static Settings settings(GetSettingsResponse settingsResponse) {
Integer maxNumberOfReplicas = findMaxSettingValue(settingsResponse, IndexMetaData.SETTING_NUMBER_OF_REPLICAS);

Settings.Builder settingsBuilder = Settings.builder();
settingsBuilder.put(IndexSortConfig.INDEX_SORT_FIELD_SETTING.getKey(), DataFrameAnalyticsFields.ID);
settingsBuilder.put(IndexSortConfig.INDEX_SORT_FIELD_SETTING.getKey(), ID_COPY);
settingsBuilder.put(IndexSortConfig.INDEX_SORT_ORDER_SETTING.getKey(), SortOrder.ASC);
if (maxNumberOfShards != null) {
settingsBuilder.put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, maxNumberOfShards);
Expand Down Expand Up @@ -151,17 +160,17 @@ private static void addProperties(Map<String, Object> mappingsAsMap) {
Map<String, Object> properties = getOrPutDefault(mappingsAsMap, PROPERTIES, HashMap::new);
Map<String, String> idCopyMapping = new HashMap<>();
idCopyMapping.put("type", "keyword");
properties.put(DataFrameAnalyticsFields.ID, idCopyMapping);
properties.put(ID_COPY, idCopyMapping);
}

private static void addMetaData(Map<String, Object> mappingsAsMap, String analyticsId, Clock clock) {
Map<String, Object> metadata = getOrPutDefault(mappingsAsMap, META, HashMap::new);
metadata.put(DataFrameAnalyticsFields.CREATION_DATE_MILLIS, clock.millis());
metadata.put(DataFrameAnalyticsFields.CREATED_BY, "data-frame-analytics");
metadata.put(CREATION_DATE_MILLIS, clock.millis());
metadata.put(CREATED_BY, "data-frame-analytics");
Map<String, Version> versionMapping = new HashMap<>();
versionMapping.put(DataFrameAnalyticsFields.CREATED, Version.CURRENT);
metadata.put(DataFrameAnalyticsFields.VERSION, versionMapping);
metadata.put(DataFrameAnalyticsFields.ANALYTICS, analyticsId);
versionMapping.put(CREATED, Version.CURRENT);
metadata.put(VERSION, versionMapping);
metadata.put(ANALYTICS, analyticsId);
}

private static <K, V> V getOrPutDefault(Map<K, Object> map, K key, Supplier<V> valueSupplier) {
Expand All @@ -182,7 +191,7 @@ public static void updateMappingsToDestIndex(Client client, DataFrameAnalyticsCo
String type = mappings.keysIt().next();

Map<String, Object> addedMappings = Collections.singletonMap(PROPERTIES,
Collections.singletonMap(DataFrameAnalyticsFields.ID, Collections.singletonMap("type", "keyword")));
Collections.singletonMap(ID_COPY, Collections.singletonMap("type", "keyword")));

PutMappingRequest putMappingRequest = new PutMappingRequest(getIndexResponse.indices());
putMappingRequest.type(type);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -150,7 +150,7 @@ private void reindexDataframeAndStartAnalysis(DataFrameAnalyticsTask task, DataF
reindexRequest.setSourceIndices(config.getSource().getIndex());
reindexRequest.setSourceQuery(config.getSource().getParsedQuery());
reindexRequest.setDestIndex(config.getDest().getIndex());
reindexRequest.setScript(new Script("ctx._source." + DataFrameAnalyticsFields.ID + " = ctx._id"));
reindexRequest.setScript(new Script("ctx._source." + DataFrameAnalyticsIndex.ID_COPY + " = ctx._id"));

final ThreadContext threadContext = client.threadPool().getThreadContext();
final Supplier<ThreadContext.StoredContext> supplier = threadContext.newRestorableContext(false);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,7 @@
import org.elasticsearch.search.sort.SortOrder;
import org.elasticsearch.xpack.core.ClientHelper;
import org.elasticsearch.xpack.ml.datafeed.extractor.fields.ExtractedField;
import org.elasticsearch.xpack.ml.dataframe.DataFrameAnalyticsFields;
import org.elasticsearch.xpack.ml.dataframe.DataFrameAnalyticsIndex;

import java.io.IOException;
import java.util.ArrayList;
Expand Down Expand Up @@ -126,7 +126,7 @@ private SearchRequestBuilder buildSearchRequest() {
.setScroll(SCROLL_TIMEOUT)
// This ensures the search throws if there are failures and the scroll context gets cleared automatically
.setAllowPartialSearchResults(false)
.addSort(DataFrameAnalyticsFields.ID, SortOrder.ASC)
.addSort(DataFrameAnalyticsIndex.ID_COPY, SortOrder.ASC)
.setIndices(context.indices)
.setSize(context.scrollSize)
.setQuery(context.query);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -167,12 +167,12 @@ public void testCreateDestinationIndex() throws IOException {
containsInAnyOrder("index.number_of_shards", "index.number_of_replicas", "index.sort.field", "index.sort.order"));
assertThat(createIndexRequest.settings().getAsInt("index.number_of_shards", -1), equalTo(5));
assertThat(createIndexRequest.settings().getAsInt("index.number_of_replicas", -1), equalTo(1));
assertThat(createIndexRequest.settings().get("index.sort.field"), equalTo("_id_copy"));
assertThat(createIndexRequest.settings().get("index.sort.field"), equalTo("ml__id_copy"));
assertThat(createIndexRequest.settings().get("index.sort.order"), equalTo("asc"));

try (XContentParser parser = createParser(JsonXContent.jsonXContent, createIndexRequest.mappings().get("_doc"))) {
Map<String, Object> map = parser.map();
assertThat(extractValue("_doc.properties._id_copy.type", map), equalTo("keyword"));
assertThat(extractValue("_doc.properties.ml__id_copy.type", map), equalTo("keyword"));
assertThat(extractValue("_doc.properties.field_1", map), equalTo("field_1_mappings"));
assertThat(extractValue("_doc.properties.field_2", map), equalTo("field_2_mappings"));
assertThat(extractValue("_doc._meta.analytics", map), equalTo(ANALYTICS_ID));
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -127,7 +127,7 @@ public void testTwoPageExtraction() throws IOException {
assertThat(searchRequest, containsString("\"query\":{\"match_all\":{\"boost\":1.0}}"));
assertThat(searchRequest, containsString("\"docvalue_fields\":[{\"field\":\"field_1\"},{\"field\":\"field_2\"}]"));
assertThat(searchRequest, containsString("\"_source\":{\"includes\":[],\"excludes\":[]}"));
assertThat(searchRequest, containsString("\"sort\":[{\"_id_copy\":{\"order\":\"asc\"}}]"));
assertThat(searchRequest, containsString("\"sort\":[{\"ml__id_copy\":{\"order\":\"asc\"}}]"));

// Check continue scroll requests had correct ids
assertThat(dataExtractor.capturedContinueScrollIds.size(), equalTo(2));
Expand Down