Make Geo Context Mapping Parsing More Strict
Currently, if a geo context is backed by anything other than a geo_point field
or an object with lat and lon fields, parsing it as a geo context can end up
ignoring the context altogether, returning confusing errors such as
number_format_exception, or trying to parse the supplied number as a
long-encoded geohash. Parsing would also fail if the geo_point field was stored.

This commit makes the mapping parsing stricter: mapping updates and index
creation now fail if a geo context's path doesn't point to a geo_point field.

Supersedes elastic#32683

Closes elastic#32202
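
For illustration, here is a minimal sketch of the behaviour change, written with the same helpers the tests in this commit use (jsonBuilder, createIndex, expectThrows); the field names pin and suggestion are just example names. A geo context whose path points to a non-geo_point field is now rejected up front:

// Sketch only: assumes an ESSingleNodeTestCase-style test with the usual static imports.
XContentBuilder mapping = jsonBuilder()
    .startObject()
        .startObject("type1")
            .startObject("properties")
                .startObject("pin")
                    .field("type", "keyword")            // not a geo_point
                .endObject()
                .startObject("suggestion")
                    .field("type", "completion")
                    .startArray("contexts")
                        .startObject()
                            .field("name", "st")
                            .field("type", "geo")
                            .field("path", "pin")        // points at the keyword field above
                        .endObject()
                    .endArray()
                .endObject()
            .endObject()
        .endObject()
    .endObject();

// On indices created on or after 7.0.0-alpha1 this now fails at index creation or
// mapping update instead of producing confusing errors at index or suggest time.
ElasticsearchParseException e = expectThrows(ElasticsearchParseException.class,
    () -> createIndex("test", Settings.EMPTY, "type1", mapping));
// e.getMessage(): field [pin] referenced in context [st] must be mapped to geo_point, found [keyword]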
imotov committed Aug 13, 2018
1 parent cba7fce commit 335e70f
Showing 7 changed files with 161 additions and 7 deletions.
3 changes: 3 additions & 0 deletions docs/reference/migration/migrate_7_0/search.asciidoc
@@ -92,6 +92,9 @@ deprecated in 6.x, has been removed. Context enabled suggestion queries
without contexts have to visit every suggestion, which degrades the search performance
considerably.

For geo contexts, the value of the `path` parameter is now validated against the mapping,
and the context is only accepted if `path` points to a field with the `geo_point` type.

==== Semantics changed for `max_concurrent_shard_requests`

`max_concurrent_shard_requests` used to limit the total number of concurrent shard
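The migration note above states that the geo context path is now validated against the mapping. As a point of reference (illustrative only, not part of this diff), a mapping that passes the check keeps path pointed at a geo_point field; the names location and suggest are invented for the sketch, written in the same XContentBuilder idiom as the tests below:

// Sketch of a mapping the stricter validation accepts: the geo context's "path"
// resolves to a field mapped as geo_point.
XContentBuilder mapping = jsonBuilder()
    .startObject()
        .startObject("properties")
            .startObject("location")
                .field("type", "geo_point")          // the context target must be a geo_point
            .endObject()
            .startObject("suggest")
                .field("type", "completion")
                .startArray("contexts")
                    .startObject()
                        .field("name", "loc")
                        .field("type", "geo")
                        .field("path", "location")   // checked on index creation / mapping update
                        .field("precision", 5)
                    .endObject()
                .endArray()
            .endObject()
        .endObject()
    .endObject();
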
@@ -52,6 +52,7 @@
import org.elasticsearch.index.similarity.SimilarityService;
import org.elasticsearch.indices.InvalidTypeNameException;
import org.elasticsearch.indices.mapper.MapperRegistry;
import org.elasticsearch.search.suggest.completion.context.GeoContextMapping;

import java.io.Closeable;
import java.io.IOException;
@@ -421,6 +422,8 @@ private synchronized Map<String, DocumentMapper> internalMerge(@Nullable Documen
MapperMergeValidator.validateFieldReferences(fieldMappers, fieldAliasMappers,
fullPathObjectMappers, fieldTypes);

GeoContextMapping.validateGeoContextPaths(indexSettings.getIndexVersionCreated(), fieldMappers, fieldTypes::get);

if (reason == MergeReason.MAPPING_UPDATE) {
// this check will only be performed on the master node when there is
// a call to the update mapping API. For all other cases like
@@ -20,6 +20,7 @@
package org.elasticsearch.search.suggest.completion.context;

import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.Version;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.ToXContentFragment;
@@ -28,13 +29,16 @@
import org.elasticsearch.common.xcontent.XContentParser.Token;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.index.mapper.CompletionFieldMapper;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.ParseContext;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Objects;
import java.util.Set;
import java.util.function.Function;

/**
* A {@link ContextMapping} defines criteria that can be used to
@@ -131,6 +135,31 @@ public final List<InternalQueryContext> parseQueryContext(XContentParser parser)
*/
protected abstract XContentBuilder toInnerXContent(XContentBuilder builder, Params params) throws IOException;

/**
* Checks if the current context is consistent with the rest of the fields. For example, the GeoContext
* should check that the field that it points to has the correct type.
*/
protected void validateReferences(Version indexVersionCreated, Function<String, MappedFieldType> fieldResolver) {
// No validation is required by default
}

/**
* Verifies that all field paths specified in contexts point to the fields with correct mappings
*/
public static void validateGeoContextPaths(Version indexVersionCreated, List<FieldMapper> fieldMappers,
Function<String, MappedFieldType> fieldResolver) {
for (FieldMapper fieldMapper : fieldMappers) {
if (CompletionFieldMapper.CONTENT_TYPE.equals(fieldMapper.typeName())) {
CompletionFieldMapper.CompletionFieldType fieldType = ((CompletionFieldMapper) fieldMapper).fieldType();
if (fieldType.hasContextMappings()) {
for (ContextMapping context : fieldType.getContextMappings()) {
context.validateReferences(indexVersionCreated, fieldResolver);
}
}
}
}
}

@Override
public final XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.field(FIELD_NAME, name);
@@ -37,6 +37,7 @@
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Objects;
@@ -50,7 +51,7 @@
* and creates context queries for defined {@link ContextMapping}s
* for a {@link CompletionFieldMapper}
*/
public class ContextMappings implements ToXContent {
public class ContextMappings implements ToXContent, Iterable<ContextMapping<?>> {

private final List<ContextMapping<?>> contextMappings;
private final Map<String, ContextMapping<?>> contextNameMap;
@@ -97,6 +98,11 @@ public void addField(ParseContext.Document document, String name, String input,
document.add(new TypedContextField(name, input, weight, contexts, document));
}

@Override
public Iterator<ContextMapping<?>> iterator() {
return contextMappings.iterator();
}

/**
* Field prepends context values with a suggestion
* Context values are associated with a type, denoted by
@@ -19,12 +19,17 @@

package org.elasticsearch.search.suggest.completion.context;

import org.apache.logging.log4j.LogManager;
import org.apache.lucene.document.LatLonDocValuesField;
import org.apache.lucene.document.LatLonPoint;
import org.apache.lucene.document.StringField;
import org.apache.lucene.index.DocValuesType;
import org.apache.lucene.index.IndexableField;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.Version;
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.geo.GeoUtils;
import org.elasticsearch.common.logging.DeprecationLogger;
import org.elasticsearch.common.unit.DistanceUnit;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
@@ -42,6 +47,7 @@
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.function.Function;
import java.util.stream.Collectors;

import static org.elasticsearch.common.geo.GeoHashUtils.addNeighbors;
@@ -69,6 +75,8 @@ public class GeoContextMapping extends ContextMapping<GeoQueryContext> {
static final String CONTEXT_PRECISION = "precision";
static final String CONTEXT_NEIGHBOURS = "neighbours";

private static final DeprecationLogger DEPRECATION_LOGGER = new DeprecationLogger(LogManager.getLogger(GeoContextMapping.class));

private final int precision;
private final String fieldName;

@@ -205,11 +213,11 @@ public Set<CharSequence> parseContext(Document document) {
for (IndexableField field : fields) {
if (field instanceof StringField) {
spare.resetFromString(field.stringValue());
- } else {
- // todo return this to .stringValue() once LatLonPoint implements it
+ geohashes.add(spare.geohash());
+ } if (field instanceof LatLonPoint || field instanceof LatLonDocValuesField) {
spare.resetFromIndexableField(field);
+ geohashes.add(spare.geohash());
}
- geohashes.add(spare.geohash());
}
}
}
Expand Down Expand Up @@ -279,6 +287,32 @@ public List<InternalQueryContext> toInternalQueryContexts(List<GeoQueryContext>
return internalQueryContextList;
}

@Override
protected void validateReferences(Version indexVersionCreated, Function<String, MappedFieldType> fieldResolver) {
if (fieldName != null) {
MappedFieldType mappedFieldType = fieldResolver.apply(fieldName);
if (mappedFieldType == null) {
if (indexVersionCreated.before(Version.V_7_0_0_alpha1)) {
DEPRECATION_LOGGER.deprecatedAndMaybeLog("geo_context_mapping",
"field [{}] referenced in context [{}] is not defined in the mapping", fieldName, name);
} else {
throw new ElasticsearchParseException(
"field [{}] referenced in context [{}] is not defined in the mapping", fieldName, name);
}
} else if (GeoPointFieldMapper.CONTENT_TYPE.equals(mappedFieldType.typeName()) == false) {
if (indexVersionCreated.before(Version.V_7_0_0_alpha1)) {
DEPRECATION_LOGGER.deprecatedAndMaybeLog("geo_context_mapping",
"field [{}] referenced in context [{}] must be mapped to geo_point, found [{}]",
fieldName, name, mappedFieldType.typeName());
} else {
throw new ElasticsearchParseException(
"field [{}] referenced in context [{}] must be mapped to geo_point, found [{}]",
fieldName, name, mappedFieldType.typeName());
}
}
}
}

@Override
public boolean equals(Object o) {
if (this == o) return true;
@@ -493,15 +493,24 @@ public void testGeoNeighbours() throws Exception {
}

public void testGeoField() throws Exception {
// Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.V_5_0_0_alpha5);
// Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
XContentBuilder mapping = jsonBuilder();
mapping.startObject();
mapping.startObject(TYPE);
mapping.startObject("properties");
mapping.startObject("location");
mapping.startObject("properties");
mapping.startObject("pin");
mapping.field("type", "geo_point");
// Enable store and disable indexing sometimes
if (randomBoolean()) {
mapping.field("store", "true");
}
if (randomBoolean()) {
mapping.field("index", "false");
}
mapping.endObject(); // pin
mapping.endObject();
mapping.endObject(); // location
mapping.startObject(FIELD);
mapping.field("type", "completion");
mapping.field("analyzer", "simple");
@@ -510,7 +519,7 @@
mapping.startObject();
mapping.field("name", "st");
mapping.field("type", "geo");
mapping.field("path", "pin");
mapping.field("path", "location.pin");
mapping.field("precision", 5);
mapping.endObject();
mapping.endArray();
@@ -524,7 +533,9 @@

XContentBuilder source1 = jsonBuilder()
.startObject()
.startObject("location")
.latlon("pin", 52.529172, 13.407333)
.endObject()
.startObject(FIELD)
.array("input", "Hotel Amsterdam in Berlin")
.endObject()
Expand All @@ -533,7 +544,9 @@ public void testGeoField() throws Exception {

XContentBuilder source2 = jsonBuilder()
.startObject()
.startObject("location")
.latlon("pin", 52.363389, 4.888695)
.endObject()
.startObject(FIELD)
.array("input", "Hotel Berlin in Amsterdam")
.endObject()
@@ -600,6 +613,7 @@ public void assertSuggestions(String suggestionName, SuggestionBuilder suggestBu
private void createIndexAndMapping(CompletionMappingBuilder completionMappingBuilder) throws IOException {
createIndexAndMappingAndSettings(Settings.EMPTY, completionMappingBuilder);
}

private void createIndexAndMappingAndSettings(Settings settings, CompletionMappingBuilder completionMappingBuilder) throws IOException {
XContentBuilder mapping = jsonBuilder().startObject()
.startObject(TYPE).startObject("properties")
@@ -20,6 +20,7 @@
package org.elasticsearch.search.suggest.completion;

import org.apache.lucene.index.IndexableField;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
@@ -200,6 +201,70 @@ public void testIndexingWithMultipleContexts() throws Exception {
assertContextSuggestFields(fields, 3);
}

public void testMalformedGeoField() throws Exception {
XContentBuilder mapping = jsonBuilder();
mapping.startObject();
mapping.startObject("type1");
mapping.startObject("properties");
mapping.startObject("pin");
String type = randomFrom("text", "keyword", "long");
mapping.field("type", type);
mapping.endObject();
mapping.startObject("suggestion");
mapping.field("type", "completion");
mapping.field("analyzer", "simple");

mapping.startArray("contexts");
mapping.startObject();
mapping.field("name", "st");
mapping.field("type", "geo");
mapping.field("path", "pin");
mapping.field("precision", 5);
mapping.endObject();
mapping.endArray();

mapping.endObject();

mapping.endObject();
mapping.endObject();
mapping.endObject();

ElasticsearchParseException ex = expectThrows(ElasticsearchParseException.class,
() -> createIndex("test", Settings.EMPTY, "type1", mapping));

assertThat(ex.getMessage(), equalTo("field [pin] referenced in context [st] must be mapped to geo_point, found [" + type + "]"));
}

public void testMissingGeoField() throws Exception {
XContentBuilder mapping = jsonBuilder();
mapping.startObject();
mapping.startObject("type1");
mapping.startObject("properties");
mapping.startObject("suggestion");
mapping.field("type", "completion");
mapping.field("analyzer", "simple");

mapping.startArray("contexts");
mapping.startObject();
mapping.field("name", "st");
mapping.field("type", "geo");
mapping.field("path", "pin");
mapping.field("precision", 5);
mapping.endObject();
mapping.endArray();

mapping.endObject();

mapping.endObject();
mapping.endObject();
mapping.endObject();

ElasticsearchParseException ex = expectThrows(ElasticsearchParseException.class,
() -> createIndex("test", Settings.EMPTY, "type1", mapping));

assertThat(ex.getMessage(), equalTo("field [pin] referenced in context [st] is not defined in the mapping"));
}

public void testParsingQueryContextBasic() throws Exception {
XContentBuilder builder = jsonBuilder().value("ezs42e44yx96");
XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder));
