diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterConfiguration.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterConfiguration.groovy index af84a44233aa3..6ca54e6a2afb9 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterConfiguration.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterConfiguration.groovy @@ -64,10 +64,10 @@ class ClusterConfiguration { /** * Configuration of the setting discovery.zen.minimum_master_nodes on the nodes. - * In case of more than one node, this defaults to (number of nodes / 2) + 1 + * In case of more than one node, this defaults to the number of nodes */ @Input - Closure minimumMasterNodes = { getNumNodes() > 1 ? getNumNodes().intdiv(2) + 1 : -1 } + Closure minimumMasterNodes = { getNumNodes() > 1 ? getNumNodes() : -1 } @Input String jvmArgs = "-Xms" + System.getProperty('tests.heap.size', '512m') + diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/QueryDSLDocumentationTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/QueryDSLDocumentationTests.java index 4e5d024f74a27..7e84b266f7f03 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/QueryDSLDocumentationTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/QueryDSLDocumentationTests.java @@ -23,7 +23,7 @@ import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.common.geo.ShapeRelation; import org.elasticsearch.common.geo.builders.CoordinatesBuilder; -import org.elasticsearch.common.geo.builders.ShapeBuilders; +import org.elasticsearch.common.geo.builders.MultiPointBuilder; import org.elasticsearch.common.unit.DistanceUnit; import org.elasticsearch.index.query.GeoShapeQueryBuilder; import org.elasticsearch.index.query.functionscore.FunctionScoreQueryBuilder; @@ -189,7 +189,7 @@ public void testGeoShape() throws IOException { // tag::geo_shape GeoShapeQueryBuilder qb = geoShapeQuery( "pin.location", // <1> - ShapeBuilders.newMultiPoint( // <2> + new MultiPointBuilder( // <2> new CoordinatesBuilder() .coordinate(0, 0) .coordinate(0, 10) diff --git a/core/src/main/java/org/elasticsearch/action/admin/cluster/settings/SettingsUpdater.java b/core/src/main/java/org/elasticsearch/action/admin/cluster/settings/SettingsUpdater.java index 5d1990a48d06b..dc13913652a34 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/cluster/settings/SettingsUpdater.java +++ b/core/src/main/java/org/elasticsearch/action/admin/cluster/settings/SettingsUpdater.java @@ -54,15 +54,23 @@ synchronized ClusterState updateSettings(final ClusterState currentState, Settin transientSettings.put(currentState.metaData().transientSettings()); changed |= clusterSettings.updateDynamicSettings(transientToApply, transientSettings, transientUpdates, "transient"); + Settings.Builder persistentSettings = Settings.builder(); persistentSettings.put(currentState.metaData().persistentSettings()); changed |= clusterSettings.updateDynamicSettings(persistentToApply, persistentSettings, persistentUpdates, "persistent"); final ClusterState clusterState; if (changed) { + Settings transientFinalSettings = transientSettings.build(); + Settings persistentFinalSettings = persistentSettings.build(); + // both transient and persistent settings must be consistent by themselves; we can't allow dependencies to be + // in either of them, otherwise a full cluster restart will break the settings validation + 
clusterSettings.validate(transientFinalSettings, true); + clusterSettings.validate(persistentFinalSettings, true); + MetaData.Builder metaData = MetaData.builder(currentState.metaData()) - .persistentSettings(persistentSettings.build()) - .transientSettings(transientSettings.build()); + .persistentSettings(persistentFinalSettings) + .transientSettings(transientFinalSettings); ClusterBlocks.Builder blocks = ClusterBlocks.builder().blocks(currentState.blocks()); boolean updatedReadOnly = MetaData.SETTING_READ_ONLY_SETTING.get(metaData.persistentSettings()) diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/template/put/TransportPutIndexTemplateAction.java b/core/src/main/java/org/elasticsearch/action/admin/indices/template/put/TransportPutIndexTemplateAction.java index 7d9897b112eae..1624c7950e7f2 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/indices/template/put/TransportPutIndexTemplateAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/template/put/TransportPutIndexTemplateAction.java @@ -77,7 +77,7 @@ protected void masterOperation(final PutIndexTemplateRequest request, final Clus } final Settings.Builder templateSettingsBuilder = Settings.builder(); templateSettingsBuilder.put(request.settings()).normalizePrefix(IndexMetaData.INDEX_SETTING_PREFIX); - indexScopedSettings.validate(templateSettingsBuilder); + indexScopedSettings.validate(templateSettingsBuilder.build(), true); // templates must be consistent with regards to dependencies indexTemplateService.putTemplate(new MetaDataIndexTemplateService.PutRequest(cause, request.name()) .patterns(request.patterns()) .order(request.order()) diff --git a/core/src/main/java/org/elasticsearch/action/search/SearchRequest.java b/core/src/main/java/org/elasticsearch/action/search/SearchRequest.java index 7bfa317c72c70..87cb645a8559d 100644 --- a/core/src/main/java/org/elasticsearch/action/search/SearchRequest.java +++ b/core/src/main/java/org/elasticsearch/action/search/SearchRequest.java @@ -169,6 +169,10 @@ public ActionRequestValidationException validate() { validationException = addValidationError("using [from] is not allowed in a scroll context", validationException); } + if (requestCache != null && requestCache && scroll() != null) { + validationException = + addValidationError("[request_cache] cannot be used in a scroll context", validationException); + } return validationException; } diff --git a/core/src/main/java/org/elasticsearch/bootstrap/Security.java b/core/src/main/java/org/elasticsearch/bootstrap/Security.java index a1ce20a0e27c8..c742fdf7f84c6 100644 --- a/core/src/main/java/org/elasticsearch/bootstrap/Security.java +++ b/core/src/main/java/org/elasticsearch/bootstrap/Security.java @@ -199,28 +199,28 @@ static Policy readPolicy(URL policyFile, Set codebases) { try { // set codebase properties for (URL url : codebases) { - String shortName = PathUtils.get(url.toURI()).getFileName().toString(); - if (shortName.endsWith(".jar") == false) { + String fileName = PathUtils.get(url.toURI()).getFileName().toString(); + if (fileName.endsWith(".jar") == false) { continue; // tests :( } - String property = "codebase." + shortName; - if (shortName.startsWith("elasticsearch-rest-client")) { - // The rest client is currently the only example where we have an elasticsearch built artifact - // which needs special permissions in policy files when used. This temporary solution is to - // pass in an extra system property that omits the -version.jar suffix the other properties have. 
- // That allows the snapshots to reference snapshot builds of the client, and release builds to - // referenced release builds of the client, all with the same grant statements. - final String esVersion = Version.CURRENT + (Build.CURRENT.isSnapshot() ? "-SNAPSHOT" : ""); - final int index = property.indexOf("-" + esVersion + ".jar"); - assert index >= 0; - String restClientAlias = property.substring(0, index); - propertiesSet.add(restClientAlias); - System.setProperty(restClientAlias, url.toString()); + // We attempt to use a versionless identifier for each codebase. This assumes a specific version + // format in the jar filename. While we cannot ensure all jars in all plugins use this format, nonconformity + // only means policy grants would need to include the entire jar filename as they always have before. + String property = "codebase." + fileName; + String aliasProperty = "codebase." + fileName.replaceFirst("-\\d+\\.\\d+.*\\.jar", ""); + if (aliasProperty.equals(property) == false) { + propertiesSet.add(aliasProperty); + String previous = System.setProperty(aliasProperty, url.toString()); + if (previous != null) { + throw new IllegalStateException("codebase property already set: " + aliasProperty + " -> " + previous + + ", cannot set to " + url.toString()); + } } propertiesSet.add(property); String previous = System.setProperty(property, url.toString()); if (previous != null) { - throw new IllegalStateException("codebase property already set: " + shortName + "->" + previous); + throw new IllegalStateException("codebase property already set: " + property + " -> " + previous + + ", cannot set to " + url.toString()); } } return Policy.getInstance("JavaPolicy", new URIParameter(policyFile.toURI())); diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/IndexGraveyard.java b/core/src/main/java/org/elasticsearch/cluster/metadata/IndexGraveyard.java index aa273dd92197d..d07052b9b4d28 100644 --- a/core/src/main/java/org/elasticsearch/cluster/metadata/IndexGraveyard.java +++ b/core/src/main/java/org/elasticsearch/cluster/metadata/IndexGraveyard.java @@ -34,6 +34,8 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.Index; +import org.joda.time.DateTime; +import org.joda.time.DateTimeZone; import java.io.IOException; import java.util.ArrayList; @@ -432,7 +434,7 @@ public XContentBuilder toXContent(final XContentBuilder builder, final Params pa builder.startObject(); builder.field(INDEX_KEY); index.toXContent(builder, params); - builder.timeValueField(DELETE_DATE_IN_MILLIS_KEY, DELETE_DATE_KEY, deleteDateInMillis, TimeUnit.MILLISECONDS); + builder.dateField(DELETE_DATE_IN_MILLIS_KEY, DELETE_DATE_KEY, deleteDateInMillis); return builder.endObject(); } diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexService.java b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexService.java index 49568ab300f03..64f2383fe7797 100644 --- a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexService.java +++ b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexService.java @@ -220,10 +220,9 @@ public void createIndex(final CreateIndexClusterStateUpdateRequest request, private void onlyCreateIndex(final CreateIndexClusterStateUpdateRequest request, final ActionListener listener) { Settings.Builder updatedSettingsBuilder = Settings.builder(); - 
updatedSettingsBuilder.put(request.settings()).normalizePrefix(IndexMetaData.INDEX_SETTING_PREFIX); - indexScopedSettings.validate(updatedSettingsBuilder); - request.settings(updatedSettingsBuilder.build()); - + Settings build = updatedSettingsBuilder.put(request.settings()).normalizePrefix(IndexMetaData.INDEX_SETTING_PREFIX).build(); + indexScopedSettings.validate(build, true); // we do validate here - index setting must be consistent + request.settings(build); clusterService.submitStateUpdateTask("create-index [" + request.index() + "], cause [" + request.cause() + "]", new IndexCreationTask(logger, allocationService, request, listener, indicesService, aliasValidator, xContentRegistry, settings, this::validate)); @@ -420,7 +419,6 @@ public ClusterState execute(ClusterState currentState) throws Exception { tmpImdBuilder.primaryTerm(shardId, primaryTerm); } } - // Set up everything, now locally create the index to see that things are ok, and apply final IndexMetaData tmpImd = tmpImdBuilder.build(); ActiveShardCount waitForActiveShards = request.waitForActiveShards(); diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexTemplateService.java b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexTemplateService.java index c96895b94e793..883d7f2fc47ec 100644 --- a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexTemplateService.java +++ b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataIndexTemplateService.java @@ -276,7 +276,7 @@ private void validate(PutRequest request) { } try { - indexScopedSettings.validate(request.settings); + indexScopedSettings.validate(request.settings, true); // templates must be consistent with regards to dependencies } catch (IllegalArgumentException iae) { validationErrors.add(iae.getMessage()); for (Throwable t : iae.getSuppressed()) { diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataUpdateSettingsService.java b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataUpdateSettingsService.java index abc0a4e8ea2de..2c0bc9292945d 100644 --- a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataUpdateSettingsService.java +++ b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataUpdateSettingsService.java @@ -54,6 +54,7 @@ import java.util.Locale; import java.util.Map; import java.util.Set; +import java.util.function.Predicate; import static org.elasticsearch.action.support.ContextPreservingActionListener.wrapPreservingContext; @@ -163,7 +164,7 @@ public void updateSettings(final UpdateSettingsClusterStateUpdateRequest request Settings.Builder settingsForOpenIndices = Settings.builder(); final Set skippedSettings = new HashSet<>(); - indexScopedSettings.validate(normalizedSettings); + indexScopedSettings.validate(normalizedSettings, false); // don't validate dependencies here we check it below // never allow to change the number of shards for (String key : normalizedSettings.keySet()) { Setting setting = indexScopedSettings.get(key); @@ -240,7 +241,9 @@ public ClusterState execute(ClusterState currentState) { if (preserveExisting) { indexSettings.put(indexMetaData.getSettings()); } - metaDataBuilder.put(IndexMetaData.builder(indexMetaData).settings(indexSettings)); + Settings finalSettings = indexSettings.build(); + indexScopedSettings.validate(finalSettings.filter(k -> indexScopedSettings.isPrivateSetting(k) == false), true); + metaDataBuilder.put(IndexMetaData.builder(indexMetaData).settings(finalSettings)); } } } @@ -254,7 +257,9 @@ public 
ClusterState execute(ClusterState currentState) { if (preserveExisting) { indexSettings.put(indexMetaData.getSettings()); } - metaDataBuilder.put(IndexMetaData.builder(indexMetaData).settings(indexSettings)); + Settings finalSettings = indexSettings.build(); + indexScopedSettings.validate(finalSettings.filter(k -> indexScopedSettings.isPrivateSetting(k) == false), true); + metaDataBuilder.put(IndexMetaData.builder(indexMetaData).settings(finalSettings)); } } } diff --git a/core/src/main/java/org/elasticsearch/common/geo/GeoShapeType.java b/core/src/main/java/org/elasticsearch/common/geo/GeoShapeType.java new file mode 100644 index 0000000000000..f80302969405c --- /dev/null +++ b/core/src/main/java/org/elasticsearch/common/geo/GeoShapeType.java @@ -0,0 +1,316 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.common.geo; + +import com.vividsolutions.jts.geom.Coordinate; +import org.elasticsearch.ElasticsearchParseException; +import org.elasticsearch.common.geo.builders.CircleBuilder; +import org.elasticsearch.common.geo.builders.CoordinatesBuilder; +import org.elasticsearch.common.geo.builders.EnvelopeBuilder; +import org.elasticsearch.common.geo.builders.GeometryCollectionBuilder; +import org.elasticsearch.common.geo.builders.LineStringBuilder; +import org.elasticsearch.common.geo.builders.MultiLineStringBuilder; +import org.elasticsearch.common.geo.builders.MultiPointBuilder; +import org.elasticsearch.common.geo.builders.MultiPolygonBuilder; +import org.elasticsearch.common.geo.builders.PointBuilder; +import org.elasticsearch.common.geo.builders.PolygonBuilder; +import org.elasticsearch.common.geo.builders.ShapeBuilder; +import org.elasticsearch.common.geo.builders.ShapeBuilder.Orientation; +import org.elasticsearch.common.geo.parsers.CoordinateNode; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry.Entry; +import org.elasticsearch.common.unit.DistanceUnit; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Locale; +import java.util.Map; + +/** + * Enumeration that lists all {@link GeoShapeType}s that can be parsed and indexed + */ +public enum GeoShapeType { + POINT("point") { + @Override + public ShapeBuilder getBuilder(CoordinateNode coordinates, DistanceUnit.Distance radius, + Orientation orientation, boolean coerce) { + return new PointBuilder().coordinate(validate(coordinates, coerce).coordinate); + } + + @Override + CoordinateNode validate(CoordinateNode coordinates, boolean coerce) { + if (coordinates.isEmpty()) { + throw new ElasticsearchParseException( + "invalid number of points (0) provided when expecting a single coordinate ([lat, lng])"); + } else if (coordinates.children != null) { + throw new ElasticsearchParseException("multipoint data 
provided when single point data expected."); + } + return coordinates; + } + }, + MULTIPOINT("multipoint") { + @Override + public ShapeBuilder getBuilder(CoordinateNode coordinates, DistanceUnit.Distance radius, + Orientation orientation, boolean coerce) { + validate(coordinates, coerce); + CoordinatesBuilder coordinatesBuilder = new CoordinatesBuilder(); + for (CoordinateNode node : coordinates.children) { + coordinatesBuilder.coordinate(node.coordinate); + } + return new MultiPointBuilder(coordinatesBuilder.build()); + } + + @Override + CoordinateNode validate(CoordinateNode coordinates, boolean coerce) { + if (coordinates.children == null || coordinates.children.isEmpty()) { + if (coordinates.coordinate != null) { + throw new ElasticsearchParseException("single coordinate found when expecting an array of " + + "coordinates. change type to point or change data to an array of >0 coordinates"); + } + throw new ElasticsearchParseException("no data provided for multipoint object when expecting " + + ">0 points (e.g., [[lat, lng]] or [[lat, lng], ...])"); + } else { + for (CoordinateNode point : coordinates.children) { + POINT.validate(point, coerce); + } + } + return coordinates; + } + + }, + LINESTRING("linestring") { + @Override + public ShapeBuilder getBuilder(CoordinateNode coordinates, DistanceUnit.Distance radius, + Orientation orientation, boolean coerce) { + validate(coordinates, coerce); + CoordinatesBuilder line = new CoordinatesBuilder(); + for (CoordinateNode node : coordinates.children) { + line.coordinate(node.coordinate); + } + return new LineStringBuilder(line); + } + + @Override + CoordinateNode validate(CoordinateNode coordinates, boolean coerce) { + if (coordinates.children.size() < 2) { + throw new ElasticsearchParseException("invalid number of points in LineString (found [{}] - must be >= 2)", + coordinates.children.size()); + } + return coordinates; + } + }, + MULTILINESTRING("multilinestring") { + @Override + public ShapeBuilder getBuilder(CoordinateNode coordinates, DistanceUnit.Distance radius, + Orientation orientation, boolean coerce) { + validate(coordinates, coerce); + MultiLineStringBuilder multiline = new MultiLineStringBuilder(); + for (CoordinateNode node : coordinates.children) { + multiline.linestring(LineStringBuilder.class.cast(LINESTRING.getBuilder(node, radius, orientation, coerce))); + } + return multiline; + } + + @Override + CoordinateNode validate(CoordinateNode coordinates, boolean coerce) { + if (coordinates.children.size() < 1) { + throw new ElasticsearchParseException("invalid number of lines in MultiLineString (found [{}] - must be >= 1)", + coordinates.children.size()); + } + return coordinates; + } + }, + POLYGON("polygon") { + @Override + public ShapeBuilder getBuilder(CoordinateNode coordinates, DistanceUnit.Distance radius, + Orientation orientation, boolean coerce) { + validate(coordinates, coerce); + // build shell + LineStringBuilder shell = LineStringBuilder.class.cast(LINESTRING.getBuilder(coordinates.children.get(0), + radius, orientation, coerce)); + // build polygon with shell and holes + PolygonBuilder polygon = new PolygonBuilder(shell, orientation); + for (int i = 1; i < coordinates.children.size(); ++i) { + CoordinateNode child = coordinates.children.get(i); + LineStringBuilder hole = LineStringBuilder.class.cast(LINESTRING.getBuilder(child, radius, orientation, coerce)); + polygon.hole(hole); + } + return polygon; + } + + void validateLinearRing(CoordinateNode coordinates, boolean coerce) { + if (coordinates.children == null 
|| coordinates.children.isEmpty()) { + String error = "Invalid LinearRing found."; + error += (coordinates.coordinate == null) ? + " No coordinate array provided" : " Found a single coordinate when expecting a coordinate array"; + throw new ElasticsearchParseException(error); + } + + int numValidPts = coerce ? 3 : 4; + if (coordinates.children.size() < numValidPts) { + throw new ElasticsearchParseException("invalid number of points in LinearRing (found [{}] - must be >= [{}])", + coordinates.children.size(), numValidPts); + } + // close linear ring iff coerce is set and ring is open, otherwise throw parse exception + if (!coordinates.children.get(0).coordinate.equals( + coordinates.children.get(coordinates.children.size() - 1).coordinate)) { + if (coerce == true) { + coordinates.children.add(coordinates.children.get(0)); + } else { + throw new ElasticsearchParseException("invalid LinearRing found (coordinates are not closed)"); + } + } + } + + @Override + CoordinateNode validate(CoordinateNode coordinates, boolean coerce) { + /** + * Per GeoJSON spec (http://geojson.org/geojson-spec.html#linestring) + * A LinearRing is closed LineString with 4 or more positions. The first and last positions + * are equivalent (they represent equivalent points). Though a LinearRing is not explicitly + * represented as a GeoJSON geometry type, it is referred to in the Polygon geometry type definition. + */ + if (coordinates.children == null || coordinates.children.isEmpty()) { + throw new ElasticsearchParseException( + "invalid LinearRing provided for type polygon. Linear ring must be an array of coordinates"); + } + for (CoordinateNode ring : coordinates.children) { + validateLinearRing(ring, coerce); + } + + return coordinates; + } + }, + MULTIPOLYGON("multipolygon") { + @Override + public ShapeBuilder getBuilder(CoordinateNode coordinates, DistanceUnit.Distance radius, + Orientation orientation, boolean coerce) { + validate(coordinates, coerce); + MultiPolygonBuilder polygons = new MultiPolygonBuilder(orientation); + for (CoordinateNode node : coordinates.children) { + polygons.polygon(PolygonBuilder.class.cast(POLYGON.getBuilder(node, radius, orientation, coerce))); + } + return polygons; + } + + @Override + CoordinateNode validate(CoordinateNode coordinates, boolean coerce) { + // noop; todo validate at least 1 polygon to ensure valid multipolygon + return coordinates; + } + }, + ENVELOPE("envelope") { + @Override + public ShapeBuilder getBuilder(CoordinateNode coordinates, DistanceUnit.Distance radius, + Orientation orientation, boolean coerce) { + validate(coordinates, coerce); + // verify coordinate bounds, correct if necessary + Coordinate uL = coordinates.children.get(0).coordinate; + Coordinate lR = coordinates.children.get(1).coordinate; + if (((lR.x < uL.x) || (uL.y < lR.y))) { + Coordinate uLtmp = uL; + uL = new Coordinate(Math.min(uL.x, lR.x), Math.max(uL.y, lR.y)); + lR = new Coordinate(Math.max(uLtmp.x, lR.x), Math.min(uLtmp.y, lR.y)); + } + return new EnvelopeBuilder(uL, lR); + } + + @Override + CoordinateNode validate(CoordinateNode coordinates, boolean coerce) { + // validate the coordinate array for envelope type + if (coordinates.children.size() != 2) { + throw new ElasticsearchParseException( + "invalid number of points [{}] provided for geo_shape [{}] when expecting an array of 2 coordinates", + coordinates.children.size(), GeoShapeType.ENVELOPE.shapename); + } + return coordinates; + } + }, + CIRCLE("circle") { + @Override + public ShapeBuilder getBuilder(CoordinateNode coordinates, 
DistanceUnit.Distance radius, + Orientation orientation, boolean coerce) { + return new CircleBuilder().center(coordinates.coordinate).radius(radius); + + } + + @Override + CoordinateNode validate(CoordinateNode coordinates, boolean coerce) { + // noop + return coordinates; + } + }, + GEOMETRYCOLLECTION("geometrycollection") { + @Override + public ShapeBuilder getBuilder(CoordinateNode coordinates, DistanceUnit.Distance radius, + Orientation orientation, boolean coerce) { + // noop, handled in parser + return null; + } + + @Override + CoordinateNode validate(CoordinateNode coordinates, boolean coerce) { + // noop + return null; + } + }; + + private final String shapename; + private static Map shapeTypeMap = new HashMap<>(); + + static { + for (GeoShapeType type : values()) { + shapeTypeMap.put(type.shapename, type); + } + } + + GeoShapeType(String shapename) { + this.shapename = shapename; + } + + public String shapeName() { + return shapename; + } + + public static GeoShapeType forName(String geoshapename) { + String typename = geoshapename.toLowerCase(Locale.ROOT); + if (shapeTypeMap.containsKey(typename)) { + return shapeTypeMap.get(typename); + } + throw new IllegalArgumentException("unknown geo_shape ["+geoshapename+"]"); + } + + public abstract ShapeBuilder getBuilder(CoordinateNode coordinates, DistanceUnit.Distance radius, + ShapeBuilder.Orientation orientation, boolean coerce); + abstract CoordinateNode validate(CoordinateNode coordinates, boolean coerce); + + public static List getShapeWriteables() { + List namedWriteables = new ArrayList<>(); + namedWriteables.add(new Entry(ShapeBuilder.class, PointBuilder.TYPE.shapeName(), PointBuilder::new)); + namedWriteables.add(new Entry(ShapeBuilder.class, CircleBuilder.TYPE.shapeName(), CircleBuilder::new)); + namedWriteables.add(new Entry(ShapeBuilder.class, EnvelopeBuilder.TYPE.shapeName(), EnvelopeBuilder::new)); + namedWriteables.add(new Entry(ShapeBuilder.class, MultiPointBuilder.TYPE.shapeName(), MultiPointBuilder::new)); + namedWriteables.add(new Entry(ShapeBuilder.class, LineStringBuilder.TYPE.shapeName(), LineStringBuilder::new)); + namedWriteables.add(new Entry(ShapeBuilder.class, MultiLineStringBuilder.TYPE.shapeName(), MultiLineStringBuilder::new)); + namedWriteables.add(new Entry(ShapeBuilder.class, PolygonBuilder.TYPE.shapeName(), PolygonBuilder::new)); + namedWriteables.add(new Entry(ShapeBuilder.class, MultiPolygonBuilder.TYPE.shapeName(), MultiPolygonBuilder::new)); + namedWriteables.add(new Entry(ShapeBuilder.class, GeometryCollectionBuilder.TYPE.shapeName(), GeometryCollectionBuilder::new)); + return namedWriteables; + } +} diff --git a/core/src/main/java/org/elasticsearch/common/geo/builders/CircleBuilder.java b/core/src/main/java/org/elasticsearch/common/geo/builders/CircleBuilder.java index 658d8ed84c1a0..108e66d9150be 100644 --- a/core/src/main/java/org/elasticsearch/common/geo/builders/CircleBuilder.java +++ b/core/src/main/java/org/elasticsearch/common/geo/builders/CircleBuilder.java @@ -19,6 +19,9 @@ package org.elasticsearch.common.geo.builders; +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.geo.GeoShapeType; +import org.elasticsearch.common.geo.parsers.ShapeParser; import org.locationtech.spatial4j.shape.Circle; import com.vividsolutions.jts.geom.Coordinate; @@ -31,9 +34,9 @@ import java.io.IOException; import java.util.Objects; -public class CircleBuilder extends ShapeBuilder { +public class CircleBuilder extends ShapeBuilder { - public static final String FIELD_RADIUS = 
"radius"; + public static final ParseField FIELD_RADIUS = new ParseField("radius"); public static final GeoShapeType TYPE = GeoShapeType.CIRCLE; private DistanceUnit unit = DistanceUnit.DEFAULT; @@ -148,9 +151,9 @@ public DistanceUnit unit() { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); - builder.field(FIELD_TYPE, TYPE.shapeName()); - builder.field(FIELD_RADIUS, unit.toString(radius)); - builder.field(FIELD_COORDINATES); + builder.field(ShapeParser.FIELD_TYPE.getPreferredName(), TYPE.shapeName()); + builder.field(FIELD_RADIUS.getPreferredName(), unit.toString(radius)); + builder.field(ShapeParser.FIELD_COORDINATES.getPreferredName()); toXContent(builder, center); return builder.endObject(); } diff --git a/core/src/main/java/org/elasticsearch/common/geo/builders/CoordinateCollection.java b/core/src/main/java/org/elasticsearch/common/geo/builders/CoordinateCollection.java deleted file mode 100644 index b6b9df45d0497..0000000000000 --- a/core/src/main/java/org/elasticsearch/common/geo/builders/CoordinateCollection.java +++ /dev/null @@ -1,155 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.common.geo.builders; - -import com.vividsolutions.jts.geom.Coordinate; - -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.xcontent.XContentBuilder; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collection; -import java.util.List; - -/** - * The {@link CoordinateCollection} is an abstract base implementation for {@link LineStringBuilder} and {@link MultiPointBuilder}. - * It holds a common list of {@link Coordinate}, provides setters for adding elements to the list and can render this to XContent. - */ -public abstract class CoordinateCollection> extends ShapeBuilder { - - protected final List coordinates; - - /** - * Construct a new collection of coordinates. - * @param coordinates an initial list of coordinates - * @throws IllegalArgumentException if coordinates is null or empty - */ - protected CoordinateCollection(List coordinates) { - if (coordinates == null || coordinates.size() == 0) { - throw new IllegalArgumentException("cannot create point collection with empty set of points"); - } - this.coordinates = coordinates; - } - - /** - * Read from a stream. 
- */ - protected CoordinateCollection(StreamInput in) throws IOException { - int size = in.readVInt(); - coordinates = new ArrayList<>(size); - for (int i=0; i < size; i++) { - coordinates.add(readFromStream(in)); - } - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - out.writeVInt(coordinates.size()); - for (Coordinate point : coordinates) { - writeCoordinateTo(point, out); - } - } - - @SuppressWarnings("unchecked") - private E thisRef() { - return (E)this; - } - - /** - * Add a new coordinate to the collection - * @param longitude longitude of the coordinate - * @param latitude latitude of the coordinate - * @return this - */ - public E coordinate(double longitude, double latitude) { - return this.coordinate(new Coordinate(longitude, latitude)); - } - - /** - * Add a new coordinate to the collection - * @param coordinate coordinate of the point - * @return this - */ - public E coordinate(Coordinate coordinate) { - this.coordinates.add(coordinate); - return thisRef(); - } - - /** - * Add a array of coordinates to the collection - * - * @param coordinates array of {@link Coordinate}s to add - * @return this - */ - public E coordinates(Coordinate...coordinates) { - return this.coordinates(Arrays.asList(coordinates)); - } - - /** - * Add a collection of coordinates to the collection - * - * @param coordinates array of {@link Coordinate}s to add - * @return this - */ - public E coordinates(Collection coordinates) { - this.coordinates.addAll(coordinates); - return thisRef(); - } - - /** - * Copy all coordinate to a new Array - * - * @param closed if set to true the first point of the array is repeated as last element - * @return Array of coordinates - */ - protected Coordinate[] coordinates(boolean closed) { - Coordinate[] result = coordinates.toArray(new Coordinate[coordinates.size() + (closed?1:0)]); - if(closed) { - result[result.length-1] = result[0]; - } - return result; - } - - /** - * builds an array of coordinates to a {@link XContentBuilder} - * - * @param builder builder to use - * @param closed repeat the first point at the end of the array if it's not already defines as last element of the array - * @return the builder - */ - protected XContentBuilder coordinatesToXcontent(XContentBuilder builder, boolean closed) throws IOException { - builder.startArray(); - for(Coordinate coord : coordinates) { - toXContent(builder, coord); - } - if(closed) { - Coordinate start = coordinates.get(0); - Coordinate end = coordinates.get(coordinates.size()-1); - if(start.x != end.x || start.y != end.y) { - toXContent(builder, coordinates.get(0)); - } - } - builder.endArray(); - return builder; - } -} diff --git a/core/src/main/java/org/elasticsearch/common/geo/builders/EnvelopeBuilder.java b/core/src/main/java/org/elasticsearch/common/geo/builders/EnvelopeBuilder.java index 5b80ceeeeea4e..b352aa1d92490 100644 --- a/core/src/main/java/org/elasticsearch/common/geo/builders/EnvelopeBuilder.java +++ b/core/src/main/java/org/elasticsearch/common/geo/builders/EnvelopeBuilder.java @@ -19,6 +19,8 @@ package org.elasticsearch.common.geo.builders; +import org.elasticsearch.common.geo.GeoShapeType; +import org.elasticsearch.common.geo.parsers.ShapeParser; import org.locationtech.spatial4j.shape.Rectangle; import com.vividsolutions.jts.geom.Coordinate; @@ -29,7 +31,7 @@ import java.io.IOException; import java.util.Objects; -public class EnvelopeBuilder extends ShapeBuilder { +public class EnvelopeBuilder extends ShapeBuilder { public static final GeoShapeType TYPE = 
GeoShapeType.ENVELOPE; @@ -71,8 +73,8 @@ public Coordinate bottomRight() { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); - builder.field(FIELD_TYPE, TYPE.shapeName()); - builder.startArray(FIELD_COORDINATES); + builder.field(ShapeParser.FIELD_TYPE.getPreferredName(), TYPE.shapeName()); + builder.startArray(ShapeParser.FIELD_COORDINATES.getPreferredName()); toXContent(builder, topLeft); toXContent(builder, bottomRight); builder.endArray(); diff --git a/core/src/main/java/org/elasticsearch/common/geo/builders/GeometryCollectionBuilder.java b/core/src/main/java/org/elasticsearch/common/geo/builders/GeometryCollectionBuilder.java index de6ed35ff900b..3ea422265a7dd 100644 --- a/core/src/main/java/org/elasticsearch/common/geo/builders/GeometryCollectionBuilder.java +++ b/core/src/main/java/org/elasticsearch/common/geo/builders/GeometryCollectionBuilder.java @@ -19,6 +19,8 @@ package org.elasticsearch.common.geo.builders; +import org.elasticsearch.common.geo.GeoShapeType; +import org.elasticsearch.common.geo.parsers.ShapeParser; import org.locationtech.spatial4j.shape.Shape; import org.elasticsearch.ElasticsearchException; @@ -125,8 +127,8 @@ public int numShapes() { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); - builder.field(FIELD_TYPE, TYPE.shapeName()); - builder.startArray(FIELD_GEOMETRIES); + builder.field(ShapeParser.FIELD_TYPE.getPreferredName(), TYPE.shapeName()); + builder.startArray(ShapeParser.FIELD_GEOMETRIES.getPreferredName()); for (ShapeBuilder shape : shapes) { shape.toXContent(builder, params); } diff --git a/core/src/main/java/org/elasticsearch/common/geo/builders/LineStringBuilder.java b/core/src/main/java/org/elasticsearch/common/geo/builders/LineStringBuilder.java index e79578d9ab243..c595c126f7a62 100644 --- a/core/src/main/java/org/elasticsearch/common/geo/builders/LineStringBuilder.java +++ b/core/src/main/java/org/elasticsearch/common/geo/builders/LineStringBuilder.java @@ -24,17 +24,18 @@ import com.vividsolutions.jts.geom.GeometryFactory; import com.vividsolutions.jts.geom.LineString; +import org.elasticsearch.common.geo.GeoShapeType; +import org.elasticsearch.common.geo.parsers.ShapeParser; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.locationtech.spatial4j.shape.Shape; +import org.locationtech.spatial4j.shape.jts.JtsGeometry; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.List; -import java.util.Objects; -public class LineStringBuilder extends CoordinateCollection { +public class LineStringBuilder extends ShapeBuilder { public static final GeoShapeType TYPE = GeoShapeType.LINESTRING; /** @@ -65,8 +66,8 @@ public LineStringBuilder(StreamInput in) throws IOException { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); - builder.field(FIELD_TYPE, TYPE.shapeName()); - builder.field(FIELD_COORDINATES); + builder.field(ShapeParser.FIELD_TYPE.getPreferredName(), TYPE.shapeName()); + builder.field(ShapeParser.FIELD_COORDINATES.getPreferredName()); coordinatesToXcontent(builder, false); builder.endObject(); return builder; @@ -91,7 +92,7 @@ public GeoShapeType type() { } @Override - public Shape build() { + public JtsGeometry build() { Coordinate[] coordinates = this.coordinates.toArray(new 
Coordinate[this.coordinates.size()]); Geometry geometry; if(wrapdateline) { @@ -168,21 +169,4 @@ private static Coordinate[] shift(double shift, Coordinate...coordinates) { } return coordinates; } - - @Override - public int hashCode() { - return Objects.hash(coordinates); - } - - @Override - public boolean equals(Object obj) { - if (this == obj) { - return true; - } - if (obj == null || getClass() != obj.getClass()) { - return false; - } - LineStringBuilder other = (LineStringBuilder) obj; - return Objects.equals(coordinates, other.coordinates); - } } diff --git a/core/src/main/java/org/elasticsearch/common/geo/builders/MultiLineStringBuilder.java b/core/src/main/java/org/elasticsearch/common/geo/builders/MultiLineStringBuilder.java index 04e25862c8b83..1a4f71da2d494 100644 --- a/core/src/main/java/org/elasticsearch/common/geo/builders/MultiLineStringBuilder.java +++ b/core/src/main/java/org/elasticsearch/common/geo/builders/MultiLineStringBuilder.java @@ -19,6 +19,8 @@ package org.elasticsearch.common.geo.builders; +import org.elasticsearch.common.geo.GeoShapeType; +import org.elasticsearch.common.geo.parsers.ShapeParser; import org.locationtech.spatial4j.shape.Shape; import com.vividsolutions.jts.geom.Coordinate; import com.vividsolutions.jts.geom.Geometry; @@ -27,21 +29,19 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.locationtech.spatial4j.shape.jts.JtsGeometry; import java.io.IOException; import java.util.ArrayList; import java.util.Iterator; import java.util.Objects; -public class MultiLineStringBuilder extends ShapeBuilder { +public class MultiLineStringBuilder extends ShapeBuilder { public static final GeoShapeType TYPE = GeoShapeType.MULTILINESTRING; private final ArrayList lines = new ArrayList<>(); - public MultiLineStringBuilder() { - } - /** * Read from a stream. 
*/ @@ -52,6 +52,10 @@ public MultiLineStringBuilder(StreamInput in) throws IOException { } } + public MultiLineStringBuilder() { + super(); + } + @Override public void writeTo(StreamOutput out) throws IOException { out.writeVInt(lines.size()); @@ -81,8 +85,8 @@ public GeoShapeType type() { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); - builder.field(FIELD_TYPE, TYPE.shapeName()); - builder.field(FIELD_COORDINATES); + builder.field(ShapeParser.FIELD_TYPE.getPreferredName(), TYPE.shapeName()); + builder.field(ShapeParser.FIELD_COORDINATES.getPreferredName()); builder.startArray(); for(LineStringBuilder line : lines) { line.coordinatesToXcontent(builder, false); @@ -93,7 +97,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws } @Override - public Shape build() { + public JtsGeometry build() { final Geometry geometry; if(wrapdateline) { ArrayList parts = new ArrayList<>(); diff --git a/core/src/main/java/org/elasticsearch/common/geo/builders/MultiPointBuilder.java b/core/src/main/java/org/elasticsearch/common/geo/builders/MultiPointBuilder.java index f8a0624436232..ae38126f87bac 100644 --- a/core/src/main/java/org/elasticsearch/common/geo/builders/MultiPointBuilder.java +++ b/core/src/main/java/org/elasticsearch/common/geo/builders/MultiPointBuilder.java @@ -21,7 +21,9 @@ import com.vividsolutions.jts.geom.Coordinate; +import org.elasticsearch.common.geo.GeoShapeType; import org.elasticsearch.common.geo.XShapeCollection; +import org.elasticsearch.common.geo.parsers.ShapeParser; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.xcontent.XContentBuilder; import org.locationtech.spatial4j.shape.Point; @@ -32,7 +34,7 @@ import java.util.List; import java.util.Objects; -public class MultiPointBuilder extends CoordinateCollection { +public class MultiPointBuilder extends ShapeBuilder, MultiPointBuilder> { public static final GeoShapeType TYPE = GeoShapeType.MULTIPOINT; @@ -54,15 +56,15 @@ public MultiPointBuilder(StreamInput in) throws IOException { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); - builder.field(FIELD_TYPE, TYPE.shapeName()); - builder.field(FIELD_COORDINATES); + builder.field(ShapeParser.FIELD_TYPE.getPreferredName(), TYPE.shapeName()); + builder.field(ShapeParser.FIELD_COORDINATES.getPreferredName()); super.coordinatesToXcontent(builder, false); builder.endObject(); return builder; } @Override - public Shape build() { + public XShapeCollection build() { //Could wrap JtsGeometry but probably slower due to conversions to/from JTS in relate() //MultiPoint geometry = FACTORY.createMultiPoint(points.toArray(new Coordinate[points.size()])); List shapes = new ArrayList<>(coordinates.size()); @@ -78,21 +80,4 @@ public Shape build() { public GeoShapeType type() { return TYPE; } - - @Override - public int hashCode() { - return Objects.hash(coordinates); - } - - @Override - public boolean equals(Object obj) { - if (this == obj) { - return true; - } - if (obj == null || getClass() != obj.getClass()) { - return false; - } - MultiPointBuilder other = (MultiPointBuilder) obj; - return Objects.equals(coordinates, other.coordinates); - } } diff --git a/core/src/main/java/org/elasticsearch/common/geo/builders/MultiPolygonBuilder.java b/core/src/main/java/org/elasticsearch/common/geo/builders/MultiPolygonBuilder.java index f5e5bca5051c6..3c002631b8d17 100644 --- 
a/core/src/main/java/org/elasticsearch/common/geo/builders/MultiPolygonBuilder.java +++ b/core/src/main/java/org/elasticsearch/common/geo/builders/MultiPolygonBuilder.java @@ -19,6 +19,8 @@ package org.elasticsearch.common.geo.builders; +import org.elasticsearch.common.geo.GeoShapeType; +import org.elasticsearch.common.geo.parsers.ShapeParser; import org.locationtech.spatial4j.shape.Shape; import com.vividsolutions.jts.geom.Coordinate; @@ -102,9 +104,9 @@ public List polygons() { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); - builder.field(FIELD_TYPE, TYPE.shapeName()); - builder.field(FIELD_ORIENTATION, orientation.name().toLowerCase(Locale.ROOT)); - builder.startArray(FIELD_COORDINATES); + builder.field(ShapeParser.FIELD_TYPE.getPreferredName(), TYPE.shapeName()); + builder.field(ShapeParser.FIELD_ORIENTATION.getPreferredName(), orientation.name().toLowerCase(Locale.ROOT)); + builder.startArray(ShapeParser.FIELD_COORDINATES.getPreferredName()); for(PolygonBuilder polygon : polygons) { builder.startArray(); polygon.coordinatesArray(builder, params); diff --git a/core/src/main/java/org/elasticsearch/common/geo/builders/PointBuilder.java b/core/src/main/java/org/elasticsearch/common/geo/builders/PointBuilder.java index fdd9826410a05..029ac14955a3a 100644 --- a/core/src/main/java/org/elasticsearch/common/geo/builders/PointBuilder.java +++ b/core/src/main/java/org/elasticsearch/common/geo/builders/PointBuilder.java @@ -19,86 +19,78 @@ package org.elasticsearch.common.geo.builders; +import org.elasticsearch.common.geo.GeoShapeType; +import org.elasticsearch.common.geo.parsers.ShapeParser; import org.locationtech.spatial4j.shape.Point; import com.vividsolutions.jts.geom.Coordinate; import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.XContentBuilder; import java.io.IOException; -import java.util.Objects; +import java.util.ArrayList; -public class PointBuilder extends ShapeBuilder { +public class PointBuilder extends ShapeBuilder { public static final GeoShapeType TYPE = GeoShapeType.POINT; - private Coordinate coordinate; - /** * Create a point at [0.0,0.0] */ public PointBuilder() { - this.coordinate = ZERO_ZERO; + super(); + this.coordinates.add(ZERO_ZERO); } - /** - * Read from a stream. 
- */ - public PointBuilder(StreamInput in) throws IOException { - coordinate = readFromStream(in); + public PointBuilder(double lon, double lat) { + //super(new ArrayList<>(1)); + super(); + this.coordinates.add(new Coordinate(lon, lat)); } - @Override - public void writeTo(StreamOutput out) throws IOException { - writeCoordinateTo(coordinate, out); + public PointBuilder(StreamInput in) throws IOException { + super(in); } public PointBuilder coordinate(Coordinate coordinate) { - this.coordinate = coordinate; + this.coordinates.set(0, coordinate); return this; } public double longitude() { - return coordinate.x; + return coordinates.get(0).x; } public double latitude() { - return coordinate.y; + return coordinates.get(0).y; + } + + /** + * Create a new point + * + * @param longitude longitude of the point + * @param latitude latitude of the point + * @return a new {@link PointBuilder} + */ + public static PointBuilder newPoint(double longitude, double latitude) { + return new PointBuilder().coordinate(new Coordinate(longitude, latitude)); } @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); - builder.field(FIELD_TYPE, TYPE.shapeName()); - builder.field(FIELD_COORDINATES); - toXContent(builder, coordinate); + builder.field(ShapeParser.FIELD_TYPE.getPreferredName(), TYPE.shapeName()); + builder.field(ShapeParser.FIELD_COORDINATES.getPreferredName()); + toXContent(builder, coordinates.get(0)); return builder.endObject(); } @Override public Point build() { - return SPATIAL_CONTEXT.makePoint(coordinate.x, coordinate.y); + return SPATIAL_CONTEXT.makePoint(coordinates.get(0).x, coordinates.get(0).y); } @Override public GeoShapeType type() { return TYPE; } - - @Override - public int hashCode() { - return Objects.hash(coordinate); - } - - @Override - public boolean equals(Object obj) { - if (this == obj) { - return true; - } - if (obj == null || getClass() != obj.getClass()) { - return false; - } - PointBuilder other = (PointBuilder) obj; - return Objects.equals(coordinate, other.coordinate); - } } diff --git a/core/src/main/java/org/elasticsearch/common/geo/builders/PolygonBuilder.java b/core/src/main/java/org/elasticsearch/common/geo/builders/PolygonBuilder.java index f88a246dd0b84..919aae37c7329 100644 --- a/core/src/main/java/org/elasticsearch/common/geo/builders/PolygonBuilder.java +++ b/core/src/main/java/org/elasticsearch/common/geo/builders/PolygonBuilder.java @@ -26,12 +26,15 @@ import com.vividsolutions.jts.geom.MultiPolygon; import com.vividsolutions.jts.geom.Polygon; import org.elasticsearch.common.collect.Tuple; +import org.elasticsearch.common.geo.GeoShapeType; +import org.elasticsearch.common.geo.parsers.ShapeParser; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.common.xcontent.XContentBuilder; import org.locationtech.spatial4j.exception.InvalidShapeException; import org.locationtech.spatial4j.shape.Shape; +import org.locationtech.spatial4j.shape.jts.JtsGeometry; import java.io.IOException; import java.util.ArrayList; @@ -49,7 +52,7 @@ * Methods to wrap polygons at the dateline and building shapes from the data held by the * builder. 
*/ -public class PolygonBuilder extends ShapeBuilder { +public class PolygonBuilder extends ShapeBuilder { public static final GeoShapeType TYPE = GeoShapeType.POLYGON; @@ -222,7 +225,7 @@ public Coordinate[][][] coordinates() { } @Override - public Shape build() { + public JtsGeometry build() { return jtsGeometry(buildGeometry(FACTORY, wrapdateline)); } @@ -237,9 +240,9 @@ protected XContentBuilder coordinatesArray(XContentBuilder builder, Params param @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); - builder.field(FIELD_TYPE, TYPE.shapeName()); - builder.field(FIELD_ORIENTATION, orientation.name().toLowerCase(Locale.ROOT)); - builder.startArray(FIELD_COORDINATES); + builder.field(ShapeParser.FIELD_TYPE.getPreferredName(), TYPE.shapeName()); + builder.field(ShapeParser.FIELD_ORIENTATION.getPreferredName(), orientation.name().toLowerCase(Locale.ROOT)); + builder.startArray(ShapeParser.FIELD_COORDINATES.getPreferredName()); coordinatesArray(builder, params); builder.endArray(); builder.endObject(); diff --git a/core/src/main/java/org/elasticsearch/common/geo/builders/ShapeBuilder.java b/core/src/main/java/org/elasticsearch/common/geo/builders/ShapeBuilder.java index ea3c001949a83..ef50a667faa20 100644 --- a/core/src/main/java/org/elasticsearch/common/geo/builders/ShapeBuilder.java +++ b/core/src/main/java/org/elasticsearch/common/geo/builders/ShapeBuilder.java @@ -25,18 +25,14 @@ import org.apache.logging.log4j.Logger; import org.elasticsearch.Assertions; -import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.Strings; +import org.elasticsearch.common.geo.GeoShapeType; import org.elasticsearch.common.io.stream.NamedWriteable; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.logging.ESLoggerFactory; -import org.elasticsearch.common.unit.DistanceUnit.Distance; import org.elasticsearch.common.xcontent.ToXContentObject; -import org.elasticsearch.common.xcontent.XContent; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.index.mapper.GeoShapeFieldMapper; import org.locationtech.spatial4j.context.jts.JtsSpatialContext; import org.locationtech.spatial4j.exception.InvalidShapeException; import org.locationtech.spatial4j.shape.Shape; @@ -45,14 +41,16 @@ import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; +import java.util.Collection; import java.util.Comparator; import java.util.List; import java.util.Locale; +import java.util.Objects; /** * Basic class for building GeoJSON shapes like Polygons, Linestrings, etc */ -public abstract class ShapeBuilder implements NamedWriteable, ToXContentObject { +public abstract class ShapeBuilder> implements NamedWriteable, ToXContentObject { protected static final Logger LOGGER = ESLoggerFactory.getLogger(ShapeBuilder.class.getName()); @@ -63,6 +61,8 @@ public abstract class ShapeBuilder implements NamedWriteable, ToXContentObject { DEBUG = Assertions.ENABLED; } + protected final List coordinates; + public static final double DATELINE = 180; /** @@ -85,104 +85,123 @@ public abstract class ShapeBuilder implements NamedWriteable, ToXContentObject { /** @see org.locationtech.spatial4j.shape.jts.JtsGeometry#index() */ protected static final boolean AUTO_INDEX_JTS_GEOMETRY = true;//may want to turn off once SpatialStrategy impls do it. 
+ /** default ctor */ protected ShapeBuilder() { + coordinates = new ArrayList<>(); } - protected JtsGeometry jtsGeometry(Geometry geom) { - //dateline180Check is false because ElasticSearch does it's own dateline wrapping - JtsGeometry jtsGeometry = new JtsGeometry(geom, SPATIAL_CONTEXT, false, MULTI_POLYGON_MAY_OVERLAP); - if (AUTO_VALIDATE_JTS_GEOMETRY) - jtsGeometry.validate(); - if (AUTO_INDEX_JTS_GEOMETRY) - jtsGeometry.index(); - return jtsGeometry; + /** ctor from list of coordinates */ + protected ShapeBuilder(List coordinates) { + if (coordinates == null || coordinates.size() == 0) { + throw new IllegalArgumentException("cannot create point collection with empty set of points"); + } + this.coordinates = coordinates; } - /** - * Create a new Shape from this builder. Since calling this method could change the - * defined shape. (by inserting new coordinates or change the position of points) - * the builder looses its validity. So this method should only be called once on a builder - * @return new {@link Shape} defined by the builder - */ - public abstract Shape build(); - - /** - * Recursive method which parses the arrays of coordinates used to define - * Shapes - * - * @param parser - * Parser that will be read from - * @return CoordinateNode representing the start of the coordinate tree - * @throws IOException - * Thrown if an error occurs while reading from the - * XContentParser - */ - private static CoordinateNode parseCoordinates(XContentParser parser) throws IOException { - XContentParser.Token token = parser.nextToken(); - - // Base cases - if (token != XContentParser.Token.START_ARRAY && - token != XContentParser.Token.END_ARRAY && - token != XContentParser.Token.VALUE_NULL) { - double lon = parser.doubleValue(); - token = parser.nextToken(); - double lat = parser.doubleValue(); - token = parser.nextToken(); - while (token == XContentParser.Token.VALUE_NUMBER) { - token = parser.nextToken(); - } - return new CoordinateNode(new Coordinate(lon, lat)); - } else if (token == XContentParser.Token.VALUE_NULL) { - throw new IllegalArgumentException("coordinates cannot contain NULL values)"); + /** ctor from serialized stream input */ + protected ShapeBuilder(StreamInput in) throws IOException { + int size = in.readVInt(); + coordinates = new ArrayList<>(size); + for (int i=0; i < size; i++) { + coordinates.add(readFromStream(in)); } + } - List nodes = new ArrayList<>(); - while (token != XContentParser.Token.END_ARRAY) { - nodes.add(parseCoordinates(parser)); - token = parser.nextToken(); + protected static Coordinate readFromStream(StreamInput in) throws IOException { + return new Coordinate(in.readDouble(), in.readDouble()); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeVInt(coordinates.size()); + for (Coordinate point : coordinates) { + writeCoordinateTo(point, out); } + } + + protected static void writeCoordinateTo(Coordinate coordinate, StreamOutput out) throws IOException { + out.writeDouble(coordinate.x); + out.writeDouble(coordinate.y); + } - return new CoordinateNode(nodes); + @SuppressWarnings("unchecked") + private E thisRef() { + return (E)this; } /** - * Create a new {@link ShapeBuilder} from {@link XContent} - * @param parser parser to read the GeoShape from - * @return {@link ShapeBuilder} read from the parser or null - * if the parsers current token has been null - * @throws IOException if the input could not be read + * Add a new coordinate to the collection + * @param longitude longitude of the coordinate + * @param 
latitude latitude of the coordinate + * @return this */ - public static ShapeBuilder parse(XContentParser parser) throws IOException { - return GeoShapeType.parse(parser, null); + public E coordinate(double longitude, double latitude) { + return this.coordinate(new Coordinate(longitude, latitude)); } /** - * Create a new {@link ShapeBuilder} from {@link XContent} - * @param parser parser to read the GeoShape from - * @param geoDocMapper document field mapper reference required for spatial parameters relevant - * to the shape construction process (e.g., orientation) - * todo: refactor to place build specific parameters in the SpatialContext - * @return {@link ShapeBuilder} read from the parser or null - * if the parsers current token has been null - * @throws IOException if the input could not be read + * Add a new coordinate to the collection + * @param coordinate coordinate of the point + * @return this */ - public static ShapeBuilder parse(XContentParser parser, GeoShapeFieldMapper geoDocMapper) throws IOException { - return GeoShapeType.parse(parser, geoDocMapper); + public E coordinate(Coordinate coordinate) { + this.coordinates.add(coordinate); + return thisRef(); } - protected static XContentBuilder toXContent(XContentBuilder builder, Coordinate coordinate) throws IOException { - return builder.startArray().value(coordinate.x).value(coordinate.y).endArray(); + /** + * Add an array of coordinates to the collection + * + * @param coordinates array of {@link Coordinate}s to add + * @return this + */ + public E coordinates(Coordinate...coordinates) { + return this.coordinates(Arrays.asList(coordinates)); } - protected static void writeCoordinateTo(Coordinate coordinate, StreamOutput out) throws IOException { - out.writeDouble(coordinate.x); - out.writeDouble(coordinate.y); + /** + * Add a collection of coordinates to the collection + * + * @param coordinates collection of {@link Coordinate}s to add + * @return this + */ + public E coordinates(Collection coordinates) { + this.coordinates.addAll(coordinates); + return thisRef(); } - protected static Coordinate readFromStream(StreamInput in) throws IOException { - return new Coordinate(in.readDouble(), in.readDouble()); + /** + * Copy all coordinates to a new array + * + * @param closed if set to true the first point of the array is repeated as the last element + * @return Array of coordinates + */ + protected Coordinate[] coordinates(boolean closed) { + Coordinate[] result = coordinates.toArray(new Coordinate[coordinates.size() + (closed?1:0)]); + if(closed) { + result[result.length-1] = result[0]; + } + return result; + } + + protected JtsGeometry jtsGeometry(Geometry geom) { + //dateline180Check is false because Elasticsearch does its own dateline wrapping + JtsGeometry jtsGeometry = new JtsGeometry(geom, SPATIAL_CONTEXT, false, MULTI_POLYGON_MAY_OVERLAP); + if (AUTO_VALIDATE_JTS_GEOMETRY) + jtsGeometry.validate(); + if (AUTO_INDEX_JTS_GEOMETRY) + jtsGeometry.index(); + return jtsGeometry; } + /** + * Create a new Shape from this builder. Since calling this method could change the + * defined shape (by inserting new coordinates or changing the position of points), + * the builder loses its validity.
So this method should only be called once on a builder + * @return new {@link Shape} defined by the builder + */ + public abstract T build(); + protected static Coordinate shift(Coordinate coordinate, double dateline) { if (dateline == 0) { return coordinate; @@ -255,58 +274,6 @@ protected static int intersections(double dateline, Edge[] edges) { return numIntersections; } - /** - * Node used to represent a tree of coordinates. - *

- * Can either be a leaf node consisting of a Coordinate, or a parent with - * children - */ - protected static class CoordinateNode implements ToXContentObject { - - protected final Coordinate coordinate; - protected final List children; - - /** - * Creates a new leaf CoordinateNode - * - * @param coordinate - * Coordinate for the Node - */ - protected CoordinateNode(Coordinate coordinate) { - this.coordinate = coordinate; - this.children = null; - } - - /** - * Creates a new parent CoordinateNode - * - * @param children - * Children of the Node - */ - protected CoordinateNode(List children) { - this.children = children; - this.coordinate = null; - } - - protected boolean isEmpty() { - return (coordinate == null && (children == null || children.isEmpty())); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - if (children == null) { - builder.startArray().value(coordinate.x).value(coordinate.y).endArray(); - } else { - builder.startArray(); - for (CoordinateNode child : children) { - child.toXContent(builder, params); - } - builder.endArray(); - } - return builder; - } - } - /** * This helper class implements a linked list for {@link Coordinate}. It contains * fields for a dateline intersection and component id @@ -415,293 +382,50 @@ public static Orientation fromString(String orientation) { } } - public static final String FIELD_TYPE = "type"; - public static final String FIELD_COORDINATES = "coordinates"; - public static final String FIELD_GEOMETRIES = "geometries"; - public static final String FIELD_ORIENTATION = "orientation"; - protected static final boolean debugEnabled() { return LOGGER.isDebugEnabled() || DEBUG; } + protected static XContentBuilder toXContent(XContentBuilder builder, Coordinate coordinate) throws IOException { + return builder.startArray().value(coordinate.x).value(coordinate.y).endArray(); + } + /** - * Enumeration that lists all {@link GeoShapeType}s that can be handled + * builds an array of coordinates to a {@link XContentBuilder} + * + * @param builder builder to use + * @param closed repeat the first point at the end of the array if it's not already defines as last element of the array + * @return the builder */ - public enum GeoShapeType { - POINT("point"), - MULTIPOINT("multipoint"), - LINESTRING("linestring"), - MULTILINESTRING("multilinestring"), - POLYGON("polygon"), - MULTIPOLYGON("multipolygon"), - GEOMETRYCOLLECTION("geometrycollection"), - ENVELOPE("envelope"), - CIRCLE("circle"); - - private final String shapename; - - GeoShapeType(String shapename) { - this.shapename = shapename; - } - - protected String shapeName() { - return shapename; - } - - public static GeoShapeType forName(String geoshapename) { - String typename = geoshapename.toLowerCase(Locale.ROOT); - for (GeoShapeType type : values()) { - if(type.shapename.equals(typename)) { - return type; - } - } - throw new IllegalArgumentException("unknown geo_shape ["+geoshapename+"]"); - } - - public static ShapeBuilder parse(XContentParser parser) throws IOException { - return parse(parser, null); - } - - /** - * Parse the geometry specified by the source document and return a ShapeBuilder instance used to - * build the actual geometry - * @param parser - parse utility object including source document - * @param shapeMapper - field mapper needed for index specific parameters - * @return ShapeBuilder - a builder instance used to create the geometry - */ - public static ShapeBuilder parse(XContentParser parser, GeoShapeFieldMapper 
shapeMapper) throws IOException { - if (parser.currentToken() == XContentParser.Token.VALUE_NULL) { - return null; - } else if (parser.currentToken() != XContentParser.Token.START_OBJECT) { - throw new ElasticsearchParseException("shape must be an object consisting of type and coordinates"); - } - - GeoShapeType shapeType = null; - Distance radius = null; - CoordinateNode node = null; - GeometryCollectionBuilder geometryCollections = null; - - Orientation requestedOrientation = (shapeMapper == null) ? Orientation.RIGHT : shapeMapper.fieldType().orientation(); - boolean coerce = (shapeMapper == null) ? GeoShapeFieldMapper.Defaults.COERCE.value() : shapeMapper.coerce().value(); - - XContentParser.Token token; - while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { - if (token == XContentParser.Token.FIELD_NAME) { - String fieldName = parser.currentName(); - - if (FIELD_TYPE.equals(fieldName)) { - parser.nextToken(); - shapeType = GeoShapeType.forName(parser.text()); - } else if (FIELD_COORDINATES.equals(fieldName)) { - parser.nextToken(); - node = parseCoordinates(parser); - } else if (FIELD_GEOMETRIES.equals(fieldName)) { - parser.nextToken(); - geometryCollections = parseGeometries(parser, shapeMapper); - } else if (CircleBuilder.FIELD_RADIUS.equals(fieldName)) { - parser.nextToken(); - radius = Distance.parseDistance(parser.text()); - } else if (FIELD_ORIENTATION.equals(fieldName)) { - parser.nextToken(); - requestedOrientation = Orientation.fromString(parser.text()); - } else { - parser.nextToken(); - parser.skipChildren(); - } - } - } - - if (shapeType == null) { - throw new ElasticsearchParseException("shape type not included"); - } else if (node == null && GeoShapeType.GEOMETRYCOLLECTION != shapeType) { - throw new ElasticsearchParseException("coordinates not included"); - } else if (geometryCollections == null && GeoShapeType.GEOMETRYCOLLECTION == shapeType) { - throw new ElasticsearchParseException("geometries not included"); - } else if (radius != null && GeoShapeType.CIRCLE != shapeType) { - throw new ElasticsearchParseException("field [{}] is supported for [{}] only", CircleBuilder.FIELD_RADIUS, - CircleBuilder.TYPE); - } - - switch (shapeType) { - case POINT: return parsePoint(node); - case MULTIPOINT: return parseMultiPoint(node); - case LINESTRING: return parseLineString(node); - case MULTILINESTRING: return parseMultiLine(node); - case POLYGON: return parsePolygon(node, requestedOrientation, coerce); - case MULTIPOLYGON: return parseMultiPolygon(node, requestedOrientation, coerce); - case CIRCLE: return parseCircle(node, radius); - case ENVELOPE: return parseEnvelope(node); - case GEOMETRYCOLLECTION: return geometryCollections; - default: - throw new ElasticsearchParseException("shape type [{}] not included", shapeType); - } - } - - protected static void validatePointNode(CoordinateNode node) { - if (node.isEmpty()) { - throw new ElasticsearchParseException( - "invalid number of points (0) provided when expecting a single coordinate ([lat, lng])"); - } else if (node.coordinate == null) { - if (node.children.isEmpty() == false) { - throw new ElasticsearchParseException("multipoint data provided when single point data expected."); - } - } - } - - protected static PointBuilder parsePoint(CoordinateNode node) { - validatePointNode(node); - return ShapeBuilders.newPoint(node.coordinate); - } - - protected static CircleBuilder parseCircle(CoordinateNode coordinates, Distance radius) { - return 
ShapeBuilders.newCircleBuilder().center(coordinates.coordinate).radius(radius); - } - - protected static EnvelopeBuilder parseEnvelope(CoordinateNode coordinates) { - // validate the coordinate array for envelope type - if (coordinates.children.size() != 2) { - throw new ElasticsearchParseException( - "invalid number of points [{}] provided for geo_shape [{}] when expecting an array of 2 coordinates", - coordinates.children.size(), GeoShapeType.ENVELOPE.shapename); - } - // verify coordinate bounds, correct if necessary - Coordinate uL = coordinates.children.get(0).coordinate; - Coordinate lR = coordinates.children.get(1).coordinate; - if (((lR.x < uL.x) || (uL.y < lR.y))) { - Coordinate uLtmp = uL; - uL = new Coordinate(Math.min(uL.x, lR.x), Math.max(uL.y, lR.y)); - lR = new Coordinate(Math.max(uLtmp.x, lR.x), Math.min(uLtmp.y, lR.y)); + protected XContentBuilder coordinatesToXcontent(XContentBuilder builder, boolean closed) throws IOException { + builder.startArray(); + for(Coordinate coord : coordinates) { + toXContent(builder, coord); + } + if(closed) { + Coordinate start = coordinates.get(0); + Coordinate end = coordinates.get(coordinates.size()-1); + if(start.x != end.x || start.y != end.y) { + toXContent(builder, coordinates.get(0)); } - return ShapeBuilders.newEnvelope(uL, lR); - } - - protected static void validateMultiPointNode(CoordinateNode coordinates) { - if (coordinates.children == null || coordinates.children.isEmpty()) { - if (coordinates.coordinate != null) { - throw new ElasticsearchParseException("single coordinate found when expecting an array of " + - "coordinates. change type to point or change data to an array of >0 coordinates"); - } - throw new ElasticsearchParseException("no data provided for multipoint object when expecting " + - ">0 points (e.g., [[lat, lng]] or [[lat, lng], ...])"); - } else { - for (CoordinateNode point : coordinates.children) { - validatePointNode(point); - } - } - } - - protected static MultiPointBuilder parseMultiPoint(CoordinateNode coordinates) { - validateMultiPointNode(coordinates); - CoordinatesBuilder points = new CoordinatesBuilder(); - for (CoordinateNode node : coordinates.children) { - points.coordinate(node.coordinate); - } - return new MultiPointBuilder(points.build()); - } - - protected static LineStringBuilder parseLineString(CoordinateNode coordinates) { - /** - * Per GeoJSON spec (http://geojson.org/geojson-spec.html#linestring) - * "coordinates" member must be an array of two or more positions - * LineStringBuilder should throw a graceful exception if < 2 coordinates/points are provided - */ - if (coordinates.children.size() < 2) { - throw new ElasticsearchParseException("invalid number of points in LineString (found [{}] - must be >= 2)", - coordinates.children.size()); - } - - CoordinatesBuilder line = new CoordinatesBuilder(); - for (CoordinateNode node : coordinates.children) { - line.coordinate(node.coordinate); - } - return ShapeBuilders.newLineString(line); - } - - protected static MultiLineStringBuilder parseMultiLine(CoordinateNode coordinates) { - MultiLineStringBuilder multiline = ShapeBuilders.newMultiLinestring(); - for (CoordinateNode node : coordinates.children) { - multiline.linestring(parseLineString(node)); - } - return multiline; - } - - protected static LineStringBuilder parseLinearRing(CoordinateNode coordinates, boolean coerce) { - /** - * Per GeoJSON spec (http://geojson.org/geojson-spec.html#linestring) - * A LinearRing is closed LineString with 4 or more positions. 
The first and last positions - * are equivalent (they represent equivalent points). Though a LinearRing is not explicitly - * represented as a GeoJSON geometry type, it is referred to in the Polygon geometry type definition. - */ - if (coordinates.children == null) { - String error = "Invalid LinearRing found."; - error += (coordinates.coordinate == null) ? - " No coordinate array provided" : " Found a single coordinate when expecting a coordinate array"; - throw new ElasticsearchParseException(error); - } - - int numValidPts = coerce ? 3 : 4; - if (coordinates.children.size() < numValidPts) { - throw new ElasticsearchParseException("invalid number of points in LinearRing (found [{}] - must be >= [{}])", - coordinates.children.size(), numValidPts); - } - - if (!coordinates.children.get(0).coordinate.equals( - coordinates.children.get(coordinates.children.size() - 1).coordinate)) { - if (coerce) { - coordinates.children.add(coordinates.children.get(0)); - } else { - throw new ElasticsearchParseException("invalid LinearRing found (coordinates are not closed)"); - } - } - return parseLineString(coordinates); - } - - protected static PolygonBuilder parsePolygon(CoordinateNode coordinates, final Orientation orientation, final boolean coerce) { - if (coordinates.children == null || coordinates.children.isEmpty()) { - throw new ElasticsearchParseException( - "invalid LinearRing provided for type polygon. Linear ring must be an array of coordinates"); - } - - LineStringBuilder shell = parseLinearRing(coordinates.children.get(0), coerce); - PolygonBuilder polygon = new PolygonBuilder(shell, orientation); - for (int i = 1; i < coordinates.children.size(); i++) { - polygon.hole(parseLinearRing(coordinates.children.get(i), coerce)); - } - return polygon; } + builder.endArray(); + return builder; + } - protected static MultiPolygonBuilder parseMultiPolygon(CoordinateNode coordinates, final Orientation orientation, - final boolean coerce) { - MultiPolygonBuilder polygons = ShapeBuilders.newMultiPolygon(orientation); - for (CoordinateNode node : coordinates.children) { - polygons.polygon(parsePolygon(node, orientation, coerce)); - } - return polygons; - } + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (!(o instanceof ShapeBuilder)) return false; - /** - * Parse the geometries array of a GeometryCollection - * - * @param parser Parser that will be read from - * @return Geometry[] geometries of the GeometryCollection - * @throws IOException Thrown if an error occurs while reading from the XContentParser - */ - protected static GeometryCollectionBuilder parseGeometries(XContentParser parser, GeoShapeFieldMapper mapper) throws - IOException { - if (parser.currentToken() != XContentParser.Token.START_ARRAY) { - throw new ElasticsearchParseException("geometries must be an array of geojson objects"); - } + ShapeBuilder that = (ShapeBuilder) o; - XContentParser.Token token = parser.nextToken(); - GeometryCollectionBuilder geometryCollection = ShapeBuilders.newGeometryCollection(); - while (token != XContentParser.Token.END_ARRAY) { - ShapeBuilder shapeBuilder = GeoShapeType.parse(parser); - geometryCollection.shape(shapeBuilder); - token = parser.nextToken(); - } + return Objects.equals(coordinates, that.coordinates); + } - return geometryCollection; - } + @Override + public int hashCode() { + return Objects.hash(coordinates); } @Override diff --git a/core/src/main/java/org/elasticsearch/common/geo/builders/ShapeBuilders.java 
b/core/src/main/java/org/elasticsearch/common/geo/builders/ShapeBuilders.java deleted file mode 100644 index e0afa4c20d50c..0000000000000 --- a/core/src/main/java/org/elasticsearch/common/geo/builders/ShapeBuilders.java +++ /dev/null @@ -1,153 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.common.geo.builders; - -import java.util.List; - -import com.vividsolutions.jts.geom.Coordinate; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry.Entry; - -/** - * A collection of static methods for creating ShapeBuilders. - */ -public class ShapeBuilders { - - /** - * Create a new point - * - * @param longitude longitude of the point - * @param latitude latitude of the point - * @return a new {@link PointBuilder} - */ - public static PointBuilder newPoint(double longitude, double latitude) { - return ShapeBuilders.newPoint(new Coordinate(longitude, latitude)); - } - - /** - * Create a new {@link PointBuilder} from a {@link Coordinate} - * @param coordinate coordinate defining the position of the point - * @return a new {@link PointBuilder} - */ - public static PointBuilder newPoint(Coordinate coordinate) { - return new PointBuilder().coordinate(coordinate); - } - - /** - * Create a new set of points - * @return new {@link MultiPointBuilder} - */ - public static MultiPointBuilder newMultiPoint(List points) { - return new MultiPointBuilder(points); - } - - /** - * Create a new lineString - * @return a new {@link LineStringBuilder} - */ - public static LineStringBuilder newLineString(List list) { - return new LineStringBuilder(list); - } - - /** - * Create a new lineString - * @return a new {@link LineStringBuilder} - */ - public static LineStringBuilder newLineString(CoordinatesBuilder coordinates) { - return new LineStringBuilder(coordinates); - } - - /** - * Create a new Collection of lineStrings - * @return a new {@link MultiLineStringBuilder} - */ - public static MultiLineStringBuilder newMultiLinestring() { - return new MultiLineStringBuilder(); - } - - /** - * Create a new PolygonBuilder - * @return a new {@link PolygonBuilder} - */ - public static PolygonBuilder newPolygon(List shell) { - return new PolygonBuilder(new CoordinatesBuilder().coordinates(shell)); - } - - /** - * Create a new PolygonBuilder - * @return a new {@link PolygonBuilder} - */ - public static PolygonBuilder newPolygon(CoordinatesBuilder shell) { - return new PolygonBuilder(shell); - } - - /** - * Create a new Collection of polygons - * @return a new {@link MultiPolygonBuilder} - */ - public static MultiPolygonBuilder newMultiPolygon() { - return new MultiPolygonBuilder(); - } - - /** - * Create a new Collection of polygons - * @return a new {@link MultiPolygonBuilder} - */ - public static MultiPolygonBuilder 
newMultiPolygon(ShapeBuilder.Orientation orientation) { - return new MultiPolygonBuilder(orientation); - } - - /** - * Create a new GeometryCollection - * @return a new {@link GeometryCollectionBuilder} - */ - public static GeometryCollectionBuilder newGeometryCollection() { - return new GeometryCollectionBuilder(); - } - - /** - * create a new Circle - * - * @return a new {@link CircleBuilder} - */ - public static CircleBuilder newCircleBuilder() { - return new CircleBuilder(); - } - - /** - * create a new rectangle - * - * @return a new {@link EnvelopeBuilder} - */ - public static EnvelopeBuilder newEnvelope(Coordinate topLeft, Coordinate bottomRight) { - return new EnvelopeBuilder(topLeft, bottomRight); - } - - public static void register(List namedWriteables) { - namedWriteables.add(new Entry(ShapeBuilder.class, PointBuilder.TYPE.shapeName(), PointBuilder::new)); - namedWriteables.add(new Entry(ShapeBuilder.class, CircleBuilder.TYPE.shapeName(), CircleBuilder::new)); - namedWriteables.add(new Entry(ShapeBuilder.class, EnvelopeBuilder.TYPE.shapeName(), EnvelopeBuilder::new)); - namedWriteables.add(new Entry(ShapeBuilder.class, MultiPointBuilder.TYPE.shapeName(), MultiPointBuilder::new)); - namedWriteables.add(new Entry(ShapeBuilder.class, LineStringBuilder.TYPE.shapeName(), LineStringBuilder::new)); - namedWriteables.add(new Entry(ShapeBuilder.class, MultiLineStringBuilder.TYPE.shapeName(), MultiLineStringBuilder::new)); - namedWriteables.add(new Entry(ShapeBuilder.class, PolygonBuilder.TYPE.shapeName(), PolygonBuilder::new)); - namedWriteables.add(new Entry(ShapeBuilder.class, MultiPolygonBuilder.TYPE.shapeName(), MultiPolygonBuilder::new)); - namedWriteables.add(new Entry(ShapeBuilder.class, GeometryCollectionBuilder.TYPE.shapeName(), GeometryCollectionBuilder::new)); - } -} diff --git a/core/src/main/java/org/elasticsearch/common/geo/parsers/CoordinateNode.java b/core/src/main/java/org/elasticsearch/common/geo/parsers/CoordinateNode.java new file mode 100644 index 0000000000000..d766d75d5ec1a --- /dev/null +++ b/core/src/main/java/org/elasticsearch/common/geo/parsers/CoordinateNode.java @@ -0,0 +1,81 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.common.geo.parsers; + +import com.vividsolutions.jts.geom.Coordinate; +import org.elasticsearch.common.xcontent.ToXContentObject; +import org.elasticsearch.common.xcontent.XContentBuilder; + +import java.io.IOException; +import java.util.List; + +/** + * Node used to represent a tree of coordinates. + *

+ * Can either be a leaf node consisting of a Coordinate, or a parent with + * children + */ +public class CoordinateNode implements ToXContentObject { + public final Coordinate coordinate; + public final List children; + + /** + * Creates a new leaf CoordinateNode + * + * @param coordinate + * Coordinate for the Node + */ + protected CoordinateNode(Coordinate coordinate) { + this.coordinate = coordinate; + this.children = null; + } + + /** + * Creates a new parent CoordinateNode + * + * @param children + * Children of the Node + */ + protected CoordinateNode(List children) { + this.children = children; + this.coordinate = null; + } + + public boolean isEmpty() { + return (coordinate == null && (children == null || children.isEmpty())); + } + + public boolean isMultiPoint() { + return children != null && children.size() > 1; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + if (children == null) { + builder.startArray().value(coordinate.x).value(coordinate.y).endArray(); + } else { + builder.startArray(); + for (CoordinateNode child : children) { + child.toXContent(builder, params); + } + builder.endArray(); + } + return builder; + } +} diff --git a/core/src/main/java/org/elasticsearch/common/geo/parsers/GeoJsonParser.java b/core/src/main/java/org/elasticsearch/common/geo/parsers/GeoJsonParser.java new file mode 100644 index 0000000000000..90145448be326 --- /dev/null +++ b/core/src/main/java/org/elasticsearch/common/geo/parsers/GeoJsonParser.java @@ -0,0 +1,194 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.elasticsearch.common.geo.parsers; + +import com.vividsolutions.jts.geom.Coordinate; +import org.elasticsearch.ElasticsearchParseException; +import org.elasticsearch.common.Explicit; +import org.elasticsearch.common.geo.GeoShapeType; +import org.elasticsearch.common.geo.builders.CircleBuilder; +import org.elasticsearch.common.geo.builders.GeometryCollectionBuilder; +import org.elasticsearch.common.geo.builders.ShapeBuilder; +import org.elasticsearch.common.unit.DistanceUnit; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.index.mapper.GeoShapeFieldMapper; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; + +/** + * Parses shape geometry represented in geojson + * + * complies with geojson specification: https://tools.ietf.org/html/rfc7946 + */ +abstract class GeoJsonParser { + protected static ShapeBuilder parse(XContentParser parser, GeoShapeFieldMapper shapeMapper) + throws IOException { + GeoShapeType shapeType = null; + DistanceUnit.Distance radius = null; + CoordinateNode coordinateNode = null; + GeometryCollectionBuilder geometryCollections = null; + + ShapeBuilder.Orientation requestedOrientation = + (shapeMapper == null) ? ShapeBuilder.Orientation.RIGHT : shapeMapper.fieldType().orientation(); + Explicit coerce = (shapeMapper == null) ? GeoShapeFieldMapper.Defaults.COERCE : shapeMapper.coerce(); + + String malformedException = null; + + XContentParser.Token token; + while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { + if (token == XContentParser.Token.FIELD_NAME) { + String fieldName = parser.currentName(); + + if (ShapeParser.FIELD_TYPE.match(fieldName)) { + parser.nextToken(); + final GeoShapeType type = GeoShapeType.forName(parser.text()); + if (shapeType != null && shapeType.equals(type) == false) { + malformedException = ShapeParser.FIELD_TYPE + " already parsed as [" + + shapeType + "] cannot redefine as [" + type + "]"; + } else { + shapeType = type; + } + } else if (ShapeParser.FIELD_COORDINATES.match(fieldName)) { + parser.nextToken(); + coordinateNode = parseCoordinates(parser); + } else if (ShapeParser.FIELD_GEOMETRIES.match(fieldName)) { + if (shapeType == null) { + shapeType = GeoShapeType.GEOMETRYCOLLECTION; + } else if (shapeType.equals(GeoShapeType.GEOMETRYCOLLECTION) == false) { + malformedException = "cannot have [" + ShapeParser.FIELD_GEOMETRIES + "] with type set to [" + + shapeType + "]"; + } + parser.nextToken(); + geometryCollections = parseGeometries(parser, shapeMapper); + } else if (CircleBuilder.FIELD_RADIUS.match(fieldName)) { + if (shapeType == null) { + shapeType = GeoShapeType.CIRCLE; + } else if (shapeType != null && shapeType.equals(GeoShapeType.CIRCLE) == false) { + malformedException = "cannot have [" + CircleBuilder.FIELD_RADIUS + "] with type set to [" + + shapeType + "]"; + } + parser.nextToken(); + radius = DistanceUnit.Distance.parseDistance(parser.text()); + } else if (ShapeParser.FIELD_ORIENTATION.match(fieldName)) { + if (shapeType != null + && (shapeType.equals(GeoShapeType.POLYGON) || shapeType.equals(GeoShapeType.MULTIPOLYGON)) == false) { + malformedException = "cannot have [" + ShapeParser.FIELD_ORIENTATION + "] with type set to [" + shapeType + "]"; + } + parser.nextToken(); + requestedOrientation = ShapeBuilder.Orientation.fromString(parser.text()); + } else { + parser.nextToken(); + parser.skipChildren(); + } + } + } + + if (malformedException != null) { + throw new ElasticsearchParseException(malformedException); + } else 
if (shapeType == null) { + throw new ElasticsearchParseException("shape type not included"); + } else if (coordinateNode == null && GeoShapeType.GEOMETRYCOLLECTION != shapeType) { + throw new ElasticsearchParseException("coordinates not included"); + } else if (geometryCollections == null && GeoShapeType.GEOMETRYCOLLECTION == shapeType) { + throw new ElasticsearchParseException("geometries not included"); + } else if (radius != null && GeoShapeType.CIRCLE != shapeType) { + throw new ElasticsearchParseException("field [{}] is supported for [{}] only", CircleBuilder.FIELD_RADIUS, + CircleBuilder.TYPE); + } + + if (shapeType == null) { + throw new ElasticsearchParseException("shape type [{}] not included", shapeType); + } + + if (shapeType.equals(GeoShapeType.GEOMETRYCOLLECTION)) { + return geometryCollections; + } + + return shapeType.getBuilder(coordinateNode, radius, requestedOrientation, coerce.value()); + } + + /** + * Recursive method which parses the arrays of coordinates used to define + * Shapes + * + * @param parser + * Parser that will be read from + * @return CoordinateNode representing the start of the coordinate tree + * @throws IOException + * Thrown if an error occurs while reading from the + * XContentParser + */ + private static CoordinateNode parseCoordinates(XContentParser parser) throws IOException { + XContentParser.Token token = parser.nextToken(); + // Base cases + if (token != XContentParser.Token.START_ARRAY && + token != XContentParser.Token.END_ARRAY && + token != XContentParser.Token.VALUE_NULL) { + return new CoordinateNode(parseCoordinate(parser)); + } else if (token == XContentParser.Token.VALUE_NULL) { + throw new IllegalArgumentException("coordinates cannot contain NULL values)"); + } + + List nodes = new ArrayList<>(); + while (token != XContentParser.Token.END_ARRAY) { + nodes.add(parseCoordinates(parser)); + token = parser.nextToken(); + } + + return new CoordinateNode(nodes); + } + + private static Coordinate parseCoordinate(XContentParser parser) throws IOException { + double lon = parser.doubleValue(); + parser.nextToken(); + double lat = parser.doubleValue(); + XContentParser.Token token = parser.nextToken(); + while (token == XContentParser.Token.VALUE_NUMBER) { + token = parser.nextToken(); + } + // todo support z/alt + return new Coordinate(lon, lat); + } + + /** + * Parse the geometries array of a GeometryCollection + * + * @param parser Parser that will be read from + * @return Geometry[] geometries of the GeometryCollection + * @throws IOException Thrown if an error occurs while reading from the XContentParser + */ + static GeometryCollectionBuilder parseGeometries(XContentParser parser, GeoShapeFieldMapper mapper) throws + IOException { + if (parser.currentToken() != XContentParser.Token.START_ARRAY) { + throw new ElasticsearchParseException("geometries must be an array of geojson objects"); + } + + XContentParser.Token token = parser.nextToken(); + GeometryCollectionBuilder geometryCollection = new GeometryCollectionBuilder(); + while (token != XContentParser.Token.END_ARRAY) { + ShapeBuilder shapeBuilder = ShapeParser.parse(parser); + geometryCollection.shape(shapeBuilder); + token = parser.nextToken(); + } + + return geometryCollection; + } +} diff --git a/core/src/main/java/org/elasticsearch/common/geo/parsers/ShapeParser.java b/core/src/main/java/org/elasticsearch/common/geo/parsers/ShapeParser.java new file mode 100644 index 0000000000000..39540f902fedf --- /dev/null +++ 
b/core/src/main/java/org/elasticsearch/common/geo/parsers/ShapeParser.java @@ -0,0 +1,68 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.common.geo.parsers; + +import org.elasticsearch.ElasticsearchParseException; +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.geo.builders.ShapeBuilder; +import org.elasticsearch.common.xcontent.XContent; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.index.mapper.GeoShapeFieldMapper; + +import java.io.IOException; + +/** + * first point of entry for a shape parser + */ +public interface ShapeParser { + ParseField FIELD_TYPE = new ParseField("type"); + ParseField FIELD_COORDINATES = new ParseField("coordinates"); + ParseField FIELD_GEOMETRIES = new ParseField("geometries"); + ParseField FIELD_ORIENTATION = new ParseField("orientation"); + + /** + * Create a new {@link ShapeBuilder} from {@link XContent} + * @param parser parser to read the GeoShape from + * @param shapeMapper document field mapper reference required for spatial parameters relevant + * to the shape construction process (e.g., orientation) + * todo: refactor to place build specific parameters in the SpatialContext + * @return {@link ShapeBuilder} read from the parser or null + * if the parsers current token has been null + * @throws IOException if the input could not be read + */ + static ShapeBuilder parse(XContentParser parser, GeoShapeFieldMapper shapeMapper) throws IOException { + if (parser.currentToken() == XContentParser.Token.VALUE_NULL) { + return null; + } if (parser.currentToken() == XContentParser.Token.START_OBJECT) { + return GeoJsonParser.parse(parser, shapeMapper); + } + throw new ElasticsearchParseException("shape must be an object consisting of type and coordinates"); + } + + /** + * Create a new {@link ShapeBuilder} from {@link XContent} + * @param parser parser to read the GeoShape from + * @return {@link ShapeBuilder} read from the parser or null + * if the parsers current token has been null + * @throws IOException if the input could not be read + */ + static ShapeBuilder parse(XContentParser parser) throws IOException { + return parse(parser, null); + } +} diff --git a/core/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java b/core/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java index 61f32c67c20cb..38eaef1d14df9 100644 --- a/core/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java +++ b/core/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java @@ -264,20 +264,16 @@ public synchronized void addSettingsUpdateConsumer(Setting setting, Consu } /** - * Validates that all settings in the builder are registered and valid + * Validates that 
all given settings are registered and valid + * @param settings the settings to validate + * @param validateDependencies if true, settings dependencies are validated as well. + * @see Setting#getSettingsDependencies(String) */ - public final void validate(Settings.Builder settingsBuilder) { - validate(settingsBuilder.build()); - } - - /** - * * Validates that all given settings are registered and valid - */ - public final void validate(Settings settings) { + public final void validate(Settings settings, boolean validateDependencies) { List exceptions = new ArrayList<>(); for (String key : settings.keySet()) { // settings iterate in deterministic fashion try { - validate(key, settings); + validate(key, settings, validateDependencies); } catch (RuntimeException ex) { exceptions.add(ex); } @@ -285,12 +281,11 @@ public final void validate(Settings settings) { ExceptionsHelper.rethrowAndSuppress(exceptions); } - /** * Validates that the setting is valid */ - public final void validate(String key, Settings settings) { - Setting setting = get(key); + void validate(String key, Settings settings, boolean validateDependencies) { + Setting setting = getRaw(key); if (setting == null) { LevensteinDistance ld = new LevensteinDistance(); List> scoredKeys = new ArrayList<>(); @@ -315,6 +310,20 @@ public final void validate(String key, Settings settings) { "settings"; } throw new IllegalArgumentException(msg); + } else { + Set settingsDependencies = setting.getSettingsDependencies(key); + if (setting.hasComplexMatcher()) { + setting = setting.getConcreteSetting(key); + } + if (validateDependencies && settingsDependencies.isEmpty() == false) { + Set settingKeys = settings.keySet(); + for (String requiredSetting : settingsDependencies) { + if (settingKeys.contains(requiredSetting) == false) { + throw new IllegalArgumentException("Missing required setting [" + + requiredSetting + "] for setting [" + setting.getKey() + "]"); + } + } + } } setting.get(settings); } @@ -375,7 +384,18 @@ default Runnable updater(Settings current, Settings previous) { /** * Returns the {@link Setting} for the given key or null if the setting can not be found. */ - public Setting get(String key) { + public final Setting get(String key) { + Setting raw = getRaw(key); + if (raw == null) { + return null; + } if (raw.hasComplexMatcher()) { + return raw.getConcreteSetting(key); + } else { + return raw; + } + } + + private Setting getRaw(String key) { Setting setting = keySettings.get(key); if (setting != null) { return setting; @@ -383,7 +403,8 @@ public Setting get(String key) { for (Map.Entry> entry : complexMatchers.entrySet()) { if (entry.getValue().match(key)) { assert assertMatcher(key, 1); - return entry.getValue().getConcreteSetting(key); + assert entry.getValue().hasComplexMatcher(); + return entry.getValue(); } } return null; @@ -513,7 +534,7 @@ private boolean updateSettings(Settings toApply, Settings.Builder target, Settin } else if (get(key) == null) { throw new IllegalArgumentException(type + " setting [" + key + "], not recognized"); } else if (isNull == false && canUpdate.test(key)) { - validate(key, toApply); + validate(key, toApply, false); // we might not have a full picture here due to a dependency validation settingsBuilder.copy(key, toApply); updates.copy(key, toApply); changed = true; @@ -654,7 +675,7 @@ public String setValue(String value) { * representation. Otherwise false */ // TODO this should be replaced by Setting.Property.HIDDEN or something like this.
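A hedged sketch of how the dependency-aware validation above behaves, combining the new validate(Settings, boolean) overload with the affix-setting dependencies introduced in the Setting.java hunks further down; the users.* setting names and the SettingsDependencySketch class are made up for illustration.

import java.util.Arrays;
import java.util.HashSet;

import org.elasticsearch.common.settings.ClusterSettings;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;

public class SettingsDependencySketch {
    public static void main(String[] args) {
        // hypothetical affix settings: every users.<name>.password requires users.<name>.name
        Setting.AffixSetting<String> user = Setting.affixKeySetting("users.", "name",
            key -> Setting.simpleString(key, Setting.Property.NodeScope));
        Setting.AffixSetting<String> password = Setting.affixKeySetting("users.", "password",
            key -> Setting.simpleString(key, Setting.Property.NodeScope), user);

        ClusterSettings clusterSettings =
            new ClusterSettings(Settings.EMPTY, new HashSet<Setting<?>>(Arrays.asList(user, password)));

        // passes: the dependency users.admin.name is present next to users.admin.password
        clusterSettings.validate(Settings.builder()
            .put("users.admin.name", "admin")
            .put("users.admin.password", "secret")
            .build(), true);

        // throws IllegalArgumentException, roughly "Missing required setting [users.admin.name]
        // for setting [users.admin.password]", because dependency validation is enabled
        clusterSettings.validate(Settings.builder()
            .put("users.admin.password", "secret")
            .build(), true);
    }
}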
- protected boolean isPrivateSetting(String key) { + public boolean isPrivateSetting(String key) { return false; } } diff --git a/core/src/main/java/org/elasticsearch/common/settings/IndexScopedSettings.java b/core/src/main/java/org/elasticsearch/common/settings/IndexScopedSettings.java index 962e61b5c3c68..d40488eaa34f8 100644 --- a/core/src/main/java/org/elasticsearch/common/settings/IndexScopedSettings.java +++ b/core/src/main/java/org/elasticsearch/common/settings/IndexScopedSettings.java @@ -191,7 +191,7 @@ protected void validateSettingKey(Setting setting) { } @Override - protected boolean isPrivateSetting(String key) { + public boolean isPrivateSetting(String key) { switch (key) { case IndexMetaData.SETTING_CREATION_DATE: case IndexMetaData.SETTING_INDEX_UUID: diff --git a/core/src/main/java/org/elasticsearch/common/settings/Setting.java b/core/src/main/java/org/elasticsearch/common/settings/Setting.java index 9b99e67c8c4da..abc589aedafc3 100644 --- a/core/src/main/java/org/elasticsearch/common/settings/Setting.java +++ b/core/src/main/java/org/elasticsearch/common/settings/Setting.java @@ -42,6 +42,7 @@ import java.util.Collections; import java.util.EnumSet; import java.util.HashMap; +import java.util.HashSet; import java.util.IdentityHashMap; import java.util.Iterator; import java.util.List; @@ -126,7 +127,7 @@ public enum Property { private static final EnumSet EMPTY_PROPERTIES = EnumSet.noneOf(Property.class); private Setting(Key key, @Nullable Setting fallbackSetting, Function defaultValue, Function parser, - Validator validator, Property... properties) { + Validator validator, Property... properties) { assert this instanceof SecureSetting || this.isGroupSetting() || parser.apply(defaultValue.apply(Settings.EMPTY)) != null : "parser returned null"; this.key = key; @@ -457,6 +458,14 @@ public Setting getConcreteSetting(String key) { return this; } + /** + * Returns a set of settings that are required at validation time. Unless all of the dependencies are present in the settings + * object, validation of the setting must fail. + */ + public Set getSettingsDependencies(String key) { + return Collections.emptySet(); + } + /** * Build a new updater with a noop validator. */ @@ -519,11 +528,13 @@ public String toString() { public static class AffixSetting extends Setting { private final AffixKey key; private final Function> delegateFactory; + private final Set dependencies; - public AffixSetting(AffixKey key, Setting delegate, Function> delegateFactory) { + public AffixSetting(AffixKey key, Setting delegate, Function> delegateFactory, AffixSetting...
dependencies) { super(key, delegate.defaultValue, delegate.parser, delegate.properties.toArray(new Property[0])); this.key = key; this.delegateFactory = delegateFactory; + this.dependencies = Collections.unmodifiableSet(new HashSet<>(Arrays.asList(dependencies))); } boolean isGroupSetting() { @@ -534,6 +545,15 @@ private Stream matchStream(Settings settings) { return settings.keySet().stream().filter((key) -> match(key)).map(settingKey -> key.getConcreteString(settingKey)); } + public Set getSettingsDependencies(String settingsKey) { + if (dependencies.isEmpty()) { + return Collections.emptySet(); + } else { + String namespace = key.getNamespace(settingsKey); + return dependencies.stream().map(s -> s.key.toConcreteKey(namespace).key).collect(Collectors.toSet()); + } + } + AbstractScopedSettings.SettingUpdater, T>> newAffixUpdater( BiConsumer consumer, Logger logger, BiConsumer validator) { return new AbstractScopedSettings.SettingUpdater, T>>() { @@ -659,6 +679,13 @@ public Stream> getAllConcreteSettings(Settings settings) { return matchStream(settings).distinct().map(this::getConcreteSetting); } + /** + * Returns distinct namespaces for the given settings + */ + public Set getNamespaces(Settings settings) { + return settings.keySet().stream().filter(this::match).map(key::getNamespace).collect(Collectors.toSet()); + } + /** * Returns a map of all namespaces to it's values give the provided settings */ @@ -1184,13 +1211,15 @@ public static AffixSetting prefixKeySetting(String prefix, Function AffixSetting affixKeySetting(String prefix, String suffix, Function> delegateFactory) { - return affixKeySetting(new AffixKey(prefix, suffix), delegateFactory); + public static AffixSetting affixKeySetting(String prefix, String suffix, Function> delegateFactory, + AffixSetting... dependencies) { + return affixKeySetting(new AffixKey(prefix, suffix), delegateFactory, dependencies); } - private static AffixSetting affixKeySetting(AffixKey key, Function> delegateFactory) { + private static AffixSetting affixKeySetting(AffixKey key, Function> delegateFactory, + AffixSetting... 
dependencies) { Setting delegate = delegateFactory.apply("_na_"); - return new AffixSetting<>(key, delegate, delegateFactory); + return new AffixSetting<>(key, delegate, delegateFactory, dependencies); }; diff --git a/core/src/main/java/org/elasticsearch/common/settings/SettingsModule.java b/core/src/main/java/org/elasticsearch/common/settings/SettingsModule.java index 45b511e1cc168..0304b20e992e5 100644 --- a/core/src/main/java/org/elasticsearch/common/settings/SettingsModule.java +++ b/core/src/main/java/org/elasticsearch/common/settings/SettingsModule.java @@ -132,7 +132,7 @@ public SettingsModule(Settings settings, List> additionalSettings, Li } } // by now we are fully configured, lets check node level settings for unregistered index settings - clusterSettings.validate(settings); + clusterSettings.validate(settings, true); this.settingsFilter = new SettingsFilter(settings, settingsFilterPattern); } diff --git a/core/src/main/java/org/elasticsearch/discovery/zen/UnicastZenPing.java b/core/src/main/java/org/elasticsearch/discovery/zen/UnicastZenPing.java index d0a9a212bdd14..06269706e0d5f 100644 --- a/core/src/main/java/org/elasticsearch/discovery/zen/UnicastZenPing.java +++ b/core/src/main/java/org/elasticsearch/discovery/zen/UnicastZenPing.java @@ -575,7 +575,8 @@ public void handleResponse(UnicastPingResponse response) { @Override public void handleException(TransportException exp) { - if (exp instanceof ConnectTransportException || exp.getCause() instanceof ConnectTransportException) { + if (exp instanceof ConnectTransportException || exp.getCause() instanceof ConnectTransportException || + exp.getCause() instanceof AlreadyClosedException) { // ok, not connected... logger.trace((Supplier) () -> new ParameterizedMessage("failed to connect to {}", node), exp); } else if (closed == false) { @@ -608,6 +609,9 @@ class UnicastPingRequestHandler implements TransportRequestHandler> IFD getForField(MappedFieldType fieldType final String fieldName = fieldType.name(); IndexFieldData.Builder builder = fieldType.fielddataBuilder(fullyQualifiedIndexName); - IndexFieldDataCache cache; - synchronized (this) { - cache = fieldDataCaches.get(fieldName); - if (cache == null) { - String cacheType = indexSettings.getValue(INDEX_FIELDDATA_CACHE_KEY); - if (FIELDDATA_CACHE_VALUE_NODE.equals(cacheType)) { - cache = indicesFieldDataCache.buildIndexFieldDataCache(listener, index(), fieldName); - } else if ("none".equals(cacheType)){ - cache = new IndexFieldDataCache.None(); - } else { - throw new IllegalArgumentException("cache type not supported [" + cacheType + "] for field [" + fieldName + "]"); + IndexFieldDataCache cache = fieldDataCaches.get(fieldName); + if (cache == null) { + //for perf reason, only synchronize when cache is null + synchronized (this) { + cache = fieldDataCaches.get(fieldName); + //double checked locking to make sure it is thread safe + //especially when other threads calling clear() or clearField() + if (cache == null) { + String cacheType = indexSettings.getValue(INDEX_FIELDDATA_CACHE_KEY); + if (FIELDDATA_CACHE_VALUE_NODE.equals(cacheType)) { + cache = indicesFieldDataCache.buildIndexFieldDataCache(listener, index(), fieldName); + } else if ("none".equals(cacheType)){ + cache = new IndexFieldDataCache.None(); + } else { + throw new IllegalArgumentException("cache type not supported [" + cacheType + "] for field [" + fieldName + "]"); + } + fieldDataCaches.put(fieldName, cache); } - fieldDataCaches.put(fieldName, cache); } } diff --git 
a/core/src/main/java/org/elasticsearch/index/fielddata/plain/DocValuesIndexFieldData.java b/core/src/main/java/org/elasticsearch/index/fielddata/plain/DocValuesIndexFieldData.java index 4a066328bc2a9..698b289d758be 100644 --- a/core/src/main/java/org/elasticsearch/index/fielddata/plain/DocValuesIndexFieldData.java +++ b/core/src/main/java/org/elasticsearch/index/fielddata/plain/DocValuesIndexFieldData.java @@ -46,13 +46,11 @@ public abstract class DocValuesIndexFieldData { protected final Index index; protected final String fieldName; - protected final Logger logger; public DocValuesIndexFieldData(Index index, String fieldName) { super(); this.index = index; this.fieldName = fieldName; - this.logger = Loggers.getLogger(getClass()); } public final String getFieldName() { diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/plain/SortedSetDVOrdinalsIndexFieldData.java b/core/src/main/java/org/elasticsearch/index/fielddata/plain/SortedSetDVOrdinalsIndexFieldData.java index 0834d2479f072..4b3643dda059a 100644 --- a/core/src/main/java/org/elasticsearch/index/fielddata/plain/SortedSetDVOrdinalsIndexFieldData.java +++ b/core/src/main/java/org/elasticsearch/index/fielddata/plain/SortedSetDVOrdinalsIndexFieldData.java @@ -19,6 +19,7 @@ package org.elasticsearch.index.fielddata.plain; +import org.apache.logging.log4j.Logger; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.OrdinalMap; @@ -28,6 +29,7 @@ import org.apache.lucene.search.SortedSetSortField; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.common.Nullable; +import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.fielddata.AtomicOrdinalsFieldData; import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested; @@ -48,6 +50,7 @@ public class SortedSetDVOrdinalsIndexFieldData extends DocValuesIndexFieldData i private final IndexFieldDataCache cache; private final CircuitBreakerService breakerService; private final Function> scriptFunction; + private static final Logger logger = Loggers.getLogger(SortedSetDVOrdinalsIndexFieldData.class); public SortedSetDVOrdinalsIndexFieldData(IndexSettings indexSettings, IndexFieldDataCache cache, String fieldName, CircuitBreakerService breakerService, Function> scriptFunction) { diff --git a/core/src/main/java/org/elasticsearch/index/mapper/GeoShapeFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/GeoShapeFieldMapper.java index c605b8d093644..495bdf49cb9fc 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/GeoShapeFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/GeoShapeFieldMapper.java @@ -21,7 +21,6 @@ import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.IndexableField; import org.apache.lucene.index.Term; -import org.apache.lucene.search.DocValuesFieldExistsQuery; import org.apache.lucene.search.Query; import org.apache.lucene.search.TermQuery; import org.apache.lucene.spatial.prefix.PrefixTreeStrategy; @@ -37,6 +36,7 @@ import org.elasticsearch.common.geo.SpatialStrategy; import org.elasticsearch.common.geo.builders.ShapeBuilder; import org.elasticsearch.common.geo.builders.ShapeBuilder.Orientation; +import org.elasticsearch.common.geo.parsers.ShapeParser; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.DistanceUnit; import org.elasticsearch.common.xcontent.XContentBuilder; @@ 
-54,6 +54,8 @@ import java.util.Map; import java.util.Objects; +import static org.elasticsearch.index.mapper.GeoPointFieldMapper.Names.IGNORE_MALFORMED; + /** * FieldMapper for indexing {@link org.locationtech.spatial4j.shape.Shape}s. *

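The GeoShapeFieldMapper hunks below add an ignore_malformed option to geo_shape fields: when it is set, a shape that fails to parse is skipped instead of failing the whole document (see the change to parse() further down). A hedged sketch of building such a mapping with XContentBuilder; the field name and the sketch class are hypothetical.

import java.io.IOException;

import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;

public class GeoShapeIgnoreMalformedSketch {
    public static void main(String[] args) throws IOException {
        // mapping for a hypothetical "location" field; malformed shapes are skipped, not fatal
        XContentBuilder mapping = XContentFactory.jsonBuilder()
            .startObject()
                .startObject("properties")
                    .startObject("location")
                        .field("type", "geo_shape")
                        .field("ignore_malformed", true)
                    .endObject()
                .endObject()
            .endObject();
        System.out.println(mapping.string());
    }
}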
@@ -96,6 +98,7 @@ public static class Defaults { public static final Orientation ORIENTATION = Orientation.RIGHT; public static final double LEGACY_DISTANCE_ERROR_PCT = 0.025d; public static final Explicit COERCE = new Explicit<>(false, false); + public static final Explicit IGNORE_MALFORMED = new Explicit<>(false, false); public static final MappedFieldType FIELD_TYPE = new GeoShapeFieldType(); @@ -115,6 +118,7 @@ public static class Defaults { public static class Builder extends FieldMapper.Builder { private Boolean coerce; + private Boolean ignoreMalformed; public Builder(String name) { super(name, Defaults.FIELD_TYPE, Defaults.FIELD_TYPE); @@ -145,6 +149,21 @@ protected Explicit coerce(BuilderContext context) { return Defaults.COERCE; } + public Builder ignoreMalformed(boolean ignoreMalformed) { + this.ignoreMalformed = ignoreMalformed; + return builder; + } + + protected Explicit ignoreMalformed(BuilderContext context) { + if (ignoreMalformed != null) { + return new Explicit<>(ignoreMalformed, true); + } + if (context.indexSettings() != null) { + return new Explicit<>(IGNORE_MALFORMED_SETTING.get(context.indexSettings()), false); + } + return Defaults.IGNORE_MALFORMED; + } + @Override public GeoShapeFieldMapper build(BuilderContext context) { GeoShapeFieldType geoShapeFieldType = (GeoShapeFieldType)fieldType; @@ -154,8 +173,8 @@ public GeoShapeFieldMapper build(BuilderContext context) { } setupFieldType(context); - return new GeoShapeFieldMapper(name, fieldType, coerce(context), context.indexSettings(), multiFieldsBuilder.build(this, - context), copyTo); + return new GeoShapeFieldMapper(name, fieldType, ignoreMalformed(context), coerce(context), context.indexSettings(), + multiFieldsBuilder.build(this, context), copyTo); } } @@ -186,6 +205,9 @@ public Mapper.Builder parse(String name, Map node, ParserContext } else if (Names.STRATEGY.equals(fieldName)) { builder.fieldType().setStrategyName(fieldNode.toString()); iterator.remove(); + } else if (IGNORE_MALFORMED.equals(fieldName)) { + builder.ignoreMalformed(TypeParsers.nodeBooleanValue(fieldName, "ignore_malformed", fieldNode, parserContext)); + iterator.remove(); } else if (Names.COERCE.equals(fieldName)) { builder.coerce(TypeParsers.nodeBooleanValue(fieldName, Names.COERCE, fieldNode, parserContext)); iterator.remove(); @@ -428,11 +450,13 @@ public Query termQuery(Object value, QueryShardContext context) { } protected Explicit coerce; + protected Explicit ignoreMalformed; - public GeoShapeFieldMapper(String simpleName, MappedFieldType fieldType, Explicit coerce, Settings indexSettings, - MultiFields multiFields, CopyTo copyTo) { + public GeoShapeFieldMapper(String simpleName, MappedFieldType fieldType, Explicit ignoreMalformed, + Explicit coerce, Settings indexSettings, MultiFields multiFields, CopyTo copyTo) { super(simpleName, fieldType, Defaults.FIELD_TYPE, indexSettings, multiFields, copyTo); this.coerce = coerce; + this.ignoreMalformed = ignoreMalformed; } @Override @@ -445,7 +469,7 @@ public Mapper parse(ParseContext context) throws IOException { try { Shape shape = context.parseExternalValue(Shape.class); if (shape == null) { - ShapeBuilder shapeBuilder = ShapeBuilder.parse(context.parser(), this); + ShapeBuilder shapeBuilder = ShapeParser.parse(context.parser(), this); if (shapeBuilder == null) { return null; } @@ -453,7 +477,7 @@ public Mapper parse(ParseContext context) throws IOException { } if (fieldType().pointsOnly() && !(shape instanceof Point)) { throw new MapperParsingException("[{" + fieldType().name() + "}] is 
configured for points only but a " + - ((shape instanceof JtsGeometry) ? ((JtsGeometry)shape).getGeom().getGeometryType() : shape.getClass()) + " was found"); + ((shape instanceof JtsGeometry) ? ((JtsGeometry) shape).getGeom().getGeometryType() : shape.getClass()) + " was found"); } List fields = new ArrayList<>(Arrays.asList(fieldType().defaultStrategy().createIndexableFields(shape))); createFieldNamesField(context, fields); @@ -461,7 +485,9 @@ public Mapper parse(ParseContext context) throws IOException { context.doc().add(field); } } catch (Exception e) { - throw new MapperParsingException("failed to parse [" + fieldType().name() + "]", e); + if (ignoreMalformed.value() == false) { + throw new MapperParsingException("failed to parse [" + fieldType().name() + "]", e); + } } return null; } @@ -478,6 +504,9 @@ protected void doMerge(Mapper mergeWith, boolean updateAllTypes) { if (gsfm.coerce.explicit()) { this.coerce = gsfm.coerce; } + if (gsfm.ignoreMalformed.explicit()) { + this.ignoreMalformed = gsfm.ignoreMalformed; + } } @Override @@ -506,7 +535,10 @@ protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, builder.field(Names.STRATEGY_POINTS_ONLY, fieldType().pointsOnly()); } if (includeDefaults || coerce.explicit()) { - builder.field("coerce", coerce.value()); + builder.field(Names.COERCE, coerce.value()); + } + if (includeDefaults || ignoreMalformed.explicit()) { + builder.field(IGNORE_MALFORMED, ignoreMalformed.value()); } } @@ -514,6 +546,10 @@ public Explicit coerce() { return coerce; } + public Explicit ignoreMalformed() { + return ignoreMalformed; + } + @Override protected String contentType() { return CONTENT_TYPE; diff --git a/core/src/main/java/org/elasticsearch/index/query/GeoShapeQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/GeoShapeQueryBuilder.java index 0aef7a3528ea1..0424cf6f14bb8 100644 --- a/core/src/main/java/org/elasticsearch/index/query/GeoShapeQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/GeoShapeQueryBuilder.java @@ -39,6 +39,7 @@ import org.elasticsearch.common.geo.ShapesAvailability; import org.elasticsearch.common.geo.SpatialStrategy; import org.elasticsearch.common.geo.builders.ShapeBuilder; +import org.elasticsearch.common.geo.parsers.ShapeParser; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.NamedXContentRegistry; @@ -410,7 +411,7 @@ public void onResponse(GetResponse response) { if (pathElements[currentPathSlot].equals(parser.currentName())) { parser.nextToken(); if (++currentPathSlot == pathElements.length) { - listener.onResponse(ShapeBuilder.parse(parser)); + listener.onResponse(ShapeParser.parse(parser)); } } else { parser.nextToken(); @@ -517,7 +518,7 @@ public static GeoShapeQueryBuilder fromXContent(XContentParser parser) throws IO currentFieldName = parser.currentName(); token = parser.nextToken(); if (SHAPE_FIELD.match(currentFieldName)) { - shape = ShapeBuilder.parse(parser); + shape = ShapeParser.parse(parser); } else if (STRATEGY_FIELD.match(currentFieldName)) { String strategyName = parser.text(); strategy = SpatialStrategy.fromString(strategyName); diff --git a/core/src/main/java/org/elasticsearch/indices/IndicesService.java b/core/src/main/java/org/elasticsearch/indices/IndicesService.java index bfe69097b7bac..0caf9ba35089f 100644 --- a/core/src/main/java/org/elasticsearch/indices/IndicesService.java +++ 
b/core/src/main/java/org/elasticsearch/indices/IndicesService.java @@ -1111,6 +1111,12 @@ public void close() { * Can the shard request be cached at all? */ public boolean canCache(ShardSearchRequest request, SearchContext context) { + // Queries that create a scroll context cannot use the cache. + // They modify the search context during their execution so using the cache + // may invalidate the scroll for the next query. + if (request.scroll() != null) { + return false; + } // We cannot cache with DFS because results depend not only on the content of the index but also // on the overridden statistics. So if you ran two queries on the same index with different stats @@ -1119,6 +1125,7 @@ public boolean canCache(ShardSearchRequest request, SearchContext context) { if (SearchType.QUERY_THEN_FETCH != context.searchType()) { return false; } + IndexSettings settings = context.indexShard().indexSettings(); // if not explicitly set in the request, use the index setting, if not, use the request if (request.requestCache() == null) { diff --git a/core/src/main/java/org/elasticsearch/search/SearchModule.java b/core/src/main/java/org/elasticsearch/search/SearchModule.java index 6a56624730cf7..dd7cf962d6cd7 100644 --- a/core/src/main/java/org/elasticsearch/search/SearchModule.java +++ b/core/src/main/java/org/elasticsearch/search/SearchModule.java @@ -21,8 +21,8 @@ import org.apache.lucene.search.BooleanQuery; import org.elasticsearch.common.NamedRegistry; +import org.elasticsearch.common.geo.GeoShapeType; import org.elasticsearch.common.geo.ShapesAvailability; -import org.elasticsearch.common.geo.builders.ShapeBuilders; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.NamedWriteableRegistry.Entry; import org.elasticsearch.common.io.stream.Writeable; @@ -250,6 +250,7 @@ import org.elasticsearch.search.suggest.term.TermSuggestionBuilder; import java.util.ArrayList; +import java.util.Collections; import java.util.List; import java.util.Map; import java.util.function.Consumer; @@ -531,7 +532,7 @@ private void registerPipelineAggregation(PipelineAggregationSpec spec) { private void registerShapes() { if (ShapesAvailability.JTS_AVAILABLE && ShapesAvailability.SPATIAL4J_AVAILABLE) { - ShapeBuilders.register(namedWriteables); + namedWriteables.addAll(GeoShapeType.getShapeWriteables()); } } diff --git a/core/src/main/resources/org/elasticsearch/bootstrap/security.policy b/core/src/main/resources/org/elasticsearch/bootstrap/security.policy index 7268e0f72380b..603e95c3102d0 100644 --- a/core/src/main/resources/org/elasticsearch/bootstrap/security.policy +++ b/core/src/main/resources/org/elasticsearch/bootstrap/security.policy @@ -24,14 +24,14 @@ //// SecurityManager impl: //// Must have all permissions to properly perform access checks -grant codeBase "${codebase.securesm-1.1.jar}" { +grant codeBase "${codebase.securesm}" { permission java.security.AllPermission; }; //// Very special jar permissions: //// These are dangerous permissions that we don't want to grant to everything. 
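The policy grants that follow refer to jars through unversioned codebase properties (for example ${codebase.lucene-core} rather than ${codebase.lucene-core-7.1.0.jar}). As a rough sketch of how such a stable property name can be derived from a versioned jar file name — the class, method, and regex below are invented for illustration and are not the actual bootstrap code — consider:

import java.util.regex.Pattern;

public final class CodebasePropertySketch {
    // Strips a trailing "-<version>.jar" so a policy file can use a stable, unversioned key,
    // e.g. "lucene-core-7.1.0.jar" -> "codebase.lucene-core". Deliberately simplified: it
    // ignores qualifiers such as -SNAPSHOT or -alpha1.
    private static final Pattern VERSION_SUFFIX = Pattern.compile("-\\d+(\\.\\d+)*\\.jar$");

    static String propertyFor(String jarFileName) {
        return "codebase." + VERSION_SUFFIX.matcher(jarFileName).replaceFirst("");
    }

    public static void main(String[] args) {
        System.out.println(propertyFor("lucene-core-7.1.0.jar"));              // codebase.lucene-core
        System.out.println(propertyFor("randomizedtesting-runner-2.5.2.jar")); // codebase.randomizedtesting-runner
    }
}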
-grant codeBase "${codebase.lucene-core-7.1.0.jar}" { +grant codeBase "${codebase.lucene-core}" { // needed to allow MMapDirectory's "unmap hack" (die unmap hack, die) // java 8 package permission java.lang.RuntimePermission "accessClassInPackage.sun.misc"; @@ -42,7 +42,7 @@ grant codeBase "${codebase.lucene-core-7.1.0.jar}" { permission java.lang.RuntimePermission "accessDeclaredMembers"; }; -grant codeBase "${codebase.lucene-misc-7.1.0.jar}" { +grant codeBase "${codebase.lucene-misc}" { // needed to allow shard shrinking to use hard-links if possible via lucenes HardlinkCopyDirectoryWrapper permission java.nio.file.LinkPermission "hard"; }; diff --git a/core/src/main/resources/org/elasticsearch/bootstrap/test-framework.policy b/core/src/main/resources/org/elasticsearch/bootstrap/test-framework.policy index 453621b138e0a..539587c409d42 100644 --- a/core/src/main/resources/org/elasticsearch/bootstrap/test-framework.policy +++ b/core/src/main/resources/org/elasticsearch/bootstrap/test-framework.policy @@ -21,7 +21,7 @@ //// These are mock objects and test management that we allow test framework libs //// to provide on our behalf. But tests themselves cannot do this stuff! -grant codeBase "${codebase.securemock-1.2.jar}" { +grant codeBase "${codebase.securemock}" { // needed to access ReflectionFactory (see below) permission java.lang.RuntimePermission "accessClassInPackage.sun.reflect"; // needed for reflection in ibm jdk @@ -33,7 +33,7 @@ grant codeBase "${codebase.securemock-1.2.jar}" { permission java.lang.reflect.ReflectPermission "suppressAccessChecks"; }; -grant codeBase "${codebase.lucene-test-framework-7.1.0.jar}" { +grant codeBase "${codebase.lucene-test-framework}" { // needed by RamUsageTester permission java.lang.reflect.ReflectPermission "suppressAccessChecks"; // needed for testing hardlinks in StoreRecoveryTests since we install MockFS @@ -42,7 +42,7 @@ grant codeBase "${codebase.lucene-test-framework-7.1.0.jar}" { permission java.lang.RuntimePermission "accessDeclaredMembers"; }; -grant codeBase "${codebase.randomizedtesting-runner-2.5.2.jar}" { +grant codeBase "${codebase.randomizedtesting-runner}" { // optionally needed for access to private test methods (e.g. 
beforeClass) permission java.lang.reflect.ReflectPermission "suppressAccessChecks"; // needed to fail tests on uncaught exceptions from other threads @@ -53,12 +53,12 @@ grant codeBase "${codebase.randomizedtesting-runner-2.5.2.jar}" { permission java.lang.RuntimePermission "accessDeclaredMembers"; }; -grant codeBase "${codebase.junit-4.12.jar}" { +grant codeBase "${codebase.junit}" { // needed for TestClass creation permission java.lang.RuntimePermission "accessDeclaredMembers"; }; -grant codeBase "${codebase.mocksocket-1.2.jar}" { +grant codeBase "${codebase.mocksocket}" { // mocksocket makes and accepts socket connections permission java.net.SocketPermission "*", "accept,connect"; }; @@ -70,12 +70,12 @@ grant codeBase "${codebase.elasticsearch-rest-client}" { permission java.net.NetPermission "getProxySelector"; }; -grant codeBase "${codebase.httpcore-nio-4.4.5.jar}" { +grant codeBase "${codebase.httpcore-nio}" { // httpcore makes socket connections for rest tests permission java.net.SocketPermission "*", "connect"; }; -grant codeBase "${codebase.httpasyncclient-4.1.2.jar}" { +grant codeBase "${codebase.httpasyncclient}" { // httpasyncclient makes socket connections for rest tests permission java.net.SocketPermission "*", "connect"; // rest client uses system properties which gets the default proxy diff --git a/core/src/test/java/org/elasticsearch/cluster/metadata/IndexGraveyardTests.java b/core/src/test/java/org/elasticsearch/cluster/metadata/IndexGraveyardTests.java index 9178d112acade..ef801dad28eb4 100644 --- a/core/src/test/java/org/elasticsearch/cluster/metadata/IndexGraveyardTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/metadata/IndexGraveyardTests.java @@ -19,6 +19,7 @@ package org.elasticsearch.cluster.metadata; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.settings.Settings; @@ -36,6 +37,7 @@ import java.util.List; import java.util.stream.Collectors; +import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.lessThan; import static org.hamcrest.Matchers.not; @@ -66,6 +68,11 @@ public void testXContent() throws IOException { builder.startObject(); graveyard.toXContent(builder, ToXContent.EMPTY_PARAMS); builder.endObject(); + if (graveyard.getTombstones().size() > 0) { + // check that date properly printed + assertThat(Strings.toString(graveyard, false, true), + containsString(XContentBuilder.DEFAULT_DATE_PRINTER.print(graveyard.getTombstones().get(0).getDeleteDateInMillis()))); + } XContentParser parser = createParser(JsonXContent.jsonXContent, builder.bytes()); parser.nextToken(); // the beginning of the parser assertThat(IndexGraveyard.fromXContent(parser), equalTo(graveyard)); diff --git a/core/src/test/java/org/elasticsearch/common/geo/BaseGeoParsingTestCase.java b/core/src/test/java/org/elasticsearch/common/geo/BaseGeoParsingTestCase.java new file mode 100644 index 0000000000000..4c2f58cf6c336 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/common/geo/BaseGeoParsingTestCase.java @@ -0,0 +1,40 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. 
Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.common.geo; + +import com.vividsolutions.jts.geom.GeometryFactory; +import org.elasticsearch.test.ESTestCase; + +import java.io.IOException; + +import static org.elasticsearch.common.geo.builders.ShapeBuilder.SPATIAL_CONTEXT; + +/** + * Base class for shape parsing tests; concrete tests for each supported geo format implement the abstract parse methods declared here. + */ +abstract class BaseGeoParsingTestCase extends ESTestCase { + protected static final GeometryFactory GEOMETRY_FACTORY = SPATIAL_CONTEXT.getGeometryFactory(); + + public abstract void testParsePoint() throws IOException; + public abstract void testParseMultiPoint() throws IOException; + public abstract void testParseLineString() throws IOException; + public abstract void testParseMultiLineString() throws IOException; + public abstract void testParsePolygon() throws IOException; + public abstract void testParseMultiPolygon() throws IOException; +} diff --git a/core/src/test/java/org/elasticsearch/common/geo/GeoJSONShapeParserTests.java b/core/src/test/java/org/elasticsearch/common/geo/GeoJsonShapeParserTests.java similarity index 97% rename from core/src/test/java/org/elasticsearch/common/geo/GeoJSONShapeParserTests.java rename to core/src/test/java/org/elasticsearch/common/geo/GeoJsonShapeParserTests.java index 5ac50b91bd3f5..32f384d96b118 100644 --- a/core/src/test/java/org/elasticsearch/common/geo/GeoJSONShapeParserTests.java +++ b/core/src/test/java/org/elasticsearch/common/geo/GeoJsonShapeParserTests.java @@ -21,7 +21,6 @@ import com.vividsolutions.jts.geom.Coordinate; import com.vividsolutions.jts.geom.Geometry; -import com.vividsolutions.jts.geom.GeometryFactory; import com.vividsolutions.jts.geom.LineString; import com.vividsolutions.jts.geom.LinearRing; import com.vividsolutions.jts.geom.MultiLineString; @@ -29,12 +28,11 @@ import com.vividsolutions.jts.geom.Polygon; import org.elasticsearch.ElasticsearchParseException; -import org.elasticsearch.common.geo.builders.ShapeBuilder; +import org.elasticsearch.common.geo.parsers.ShapeParser; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.json.JsonXContent; -import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.hamcrest.ElasticsearchGeoAssertions; import org.locationtech.spatial4j.exception.InvalidShapeException; import org.locationtech.spatial4j.shape.Circle; @@ -55,11 +53,10 @@ /** * Tests for {@code GeoJSONShapeParser} */ -public class GeoJSONShapeParserTests extends ESTestCase { +public class GeoJsonShapeParserTests extends BaseGeoParsingTestCase { - private static final GeometryFactory GEOMETRY_FACTORY = SPATIAL_CONTEXT.getGeometryFactory(); - - public void testParseSimplePoint() throws IOException { + @Override + public void testParsePoint() throws IOException { XContentBuilder pointGeoJson = XContentFactory.jsonBuilder() .startObject() .field("type", "Point") @@ -70,6 
+67,7 @@ public void testParseSimplePoint() throws IOException { assertGeometryEquals(new JtsPoint(expected, SPATIAL_CONTEXT), pointGeoJson); } + @Override public void testParseLineString() throws IOException { XContentBuilder lineGeoJson = XContentFactory.jsonBuilder() .startObject() @@ -89,6 +87,7 @@ public void testParseLineString() throws IOException { assertGeometryEquals(jtsGeom(expected), lineGeoJson); } + @Override public void testParseMultiLineString() throws IOException { XContentBuilder multilinesGeoJson = XContentFactory.jsonBuilder() .startObject() @@ -205,7 +204,8 @@ public void testParseEnvelope() throws IOException { ElasticsearchGeoAssertions.assertValidException(parser, ElasticsearchParseException.class); } - public void testParsePolygonNoHoles() throws IOException { + @Override + public void testParsePolygon() throws IOException { XContentBuilder polygonGeoJson = XContentFactory.jsonBuilder() .startObject() .field("type", "Polygon") @@ -344,7 +344,7 @@ public void testParseOGCPolygonWithoutHoles() throws IOException { XContentParser parser = createParser(JsonXContent.jsonXContent, polygonGeoJson); parser.nextToken(); - Shape shape = ShapeBuilder.parse(parser).build(); + Shape shape = ShapeParser.parse(parser).build(); ElasticsearchGeoAssertions.assertPolygon(shape); @@ -364,7 +364,7 @@ public void testParseOGCPolygonWithoutHoles() throws IOException { parser = createParser(JsonXContent.jsonXContent, polygonGeoJson); parser.nextToken(); - shape = ShapeBuilder.parse(parser).build(); + shape = ShapeParser.parse(parser).build(); ElasticsearchGeoAssertions.assertMultiPolygon(shape); @@ -384,7 +384,7 @@ public void testParseOGCPolygonWithoutHoles() throws IOException { parser = createParser(JsonXContent.jsonXContent, polygonGeoJson); parser.nextToken(); - shape = ShapeBuilder.parse(parser).build(); + shape = ShapeParser.parse(parser).build(); ElasticsearchGeoAssertions.assertPolygon(shape); @@ -404,7 +404,7 @@ public void testParseOGCPolygonWithoutHoles() throws IOException { parser = createParser(JsonXContent.jsonXContent, polygonGeoJson); parser.nextToken(); - shape = ShapeBuilder.parse(parser).build(); + shape = ShapeParser.parse(parser).build(); ElasticsearchGeoAssertions.assertMultiPolygon(shape); } @@ -432,7 +432,7 @@ public void testParseOGCPolygonWithHoles() throws IOException { XContentParser parser = createParser(JsonXContent.jsonXContent, polygonGeoJson); parser.nextToken(); - Shape shape = ShapeBuilder.parse(parser).build(); + Shape shape = ShapeParser.parse(parser).build(); ElasticsearchGeoAssertions.assertPolygon(shape); @@ -458,7 +458,7 @@ public void testParseOGCPolygonWithHoles() throws IOException { parser = createParser(JsonXContent.jsonXContent, polygonGeoJson); parser.nextToken(); - shape = ShapeBuilder.parse(parser).build(); + shape = ShapeParser.parse(parser).build(); ElasticsearchGeoAssertions.assertMultiPolygon(shape); @@ -484,7 +484,7 @@ public void testParseOGCPolygonWithHoles() throws IOException { parser = createParser(JsonXContent.jsonXContent, polygonGeoJson); parser.nextToken(); - shape = ShapeBuilder.parse(parser).build(); + shape = ShapeParser.parse(parser).build(); ElasticsearchGeoAssertions.assertPolygon(shape); @@ -510,7 +510,7 @@ public void testParseOGCPolygonWithHoles() throws IOException { parser = createParser(JsonXContent.jsonXContent, polygonGeoJson); parser.nextToken(); - shape = ShapeBuilder.parse(parser).build(); + shape = ShapeParser.parse(parser).build(); ElasticsearchGeoAssertions.assertMultiPolygon(shape); } @@ -671,6 +671,7 
@@ public void testParseSelfCrossingPolygon() throws IOException { ElasticsearchGeoAssertions.assertValidException(parser, InvalidShapeException.class); } + @Override public void testParseMultiPoint() throws IOException { XContentBuilder multiPointGeoJson = XContentFactory.jsonBuilder() .startObject() @@ -687,6 +688,7 @@ public void testParseMultiPoint() throws IOException { assertGeometryEquals(expected, multiPointGeoJson); } + @Override public void testParseMultiPolygon() throws IOException { // test #1: two polygons; one without hole, one with hole XContentBuilder multiPolygonGeoJson = XContentFactory.jsonBuilder() @@ -882,7 +884,7 @@ public void testParseOrientationOption() throws IOException { XContentParser parser = createParser(polygonGeoJson); parser.nextToken(); - Shape shape = ShapeBuilder.parse(parser).build(); + Shape shape = ShapeParser.parse(parser).build(); ElasticsearchGeoAssertions.assertPolygon(shape); @@ -911,7 +913,7 @@ public void testParseOrientationOption() throws IOException { parser = createParser(polygonGeoJson); parser.nextToken(); - shape = ShapeBuilder.parse(parser).build(); + shape = ShapeParser.parse(parser).build(); ElasticsearchGeoAssertions.assertPolygon(shape); @@ -940,7 +942,7 @@ public void testParseOrientationOption() throws IOException { parser = createParser(polygonGeoJson); parser.nextToken(); - shape = ShapeBuilder.parse(parser).build(); + shape = ShapeParser.parse(parser).build(); ElasticsearchGeoAssertions.assertPolygon(shape); @@ -969,7 +971,7 @@ public void testParseOrientationOption() throws IOException { parser = createParser(polygonGeoJson); parser.nextToken(); - shape = ShapeBuilder.parse(parser).build(); + shape = ShapeParser.parse(parser).build(); ElasticsearchGeoAssertions.assertMultiPolygon(shape); @@ -998,7 +1000,7 @@ public void testParseOrientationOption() throws IOException { parser = createParser(polygonGeoJson); parser.nextToken(); - shape = ShapeBuilder.parse(parser).build(); + shape = ShapeParser.parse(parser).build(); ElasticsearchGeoAssertions.assertMultiPolygon(shape); @@ -1027,7 +1029,7 @@ public void testParseOrientationOption() throws IOException { parser = createParser(polygonGeoJson); parser.nextToken(); - shape = ShapeBuilder.parse(parser).build(); + shape = ShapeParser.parse(parser).build(); ElasticsearchGeoAssertions.assertMultiPolygon(shape); } @@ -1035,7 +1037,7 @@ public void testParseOrientationOption() throws IOException { private void assertGeometryEquals(Shape expected, XContentBuilder geoJson) throws IOException { XContentParser parser = createParser(geoJson); parser.nextToken(); - ElasticsearchGeoAssertions.assertEquals(expected, ShapeBuilder.parse(parser).build()); + ElasticsearchGeoAssertions.assertEquals(expected, ShapeParser.parse(parser).build()); } private ShapeCollection shapeCollection(Shape... 
shapes) { diff --git a/core/src/test/java/org/elasticsearch/common/geo/ShapeBuilderTests.java b/core/src/test/java/org/elasticsearch/common/geo/ShapeBuilderTests.java index dc01a39cb8145..d1f7d5601a6cc 100644 --- a/core/src/test/java/org/elasticsearch/common/geo/ShapeBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/common/geo/ShapeBuilderTests.java @@ -24,10 +24,13 @@ import com.vividsolutions.jts.geom.Polygon; import org.elasticsearch.common.geo.builders.CoordinatesBuilder; +import org.elasticsearch.common.geo.builders.CircleBuilder; +import org.elasticsearch.common.geo.builders.EnvelopeBuilder; import org.elasticsearch.common.geo.builders.LineStringBuilder; +import org.elasticsearch.common.geo.builders.MultiLineStringBuilder; +import org.elasticsearch.common.geo.builders.PointBuilder; import org.elasticsearch.common.geo.builders.PolygonBuilder; import org.elasticsearch.common.geo.builders.ShapeBuilder; -import org.elasticsearch.common.geo.builders.ShapeBuilders; import org.elasticsearch.test.ESTestCase; import org.locationtech.spatial4j.exception.InvalidShapeException; import org.locationtech.spatial4j.shape.Circle; @@ -46,13 +49,13 @@ public class ShapeBuilderTests extends ESTestCase { public void testNewPoint() { - Point point = ShapeBuilders.newPoint(-100, 45).build(); + Point point = new PointBuilder().coordinate(-100, 45).build(); assertEquals(-100D, point.getX(), 0.0d); assertEquals(45D, point.getY(), 0.0d); } public void testNewRectangle() { - Rectangle rectangle = ShapeBuilders.newEnvelope(new Coordinate(-45, 30), new Coordinate(45, -30)).build(); + Rectangle rectangle = new EnvelopeBuilder(new Coordinate(-45, 30), new Coordinate(45, -30)).build(); assertEquals(-45D, rectangle.getMinX(), 0.0d); assertEquals(-30D, rectangle.getMinY(), 0.0d); assertEquals(45D, rectangle.getMaxX(), 0.0d); @@ -60,7 +63,7 @@ public void testNewRectangle() { } public void testNewPolygon() { - Polygon polygon = ShapeBuilders.newPolygon(new CoordinatesBuilder() + Polygon polygon = new PolygonBuilder(new CoordinatesBuilder() .coordinate(-45, 30) .coordinate(45, 30) .coordinate(45, -30) @@ -75,7 +78,7 @@ public void testNewPolygon() { } public void testNewPolygon_coordinate() { - Polygon polygon = ShapeBuilders.newPolygon(new CoordinatesBuilder() + Polygon polygon = new PolygonBuilder(new CoordinatesBuilder() .coordinate(new Coordinate(-45, 30)) .coordinate(new Coordinate(45, 30)) .coordinate(new Coordinate(45, -30)) @@ -90,7 +93,7 @@ public void testNewPolygon_coordinate() { } public void testNewPolygon_coordinates() { - Polygon polygon = ShapeBuilders.newPolygon(new CoordinatesBuilder() + Polygon polygon = new PolygonBuilder(new CoordinatesBuilder() .coordinates(new Coordinate(-45, 30), new Coordinate(45, 30), new Coordinate(45, -30), new Coordinate(-45, -30), new Coordinate(-45, 30)) ).toPolygon(); @@ -103,7 +106,7 @@ public void testNewPolygon_coordinates() { public void testLineStringBuilder() { // Building a simple LineString - ShapeBuilders.newLineString(new CoordinatesBuilder() + new LineStringBuilder(new CoordinatesBuilder() .coordinate(-130.0, 55.0) .coordinate(-130.0, -40.0) .coordinate(-15.0, -40.0) @@ -114,7 +117,7 @@ public void testLineStringBuilder() { .coordinate(-110.0, 55.0)).build(); // Building a linestring that needs to be wrapped - ShapeBuilders.newLineString(new CoordinatesBuilder() + new LineStringBuilder(new CoordinatesBuilder() .coordinate(100.0, 50.0) .coordinate(110.0, -40.0) .coordinate(240.0, -40.0) @@ -127,7 +130,7 @@ public void testLineStringBuilder() { 
.build(); // Building a lineString on the dateline - ShapeBuilders.newLineString(new CoordinatesBuilder() + new LineStringBuilder(new CoordinatesBuilder() .coordinate(-180.0, 80.0) .coordinate(-180.0, 40.0) .coordinate(-180.0, -40.0) @@ -136,7 +139,7 @@ public void testLineStringBuilder() { .build(); // Building a lineString on the dateline - ShapeBuilders.newLineString(new CoordinatesBuilder() + new LineStringBuilder(new CoordinatesBuilder() .coordinate(180.0, 80.0) .coordinate(180.0, 40.0) .coordinate(180.0, -40.0) @@ -146,7 +149,7 @@ public void testLineStringBuilder() { } public void testMultiLineString() { - ShapeBuilders.newMultiLinestring() + new MultiLineStringBuilder() .linestring(new LineStringBuilder(new CoordinatesBuilder() .coordinate(-100.0, 50.0) .coordinate(50.0, 50.0) @@ -164,7 +167,7 @@ public void testMultiLineString() { .build(); // LineString that needs to be wrapped - ShapeBuilders.newMultiLinestring() + new MultiLineStringBuilder() .linestring(new LineStringBuilder(new CoordinatesBuilder() .coordinate(150.0, 60.0) .coordinate(200.0, 60.0) @@ -183,7 +186,7 @@ public void testMultiLineString() { } public void testPolygonSelfIntersection() { - PolygonBuilder newPolygon = ShapeBuilders.newPolygon(new CoordinatesBuilder() + PolygonBuilder newPolygon = new PolygonBuilder(new CoordinatesBuilder() .coordinate(-40.0, 50.0) .coordinate(40.0, 50.0) .coordinate(-40.0, -50.0) @@ -194,31 +197,31 @@ public void testPolygonSelfIntersection() { public void testGeoCircle() { double earthCircumference = 40075016.69; - Circle circle = ShapeBuilders.newCircleBuilder().center(0, 0).radius("100m").build(); + Circle circle = new CircleBuilder().center(0, 0).radius("100m").build(); assertEquals((360 * 100) / earthCircumference, circle.getRadius(), 0.00000001); assertEquals(new PointImpl(0, 0, ShapeBuilder.SPATIAL_CONTEXT), circle.getCenter()); - circle = ShapeBuilders.newCircleBuilder().center(+180, 0).radius("100m").build(); + circle = new CircleBuilder().center(+180, 0).radius("100m").build(); assertEquals((360 * 100) / earthCircumference, circle.getRadius(), 0.00000001); assertEquals(new PointImpl(180, 0, ShapeBuilder.SPATIAL_CONTEXT), circle.getCenter()); - circle = ShapeBuilders.newCircleBuilder().center(-180, 0).radius("100m").build(); + circle = new CircleBuilder().center(-180, 0).radius("100m").build(); assertEquals((360 * 100) / earthCircumference, circle.getRadius(), 0.00000001); assertEquals(new PointImpl(-180, 0, ShapeBuilder.SPATIAL_CONTEXT), circle.getCenter()); - circle = ShapeBuilders.newCircleBuilder().center(0, 90).radius("100m").build(); + circle = new CircleBuilder().center(0, 90).radius("100m").build(); assertEquals((360 * 100) / earthCircumference, circle.getRadius(), 0.00000001); assertEquals(new PointImpl(0, 90, ShapeBuilder.SPATIAL_CONTEXT), circle.getCenter()); - circle = ShapeBuilders.newCircleBuilder().center(0, -90).radius("100m").build(); + circle = new CircleBuilder().center(0, -90).radius("100m").build(); assertEquals((360 * 100) / earthCircumference, circle.getRadius(), 0.00000001); assertEquals(new PointImpl(0, -90, ShapeBuilder.SPATIAL_CONTEXT), circle.getCenter()); double randomLat = (randomDouble() * 180) - 90; double randomLon = (randomDouble() * 360) - 180; double randomRadius = randomIntBetween(1, (int) earthCircumference / 4); - circle = ShapeBuilders.newCircleBuilder().center(randomLon, randomLat).radius(randomRadius + "m").build(); + circle = new CircleBuilder().center(randomLon, randomLat).radius(randomRadius + "m").build(); assertEquals((360 * 
randomRadius) / earthCircumference, circle.getRadius(), 0.00000001); assertEquals(new PointImpl(randomLon, randomLat, ShapeBuilder.SPATIAL_CONTEXT), circle.getCenter()); } public void testPolygonWrapping() { - Shape shape = ShapeBuilders.newPolygon(new CoordinatesBuilder() + Shape shape = new PolygonBuilder(new CoordinatesBuilder() .coordinate(-150.0, 65.0) .coordinate(-250.0, 65.0) .coordinate(-250.0, -65.0) @@ -231,7 +234,7 @@ public void testPolygonWrapping() { } public void testLineStringWrapping() { - Shape shape = ShapeBuilders.newLineString(new CoordinatesBuilder() + Shape shape = new LineStringBuilder(new CoordinatesBuilder() .coordinate(-150.0, 65.0) .coordinate(-250.0, 65.0) .coordinate(-250.0, -65.0) @@ -248,7 +251,7 @@ public void testDatelineOGC() { // expected results: 3 polygons, 1 with a hole // a giant c shape - PolygonBuilder builder = ShapeBuilders.newPolygon(new CoordinatesBuilder() + PolygonBuilder builder = new PolygonBuilder(new CoordinatesBuilder() .coordinate(174,0) .coordinate(-176,0) .coordinate(-176,3) @@ -292,7 +295,7 @@ public void testDateline() { // expected results: 3 polygons, 1 with a hole // a giant c shape - PolygonBuilder builder = ShapeBuilders.newPolygon(new CoordinatesBuilder() + PolygonBuilder builder = new PolygonBuilder(new CoordinatesBuilder() .coordinate(-186,0) .coordinate(-176,0) .coordinate(-176,3) @@ -331,7 +334,7 @@ public void testDateline() { } public void testComplexShapeWithHole() { - PolygonBuilder builder = ShapeBuilders.newPolygon(new CoordinatesBuilder() + PolygonBuilder builder = new PolygonBuilder(new CoordinatesBuilder() .coordinate(-85.0018514,37.1311314) .coordinate(-85.0016645,37.1315293) .coordinate(-85.0016246,37.1317069) @@ -407,7 +410,7 @@ public void testComplexShapeWithHole() { } public void testShapeWithHoleAtEdgeEndPoints() { - PolygonBuilder builder = ShapeBuilders.newPolygon(new CoordinatesBuilder() + PolygonBuilder builder = new PolygonBuilder(new CoordinatesBuilder() .coordinate(-4, 2) .coordinate(4, 2) .coordinate(6, 0) @@ -430,7 +433,7 @@ public void testShapeWithHoleAtEdgeEndPoints() { } public void testShapeWithPointOnDateline() { - PolygonBuilder builder = ShapeBuilders.newPolygon(new CoordinatesBuilder() + PolygonBuilder builder = new PolygonBuilder(new CoordinatesBuilder() .coordinate(180, 0) .coordinate(176, 4) .coordinate(176, -4) @@ -443,7 +446,7 @@ public void testShapeWithPointOnDateline() { public void testShapeWithEdgeAlongDateline() { // test case 1: test the positive side of the dateline - PolygonBuilder builder = ShapeBuilders.newPolygon(new CoordinatesBuilder() + PolygonBuilder builder = new PolygonBuilder(new CoordinatesBuilder() .coordinate(180, 0) .coordinate(176, 4) .coordinate(180, -4) @@ -454,7 +457,7 @@ public void testShapeWithEdgeAlongDateline() { assertPolygon(shape); // test case 2: test the negative side of the dateline - builder = ShapeBuilders.newPolygon(new CoordinatesBuilder() + builder = new PolygonBuilder(new CoordinatesBuilder() .coordinate(-176, 4) .coordinate(-180, 0) .coordinate(-180, -4) @@ -467,7 +470,7 @@ public void testShapeWithEdgeAlongDateline() { public void testShapeWithBoundaryHoles() { // test case 1: test the positive side of the dateline - PolygonBuilder builder = ShapeBuilders.newPolygon(new CoordinatesBuilder() + PolygonBuilder builder = new PolygonBuilder(new CoordinatesBuilder() .coordinate(-177, 10) .coordinate(176, 15) .coordinate(172, 0) @@ -486,7 +489,7 @@ public void testShapeWithBoundaryHoles() { assertMultiPolygon(shape); // test case 2: test the 
negative side of the dateline - builder = ShapeBuilders.newPolygon( + builder = new PolygonBuilder( new CoordinatesBuilder() .coordinate(-176, 15) .coordinate(179, 10) @@ -510,7 +513,7 @@ public void testShapeWithBoundaryHoles() { public void testShapeWithTangentialHole() { // test a shape with one tangential (shared) vertex (should pass) - PolygonBuilder builder = ShapeBuilders.newPolygon(new CoordinatesBuilder() + PolygonBuilder builder = new PolygonBuilder(new CoordinatesBuilder() .coordinate(179, 10) .coordinate(168, 15) .coordinate(164, 0) @@ -531,7 +534,7 @@ public void testShapeWithTangentialHole() { public void testShapeWithInvalidTangentialHole() { // test a shape with one invalid tangential (shared) vertex (should throw exception) - PolygonBuilder builder = ShapeBuilders.newPolygon(new CoordinatesBuilder() + PolygonBuilder builder = new PolygonBuilder(new CoordinatesBuilder() .coordinate(179, 10) .coordinate(168, 15) .coordinate(164, 0) @@ -552,7 +555,7 @@ public void testShapeWithInvalidTangentialHole() { public void testBoundaryShapeWithTangentialHole() { // test a shape with one tangential (shared) vertex for each hole (should pass) - PolygonBuilder builder = ShapeBuilders.newPolygon(new CoordinatesBuilder() + PolygonBuilder builder = new PolygonBuilder(new CoordinatesBuilder() .coordinate(-177, 10) .coordinate(176, 15) .coordinate(172, 0) @@ -579,7 +582,7 @@ public void testBoundaryShapeWithTangentialHole() { public void testBoundaryShapeWithInvalidTangentialHole() { // test shape with two tangential (shared) vertices (should throw exception) - PolygonBuilder builder = ShapeBuilders.newPolygon(new CoordinatesBuilder() + PolygonBuilder builder = new PolygonBuilder(new CoordinatesBuilder() .coordinate(-177, 10) .coordinate(176, 15) .coordinate(172, 0) @@ -602,7 +605,7 @@ public void testBoundaryShapeWithInvalidTangentialHole() { * Test an enveloping polygon around the max mercator bounds */ public void testBoundaryShape() { - PolygonBuilder builder = ShapeBuilders.newPolygon(new CoordinatesBuilder() + PolygonBuilder builder = new PolygonBuilder(new CoordinatesBuilder() .coordinate(-180, 90) .coordinate(180, 90) .coordinate(180, -90) @@ -616,7 +619,7 @@ public void testBoundaryShape() { public void testShapeWithAlternateOrientation() { // cw: should produce a multi polygon spanning hemispheres - PolygonBuilder builder = ShapeBuilders.newPolygon(new CoordinatesBuilder() + PolygonBuilder builder = new PolygonBuilder(new CoordinatesBuilder() .coordinate(180, 0) .coordinate(176, 4) .coordinate(-176, 4) @@ -627,7 +630,7 @@ public void testShapeWithAlternateOrientation() { assertPolygon(shape); // cw: geo core will convert to ccw across the dateline - builder = ShapeBuilders.newPolygon(new CoordinatesBuilder() + builder = new PolygonBuilder(new CoordinatesBuilder() .coordinate(180, 0) .coordinate(-176, 4) .coordinate(176, 4) @@ -640,7 +643,7 @@ public void testShapeWithAlternateOrientation() { } public void testInvalidShapeWithConsecutiveDuplicatePoints() { - PolygonBuilder builder = ShapeBuilders.newPolygon(new CoordinatesBuilder() + PolygonBuilder builder = new PolygonBuilder(new CoordinatesBuilder() .coordinate(180, 0) .coordinate(176, 4) .coordinate(176, 4) diff --git a/core/src/test/java/org/elasticsearch/common/geo/builders/AbstractShapeBuilderTestCase.java b/core/src/test/java/org/elasticsearch/common/geo/builders/AbstractShapeBuilderTestCase.java index c3e4781b3d27c..5ac55832959d7 100644 --- 
a/core/src/test/java/org/elasticsearch/common/geo/builders/AbstractShapeBuilderTestCase.java +++ b/core/src/test/java/org/elasticsearch/common/geo/builders/AbstractShapeBuilderTestCase.java @@ -19,13 +19,14 @@ package org.elasticsearch.common.geo.builders; +import org.elasticsearch.common.geo.GeoShapeType; +import org.elasticsearch.common.geo.parsers.ShapeParser; import org.elasticsearch.common.io.stream.NamedWriteable; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.Writeable.Reader; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; -import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.test.ESTestCase; @@ -33,8 +34,6 @@ import org.junit.BeforeClass; import java.io.IOException; -import java.util.ArrayList; -import java.util.List; import static org.elasticsearch.test.EqualsHashCodeTestUtils.checkEqualsAndHashCode; @@ -49,9 +48,7 @@ public abstract class AbstractShapeBuilderTestCase exte @BeforeClass public static void init() { if (namedWriteableRegistry == null) { - List shapes = new ArrayList<>(); - ShapeBuilders.register(shapes); - namedWriteableRegistry = new NamedWriteableRegistry(shapes); + namedWriteableRegistry = new NamedWriteableRegistry(GeoShapeType.getShapeWriteables()); } } @@ -82,9 +79,9 @@ public void testFromXContent() throws IOException { } XContentBuilder builder = testShape.toXContent(contentBuilder, ToXContent.EMPTY_PARAMS); XContentBuilder shuffled = shuffleXContent(builder); - XContentParser shapeParser = createParser(shuffled); - shapeParser.nextToken(); - ShapeBuilder parsedShape = ShapeBuilder.parse(shapeParser); + XContentParser shapeContentParser = createParser(shuffled); + shapeContentParser.nextToken(); + ShapeBuilder parsedShape = ShapeParser.parse(shapeContentParser); assertNotSame(testShape, parsedShape); assertEquals(testShape, parsedShape); assertEquals(testShape.hashCode(), parsedShape.hashCode()); diff --git a/core/src/test/java/org/elasticsearch/common/geo/builders/LineStringBuilderTests.java b/core/src/test/java/org/elasticsearch/common/geo/builders/LineStringBuilderTests.java index be7772b0da60a..3b5f2662316ca 100644 --- a/core/src/test/java/org/elasticsearch/common/geo/builders/LineStringBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/common/geo/builders/LineStringBuilderTests.java @@ -49,7 +49,7 @@ protected LineStringBuilder createMutation(LineStringBuilder original) throws IO } static LineStringBuilder mutate(LineStringBuilder original) throws IOException { - LineStringBuilder mutation = (LineStringBuilder) copyShape(original); + LineStringBuilder mutation = copyShape(original); Coordinate[] coordinates = original.coordinates(false); Coordinate coordinate = randomFrom(coordinates); if (randomBoolean()) { @@ -65,7 +65,7 @@ static LineStringBuilder mutate(LineStringBuilder original) throws IOException { coordinate.y = randomDoubleBetween(-90.0, 90.0, true); } } - return mutation.coordinates(coordinates); + return LineStringBuilder.class.cast(mutation.coordinates(coordinates)); } static LineStringBuilder createRandomShape() { diff --git a/core/src/test/java/org/elasticsearch/common/geo/builders/MultiLineStringBuilderTests.java b/core/src/test/java/org/elasticsearch/common/geo/builders/MultiLineStringBuilderTests.java index 
9fbaed815be1d..b650939594077 100644 --- a/core/src/test/java/org/elasticsearch/common/geo/builders/MultiLineStringBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/common/geo/builders/MultiLineStringBuilderTests.java @@ -68,6 +68,6 @@ static MultiLineStringBuilder mutate(MultiLineStringBuilder original) throws IOE } static MultiLineStringBuilder createRandomShape() { - return new MultiLineStringBuilder(); + return MultiLineStringBuilder.class.cast(RandomShapeGenerator.createShape(random(), ShapeType.MULTILINESTRING)); } } diff --git a/core/src/test/java/org/elasticsearch/common/geo/builders/MultiPointBuilderTests.java b/core/src/test/java/org/elasticsearch/common/geo/builders/MultiPointBuilderTests.java index 14c1acc8c60ee..c0a799e1c306e 100644 --- a/core/src/test/java/org/elasticsearch/common/geo/builders/MultiPointBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/common/geo/builders/MultiPointBuilderTests.java @@ -70,7 +70,7 @@ static MultiPointBuilder mutate(MultiPointBuilder original) throws IOException { } else { coordinates = new Coordinate[]{new Coordinate(1.0, 1.0)}; } - return mutation.coordinates(coordinates); + return MultiPointBuilder.class.cast(mutation.coordinates(coordinates)); } static MultiPointBuilder createRandomShape() { diff --git a/core/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java b/core/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java index bd4ac25a8747b..2015a6b42d16f 100644 --- a/core/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java +++ b/core/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java @@ -122,7 +122,7 @@ public void testResetSettingWithIPValidator() { Settings.Builder builder = Settings.builder(); Settings updates = Settings.builder().putNull("index.routing.allocation.require._ip") .put("index.some.dyn.setting", 1).build(); - settings.validate(updates); + settings.validate(updates, false); settings.updateDynamicSettings(updates, Settings.builder().put(currentSettings), builder, "node"); currentSettings = builder.build(); @@ -160,6 +160,26 @@ public void testAddConsumer() { assertEquals(0, consumer2.get()); } + public void testDependentSettings() { + Setting.AffixSetting stringSetting = Setting.affixKeySetting("foo.", "name", + (k) -> Setting.simpleString(k, Property.Dynamic, Property.NodeScope)); + Setting.AffixSetting intSetting = Setting.affixKeySetting("foo.", "bar", + (k) -> Setting.intSetting(k, 1, Property.Dynamic, Property.NodeScope), stringSetting); + + AbstractScopedSettings service = new ClusterSettings(Settings.EMPTY,new HashSet<>(Arrays.asList(intSetting, stringSetting))); + + IllegalArgumentException iae = expectThrows(IllegalArgumentException.class, + () -> service.validate(Settings.builder().put("foo.test.bar", 7).build(), true)); + assertEquals("Missing required setting [foo.test.name] for setting [foo.test.bar]", iae.getMessage()); + + service.validate(Settings.builder() + .put("foo.test.name", "test") + .put("foo.test.bar", 7) + .build(), true); + + service.validate(Settings.builder().put("foo.test.bar", 7).build(), false); + } + public void testAddConsumerAffix() { Setting.AffixSetting intSetting = Setting.affixKeySetting("foo.", "bar", (k) -> Setting.intSetting(k, 1, Property.Dynamic, Property.NodeScope)); @@ -585,7 +605,7 @@ public void testValidateWithSuggestion() { Settings.EMPTY, IndexScopedSettings.BUILT_IN_INDEX_SETTINGS); IllegalArgumentException iae = expectThrows(IllegalArgumentException.class, - () -> 
settings.validate(Settings.builder().put("index.numbe_of_replica", "1").build())); + () -> settings.validate(Settings.builder().put("index.numbe_of_replica", "1").build(), false)); assertEquals(iae.getMessage(), "unknown setting [index.numbe_of_replica] did you mean [index.number_of_replicas]?"); } @@ -595,26 +615,23 @@ public void testValidate() { IndexScopedSettings.BUILT_IN_INDEX_SETTINGS); String unknownMsgSuffix = " please check that any required plugins are installed, or check the breaking changes documentation for" + " removed settings"; - settings.validate(Settings.builder().put("index.store.type", "boom")); - settings.validate(Settings.builder().put("index.store.type", "boom").build()); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> - settings.validate(Settings.builder().put("index.store.type", "boom").put("i.am.not.a.setting", true))); - assertEquals("unknown setting [i.am.not.a.setting]" + unknownMsgSuffix, e.getMessage()); + settings.validate(Settings.builder().put("index.store.type", "boom").build(), false); - e = expectThrows(IllegalArgumentException.class, () -> - settings.validate(Settings.builder().put("index.store.type", "boom").put("i.am.not.a.setting", true).build())); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> + settings.validate(Settings.builder().put("index.store.type", "boom").put("i.am.not.a.setting", true).build(), false)); assertEquals("unknown setting [i.am.not.a.setting]" + unknownMsgSuffix, e.getMessage()); e = expectThrows(IllegalArgumentException.class, () -> - settings.validate(Settings.builder().put("index.store.type", "boom").put("index.number_of_replicas", true).build())); + settings.validate(Settings.builder().put("index.store.type", "boom").put("index.number_of_replicas", true).build(), false)); assertEquals("Failed to parse value [true] for setting [index.number_of_replicas]", e.getMessage()); e = expectThrows(IllegalArgumentException.class, () -> - settings.validate("index.number_of_replicas", Settings.builder().put("index.number_of_replicas", "true").build())); + settings.validate("index.number_of_replicas", Settings.builder().put("index.number_of_replicas", "true").build(), false)); assertEquals("Failed to parse value [true] for setting [index.number_of_replicas]", e.getMessage()); e = expectThrows(IllegalArgumentException.class, () -> - settings.validate("index.similarity.classic.type", Settings.builder().put("index.similarity.classic.type", "mine").build())); + settings.validate("index.similarity.classic.type", Settings.builder().put("index.similarity.classic.type", "mine").build(), + false)); assertEquals("illegal value for [index.similarity.classic] cannot redefine built-in similarity", e.getMessage()); } @@ -624,12 +641,12 @@ public void testValidateSecureSettings() { Settings settings = Settings.builder().setSecureSettings(secureSettings).build(); final ClusterSettings clusterSettings = new ClusterSettings(settings, Collections.emptySet()); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> clusterSettings.validate(settings)); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> clusterSettings.validate(settings, false)); assertThat(e.getMessage(), startsWith("unknown secure setting [some.secure.setting]")); ClusterSettings clusterSettings2 = new ClusterSettings(settings, Collections.singleton(SecureSetting.secureString("some.secure.setting", null))); - clusterSettings2.validate(settings); + 
clusterSettings2.validate(settings, false); } public void testDiffSecureSettings() { @@ -722,7 +739,7 @@ public void testLoggingUpdates() { IllegalArgumentException ex = expectThrows( IllegalArgumentException.class, - () -> settings.validate(Settings.builder().put("logger._root", "boom").build())); + () -> settings.validate(Settings.builder().put("logger._root", "boom").build(), false)); assertEquals("Unknown level constant [BOOM].", ex.getMessage()); assertEquals(level, ESLoggerFactory.getRootLogger().getLevel()); settings.applySettings(Settings.builder().put("logger._root", "TRACE").build()); diff --git a/core/src/test/java/org/elasticsearch/common/settings/SettingTests.java b/core/src/test/java/org/elasticsearch/common/settings/SettingTests.java index 65d51e126c9f6..4a4beb2e0e3ef 100644 --- a/core/src/test/java/org/elasticsearch/common/settings/SettingTests.java +++ b/core/src/test/java/org/elasticsearch/common/settings/SettingTests.java @@ -30,6 +30,7 @@ import java.util.Iterator; import java.util.List; import java.util.Map; +import java.util.Set; import java.util.concurrent.atomic.AtomicReference; import java.util.function.Function; import java.util.stream.Collectors; @@ -42,6 +43,7 @@ import static org.hamcrest.Matchers.is; public class SettingTests extends ESTestCase { + public void testGet() { Setting booleanSetting = Setting.boolSetting("foo.bar", false, Property.Dynamic, Property.NodeScope); assertFalse(booleanSetting.get(Settings.EMPTY)); @@ -577,6 +579,22 @@ public void testAffixKeySetting() { assertFalse(listAffixSetting.match("foo")); } + public void testAffixSettingNamespaces() { + Setting.AffixSetting setting = + Setting.affixKeySetting("foo.", "enable", (key) -> Setting.boolSetting(key, false, Property.NodeScope)); + Settings build = Settings.builder() + .put("foo.bar.enable", "true") + .put("foo.baz.enable", "true") + .put("foo.boom.enable", "true") + .put("something.else", "true") + .build(); + Set namespaces = setting.getNamespaces(build); + assertEquals(3, namespaces.size()); + assertTrue(namespaces.contains("bar")); + assertTrue(namespaces.contains("baz")); + assertTrue(namespaces.contains("boom")); + } + public void testAffixAsMap() { Setting.AffixSetting setting = Setting.prefixKeySetting("foo.bar.", key -> Setting.simpleString(key, Property.NodeScope)); diff --git a/core/src/test/java/org/elasticsearch/discovery/zen/UnicastZenPingTests.java b/core/src/test/java/org/elasticsearch/discovery/zen/UnicastZenPingTests.java index 3c7a49a176635..853294de186d1 100644 --- a/core/src/test/java/org/elasticsearch/discovery/zen/UnicastZenPingTests.java +++ b/core/src/test/java/org/elasticsearch/discovery/zen/UnicastZenPingTests.java @@ -258,6 +258,16 @@ protected Version getVersion() { assertPingCount(handleD, handleA, 0); assertPingCount(handleD, handleB, 0); assertPingCount(handleD, handleC, 3); + + zenPingC.close(); + handleD.counters.clear(); + logger.info("ping from UZP_D after closing UZP_C"); + pingResponses = zenPingD.pingAndWait().toList(); + // check that node does not respond to pings anymore after the ping service has been closed + assertThat(pingResponses.size(), equalTo(0)); + assertPingCount(handleD, handleA, 0); + assertPingCount(handleD, handleB, 0); + assertPingCount(handleD, handleC, 3); } public void testUnknownHostNotCached() throws ExecutionException, InterruptedException { diff --git a/core/src/test/java/org/elasticsearch/index/IndexSettingsTests.java b/core/src/test/java/org/elasticsearch/index/IndexSettingsTests.java index 6be786aff88b5..79c306f43f151 
100644 --- a/core/src/test/java/org/elasticsearch/index/IndexSettingsTests.java +++ b/core/src/test/java/org/elasticsearch/index/IndexSettingsTests.java @@ -498,7 +498,7 @@ public void testSingleTypeSetting() { assertTrue(index.isSingleType()); expectThrows(IllegalArgumentException.class, () -> { index.getScopedSettings() - .validate(Settings.builder().put(IndexSettings.INDEX_MAPPING_SINGLE_TYPE_SETTING_KEY, randomBoolean()).build()); + .validate(Settings.builder().put(IndexSettings.INDEX_MAPPING_SINGLE_TYPE_SETTING_KEY, randomBoolean()).build(), false); }); } { diff --git a/core/src/test/java/org/elasticsearch/index/mapper/ExternalMapper.java b/core/src/test/java/org/elasticsearch/index/mapper/ExternalMapper.java index 33e3bc201835d..67c8435520aef 100755 --- a/core/src/test/java/org/elasticsearch/index/mapper/ExternalMapper.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/ExternalMapper.java @@ -24,13 +24,13 @@ import org.apache.lucene.search.DocValuesFieldExistsQuery; import org.apache.lucene.search.Query; import org.apache.lucene.search.TermQuery; +import org.elasticsearch.common.geo.builders.PointBuilder; +import org.locationtech.spatial4j.shape.Point; import org.elasticsearch.common.collect.Iterators; import org.elasticsearch.common.geo.GeoPoint; -import org.elasticsearch.common.geo.builders.ShapeBuilders; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.index.query.QueryShardContext; -import org.locationtech.spatial4j.shape.Point; import java.io.IOException; import java.nio.charset.Charset; @@ -181,7 +181,7 @@ public Mapper parse(ParseContext context) throws IOException { pointMapper.parse(context.createExternalValueContext(point)); // Let's add a Dummy Shape - Point shape = ShapeBuilders.newPoint(-100, 45).build(); + Point shape = new PointBuilder(-100, 45).build(); shapeMapper.parse(context.createExternalValueContext(shape)); context = context.createExternalValueContext(generatedValue); diff --git a/core/src/test/java/org/elasticsearch/index/mapper/ExternalValuesMapperIntegrationIT.java b/core/src/test/java/org/elasticsearch/index/mapper/ExternalValuesMapperIntegrationIT.java index f0109e10abbb4..6dacbc9f64ab9 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/ExternalValuesMapperIntegrationIT.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/ExternalValuesMapperIntegrationIT.java @@ -21,7 +21,7 @@ import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.common.geo.ShapeRelation; -import org.elasticsearch.common.geo.builders.ShapeBuilders; +import org.elasticsearch.common.geo.builders.PointBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.plugins.Plugin; @@ -118,7 +118,7 @@ public void testExternalValues() throws Exception { assertThat(response.getHits().getTotalHits(), equalTo((long) 1)); response = client().prepareSearch("test-idx") - .setPostFilter(QueryBuilders.geoShapeQuery("field.shape", ShapeBuilders.newPoint(-100, 45)).relation(ShapeRelation.WITHIN)) + .setPostFilter(QueryBuilders.geoShapeQuery("field.shape", new PointBuilder(-100, 45)).relation(ShapeRelation.WITHIN)) .execute().actionGet(); assertThat(response.getHits().getTotalHits(), equalTo((long) 1)); diff --git a/core/src/test/java/org/elasticsearch/index/mapper/GeoShapeFieldMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/GeoShapeFieldMapperTests.java index 
5972a8ecee8c9..e43cfbe1fd1c1 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/GeoShapeFieldMapperTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/GeoShapeFieldMapperTests.java @@ -22,6 +22,7 @@ import org.apache.lucene.spatial.prefix.RecursivePrefixTreeStrategy; import org.apache.lucene.spatial.prefix.tree.GeohashPrefixTree; import org.apache.lucene.spatial.prefix.tree.QuadPrefixTree; +import org.elasticsearch.common.Explicit; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.geo.GeoUtils; import org.elasticsearch.common.geo.builders.ShapeBuilder; @@ -103,7 +104,7 @@ public void testOrientationParsing() throws IOException { } /** - * Test that orientation parameter correctly parses + * Test that coerce parameter correctly parses */ public void testCoerceParsing() throws IOException { String mapping = XContentFactory.jsonBuilder().startObject().startObject("type1") @@ -136,6 +137,41 @@ public void testCoerceParsing() throws IOException { assertThat(coerce, equalTo(false)); } + /** + * Test that ignore_malformed parameter correctly parses + */ + public void testIgnoreMalformedParsing() throws IOException { + String mapping = XContentFactory.jsonBuilder().startObject().startObject("type1") + .startObject("properties").startObject("location") + .field("type", "geo_shape") + .field("ignore_malformed", "true") + .endObject().endObject() + .endObject().endObject().string(); + + DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); + FieldMapper fieldMapper = defaultMapper.mappers().getMapper("location"); + assertThat(fieldMapper, instanceOf(GeoShapeFieldMapper.class)); + + Explicit ignoreMalformed = ((GeoShapeFieldMapper)fieldMapper).ignoreMalformed(); + assertThat(ignoreMalformed.value(), equalTo(true)); + + // explicit false ignore_malformed test + mapping = XContentFactory.jsonBuilder().startObject().startObject("type1") + .startObject("properties").startObject("location") + .field("type", "geo_shape") + .field("ignore_malformed", "false") + .endObject().endObject() + .endObject().endObject().string(); + + defaultMapper = createIndex("test2").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping)); + fieldMapper = defaultMapper.mappers().getMapper("location"); + assertThat(fieldMapper, instanceOf(GeoShapeFieldMapper.class)); + + ignoreMalformed = ((GeoShapeFieldMapper)fieldMapper).ignoreMalformed(); + assertThat(ignoreMalformed.explicit(), equalTo(true)); + assertThat(ignoreMalformed.value(), equalTo(false)); + } + public void testGeohashConfiguration() throws IOException { String mapping = XContentFactory.jsonBuilder().startObject().startObject("type1") .startObject("properties").startObject("location") diff --git a/core/src/test/java/org/elasticsearch/index/query/GeoShapeQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/GeoShapeQueryBuilderTests.java index f1df9ff82a1ac..077f0f48d004a 100644 --- a/core/src/test/java/org/elasticsearch/index/query/GeoShapeQueryBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/GeoShapeQueryBuilderTests.java @@ -32,7 +32,6 @@ import org.elasticsearch.common.geo.SpatialStrategy; import org.elasticsearch.common.geo.builders.EnvelopeBuilder; import org.elasticsearch.common.geo.builders.ShapeBuilder; -import org.elasticsearch.common.geo.builders.ShapeBuilders; import org.elasticsearch.common.io.stream.BytesStreamOutput; import 
org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; @@ -200,7 +199,7 @@ public void testInvalidRelation() throws IOException { // see #3878 public void testThatXContentSerializationInsideOfArrayWorks() throws Exception { - EnvelopeBuilder envelopeBuilder = ShapeBuilders.newEnvelope(new Coordinate(0, 0), new Coordinate(10, 10)); + EnvelopeBuilder envelopeBuilder = new EnvelopeBuilder(new Coordinate(0, 0), new Coordinate(10, 10)); GeoShapeQueryBuilder geoQuery = QueryBuilders.geoShapeQuery("searchGeometry", envelopeBuilder); JsonXContent.contentBuilder().startArray().value(geoQuery).endArray(); } diff --git a/core/src/test/java/org/elasticsearch/indices/settings/UpdateSettingsIT.java b/core/src/test/java/org/elasticsearch/indices/settings/UpdateSettingsIT.java index e8c1604c79915..7ff0725449e8d 100644 --- a/core/src/test/java/org/elasticsearch/indices/settings/UpdateSettingsIT.java +++ b/core/src/test/java/org/elasticsearch/indices/settings/UpdateSettingsIT.java @@ -85,6 +85,17 @@ protected Collection> nodePlugins() { public static class DummySettingPlugin extends Plugin { public static final Setting DUMMY_SETTING = Setting.simpleString("index.dummy", Setting.Property.IndexScope, Setting.Property.Dynamic); + + public static final Setting.AffixSetting DUMMY_ACCOUNT_USER = Setting.affixKeySetting("index.acc.", "user", + k -> Setting.simpleString(k, Setting.Property.IndexScope, Setting.Property.Dynamic)); + public static final Setting DUMMY_ACCOUNT_PW = Setting.affixKeySetting("index.acc.", "pw", + k -> Setting.simpleString(k, Setting.Property.IndexScope, Setting.Property.Dynamic), DUMMY_ACCOUNT_USER); + + public static final Setting.AffixSetting DUMMY_ACCOUNT_USER_CLUSTER = Setting.affixKeySetting("cluster.acc.", "user", + k -> Setting.simpleString(k, Setting.Property.NodeScope, Setting.Property.Dynamic)); + public static final Setting DUMMY_ACCOUNT_PW_CLUSTER = Setting.affixKeySetting("cluster.acc.", "pw", + k -> Setting.simpleString(k, Setting.Property.NodeScope, Setting.Property.Dynamic), DUMMY_ACCOUNT_USER_CLUSTER); + @Override public void onIndexModule(IndexModule indexModule) { indexModule.addSettingsUpdateConsumer(DUMMY_SETTING, (s) -> {}, (s) -> { @@ -95,7 +106,8 @@ public void onIndexModule(IndexModule indexModule) { @Override public List> getSettings() { - return Collections.singletonList(DUMMY_SETTING); + return Arrays.asList(DUMMY_SETTING, DUMMY_ACCOUNT_PW, DUMMY_ACCOUNT_USER, + DUMMY_ACCOUNT_PW_CLUSTER, DUMMY_ACCOUNT_USER_CLUSTER); } } @@ -112,6 +124,124 @@ public List> getSettings() { } } + public void testUpdateDependentClusterSettings() { + IllegalArgumentException iae = expectThrows(IllegalArgumentException.class, () -> + client().admin().cluster().prepareUpdateSettings().setPersistentSettings(Settings.builder() + .put("cluster.acc.test.pw", "asdf")).get()); + assertEquals("Missing required setting [cluster.acc.test.user] for setting [cluster.acc.test.pw]", iae.getMessage()); + + iae = expectThrows(IllegalArgumentException.class, () -> + client().admin().cluster().prepareUpdateSettings().setTransientSettings(Settings.builder() + .put("cluster.acc.test.pw", "asdf")).get()); + assertEquals("Missing required setting [cluster.acc.test.user] for setting [cluster.acc.test.pw]", iae.getMessage()); + + iae = expectThrows(IllegalArgumentException.class, () -> + client().admin().cluster().prepareUpdateSettings().setTransientSettings(Settings.builder() + .put("cluster.acc.test.pw", "asdf")).setPersistentSettings(Settings.builder() 
+ .put("cluster.acc.test.user", "asdf")).get()); + assertEquals("Missing required setting [cluster.acc.test.user] for setting [cluster.acc.test.pw]", iae.getMessage()); + + if (randomBoolean()) { + client().admin().cluster().prepareUpdateSettings().setTransientSettings(Settings.builder() + .put("cluster.acc.test.pw", "asdf") + .put("cluster.acc.test.user", "asdf")).get(); + iae = expectThrows(IllegalArgumentException.class, () -> + client().admin().cluster().prepareUpdateSettings().setTransientSettings(Settings.builder() + .putNull("cluster.acc.test.user")).get()); + assertEquals("Missing required setting [cluster.acc.test.user] for setting [cluster.acc.test.pw]", iae.getMessage()); + client().admin().cluster().prepareUpdateSettings().setTransientSettings(Settings.builder() + .putNull("cluster.acc.test.pw") + .putNull("cluster.acc.test.user")).get(); + } else { + client().admin().cluster().prepareUpdateSettings().setPersistentSettings(Settings.builder() + .put("cluster.acc.test.pw", "asdf") + .put("cluster.acc.test.user", "asdf")).get(); + + iae = expectThrows(IllegalArgumentException.class, () -> + client().admin().cluster().prepareUpdateSettings().setPersistentSettings(Settings.builder() + .putNull("cluster.acc.test.user")).get()); + assertEquals("Missing required setting [cluster.acc.test.user] for setting [cluster.acc.test.pw]", iae.getMessage()); + + client().admin().cluster().prepareUpdateSettings().setPersistentSettings(Settings.builder() + .putNull("cluster.acc.test.pw") + .putNull("cluster.acc.test.user")).get(); + } + + } + + public void testUpdateDependentIndexSettings() { + IllegalArgumentException iae = expectThrows(IllegalArgumentException.class, () -> + prepareCreate("test", Settings.builder().put("index.acc.test.pw", "asdf")).get()); + assertEquals("Missing required setting [index.acc.test.user] for setting [index.acc.test.pw]", iae.getMessage()); + + createIndex("test"); + for (int i = 0; i < 2; i++) { + if (i == 1) { + // now do it on a closed index + client().admin().indices().prepareClose("test").get(); + } + + iae = expectThrows(IllegalArgumentException.class, () -> + client() + .admin() + .indices() + .prepareUpdateSettings("test") + .setSettings( + Settings.builder() + .put("index.acc.test.pw", "asdf")) + .execute() + .actionGet()); + assertEquals("Missing required setting [index.acc.test.user] for setting [index.acc.test.pw]", iae.getMessage()); + + // user has no dependency + client() + .admin() + .indices() + .prepareUpdateSettings("test") + .setSettings( + Settings.builder() + .put("index.acc.test.user", "asdf")) + .execute() + .actionGet(); + + // now we are consistent + client() + .admin() + .indices() + .prepareUpdateSettings("test") + .setSettings( + Settings.builder() + .put("index.acc.test.pw", "test")) + .execute() + .actionGet(); + + // now try to remove it and make sure it fails + iae = expectThrows(IllegalArgumentException.class, () -> + client() + .admin() + .indices() + .prepareUpdateSettings("test") + .setSettings( + Settings.builder() + .putNull("index.acc.test.user")) + .execute() + .actionGet()); + assertEquals("Missing required setting [index.acc.test.user] for setting [index.acc.test.pw]", iae.getMessage()); + + // now we are consistent + client() + .admin() + .indices() + .prepareUpdateSettings("test") + .setSettings( + Settings.builder() + .putNull("index.acc.test.pw") + .putNull("index.acc.test.user")) + .execute() + .actionGet(); + } + } + public void testResetDefault() { createIndex("test"); diff --git 
a/core/src/test/java/org/elasticsearch/search/SearchRequestTests.java b/core/src/test/java/org/elasticsearch/search/SearchRequestTests.java index d37b8b4b13392..eb643885e830a 100644 --- a/core/src/test/java/org/elasticsearch/search/SearchRequestTests.java +++ b/core/src/test/java/org/elasticsearch/search/SearchRequestTests.java @@ -91,8 +91,10 @@ public void testValidate() throws IOException { assertNull(validationErrors); } { - // disabeling `track_total_hits` isn't valid in scroll context + // disabling `track_total_hits` isn't valid in scroll context SearchRequest searchRequest = createSearchRequest().source(new SearchSourceBuilder()); + // make sure we don't set the request cache for a scroll query + searchRequest.requestCache(false); searchRequest.scroll(new TimeValue(1000)); searchRequest.source().trackTotalHits(false); ActionRequestValidationException validationErrors = searchRequest.validate(); @@ -103,6 +105,8 @@ public void testValidate() throws IOException { { // scroll and `from` isn't valid SearchRequest searchRequest = createSearchRequest().source(new SearchSourceBuilder()); + // make sure we don't set the request cache for a scroll query + searchRequest.requestCache(false); searchRequest.scroll(new TimeValue(1000)); searchRequest.source().from(10); ActionRequestValidationException validationErrors = searchRequest.validate(); diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalExtendedStatsTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalExtendedStatsTests.java index 9bc07a251d8a5..143ad4553c7dd 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalExtendedStatsTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalExtendedStatsTests.java @@ -82,7 +82,7 @@ protected void assertReduced(InternalExtendedStats reduced, List - "price_field": 2 <2> - } - }, - "status_field": { - "type": "keyword" - }, - "price_field": { - "type": "long" - }, - "field": { - "type": "text" - } - } - } - } -} --------------------------------------------------- -// CONSOLE - -<1> A boost of zero hints to the percolator that if there are other clauses in a conjunction query then these should be - preferred over this one. - -<2> Any boost higher than 1 overrides the default behaviour when it comes to selecting the best clause. The clause - that has the field with the highest boost will be selected from a conjunction query for extraction. - -The steps the percolator field mapper takes when selecting a clause from a conjunction query: - -* If there are clauses that have boosted fields then the clause with highest boost field is selected. -* If there are range based clauses and term based clauses then term based clauses are picked over range based clauses -* From all term based clauses the clause with longest term is picked. -* In the case when there are only range based clauses then the range clause with smallest range is picked over clauses with wider ranges. 
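The clause selection steps listed above are removed by this change in favour of the covering-query / `minimum_should_match` based candidate matching introduced later in this diff. For readers of the patch, the heuristic they describe boils down to "prefer the longest extracted term". A minimal, illustrative sketch follows; the class and method names are hypothetical and do not exist in the codebase:

["source","java"]
--------------------------------------------------
import java.util.List;

import org.apache.lucene.index.Term;

// Hypothetical sketch of the longest-term heuristic described above; not part of this patch.
final class ClauseSelectionSketch {

    /**
     * From the term clauses of a conjunction, keep the longest term: longer terms
     * tend to be rarer and therefore make more selective candidate keys.
     */
    static Term selectLongestTerm(List<Term> conjunctionTerms) {
        Term longest = conjunctionTerms.get(0);
        for (Term term : conjunctionTerms) {
            if (term.bytes().length > longest.bytes().length) {
                longest = term;
            }
        }
        return longest;
    }
}
--------------------------------------------------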
- [float] ==== Reindexing your percolator queries diff --git a/docs/reference/migration/migrate_7_0/search.asciidoc b/docs/reference/migration/migrate_7_0/search.asciidoc index a2e5d1ccf8563..12847354cf895 100644 --- a/docs/reference/migration/migrate_7_0/search.asciidoc +++ b/docs/reference/migration/migrate_7_0/search.asciidoc @@ -33,3 +33,10 @@ The Search API returns `400 - Bad request` while it would previously return * the number of slices is too large * keep alive for scroll is too large * number of filters in the adjacency matrix aggregation is too large + + +==== Scroll queries cannot use the request_cache anymore + +Setting `request_cache:true` on a query that creates a scroll ('scroll=1m`) +has been deprecated in 6 and will now return a `400 - Bad request`. +Scroll queries are not meant to be cached. diff --git a/docs/reference/setup/install.asciidoc b/docs/reference/setup/install.asciidoc index babdccc2d95fe..49709ba1d6dc1 100644 --- a/docs/reference/setup/install.asciidoc +++ b/docs/reference/setup/install.asciidoc @@ -41,7 +41,7 @@ Images are available for running Elasticsearch as Docker containers. They may be downloaded from the Elastic Docker Registry. The default image ships with {xpack-ref}/index.html[X-Pack] pre-installed. + -<> +{ref}/docker.html[Install {es} with Docker] [float] [[config-mgmt-tools]] @@ -66,4 +66,9 @@ include::install/rpm.asciidoc[] include::install/windows.asciidoc[] -include::install/docker.asciidoc[] +ifdef::include-xpack[] +:edit_url!: +include::{xes-repo-dir}/setup/docker.asciidoc[] + +:edit_url: +endif::include-xpack[] diff --git a/docs/reference/setup/install/docker.asciidoc b/docs/reference/setup/install/docker.asciidoc deleted file mode 100644 index 1bcdefc5bc2b5..0000000000000 --- a/docs/reference/setup/install/docker.asciidoc +++ /dev/null @@ -1,336 +0,0 @@ -[[docker]] -=== Install Elasticsearch with Docker - -Elasticsearch is also available as Docker images. -The images use https://hub.docker.com/_/centos/[centos:7] as the base image and -are available with {xpack-ref}/xpack-introduction.html[X-Pack]. - -A list of all published Docker images and tags can be found in https://www.docker.elastic.co[www.docker.elastic.co]. The source code can be found -on https://github.com/elastic/elasticsearch-docker/tree/{branch}[GitHub]. - -==== Image types - -The images are available in three different configurations or "flavors". The -`basic` flavor, which is the default, ships with X-Pack Basic features -pre-installed and automatically activated with a free licence. The `platinum` -flavor features all X-Pack functionally under a 30-day trial licence. The `oss` -flavor does not include X-Pack, and contains only open-source Elasticsearch. - -NOTE: {xpack-ref}/xpack-security.html[X-Pack Security] is enabled in the `platinum` -image. To access your cluster, it's necessary to set an initial password for the -`elastic` user. The initial password can be set at start up time via the -`ELASTIC_PASSWORD` environment variable: - -["source","txt",subs="attributes"] --------------------------------------------- -docker run -e ELASTIC_PASSWORD=MagicWord {docker-repo}-platinum:{version} --------------------------------------------- - -NOTE: The `platinum` image includes a trial license for 30 days. After that, you -can obtain one of the https://www.elastic.co/subscriptions[available -subscriptions] or revert to a Basic licence. The Basic license is free and -includes a selection of X-Pack features. 
- -Obtaining Elasticsearch for Docker is as simple as issuing a +docker pull+ command against the Elastic Docker registry. - -ifeval::["{release-state}"=="unreleased"] - -WARNING: Version {version} of Elasticsearch has not yet been released, so no -Docker image is currently available for this version. - -endif::[] - -ifeval::["{release-state}"!="unreleased"] - -Docker images can be retrieved with the following commands: - -["source","sh",subs="attributes"] --------------------------------------------- -docker pull {docker-repo}:{version} -docker pull {docker-repo}-platinum:{version} -docker pull {docker-repo}-oss:{version} --------------------------------------------- - -endif::[] - -[[docker-cli-run]] -==== Running Elasticsearch from the command line - -[[docker-cli-run-dev-mode]] -===== Development mode - -ifeval::["{release-state}"=="unreleased"] - -WARNING: Version {version} of the Elasticsearch Docker image has not yet been released. - -endif::[] - -ifeval::["{release-state}"!="unreleased"] - -Elasticsearch can be quickly started for development or testing use with the following command: - -["source","sh",subs="attributes"] --------------------------------------------- -docker run -p 9200:9200 -p 9300:9300 -e "discovery.type=single-node" {docker-image} --------------------------------------------- - -endif::[] - -[[docker-cli-run-prod-mode]] -===== Production mode - -[[docker-prod-prerequisites]] -[IMPORTANT] -========================= - -The `vm.max_map_count` kernel setting needs to be set to at least `262144` for production use. -Depending on your platform: - -* Linux -+ -The `vm.max_map_count` setting should be set permanently in /etc/sysctl.conf: -+ -[source,sh] --------------------------------------------- -$ grep vm.max_map_count /etc/sysctl.conf -vm.max_map_count=262144 ----------------------------------- -+ -To apply the setting on a live system type: `sysctl -w vm.max_map_count=262144` -+ -* macOS with https://docs.docker.com/engine/installation/mac/#/docker-for-mac[Docker for Mac] -+ -The `vm.max_map_count` setting must be set within the xhyve virtual machine: -+ -["source","sh"] --------------------------------------------- -$ screen ~/Library/Containers/com.docker.docker/Data/com.docker.driver.amd64-linux/tty --------------------------------------------- -+ -Log in with 'root' and no password. -Then configure the `sysctl` setting as you would for Linux: -+ -["source","sh"] --------------------------------------------- -sysctl -w vm.max_map_count=262144 --------------------------------------------- -+ -* Windows and macOS with https://www.docker.com/products/docker-toolbox[Docker Toolbox] -+ -The `vm.max_map_count` setting must be set via docker-machine: -+ -["source","txt"] --------------------------------------------- -docker-machine ssh -sudo sysctl -w vm.max_map_count=262144 --------------------------------------------- -========================= - -The following example brings up a cluster comprising two Elasticsearch nodes. -To bring up the cluster, use the <> and just type: - -ifeval::["{release-state}"=="unreleased"] - -WARNING: Version {version} of Elasticsearch has not yet been released, so a -`docker-compose.yml` is not available for this version. - -endif::[] - -ifeval::["{release-state}"!="unreleased"] - -["source","sh"] --------------------------------------------- -docker-compose up --------------------------------------------- - -endif::[] - -[NOTE] -`docker-compose` is not pre-installed with Docker on Linux. 
-Instructions for installing it can be found on the -https://docs.docker.com/compose/install/#install-using-pip[Docker Compose webpage]. - -The node `elasticsearch` listens on `localhost:9200` while `elasticsearch2` -talks to `elasticsearch` over a Docker network. - -This example also uses https://docs.docker.com/engine/tutorials/dockervolumes[Docker named volumes], called `esdata1` and `esdata2` which will be created if not already present. - -[[docker-prod-cluster-composefile]] -`docker-compose.yml`: -ifeval::["{release-state}"=="unreleased"] - -WARNING: Version {version} of Elasticsearch has not yet been released, so a -`docker-compose.yml` is not available for this version. - -endif::[] - -ifeval::["{release-state}"!="unreleased"] -["source","yaml",subs="attributes"] --------------------------------------------- -version: 2.2 -services: - elasticsearch: - image: {docker-image} - container_name: elasticsearch - environment: - - cluster.name=docker-cluster - - bootstrap.memory_lock=true - - "ES_JAVA_OPTS=-Xms512m -Xmx512m" - ulimits: - memlock: - soft: -1 - hard: -1 - volumes: - - esdata1:/usr/share/elasticsearch/data - ports: - - 9200:9200 - networks: - - esnet - elasticsearch2: - image: {docker-image} - container_name: elasticsearch2 - environment: - - cluster.name=docker-cluster - - bootstrap.memory_lock=true - - "ES_JAVA_OPTS=-Xms512m -Xmx512m" - - "discovery.zen.ping.unicast.hosts=elasticsearch" - ulimits: - memlock: - soft: -1 - hard: -1 - volumes: - - esdata2:/usr/share/elasticsearch/data - networks: - - esnet - -volumes: - esdata1: - driver: local - esdata2: - driver: local - -networks: - esnet: --------------------------------------------- -endif::[] - -To stop the cluster, type `docker-compose down`. Data volumes will persist, so it's possible to start the cluster again with the same data using `docker-compose up`. -To destroy the cluster **and the data volumes**, just type `docker-compose down -v`. - -===== Inspect status of cluster: - -["source","txt"] --------------------------------------------- -curl http://127.0.0.1:9200/_cat/health -1472225929 15:38:49 docker-cluster green 2 2 4 2 0 0 0 0 - 100.0% --------------------------------------------- -// NOTCONSOLE - -Log messages go to the console and are handled by the configured Docker logging driver. By default you can access logs with `docker logs`. - -[[docker-configuration-methods]] -==== Configuring Elasticsearch with Docker - -Elasticsearch loads its configuration from files under `/usr/share/elasticsearch/config/`. These configuration files are documented in <> and <>. - -The image offers several methods for configuring Elasticsearch settings with the conventional approach being to provide customized files, i.e. `elasticsearch.yml`, but it's also possible to use environment variables to set options: - -===== A. Present the parameters via Docker environment variables -For example, to define the cluster name with `docker run` you can pass `-e "cluster.name=mynewclustername"`. Double quotes are required. - -===== B. Bind-mounted configuration -Create your custom config file and mount this over the image's corresponding file. 
-For example, bind-mounting a `custom_elasticsearch.yml` with `docker run` can be accomplished with the parameter: - -["source","sh"] --------------------------------------------- --v full_path_to/custom_elasticsearch.yml:/usr/share/elasticsearch/config/elasticsearch.yml --------------------------------------------- -IMPORTANT: The container **runs Elasticsearch as user `elasticsearch` using uid:gid `1000:1000`**. Bind mounted host directories and files, such as `custom_elasticsearch.yml` above, **need to be accessible by this user**. For the https://www.elastic.co/guide/en/elasticsearch/reference/current/important-settings.html#path-settings[data and log dirs], such as `/usr/share/elasticsearch/data`, write access is required as well. Also see note 1 below. - -===== C. Customized image -In some environments, it may make more sense to prepare a custom image containing your configuration. A `Dockerfile` to achieve this may be as simple as: - -["source","sh",subs="attributes"] --------------------------------------------- -FROM docker.elastic.co/elasticsearch/elasticsearch:{version} -COPY --chown=elasticsearch:elasticsearch elasticsearch.yml /usr/share/elasticsearch/config/ --------------------------------------------- - -You could then build and try the image with something like: - -["source","sh"] --------------------------------------------- -docker build --tag=elasticsearch-custom . -docker run -ti -v /usr/share/elasticsearch/data elasticsearch-custom --------------------------------------------- - -===== D. Override the image's default https://docs.docker.com/engine/reference/run/#cmd-default-command-or-options[CMD] - -Options can be passed as command-line options to the Elasticsearch process by -overriding the default command for the image. For example: - -["source","sh"] --------------------------------------------- -docker run bin/elasticsearch -Ecluster.name=mynewclustername --------------------------------------------- - -==== Notes for production use and defaults - -We have collected a number of best practices for production use. -Any Docker parameters mentioned below assume the use of `docker run`. - -. By default, Elasticsearch runs inside the container as user `elasticsearch` using uid:gid `1000:1000`. -+ -CAUTION: One exception is https://docs.openshift.com/container-platform/3.6/creating_images/guidelines.html#openshift-specific-guidelines[Openshift] which runs containers using an arbitrarily assigned user ID. Openshift will present persistent volumes with the gid set to `0` which will work without any adjustments. -+ -If you are bind-mounting a local directory or file, ensure it is readable by this user, while the <> additionally require write access. A good strategy is to grant group access to gid `1000` or `0` for the local directory. As an example, to prepare a local directory for storing data through a bind-mount: -+ - mkdir esdatadir - chmod g+rwx esdatadir - chgrp 1000 esdatadir -+ -As a last resort, you can also force the container to mutate the ownership of any bind-mounts used for the <> through the environment variable `TAKE_FILE_OWNERSHIP`; in this case they will be owned by uid:gid `1000:0` providing read/write access to the elasticsearch process as required. -+ -. It is important to ensure increased ulimits for <> and <> are available for the Elasticsearch containers. 
Verify the https://github.com/moby/moby/tree/ea4d1243953e6b652082305a9c3cda8656edab26/contrib/init[init system] for the Docker daemon is already setting those to acceptable values and, if needed, adjust them in the Daemon, or override them per container, for example using `docker run`: -+ - --ulimit nofile=65536:65536 -+ -NOTE: One way of checking the Docker daemon defaults for the aforementioned ulimits is by running: -+ - docker run --rm centos:7 /bin/bash -c 'ulimit -Hn && ulimit -Sn && ulimit -Hu && ulimit -Su' -+ -. Swapping needs to be disabled for performance and node stability. This can be -achieved through any of the methods mentioned in the -<>. If you opt for the -`bootstrap.memory_lock: true` approach, apart from defining it through any of -the <>, you will -additionally need the `memlock: true` ulimit, either defined in the -https://docs.docker.com/engine/reference/commandline/dockerd/#default-ulimits[Docker -Daemon] or specifically set for the container. This is demonstrated above in the -<>. If using `docker run`: -+ - -e "bootstrap.memory_lock=true" --ulimit memlock=-1:-1 -+ -. The image https://docs.docker.com/engine/reference/builder/#/expose[exposes] TCP ports 9200 and 9300. For clusters it is recommended to randomize the published ports with `--publish-all`, unless you are pinning one container per host. -+ -. Use the `ES_JAVA_OPTS` environment variable to set heap size, e.g. to use 16GB -use `-e ES_JAVA_OPTS="-Xms16g -Xmx16g"` with `docker run`. -+ -. Pin your deployments to a specific version of the Elasticsearch Docker image, e.g. +docker.elastic.co/elasticsearch/elasticsearch:{version}+. -+ -. Always use a volume bound on `/usr/share/elasticsearch/data`, as shown in the <>, for the following reasons: -+ -.. The data of your elasticsearch node won't be lost if the container is killed -.. Elasticsearch is I/O sensitive and the Docker storage driver is not ideal for fast I/O -.. It allows the use of advanced https://docs.docker.com/engine/extend/plugins/#volume-plugins[Docker volume plugins] -+ -. If you are using the devicemapper storage driver, make sure you are not using -the default `loop-lvm` mode. Configure docker-engine to use -https://docs.docker.com/engine/userguide/storagedriver/device-mapper-driver/#configure-docker-with-devicemapper[direct-lvm] -instead. -+ -. Consider centralizing your logs by using a different https://docs.docker.com/engine/admin/logging/overview/[logging driver]. Also note that the default json-file logging driver is not ideally suited for production use. 
- - -include::next-steps.asciidoc[] diff --git a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/JsonProcessor.java b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/JsonProcessor.java index d4c7efe0d5092..8ca31787b5aaa 100644 --- a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/JsonProcessor.java +++ b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/JsonProcessor.java @@ -19,14 +19,24 @@ package org.elasticsearch.ingest.common; +import com.fasterxml.jackson.core.JsonParseException; +import org.apache.lucene.util.BytesRef; import org.elasticsearch.ElasticsearchParseException; +import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentParserUtils; +import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.common.xcontent.json.JsonXContent; +import org.elasticsearch.common.xcontent.json.JsonXContentParser; import org.elasticsearch.ingest.AbstractProcessor; import org.elasticsearch.ingest.ConfigurationUtils; import org.elasticsearch.ingest.IngestDocument; import org.elasticsearch.ingest.Processor; +import java.io.IOException; import java.util.Map; import static org.elasticsearch.ingest.ConfigurationUtils.newConfigurationException; @@ -64,17 +74,36 @@ boolean isAddToRoot() { @Override public void execute(IngestDocument document) throws Exception { - String stringValue = document.getFieldValue(field, String.class); - try { - Map mapValue = XContentHelper.convertToMap(JsonXContent.jsonXContent, stringValue, false); - if (addToRoot) { - for (Map.Entry entry : mapValue.entrySet()) { + Object fieldValue = document.getFieldValue(field, Object.class); + BytesReference bytesRef = (fieldValue == null) ? 
new BytesArray("null") : new BytesArray(fieldValue.toString()); + try (XContentParser parser = JsonXContent.jsonXContent.createParser(NamedXContentRegistry.EMPTY, bytesRef)) { + XContentParser.Token token = parser.nextToken(); + Object value = null; + if (token == XContentParser.Token.VALUE_NULL) { + value = null; + } else if (token == XContentParser.Token.VALUE_STRING) { + value = parser.text(); + } else if (token == XContentParser.Token.VALUE_NUMBER) { + value = parser.numberValue(); + } else if (token == XContentParser.Token.VALUE_BOOLEAN) { + value = parser.booleanValue(); + } else if (token == XContentParser.Token.START_OBJECT) { + value = parser.map(); + } else if (token == XContentParser.Token.START_ARRAY) { + value = parser.list(); + } else if (token == XContentParser.Token.VALUE_EMBEDDED_OBJECT) { + throw new IllegalArgumentException("cannot read binary value"); + } + if (addToRoot && (value instanceof Map)) { + for (Map.Entry entry : ((Map) value).entrySet()) { document.setFieldValue(entry.getKey(), entry.getValue()); } + } else if (addToRoot) { + throw new IllegalArgumentException("cannot add non-map fields to root of document"); } else { - document.setFieldValue(targetField, mapValue); + document.setFieldValue(targetField, value); } - } catch (ElasticsearchParseException e) { + } catch (IOException e) { throw new IllegalArgumentException(e); } } diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/JsonProcessorTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/JsonProcessorTests.java index 66ffb87e8da14..ef17935962d0e 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/JsonProcessorTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/JsonProcessorTests.java @@ -21,15 +21,19 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.ingest.IngestDocument; import org.elasticsearch.ingest.RandomDocumentPicks; import org.elasticsearch.test.ESTestCase; +import java.util.Arrays; import java.util.HashMap; +import java.util.List; import java.util.Map; import static org.elasticsearch.ingest.IngestDocumentMatcher.assertIngestDocument; +import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; public class JsonProcessorTests extends ESTestCase { @@ -44,7 +48,7 @@ public void testExecute() throws Exception { Map randomJsonMap = RandomDocumentPicks.randomSource(random()); XContentBuilder builder = JsonXContent.contentBuilder().map(randomJsonMap); - String randomJson = XContentHelper.convertToJson(builder.bytes(), false); + String randomJson = XContentHelper.convertToJson(builder.bytes(), false, XContentType.JSON); document.put(randomField, randomJson); IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document); @@ -53,16 +57,84 @@ public void testExecute() throws Exception { assertIngestDocument(ingestDocument.getFieldValue(randomTargetField, Object.class), jsonified); } - public void testInvalidJson() { + public void testInvalidValue() { JsonProcessor jsonProcessor = new JsonProcessor("tag", "field", "target_field", false); Map document = new HashMap<>(); - document.put("field", "invalid json"); + document.put("field", "blah blah"); IngestDocument ingestDocument = 
RandomDocumentPicks.randomIngestDocument(random(), document); Exception exception = expectThrows(IllegalArgumentException.class, () -> jsonProcessor.execute(ingestDocument)); - assertThat(exception.getCause().getCause().getMessage(), equalTo("Unrecognized token" - + " 'invalid': was expecting ('true', 'false' or 'null')\n" - + " at [Source: invalid json; line: 1, column: 8]")); + assertThat(exception.getCause().getMessage(), containsString("Unrecognized token 'blah': " + + "was expecting ('true', 'false' or 'null')")); + } + + public void testByteArray() { + JsonProcessor jsonProcessor = new JsonProcessor("tag", "field", "target_field", false); + Map document = new HashMap<>(); + document.put("field", new byte[] { 0, 1 }); + IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document); + + Exception exception = expectThrows(IllegalArgumentException.class, () -> jsonProcessor.execute(ingestDocument)); + assertThat(exception.getCause().getMessage(), containsString("Unrecognized token 'B': was expecting ('true', 'false' or 'null')")); + } + + public void testNull() throws Exception { + JsonProcessor jsonProcessor = new JsonProcessor("tag", "field", "target_field", false); + Map document = new HashMap<>(); + document.put("field", null); + IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document); + jsonProcessor.execute(ingestDocument); + assertNull(ingestDocument.getFieldValue("target_field", Object.class)); + } + + public void testBoolean() throws Exception { + JsonProcessor jsonProcessor = new JsonProcessor("tag", "field", "target_field", false); + Map document = new HashMap<>(); + boolean value = true; + document.put("field", value); + IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document); + jsonProcessor.execute(ingestDocument); + assertThat(ingestDocument.getFieldValue("target_field", Object.class), equalTo(value)); + } + + public void testInteger() throws Exception { + JsonProcessor jsonProcessor = new JsonProcessor("tag", "field", "target_field", false); + Map document = new HashMap<>(); + int value = 3; + document.put("field", value); + IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document); + jsonProcessor.execute(ingestDocument); + assertThat(ingestDocument.getFieldValue("target_field", Object.class), equalTo(value)); + } + + public void testDouble() throws Exception { + JsonProcessor jsonProcessor = new JsonProcessor("tag", "field", "target_field", false); + Map document = new HashMap<>(); + double value = 3.0; + document.put("field", value); + IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document); + jsonProcessor.execute(ingestDocument); + assertThat(ingestDocument.getFieldValue("target_field", Object.class), equalTo(value)); + } + + public void testString() throws Exception { + JsonProcessor jsonProcessor = new JsonProcessor("tag", "field", "target_field", false); + Map document = new HashMap<>(); + String value = "hello world"; + document.put("field", "\"" + value + "\""); + IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document); + jsonProcessor.execute(ingestDocument); + assertThat(ingestDocument.getFieldValue("target_field", Object.class), equalTo(value)); + } + + public void testArray() throws Exception { + JsonProcessor jsonProcessor = new JsonProcessor("tag", "field", "target_field", false); + Map document = new HashMap<>(); + List value = 
Arrays.asList(true, true, false); + document.put("field", value.toString()); + IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document); + jsonProcessor.execute(ingestDocument); + assertThat(ingestDocument.getFieldValue("target_field", Object.class), equalTo(value)); } public void testFieldMissing() { @@ -96,4 +168,13 @@ public void testAddToRoot() throws Exception { assertIngestDocument(ingestDocument, expectedIngestDocument); } + + public void testAddBoolToRoot() { + JsonProcessor jsonProcessor = new JsonProcessor("tag", "field", "target_field", true); + Map document = new HashMap<>(); + document.put("field", true); + IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document); + Exception exception = expectThrows(IllegalArgumentException.class, () -> jsonProcessor.execute(ingestDocument)); + assertThat(exception.getMessage(), containsString("cannot add non-map fields to root of document")); + } } diff --git a/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/140_json.yml b/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/140_json.yml index 3d9f6a97c081a..81761ba509e10 100644 --- a/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/140_json.yml +++ b/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/140_json.yml @@ -15,7 +15,32 @@ teardown: "processors": [ { "json" : { - "field" : "foo" + "field" : "foo_object" + } + }, + { + "json" : { + "field" : "foo_array" + } + }, + { + "json" : { + "field" : "foo_null" + } + }, + { + "json" : { + "field" : "foo_string" + } + }, + { + "json" : { + "field" : "foo_number" + } + }, + { + "json" : { + "field" : "foo_boolean" } } ] @@ -29,7 +54,12 @@ teardown: id: 1 pipeline: "1" body: { - foo: "{\"hello\": \"world\"}" + foo_object: "{\"hello\": \"world\"}", + foo_array: "[1, 2, 3]", + foo_null: null, + foo_string: "\"bla bla\"", + foo_number: 3, + foo_boolean: "true" } - do: @@ -37,4 +67,9 @@ teardown: index: test type: test id: 1 - - match: { _source.foo.hello: "world" } + - match: { _source.foo_object.hello: "world" } + - match: { _source.foo_array.0: 1 } + - match: { _source.foo_string: "bla bla" } + - match: { _source.foo_number: 3 } + - is_true: _source.foo_boolean + - is_false: _source.foo_null diff --git a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateQuery.java b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateQuery.java index 910c716db6934..be18c62d1f2e2 100644 --- a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateQuery.java +++ b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateQuery.java @@ -183,6 +183,10 @@ QueryStore getQueryStore() { return queryStore; } + Query getCandidateMatchesQuery() { + return candidateMatchesQuery; + } + // Comparing identity here to avoid being cached // Note that in theory if the same instance gets used multiple times it could still get cached, // however since we create a new query instance each time we this query this shouldn't happen and thus diff --git a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateQueryBuilder.java b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateQueryBuilder.java index db1b444dcd28e..f4e295d4863df 100644 --- a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateQueryBuilder.java +++ b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateQueryBuilder.java @@ -639,7 +639,7 @@ 
protected Analyzer getWrappedAnalyzer(String fieldName) { String name = this.name != null ? this.name : field; PercolatorFieldMapper.FieldType pft = (PercolatorFieldMapper.FieldType) fieldType; PercolateQuery.QueryStore queryStore = createStore(pft.queryBuilderField, percolateShardContext, mapUnmappedFieldsAsString); - return pft.percolateQuery(name, queryStore, documents, docSearcher); + return pft.percolateQuery(name, queryStore, documents, docSearcher, context.indexVersionCreated()); } public String getField() { diff --git a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorFieldMapper.java b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorFieldMapper.java index 06c1423eb238d..35bce1f4f4f3f 100644 --- a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorFieldMapper.java +++ b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorFieldMapper.java @@ -20,6 +20,7 @@ import org.apache.lucene.document.BinaryRange; import org.apache.lucene.document.Field; +import org.apache.lucene.document.NumericDocValuesField; import org.apache.lucene.index.DocValuesType; import org.apache.lucene.index.FieldInfo; import org.apache.lucene.index.IndexOptions; @@ -30,10 +31,12 @@ import org.apache.lucene.index.Term; import org.apache.lucene.index.Terms; import org.apache.lucene.index.TermsEnum; -import org.apache.lucene.search.BooleanClause.Occur; +import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.BooleanQuery; +import org.apache.lucene.search.CoveringQuery; import org.apache.lucene.search.DocValuesFieldExistsQuery; import org.apache.lucene.search.IndexSearcher; +import org.apache.lucene.search.LongValuesSource; import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.Query; import org.apache.lucene.search.TermInSetQuery; @@ -44,6 +47,7 @@ import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.hash.MurmurHash3; import org.elasticsearch.common.io.stream.OutputStreamStreamOutput; import org.elasticsearch.common.logging.DeprecationLogger; @@ -62,6 +66,7 @@ import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperParsingException; +import org.elasticsearch.index.mapper.NumberFieldMapper; import org.elasticsearch.index.mapper.ParseContext; import org.elasticsearch.index.mapper.RangeFieldMapper; import org.elasticsearch.index.mapper.RangeFieldMapper.RangeType; @@ -87,9 +92,6 @@ import java.util.Map; import java.util.function.Supplier; -import static org.elasticsearch.common.xcontent.support.XContentMapValues.isObject; -import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeFloatValue; -import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeStringValue; import static org.elasticsearch.index.query.AbstractQueryBuilder.parseInnerQueryBuilder; public class PercolatorFieldMapper extends FieldMapper { @@ -113,11 +115,11 @@ public class PercolatorFieldMapper extends FieldMapper { static final String EXTRACTION_RESULT_FIELD_NAME = "extraction_result"; static final String QUERY_BUILDER_FIELD_NAME = "query_builder_field"; static final String RANGE_FIELD_NAME = "range_field"; + static final String MINIMUM_SHOULD_MATCH_FIELD_NAME = "minimum_should_match_field"; static class 
Builder extends FieldMapper.Builder { private final Supplier queryShardContext; - private final Map boostFields = new HashMap<>(); Builder(String fieldName, Supplier queryShardContext) { super(fieldName, FIELD_TYPE, FIELD_TYPE); @@ -138,15 +140,13 @@ public PercolatorFieldMapper build(BuilderContext context) { // have to introduce a new field type... RangeFieldMapper rangeFieldMapper = createExtractedRangeFieldBuilder(RANGE_FIELD_NAME, RangeType.IP, context); fieldType.rangeField = rangeFieldMapper.fieldType(); + NumberFieldMapper minimumShouldMatchFieldMapper = createMinimumShouldMatchField(context); + fieldType.minimumShouldMatchField = minimumShouldMatchFieldMapper.fieldType(); context.path().remove(); setupFieldType(context); return new PercolatorFieldMapper(name(), fieldType, defaultFieldType, context.indexSettings(), multiFieldsBuilder.build(this, context), copyTo, queryShardContext, extractedTermsField, - extractionResultField, queryBuilderField, rangeFieldMapper, Collections.unmodifiableMap(boostFields)); - } - - void addBoostField(String field, float boost) { - this.boostFields.put(field, boost); + extractionResultField, queryBuilderField, rangeFieldMapper, minimumShouldMatchFieldMapper); } static KeywordFieldMapper createExtractQueryFieldBuilder(String name, BuilderContext context) { @@ -173,30 +173,23 @@ static RangeFieldMapper createExtractedRangeFieldBuilder(String name, RangeType return builder.build(context); } + static NumberFieldMapper createMinimumShouldMatchField(BuilderContext context) { + NumberFieldMapper.Builder builder = + new NumberFieldMapper.Builder(MINIMUM_SHOULD_MATCH_FIELD_NAME, NumberFieldMapper.NumberType.INTEGER); + builder.index(false); + builder.store(false); + builder.docValues(true); + builder.fieldType().setDocValuesType(DocValuesType.NUMERIC); + return builder.build(context); + } + } static class TypeParser implements FieldMapper.TypeParser { @Override public Builder parse(String name, Map node, ParserContext parserContext) throws MapperParsingException { - Builder builder = new Builder(name, parserContext.queryShardContextSupplier()); - for (Iterator> iterator = node.entrySet().iterator(); iterator.hasNext();) { - Map.Entry entry = iterator.next(); - String propName = entry.getKey(); - Object propNode = entry.getValue(); - if (propName.equals("boost_fields")) { - if (isObject(propNode)) { - for (Map.Entry innerEntry : ((Map) propNode).entrySet()) { - String fieldName = nodeStringValue(innerEntry.getKey(), null); - builder.addBoostField(fieldName, nodeFloatValue(innerEntry.getValue())); - } - } else { - throw new IllegalArgumentException("boost_fields [" + propNode + "] is not an object"); - } - iterator.remove(); - } - } - return builder; + return new Builder(name, parserContext.queryShardContextSupplier()); } } @@ -205,6 +198,7 @@ static class FieldType extends MappedFieldType { MappedFieldType queryTermsField; MappedFieldType extractionResultField; MappedFieldType queryBuilderField; + MappedFieldType minimumShouldMatchField; RangeFieldMapper.RangeFieldType rangeField; @@ -220,6 +214,7 @@ static class FieldType extends MappedFieldType { extractionResultField = ref.extractionResultField; queryBuilderField = ref.queryBuilderField; rangeField = ref.rangeField; + minimumShouldMatchField = ref.minimumShouldMatchField; } @Override @@ -247,23 +242,38 @@ public Query termQuery(Object value, QueryShardContext context) { } Query percolateQuery(String name, PercolateQuery.QueryStore queryStore, List documents, - IndexSearcher searcher) throws IOException { + 
IndexSearcher searcher) throws IOException { + IndexSearcher searcher, Version indexVersion) throws IOException { IndexReader indexReader = searcher.getIndexReader(); - Query candidateMatchesQuery = createCandidateQuery(indexReader); + Tuple<List<Query>, Boolean> t = createCandidateQueryClauses(indexReader); + BooleanQuery.Builder candidateQuery = new BooleanQuery.Builder(); + if (t.v2() && indexVersion.onOrAfter(Version.V_6_1_0)) { + LongValuesSource valuesSource = LongValuesSource.fromIntField(minimumShouldMatchField.name()); + candidateQuery.add(new CoveringQuery(t.v1(), valuesSource), BooleanClause.Occur.SHOULD); + } else { + for (Query query : t.v1()) { + candidateQuery.add(query, BooleanClause.Occur.SHOULD); + } + } + // include extractionResultField:failed, because docs with this term have no extractedTermsField + // and otherwise we would fail to return these docs. Docs that failed query term extraction + // always need to be verified by MemoryIndex: + candidateQuery.add(new TermQuery(new Term(extractionResultField.name(), EXTRACTION_FAILED)), BooleanClause.Occur.SHOULD); + Query verifiedMatchesQuery; - // We can only skip the MemoryIndex verification when percolating a single document. - // When the document being percolated contains a nested object field then the MemoryIndex contains multiple - // documents. In this case the term query that indicates whether memory index verification can be skipped - // can incorrectly indicate that non nested queries would match, while their nested variants would not. - if (indexReader.maxDoc() == 1) { + // We can only skip the MemoryIndex verification when percolating a single non-nested document. We cannot + // skip MemoryIndex verification when percolating multiple documents, because when terms and + // ranges are extracted from an IndexReader backed by a RAMDirectory holding multiple documents we do + // not know to which document the terms belong and for certain queries we would incorrectly emit candidate + // matches as actual matches. 
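+ // For example (illustrative field names, not from this patch): percolating the two documents
+ // {"field1": "a"} and {"field2": "b"} against the query field1:a AND field2:b finds both extracted
+ // terms in the shared in-memory index, so the candidate query matches, yet neither individual
+ // document satisfies the conjunction; only the MemoryIndex verification rejects it.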
+ if (t.v2() && indexReader.maxDoc() == 1) { verifiedMatchesQuery = new TermQuery(new Term(extractionResultField.name(), EXTRACTION_COMPLETE)); } else { - verifiedMatchesQuery = new MatchNoDocsQuery("multiple/nested docs, so no verified matches"); + verifiedMatchesQuery = new MatchNoDocsQuery("multiple or nested docs or CoveringQuery could not be used"); } - return new PercolateQuery(name, queryStore, documents, candidateMatchesQuery, searcher, verifiedMatchesQuery); + return new PercolateQuery(name, queryStore, documents, candidateQuery.build(), searcher, verifiedMatchesQuery); } - Query createCandidateQuery(IndexReader indexReader) throws IOException { + Tuple, Boolean> createCandidateQueryClauses(IndexReader indexReader) throws IOException { List extractedTerms = new ArrayList<>(); Map> encodedPointValuesByField = new HashMap<>(); @@ -290,14 +300,17 @@ Query createCandidateQuery(IndexReader indexReader) throws IOException { } } - BooleanQuery.Builder builder = new BooleanQuery.Builder(); - if (extractedTerms.size() != 0) { - builder.add(new TermInSetQuery(queryTermsField.name(), extractedTerms), Occur.SHOULD); + final boolean canUseMinimumShouldMatchField; + final List queries = new ArrayList<>(); + if (extractedTerms.size() + encodedPointValuesByField.size() <= BooleanQuery.getMaxClauseCount()) { + canUseMinimumShouldMatchField = true; + for (BytesRef extractedTerm : extractedTerms) { + queries.add(new TermQuery(new Term(queryTermsField.name(), extractedTerm))); + } + } else { + canUseMinimumShouldMatchField = false; + queries.add(new TermInSetQuery(queryTermsField.name(), extractedTerms)); } - // include extractionResultField:failed, because docs with this term have no extractedTermsField - // and otherwise we would fail to return these docs. Docs that failed query term extraction - // always need to be verified by MemoryIndex: - builder.add(new TermQuery(new Term(extractionResultField.name(), EXTRACTION_FAILED)), Occur.SHOULD); for (Map.Entry> entry : encodedPointValuesByField.entrySet()) { String rangeFieldName = entry.getKey(); @@ -305,9 +318,9 @@ Query createCandidateQuery(IndexReader indexReader) throws IOException { byte[] min = encodedPointValues.get(0); byte[] max = encodedPointValues.get(1); Query query = BinaryRange.newIntersectsQuery(rangeField.name(), encodeRange(rangeFieldName, min, max)); - builder.add(query, Occur.SHOULD); + queries.add(query); } - return builder.build(); + return new Tuple<>(queries, canUseMinimumShouldMatchField); } } @@ -317,24 +330,24 @@ Query createCandidateQuery(IndexReader indexReader) throws IOException { private KeywordFieldMapper queryTermsField; private KeywordFieldMapper extractionResultField; private BinaryFieldMapper queryBuilderField; + private NumberFieldMapper minimumShouldMatchFieldMapper; private RangeFieldMapper rangeFieldMapper; - private Map boostFields; PercolatorFieldMapper(String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType, - Settings indexSettings, MultiFields multiFields, CopyTo copyTo, - Supplier queryShardContext, - KeywordFieldMapper queryTermsField, KeywordFieldMapper extractionResultField, - BinaryFieldMapper queryBuilderField, RangeFieldMapper rangeFieldMapper, - Map boostFields) { + Settings indexSettings, MultiFields multiFields, CopyTo copyTo, + Supplier queryShardContext, + KeywordFieldMapper queryTermsField, KeywordFieldMapper extractionResultField, + BinaryFieldMapper queryBuilderField, RangeFieldMapper rangeFieldMapper, + NumberFieldMapper minimumShouldMatchFieldMapper) { 
super(simpleName, fieldType, defaultFieldType, indexSettings, multiFields, copyTo); this.queryShardContext = queryShardContext; this.queryTermsField = queryTermsField; this.extractionResultField = extractionResultField; this.queryBuilderField = queryBuilderField; + this.minimumShouldMatchFieldMapper = minimumShouldMatchFieldMapper; this.mapUnmappedFieldAsText = getMapUnmappedFieldAsText(indexSettings); this.rangeFieldMapper = rangeFieldMapper; - this.boostFields = boostFields; } private static boolean getMapUnmappedFieldAsText(Settings indexSettings) { @@ -361,6 +374,7 @@ public FieldMapper updateFieldType(Map fullNameToFieldT KeywordFieldMapper extractionResultUpdated = (KeywordFieldMapper) extractionResultField.updateFieldType(fullNameToFieldType); BinaryFieldMapper queryBuilderUpdated = (BinaryFieldMapper) queryBuilderField.updateFieldType(fullNameToFieldType); RangeFieldMapper rangeFieldMapperUpdated = (RangeFieldMapper) rangeFieldMapper.updateFieldType(fullNameToFieldType); + NumberFieldMapper msmFieldMapperUpdated = (NumberFieldMapper) minimumShouldMatchFieldMapper.updateFieldType(fullNameToFieldType); if (updated == this && queryTermsUpdated == queryTermsField && extractionResultUpdated == extractionResultField && queryBuilderUpdated == queryBuilderField && rangeFieldMapperUpdated == rangeFieldMapper) { @@ -373,6 +387,7 @@ public FieldMapper updateFieldType(Map fullNameToFieldT updated.extractionResultField = extractionResultUpdated; updated.queryBuilderField = queryBuilderUpdated; updated.rangeFieldMapper = rangeFieldMapperUpdated; + updated.minimumShouldMatchFieldMapper = msmFieldMapperUpdated; return updated; } @@ -429,7 +444,8 @@ void processQuery(Query query, ParseContext context) { FieldType pft = (FieldType) this.fieldType(); QueryAnalyzer.Result result; try { - result = QueryAnalyzer.analyze(query, boostFields); + Version indexVersion = context.mapperService().getIndexSettings().getIndexVersionCreated(); + result = QueryAnalyzer.analyze(query, indexVersion); } catch (QueryAnalyzer.UnsupportedQueryException e) { doc.add(new Field(pft.extractionResultField.name(), EXTRACTION_FAILED, extractionResultField.fieldType())); return; @@ -457,6 +473,9 @@ void processQuery(Query query, ParseContext context) { for (IndexableField field : fields) { context.doc().add(field); } + if (context.mapperService().getIndexSettings().getIndexVersionCreated().onOrAfter(Version.V_6_1_0)) { + doc.add(new NumericDocValuesField(minimumShouldMatchFieldMapper.name(), result.minimumShouldMatch)); + } } static Query parseQuery(QueryShardContext context, boolean mapUnmappedFieldsAsString, XContentParser parser) throws IOException { @@ -491,7 +510,9 @@ private static QueryBuilder parseQueryBuilder(XContentParser parser, XContentLoc @Override public Iterator iterator() { - return Arrays.asList(queryTermsField, extractionResultField, queryBuilderField, rangeFieldMapper).iterator(); + return Arrays.asList( + queryTermsField, extractionResultField, queryBuilderField, minimumShouldMatchFieldMapper, rangeFieldMapper + ).iterator(); } @Override @@ -504,28 +525,6 @@ protected String contentType() { return CONTENT_TYPE; } - @Override - protected void doMerge(Mapper mergeWith, boolean updateAllTypes) { - super.doMerge(mergeWith, updateAllTypes); - PercolatorFieldMapper percolatorMergeWith = (PercolatorFieldMapper) mergeWith; - - // Updating the boost_fields can be allowed, because it doesn't break previously indexed percolator queries - // However the updated boost_fields to completely take effect, percolator queries 
prior to the mapping update need to be reindexed - boostFields = percolatorMergeWith.boostFields; - } - - @Override - protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, Params params) throws IOException { - super.doXContentBody(builder, includeDefaults, params); - if (boostFields.isEmpty() == false) { - builder.startObject("boost_fields"); - for (Map.Entry entry : boostFields.entrySet()) { - builder.field(entry.getKey(), entry.getValue()); - } - builder.endObject(); - } - } - boolean isMapUnmappedFieldAsText() { return mapUnmappedFieldAsText; } diff --git a/modules/percolator/src/main/java/org/elasticsearch/percolator/QueryAnalyzer.java b/modules/percolator/src/main/java/org/elasticsearch/percolator/QueryAnalyzer.java index 8c2a6d7a4553b..940f9ebab5a9a 100644 --- a/modules/percolator/src/main/java/org/elasticsearch/percolator/QueryAnalyzer.java +++ b/modules/percolator/src/main/java/org/elasticsearch/percolator/QueryAnalyzer.java @@ -45,6 +45,7 @@ import org.apache.lucene.search.spans.SpanTermQuery; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.NumericUtils; +import org.elasticsearch.Version; import org.elasticsearch.common.logging.LoggerMessageFormat; import org.elasticsearch.common.lucene.search.function.FunctionScoreQuery; import org.elasticsearch.index.search.ESToParentBlockJoinQuery; @@ -59,16 +60,15 @@ import java.util.Objects; import java.util.Set; import java.util.function.BiFunction; -import java.util.function.Predicate; import static java.util.stream.Collectors.toSet; final class QueryAnalyzer { - private static final Map, BiFunction, Result>> queryProcessors; + private static final Map, BiFunction> queryProcessors; static { - Map, BiFunction, Result>> map = new HashMap<>(); + Map, BiFunction> map = new HashMap<>(); map.put(MatchNoDocsQuery.class, matchNoDocsQuery()); map.put(ConstantScoreQuery.class, constantScoreQuery()); map.put(BoostQuery.class, boostQuery()); @@ -119,161 +119,196 @@ private QueryAnalyzer() { * Sometimes the query analyzer can't always extract terms or ranges from a sub query, if that happens then * query analysis is stopped and an UnsupportedQueryException is thrown. So that the caller can mark * this query in such a way that the PercolatorQuery always verifies if this query with the MemoryIndex. + * + * @param query The query to analyze. + * @param indexVersion The create version of the index containing the percolator queries. */ - static Result analyze(Query query, Map boosts) { + static Result analyze(Query query, Version indexVersion) { Class queryClass = query.getClass(); if (queryClass.isAnonymousClass()) { // Sometimes queries have anonymous classes in that case we need the direct super class. 
// (for example blended term query) queryClass = queryClass.getSuperclass(); } - BiFunction, Result> queryProcessor = queryProcessors.get(queryClass); + BiFunction queryProcessor = queryProcessors.get(queryClass); if (queryProcessor != null) { - return queryProcessor.apply(query, boosts); + return queryProcessor.apply(query, indexVersion); } else { throw new UnsupportedQueryException(query); } } - private static BiFunction, Result> matchNoDocsQuery() { - return (query, boosts) -> new Result(true, Collections.emptySet()); + private static BiFunction matchNoDocsQuery() { + return (query, version) -> new Result(true, Collections.emptySet(), 1); } - private static BiFunction, Result> constantScoreQuery() { - return (query, boosts)-> { + private static BiFunction constantScoreQuery() { + return (query, boosts) -> { Query wrappedQuery = ((ConstantScoreQuery) query).getQuery(); return analyze(wrappedQuery, boosts); }; } - private static BiFunction, Result> boostQuery() { - return (query, boosts) -> { + private static BiFunction boostQuery() { + return (query, version) -> { Query wrappedQuery = ((BoostQuery) query).getQuery(); - return analyze(wrappedQuery, boosts); + return analyze(wrappedQuery, version); }; } - private static BiFunction, Result> termQuery() { - return (query, boosts) -> { + private static BiFunction termQuery() { + return (query, version) -> { TermQuery termQuery = (TermQuery) query; - return new Result(true, Collections.singleton(new QueryExtraction(termQuery.getTerm()))); + return new Result(true, Collections.singleton(new QueryExtraction(termQuery.getTerm())), 1); }; } - private static BiFunction, Result> termInSetQuery() { - return (query, boosts) -> { + private static BiFunction termInSetQuery() { + return (query, version) -> { TermInSetQuery termInSetQuery = (TermInSetQuery) query; Set terms = new HashSet<>(); PrefixCodedTerms.TermIterator iterator = termInSetQuery.getTermData().iterator(); for (BytesRef term = iterator.next(); term != null; term = iterator.next()) { terms.add(new QueryExtraction(new Term(iterator.field(), term))); } - return new Result(true, terms); + return new Result(true, terms, 1); }; } - private static BiFunction, Result> synonymQuery() { - return (query, boosts) -> { + private static BiFunction synonymQuery() { + return (query, version) -> { Set terms = ((SynonymQuery) query).getTerms().stream().map(QueryExtraction::new).collect(toSet()); - return new Result(true, terms); + return new Result(true, terms, 1); }; } - private static BiFunction, Result> commonTermsQuery() { - return (query, boosts) -> { + private static BiFunction commonTermsQuery() { + return (query, version) -> { Set terms = ((CommonTermsQuery) query).getTerms().stream().map(QueryExtraction::new).collect(toSet()); - return new Result(false, terms); + return new Result(false, terms, 1); }; } - private static BiFunction, Result> blendedTermQuery() { - return (query, boosts) -> { + private static BiFunction blendedTermQuery() { + return (query, version) -> { Set terms = ((BlendedTermQuery) query).getTerms().stream().map(QueryExtraction::new).collect(toSet()); - return new Result(true, terms); + return new Result(true, terms, 1); }; } - private static BiFunction, Result> phraseQuery() { - return (query, boosts) -> { + private static BiFunction phraseQuery() { + return (query, version) -> { Term[] terms = ((PhraseQuery) query).getTerms(); if (terms.length == 0) { - return new Result(true, Collections.emptySet()); + return new Result(true, Collections.emptySet(), 1); } - // the longest term 
is likely to be the rarest, - // so from a performance perspective it makes sense to extract that - Term longestTerm = terms[0]; - for (Term term : terms) { - if (longestTerm.bytes().length < term.bytes().length) { - longestTerm = term; + if (version.onOrAfter(Version.V_6_1_0)) { + Set extractions = Arrays.stream(terms).map(QueryExtraction::new).collect(toSet()); + return new Result(false, extractions, extractions.size()); + } else { + // the longest term is likely to be the rarest, + // so from a performance perspective it makes sense to extract that + Term longestTerm = terms[0]; + for (Term term : terms) { + if (longestTerm.bytes().length < term.bytes().length) { + longestTerm = term; + } } + return new Result(false, Collections.singleton(new QueryExtraction(longestTerm)), 1); } - return new Result(false, Collections.singleton(new QueryExtraction(longestTerm))); }; } - private static BiFunction, Result> multiPhraseQuery() { - return (query, boosts) -> { + private static BiFunction multiPhraseQuery() { + return (query, version) -> { Term[][] terms = ((MultiPhraseQuery) query).getTermArrays(); if (terms.length == 0) { - return new Result(true, Collections.emptySet()); + return new Result(true, Collections.emptySet(), 1); } - Set bestTermArr = null; - for (Term[] termArr : terms) { - Set queryExtractions = Arrays.stream(termArr).map(QueryExtraction::new).collect(toSet()); - bestTermArr = selectBestExtraction(boosts, bestTermArr, queryExtractions); + if (version.onOrAfter(Version.V_6_1_0)) { + Set extractions = new HashSet<>(); + for (Term[] termArr : terms) { + extractions.addAll(Arrays.stream(termArr).map(QueryExtraction::new).collect(toSet())); + } + return new Result(false, extractions, terms.length); + } else { + Set bestTermArr = null; + for (Term[] termArr : terms) { + Set queryExtractions = Arrays.stream(termArr).map(QueryExtraction::new).collect(toSet()); + bestTermArr = selectBestExtraction(bestTermArr, queryExtractions); + } + return new Result(false, bestTermArr, 1); } - return new Result(false, bestTermArr); }; } - private static BiFunction, Result> spanTermQuery() { - return (query, boosts) -> { + private static BiFunction spanTermQuery() { + return (query, version) -> { Term term = ((SpanTermQuery) query).getTerm(); - return new Result(true, Collections.singleton(new QueryExtraction(term))); + return new Result(true, Collections.singleton(new QueryExtraction(term)), 1); }; } - private static BiFunction, Result> spanNearQuery() { - return (query, boosts) -> { - Set bestClauses = null; + private static BiFunction spanNearQuery() { + return (query, version) -> { SpanNearQuery spanNearQuery = (SpanNearQuery) query; - for (SpanQuery clause : spanNearQuery.getClauses()) { - Result temp = analyze(clause, boosts); - bestClauses = selectBestExtraction(boosts, temp.extractions, bestClauses); + if (version.onOrAfter(Version.V_6_1_0)) { + Set results = Arrays.stream(spanNearQuery.getClauses()).map(clause -> analyze(clause, version)).collect(toSet()); + int msm = 0; + Set extractions = new HashSet<>(); + Set seenRangeFields = new HashSet<>(); + for (Result result : results) { + QueryExtraction[] t = result.extractions.toArray(new QueryExtraction[1]); + if (result.extractions.size() == 1 && t[0].range != null) { + if (seenRangeFields.add(t[0].range.fieldName)) { + msm += 1; + } + } else { + msm += result.minimumShouldMatch; + } + extractions.addAll(result.extractions); + } + return new Result(false, extractions, msm); + } else { + Set bestClauses = null; + for (SpanQuery clause : 
spanNearQuery.getClauses()) { + Result temp = analyze(clause, version); + bestClauses = selectBestExtraction(temp.extractions, bestClauses); + } + return new Result(false, bestClauses, 1); } - return new Result(false, bestClauses); }; } - private static BiFunction, Result> spanOrQuery() { - return (query, boosts) -> { + private static BiFunction spanOrQuery() { + return (query, version) -> { Set terms = new HashSet<>(); SpanOrQuery spanOrQuery = (SpanOrQuery) query; for (SpanQuery clause : spanOrQuery.getClauses()) { - terms.addAll(analyze(clause, boosts).extractions); + terms.addAll(analyze(clause, version).extractions); } - return new Result(false, terms); + return new Result(false, terms, 1); }; } - private static BiFunction, Result> spanNotQuery() { - return (query, boosts) -> { - Result result = analyze(((SpanNotQuery) query).getInclude(), boosts); - return new Result(false, result.extractions); + private static BiFunction spanNotQuery() { + return (query, version) -> { + Result result = analyze(((SpanNotQuery) query).getInclude(), version); + return new Result(false, result.extractions, result.minimumShouldMatch); }; } - private static BiFunction, Result> spanFirstQuery() { - return (query, boosts) -> { - Result result = analyze(((SpanFirstQuery) query).getMatch(), boosts); - return new Result(false, result.extractions); + private static BiFunction spanFirstQuery() { + return (query, version) -> { + Result result = analyze(((SpanFirstQuery) query).getMatch(), version); + return new Result(false, result.extractions, result.minimumShouldMatch); }; } - private static BiFunction, Result> booleanQuery() { - return (query, boosts) -> { + private static BiFunction booleanQuery() { + return (query, version) -> { BooleanQuery bq = (BooleanQuery) query; List clauses = bq.clauses(); int minimumShouldMatch = bq.getMinimumNumberShouldMatch(); @@ -292,34 +327,89 @@ private static BiFunction, Result> booleanQuery() { } } if (numRequiredClauses > 0) { - Set bestClause = null; - UnsupportedQueryException uqe = null; - for (BooleanClause clause : clauses) { - if (clause.isRequired() == false) { - // skip must_not clauses, we don't need to remember the things that do *not* match... - // skip should clauses, this bq has must clauses, so we don't need to remember should clauses, - // since they are completely optional. - continue; + if (version.onOrAfter(Version.V_6_1_0)) { + UnsupportedQueryException uqe = null; + List results = new ArrayList<>(numRequiredClauses); + for (BooleanClause clause : clauses) { + if (clause.isRequired()) { + // skip must_not clauses, we don't need to remember the things that do *not* match... + // skip should clauses, this bq has must clauses, so we don't need to remember should clauses, + // since they are completely optional. 
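
To make the behavioural switch in the phraseQuery, multiPhraseQuery and spanNearQuery hunks above easier to follow, here is a minimal standalone sketch (plain Java, invented names, not the PR's QueryAnalyzer code) of the two strategies: before 6.1 only the single longest term of a phrase is kept as the extraction, from 6.1 on every term is kept and each one counts towards the candidate query's minimum_should_match.

import java.util.Arrays;
import java.util.Collections;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;

// Illustrative sketch only, not the PR's QueryAnalyzer code.
class PhraseExtractionSketch {

    // pre-6.1 behaviour: keep only the longest term (assumed to be the rarest); msm stays 1
    static Set<String> extractLegacy(List<String> phraseTerms) {
        String longest = phraseTerms.get(0);
        for (String term : phraseTerms) {
            if (term.length() > longest.length()) {
                longest = term;
            }
        }
        return Collections.singleton(longest);
    }

    // 6.1+ behaviour: keep every term; each one counts towards minimum_should_match
    static Set<String> extractAll(List<String> phraseTerms) {
        return new LinkedHashSet<>(phraseTerms);
    }

    public static void main(String[] args) {
        List<String> phrase = Arrays.asList("quick", "brown", "fox");
        System.out.println("pre-6.1: " + extractLegacy(phrase) + ", msm=1");
        Set<String> all = extractAll(phrase);
        System.out.println("6.1+:    " + all + ", msm=" + all.size());
    }
}

Extracting every term makes the candidate filter stricter and lets fewer false positives through to verification, at the cost of indexing more extraction terms per stored query.
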
+ + try { + results.add(analyze(clause.getQuery(), version)); + } catch (UnsupportedQueryException e) { + uqe = e; + } + } } - Result temp; - try { - temp = analyze(clause.getQuery(), boosts); - } catch (UnsupportedQueryException e) { - uqe = e; - continue; + if (results.isEmpty()) { + if (uqe != null) { + // we're unable to select the best clause and an exception occurred, so we bail + throw uqe; + } else { + // We didn't find a clause and no exception occurred, so this bq only contained MatchNoDocsQueries. + return new Result(true, Collections.emptySet(), 1); + } + } else { + int msm = 0; + boolean requiredShouldClauses = minimumShouldMatch > 0 && numOptionalClauses > 0; + boolean verified = uqe == null && numProhibitedClauses == 0 && requiredShouldClauses == false; + Set<QueryExtraction> extractions = new HashSet<>(); + Set<String> seenRangeFields = new HashSet<>(); + for (Result result : results) { + QueryExtraction[] t = result.extractions.toArray(new QueryExtraction[1]); + if (result.extractions.size() == 1 && t[0].range != null) { + // In case of range queries each extraction does not simply increment the minimum_should_match + // for that percolator query like a term based extraction does, so that can lead to more false + // positives for percolator queries with range queries than for term based queries. + // This is because of the way number fields are extracted from the document being percolated: + // per field a single range is extracted, and if a percolator query has two or more range queries + // on the same field then the minimum should match can end up higher than the number of clauses in the CoveringQuery. + // Therefore right now the minimum should match is incremented once per number field when processing + // the percolator query at index time. + if (seenRangeFields.add(t[0].range.fieldName)) { + msm += 1; + } + } else { + msm += result.minimumShouldMatch; + } + verified &= result.verified; + extractions.addAll(result.extractions); + } + return new Result(verified, extractions, msm); } - bestClause = selectBestExtraction(boosts, temp.extractions, bestClause); - } - if (bestClause != null) { - return new Result(false, bestClause); } else { - if (uqe != null) { - // we're unable to select the best clause and an exception occurred, so we bail - throw uqe; + Set<QueryExtraction> bestClause = null; + UnsupportedQueryException uqe = null; + for (BooleanClause clause : clauses) { + if (clause.isRequired() == false) { + // skip must_not clauses, we don't need to remember the things that do *not* match... + // skip should clauses, this bq has must clauses, so we don't need to remember should clauses, + // since they are completely optional.
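
The comment in the hunk above explains why a range clause may only raise minimum_should_match once per field. As a rough, self-contained illustration (invented types and names, not the actual QueryAnalyzer logic):

import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

// Illustrative sketch: sum msm over required clauses, counting each range field at most once.
class ConjunctionMsmSketch {

    static final class ClauseResult {
        final int minimumShouldMatch;
        final String rangeField; // null for term based extractions

        ClauseResult(int minimumShouldMatch, String rangeField) {
            this.minimumShouldMatch = minimumShouldMatch;
            this.rangeField = rangeField;
        }
    }

    static int combinedMsm(List<ClauseResult> requiredClauses) {
        int msm = 0;
        Set<String> seenRangeFields = new HashSet<>();
        for (ClauseResult result : requiredClauses) {
            if (result.rangeField != null) {
                // only a single range per number field is extracted from the document at percolate
                // time, so a second range clause on the same field must not raise msm again
                if (seenRangeFields.add(result.rangeField)) {
                    msm += 1;
                }
            } else {
                msm += result.minimumShouldMatch;
            }
        }
        return msm;
    }

    public static void main(String[] args) {
        // two term clauses plus two range clauses on the same "price" field -> msm 3, not 4
        System.out.println(combinedMsm(Arrays.asList(
            new ClauseResult(1, null),
            new ClauseResult(1, null),
            new ClauseResult(1, "price"),
            new ClauseResult(1, "price"))));
    }
}
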
+ continue; + } + + Result temp; + try { + temp = analyze(clause.getQuery(), version); + } catch (UnsupportedQueryException e) { + uqe = e; + continue; + } + bestClause = selectBestExtraction(temp.extractions, bestClause); + } + if (bestClause != null) { + return new Result(false, bestClause, 1); } else { - // We didn't find a clause and no exception occurred, so this bq only contained MatchNoDocsQueries, - return new Result(true, Collections.emptySet()); + if (uqe != null) { + // we're unable to select the best clause and an exception occurred, so we bail + throw uqe; + } else { + // We didn't find a clause and no exception occurred, so this bq only contained MatchNoDocsQueries, + return new Result(true, Collections.emptySet(), 1); + } } } } else { @@ -329,33 +419,33 @@ private static BiFunction, Result> booleanQuery() { disjunctions.add(clause.getQuery()); } } - return handleDisjunction(disjunctions, minimumShouldMatch, numProhibitedClauses > 0, boosts); + return handleDisjunction(disjunctions, minimumShouldMatch, numProhibitedClauses > 0, version); } }; } - private static BiFunction, Result> disjunctionMaxQuery() { - return (query, boosts) -> { + private static BiFunction disjunctionMaxQuery() { + return (query, version) -> { List disjuncts = ((DisjunctionMaxQuery) query).getDisjuncts(); - return handleDisjunction(disjuncts, 1, false, boosts); + return handleDisjunction(disjuncts, 1, false, version); }; } - private static BiFunction, Result> functionScoreQuery() { - return (query, boosts) -> { + private static BiFunction functionScoreQuery() { + return (query, version) -> { FunctionScoreQuery functionScoreQuery = (FunctionScoreQuery) query; - Result result = analyze(functionScoreQuery.getSubQuery(), boosts); + Result result = analyze(functionScoreQuery.getSubQuery(), version); // If min_score is specified we can't guarantee upfront that this percolator query matches, // so in that case we set verified to false. // (if it matches with the percolator document matches with the extracted terms. // Min score filters out docs, which is different than the functions, which just influences the score.) boolean verified = functionScoreQuery.getMinScore() == null; - return new Result(verified, result.extractions); + return new Result(verified, result.extractions, result.minimumShouldMatch); }; } - private static BiFunction, Result> pointRangeQuery() { - return (query, boosts) -> { + private static BiFunction pointRangeQuery() { + return (query, version) -> { PointRangeQuery pointRangeQuery = (PointRangeQuery) query; if (pointRangeQuery.getNumDims() != 1) { throw new UnsupportedQueryException(query); @@ -367,14 +457,13 @@ private static BiFunction, Result> pointRangeQuery() { // Need to check whether upper is not smaller than lower, otherwise NumericUtils.subtract(...) fails IAE // If upper is really smaller than lower then we deal with like MatchNoDocsQuery. 
(verified and no extractions) if (new BytesRef(lowerPoint).compareTo(new BytesRef(upperPoint)) > 0) { - return new Result(true, Collections.emptySet()); + return new Result(true, Collections.emptySet(), 1); } byte[] interval = new byte[16]; NumericUtils.subtract(16, 0, prepad(upperPoint), prepad(lowerPoint), interval); return new Result(false, Collections.singleton(new QueryExtraction( - new Range(pointRangeQuery.getField(), lowerPoint, upperPoint, interval)) - )); + new Range(pointRangeQuery.getField(), lowerPoint, upperPoint, interval))), 1); }; } @@ -385,82 +474,83 @@ private static byte[] prepad(byte[] original) { return result; } - private static BiFunction, Result> indexOrDocValuesQuery() { - return (query, boosts) -> { + private static BiFunction indexOrDocValuesQuery() { + return (query, version) -> { IndexOrDocValuesQuery indexOrDocValuesQuery = (IndexOrDocValuesQuery) query; - return analyze(indexOrDocValuesQuery.getIndexQuery(), boosts); + return analyze(indexOrDocValuesQuery.getIndexQuery(), version); }; } - private static BiFunction, Result> toParentBlockJoinQuery() { - return (query, boosts) -> { + private static BiFunction toParentBlockJoinQuery() { + return (query, version) -> { ESToParentBlockJoinQuery toParentBlockJoinQuery = (ESToParentBlockJoinQuery) query; - Result result = analyze(toParentBlockJoinQuery.getChildQuery(), boosts); - return new Result(false, result.extractions); + Result result = analyze(toParentBlockJoinQuery.getChildQuery(), version); + return new Result(false, result.extractions, result.minimumShouldMatch); }; } - private static Result handleDisjunction(List disjunctions, int minimumShouldMatch, boolean otherClauses, - Map boosts) { - boolean verified = minimumShouldMatch <= 1 && otherClauses == false; + private static Result handleDisjunction(List disjunctions, int requiredShouldClauses, boolean otherClauses, + Version version) { + // Keep track of the msm for each clause: + int[] msmPerClause = new int[disjunctions.size()]; + String[] rangeFieldNames = new String[disjunctions.size()]; + boolean verified = otherClauses == false; + if (version.before(Version.V_6_1_0)) { + verified &= requiredShouldClauses <= 1; + } + Set terms = new HashSet<>(); - for (Query disjunct : disjunctions) { - Result subResult = analyze(disjunct, boosts); - if (subResult.verified == false) { - verified = false; - } + for (int i = 0; i < disjunctions.size(); i++) { + Query disjunct = disjunctions.get(i); + Result subResult = analyze(disjunct, version); + verified &= subResult.verified; terms.addAll(subResult.extractions); + + QueryExtraction[] t = subResult.extractions.toArray(new QueryExtraction[1]); + msmPerClause[i] = subResult.minimumShouldMatch; + if (subResult.extractions.size() == 1 && t[0].range != null) { + rangeFieldNames[i] = t[0].range.fieldName; + } } - return new Result(verified, terms); + + int msm = 0; + if (version.onOrAfter(Version.V_6_1_0)) { + Set seenRangeFields = new HashSet<>(); + // Figure out what the combined msm is for this disjunction: + // (sum the lowest required clauses, otherwise we're too strict and queries may not match) + Arrays.sort(msmPerClause); + int limit = Math.min(msmPerClause.length, Math.max(1, requiredShouldClauses)); + for (int i = 0; i < limit; i++) { + if (rangeFieldNames[i] != null) { + if (seenRangeFields.add(rangeFieldNames[i])) { + msm += 1; + } + } else { + msm += msmPerClause[i]; + } + } + } else { + msm = 1; + } + return new Result(verified, terms, msm); } - static Set selectBestExtraction(Map boostFields, Set 
extractions1, - Set extractions2) { + static Set selectBestExtraction(Set extractions1, Set extractions2) { assert extractions1 != null || extractions2 != null; if (extractions1 == null) { return extractions2; } else if (extractions2 == null) { return extractions1; } else { - Set filtered1; - Set filtered2; - if (boostFields.isEmpty() == false) { - Predicate predicate = extraction -> { - String fieldName = extraction.term != null ? extraction.term.field() : extraction.range.fieldName; - float boost = boostFields.getOrDefault(fieldName, 1F); - return boost != 0F; - }; - filtered1 = extractions1.stream().filter(predicate).collect(toSet()); - if (filtered1.isEmpty()) { - return extractions2; - } - filtered2 = extractions2.stream().filter(predicate).collect(toSet()); - if (filtered2.isEmpty()) { - return extractions1; - } - - float extraction1LowestBoost = lowestBoost(filtered1, boostFields); - float extraction2LowestBoost = lowestBoost(filtered2, boostFields); - if (extraction1LowestBoost > extraction2LowestBoost) { - return extractions1; - } else if (extraction2LowestBoost > extraction1LowestBoost) { - return extractions2; - } - // Step out, because boosts are equal, so pick best extraction on either term or range size. - } else { - filtered1 = extractions1; - filtered2 = extractions2; - } - // Prefer term based extractions over range based extractions: boolean onlyRangeBasedExtractions = true; - for (QueryExtraction clause : filtered1) { + for (QueryExtraction clause : extractions1) { if (clause.term != null) { onlyRangeBasedExtractions = false; break; } } - for (QueryExtraction clause : filtered2) { + for (QueryExtraction clause : extractions2) { if (clause.term != null) { onlyRangeBasedExtractions = false; break; @@ -468,8 +558,8 @@ static Set selectBestExtraction(Map boostFields, } if (onlyRangeBasedExtractions) { - BytesRef extraction1SmallestRange = smallestRange(filtered1); - BytesRef extraction2SmallestRange = smallestRange(filtered2); + BytesRef extraction1SmallestRange = smallestRange(extractions1); + BytesRef extraction2SmallestRange = smallestRange(extractions2); if (extraction1SmallestRange == null) { return extractions2; } else if (extraction2SmallestRange == null) { @@ -483,8 +573,8 @@ static Set selectBestExtraction(Map boostFields, return extractions2; } } else { - int extraction1ShortestTerm = minTermLength(filtered1); - int extraction2ShortestTerm = minTermLength(filtered2); + int extraction1ShortestTerm = minTermLength(extractions1); + int extraction2ShortestTerm = minTermLength(extractions2); // keep the clause with longest terms, this likely to be rarest. if (extraction1ShortestTerm >= extraction2ShortestTerm) { return extractions1; @@ -495,21 +585,11 @@ static Set selectBestExtraction(Map boostFields, } } - private static float lowestBoost(Set extractions, Map boostFields) { - float lowestBoost = Float.POSITIVE_INFINITY; - for (QueryExtraction extraction : extractions) { - String fieldName = extraction.term != null ? extraction.term.field() : extraction.range.fieldName; - float boost = boostFields.getOrDefault(fieldName, 1F); - lowestBoost = Math.min(lowestBoost, boost); - } - return lowestBoost; - } - private static int minTermLength(Set extractions) { // In case there are only range extractions, then we return Integer.MIN_VALUE, // so that selectBestExtraction(...) 
we are likely to prefer the extractions that contains at least a single extraction if (extractions.stream().filter(queryExtraction -> queryExtraction.term != null).count() == 0 && - extractions.stream().filter(queryExtraction -> queryExtraction.range != null).count() > 0) { + extractions.stream().filter(queryExtraction -> queryExtraction.range != null).count() > 0) { return Integer.MIN_VALUE; } @@ -538,10 +618,12 @@ static class Result { final Set extractions; final boolean verified; + final int minimumShouldMatch; - Result(boolean verified, Set extractions) { + Result(boolean verified, Set extractions, int minimumShouldMatch) { this.extractions = extractions; this.verified = verified; + this.minimumShouldMatch = minimumShouldMatch; } } diff --git a/modules/percolator/src/test/java/org/elasticsearch/percolator/CandidateQueryTests.java b/modules/percolator/src/test/java/org/elasticsearch/percolator/CandidateQueryTests.java index 61f3fd14f9533..971be4931e6c1 100644 --- a/modules/percolator/src/test/java/org/elasticsearch/percolator/CandidateQueryTests.java +++ b/modules/percolator/src/test/java/org/elasticsearch/percolator/CandidateQueryTests.java @@ -55,6 +55,7 @@ import org.apache.lucene.search.PrefixQuery; import org.apache.lucene.search.Query; import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.TermInSetQuery; import org.apache.lucene.search.TermQuery; import org.apache.lucene.search.TopDocs; import org.apache.lucene.search.Weight; @@ -64,6 +65,8 @@ import org.apache.lucene.search.spans.SpanOrQuery; import org.apache.lucene.search.spans.SpanTermQuery; import org.apache.lucene.store.Directory; +import org.apache.lucene.store.RAMDirectory; +import org.elasticsearch.Version; import org.elasticsearch.common.CheckedFunction; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.compress.CompressedXContent; @@ -88,6 +91,7 @@ import static org.elasticsearch.common.network.InetAddresses.forString; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.instanceOf; public class CandidateQueryTests extends ESSingleNodeTestCase { @@ -307,9 +311,10 @@ public void testRangeQueries() throws Exception { IndexSearcher shardSearcher = newSearcher(directoryReader); shardSearcher.setQueryCache(null); + Version v = Version.V_6_1_0; MemoryIndex memoryIndex = MemoryIndex.fromDocument(Collections.singleton(new IntPoint("int_field", 3)), new WhitespaceAnalyzer()); IndexSearcher percolateSearcher = memoryIndex.createSearcher(); - Query query = fieldType.percolateQuery("_name", queryStore, Collections.singletonList(new BytesArray("{}")), percolateSearcher); + Query query = fieldType.percolateQuery("_name", queryStore, Collections.singletonList(new BytesArray("{}")), percolateSearcher, v); TopDocs topDocs = shardSearcher.search(query, 1); assertEquals(1L, topDocs.totalHits); assertEquals(1, topDocs.scoreDocs.length); @@ -317,7 +322,7 @@ public void testRangeQueries() throws Exception { memoryIndex = MemoryIndex.fromDocument(Collections.singleton(new LongPoint("long_field", 7L)), new WhitespaceAnalyzer()); percolateSearcher = memoryIndex.createSearcher(); - query = fieldType.percolateQuery("_name", queryStore, Collections.singletonList(new BytesArray("{}")), percolateSearcher); + query = fieldType.percolateQuery("_name", queryStore, Collections.singletonList(new BytesArray("{}")), percolateSearcher, v); topDocs = shardSearcher.search(query, 1); assertEquals(1L, topDocs.totalHits); assertEquals(1, topDocs.scoreDocs.length); @@ -326,7 +331,7 
@@ public void testRangeQueries() throws Exception { memoryIndex = MemoryIndex.fromDocument(Collections.singleton(new HalfFloatPoint("half_float_field", 12)), new WhitespaceAnalyzer()); percolateSearcher = memoryIndex.createSearcher(); - query = fieldType.percolateQuery("_name", queryStore, Collections.singletonList(new BytesArray("{}")), percolateSearcher); + query = fieldType.percolateQuery("_name", queryStore, Collections.singletonList(new BytesArray("{}")), percolateSearcher, v); topDocs = shardSearcher.search(query, 1); assertEquals(1L, topDocs.totalHits); assertEquals(1, topDocs.scoreDocs.length); @@ -334,7 +339,7 @@ public void testRangeQueries() throws Exception { memoryIndex = MemoryIndex.fromDocument(Collections.singleton(new FloatPoint("float_field", 17)), new WhitespaceAnalyzer()); percolateSearcher = memoryIndex.createSearcher(); - query = fieldType.percolateQuery("_name", queryStore, Collections.singletonList(new BytesArray("{}")), percolateSearcher); + query = fieldType.percolateQuery("_name", queryStore, Collections.singletonList(new BytesArray("{}")), percolateSearcher, v); topDocs = shardSearcher.search(query, 1); assertEquals(1, topDocs.totalHits); assertEquals(1, topDocs.scoreDocs.length); @@ -342,7 +347,7 @@ public void testRangeQueries() throws Exception { memoryIndex = MemoryIndex.fromDocument(Collections.singleton(new DoublePoint("double_field", 21)), new WhitespaceAnalyzer()); percolateSearcher = memoryIndex.createSearcher(); - query = fieldType.percolateQuery("_name", queryStore, Collections.singletonList(new BytesArray("{}")), percolateSearcher); + query = fieldType.percolateQuery("_name", queryStore, Collections.singletonList(new BytesArray("{}")), percolateSearcher, v); topDocs = shardSearcher.search(query, 1); assertEquals(1, topDocs.totalHits); assertEquals(1, topDocs.scoreDocs.length); @@ -351,7 +356,7 @@ public void testRangeQueries() throws Exception { memoryIndex = MemoryIndex.fromDocument(Collections.singleton(new InetAddressPoint("ip_field", forString("192.168.0.4"))), new WhitespaceAnalyzer()); percolateSearcher = memoryIndex.createSearcher(); - query = fieldType.percolateQuery("_name", queryStore, Collections.singletonList(new BytesArray("{}")), percolateSearcher); + query = fieldType.percolateQuery("_name", queryStore, Collections.singletonList(new BytesArray("{}")), percolateSearcher, v); topDocs = shardSearcher.search(query, 1); assertEquals(1, topDocs.totalHits); assertEquals(1, topDocs.scoreDocs.length); @@ -461,11 +466,99 @@ public void testDuelRangeQueries() throws Exception { duelRun(queryStore, memoryIndex, shardSearcher); } + public void testPercolateSmallAndLargeDocument() throws Exception { + List docs = new ArrayList<>(); + BooleanQuery.Builder builder = new BooleanQuery.Builder(); + builder.add(new TermQuery(new Term("field", "value1")), BooleanClause.Occur.MUST); + builder.add(new TermQuery(new Term("field", "value2")), BooleanClause.Occur.MUST); + addQuery(builder.build(), docs); + builder = new BooleanQuery.Builder(); + builder.add(new TermQuery(new Term("field", "value2")), BooleanClause.Occur.MUST); + builder.add(new TermQuery(new Term("field", "value3")), BooleanClause.Occur.MUST); + addQuery(builder.build(), docs); + builder = new BooleanQuery.Builder(); + builder.add(new TermQuery(new Term("field", "value3")), BooleanClause.Occur.MUST); + builder.add(new TermQuery(new Term("field", "value4")), BooleanClause.Occur.MUST); + addQuery(builder.build(), docs); + indexWriter.addDocuments(docs); + indexWriter.close(); + directoryReader = 
DirectoryReader.open(directory); + IndexSearcher shardSearcher = newSearcher(directoryReader); + shardSearcher.setQueryCache(null); + + Version v = Version.CURRENT; + + try (RAMDirectory directory = new RAMDirectory()) { + try (IndexWriter iw = new IndexWriter(directory, newIndexWriterConfig())) { + Document document = new Document(); + document.add(new StringField("field", "value1", Field.Store.NO)); + document.add(new StringField("field", "value2", Field.Store.NO)); + iw.addDocument(document); + document = new Document(); + document.add(new StringField("field", "value5", Field.Store.NO)); + document.add(new StringField("field", "value6", Field.Store.NO)); + iw.addDocument(document); + document = new Document(); + document.add(new StringField("field", "value3", Field.Store.NO)); + document.add(new StringField("field", "value4", Field.Store.NO)); + iw.addDocument(document); + } + try (IndexReader ir = DirectoryReader.open(directory)){ + IndexSearcher percolateSearcher = new IndexSearcher(ir); + Query query = + fieldType.percolateQuery("_name", queryStore, Collections.singletonList(new BytesArray("{}")), percolateSearcher, v); + TopDocs topDocs = shardSearcher.search(query, 10); + assertEquals(2L, topDocs.totalHits); + assertEquals(2, topDocs.scoreDocs.length); + assertEquals(0, topDocs.scoreDocs[0].doc); + assertEquals(2, topDocs.scoreDocs[1].doc); + + query = new ConstantScoreQuery(query); + topDocs = shardSearcher.search(query, 10); + assertEquals(2L, topDocs.totalHits); + assertEquals(2, topDocs.scoreDocs.length); + assertEquals(0, topDocs.scoreDocs[0].doc); + assertEquals(2, topDocs.scoreDocs[1].doc); + } + } + + // This will trigger using the TermsQuery instead of individual term query clauses in the CoveringQuery: + try (RAMDirectory directory = new RAMDirectory()) { + try (IndexWriter iw = new IndexWriter(directory, newIndexWriterConfig())) { + Document document = new Document(); + for (int i = 0; i < 1025; i++) { + int fieldNumber = 2 + i; + document.add(new StringField("field", "value" + fieldNumber, Field.Store.NO)); + } + iw.addDocument(document); + } + try (IndexReader ir = DirectoryReader.open(directory)){ + IndexSearcher percolateSearcher = new IndexSearcher(ir); + PercolateQuery query = (PercolateQuery) + fieldType.percolateQuery("_name", queryStore, Collections.singletonList(new BytesArray("{}")), percolateSearcher, v); + BooleanQuery candidateQuery = (BooleanQuery) query.getCandidateMatchesQuery(); + assertThat(candidateQuery.clauses().get(0).getQuery(), instanceOf(TermInSetQuery.class)); + + TopDocs topDocs = shardSearcher.search(query, 10); + assertEquals(2L, topDocs.totalHits); + assertEquals(2, topDocs.scoreDocs.length); + assertEquals(1, topDocs.scoreDocs[0].doc); + assertEquals(2, topDocs.scoreDocs[1].doc); + + topDocs = shardSearcher.search(new ConstantScoreQuery(query), 10); + assertEquals(2L, topDocs.totalHits); + assertEquals(2, topDocs.scoreDocs.length); + assertEquals(1, topDocs.scoreDocs[0].doc); + assertEquals(2, topDocs.scoreDocs[1].doc); + } + } + } + private void duelRun(PercolateQuery.QueryStore queryStore, MemoryIndex memoryIndex, IndexSearcher shardSearcher) throws IOException { boolean requireScore = randomBoolean(); IndexSearcher percolateSearcher = memoryIndex.createSearcher(); Query percolateQuery = fieldType.percolateQuery("_name", queryStore, - Collections.singletonList(new BytesArray("{}")), percolateSearcher); + Collections.singletonList(new BytesArray("{}")), percolateSearcher, Version.CURRENT); Query query = requireScore ? 
percolateQuery : new ConstantScoreQuery(percolateQuery); TopDocs topDocs = shardSearcher.search(query, 10); @@ -499,7 +592,7 @@ private TopDocs executeQuery(PercolateQuery.QueryStore queryStore, IndexSearcher shardSearcher) throws IOException { IndexSearcher percolateSearcher = memoryIndex.createSearcher(); Query percolateQuery = fieldType.percolateQuery("_name", queryStore, - Collections.singletonList(new BytesArray("{}")), percolateSearcher); + Collections.singletonList(new BytesArray("{}")), percolateSearcher, Version.CURRENT); return shardSearcher.search(percolateQuery, 10); } diff --git a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorFieldMapperTests.java b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorFieldMapperTests.java index 441278d23f87a..299f4e8ae923c 100644 --- a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorFieldMapperTests.java +++ b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorFieldMapperTests.java @@ -28,10 +28,8 @@ import org.apache.lucene.document.LongPoint; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.IndexableField; -import org.apache.lucene.index.PrefixCodedTerms; import org.apache.lucene.index.Term; import org.apache.lucene.index.memory.MemoryIndex; -import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.BooleanClause.Occur; import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.PhraseQuery; @@ -43,6 +41,7 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.hash.MurmurHash3; import org.elasticsearch.common.io.stream.InputStreamStreamInput; @@ -115,6 +114,7 @@ import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.is; public class PercolatorFieldMapperTests extends ESSingleNodeTestCase { @@ -171,9 +171,9 @@ public void testExtractTerms() throws Exception { addQueryFieldMappings(); BooleanQuery.Builder bq = new BooleanQuery.Builder(); TermQuery termQuery1 = new TermQuery(new Term("field", "term1")); - bq.add(termQuery1, BooleanClause.Occur.SHOULD); + bq.add(termQuery1, Occur.SHOULD); TermQuery termQuery2 = new TermQuery(new Term("field", "term2")); - bq.add(termQuery2, BooleanClause.Occur.SHOULD); + bq.add(termQuery2, Occur.SHOULD); DocumentMapper documentMapper = mapperService.documentMapper("doc"); PercolatorFieldMapper fieldMapper = (PercolatorFieldMapper) documentMapper.mappers().getMapper(fieldName); @@ -189,6 +189,31 @@ public void testExtractTerms() throws Exception { assertThat(fields.size(), equalTo(2)); assertThat(fields.get(0).binaryValue().utf8ToString(), equalTo("field\u0000term1")); assertThat(fields.get(1).binaryValue().utf8ToString(), equalTo("field\u0000term2")); + + fields = new ArrayList<>(Arrays.asList(document.getFields(fieldType.minimumShouldMatchField.name()))); + assertThat(fields.size(), equalTo(1)); + assertThat(fields.get(0).numericValue(), equalTo(1L)); + + // Now test conjunction: + bq = new BooleanQuery.Builder(); + bq.add(termQuery1, Occur.MUST); + bq.add(termQuery2, Occur.MUST); + + parseContext = new ParseContext.InternalParseContext(Settings.EMPTY, mapperService.documentMapperParser(), + 
documentMapper, null, null); + fieldMapper.processQuery(bq.build(), parseContext); + document = parseContext.doc(); + + assertThat(document.getField(fieldType.extractionResultField.name()).stringValue(), equalTo(EXTRACTION_COMPLETE)); + fields = new ArrayList<>(Arrays.asList(document.getFields(fieldType.queryTermsField.name()))); + fields.sort(Comparator.comparing(IndexableField::binaryValue)); + assertThat(fields.size(), equalTo(2)); + assertThat(fields.get(0).binaryValue().utf8ToString(), equalTo("field\u0000term1")); + assertThat(fields.get(1).binaryValue().utf8ToString(), equalTo("field\u0000term2")); + + fields = new ArrayList<>(Arrays.asList(document.getFields(fieldType.minimumShouldMatchField.name()))); + assertThat(fields.size(), equalTo(1)); + assertThat(fields.get(0).numericValue(), equalTo(2L)); } public void testExtractRanges() throws Exception { @@ -212,9 +237,40 @@ public void testExtractRanges() throws Exception { assertThat(document.getField(fieldType.extractionResultField.name()).stringValue(), equalTo(EXTRACTION_PARTIAL)); List fields = new ArrayList<>(Arrays.asList(document.getFields(fieldType.rangeField.name()))); fields.sort(Comparator.comparing(IndexableField::binaryValue)); + assertThat(fields.size(), equalTo(2)); + assertThat(IntPoint.decodeDimension(fields.get(0).binaryValue().bytes, 12), equalTo(10)); + assertThat(IntPoint.decodeDimension(fields.get(0).binaryValue().bytes, 28), equalTo(20)); + assertThat(IntPoint.decodeDimension(fields.get(1).binaryValue().bytes, 12), equalTo(15)); + assertThat(IntPoint.decodeDimension(fields.get(1).binaryValue().bytes, 28), equalTo(20)); + + fields = new ArrayList<>(Arrays.asList(document.getFields(fieldType.minimumShouldMatchField.name()))); assertThat(fields.size(), equalTo(1)); - assertThat(IntPoint.decodeDimension(fields.get(0).binaryValue().bytes, 12), equalTo(15)); + assertThat(fields.get(0).numericValue(), equalTo(1L)); + + // Range queries on different fields: + bq = new BooleanQuery.Builder(); + bq.add(rangeQuery1, Occur.MUST); + rangeQuery2 = mapperService.documentMapper("doc").mappers().getMapper("number_field2").fieldType() + .rangeQuery(15, 20, true, true, null, null, null, null); + bq.add(rangeQuery2, Occur.MUST); + + parseContext = new ParseContext.InternalParseContext(Settings.EMPTY, + mapperService.documentMapperParser(), documentMapper, null, null); + fieldMapper.processQuery(bq.build(), parseContext); + document = parseContext.doc(); + + assertThat(document.getField(fieldType.extractionResultField.name()).stringValue(), equalTo(EXTRACTION_PARTIAL)); + fields = new ArrayList<>(Arrays.asList(document.getFields(fieldType.rangeField.name()))); + fields.sort(Comparator.comparing(IndexableField::binaryValue)); + assertThat(fields.size(), equalTo(2)); + assertThat(IntPoint.decodeDimension(fields.get(0).binaryValue().bytes, 12), equalTo(10)); assertThat(IntPoint.decodeDimension(fields.get(0).binaryValue().bytes, 28), equalTo(20)); + assertThat(LongPoint.decodeDimension(fields.get(1).binaryValue().bytes, 8), equalTo(15L)); + assertThat(LongPoint.decodeDimension(fields.get(1).binaryValue().bytes, 24), equalTo(20L)); + + fields = new ArrayList<>(Arrays.asList(document.getFields(fieldType.minimumShouldMatchField.name()))); + assertThat(fields.size(), equalTo(1)); + assertThat(fields.get(0).numericValue(), equalTo(2L)); } public void testExtractTermsAndRanges_failed() throws Exception { @@ -243,7 +299,7 @@ public void testExtractTermsAndRanges_partial() throws Exception { ParseContext.Document document = parseContext.doc(); 
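
The mapper test assertions above check that, next to the encoded field\0term extractions, the percolator field now also indexes how many of them have to match: 1 for the should-clause disjunction, 2 for the must-clause conjunction and for ranges on two different fields. The comments elsewhere in this diff refer to a CoveringQuery over those indexed terms; the sketch below (invented names, plain Java) expresses the same selection rule as simple set arithmetic.

import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;

// Illustrative sketch: a stored percolator query stays a candidate for a document
// only if the document covers at least `minimumShouldMatch` of its extractions.
class CandidateSelectionSketch {

    static boolean isCandidate(Set<String> queryExtractions, int minimumShouldMatch, Set<String> documentTerms) {
        long covered = queryExtractions.stream().filter(documentTerms::contains).count();
        return covered >= minimumShouldMatch;
    }

    public static void main(String[] args) {
        Set<String> extractions = new HashSet<>(Arrays.asList("field\0term1", "field\0term2"));
        Set<String> document = new HashSet<>(Arrays.asList("field\0term1", "field\0other"));

        System.out.println(isCandidate(extractions, 1, document)); // disjunction (msm 1): still a candidate
        System.out.println(isCandidate(extractions, 2, document)); // conjunction (msm 2): filtered out up front
    }
}
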
PercolatorFieldMapper.FieldType fieldType = (PercolatorFieldMapper.FieldType) fieldMapper.fieldType(); - assertThat(document.getFields().size(), equalTo(2)); + assertThat(document.getFields().size(), equalTo(3)); assertThat(document.getFields().get(0).binaryValue().utf8ToString(), equalTo("field\u0000term")); assertThat(document.getField(fieldType.extractionResultField.name()).stringValue(), equalTo(EXTRACTION_PARTIAL)); } @@ -260,35 +316,57 @@ public void testCreateCandidateQuery() throws Exception { IndexReader indexReader = memoryIndex.createSearcher().getIndexReader(); - BooleanQuery candidateQuery = (BooleanQuery) fieldType.createCandidateQuery(indexReader); - assertEquals(3, candidateQuery.clauses().size()); - assertEquals(Occur.SHOULD, candidateQuery.clauses().get(0).getOccur()); - TermInSetQuery termsQuery = (TermInSetQuery) candidateQuery.clauses().get(0).getQuery(); - - PrefixCodedTerms terms = termsQuery.getTermData(); - assertThat(terms.size(), equalTo(14L)); - PrefixCodedTerms.TermIterator termIterator = terms.iterator(); - assertTermIterator(termIterator, "_field3\u0000me", fieldType.queryTermsField.name()); - assertTermIterator(termIterator, "_field3\u0000unhide", fieldType.queryTermsField.name()); - assertTermIterator(termIterator, "field1\u0000brown", fieldType.queryTermsField.name()); - assertTermIterator(termIterator, "field1\u0000dog", fieldType.queryTermsField.name()); - assertTermIterator(termIterator, "field1\u0000fox", fieldType.queryTermsField.name()); - assertTermIterator(termIterator, "field1\u0000jumps", fieldType.queryTermsField.name()); - assertTermIterator(termIterator, "field1\u0000lazy", fieldType.queryTermsField.name()); - assertTermIterator(termIterator, "field1\u0000over", fieldType.queryTermsField.name()); - assertTermIterator(termIterator, "field1\u0000quick", fieldType.queryTermsField.name()); - assertTermIterator(termIterator, "field1\u0000the", fieldType.queryTermsField.name()); - assertTermIterator(termIterator, "field2\u0000more", fieldType.queryTermsField.name()); - assertTermIterator(termIterator, "field2\u0000some", fieldType.queryTermsField.name()); - assertTermIterator(termIterator, "field2\u0000text", fieldType.queryTermsField.name()); - assertTermIterator(termIterator, "field4\u0000123", fieldType.queryTermsField.name()); - - assertEquals(Occur.SHOULD, candidateQuery.clauses().get(1).getOccur()); - assertEquals(new TermQuery(new Term(fieldType.extractionResultField.name(), EXTRACTION_FAILED)), - candidateQuery.clauses().get(1).getQuery()); - - assertEquals(Occur.SHOULD, candidateQuery.clauses().get(2).getOccur()); - assertThat(candidateQuery.clauses().get(2).getQuery().toString(), containsString(fieldName + ".range_field:, Boolean> t = fieldType.createCandidateQueryClauses(indexReader); + assertTrue(t.v2()); + List clauses = t.v1(); + clauses.sort(Comparator.comparing(Query::toString)); + assertEquals(15, clauses.size()); + assertEquals(fieldType.queryTermsField.name() + ":_field3\u0000me", clauses.get(0).toString()); + assertEquals(fieldType.queryTermsField.name() + ":_field3\u0000unhide", clauses.get(1).toString()); + assertEquals(fieldType.queryTermsField.name() + ":field1\u0000brown", clauses.get(2).toString()); + assertEquals(fieldType.queryTermsField.name() + ":field1\u0000dog", clauses.get(3).toString()); + assertEquals(fieldType.queryTermsField.name() + ":field1\u0000fox", clauses.get(4).toString()); + assertEquals(fieldType.queryTermsField.name() + ":field1\u0000jumps", clauses.get(5).toString()); + 
assertEquals(fieldType.queryTermsField.name() + ":field1\u0000lazy", clauses.get(6).toString()); + assertEquals(fieldType.queryTermsField.name() + ":field1\u0000over", clauses.get(7).toString()); + assertEquals(fieldType.queryTermsField.name() + ":field1\u0000quick", clauses.get(8).toString()); + assertEquals(fieldType.queryTermsField.name() + ":field1\u0000the", clauses.get(9).toString()); + assertEquals(fieldType.queryTermsField.name() + ":field2\u0000more", clauses.get(10).toString()); + assertEquals(fieldType.queryTermsField.name() + ":field2\u0000some", clauses.get(11).toString()); + assertEquals(fieldType.queryTermsField.name() + ":field2\u0000text", clauses.get(12).toString()); + assertEquals(fieldType.queryTermsField.name() + ":field4\u0000123", clauses.get(13).toString()); + assertThat(clauses.get(14).toString(), containsString(fieldName + ".range_field:, Boolean> t = fieldType.createCandidateQueryClauses(indexReader); + assertTrue(t.v2()); + List clauses = t.v1(); + assertEquals(1024, clauses.size()); + assertThat(clauses.get(1023).toString(), containsString(fieldName + ".range_field:, Boolean> t = fieldType.createCandidateQueryClauses(indexReader); + assertThat(t.v2(), is(true)); + List clauses = t.v1(); + assertEquals(7, clauses.size()); + assertThat(clauses.get(0).toString(), containsString(fieldName + ".range_field: fields = new ArrayList<>(Arrays.asList(document.getFields(fieldType.rangeField.name()))); - assertThat(fields.size(), equalTo(1)); - assertThat(LongPoint.decodeDimension(subByteArray(fields.get(0).binaryValue().bytes, 8, 8), 0), equalTo(5L)); - assertThat(LongPoint.decodeDimension(subByteArray(fields.get(0).binaryValue().bytes, 24, 8), 0), equalTo(10L)); - - // No boost fields, so default extraction logic: - fieldMapper = (PercolatorFieldMapper) documentMapper.mappers().getMapper("query2"); - parseContext = new ParseContext.InternalParseContext(Settings.EMPTY, mapperService.documentMapperParser(), - documentMapper, null, null); - fieldMapper.processQuery(bq.build(), parseContext); - document = parseContext.doc(); - fieldType = (PercolatorFieldMapper.FieldType) fieldMapper.fieldType(); - assertThat(document.getField(fieldType.extractionResultField.name()).stringValue(), equalTo(EXTRACTION_PARTIAL)); - assertThat(document.getFields(fieldType.rangeField.name()).length, equalTo(0)); - fields = new ArrayList<>(Arrays.asList(document.getFields(fieldType.queryTermsField.name()))); - assertThat(fields.size(), equalTo(1)); - assertThat(fields.get(0).binaryValue().utf8ToString(), equalTo("status\0updated")); - - // Second clause is extracted, because it is boosted by 2: - bq = new BooleanQuery.Builder(); - bq.add(new TermQuery(new Term("status", "updated")), Occur.FILTER); - bq.add(new TermQuery(new Term("updated_field", "done")), Occur.FILTER); - - fieldMapper = (PercolatorFieldMapper) documentMapper.mappers().getMapper("query1"); - parseContext = new ParseContext.InternalParseContext(Settings.EMPTY, mapperService.documentMapperParser(), - documentMapper, null, null); - fieldMapper.processQuery(bq.build(), parseContext); - document = parseContext.doc(); - fieldType = (PercolatorFieldMapper.FieldType) fieldMapper.fieldType(); - assertThat(document.getField(fieldType.extractionResultField.name()).stringValue(), equalTo(EXTRACTION_PARTIAL)); - assertThat(document.getFields(fieldType.rangeField.name()).length, equalTo(0)); - fields = new ArrayList<>(Arrays.asList(document.getFields(fieldType.queryTermsField.name()))); - assertThat(fields.size(), equalTo(1)); - 
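
The surrounding test changes assert 1024 individual clauses in one case and a TermInSetQuery clause in the other (the earlier test comment calls this "using the TermsQuery instead of individual term query clauses in the CoveringQuery"). A simplified sketch of that decision, using Lucene types that already appear in this diff; the helper name and the exact cut-over condition are assumptions on my part:

import java.util.ArrayList;
import java.util.List;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermInSetQuery;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.util.BytesRef;

// Illustrative sketch: one clause per distinct document term while that still fits under
// Lucene's clause limit, otherwise a single TermInSetQuery over all of them.
class CandidateClausesSketch {

    static List<Query> candidateClauses(String field, List<BytesRef> documentTerms) {
        List<Query> clauses = new ArrayList<>();
        if (documentTerms.size() <= BooleanQuery.getMaxClauseCount()) { // 1024 by default
            for (BytesRef term : documentTerms) {
                clauses.add(new TermQuery(new Term(field, term)));
            }
        } else {
            clauses.add(new TermInSetQuery(field, documentTerms));
        }
        return clauses;
    }
}

Presumably the fallback exists so that documents with very many distinct terms do not blow past the clause limit; per-term clauses are what lets coverage be counted term by term.
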
assertThat(fields.get(0).binaryValue().utf8ToString(), equalTo("updated_field\0done")); - - // First clause is extracted, because default logic: - bq = new BooleanQuery.Builder(); - bq.add(new TermQuery(new Term("status", "updated")), Occur.FILTER); - bq.add(new TermQuery(new Term("updated_field", "done")), Occur.FILTER); - - fieldMapper = (PercolatorFieldMapper) documentMapper.mappers().getMapper("query2"); - parseContext = new ParseContext.InternalParseContext(Settings.EMPTY, mapperService.documentMapperParser(), - documentMapper, null, null); - fieldMapper.processQuery(bq.build(), parseContext); - document = parseContext.doc(); - fieldType = (PercolatorFieldMapper.FieldType) fieldMapper.fieldType(); - assertThat(document.getField(fieldType.extractionResultField.name()).stringValue(), equalTo(EXTRACTION_PARTIAL)); - assertThat(document.getFields(fieldType.rangeField.name()).length, equalTo(0)); - fields = new ArrayList<>(Arrays.asList(document.getFields(fieldType.queryTermsField.name()))); - assertThat(fields.size(), equalTo(1)); - assertThat(fields.get(0).binaryValue().utf8ToString(), equalTo("status\0updated")); - } - // Just so that we store scripts in percolator queries, but not really execute these scripts. public static class FoolMeScriptPlugin extends MockScriptPlugin { diff --git a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorQuerySearchIT.java b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorQuerySearchIT.java index 54d6c69112571..db2d85b9e39e9 100644 --- a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorQuerySearchIT.java +++ b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorQuerySearchIT.java @@ -193,6 +193,7 @@ public void testPercolatorRangeQueries() throws Exception { SearchResponse response = client().prepareSearch() .setQuery(new PercolateQueryBuilder("query", source, XContentType.JSON)) .get(); + logger.info("response={}", response); assertHitCount(response, 2); assertThat(response.getHits().getAt(0).getId(), equalTo("3")); assertThat(response.getHits().getAt(1).getId(), equalTo("1")); @@ -849,34 +850,4 @@ public void testPercolatorQueryViaMultiSearch() throws Exception { assertThat(item.getFailureMessage(), containsString("[test/type/6] couldn't be found")); } - public void testBoostFields() throws Exception { - XContentBuilder mappingSource = XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties") - .startObject("status").field("type", "keyword").endObject() - .startObject("price").field("type", "long").endObject() - .startObject("query").field("type", "percolator") - .startObject("boost_fields").field("status", 0.0F).endObject() - .endObject() - .endObject().endObject().endObject(); - assertAcked(client().admin().indices().prepareCreate("test").addMapping("type", mappingSource)); - - client().prepareIndex("test", "type", "q1") - .setSource(jsonBuilder().startObject().field("query", boolQuery() - .must(matchQuery("status", "sold")) - .must(matchQuery("price", 100)) - ).endObject()) - .get(); - refresh(); - - SearchResponse response = client().prepareSearch() - .setQuery(new PercolateQueryBuilder("query", - XContentFactory.jsonBuilder().startObject() - .field("status", "sold") - .field("price", 100) - .endObject().bytes(), XContentType.JSON)) - .get(); - assertHitCount(response, 1); - assertThat(response.getHits().getAt(0).getId(), equalTo("q1")); - } - } diff --git 
a/modules/percolator/src/test/java/org/elasticsearch/percolator/QueryAnalyzerTests.java b/modules/percolator/src/test/java/org/elasticsearch/percolator/QueryAnalyzerTests.java index a6af5fb9dfe38..f2f5a4e586170 100644 --- a/modules/percolator/src/test/java/org/elasticsearch/percolator/QueryAnalyzerTests.java +++ b/modules/percolator/src/test/java/org/elasticsearch/percolator/QueryAnalyzerTests.java @@ -52,6 +52,7 @@ import org.apache.lucene.search.spans.SpanOrQuery; import org.apache.lucene.search.spans.SpanTermQuery; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.Version; import org.elasticsearch.common.lucene.search.function.CombineFunction; import org.elasticsearch.common.lucene.search.function.FunctionScoreQuery; import org.elasticsearch.common.lucene.search.function.RandomScoreFunction; @@ -63,12 +64,9 @@ import java.util.ArrayList; import java.util.Arrays; -import java.util.Collections; import java.util.Comparator; -import java.util.HashMap; import java.util.HashSet; import java.util.List; -import java.util.Map; import java.util.Set; import java.util.function.Consumer; import java.util.stream.Collectors; @@ -84,8 +82,9 @@ public class QueryAnalyzerTests extends ESTestCase { public void testExtractQueryMetadata_termQuery() { TermQuery termQuery = new TermQuery(new Term("_field", "_term")); - Result result = analyze(termQuery, Collections.emptyMap()); + Result result = analyze(termQuery, Version.CURRENT); assertThat(result.verified, is(true)); + assertThat(result.minimumShouldMatch, equalTo(1)); List terms = new ArrayList<>(result.extractions); assertThat(terms.size(), equalTo(1)); assertThat(terms.get(0).field(), equalTo(termQuery.getTerm().field())); @@ -94,8 +93,9 @@ public void testExtractQueryMetadata_termQuery() { public void testExtractQueryMetadata_termsQuery() { TermInSetQuery termsQuery = new TermInSetQuery("_field", new BytesRef("_term1"), new BytesRef("_term2")); - Result result = analyze(termsQuery, Collections.emptyMap()); + Result result = analyze(termsQuery, Version.CURRENT); assertThat(result.verified, is(true)); + assertThat(result.minimumShouldMatch, equalTo(1)); List terms = new ArrayList<>(result.extractions); terms.sort(Comparator.comparing(qt -> qt.term)); assertThat(terms.size(), equalTo(2)); @@ -107,23 +107,55 @@ public void testExtractQueryMetadata_termsQuery() { public void testExtractQueryMetadata_phraseQuery() { PhraseQuery phraseQuery = new PhraseQuery("_field", "_term1", "term2"); - Result result = analyze(phraseQuery, Collections.emptyMap()); + Result result = analyze(phraseQuery, Version.CURRENT); assertThat(result.verified, is(false)); + assertThat(result.minimumShouldMatch, equalTo(2)); List terms = new ArrayList<>(result.extractions); - assertThat(terms.size(), equalTo(1)); + terms.sort(Comparator.comparing(qt -> qt.term)); + assertThat(terms.size(), equalTo(2)); assertThat(terms.get(0).field(), equalTo(phraseQuery.getTerms()[0].field())); assertThat(terms.get(0).bytes(), equalTo(phraseQuery.getTerms()[0].bytes())); + assertThat(terms.get(1).field(), equalTo(phraseQuery.getTerms()[1].field())); + assertThat(terms.get(1).bytes(), equalTo(phraseQuery.getTerms()[1].bytes())); } public void testExtractQueryMetadata_multiPhraseQuery() { + MultiPhraseQuery multiPhraseQuery = new MultiPhraseQuery.Builder() + .add(new Term("_field", "_term1")) + .add(new Term[] {new Term("_field", "_term2"), new Term("_field", "_term3")}) + .add(new Term[] {new Term("_field", "_term4"), new Term("_field", "_term5")}) + .add(new Term[] {new Term("_field", 
"_term6")}) + .build(); + Result result = analyze(multiPhraseQuery, Version.CURRENT); + assertThat(result.verified, is(false)); + assertThat(result.minimumShouldMatch, equalTo(4)); + List terms = new ArrayList<>(result.extractions); + terms.sort(Comparator.comparing(qt -> qt.term)); + assertThat(terms.size(), equalTo(6)); + assertThat(terms.get(0).field(), equalTo("_field")); + assertThat(terms.get(0).bytes().utf8ToString(), equalTo("_term1")); + assertThat(terms.get(1).field(), equalTo("_field")); + assertThat(terms.get(1).bytes().utf8ToString(), equalTo("_term2")); + assertThat(terms.get(2).field(), equalTo("_field")); + assertThat(terms.get(2).bytes().utf8ToString(), equalTo("_term3")); + assertThat(terms.get(3).field(), equalTo("_field")); + assertThat(terms.get(3).bytes().utf8ToString(), equalTo("_term4")); + assertThat(terms.get(4).field(), equalTo("_field")); + assertThat(terms.get(4).bytes().utf8ToString(), equalTo("_term5")); + assertThat(terms.get(5).field(), equalTo("_field")); + assertThat(terms.get(5).bytes().utf8ToString(), equalTo("_term6")); + } + + public void testExtractQueryMetadata_multiPhraseQuery_pre6dot1() { MultiPhraseQuery multiPhraseQuery = new MultiPhraseQuery.Builder() .add(new Term("_field", "_long_term")) .add(new Term[] {new Term("_field", "_long_term"), new Term("_field", "_term")}) .add(new Term[] {new Term("_field", "_long_term"), new Term("_field", "_very_long_term")}) .add(new Term[] {new Term("_field", "_very_long_term")}) .build(); - Result result = analyze(multiPhraseQuery, Collections.emptyMap()); + Result result = analyze(multiPhraseQuery, Version.V_6_0_0); assertThat(result.verified, is(false)); + assertThat(result.minimumShouldMatch, equalTo(1)); List terms = new ArrayList<>(result.extractions); assertThat(terms.size(), equalTo(1)); assertThat(terms.get(0).field(), equalTo("_field")); @@ -131,6 +163,39 @@ public void testExtractQueryMetadata_multiPhraseQuery() { } public void testExtractQueryMetadata_booleanQuery() { + BooleanQuery.Builder builder = new BooleanQuery.Builder(); + TermQuery termQuery1 = new TermQuery(new Term("_field", "term0")); + builder.add(termQuery1, BooleanClause.Occur.SHOULD); + PhraseQuery phraseQuery = new PhraseQuery("_field", "term1", "term2"); + builder.add(phraseQuery, BooleanClause.Occur.SHOULD); + + BooleanQuery.Builder subBuilder = new BooleanQuery.Builder(); + TermQuery termQuery2 = new TermQuery(new Term("_field1", "term4")); + subBuilder.add(termQuery2, BooleanClause.Occur.MUST); + TermQuery termQuery3 = new TermQuery(new Term("_field3", "term5")); + subBuilder.add(termQuery3, BooleanClause.Occur.MUST); + builder.add(subBuilder.build(), BooleanClause.Occur.SHOULD); + + BooleanQuery booleanQuery = builder.build(); + Result result = analyze(booleanQuery, Version.CURRENT); + assertThat("Should clause with phrase query isn't verified, so entire query can't be verified", result.verified, is(false)); + assertThat(result.minimumShouldMatch, equalTo(1)); + List terms = new ArrayList<>(result.extractions); + terms.sort(Comparator.comparing(qt -> qt.term)); + assertThat(terms.size(), equalTo(5)); + assertThat(terms.get(0).field(), equalTo(termQuery1.getTerm().field())); + assertThat(terms.get(0).bytes(), equalTo(termQuery1.getTerm().bytes())); + assertThat(terms.get(1).field(), equalTo(phraseQuery.getTerms()[0].field())); + assertThat(terms.get(1).bytes(), equalTo(phraseQuery.getTerms()[0].bytes())); + assertThat(terms.get(2).field(), equalTo(phraseQuery.getTerms()[1].field())); + assertThat(terms.get(2).bytes(), 
equalTo(phraseQuery.getTerms()[1].bytes())); + assertThat(terms.get(3).field(), equalTo(termQuery2.getTerm().field())); + assertThat(terms.get(3).bytes(), equalTo(termQuery2.getTerm().bytes())); + assertThat(terms.get(4).field(), equalTo(termQuery3.getTerm().field())); + assertThat(terms.get(4).bytes(), equalTo(termQuery3.getTerm().bytes())); + } + + public void testExtractQueryMetadata_booleanQuery_pre6dot1() { BooleanQuery.Builder builder = new BooleanQuery.Builder(); TermQuery termQuery1 = new TermQuery(new Term("_field", "_term")); builder.add(termQuery1, BooleanClause.Occur.SHOULD); @@ -145,8 +210,9 @@ public void testExtractQueryMetadata_booleanQuery() { builder.add(subBuilder.build(), BooleanClause.Occur.SHOULD); BooleanQuery booleanQuery = builder.build(); - Result result = analyze(booleanQuery, Collections.emptyMap()); + Result result = analyze(booleanQuery, Version.V_6_0_0); assertThat("Should clause with phrase query isn't verified, so entire query can't be verified", result.verified, is(false)); + assertThat(result.minimumShouldMatch, equalTo(1)); List terms = new ArrayList<>(result.extractions); terms.sort(Comparator.comparing(qt -> qt.term)); assertThat(terms.size(), equalTo(3)); @@ -158,6 +224,50 @@ public void testExtractQueryMetadata_booleanQuery() { assertThat(terms.get(2).bytes(), equalTo(termQuery3.getTerm().bytes())); } + public void testExtractQueryMetadata_booleanQuery_msm() { + BooleanQuery.Builder builder = new BooleanQuery.Builder(); + builder.setMinimumNumberShouldMatch(2); + TermQuery termQuery1 = new TermQuery(new Term("_field", "_term1")); + builder.add(termQuery1, BooleanClause.Occur.SHOULD); + TermQuery termQuery2 = new TermQuery(new Term("_field", "_term2")); + builder.add(termQuery2, BooleanClause.Occur.SHOULD); + TermQuery termQuery3 = new TermQuery(new Term("_field", "_term3")); + builder.add(termQuery3, BooleanClause.Occur.SHOULD); + + BooleanQuery booleanQuery = builder.build(); + Result result = analyze(booleanQuery, Version.CURRENT); + assertThat(result.verified, is(true)); + assertThat(result.minimumShouldMatch, equalTo(2)); + List extractions = new ArrayList<>(result.extractions); + extractions.sort(Comparator.comparing(extraction -> extraction.term)); + assertThat(extractions.size(), equalTo(3)); + assertThat(extractions.get(0).term, equalTo(new Term("_field", "_term1"))); + assertThat(extractions.get(1).term, equalTo(new Term("_field", "_term2"))); + assertThat(extractions.get(2).term, equalTo(new Term("_field", "_term3"))); + } + + public void testExtractQueryMetadata_booleanQuery_msm_pre6dot1() { + BooleanQuery.Builder builder = new BooleanQuery.Builder(); + builder.setMinimumNumberShouldMatch(2); + TermQuery termQuery1 = new TermQuery(new Term("_field", "_term1")); + builder.add(termQuery1, BooleanClause.Occur.SHOULD); + TermQuery termQuery2 = new TermQuery(new Term("_field", "_term2")); + builder.add(termQuery2, BooleanClause.Occur.SHOULD); + TermQuery termQuery3 = new TermQuery(new Term("_field", "_term3")); + builder.add(termQuery3, BooleanClause.Occur.SHOULD); + + BooleanQuery booleanQuery = builder.build(); + Result result = analyze(booleanQuery, Version.V_6_0_0); + assertThat(result.verified, is(false)); + assertThat(result.minimumShouldMatch, equalTo(1)); + List extractions = new ArrayList<>(result.extractions); + extractions.sort(Comparator.comparing(extraction -> extraction.term)); + assertThat(extractions.size(), equalTo(3)); + assertThat(extractions.get(0).term, equalTo(new Term("_field", "_term1"))); + 
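
These _msm tests pin down how a should-clause disjunction with minimumNumberShouldMatch set combines per-clause values. A standalone sketch of one way to compute it, following the "sum the lowest required clauses, otherwise we're too strict and queries may not match" comment earlier in this diff (names invented, not the PR's handleDisjunction):

import java.util.Arrays;

// Illustrative sketch: combine per-clause msm values for SHOULD clauses. Taking the
// lowest values keeps the candidate filter from being stricter than the original query.
class DisjunctionMsmSketch {

    static int combinedMsm(int[] msmPerClause, int minimumNumberShouldMatch) {
        int[] sorted = msmPerClause.clone();
        Arrays.sort(sorted);
        int limit = Math.min(sorted.length, Math.max(1, minimumNumberShouldMatch));
        int msm = 0;
        for (int i = 0; i < limit; i++) {
            msm += sorted[i];
        }
        return msm;
    }

    public static void main(String[] args) {
        // three term clauses (msm 1 each) with minimum_number_should_match = 2 -> combined msm 2,
        // matching testExtractQueryMetadata_booleanQuery_msm above
        System.out.println(combinedMsm(new int[] {1, 1, 1}, 2));
    }
}
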
assertThat(extractions.get(1).term, equalTo(new Term("_field", "_term2"))); + assertThat(extractions.get(2).term, equalTo(new Term("_field", "_term3"))); + } + public void testExtractQueryMetadata_booleanQuery_onlyShould() { BooleanQuery.Builder builder = new BooleanQuery.Builder(); TermQuery termQuery1 = new TermQuery(new Term("_field", "_term1")); @@ -173,8 +283,9 @@ public void testExtractQueryMetadata_booleanQuery_onlyShould() { builder.add(subBuilder.build(), BooleanClause.Occur.SHOULD); BooleanQuery booleanQuery = builder.build(); - Result result = analyze(booleanQuery, Collections.emptyMap()); + Result result = analyze(booleanQuery, Version.CURRENT); assertThat(result.verified, is(true)); + assertThat(result.minimumShouldMatch, equalTo(1)); List terms = new ArrayList<>(result.extractions); terms.sort(Comparator.comparing(qt -> qt.term)); assertThat(terms.size(), equalTo(4)); @@ -196,12 +307,16 @@ public void testExtractQueryMetadata_booleanQueryWithMustNot() { builder.add(phraseQuery, BooleanClause.Occur.SHOULD); BooleanQuery booleanQuery = builder.build(); - Result result = analyze(booleanQuery, Collections.emptyMap()); + Result result = analyze(booleanQuery, Version.CURRENT); assertThat(result.verified, is(false)); + assertThat(result.minimumShouldMatch, equalTo(2)); List terms = new ArrayList<>(result.extractions); - assertThat(terms.size(), equalTo(1)); + assertThat(terms.size(), equalTo(2)); + terms.sort(Comparator.comparing(qt -> qt.term)); assertThat(terms.get(0).field(), equalTo(phraseQuery.getTerms()[0].field())); assertThat(terms.get(0).bytes(), equalTo(phraseQuery.getTerms()[0].bytes())); + assertThat(terms.get(1).field(), equalTo(phraseQuery.getTerms()[1].field())); + assertThat(terms.get(1).bytes(), equalTo(phraseQuery.getTerms()[1].bytes())); } public void testExactMatch_booleanQuery() { @@ -210,59 +325,119 @@ public void testExactMatch_booleanQuery() { builder.add(termQuery1, BooleanClause.Occur.SHOULD); TermQuery termQuery2 = new TermQuery(new Term("_field", "_term2")); builder.add(termQuery2, BooleanClause.Occur.SHOULD); - Result result = analyze(builder.build(), Collections.emptyMap()); + Result result = analyze(builder.build(), Version.CURRENT); assertThat("All clauses are exact, so candidate matches are verified", result.verified, is(true)); + assertThat(result.minimumShouldMatch, equalTo(1)); builder = new BooleanQuery.Builder(); builder.add(termQuery1, BooleanClause.Occur.SHOULD); PhraseQuery phraseQuery1 = new PhraseQuery("_field", "_term1", "_term2"); builder.add(phraseQuery1, BooleanClause.Occur.SHOULD); - result = analyze(builder.build(), Collections.emptyMap()); + result = analyze(builder.build(), Version.CURRENT); assertThat("Clause isn't exact, so candidate matches are not verified", result.verified, is(false)); + assertThat(result.minimumShouldMatch, equalTo(1)); builder = new BooleanQuery.Builder(); builder.add(phraseQuery1, BooleanClause.Occur.SHOULD); PhraseQuery phraseQuery2 = new PhraseQuery("_field", "_term3", "_term4"); builder.add(phraseQuery2, BooleanClause.Occur.SHOULD); - result = analyze(builder.build(), Collections.emptyMap()); + result = analyze(builder.build(), Version.CURRENT); assertThat("No clause is exact, so candidate matches are not verified", result.verified, is(false)); + assertThat(result.minimumShouldMatch, equalTo(2)); builder = new BooleanQuery.Builder(); builder.add(termQuery1, BooleanClause.Occur.MUST_NOT); builder.add(termQuery2, BooleanClause.Occur.SHOULD); - result = analyze(builder.build(), Collections.emptyMap()); + 
result = analyze(builder.build(), Version.CURRENT); assertThat("There is a must_not clause, so candidate matches are not verified", result.verified, is(false)); + assertThat(result.minimumShouldMatch, equalTo(1)); builder = new BooleanQuery.Builder(); builder.setMinimumNumberShouldMatch(randomIntBetween(2, 32)); builder.add(termQuery1, BooleanClause.Occur.SHOULD); builder.add(termQuery2, BooleanClause.Occur.SHOULD); - result = analyze(builder.build(), Collections.emptyMap()); - assertThat("Minimum match is >= 1, so candidate matches are not verified", result.verified, is(false)); + result = analyze(builder.build(), Version.CURRENT); + assertThat("Minimum match has no impact on whether the result is verified", result.verified, is(true)); + assertThat("msm is at least two so result.minimumShouldMatch should be 2 too", result.minimumShouldMatch, equalTo(2)); builder = new BooleanQuery.Builder(); builder.add(termQuery1, randomBoolean() ? BooleanClause.Occur.MUST : BooleanClause.Occur.FILTER); - result = analyze(builder.build(), Collections.emptyMap()); - assertThat("Single required clause, so candidate matches are verified", result.verified, is(false)); + result = analyze(builder.build(), Version.CURRENT); + assertThat("Required clauses are also taken into account when determining whether the result is verified", result.verified, is(true)); + assertThat(result.minimumShouldMatch, equalTo(1)); builder = new BooleanQuery.Builder(); builder.add(termQuery1, randomBoolean() ? BooleanClause.Occur.MUST : BooleanClause.Occur.FILTER); builder.add(termQuery2, randomBoolean() ? BooleanClause.Occur.MUST : BooleanClause.Occur.FILTER); - result = analyze(builder.build(), Collections.emptyMap()); - assertThat("Two or more required clauses, so candidate matches are not verified", result.verified, is(false)); + result = analyze(builder.build(), Version.CURRENT); + assertThat("Required clauses are also taken into account when determining whether the result is verified", result.verified, is(true)); + assertThat(result.minimumShouldMatch, equalTo(2)); builder = new BooleanQuery.Builder(); builder.add(termQuery1, randomBoolean() ?
BooleanClause.Occur.MUST : BooleanClause.Occur.FILTER); builder.add(termQuery2, BooleanClause.Occur.MUST_NOT); - result = analyze(builder.build(), Collections.emptyMap()); - assertThat("Required and prohibited clauses, so candidate matches are not verified", result.verified, is(false)); + result = analyze(builder.build(), Version.CURRENT); + assertThat("Prohibited clause, so candidate matches are not verified", result.verified, is(false)); + assertThat(result.minimumShouldMatch, equalTo(1)); + } + + public void testBooleanQueryWithMustAndShouldClauses() { + BooleanQuery.Builder builder = new BooleanQuery.Builder(); + TermQuery termQuery1 = new TermQuery(new Term("_field", "_term1")); + builder.add(termQuery1, BooleanClause.Occur.SHOULD); + TermQuery termQuery2 = new TermQuery(new Term("_field", "_term2")); + builder.add(termQuery2, BooleanClause.Occur.SHOULD); + TermQuery termQuery3 = new TermQuery(new Term("_field", "_term3")); + builder.add(termQuery3, BooleanClause.Occur.MUST); + Result result = analyze(builder.build(), Version.CURRENT); + assertThat("Must clause is exact, so this is a verified candidate match", result.verified, is(true)); + assertThat(result.minimumShouldMatch, equalTo(1)); + assertThat(result.extractions.size(), equalTo(1)); + List extractions = new ArrayList<>(result.extractions); + assertThat(extractions.get(0).term, equalTo(new Term("_field", "_term3"))); + + builder.setMinimumNumberShouldMatch(1); + result = analyze(builder.build(), Version.CURRENT); + assertThat("Must clause is exact, but m_s_m is 1 so one should clause must match too", result.verified, is(false)); + assertThat(result.minimumShouldMatch, equalTo(1)); + assertThat(result.extractions.size(), equalTo(1)); + extractions = new ArrayList<>(result.extractions); + assertThat(extractions.get(0).term, equalTo(new Term("_field", "_term3"))); + + builder = new BooleanQuery.Builder(); + BooleanQuery.Builder innerBuilder = new BooleanQuery.Builder(); + innerBuilder.setMinimumNumberShouldMatch(2); + innerBuilder.add(termQuery1, BooleanClause.Occur.SHOULD); + innerBuilder.add(termQuery2, BooleanClause.Occur.SHOULD); + builder.add(innerBuilder.build(), BooleanClause.Occur.MUST); + builder.add(termQuery3, BooleanClause.Occur.MUST); + result = analyze(builder.build(), Version.CURRENT); + assertThat("Verified, because m_s_m is specified in an inner clause and not top level clause", result.verified, is(true)); + assertThat(result.minimumShouldMatch, equalTo(3)); + assertThat(result.extractions.size(), equalTo(3)); + extractions = new ArrayList<>(result.extractions); + extractions.sort(Comparator.comparing(key -> key.term)); + assertThat(extractions.get(0).term, equalTo(new Term("_field", "_term1"))); + assertThat(extractions.get(1).term, equalTo(new Term("_field", "_term2"))); + assertThat(extractions.get(2).term, equalTo(new Term("_field", "_term3"))); + + builder = new BooleanQuery.Builder(); + builder.add(innerBuilder.build(), BooleanClause.Occur.SHOULD); + builder.add(termQuery3, BooleanClause.Occur.MUST); + result = analyze(builder.build(), Version.CURRENT); + assertThat("Verified, because m_s_m is specified in an inner clause and not top level clause", result.verified, is(true)); + assertThat(result.minimumShouldMatch, equalTo(1)); + assertThat(result.extractions.size(), equalTo(1)); + extractions = new ArrayList<>(result.extractions); + assertThat(extractions.get(0).term, equalTo(new Term("_field", "_term3"))); } public void testExtractQueryMetadata_constantScoreQuery() { TermQuery termQuery1 = new 
TermQuery(new Term("_field", "_term")); ConstantScoreQuery constantScoreQuery = new ConstantScoreQuery(termQuery1); - Result result = analyze(constantScoreQuery, Collections.emptyMap()); + Result result = analyze(constantScoreQuery, Version.CURRENT); assertThat(result.verified, is(true)); + assertThat(result.minimumShouldMatch, equalTo(1)); List terms = new ArrayList<>(result.extractions); assertThat(terms.size(), equalTo(1)); assertThat(terms.get(0).field(), equalTo(termQuery1.getTerm().field())); @@ -272,8 +447,9 @@ public void testExtractQueryMetadata_constantScoreQuery() { public void testExtractQueryMetadata_boostQuery() { TermQuery termQuery1 = new TermQuery(new Term("_field", "_term")); BoostQuery constantScoreQuery = new BoostQuery(termQuery1, 1f); - Result result = analyze(constantScoreQuery, Collections.emptyMap()); + Result result = analyze(constantScoreQuery, Version.CURRENT); assertThat(result.verified, is(true)); + assertThat(result.minimumShouldMatch, equalTo(1)); List terms = new ArrayList<>(result.extractions); assertThat(terms.size(), equalTo(1)); assertThat(terms.get(0).field(), equalTo(termQuery1.getTerm().field())); @@ -284,11 +460,13 @@ public void testExtractQueryMetadata_commonTermsQuery() { CommonTermsQuery commonTermsQuery = new CommonTermsQuery(BooleanClause.Occur.SHOULD, BooleanClause.Occur.SHOULD, 100); commonTermsQuery.add(new Term("_field", "_term1")); commonTermsQuery.add(new Term("_field", "_term2")); - Result result = analyze(commonTermsQuery, Collections.emptyMap()); + Result result = analyze(commonTermsQuery, Version.CURRENT); assertThat(result.verified, is(false)); + assertThat(result.minimumShouldMatch, equalTo(1)); List terms = new ArrayList<>(result.extractions); terms.sort(Comparator.comparing(qt -> qt.term)); assertThat(terms.size(), equalTo(2)); + assertThat(result.minimumShouldMatch, equalTo(1)); assertThat(terms.get(0).field(), equalTo("_field")); assertThat(terms.get(0).text(), equalTo("_term1")); assertThat(terms.get(1).field(), equalTo("_field")); @@ -298,8 +476,9 @@ public void testExtractQueryMetadata_commonTermsQuery() { public void testExtractQueryMetadata_blendedTermQuery() { Term[] termsArr = new Term[]{new Term("_field", "_term1"), new Term("_field", "_term2")}; BlendedTermQuery commonTermsQuery = BlendedTermQuery.dismaxBlendedQuery(termsArr, 1.0f); - Result result = analyze(commonTermsQuery, Collections.emptyMap()); + Result result = analyze(commonTermsQuery, Version.CURRENT); assertThat(result.verified, is(true)); + assertThat(result.minimumShouldMatch, equalTo(1)); List terms = new ArrayList<>(result.extractions); terms.sort(Comparator.comparing(qt -> qt.term)); assertThat(terms.size(), equalTo(2)); @@ -322,8 +501,9 @@ public void testExtractQueryMetadata_spanTermQuery() { // 4) FieldMaskingSpanQuery is a tricky query so we shouldn't optimize this SpanTermQuery spanTermQuery1 = new SpanTermQuery(new Term("_field", "_short_term")); - Result result = analyze(spanTermQuery1, Collections.emptyMap()); + Result result = analyze(spanTermQuery1, Version.CURRENT); assertThat(result.verified, is(true)); + assertThat(result.minimumShouldMatch, equalTo(1)); assertTermsEqual(result.extractions, spanTermQuery1.getTerm()); } @@ -333,8 +513,21 @@ public void testExtractQueryMetadata_spanNearQuery() { SpanNearQuery spanNearQuery = new SpanNearQuery.Builder("_field", true) .addClause(spanTermQuery1).addClause(spanTermQuery2).build(); - Result result = analyze(spanNearQuery, Collections.emptyMap()); + Result result = analyze(spanNearQuery, 
Version.CURRENT); assertThat(result.verified, is(false)); + assertThat(result.minimumShouldMatch, equalTo(2)); + assertTermsEqual(result.extractions, spanTermQuery1.getTerm(), spanTermQuery2.getTerm()); + } + + public void testExtractQueryMetadata_spanNearQuery_pre6dot1() { + SpanTermQuery spanTermQuery1 = new SpanTermQuery(new Term("_field", "_short_term")); + SpanTermQuery spanTermQuery2 = new SpanTermQuery(new Term("_field", "_very_long_term")); + SpanNearQuery spanNearQuery = new SpanNearQuery.Builder("_field", true) + .addClause(spanTermQuery1).addClause(spanTermQuery2).build(); + + Result result = analyze(spanNearQuery, Version.V_6_0_0); + assertThat(result.verified, is(false)); + assertThat(result.minimumShouldMatch, equalTo(1)); assertTermsEqual(result.extractions, spanTermQuery2.getTerm()); } @@ -342,16 +535,18 @@ public void testExtractQueryMetadata_spanOrQuery() { SpanTermQuery spanTermQuery1 = new SpanTermQuery(new Term("_field", "_short_term")); SpanTermQuery spanTermQuery2 = new SpanTermQuery(new Term("_field", "_very_long_term")); SpanOrQuery spanOrQuery = new SpanOrQuery(spanTermQuery1, spanTermQuery2); - Result result = analyze(spanOrQuery, Collections.emptyMap()); + Result result = analyze(spanOrQuery, Version.CURRENT); assertThat(result.verified, is(false)); + assertThat(result.minimumShouldMatch, equalTo(1)); assertTermsEqual(result.extractions, spanTermQuery1.getTerm(), spanTermQuery2.getTerm()); } public void testExtractQueryMetadata_spanFirstQuery() { SpanTermQuery spanTermQuery1 = new SpanTermQuery(new Term("_field", "_short_term")); SpanFirstQuery spanFirstQuery = new SpanFirstQuery(spanTermQuery1, 20); - Result result = analyze(spanFirstQuery, Collections.emptyMap()); + Result result = analyze(spanFirstQuery, Version.CURRENT); assertThat(result.verified, is(false)); + assertThat(result.minimumShouldMatch, equalTo(1)); assertTermsEqual(result.extractions, spanTermQuery1.getTerm()); } @@ -359,47 +554,54 @@ public void testExtractQueryMetadata_spanNotQuery() { SpanTermQuery spanTermQuery1 = new SpanTermQuery(new Term("_field", "_short_term")); SpanTermQuery spanTermQuery2 = new SpanTermQuery(new Term("_field", "_very_long_term")); SpanNotQuery spanNotQuery = new SpanNotQuery(spanTermQuery1, spanTermQuery2); - Result result = analyze(spanNotQuery, Collections.emptyMap()); + Result result = analyze(spanNotQuery, Version.CURRENT); assertThat(result.verified, is(false)); + assertThat(result.minimumShouldMatch, equalTo(1)); assertTermsEqual(result.extractions, spanTermQuery1.getTerm()); } public void testExtractQueryMetadata_matchNoDocsQuery() { - Result result = analyze(new MatchNoDocsQuery("sometimes there is no reason at all"), Collections.emptyMap()); + Result result = analyze(new MatchNoDocsQuery("sometimes there is no reason at all"), Version.CURRENT); assertThat(result.verified, is(true)); assertEquals(0, result.extractions.size()); + assertThat(result.minimumShouldMatch, equalTo(1)); BooleanQuery.Builder bq = new BooleanQuery.Builder(); bq.add(new TermQuery(new Term("field", "value")), BooleanClause.Occur.MUST); bq.add(new MatchNoDocsQuery("sometimes there is no reason at all"), BooleanClause.Occur.MUST); - result = analyze(bq.build(), Collections.emptyMap()); - assertThat(result.verified, is(false)); - assertEquals(0, result.extractions.size()); + result = analyze(bq.build(), Version.CURRENT); + assertThat(result.verified, is(true)); + assertEquals(1, result.extractions.size()); + assertThat(result.minimumShouldMatch, equalTo(2)); + 
assertTermsEqual(result.extractions, new Term("field", "value")); bq = new BooleanQuery.Builder(); bq.add(new TermQuery(new Term("field", "value")), BooleanClause.Occur.SHOULD); bq.add(new MatchNoDocsQuery("sometimes there is no reason at all"), BooleanClause.Occur.SHOULD); - result = analyze(bq.build(), Collections.emptyMap()); + result = analyze(bq.build(), Version.CURRENT); assertThat(result.verified, is(true)); + assertThat(result.minimumShouldMatch, equalTo(1)); assertTermsEqual(result.extractions, new Term("field", "value")); DisjunctionMaxQuery disjunctionMaxQuery = new DisjunctionMaxQuery( Arrays.asList(new TermQuery(new Term("field", "value")), new MatchNoDocsQuery("sometimes there is no reason at all")), 1f ); - result = analyze(disjunctionMaxQuery, Collections.emptyMap()); + result = analyze(disjunctionMaxQuery, Version.CURRENT); assertThat(result.verified, is(true)); + assertThat(result.minimumShouldMatch, equalTo(1)); assertTermsEqual(result.extractions, new Term("field", "value")); } public void testExtractQueryMetadata_matchAllDocsQuery() { - expectThrows(UnsupportedQueryException.class, () -> analyze(new MatchAllDocsQuery(), Collections.emptyMap())); + expectThrows(UnsupportedQueryException.class, () -> analyze(new MatchAllDocsQuery(), Version.CURRENT)); BooleanQuery.Builder builder = new BooleanQuery.Builder(); builder.add(new TermQuery(new Term("field", "value")), BooleanClause.Occur.MUST); builder.add(new MatchAllDocsQuery(), BooleanClause.Occur.MUST); - Result result = analyze(builder.build(), Collections.emptyMap()); + Result result = analyze(builder.build(), Version.CURRENT); assertThat(result.verified, is(false)); + assertThat(result.minimumShouldMatch, equalTo(1)); assertTermsEqual(result.extractions, new Term("field", "value")); builder = new BooleanQuery.Builder(); @@ -407,40 +609,40 @@ public void testExtractQueryMetadata_matchAllDocsQuery() { builder.add(new MatchAllDocsQuery(), BooleanClause.Occur.MUST); builder.add(new MatchAllDocsQuery(), BooleanClause.Occur.MUST); BooleanQuery bq1 = builder.build(); - expectThrows(UnsupportedQueryException.class, () -> analyze(bq1, Collections.emptyMap())); + expectThrows(UnsupportedQueryException.class, () -> analyze(bq1, Version.CURRENT)); builder = new BooleanQuery.Builder(); builder.add(new MatchAllDocsQuery(), BooleanClause.Occur.MUST_NOT); builder.add(new MatchAllDocsQuery(), BooleanClause.Occur.MUST); builder.add(new MatchAllDocsQuery(), BooleanClause.Occur.MUST); BooleanQuery bq2 = builder.build(); - expectThrows(UnsupportedQueryException.class, () -> analyze(bq2, Collections.emptyMap())); + expectThrows(UnsupportedQueryException.class, () -> analyze(bq2, Version.CURRENT)); builder = new BooleanQuery.Builder(); builder.add(new MatchAllDocsQuery(), BooleanClause.Occur.SHOULD); builder.add(new MatchAllDocsQuery(), BooleanClause.Occur.SHOULD); builder.add(new MatchAllDocsQuery(), BooleanClause.Occur.SHOULD); BooleanQuery bq3 = builder.build(); - expectThrows(UnsupportedQueryException.class, () -> analyze(bq3, Collections.emptyMap())); + expectThrows(UnsupportedQueryException.class, () -> analyze(bq3, Version.CURRENT)); builder = new BooleanQuery.Builder(); builder.add(new MatchAllDocsQuery(), BooleanClause.Occur.MUST_NOT); builder.add(new MatchAllDocsQuery(), BooleanClause.Occur.SHOULD); builder.add(new MatchAllDocsQuery(), BooleanClause.Occur.SHOULD); BooleanQuery bq4 = builder.build(); - expectThrows(UnsupportedQueryException.class, () -> analyze(bq4, Collections.emptyMap())); + 
expectThrows(UnsupportedQueryException.class, () -> analyze(bq4, Version.CURRENT)); builder = new BooleanQuery.Builder(); builder.add(new TermQuery(new Term("field", "value")), BooleanClause.Occur.SHOULD); builder.add(new MatchAllDocsQuery(), BooleanClause.Occur.SHOULD); BooleanQuery bq5 = builder.build(); - expectThrows(UnsupportedQueryException.class, () -> analyze(bq5, Collections.emptyMap())); + expectThrows(UnsupportedQueryException.class, () -> analyze(bq5, Version.CURRENT)); } public void testExtractQueryMetadata_unsupportedQuery() { TermRangeQuery termRangeQuery = new TermRangeQuery("_field", null, null, true, false); UnsupportedQueryException e = expectThrows(UnsupportedQueryException.class, - () -> analyze(termRangeQuery, Collections.emptyMap())); + () -> analyze(termRangeQuery, Version.CURRENT)); assertThat(e.getUnsupportedQuery(), sameInstance(termRangeQuery)); TermQuery termQuery1 = new TermQuery(new Term("_field", "_term")); @@ -449,7 +651,7 @@ public void testExtractQueryMetadata_unsupportedQuery() { builder.add(termRangeQuery, BooleanClause.Occur.SHOULD); BooleanQuery bq = builder.build(); - e = expectThrows(UnsupportedQueryException.class, () -> analyze(bq, Collections.emptyMap())); + e = expectThrows(UnsupportedQueryException.class, () -> analyze(bq, Version.CURRENT)); assertThat(e.getUnsupportedQuery(), sameInstance(termRangeQuery)); } @@ -462,8 +664,9 @@ public void testExtractQueryMetadata_unsupportedQueryInBoolQueryWithMustClauses( builder.add(unsupportedQuery, BooleanClause.Occur.MUST); BooleanQuery bq1 = builder.build(); - Result result = analyze(bq1, Collections.emptyMap()); + Result result = analyze(bq1, Version.CURRENT); assertThat(result.verified, is(false)); + assertThat(result.minimumShouldMatch, equalTo(1)); assertTermsEqual(result.extractions, termQuery1.getTerm()); TermQuery termQuery2 = new TermQuery(new Term("_field", "_longer_term")); @@ -472,15 +675,16 @@ public void testExtractQueryMetadata_unsupportedQueryInBoolQueryWithMustClauses( builder.add(termQuery2, BooleanClause.Occur.MUST); builder.add(unsupportedQuery, BooleanClause.Occur.MUST); bq1 = builder.build(); - result = analyze(bq1, Collections.emptyMap()); + result = analyze(bq1, Version.CURRENT); assertThat(result.verified, is(false)); - assertTermsEqual(result.extractions, termQuery2.getTerm()); + assertThat(result.minimumShouldMatch, equalTo(2)); + assertTermsEqual(result.extractions, termQuery1.getTerm(), termQuery2.getTerm()); builder = new BooleanQuery.Builder(); builder.add(unsupportedQuery, BooleanClause.Occur.MUST); builder.add(unsupportedQuery, BooleanClause.Occur.MUST); BooleanQuery bq2 = builder.build(); - UnsupportedQueryException e = expectThrows(UnsupportedQueryException.class, () -> analyze(bq2, Collections.emptyMap())); + UnsupportedQueryException e = expectThrows(UnsupportedQueryException.class, () -> analyze(bq2, Version.CURRENT)); assertThat(e.getUnsupportedQuery(), sameInstance(unsupportedQuery)); } @@ -493,8 +697,9 @@ public void testExtractQueryMetadata_disjunctionMaxQuery() { Arrays.asList(termQuery1, termQuery2, termQuery3, termQuery4), 0.1f ); - Result result = analyze(disjunctionMaxQuery, Collections.emptyMap()); + Result result = analyze(disjunctionMaxQuery, Version.CURRENT); assertThat(result.verified, is(true)); + assertThat(result.minimumShouldMatch, equalTo(1)); List terms = new ArrayList<>(result.extractions); terms.sort(Comparator.comparing(qt -> qt.term)); assertThat(terms.size(), equalTo(4)); @@ -511,8 +716,9 @@ public void 
testExtractQueryMetadata_disjunctionMaxQuery() { Arrays.asList(termQuery1, termQuery2, termQuery3, new PhraseQuery("_field", "_term4")), 0.1f ); - result = analyze(disjunctionMaxQuery, Collections.emptyMap()); + result = analyze(disjunctionMaxQuery, Version.CURRENT); assertThat(result.verified, is(false)); + assertThat(result.minimumShouldMatch, equalTo(1)); terms = new ArrayList<>(result.extractions); terms.sort(Comparator.comparing(qt -> qt.term)); assertThat(terms.size(), equalTo(4)); @@ -528,148 +734,91 @@ public void testExtractQueryMetadata_disjunctionMaxQuery() { public void testSynonymQuery() { SynonymQuery query = new SynonymQuery(); - Result result = analyze(query, Collections.emptyMap()); + Result result = analyze(query, Version.CURRENT); assertThat(result.verified, is(true)); + assertThat(result.minimumShouldMatch, equalTo(1)); assertThat(result.extractions.isEmpty(), is(true)); query = new SynonymQuery(new Term("_field", "_value1"), new Term("_field", "_value2")); - result = analyze(query, Collections.emptyMap()); + result = analyze(query, Version.CURRENT); assertThat(result.verified, is(true)); + assertThat(result.minimumShouldMatch, equalTo(1)); assertTermsEqual(result.extractions, new Term("_field", "_value1"), new Term("_field", "_value2")); } public void testFunctionScoreQuery() { TermQuery termQuery = new TermQuery(new Term("_field", "_value")); FunctionScoreQuery functionScoreQuery = new FunctionScoreQuery(termQuery, new RandomScoreFunction(0, 0, null)); - Result result = analyze(functionScoreQuery, Collections.emptyMap()); + Result result = analyze(functionScoreQuery, Version.CURRENT); assertThat(result.verified, is(true)); + assertThat(result.minimumShouldMatch, equalTo(1)); assertTermsEqual(result.extractions, new Term("_field", "_value")); functionScoreQuery = new FunctionScoreQuery(termQuery, new RandomScoreFunction(0, 0, null), CombineFunction.MULTIPLY, 1f, 10f); - result = analyze(functionScoreQuery, Collections.emptyMap()); + result = analyze(functionScoreQuery, Version.CURRENT); assertThat(result.verified, is(false)); + assertThat(result.minimumShouldMatch, equalTo(1)); assertTermsEqual(result.extractions, new Term("_field", "_value")); } public void testSelectBestExtraction() { Set queryTerms1 = terms(new int[0], "12", "1234", "12345"); Set queryTerms2 = terms(new int[0], "123", "1234", "12345"); - Set result = selectBestExtraction(Collections.emptyMap(), queryTerms1, queryTerms2); + Set result = selectBestExtraction(queryTerms1, queryTerms2); assertSame(queryTerms2, result); queryTerms1 = terms(new int[]{1, 2, 3}); queryTerms2 = terms(new int[]{2, 3, 4}); - result = selectBestExtraction(Collections.emptyMap(), queryTerms1, queryTerms2); + result = selectBestExtraction(queryTerms1, queryTerms2); assertSame(queryTerms1, result); queryTerms1 = terms(new int[]{4, 5, 6}); queryTerms2 = terms(new int[]{1, 2, 3}); - result = selectBestExtraction(Collections.emptyMap(), queryTerms1, queryTerms2); + result = selectBestExtraction(queryTerms1, queryTerms2); assertSame(queryTerms2, result); queryTerms1 = terms(new int[]{1, 2, 3}, "123", "456"); queryTerms2 = terms(new int[]{2, 3, 4}, "123", "456"); - result = selectBestExtraction(Collections.emptyMap(), queryTerms1, queryTerms2); + result = selectBestExtraction(queryTerms1, queryTerms2); assertSame(queryTerms1, result); queryTerms1 = terms(new int[]{10}); queryTerms2 = terms(new int[]{1}); - result = selectBestExtraction(Collections.emptyMap(), queryTerms1, queryTerms2); + result = selectBestExtraction(queryTerms1, 
queryTerms2); assertSame(queryTerms2, result); queryTerms1 = terms(new int[]{10}, "123"); queryTerms2 = terms(new int[]{1}); - result = selectBestExtraction(Collections.emptyMap(), queryTerms1, queryTerms2); + result = selectBestExtraction(queryTerms1, queryTerms2); assertSame(queryTerms1, result); queryTerms1 = terms(new int[]{10}, "1", "123"); queryTerms2 = terms(new int[]{1}, "1", "2"); - result = selectBestExtraction(Collections.emptyMap(), queryTerms1, queryTerms2); + result = selectBestExtraction(queryTerms1, queryTerms2); assertSame(queryTerms1, result); queryTerms1 = terms(new int[]{1, 2, 3}, "123", "456"); queryTerms2 = terms(new int[]{2, 3, 4}, "1", "456"); - result = selectBestExtraction(Collections.emptyMap(), queryTerms1, queryTerms2); + result = selectBestExtraction(queryTerms1, queryTerms2); assertSame("Ignoring ranges, so then prefer queryTerms1, because it has the longest shortest term", queryTerms1, result); queryTerms1 = terms(new int[]{}); queryTerms2 = terms(new int[]{}); - result = selectBestExtraction(Collections.emptyMap(), queryTerms1, queryTerms2); + result = selectBestExtraction(queryTerms1, queryTerms2); assertSame("In case query extractions are empty", queryTerms2, result); queryTerms1 = terms(new int[]{1}); queryTerms2 = terms(new int[]{}); - result = selectBestExtraction(Collections.emptyMap(), queryTerms1, queryTerms2); + result = selectBestExtraction(queryTerms1, queryTerms2); assertSame("In case query a single extraction is empty", queryTerms1, result); queryTerms1 = terms(new int[]{}); queryTerms2 = terms(new int[]{1}); - result = selectBestExtraction(Collections.emptyMap(), queryTerms1, queryTerms2); + result = selectBestExtraction(queryTerms1, queryTerms2); assertSame("In case query a single extraction is empty", queryTerms2, result); } - public void testSelectBestExtraction_boostFields() { - Set queryTerms1 = new HashSet<>(Arrays.asList( - new QueryExtraction(new Term("status_field", "sold")), - new QueryExtraction(new Term("category", "accessory")) - )); - Set queryTerms2 = new HashSet<>(Arrays.asList( - new QueryExtraction(new Term("status_field", "instock")), - new QueryExtraction(new Term("category", "hardware")) - )); - Set result = selectBestExtraction(Collections.singletonMap("status_field", 0F), queryTerms1, queryTerms2); - assertSame(queryTerms1, result); - - byte[] interval = new byte[Long.BYTES]; - LongPoint.encodeDimension(4, interval, 0); - queryTerms1 = new HashSet<>(Arrays.asList( - new QueryExtraction(new Term("status_field", "sold")), - new QueryExtraction(new QueryAnalyzer.Range("price", null, null, interval)) - )); - interval = new byte[Long.BYTES]; - LongPoint.encodeDimension(8, interval, 0); - queryTerms2 = new HashSet<>(Arrays.asList( - new QueryExtraction(new Term("status_field", "instock")), - new QueryExtraction(new QueryAnalyzer.Range("price", null, null, interval)) - )); - result = selectBestExtraction(Collections.singletonMap("status_field", 0F), queryTerms1, queryTerms2); - assertSame(queryTerms1, result); - - Map boostFields = new HashMap<>(); - boostFields.put("field1", 2F); - boostFields.put("field2", 0.5F); - boostFields.put("field4", 3F); - boostFields.put("field5", 0.6F); - queryTerms1 = new HashSet<>(Arrays.asList( - new QueryExtraction(new Term("field1", "sold")), - new QueryExtraction(new Term("field2", "accessory")), - new QueryExtraction(new QueryAnalyzer.Range("field3", null, null, new byte[0])) - )); - queryTerms2 = new HashSet<>(Arrays.asList( - new QueryExtraction(new Term("field3", "sold")), - new 
QueryExtraction(new Term("field4", "accessory")), - new QueryExtraction(new QueryAnalyzer.Range("field5", null, null, new byte[0])) - )); - result = selectBestExtraction(boostFields, queryTerms1, queryTerms2); - assertSame(queryTerms2, result); - - boostFields.put("field2", 6F); - result = selectBestExtraction(boostFields, queryTerms1, queryTerms2); - assertSame(queryTerms1, result); - - boostFields.put("field2", 0F); - boostFields.put("field3", 0F); - boostFields.put("field5", 0F); - result = selectBestExtraction(boostFields, queryTerms1, queryTerms2); - assertSame(queryTerms2, result); - - boostFields = new HashMap<>(); - boostFields.put("field2", 2F); - result = selectBestExtraction(boostFields, queryTerms1, queryTerms2); - assertSame(queryTerms1, result); - } - public void testSelectBestExtraction_random() { Set terms1 = new HashSet<>(); int shortestTerms1Length = Integer.MAX_VALUE; @@ -691,7 +840,7 @@ public void testSelectBestExtraction_random() { sumTermLength -= length; } - Set result = selectBestExtraction(Collections.emptyMap(), terms1, terms2); + Set result = selectBestExtraction(terms1, terms2); Set expected = shortestTerms1Length >= shortestTerms2Length ? terms1 : terms2; assertThat(result, sameInstance(expected)); } @@ -699,8 +848,9 @@ public void testSelectBestExtraction_random() { public void testPointRangeQuery() { // int ranges get converted to long ranges: Query query = IntPoint.newRangeQuery("_field", 10, 20); - Result result = analyze(query, Collections.emptyMap()); + Result result = analyze(query, Version.CURRENT); assertFalse(result.verified); + assertThat(result.minimumShouldMatch, equalTo(1)); List ranges = new ArrayList<>(result.extractions); assertThat(ranges.size(), equalTo(1)); assertNull(ranges.get(0).term); @@ -709,7 +859,8 @@ public void testPointRangeQuery() { assertDimension(ranges.get(0).range.upperPoint, bytes -> IntPoint.encodeDimension(20, bytes, 0)); query = LongPoint.newRangeQuery("_field", 10L, 21L); - result = analyze(query, Collections.emptyMap()); + result = analyze(query, Version.CURRENT); + assertThat(result.minimumShouldMatch, equalTo(1)); assertFalse(result.verified); ranges = new ArrayList<>(result.extractions); assertThat(ranges.size(), equalTo(1)); @@ -720,7 +871,8 @@ public void testPointRangeQuery() { // Half float ranges get converted to double ranges: query = HalfFloatPoint.newRangeQuery("_field", 10F, 20F); - result = analyze(query, Collections.emptyMap()); + result = analyze(query, Version.CURRENT); + assertThat(result.minimumShouldMatch, equalTo(1)); assertFalse(result.verified); ranges = new ArrayList<>(result.extractions); assertThat(ranges.size(), equalTo(1)); @@ -731,7 +883,8 @@ public void testPointRangeQuery() { // Float ranges get converted to double ranges: query = FloatPoint.newRangeQuery("_field", 10F, 20F); - result = analyze(query, Collections.emptyMap()); + result = analyze(query, Version.CURRENT); + assertThat(result.minimumShouldMatch, equalTo(1)); assertFalse(result.verified); ranges = new ArrayList<>(result.extractions); assertThat(ranges.size(), equalTo(1)); @@ -741,7 +894,8 @@ public void testPointRangeQuery() { assertDimension(ranges.get(0).range.upperPoint, bytes -> FloatPoint.encodeDimension(20F, bytes, 0)); query = DoublePoint.newRangeQuery("_field", 10D, 20D); - result = analyze(query, Collections.emptyMap()); + result = analyze(query, Version.CURRENT); + assertThat(result.minimumShouldMatch, equalTo(1)); assertFalse(result.verified); ranges = new ArrayList<>(result.extractions); assertThat(ranges.size(), 
equalTo(1)); @@ -752,7 +906,8 @@ public void testPointRangeQuery() { query = InetAddressPoint.newRangeQuery("_field", InetAddresses.forString("192.168.1.0"), InetAddresses.forString("192.168.1.255")); - result = analyze(query, Collections.emptyMap()); + result = analyze(query, Version.CURRENT); + assertThat(result.minimumShouldMatch, equalTo(1)); assertFalse(result.verified); ranges = new ArrayList<>(result.extractions); assertThat(ranges.size(), equalTo(1)); @@ -765,24 +920,26 @@ public void testPointRangeQuery() { public void testTooManyPointDimensions() { // For now no extraction support for geo queries: Query query1 = LatLonPoint.newBoxQuery("_field", 0, 1, 0, 1); - expectThrows(UnsupportedQueryException.class, () -> analyze(query1, Collections.emptyMap())); + expectThrows(UnsupportedQueryException.class, () -> analyze(query1, Version.CURRENT)); Query query2 = LongPoint.newRangeQuery("_field", new long[]{0, 0, 0}, new long[]{1, 1, 1}); - expectThrows(UnsupportedQueryException.class, () -> analyze(query2, Collections.emptyMap())); + expectThrows(UnsupportedQueryException.class, () -> analyze(query2, Version.CURRENT)); } public void testPointRangeQuery_lowerUpperReversed() { Query query = IntPoint.newRangeQuery("_field", 20, 10); - Result result = analyze(query, Collections.emptyMap()); + Result result = analyze(query, Version.CURRENT); assertTrue(result.verified); + assertThat(result.minimumShouldMatch, equalTo(1)); assertThat(result.extractions.size(), equalTo(0)); } public void testIndexOrDocValuesQuery() { Query query = new IndexOrDocValuesQuery(IntPoint.newRangeQuery("_field", 10, 20), SortedNumericDocValuesField.newSlowRangeQuery("_field", 10, 20)); - Result result = analyze(query, Collections.emptyMap()); + Result result = analyze(query, Version.CURRENT); assertFalse(result.verified); + assertThat(result.minimumShouldMatch, equalTo(1)); List ranges = new ArrayList<>(result.extractions); assertThat(ranges.size(), equalTo(1)); assertNull(ranges.get(0).term); @@ -795,8 +952,9 @@ public void testToParentBlockJoinQuery() { TermQuery termQuery = new TermQuery(new Term("field", "value")); QueryBitSetProducer queryBitSetProducer = new QueryBitSetProducer(new TermQuery(new Term("_type", "child"))); ESToParentBlockJoinQuery query = new ESToParentBlockJoinQuery(termQuery, queryBitSetProducer, ScoreMode.None, "child"); - Result result = analyze(query, Collections.emptyMap()); + Result result = analyze(query, Version.CURRENT); assertFalse(result.verified); + assertThat(result.minimumShouldMatch, equalTo(1)); assertEquals(1, result.extractions.size()); assertNull(result.extractions.toArray(new QueryExtraction[0])[0].range); assertEquals(new Term("field", "value"), result.extractions.toArray(new QueryExtraction[0])[0].term); @@ -806,44 +964,101 @@ public void testPointRangeQuerySelectShortestRange() { BooleanQuery.Builder boolQuery = new BooleanQuery.Builder(); boolQuery.add(LongPoint.newRangeQuery("_field1", 10, 20), BooleanClause.Occur.FILTER); boolQuery.add(LongPoint.newRangeQuery("_field2", 10, 15), BooleanClause.Occur.FILTER); - Result result = analyze(boolQuery.build(), Collections.emptyMap()); + Result result = analyze(boolQuery.build(), Version.V_6_0_0); assertFalse(result.verified); + assertThat(result.minimumShouldMatch, equalTo(1)); assertEquals(1, result.extractions.size()); assertEquals("_field2", new ArrayList<>(result.extractions).get(0).range.fieldName); boolQuery = new BooleanQuery.Builder(); boolQuery.add(LongPoint.newRangeQuery("_field1", 10, 20), BooleanClause.Occur.FILTER); 
boolQuery.add(IntPoint.newRangeQuery("_field2", 10, 15), BooleanClause.Occur.FILTER); - result = analyze(boolQuery.build(), Collections.emptyMap()); + result = analyze(boolQuery.build(), Version.V_6_0_0); assertFalse(result.verified); + assertThat(result.minimumShouldMatch, equalTo(1)); assertEquals(1, result.extractions.size()); assertEquals("_field2", new ArrayList<>(result.extractions).get(0).range.fieldName); boolQuery = new BooleanQuery.Builder(); boolQuery.add(DoublePoint.newRangeQuery("_field1", 10, 20), BooleanClause.Occur.FILTER); boolQuery.add(DoublePoint.newRangeQuery("_field2", 10, 15), BooleanClause.Occur.FILTER); - result = analyze(boolQuery.build(), Collections.emptyMap()); + result = analyze(boolQuery.build(), Version.V_6_0_0); assertFalse(result.verified); + assertThat(result.minimumShouldMatch, equalTo(1)); assertEquals(1, result.extractions.size()); assertEquals("_field2", new ArrayList<>(result.extractions).get(0).range.fieldName); boolQuery = new BooleanQuery.Builder(); boolQuery.add(DoublePoint.newRangeQuery("_field1", 10, 20), BooleanClause.Occur.FILTER); boolQuery.add(FloatPoint.newRangeQuery("_field2", 10, 15), BooleanClause.Occur.FILTER); - result = analyze(boolQuery.build(), Collections.emptyMap()); + result = analyze(boolQuery.build(), Version.V_6_0_0); assertFalse(result.verified); + assertThat(result.minimumShouldMatch, equalTo(1)); assertEquals(1, result.extractions.size()); assertEquals("_field2", new ArrayList<>(result.extractions).get(0).range.fieldName); boolQuery = new BooleanQuery.Builder(); boolQuery.add(HalfFloatPoint.newRangeQuery("_field1", 10, 20), BooleanClause.Occur.FILTER); boolQuery.add(HalfFloatPoint.newRangeQuery("_field2", 10, 15), BooleanClause.Occur.FILTER); - result = analyze(boolQuery.build(), Collections.emptyMap()); + result = analyze(boolQuery.build(), Version.V_6_0_0); assertFalse(result.verified); + assertThat(result.minimumShouldMatch, equalTo(1)); assertEquals(1, result.extractions.size()); assertEquals("_field2", new ArrayList<>(result.extractions).get(0).range.fieldName); } + public void testPointRangeQuerySelectRanges() { + BooleanQuery.Builder boolQuery = new BooleanQuery.Builder(); + boolQuery.add(LongPoint.newRangeQuery("_field1", 10, 20), BooleanClause.Occur.SHOULD); + boolQuery.add(LongPoint.newRangeQuery("_field2", 10, 15), BooleanClause.Occur.SHOULD); + Result result = analyze(boolQuery.build(), Version.CURRENT); + assertFalse(result.verified); + assertThat(result.minimumShouldMatch, equalTo(1)); + assertEquals(2, result.extractions.size()); + assertEquals("_field2", new ArrayList<>(result.extractions).get(0).range.fieldName); + assertEquals("_field1", new ArrayList<>(result.extractions).get(1).range.fieldName); + + boolQuery = new BooleanQuery.Builder(); + boolQuery.add(LongPoint.newRangeQuery("_field1", 10, 20), BooleanClause.Occur.FILTER); + boolQuery.add(LongPoint.newRangeQuery("_field2", 10, 15), BooleanClause.Occur.FILTER); + result = analyze(boolQuery.build(), Version.CURRENT); + assertFalse(result.verified); + assertThat(result.minimumShouldMatch, equalTo(2)); + assertEquals(2, result.extractions.size()); + assertEquals("_field2", new ArrayList<>(result.extractions).get(0).range.fieldName); + assertEquals("_field1", new ArrayList<>(result.extractions).get(1).range.fieldName); + + boolQuery = new BooleanQuery.Builder(); + boolQuery.add(LongPoint.newRangeQuery("_field1", 10, 20), BooleanClause.Occur.FILTER); + boolQuery.add(LongPoint.newRangeQuery("_field1", 10, 15), BooleanClause.Occur.FILTER); + result = 
analyze(boolQuery.build(), Version.CURRENT); + assertFalse(result.verified); + assertThat(result.minimumShouldMatch, equalTo(1)); + assertEquals(2, result.extractions.size()); + assertEquals("_field1", new ArrayList<>(result.extractions).get(0).range.fieldName); + assertEquals("_field1", new ArrayList<>(result.extractions).get(1).range.fieldName); + + boolQuery = new BooleanQuery.Builder().setMinimumNumberShouldMatch(2); + boolQuery.add(LongPoint.newRangeQuery("_field1", 10, 20), BooleanClause.Occur.SHOULD); + boolQuery.add(LongPoint.newRangeQuery("_field2", 10, 15), BooleanClause.Occur.SHOULD); + result = analyze(boolQuery.build(), Version.CURRENT); + assertFalse(result.verified); + assertThat(result.minimumShouldMatch, equalTo(2)); + assertEquals(2, result.extractions.size()); + assertEquals("_field2", new ArrayList<>(result.extractions).get(0).range.fieldName); + assertEquals("_field1", new ArrayList<>(result.extractions).get(1).range.fieldName); + + boolQuery = new BooleanQuery.Builder().setMinimumNumberShouldMatch(2); + boolQuery.add(LongPoint.newRangeQuery("_field1", 10, 20), BooleanClause.Occur.SHOULD); + boolQuery.add(LongPoint.newRangeQuery("_field1", 10, 15), BooleanClause.Occur.SHOULD); + result = analyze(boolQuery.build(), Version.CURRENT); + assertFalse(result.verified); + assertThat(result.minimumShouldMatch, equalTo(1)); + assertEquals(2, result.extractions.size()); + assertEquals("_field1", new ArrayList<>(result.extractions).get(0).range.fieldName); + assertEquals("_field1", new ArrayList<>(result.extractions).get(1).range.fieldName); + } + private static void assertDimension(byte[] expected, Consumer consumer) { byte[] dest = new byte[expected.length]; consumer.accept(dest); diff --git a/modules/reindex/src/main/plugin-metadata/plugin-security.policy b/modules/reindex/src/main/plugin-metadata/plugin-security.policy index 70fb51b845ce1..a2482eaf4bd57 100644 --- a/modules/reindex/src/main/plugin-metadata/plugin-security.policy +++ b/modules/reindex/src/main/plugin-metadata/plugin-security.policy @@ -27,7 +27,7 @@ grant codeBase "${codebase.elasticsearch-rest-client}" { permission java.net.NetPermission "getProxySelector"; }; -grant codeBase "${codebase.httpasyncclient-4.1.2.jar}" { +grant codeBase "${codebase.httpasyncclient}" { // rest client uses system properties which gets the default proxy permission java.net.NetPermission "getProxySelector"; }; diff --git a/modules/transport-netty4/src/main/plugin-metadata/plugin-security.policy b/modules/transport-netty4/src/main/plugin-metadata/plugin-security.policy index 4c87e1ef9c598..32b2dc9bd1540 100644 --- a/modules/transport-netty4/src/main/plugin-metadata/plugin-security.policy +++ b/modules/transport-netty4/src/main/plugin-metadata/plugin-security.policy @@ -17,7 +17,7 @@ * under the License. */ -grant codeBase "${codebase.netty-common-4.1.13.Final.jar}" { +grant codeBase "${codebase.netty-common}" { // for reading the system-wide configuration for the backlog of established sockets permission java.io.FilePermission "/proc/sys/net/core/somaxconn", "read"; @@ -25,7 +25,7 @@ grant codeBase "${codebase.netty-common-4.1.13.Final.jar}" { permission java.net.SocketPermission "*", "accept,connect"; }; -grant codeBase "${codebase.netty-transport-4.1.13.Final.jar}" { +grant codeBase "${codebase.netty-transport}" { // Netty NioEventLoop wants to change this, because of https://bugs.openjdk.java.net/browse/JDK-6427854 // the bug says it only happened rarely, and that its fixed, but apparently it still happens rarely! 
permission java.util.PropertyPermission "sun.nio.ch.bugLevel", "write"; diff --git a/plugins/discovery-ec2/build.gradle b/plugins/discovery-ec2/build.gradle index f6941a9260e4b..88f25f72e72f9 100644 --- a/plugins/discovery-ec2/build.gradle +++ b/plugins/discovery-ec2/build.gradle @@ -33,8 +33,8 @@ dependencies { compile "org.apache.httpcomponents:httpcore:${versions.httpcore}" compile "commons-logging:commons-logging:${versions.commonslogging}" compile "commons-codec:commons-codec:${versions.commonscodec}" - compile 'com.fasterxml.jackson.core:jackson-databind:2.5.3' - compile 'com.fasterxml.jackson.core:jackson-annotations:2.5.0' + compile 'com.fasterxml.jackson.core:jackson-databind:2.6.7.1' + compile 'com.fasterxml.jackson.core:jackson-annotations:2.6.0' } dependencyLicenses { diff --git a/plugins/discovery-ec2/licenses/jackson-annotations-2.5.0.jar.sha1 b/plugins/discovery-ec2/licenses/jackson-annotations-2.5.0.jar.sha1 deleted file mode 100644 index 862ac6f304f54..0000000000000 --- a/plugins/discovery-ec2/licenses/jackson-annotations-2.5.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -a2a55a3375bc1cef830ca426d68d2ea22961190e diff --git a/plugins/discovery-ec2/licenses/jackson-annotations-2.6.0.jar.sha1 b/plugins/discovery-ec2/licenses/jackson-annotations-2.6.0.jar.sha1 new file mode 100644 index 0000000000000..bc4cae402d631 --- /dev/null +++ b/plugins/discovery-ec2/licenses/jackson-annotations-2.6.0.jar.sha1 @@ -0,0 +1 @@ +a0990e2e812ac6639b6ce955c91b13228500476e \ No newline at end of file diff --git a/plugins/discovery-ec2/licenses/jackson-databind-2.5.3.jar.sha1 b/plugins/discovery-ec2/licenses/jackson-databind-2.5.3.jar.sha1 deleted file mode 100644 index cdc6695805932..0000000000000 --- a/plugins/discovery-ec2/licenses/jackson-databind-2.5.3.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -c37875ff66127d93e5f672708cb2dcc14c8232ab diff --git a/plugins/discovery-ec2/licenses/jackson-databind-2.6.7.1.jar.sha1 b/plugins/discovery-ec2/licenses/jackson-databind-2.6.7.1.jar.sha1 new file mode 100644 index 0000000000000..7d82dbddc52d0 --- /dev/null +++ b/plugins/discovery-ec2/licenses/jackson-databind-2.6.7.1.jar.sha1 @@ -0,0 +1 @@ +306775aeb5164835a1dcbdf3f945587045cfb3b5 \ No newline at end of file diff --git a/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureStorageSettings.java b/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureStorageSettings.java index 472ab121e8365..e360558933cc1 100644 --- a/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureStorageSettings.java +++ b/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureStorageSettings.java @@ -38,44 +38,47 @@ import java.util.HashMap; import java.util.Locale; import java.util.Map; -import java.util.Set; public final class AzureStorageSettings { + // prefix for azure client settings - private static final String PREFIX = "azure.client."; + private static final String AZURE_CLIENT_PREFIX_KEY = "azure.client."; /** Azure account name */ public static final AffixSetting ACCOUNT_SETTING = - Setting.affixKeySetting(PREFIX, "account", key -> SecureSetting.secureString(key, null)); + Setting.affixKeySetting(AZURE_CLIENT_PREFIX_KEY, "account", key -> SecureSetting.secureString(key, null)); + + /** Azure key */ + public static final AffixSetting KEY_SETTING = Setting.affixKeySetting(AZURE_CLIENT_PREFIX_KEY, "key", + key -> SecureSetting.secureString(key, null)); /** max_retries: Number of retries in case of Azure errors. 
Defaults to 3 (RetryPolicy.DEFAULT_CLIENT_RETRY_COUNT). */ private static final Setting MAX_RETRIES_SETTING = - Setting.affixKeySetting(PREFIX, "max_retries", - (key) -> Setting.intSetting(key, RetryPolicy.DEFAULT_CLIENT_RETRY_COUNT, Setting.Property.NodeScope)); + Setting.affixKeySetting(AZURE_CLIENT_PREFIX_KEY, "max_retries", + (key) -> Setting.intSetting(key, RetryPolicy.DEFAULT_CLIENT_RETRY_COUNT, Setting.Property.NodeScope), + ACCOUNT_SETTING, KEY_SETTING); /** * Azure endpoint suffix. Default to core.windows.net (CloudStorageAccount.DEFAULT_DNS). */ - public static final Setting ENDPOINT_SUFFIX_SETTING = Setting.affixKeySetting(PREFIX, "endpoint_suffix", - key -> Setting.simpleString(key, Property.NodeScope)); - - /** Azure key */ - public static final AffixSetting KEY_SETTING = Setting.affixKeySetting(PREFIX, "key", - key -> SecureSetting.secureString(key, null)); + public static final Setting ENDPOINT_SUFFIX_SETTING = Setting.affixKeySetting(AZURE_CLIENT_PREFIX_KEY, "endpoint_suffix", + key -> Setting.simpleString(key, Property.NodeScope), ACCOUNT_SETTING, KEY_SETTING); - public static final AffixSetting TIMEOUT_SETTING = Setting.affixKeySetting(PREFIX, "timeout", - (key) -> Setting.timeSetting(key, TimeValue.timeValueMinutes(-1), Property.NodeScope)); + public static final AffixSetting TIMEOUT_SETTING = Setting.affixKeySetting(AZURE_CLIENT_PREFIX_KEY, "timeout", + (key) -> Setting.timeSetting(key, TimeValue.timeValueMinutes(-1), Property.NodeScope), ACCOUNT_SETTING, KEY_SETTING); /** The type of the proxy to connect to azure through. Can be direct (no proxy, default), http or socks */ - public static final AffixSetting PROXY_TYPE_SETTING = Setting.affixKeySetting(PREFIX, "proxy.type", - (key) -> new Setting<>(key, "direct", s -> Proxy.Type.valueOf(s.toUpperCase(Locale.ROOT)), Property.NodeScope)); + public static final AffixSetting PROXY_TYPE_SETTING = Setting.affixKeySetting(AZURE_CLIENT_PREFIX_KEY, "proxy.type", + (key) -> new Setting<>(key, "direct", s -> Proxy.Type.valueOf(s.toUpperCase(Locale.ROOT)), Property.NodeScope) + , ACCOUNT_SETTING, KEY_SETTING); /** The host name of a proxy to connect to azure through. */ - public static final Setting PROXY_HOST_SETTING = Setting.affixKeySetting(PREFIX, "proxy.host", - (key) -> Setting.simpleString(key, Property.NodeScope)); + public static final AffixSetting PROXY_HOST_SETTING = Setting.affixKeySetting(AZURE_CLIENT_PREFIX_KEY, "proxy.host", + (key) -> Setting.simpleString(key, Property.NodeScope), KEY_SETTING, ACCOUNT_SETTING, PROXY_TYPE_SETTING); /** The port of a proxy to connect to azure through. 
*/ - public static final Setting PROXY_PORT_SETTING = Setting.affixKeySetting(PREFIX, "proxy.port", - (key) -> Setting.intSetting(key, 0, 0, 65535, Setting.Property.NodeScope)); + public static final Setting PROXY_PORT_SETTING = Setting.affixKeySetting(AZURE_CLIENT_PREFIX_KEY, "proxy.port", + (key) -> Setting.intSetting(key, 0, 0, 65535, Setting.Property.NodeScope), ACCOUNT_SETTING, KEY_SETTING, PROXY_TYPE_SETTING, + PROXY_HOST_SETTING); private final String account; private final String key; @@ -157,9 +160,8 @@ public String toString() { */ public static Map load(Settings settings) { // Get the list of existing named configurations - Set clientNames = settings.getGroups(PREFIX).keySet(); Map storageSettings = new HashMap<>(); - for (String clientName : clientNames) { + for (String clientName : ACCOUNT_SETTING.getNamespaces(settings)) { storageSettings.put(clientName, getClientSettings(settings, clientName)); } diff --git a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageService.java b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageService.java index 50fd071accf48..dcd1d650628a2 100644 --- a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageService.java +++ b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageService.java @@ -40,6 +40,7 @@ import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.util.iterable.Iterables; import org.elasticsearch.env.Environment; import java.io.IOException; @@ -54,10 +55,8 @@ interface GoogleCloudStorageService { - String SETTINGS_PREFIX = "gcs.client."; - /** A json credentials file loaded from secure settings. */ - Setting.AffixSetting CREDENTIALS_FILE_SETTING = Setting.affixKeySetting(SETTINGS_PREFIX, "credentials_file", + Setting.AffixSetting CREDENTIALS_FILE_SETTING = Setting.affixKeySetting("gcs.client.", "credentials_file", key -> SecureSetting.secureFile(key, null)); /** @@ -176,16 +175,15 @@ private ExponentialBackOff newBackOff() { /** Load all secure credentials from the settings. 
*/ static Map loadClientCredentials(Settings settings) { - Set clientNames = settings.getGroups(SETTINGS_PREFIX).keySet(); Map credentials = new HashMap<>(); - for (String clientName : clientNames) { - Setting concreteSetting = CREDENTIALS_FILE_SETTING.getConcreteSettingForNamespace(clientName); + Iterable> iterable = CREDENTIALS_FILE_SETTING.getAllConcreteSettings(settings)::iterator; + for (Setting concreteSetting : iterable) { try (InputStream credStream = concreteSetting.get(settings)) { GoogleCredential credential = GoogleCredential.fromStream(credStream); if (credential.createScopedRequired()) { credential = credential.createScoped(Collections.singleton(StorageScopes.DEVSTORAGE_FULL_CONTROL)); } - credentials.put(clientName, credential); + credentials.put(CREDENTIALS_FILE_SETTING.getNamespace(concreteSetting), credential); } catch (IOException e) { throw new UncheckedIOException(e); } diff --git a/plugins/repository-s3/build.gradle b/plugins/repository-s3/build.gradle index 7071307fbc3c2..ae971cfe4e1ec 100644 --- a/plugins/repository-s3/build.gradle +++ b/plugins/repository-s3/build.gradle @@ -34,8 +34,8 @@ dependencies { compile "org.apache.httpcomponents:httpcore:${versions.httpcore}" compile "commons-logging:commons-logging:${versions.commonslogging}" compile "commons-codec:commons-codec:${versions.commonscodec}" - compile "com.fasterxml.jackson.core:jackson-databind:2.5.3" - compile "com.fasterxml.jackson.core:jackson-annotations:2.5.0" + compile 'com.fasterxml.jackson.core:jackson-databind:2.6.7.1' + compile 'com.fasterxml.jackson.core:jackson-annotations:2.6.0' // HACK: javax.xml.bind was removed from default modules in java 9, so we pull the api in here, // and whitelist this hack in JarHell diff --git a/plugins/repository-s3/licenses/jackson-annotations-2.5.0.jar.sha1 b/plugins/repository-s3/licenses/jackson-annotations-2.5.0.jar.sha1 deleted file mode 100644 index 862ac6f304f54..0000000000000 --- a/plugins/repository-s3/licenses/jackson-annotations-2.5.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -a2a55a3375bc1cef830ca426d68d2ea22961190e diff --git a/plugins/repository-s3/licenses/jackson-annotations-2.6.0.jar.sha1 b/plugins/repository-s3/licenses/jackson-annotations-2.6.0.jar.sha1 new file mode 100644 index 0000000000000..bc4cae402d631 --- /dev/null +++ b/plugins/repository-s3/licenses/jackson-annotations-2.6.0.jar.sha1 @@ -0,0 +1 @@ +a0990e2e812ac6639b6ce955c91b13228500476e \ No newline at end of file diff --git a/plugins/repository-s3/licenses/jackson-databind-2.5.3.jar.sha1 b/plugins/repository-s3/licenses/jackson-databind-2.5.3.jar.sha1 deleted file mode 100644 index cdc6695805932..0000000000000 --- a/plugins/repository-s3/licenses/jackson-databind-2.5.3.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -c37875ff66127d93e5f672708cb2dcc14c8232ab diff --git a/plugins/repository-s3/licenses/jackson-databind-2.6.7.1.jar.sha1 b/plugins/repository-s3/licenses/jackson-databind-2.6.7.1.jar.sha1 new file mode 100644 index 0000000000000..7d82dbddc52d0 --- /dev/null +++ b/plugins/repository-s3/licenses/jackson-databind-2.6.7.1.jar.sha1 @@ -0,0 +1 @@ +306775aeb5164835a1dcbdf3f945587045cfb3b5 \ No newline at end of file diff --git a/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/DefaultS3OutputStream.java b/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/DefaultS3OutputStream.java deleted file mode 100644 index 811f6e7214146..0000000000000 --- a/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/DefaultS3OutputStream.java +++ /dev/null @@ 
-1,223 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.repositories.s3; - -import com.amazonaws.AmazonClientException; -import com.amazonaws.services.s3.model.AbortMultipartUploadRequest; -import com.amazonaws.services.s3.model.AmazonS3Exception; -import com.amazonaws.services.s3.model.CompleteMultipartUploadRequest; -import com.amazonaws.services.s3.model.InitiateMultipartUploadRequest; -import com.amazonaws.services.s3.model.ObjectMetadata; -import com.amazonaws.services.s3.model.PartETag; -import com.amazonaws.services.s3.model.PutObjectRequest; -import com.amazonaws.services.s3.model.PutObjectResult; -import com.amazonaws.services.s3.model.UploadPartRequest; -import com.amazonaws.services.s3.model.UploadPartResult; -import com.amazonaws.util.Base64; -import org.apache.logging.log4j.Logger; -import org.elasticsearch.common.logging.Loggers; -import org.elasticsearch.common.unit.ByteSizeUnit; -import org.elasticsearch.common.unit.ByteSizeValue; - -import java.io.ByteArrayInputStream; -import java.io.IOException; -import java.io.InputStream; -import java.security.DigestInputStream; -import java.security.MessageDigest; -import java.security.NoSuchAlgorithmException; -import java.util.ArrayList; -import java.util.List; - -/** - * DefaultS3OutputStream uploads data to the AWS S3 service using 2 modes: single and multi part. - *

- * When the length of the chunk is lower than buffer_size, the chunk is uploaded with a single request. - * Otherwise multiple requests are made, each of buffer_size (except the last one which can be lower than buffer_size). - *
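The rule this javadoc describes is a plain size threshold, and the replacement code later in this patch (S3BlobContainer#writeBlob) keeps it: anything that fits in the buffer goes out as one request, anything larger is split into parts. A minimal sketch of that dispatch, where the class and the uploadSingle/uploadMultipart helpers are hypothetical placeholders rather than methods from the patch:

    import java.io.IOException;
    import java.io.InputStream;

    // Sketch only: dispatch between a single upload and a multipart upload based on blob size.
    class UploadDispatchSketch {
        private final long bufferSizeInBytes;

        UploadDispatchSketch(long bufferSizeInBytes) {
            this.bufferSizeInBytes = bufferSizeInBytes;
        }

        void upload(String blobName, InputStream input, long blobSize) throws IOException {
            if (blobSize <= bufferSizeInBytes) {
                uploadSingle(blobName, input, blobSize);     // one PutObject-style request
            } else {
                uploadMultipart(blobName, input, blobSize);  // parts of bufferSizeInBytes, last one smaller
            }
        }

        void uploadSingle(String blobName, InputStream input, long blobSize) throws IOException {
            // placeholder for the single-request path
        }

        void uploadMultipart(String blobName, InputStream input, long blobSize) throws IOException {
            // placeholder for initiate / upload parts / complete (abort on failure)
        }
    }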

- * Quick facts about S3: - *

- * Maximum object size: 5 TB - * Maximum number of parts per upload: 10,000 - * Part numbers: 1 to 10,000 (inclusive) - * Part size: 5 MB to 5 GB, last part can be < 5 MB - *
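Taken together, those limits also constrain the part size: 10,000 parts at the 5 MB minimum cover only about 48.8 GB, and reaching the 5 TB object ceiling requires parts of roughly 525 MB or more. A small standalone calculation over the quoted limits, illustrative only (the class name is made up):

    // Illustrative arithmetic over the AWS limits quoted above.
    public class S3LimitsCheck {
        public static void main(String[] args) {
            final long maxParts = 10_000L;
            final long minPartBytes = 5L * 1024 * 1024;                   // 5 MB
            final long maxObjectBytes = 5L * 1024 * 1024 * 1024 * 1024;   // 5 TB

            // Largest object that 10,000 minimum-size parts can cover (~48.8 GB).
            System.out.println("5 MB parts cover up to " + maxParts * minPartBytes + " bytes");

            // Smallest part size that fits a 5 TB object into 10,000 parts (~525 MB).
            System.out.println("5 TB needs parts of at least " + maxObjectBytes / maxParts + " bytes");
        }
    }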

- * See http://docs.aws.amazon.com/AmazonS3/latest/dev/qfacts.html - * See http://docs.aws.amazon.com/AmazonS3/latest/dev/uploadobjusingmpu.html - */ -class DefaultS3OutputStream extends S3OutputStream { - - private static final ByteSizeValue MULTIPART_MAX_SIZE = new ByteSizeValue(5, ByteSizeUnit.GB); - private static final Logger logger = Loggers.getLogger("cloud.aws"); - /** - * Multipart Upload API data - */ - private String multipartId; - private int multipartChunks; - private List multiparts; - - DefaultS3OutputStream(S3BlobStore blobStore, String bucketName, String blobName, int bufferSizeInBytes, boolean serverSideEncryption) { - super(blobStore, bucketName, blobName, bufferSizeInBytes, serverSideEncryption); - } - - @Override - public void flush(byte[] bytes, int off, int len, boolean closing) throws IOException { - SocketAccess.doPrivilegedIOException(() -> { - flushPrivileged(bytes, off, len, closing); - return null; - }); - } - - private void flushPrivileged(byte[] bytes, int off, int len, boolean closing) throws IOException { - if (len > MULTIPART_MAX_SIZE.getBytes()) { - throw new IOException("Unable to upload files larger than " + MULTIPART_MAX_SIZE + " to Amazon S3"); - } - - if (!closing) { - if (len < getBufferSize()) { - upload(bytes, off, len); - } else { - if (getFlushCount() == 0) { - initializeMultipart(); - } - uploadMultipart(bytes, off, len, false); - } - } else { - if (multipartId != null) { - uploadMultipart(bytes, off, len, true); - completeMultipart(); - } else { - upload(bytes, off, len); - } - } - } - - /** - * Upload data using a single request. - */ - private void upload(byte[] bytes, int off, int len) throws IOException { - try (ByteArrayInputStream is = new ByteArrayInputStream(bytes, off, len)) { - try { - doUpload(getBlobStore(), getBucketName(), getBlobName(), is, len, isServerSideEncryption()); - } catch (AmazonClientException e) { - throw new IOException("Unable to upload object " + getBlobName(), e); - } - } - } - - protected void doUpload(S3BlobStore blobStore, String bucketName, String blobName, InputStream is, int length, - boolean serverSideEncryption) throws AmazonS3Exception { - ObjectMetadata md = new ObjectMetadata(); - if (serverSideEncryption) { - md.setSSEAlgorithm(ObjectMetadata.AES_256_SERVER_SIDE_ENCRYPTION); - } - md.setContentLength(length); - - PutObjectRequest putRequest = new PutObjectRequest(bucketName, blobName, is, md) - .withStorageClass(blobStore.getStorageClass()) - .withCannedAcl(blobStore.getCannedACL()); - blobStore.client().putObject(putRequest); - - } - - private void initializeMultipart() { - while (multipartId == null) { - multipartId = doInitialize(getBlobStore(), getBucketName(), getBlobName(), isServerSideEncryption()); - if (multipartId != null) { - multipartChunks = 1; - multiparts = new ArrayList<>(); - } - } - } - - protected String doInitialize(S3BlobStore blobStore, String bucketName, String blobName, boolean serverSideEncryption) { - InitiateMultipartUploadRequest request = new InitiateMultipartUploadRequest(bucketName, blobName) - .withCannedACL(blobStore.getCannedACL()) - .withStorageClass(blobStore.getStorageClass()); - - if (serverSideEncryption) { - ObjectMetadata md = new ObjectMetadata(); - md.setSSEAlgorithm(ObjectMetadata.AES_256_SERVER_SIDE_ENCRYPTION); - request.setObjectMetadata(md); - } - - return blobStore.client().initiateMultipartUpload(request).getUploadId(); - } - - private void uploadMultipart(byte[] bytes, int off, int len, boolean lastPart) throws IOException { - try 
(ByteArrayInputStream is = new ByteArrayInputStream(bytes, off, len)) { - try { - PartETag partETag = doUploadMultipart(getBlobStore(), getBucketName(), getBlobName(), multipartId, is, len, lastPart); - multiparts.add(partETag); - multipartChunks++; - } catch (AmazonClientException e) { - abortMultipart(); - throw e; - } - } - } - - protected PartETag doUploadMultipart(S3BlobStore blobStore, String bucketName, String blobName, String uploadId, InputStream is, - int length, boolean lastPart) throws AmazonS3Exception { - UploadPartRequest request = new UploadPartRequest() - .withBucketName(bucketName) - .withKey(blobName) - .withUploadId(uploadId) - .withPartNumber(multipartChunks) - .withInputStream(is) - .withPartSize(length) - .withLastPart(lastPart); - - UploadPartResult response = blobStore.client().uploadPart(request); - return response.getPartETag(); - - } - - private void completeMultipart() { - try { - doCompleteMultipart(getBlobStore(), getBucketName(), getBlobName(), multipartId, multiparts); - multipartId = null; - return; - } catch (AmazonClientException e) { - abortMultipart(); - throw e; - } - } - - protected void doCompleteMultipart(S3BlobStore blobStore, String bucketName, String blobName, String uploadId, List parts) - throws AmazonS3Exception { - CompleteMultipartUploadRequest request = new CompleteMultipartUploadRequest(bucketName, blobName, uploadId, parts); - blobStore.client().completeMultipartUpload(request); - } - - private void abortMultipart() { - if (multipartId != null) { - try { - doAbortMultipart(getBlobStore(), getBucketName(), getBlobName(), multipartId); - } finally { - multipartId = null; - } - } - } - - protected void doAbortMultipart(S3BlobStore blobStore, String bucketName, String blobName, String uploadId) - throws AmazonS3Exception { - blobStore.client().abortMultipartUpload(new AbortMultipartUploadRequest(bucketName, blobName, uploadId)); - } -} diff --git a/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3BlobContainer.java b/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3BlobContainer.java index f49f4b348f02b..bb1130db42d9a 100644 --- a/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3BlobContainer.java +++ b/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3BlobContainer.java @@ -21,35 +21,48 @@ import com.amazonaws.AmazonClientException; import com.amazonaws.services.s3.AmazonS3; +import com.amazonaws.services.s3.model.AbortMultipartUploadRequest; import com.amazonaws.services.s3.model.AmazonS3Exception; +import com.amazonaws.services.s3.model.CompleteMultipartUploadRequest; import com.amazonaws.services.s3.model.CopyObjectRequest; +import com.amazonaws.services.s3.model.InitiateMultipartUploadRequest; import com.amazonaws.services.s3.model.ObjectListing; import com.amazonaws.services.s3.model.ObjectMetadata; +import com.amazonaws.services.s3.model.PartETag; +import com.amazonaws.services.s3.model.PutObjectRequest; import com.amazonaws.services.s3.model.S3Object; import com.amazonaws.services.s3.model.S3ObjectSummary; +import com.amazonaws.services.s3.model.UploadPartRequest; +import com.amazonaws.services.s3.model.UploadPartResult; +import org.apache.lucene.util.SetOnce; import org.elasticsearch.common.Nullable; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.blobstore.BlobMetaData; import org.elasticsearch.common.blobstore.BlobPath; import org.elasticsearch.common.blobstore.BlobStoreException; import 
org.elasticsearch.common.blobstore.support.AbstractBlobContainer; import org.elasticsearch.common.blobstore.support.PlainBlobMetaData; import org.elasticsearch.common.collect.MapBuilder; -import org.elasticsearch.common.io.Streams; +import org.elasticsearch.common.collect.Tuple; import java.io.IOException; import java.io.InputStream; -import java.io.OutputStream; import java.nio.file.FileAlreadyExistsException; import java.nio.file.NoSuchFileException; import java.security.AccessController; import java.security.PrivilegedAction; +import java.util.ArrayList; +import java.util.List; import java.util.Map; -class S3BlobContainer extends AbstractBlobContainer { +import static org.elasticsearch.repositories.s3.S3Repository.MAX_FILE_SIZE; +import static org.elasticsearch.repositories.s3.S3Repository.MAX_FILE_SIZE_USING_MULTIPART; +import static org.elasticsearch.repositories.s3.S3Repository.MIN_PART_SIZE_USING_MULTIPART; - protected final S3BlobStore blobStore; +class S3BlobContainer extends AbstractBlobContainer { - protected final String keyPath; + private final S3BlobStore blobStore; + private final String keyPath; S3BlobContainer(BlobPath path, S3BlobStore blobStore) { super(path); @@ -91,9 +104,15 @@ public void writeBlob(String blobName, InputStream inputStream, long blobSize) t if (blobExists(blobName)) { throw new FileAlreadyExistsException("blob [" + blobName + "] already exists, cannot overwrite"); } - try (OutputStream stream = createOutput(blobName)) { - Streams.copy(inputStream, stream); - } + + SocketAccess.doPrivilegedIOException(() -> { + if (blobSize <= blobStore.bufferSizeInBytes()) { + executeSingleUpload(blobStore, buildKey(blobName), inputStream, blobSize); + } else { + executeMultipartUpload(blobStore, buildKey(blobName), inputStream, blobSize); + } + return null; + }); } @Override @@ -109,12 +128,6 @@ public void deleteBlob(String blobName) throws IOException { } } - private OutputStream createOutput(final String blobName) throws IOException { - // UploadS3OutputStream does buffering & retry logic internally - return new DefaultS3OutputStream(blobStore, blobStore.bucket(), buildKey(blobName), - blobStore.bufferSizeInBytes(), blobStore.serverSideEncryption()); - } - @Override public Map listBlobsByPrefix(@Nullable String blobNamePrefix) throws IOException { return AccessController.doPrivileged((PrivilegedAction>) () -> { @@ -175,7 +188,158 @@ public Map listBlobs() throws IOException { return listBlobsByPrefix(null); } - protected String buildKey(String blobName) { + private String buildKey(String blobName) { return keyPath + blobName; } + + /** + * Uploads a blob using a single upload request + */ + void executeSingleUpload(final S3BlobStore blobStore, + final String blobName, + final InputStream input, + final long blobSize) throws IOException { + + // Extra safety checks + if (blobSize > MAX_FILE_SIZE.getBytes()) { + throw new IllegalArgumentException("Upload request size [" + blobSize + "] can't be larger than " + MAX_FILE_SIZE); + } + if (blobSize > blobStore.bufferSizeInBytes()) { + throw new IllegalArgumentException("Upload request size [" + blobSize + "] can't be larger than buffer size"); + } + + try { + final ObjectMetadata md = new ObjectMetadata(); + md.setContentLength(blobSize); + if (blobStore.serverSideEncryption()) { + md.setSSEAlgorithm(ObjectMetadata.AES_256_SERVER_SIDE_ENCRYPTION); + } + + final PutObjectRequest putRequest = new PutObjectRequest(blobStore.bucket(), blobName, input, md); + putRequest.setStorageClass(blobStore.getStorageClass()); + 
putRequest.setCannedAcl(blobStore.getCannedACL()); + + blobStore.client().putObject(putRequest); + } catch (AmazonClientException e) { + throw new IOException("Unable to upload object [" + blobName + "] using a single upload", e); + } + } + + /** + * Uploads a blob using multipart upload requests. + */ + void executeMultipartUpload(final S3BlobStore blobStore, + final String blobName, + final InputStream input, + final long blobSize) throws IOException { + + if (blobSize > MAX_FILE_SIZE_USING_MULTIPART.getBytes()) { + throw new IllegalArgumentException("Multipart upload request size [" + blobSize + + "] can't be larger than " + MAX_FILE_SIZE_USING_MULTIPART); + } + if (blobSize < MIN_PART_SIZE_USING_MULTIPART.getBytes()) { + throw new IllegalArgumentException("Multipart upload request size [" + blobSize + + "] can't be smaller than " + MIN_PART_SIZE_USING_MULTIPART); + } + + final long partSize = blobStore.bufferSizeInBytes(); + final Tuple multiparts = numberOfMultiparts(blobSize, partSize); + + if (multiparts.v1() > Integer.MAX_VALUE) { + throw new IllegalArgumentException("Too many multipart upload requests, maybe try a larger buffer size?"); + } + + final int nbParts = multiparts.v1().intValue(); + final long lastPartSize = multiparts.v2(); + assert blobSize == (nbParts - 1) * partSize + lastPartSize : "blobSize does not match multipart sizes"; + + final SetOnce uploadId = new SetOnce<>(); + final String bucketName = blobStore.bucket(); + boolean success = false; + + try { + final InitiateMultipartUploadRequest initRequest = new InitiateMultipartUploadRequest(bucketName, blobName); + initRequest.setStorageClass(blobStore.getStorageClass()); + initRequest.setCannedACL(blobStore.getCannedACL()); + if (blobStore.serverSideEncryption()) { + final ObjectMetadata md = new ObjectMetadata(); + md.setSSEAlgorithm(ObjectMetadata.AES_256_SERVER_SIDE_ENCRYPTION); + initRequest.setObjectMetadata(md); + } + + uploadId.set(blobStore.client().initiateMultipartUpload(initRequest).getUploadId()); + if (Strings.isEmpty(uploadId.get())) { + throw new IOException("Failed to initialize multipart upload " + blobName); + } + + final List parts = new ArrayList<>(); + + long bytesCount = 0; + for (int i = 1; i <= nbParts; i++) { + final UploadPartRequest uploadRequest = new UploadPartRequest(); + uploadRequest.setBucketName(bucketName); + uploadRequest.setKey(blobName); + uploadRequest.setUploadId(uploadId.get()); + uploadRequest.setPartNumber(i); + uploadRequest.setInputStream(input); + + if (i < nbParts) { + uploadRequest.setPartSize(partSize); + uploadRequest.setLastPart(false); + } else { + uploadRequest.setPartSize(lastPartSize); + uploadRequest.setLastPart(true); + } + bytesCount += uploadRequest.getPartSize(); + + final UploadPartResult uploadResponse = blobStore.client().uploadPart(uploadRequest); + parts.add(uploadResponse.getPartETag()); + } + + if (bytesCount != blobSize) { + throw new IOException("Failed to execute multipart upload for [" + blobName + "], expected " + blobSize + + "bytes sent but got " + bytesCount); + } + + CompleteMultipartUploadRequest complRequest = new CompleteMultipartUploadRequest(bucketName, blobName, uploadId.get(), parts); + blobStore.client().completeMultipartUpload(complRequest); + success = true; + + } catch (AmazonClientException e) { + throw new IOException("Unable to upload object [" + blobName + "] using multipart upload", e); + } finally { + if (success == false && Strings.hasLength(uploadId.get())) { + final AbortMultipartUploadRequest abortRequest = new 
AbortMultipartUploadRequest(bucketName, blobName, uploadId.get()); + blobStore.client().abortMultipartUpload(abortRequest); + } + } + } + + /** + * Returns the number parts of size of {@code partSize} needed to reach {@code totalSize}, + * along with the size of the last (or unique) part. + * + * @param totalSize the total size + * @param partSize the part size + * @return a {@link Tuple} containing the number of parts to fill {@code totalSize} and + * the size of the last part + */ + static Tuple numberOfMultiparts(final long totalSize, final long partSize) { + if (partSize <= 0) { + throw new IllegalArgumentException("Part size must be greater than zero"); + } + + if (totalSize == 0L || totalSize <= partSize) { + return Tuple.tuple(1L, totalSize); + } + + final long parts = totalSize / partSize; + final long remaining = totalSize % partSize; + + if (remaining == 0) { + return Tuple.tuple(parts, partSize); + } else { + return Tuple.tuple(parts + 1, remaining); + } + } } diff --git a/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3BlobStore.java b/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3BlobStore.java index d951b31c07d67..27349f12135ed 100644 --- a/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3BlobStore.java +++ b/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3BlobStore.java @@ -93,8 +93,8 @@ public boolean serverSideEncryption() { return serverSideEncryption; } - public int bufferSizeInBytes() { - return bufferSize.bytesAsInt(); + public long bufferSizeInBytes() { + return bufferSize.getBytes(); } @Override diff --git a/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3OutputStream.java b/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3OutputStream.java deleted file mode 100644 index 46c9108f1b585..0000000000000 --- a/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3OutputStream.java +++ /dev/null @@ -1,119 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.repositories.s3; - -import org.elasticsearch.common.unit.ByteSizeUnit; -import org.elasticsearch.common.unit.ByteSizeValue; - -import java.io.IOException; -import java.io.OutputStream; - -/** - * S3OutputStream buffers data before flushing it to an underlying S3OutputStream. - */ -abstract class S3OutputStream extends OutputStream { - - /** - * Limit of upload allowed by AWS S3. 
- */ - protected static final ByteSizeValue MULTIPART_MAX_SIZE = new ByteSizeValue(5, ByteSizeUnit.GB); - protected static final ByteSizeValue MULTIPART_MIN_SIZE = new ByteSizeValue(5, ByteSizeUnit.MB); - - private S3BlobStore blobStore; - private String bucketName; - private String blobName; - private boolean serverSideEncryption; - - private byte[] buffer; - private int count; - private long length; - - private int flushCount = 0; - - S3OutputStream(S3BlobStore blobStore, String bucketName, String blobName, int bufferSizeInBytes, boolean serverSideEncryption) { - this.blobStore = blobStore; - this.bucketName = bucketName; - this.blobName = blobName; - this.serverSideEncryption = serverSideEncryption; - - if (bufferSizeInBytes < MULTIPART_MIN_SIZE.getBytes()) { - throw new IllegalArgumentException("Buffer size can't be smaller than " + MULTIPART_MIN_SIZE); - } - if (bufferSizeInBytes > MULTIPART_MAX_SIZE.getBytes()) { - throw new IllegalArgumentException("Buffer size can't be larger than " + MULTIPART_MAX_SIZE); - } - - this.buffer = new byte[bufferSizeInBytes]; - } - - public abstract void flush(byte[] bytes, int off, int len, boolean closing) throws IOException; - - private void flushBuffer(boolean closing) throws IOException { - flush(buffer, 0, count, closing); - flushCount++; - count = 0; - } - - @Override - public void write(int b) throws IOException { - if (count >= buffer.length) { - flushBuffer(false); - } - - buffer[count++] = (byte) b; - length++; - } - - @Override - public void close() throws IOException { - if (count > 0) { - flushBuffer(true); - count = 0; - } - } - - public S3BlobStore getBlobStore() { - return blobStore; - } - - public String getBucketName() { - return bucketName; - } - - public String getBlobName() { - return blobName; - } - - public int getBufferSize() { - return buffer.length; - } - - public boolean isServerSideEncryption() { - return serverSideEncryption; - } - - public long getLength() { - return length; - } - - public int getFlushCount() { - return flushCount; - } -} diff --git a/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Repository.java b/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Repository.java index eeca906ff4998..51bb6f2024cd4 100644 --- a/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Repository.java +++ b/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Repository.java @@ -19,8 +19,6 @@ package org.elasticsearch.repositories.s3; -import java.io.IOException; - import com.amazonaws.services.s3.AmazonS3; import org.elasticsearch.cluster.metadata.RepositoryMetaData; import org.elasticsearch.common.Strings; @@ -37,6 +35,8 @@ import org.elasticsearch.repositories.RepositoryException; import org.elasticsearch.repositories.blobstore.BlobStoreRepository; +import java.io.IOException; + /** * Shared file system implementation of the BlobStoreRepository *
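The S3OutputStream removed above was, at its core, a fixed-size write buffer that handed a chunk to flush() whenever the buffer filled and once more on close. A stripped-down sketch of that pattern, drawn from the deleted code but not the removed class itself (the class name is made up):

    import java.io.IOException;
    import java.io.OutputStream;

    // Sketch of the buffer-and-flush pattern the deleted stream used: bytes accumulate in a
    // fixed buffer and are handed to flush(...) once the buffer is full or the stream closes.
    abstract class BufferedFlushSketch extends OutputStream {
        private final byte[] buffer;
        private int count;

        BufferedFlushSketch(int bufferSize) {
            this.buffer = new byte[bufferSize];
        }

        /** Called with a full buffer, or with the final (possibly smaller) chunk on close. */
        abstract void flush(byte[] bytes, int off, int len, boolean closing) throws IOException;

        @Override
        public void write(int b) throws IOException {
            if (count >= buffer.length) {
                flush(buffer, 0, count, false);   // buffer full: push an intermediate chunk
                count = 0;
            }
            buffer[count++] = (byte) b;
        }

        @Override
        public void close() throws IOException {
            if (count > 0) {
                flush(buffer, 0, count, true);    // final chunk, may be smaller than the buffer
                count = 0;
            }
        }
    }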

@@ -80,14 +80,36 @@ class S3Repository extends BlobStoreRepository { */ static final Setting SERVER_SIDE_ENCRYPTION_SETTING = Setting.boolSetting("server_side_encryption", false); + /** + * Maximum size of files that can be uploaded using a single upload request. + */ + static final ByteSizeValue MAX_FILE_SIZE = new ByteSizeValue(5, ByteSizeUnit.GB); + + /** + * Minimum size of parts that can be uploaded using the Multipart Upload API. + * (see http://docs.aws.amazon.com/AmazonS3/latest/dev/qfacts.html) + */ + static final ByteSizeValue MIN_PART_SIZE_USING_MULTIPART = new ByteSizeValue(5, ByteSizeUnit.MB); + + /** + * Maximum size of parts that can be uploaded using the Multipart Upload API. + * (see http://docs.aws.amazon.com/AmazonS3/latest/dev/qfacts.html) + */ + static final ByteSizeValue MAX_PART_SIZE_USING_MULTIPART = MAX_FILE_SIZE; + + /** + * Maximum size of files that can be uploaded using the Multipart Upload API. + */ + static final ByteSizeValue MAX_FILE_SIZE_USING_MULTIPART = new ByteSizeValue(5, ByteSizeUnit.TB); + /** * Minimum threshold below which the chunk is uploaded using a single request. Beyond this threshold, * the S3 repository will use the AWS Multipart Upload API to split the chunk into several parts, each of buffer_size length, and * to upload each part in its own request. Note that setting a buffer size lower than 5mb is not allowed since it will prevents the * use of the Multipart API and may result in upload errors. Defaults to the minimum between 100MB and 5% of the heap size. */ - static final Setting BUFFER_SIZE_SETTING = Setting.byteSizeSetting("buffer_size", DEFAULT_BUFFER_SIZE, - new ByteSizeValue(5, ByteSizeUnit.MB), new ByteSizeValue(5, ByteSizeUnit.TB)); + static final Setting BUFFER_SIZE_SETTING = + Setting.byteSizeSetting("buffer_size", DEFAULT_BUFFER_SIZE, MIN_PART_SIZE_USING_MULTIPART, MAX_PART_SIZE_USING_MULTIPART); /** * Big files can be broken down into chunks during snapshotting if needed. Defaults to 1g. diff --git a/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/MockDefaultS3OutputStream.java b/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/MockDefaultS3OutputStream.java deleted file mode 100644 index 3a48b70e307c3..0000000000000 --- a/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/MockDefaultS3OutputStream.java +++ /dev/null @@ -1,101 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.repositories.s3; - -import com.amazonaws.services.s3.model.AmazonS3Exception; -import com.amazonaws.services.s3.model.PartETag; -import com.carrotsearch.randomizedtesting.RandomizedTest; -import org.elasticsearch.common.io.Streams; -import org.elasticsearch.repositories.s3.DefaultS3OutputStream; -import org.elasticsearch.repositories.s3.S3BlobStore; - -import java.io.ByteArrayOutputStream; -import java.io.IOException; -import java.io.InputStream; -import java.util.List; - -public class MockDefaultS3OutputStream extends DefaultS3OutputStream { - - private ByteArrayOutputStream out = new ByteArrayOutputStream(); - - private boolean initialized = false; - private boolean completed = false; - private boolean aborted = false; - - private int numberOfUploadRequests = 0; - - public MockDefaultS3OutputStream(int bufferSizeInBytes) { - super(null, "test-bucket", "test-blobname", bufferSizeInBytes, false); - } - - @Override - protected void doUpload(S3BlobStore blobStore, String bucketName, String blobName, InputStream is, int length, boolean serverSideEncryption) throws AmazonS3Exception { - try { - long copied = Streams.copy(is, out); - if (copied != length) { - throw new AmazonS3Exception("Not all the bytes were copied"); - } - numberOfUploadRequests++; - } catch (IOException e) { - throw new AmazonS3Exception(e.getMessage()); - } - } - - @Override - protected String doInitialize(S3BlobStore blobStore, String bucketName, String blobName, boolean serverSideEncryption) { - initialized = true; - return RandomizedTest.randomAsciiOfLength(50); - } - - @Override - protected PartETag doUploadMultipart(S3BlobStore blobStore, String bucketName, String blobName, String uploadId, InputStream is, int length, boolean lastPart) throws AmazonS3Exception { - try { - long copied = Streams.copy(is, out); - if (copied != length) { - throw new AmazonS3Exception("Not all the bytes were copied"); - } - return new PartETag(numberOfUploadRequests++, RandomizedTest.randomAsciiOfLength(50)); - } catch (IOException e) { - throw new AmazonS3Exception(e.getMessage()); - } - } - - @Override - protected void doCompleteMultipart(S3BlobStore blobStore, String bucketName, String blobName, String uploadId, List parts) throws AmazonS3Exception { - completed = true; - } - - @Override - protected void doAbortMultipart(S3BlobStore blobStore, String bucketName, String blobName, String uploadId) throws AmazonS3Exception { - aborted = true; - } - - public int getNumberOfUploadRequests() { - return numberOfUploadRequests; - } - - public boolean isMultipart() { - return (numberOfUploadRequests > 1) && initialized && completed && !aborted; - } - - public byte[] toByteArray() { - return out.toByteArray(); - } -} diff --git a/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobStoreContainerTests.java b/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobStoreContainerTests.java index 45ffac30aa7fb..5b80cf6c74555 100644 --- a/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobStoreContainerTests.java +++ b/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobStoreContainerTests.java @@ -19,10 +19,24 @@ package org.elasticsearch.repositories.s3; -import org.apache.logging.log4j.Level; -import org.apache.logging.log4j.Logger; +import com.amazonaws.AmazonClientException; +import com.amazonaws.services.s3.AmazonS3; +import com.amazonaws.services.s3.model.AbortMultipartUploadRequest; +import 
com.amazonaws.services.s3.model.CannedAccessControlList; +import com.amazonaws.services.s3.model.CompleteMultipartUploadRequest; +import com.amazonaws.services.s3.model.CompleteMultipartUploadResult; +import com.amazonaws.services.s3.model.InitiateMultipartUploadRequest; +import com.amazonaws.services.s3.model.InitiateMultipartUploadResult; +import com.amazonaws.services.s3.model.ObjectMetadata; +import com.amazonaws.services.s3.model.PartETag; +import com.amazonaws.services.s3.model.PutObjectRequest; +import com.amazonaws.services.s3.model.PutObjectResult; +import com.amazonaws.services.s3.model.StorageClass; +import com.amazonaws.services.s3.model.UploadPartRequest; +import com.amazonaws.services.s3.model.UploadPartResult; +import org.elasticsearch.common.blobstore.BlobPath; import org.elasticsearch.common.blobstore.BlobStore; -import org.elasticsearch.common.logging.Loggers; +import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeValue; @@ -30,15 +44,28 @@ import org.elasticsearch.repositories.ESBlobStoreContainerTestCase; import org.junit.AfterClass; import org.junit.BeforeClass; +import org.mockito.ArgumentCaptor; +import java.io.ByteArrayInputStream; import java.io.IOException; import java.net.InetAddress; import java.net.ServerSocket; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; import java.util.Locale; +import java.util.stream.Collectors; +import java.util.stream.IntStream; -public class S3BlobStoreContainerTests extends ESBlobStoreContainerTestCase { +import static org.hamcrest.Matchers.instanceOf; +import static org.mockito.Matchers.any; +import static org.mockito.Mockito.doNothing; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; - private static final Logger logger = Loggers.getLogger(S3BlobStoreContainerTests.class); +public class S3BlobStoreContainerTests extends ESBlobStoreContainerTestCase { private static ServerSocket mockS3ServerSocket; @@ -69,6 +96,329 @@ protected BlobStore newBlobStore() throws IOException { new ByteSizeValue(10, ByteSizeUnit.MB), "public-read-write", "standard"); } + public void testExecuteSingleUploadBlobSizeTooLarge() throws IOException { + final long blobSize = ByteSizeUnit.GB.toBytes(randomIntBetween(6, 10)); + final S3BlobStore blobStore = mock(S3BlobStore.class); + final S3BlobContainer blobContainer = new S3BlobContainer(mock(BlobPath.class), blobStore); + + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> + blobContainer.executeSingleUpload(blobStore, randomAlphaOfLengthBetween(1, 10), null, blobSize)); + assertEquals("Upload request size [" + blobSize + "] can't be larger than 5gb", e.getMessage()); + } + + public void testExecuteSingleUploadBlobSizeLargerThanBufferSize() throws IOException { + final S3BlobStore blobStore = mock(S3BlobStore.class); + when(blobStore.bufferSizeInBytes()).thenReturn(ByteSizeUnit.MB.toBytes(1)); + + final S3BlobContainer blobContainer = new S3BlobContainer(mock(BlobPath.class), blobStore); + final String blobName = randomAlphaOfLengthBetween(1, 10); + + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> + blobContainer.executeSingleUpload(blobStore, blobName, new ByteArrayInputStream(new byte[0]), ByteSizeUnit.MB.toBytes(2))); + assertEquals("Upload request size 
[2097152] can't be larger than buffer size", e.getMessage()); + } + + public void testExecuteSingleUpload() throws IOException { + final String bucketName = randomAlphaOfLengthBetween(1, 10); + final String blobName = randomAlphaOfLengthBetween(1, 10); + + final BlobPath blobPath = new BlobPath(); + if (randomBoolean()) { + IntStream.of(randomIntBetween(1, 5)).forEach(value -> blobPath.add("path_" + value)); + } + + final int bufferSize = randomIntBetween(1024, 2048); + final int blobSize = randomIntBetween(0, bufferSize); + + final S3BlobStore blobStore = mock(S3BlobStore.class); + when(blobStore.bucket()).thenReturn(bucketName); + when(blobStore.bufferSizeInBytes()).thenReturn((long) bufferSize); + + final S3BlobContainer blobContainer = new S3BlobContainer(blobPath, blobStore); + + final boolean serverSideEncryption = randomBoolean(); + when(blobStore.serverSideEncryption()).thenReturn(serverSideEncryption); + + final StorageClass storageClass = randomFrom(StorageClass.values()); + when(blobStore.getStorageClass()).thenReturn(storageClass); + + final CannedAccessControlList cannedAccessControlList = randomBoolean() ? randomFrom(CannedAccessControlList.values()) : null; + if (cannedAccessControlList != null) { + when(blobStore.getCannedACL()).thenReturn(cannedAccessControlList); + } + + final AmazonS3 client = mock(AmazonS3.class); + when(blobStore.client()).thenReturn(client); + + final ArgumentCaptor argumentCaptor = ArgumentCaptor.forClass(PutObjectRequest.class); + when(client.putObject(argumentCaptor.capture())).thenReturn(new PutObjectResult()); + + final ByteArrayInputStream inputStream = new ByteArrayInputStream(new byte[blobSize]); + blobContainer.executeSingleUpload(blobStore, blobName, inputStream, blobSize); + + final PutObjectRequest request = argumentCaptor.getValue(); + assertEquals(bucketName, request.getBucketName()); + assertEquals(blobPath.buildAsString() + blobName, request.getKey()); + assertEquals(inputStream, request.getInputStream()); + assertEquals(blobSize, request.getMetadata().getContentLength()); + assertEquals(storageClass.toString(), request.getStorageClass()); + assertEquals(cannedAccessControlList, request.getCannedAcl()); + if (serverSideEncryption) { + assertEquals(ObjectMetadata.AES_256_SERVER_SIDE_ENCRYPTION, request.getMetadata().getSSEAlgorithm()); + } + } + + public void testExecuteMultipartUploadBlobSizeTooLarge() throws IOException { + final long blobSize = ByteSizeUnit.TB.toBytes(randomIntBetween(6, 10)); + final S3BlobStore blobStore = mock(S3BlobStore.class); + final S3BlobContainer blobContainer = new S3BlobContainer(mock(BlobPath.class), blobStore); + + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> + blobContainer.executeMultipartUpload(blobStore, randomAlphaOfLengthBetween(1, 10), null, blobSize) + ); + assertEquals("Multipart upload request size [" + blobSize + "] can't be larger than 5tb", e.getMessage()); + } + + public void testExecuteMultipartUploadBlobSizeTooSmall() throws IOException { + final long blobSize = ByteSizeUnit.MB.toBytes(randomIntBetween(1, 4)); + final S3BlobStore blobStore = mock(S3BlobStore.class); + final S3BlobContainer blobContainer = new S3BlobContainer(mock(BlobPath.class), blobStore); + + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> + blobContainer.executeMultipartUpload(blobStore, randomAlphaOfLengthBetween(1, 10), null, blobSize) + ); + assertEquals("Multipart upload request size [" + blobSize + "] can't be smaller than 5mb", e.getMessage()); 
+ } + + public void testExecuteMultipartUpload() throws IOException { + final String bucketName = randomAlphaOfLengthBetween(1, 10); + final String blobName = randomAlphaOfLengthBetween(1, 10); + + final BlobPath blobPath = new BlobPath(); + if (randomBoolean()) { + IntStream.of(randomIntBetween(1, 5)).forEach(value -> blobPath.add("path_" + value)); + } + + final long blobSize = ByteSizeUnit.GB.toBytes(randomIntBetween(1, 1024)); + final long bufferSize = ByteSizeUnit.MB.toBytes(randomIntBetween(5, 1024)); + + final S3BlobStore blobStore = mock(S3BlobStore.class); + when(blobStore.bucket()).thenReturn(bucketName); + when(blobStore.bufferSizeInBytes()).thenReturn(bufferSize); + + final boolean serverSideEncryption = randomBoolean(); + when(blobStore.serverSideEncryption()).thenReturn(serverSideEncryption); + + final StorageClass storageClass = randomFrom(StorageClass.values()); + when(blobStore.getStorageClass()).thenReturn(storageClass); + + final CannedAccessControlList cannedAccessControlList = randomBoolean() ? randomFrom(CannedAccessControlList.values()) : null; + if (cannedAccessControlList != null) { + when(blobStore.getCannedACL()).thenReturn(cannedAccessControlList); + } + + final AmazonS3 client = mock(AmazonS3.class); + when(blobStore.client()).thenReturn(client); + + final ArgumentCaptor initArgCaptor = ArgumentCaptor.forClass(InitiateMultipartUploadRequest.class); + final InitiateMultipartUploadResult initResult = new InitiateMultipartUploadResult(); + initResult.setUploadId(randomAlphaOfLength(10)); + when(client.initiateMultipartUpload(initArgCaptor.capture())).thenReturn(initResult); + + final ArgumentCaptor uploadArgCaptor = ArgumentCaptor.forClass(UploadPartRequest.class); + + final List expectedEtags = new ArrayList<>(); + long partSize = Math.min(bufferSize, blobSize); + long totalBytes = 0; + do { + expectedEtags.add(randomAlphaOfLength(50)); + totalBytes += partSize; + } while (totalBytes < blobSize); + + when(client.uploadPart(uploadArgCaptor.capture())).thenAnswer(invocationOnMock -> { + final UploadPartRequest request = (UploadPartRequest) invocationOnMock.getArguments()[0]; + final UploadPartResult response = new UploadPartResult(); + response.setPartNumber(request.getPartNumber()); + response.setETag(expectedEtags.get(request.getPartNumber() - 1)); + return response; + }); + + final ArgumentCaptor compArgCaptor = ArgumentCaptor.forClass(CompleteMultipartUploadRequest.class); + when(client.completeMultipartUpload(compArgCaptor.capture())).thenReturn(new CompleteMultipartUploadResult()); + + final ByteArrayInputStream inputStream = new ByteArrayInputStream(new byte[0]); + final S3BlobContainer blobContainer = new S3BlobContainer(blobPath, blobStore); + blobContainer.executeMultipartUpload(blobStore, blobName, inputStream, blobSize); + + final InitiateMultipartUploadRequest initRequest = initArgCaptor.getValue(); + assertEquals(bucketName, initRequest.getBucketName()); + assertEquals(blobPath.buildAsString() + blobName, initRequest.getKey()); + assertEquals(storageClass, initRequest.getStorageClass()); + assertEquals(cannedAccessControlList, initRequest.getCannedACL()); + if (serverSideEncryption) { + assertEquals(ObjectMetadata.AES_256_SERVER_SIDE_ENCRYPTION, initRequest.getObjectMetadata().getSSEAlgorithm()); + } + + final Tuple numberOfParts = S3BlobContainer.numberOfMultiparts(blobSize, bufferSize); + + final List uploadRequests = uploadArgCaptor.getAllValues(); + assertEquals(numberOfParts.v1().intValue(), uploadRequests.size()); + + for (int i = 0; i < 
uploadRequests.size(); i++) { + UploadPartRequest uploadRequest = uploadRequests.get(i); + + assertEquals(bucketName, uploadRequest.getBucketName()); + assertEquals(blobPath.buildAsString() + blobName, uploadRequest.getKey()); + assertEquals(initResult.getUploadId(), uploadRequest.getUploadId()); + assertEquals(i + 1, uploadRequest.getPartNumber()); + assertEquals(inputStream, uploadRequest.getInputStream()); + + if (i == (uploadRequests.size() -1)) { + assertTrue(uploadRequest.isLastPart()); + assertEquals(numberOfParts.v2().longValue(), uploadRequest.getPartSize()); + } else { + assertFalse(uploadRequest.isLastPart()); + assertEquals(bufferSize, uploadRequest.getPartSize()); + } + } + + final CompleteMultipartUploadRequest compRequest = compArgCaptor.getValue(); + assertEquals(bucketName, compRequest.getBucketName()); + assertEquals(blobPath.buildAsString() + blobName, compRequest.getKey()); + assertEquals(initResult.getUploadId(), compRequest.getUploadId()); + + List actualETags = compRequest.getPartETags().stream().map(PartETag::getETag).collect(Collectors.toList()); + assertEquals(expectedEtags, actualETags); + } + + public void testExecuteMultipartUploadAborted() throws IOException { + final String bucketName = randomAlphaOfLengthBetween(1, 10); + final String blobName = randomAlphaOfLengthBetween(1, 10); + final BlobPath blobPath = new BlobPath(); + + final long blobSize = ByteSizeUnit.MB.toBytes(765); + final long bufferSize = ByteSizeUnit.MB.toBytes(150); + + final S3BlobStore blobStore = mock(S3BlobStore.class); + when(blobStore.bucket()).thenReturn(bucketName); + when(blobStore.bufferSizeInBytes()).thenReturn(bufferSize); + when(blobStore.getStorageClass()).thenReturn(randomFrom(StorageClass.values())); + + final AmazonS3 client = mock(AmazonS3.class); + when(blobStore.client()).thenReturn(client); + + final String uploadId = randomAlphaOfLength(25); + + final int stage = randomInt(2); + final List exceptions = Arrays.asList( + new AmazonClientException("Expected initialization request to fail"), + new AmazonClientException("Expected upload part request to fail"), + new AmazonClientException("Expected completion request to fail") + ); + + if (stage == 0) { + // Fail the initialization request + when(client.initiateMultipartUpload(any(InitiateMultipartUploadRequest.class))) + .thenThrow(exceptions.get(stage)); + + } else if (stage == 1) { + final InitiateMultipartUploadResult initResult = new InitiateMultipartUploadResult(); + initResult.setUploadId(uploadId); + when(client.initiateMultipartUpload(any(InitiateMultipartUploadRequest.class))).thenReturn(initResult); + + // Fail the upload part request + when(client.uploadPart(any(UploadPartRequest.class))) + .thenThrow(exceptions.get(stage)); + + } else { + final InitiateMultipartUploadResult initResult = new InitiateMultipartUploadResult(); + initResult.setUploadId(uploadId); + when(client.initiateMultipartUpload(any(InitiateMultipartUploadRequest.class))).thenReturn(initResult); + + when(client.uploadPart(any(UploadPartRequest.class))).thenAnswer(invocationOnMock -> { + final UploadPartRequest request = (UploadPartRequest) invocationOnMock.getArguments()[0]; + final UploadPartResult response = new UploadPartResult(); + response.setPartNumber(request.getPartNumber()); + response.setETag(randomAlphaOfLength(20)); + return response; + }); + + // Fail the completion request + when(client.completeMultipartUpload(any(CompleteMultipartUploadRequest.class))) + .thenThrow(exceptions.get(stage)); + } + + final ArgumentCaptor argumentCaptor 
= ArgumentCaptor.forClass(AbortMultipartUploadRequest.class); + doNothing().when(client).abortMultipartUpload(argumentCaptor.capture()); + + final IOException e = expectThrows(IOException.class, () -> { + final S3BlobContainer blobContainer = new S3BlobContainer(blobPath, blobStore); + blobContainer.executeMultipartUpload(blobStore, blobName, new ByteArrayInputStream(new byte[0]), blobSize); + }); + + assertEquals("Unable to upload object [" + blobName + "] using multipart upload", e.getMessage()); + assertThat(e.getCause(), instanceOf(AmazonClientException.class)); + assertEquals(exceptions.get(stage).getMessage(), e.getCause().getMessage()); + + if (stage == 0) { + verify(client, times(1)).initiateMultipartUpload(any(InitiateMultipartUploadRequest.class)); + verify(client, times(0)).uploadPart(any(UploadPartRequest.class)); + verify(client, times(0)).completeMultipartUpload(any(CompleteMultipartUploadRequest.class)); + verify(client, times(0)).abortMultipartUpload(any(AbortMultipartUploadRequest.class)); + + } else { + verify(client, times(1)).initiateMultipartUpload(any(InitiateMultipartUploadRequest.class)); + + if (stage == 1) { + verify(client, times(1)).uploadPart(any(UploadPartRequest.class)); + verify(client, times(0)).completeMultipartUpload(any(CompleteMultipartUploadRequest.class)); + } else { + verify(client, times(6)).uploadPart(any(UploadPartRequest.class)); + verify(client, times(1)).completeMultipartUpload(any(CompleteMultipartUploadRequest.class)); + } + + verify(client, times(1)).abortMultipartUpload(any(AbortMultipartUploadRequest.class)); + + final AbortMultipartUploadRequest abortRequest = argumentCaptor.getValue(); + assertEquals(bucketName, abortRequest.getBucketName()); + assertEquals(blobName, abortRequest.getKey()); + assertEquals(uploadId, abortRequest.getUploadId()); + } + } + + public void testNumberOfMultipartsWithZeroPartSize() { + IllegalArgumentException e = + expectThrows(IllegalArgumentException.class, () -> S3BlobContainer.numberOfMultiparts(randomNonNegativeLong(), 0L)); + assertEquals("Part size must be greater than zero", e.getMessage()); + } + + public void testNumberOfMultiparts() { + final ByteSizeUnit unit = randomFrom(ByteSizeUnit.BYTES, ByteSizeUnit.KB, ByteSizeUnit.MB, ByteSizeUnit.GB); + final long size = unit.toBytes(randomIntBetween(2, 1000)); + final int factor = randomIntBetween(2, 10); + + // Fits in 1 empty part + assertNumberOfMultiparts(1, 0L, 0L, size); + + // Fits in 1 part exactly + assertNumberOfMultiparts(1, size, size, size); + assertNumberOfMultiparts(1, size, size, size * factor); + + // Fits in N parts exactly + assertNumberOfMultiparts(factor, size, size * factor, size); + + // Fits in N parts plus a bit more + final long remaining = randomIntBetween(1, (size > Integer.MAX_VALUE) ? 
Integer.MAX_VALUE : (int) size - 1); + assertNumberOfMultiparts(factor + 1, remaining, size * factor + remaining, size); + } + + private static void assertNumberOfMultiparts(final int expectedParts, final long expectedRemaining, long totalSize, long partSize) { + final Tuple result = S3BlobContainer.numberOfMultiparts(totalSize, partSize); + + assertEquals("Expected number of parts [" + expectedParts + "] but got [" + result.v1() + "]", expectedParts, (long) result.v1()); + assertEquals("Expected remaining [" + expectedRemaining + "] but got [" + result.v2() + "]", expectedRemaining, (long) result.v2()); + } + @AfterClass public static void closeMockSocket() throws IOException, InterruptedException { mockS3ServerSocket.close(); diff --git a/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3OutputStreamTests.java b/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3OutputStreamTests.java deleted file mode 100644 index 8f4c7daea7edf..0000000000000 --- a/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3OutputStreamTests.java +++ /dev/null @@ -1,143 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.repositories.s3; - -import org.elasticsearch.common.unit.ByteSizeUnit; -import org.elasticsearch.common.unit.ByteSizeValue; -import org.elasticsearch.test.ESTestCase; - -import java.io.ByteArrayOutputStream; -import java.io.IOException; -import java.util.Arrays; - -import static org.elasticsearch.common.io.Streams.copy; -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.is; - -/** - * Unit test for {@link S3OutputStream}. 
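For a concrete instance of the cases asserted in testNumberOfMultiparts, take a total of 10 bytes and a 3-byte part size: the split is 4 parts, the last carrying the single remaining byte. The same arithmetic in plain Java, illustrative only and without calling the package-private method (the class name is made up):

    // Mirrors numberOfMultiparts(10, 3): 3 full parts plus a 1-byte trailing part.
    public class NumberOfMultipartsExample {
        public static void main(String[] args) {
            final long totalSize = 10;
            final long partSize = 3;

            final long fullParts = totalSize / partSize;    // 3
            final long remainder = totalSize % partSize;    // 1

            final long parts = remainder == 0 ? fullParts : fullParts + 1;     // 4
            final long lastPartSize = remainder == 0 ? partSize : remainder;   // 1

            System.out.println(parts + " parts, last part of " + lastPartSize + " byte(s)");
        }
    }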
- */ -public class S3OutputStreamTests extends ESTestCase { - private static final int BUFFER_SIZE = new ByteSizeValue(5, ByteSizeUnit.MB).bytesAsInt(); - - public void testWriteLessDataThanBufferSize() throws IOException { - MockDefaultS3OutputStream out = newS3OutputStream(BUFFER_SIZE); - byte[] content = randomUnicodeOfLengthBetween(1, 512).getBytes("UTF-8"); - copy(content, out); - - // Checks length & content - assertThat(out.getLength(), equalTo((long) content.length)); - assertThat(Arrays.equals(content, out.toByteArray()), equalTo(true)); - - // Checks single/multi part upload - assertThat(out.getBufferSize(), equalTo(BUFFER_SIZE)); - assertThat(out.getFlushCount(), equalTo(1)); - assertThat(out.getNumberOfUploadRequests(), equalTo(1)); - assertFalse(out.isMultipart()); - - } - - public void testWriteSameDataThanBufferSize() throws IOException { - int size = randomIntBetween(BUFFER_SIZE, 2 * BUFFER_SIZE); - MockDefaultS3OutputStream out = newS3OutputStream(size); - - ByteArrayOutputStream content = new ByteArrayOutputStream(size); - for (int i = 0; i < size; i++) { - content.write(randomByte()); - } - copy(content.toByteArray(), out); - - // Checks length & content - assertThat(out.getLength(), equalTo((long) size)); - assertThat(Arrays.equals(content.toByteArray(), out.toByteArray()), equalTo(true)); - - // Checks single/multi part upload - assertThat(out.getBufferSize(), equalTo(size)); - assertThat(out.getFlushCount(), equalTo(1)); - assertThat(out.getNumberOfUploadRequests(), equalTo(1)); - assertFalse(out.isMultipart()); - - } - - public void testWriteExactlyNTimesMoreDataThanBufferSize() throws IOException { - int n = randomIntBetween(2, 3); - int length = n * BUFFER_SIZE; - ByteArrayOutputStream content = new ByteArrayOutputStream(length); - - for (int i = 0; i < length; i++) { - content.write(randomByte()); - } - - MockDefaultS3OutputStream out = newS3OutputStream(BUFFER_SIZE); - copy(content.toByteArray(), out); - - // Checks length & content - assertThat(out.getLength(), equalTo((long) length)); - assertThat(Arrays.equals(content.toByteArray(), out.toByteArray()), equalTo(true)); - - // Checks single/multi part upload - assertThat(out.getBufferSize(), equalTo(BUFFER_SIZE)); - assertThat(out.getFlushCount(), equalTo(n)); - - assertThat(out.getNumberOfUploadRequests(), equalTo(n)); - assertTrue(out.isMultipart()); - } - - public void testWriteRandomNumberOfBytes() throws IOException { - Integer randomBufferSize = randomIntBetween(BUFFER_SIZE, 2 * BUFFER_SIZE); - MockDefaultS3OutputStream out = newS3OutputStream(randomBufferSize); - - Integer randomLength = randomIntBetween(1, 2 * BUFFER_SIZE); - ByteArrayOutputStream content = new ByteArrayOutputStream(randomLength); - for (int i = 0; i < randomLength; i++) { - content.write(randomByte()); - } - - copy(content.toByteArray(), out); - - // Checks length & content - assertThat(out.getLength(), equalTo((long) randomLength)); - assertThat(Arrays.equals(content.toByteArray(), out.toByteArray()), equalTo(true)); - - assertThat(out.getBufferSize(), equalTo(randomBufferSize)); - int times = (int) Math.ceil(randomLength.doubleValue() / randomBufferSize.doubleValue()); - assertThat(out.getFlushCount(), equalTo(times)); - if (times > 1) { - assertTrue(out.isMultipart()); - } else { - assertFalse(out.isMultipart()); - } - } - - public void testWrongBufferSize() throws IOException { - Integer randomBufferSize = randomIntBetween(1, 4 * 1024 * 1024); - try { - newS3OutputStream(randomBufferSize); - fail("Buffer size can't be smaller than 
5mb"); - } catch (IllegalArgumentException e) { - assertThat(e.getMessage(), is("Buffer size can't be smaller than 5mb")); - } - } - - private MockDefaultS3OutputStream newS3OutputStream(int bufferSizeInBytes) { - return new MockDefaultS3OutputStream(bufferSizeInBytes); - } - -} diff --git a/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/mixed_cluster/10_basic.yml b/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/mixed_cluster/10_basic.yml index b0d6a23cc4095..0810341db1317 100644 --- a/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/mixed_cluster/10_basic.yml +++ b/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/mixed_cluster/10_basic.yml @@ -86,3 +86,46 @@ ingest.get_pipeline: id: "my_pipeline" - match: { my_pipeline.description: "_description" } + +--- +"Use the percolate query in mixed cluster": + - do: + search: + index: queries + body: + query: + percolate: + field: query + document: + field1: value + - match: { hits.total: 1 } + - match: { hits.hits.0._id: q1 } + + - do: + search: + index: queries + body: + sort: _id + query: + percolate: + field: query + document: + field1: value + field2: value + - match: { hits.total: 2 } + - match: { hits.hits.0._id: q1 } + - match: { hits.hits.1._id: q2 } + + - do: + search: + index: queries + body: + query: + percolate: + field: query + type: doc + document: + field2: value + field3: value + - match: { hits.total: 1 } + - match: { hits.hits.0._id: q3 } diff --git a/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/old_cluster/10_basic.yml b/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/old_cluster/10_basic.yml index 7f2c24e23307b..b24025f356c3f 100644 --- a/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/old_cluster/10_basic.yml +++ b/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/old_cluster/10_basic.yml @@ -100,3 +100,105 @@ params: f1: v5_old - match: { hits.total: 1 } + +--- +"Index percolator queries and use the percolate query in old cluster": + - do: + indices.create: + index: queries + body: + mappings: + doc: + properties: + query: + type: percolator + field1: + type: keyword + field2: + type: keyword + field3: + type: keyword + + - do: + index: + index: queries + type: doc + id: q1 + body: + query: + term: + field1: value + + - do: + index: + index: queries + type: doc + id: q2 + body: + query: + bool: + must: + - term: + field1: value + - term: + field2: value + + - do: + index: + index: queries + type: doc + id: q3 + body: + query: + bool: + minimum_should_match: 2 + should: + - term: + field2: value + - term: + field3: value + + - do: + indices.refresh: + index: queries + + - do: + search: + index: queries + body: + query: + percolate: + field: query + document: + field1: value + - match: { hits.total: 1 } + - match: { hits.hits.0._id: q1 } + + - do: + search: + index: queries + body: + sort: _id + query: + percolate: + field: query + document: + field1: value + field2: value + - match: { hits.total: 2 } + - match: { hits.hits.0._id: q1 } + - match: { hits.hits.1._id: q2 } + + - do: + search: + index: queries + body: + sort: _id + query: + percolate: + field: query + document: + field2: value + field3: value + - match: { hits.total: 1 } + - match: { hits.hits.0._id: q3 } diff --git a/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/upgraded_cluster/10_basic.yml b/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/upgraded_cluster/10_basic.yml index a3608b0fdedd0..cdc94f638b5f5 100644 --- 
a/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/upgraded_cluster/10_basic.yml +++ b/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/upgraded_cluster/10_basic.yml @@ -66,3 +66,62 @@ ingest.get_pipeline: id: "my_pipeline" - match: { my_pipeline.description: "_description" } + +--- +"Index percolator query and use the percolate query in upgraded cluster": + - do: + index: + index: queries + type: doc + id: q4 + refresh: true + body: + query: + bool: + minimum_should_match: 2 + should: + - term: + field1: value + - term: + field2: value + + - do: + search: + index: queries + body: + query: + percolate: + field: query + document: + field1: value + - match: { hits.total: 1 } + - match: { hits.hits.0._id: q1 } + + - do: + search: + index: queries + body: + sort: _id + query: + percolate: + field: query + document: + field1: value + field2: value + - match: { hits.total: 3 } + - match: { hits.hits.0._id: q1 } + - match: { hits.hits.1._id: q2 } + - match: { hits.hits.2._id: q4 } + + - do: + search: + index: queries + body: + query: + percolate: + field: query + document: + field2: value + field3: value + - match: { hits.total: 1 } + - match: { hits.hits.0._id: q3 } diff --git a/qa/vagrant/src/test/resources/packaging/tests/module_and_plugin_test_cases.bash b/qa/vagrant/src/test/resources/packaging/tests/module_and_plugin_test_cases.bash index 91c06974266fa..7b5f3a9962c26 100644 --- a/qa/vagrant/src/test/resources/packaging/tests/module_and_plugin_test_cases.bash +++ b/qa/vagrant/src/test/resources/packaging/tests/module_and_plugin_test_cases.bash @@ -206,10 +206,10 @@ fi } @test "[$GROUP] install ingest-attachment plugin" { - # we specify the version on the poi-3.15.jar so that the test does + # we specify the version on the poi-3.16.jar so that the test does # not spuriously pass if the jar is missing but the other poi jars # are present - install_and_check_plugin ingest attachment bcprov-jdk15on-*.jar tika-core-*.jar pdfbox-*.jar poi-3.15.jar poi-ooxml-3.15.jar poi-ooxml-schemas-*.jar poi-scratchpad-*.jar + install_and_check_plugin ingest attachment bcprov-jdk15on-*.jar tika-core-*.jar pdfbox-*.jar poi-3.16.jar poi-ooxml-3.16.jar poi-ooxml-schemas-*.jar poi-scratchpad-*.jar } @test "[$GROUP] install ingest-geoip plugin" { diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/scroll/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/scroll/10_basic.yml index 0f037b890fc4b..b7fd64770d3cd 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/scroll/10_basic.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/scroll/10_basic.yml @@ -197,3 +197,20 @@ clear_scroll: scroll_id: $scroll_id +--- +"Scroll cannot used the request cache": + - skip: + version: " - 6.99.99" + reason: the error message has been added in v7.0.0 + - do: + indices.create: + index: test_scroll + - do: + catch: /\[request_cache\] cannot be used in a a scroll context/ + search: + index: test_scroll + scroll: 1m + request_cache: true + body: + query: + match_all: {} diff --git a/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java b/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java index 9230a4eb248fc..2cd4ef94ae0e7 100644 --- a/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java @@ -2568,7 +2568,7 @@ public void testProfileSettings() { Settings 
randomSettings = randomFrom(random(), globalSettings, transportSettings, profileSettings); ClusterSettings clusterSettings = new ClusterSettings(randomSettings, ClusterSettings .BUILT_IN_CLUSTER_SETTINGS); - clusterSettings.validate(randomSettings); + clusterSettings.validate(randomSettings, false); TcpTransport.ProfileSettings settings = new TcpTransport.ProfileSettings( Settings.builder().put(randomSettings).put("transport.profiles.some_profile.port", "9700-9800").build(), // port is required "some_profile");
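The scroll REST test added above expects request_cache to be rejected in a scroll context; the same check can be observed from Java by building the request directly. A hedged sketch under that assumption: the index name test_scroll is simply reused from the REST test, the class name is made up, and the setters used are the standard SearchRequest fluent methods.

    import org.elasticsearch.action.ActionRequestValidationException;
    import org.elasticsearch.action.search.SearchRequest;
    import org.elasticsearch.common.unit.TimeValue;

    // Illustrative only: a scroll search that also enables the request cache should fail validation.
    public class ScrollRequestCacheExample {
        public static void main(String[] args) {
            SearchRequest request = new SearchRequest("test_scroll")
                .scroll(TimeValue.timeValueMinutes(1))
                .requestCache(true);

            ActionRequestValidationException e = request.validate();
            System.out.println(e == null ? "request is valid" : e.getMessage());
        }
    }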