Enable strict duplicate checks for all XContent types #22225

Merged
22 changes: 22 additions & 0 deletions core/src/main/java/org/elasticsearch/common/xcontent/XContent.java
@@ -19,6 +19,7 @@

package org.elasticsearch.common.xcontent;

import org.elasticsearch.common.Booleans;
import org.elasticsearch.common.bytes.BytesReference;

import java.io.IOException;
@@ -33,6 +34,27 @@
*/
public interface XContent {

/*
* NOTE: This comment is only meant for maintainers of the Elasticsearch code base and is intentionally not a Javadoc comment as it
* describes an undocumented system property.
*
*
* Determines whether the XContent parser will always check for duplicate keys. This behavior is enabled by default but
* can be disabled by setting the otherwise undocumented system property "es.xcontent.strict_duplicate_detection" to "false".
*
* Before we enabled this mode, we had custom duplicate checks in various parts of the code base. As the user can still disable this
* mode and fall back to the legacy duplicate checks, we still need to keep the custom duplicate checks around and we also need to keep
* the tests around.
*
* If this fallback via system property is removed one day in the future you can remove all tests that call this method and also remove
* the corresponding custom duplicate check code.
*
*/
static boolean isStrictDuplicateDetectionEnabled() {
// Don't allow duplicate keys in any XContent type by default but let the user opt out
return Booleans.parseBooleanExact(System.getProperty("es.xcontent.strict_duplicate_detection", "true"));
}

/**
* The type this content handles and produces.
*/
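For context, the pattern this change applies to every XContent type can be sketched with plain Jackson as follows. This is an illustrative, self-contained example, not Elasticsearch's actual factory wiring: only the system property name and the Jackson feature come from the diff, and it uses Boolean.parseBoolean where the real code uses Booleans.parseBooleanExact.

import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonParseException;
import com.fasterxml.jackson.core.JsonParser;

public class StrictDuplicateDetectionSketch {
    public static void main(String[] args) throws Exception {
        // Strict by default; opt out via -Des.xcontent.strict_duplicate_detection=false
        // (simplified: the real code uses Booleans.parseBooleanExact instead of Boolean.parseBoolean).
        boolean strict = Boolean.parseBoolean(System.getProperty("es.xcontent.strict_duplicate_detection", "true"));

        JsonFactory factory = new JsonFactory();
        // This PR configures the same Jackson feature on the JSON, CBOR, Smile and YAML factories.
        factory.configure(JsonParser.Feature.STRICT_DUPLICATE_DETECTION, strict);

        try (JsonParser parser = factory.createParser("{\"key\":1,\"key\":2}")) {
            while (parser.nextToken() != null) {
                // With strict detection enabled, advancing onto the second "key" throws.
            }
            System.out.println("duplicate keys accepted (detection disabled)");
        } catch (JsonParseException e) {
            System.out.println(e.getMessage()); // starts with: Duplicate field 'key'
        }
    }
}

Running the sketch with -Des.xcontent.strict_duplicate_detection=false shows the legacy, lenient behaviour; as the comment above stresses, that escape hatch is intentionally undocumented and may be removed later.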
@@ -21,6 +21,7 @@

import com.fasterxml.jackson.core.JsonEncoding;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.dataformat.cbor.CBORFactory;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.common.bytes.BytesReference;
@@ -54,6 +55,7 @@ public static XContentBuilder contentBuilder() throws IOException {
cborFactory.configure(CBORFactory.Feature.FAIL_ON_SYMBOL_HASH_OVERFLOW, false); // this trips on many mappings now...
// Do not automatically close unclosed objects/arrays in com.fasterxml.jackson.dataformat.cbor.CBORGenerator#close() method
cborFactory.configure(JsonGenerator.Feature.AUTO_CLOSE_JSON_CONTENT, false);
cborFactory.configure(JsonParser.Feature.STRICT_DUPLICATE_DETECTION, XContent.isStrictDuplicateDetectionEnabled());
cborXContent = new CborXContent();
}

@@ -23,7 +23,6 @@
import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.core.JsonParser;
import org.elasticsearch.common.Booleans;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.FastStringReader;
import org.elasticsearch.common.xcontent.XContent;
@@ -50,35 +49,14 @@ public static XContentBuilder contentBuilder() throws IOException {

public static final JsonXContent jsonXContent;

/*
* NOTE: This comment is only meant for maintainers of the Elasticsearch code base and is intentionally not a Javadoc comment as it
* describes an undocumented system property.
*
*
* Determines whether the JSON parser will always check for duplicate keys in JSON content. This behavior is enabled by default but
* can be disabled by setting the otherwise undocumented system property "es.json.strict_duplicate_detection" to "false".
*
* Before we've enabled this mode, we had custom duplicate checks in various parts of the code base. As the user can still disable this
* mode and fall back to the legacy duplicate checks, we still need to keep the custom duplicate checks around and we also need to keep
* the tests around.
*
* If this fallback via system property is removed one day in the future you can remove all tests that call this method and also remove
* the corresponding custom duplicate check code.
*
*/
public static boolean isStrictDuplicateDetectionEnabled() {
// Don't allow duplicate keys in JSON content by default but let the user opt out
return Booleans.parseBooleanExact(System.getProperty("es.json.strict_duplicate_detection", "true"));
}

static {
jsonFactory = new JsonFactory();
jsonFactory.configure(JsonGenerator.Feature.QUOTE_FIELD_NAMES, true);
jsonFactory.configure(JsonParser.Feature.ALLOW_COMMENTS, true);
jsonFactory.configure(JsonFactory.Feature.FAIL_ON_SYMBOL_HASH_OVERFLOW, false); // this trips on many mappings now...
// Do not automatically close unclosed objects/arrays in com.fasterxml.jackson.core.json.UTF8JsonGenerator#close() method
jsonFactory.configure(JsonGenerator.Feature.AUTO_CLOSE_JSON_CONTENT, false);
jsonFactory.configure(JsonParser.Feature.STRICT_DUPLICATE_DETECTION, isStrictDuplicateDetectionEnabled());
jsonFactory.configure(JsonParser.Feature.STRICT_DUPLICATE_DETECTION, XContent.isStrictDuplicateDetectionEnabled());
jsonXContent = new JsonXContent();
}

@@ -21,6 +21,7 @@

import com.fasterxml.jackson.core.JsonEncoding;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.dataformat.smile.SmileFactory;
import com.fasterxml.jackson.dataformat.smile.SmileGenerator;
import org.elasticsearch.common.bytes.BytesReference;
@@ -55,6 +56,7 @@ public static XContentBuilder contentBuilder() throws IOException {
smileFactory.configure(SmileFactory.Feature.FAIL_ON_SYMBOL_HASH_OVERFLOW, false); // this trips on many mappings now...
// Do not automatically close unclosed objects/arrays in com.fasterxml.jackson.dataformat.smile.SmileGenerator#close() method
smileFactory.configure(JsonGenerator.Feature.AUTO_CLOSE_JSON_CONTENT, false);
smileFactory.configure(JsonParser.Feature.STRICT_DUPLICATE_DETECTION, XContent.isStrictDuplicateDetectionEnabled());
smileXContent = new SmileXContent();
}

@@ -20,6 +20,7 @@
package org.elasticsearch.common.xcontent.yaml;

import com.fasterxml.jackson.core.JsonEncoding;
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.dataformat.yaml.YAMLFactory;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.common.bytes.BytesReference;
@@ -50,6 +51,7 @@ public static XContentBuilder contentBuilder() throws IOException {

static {
yamlFactory = new YAMLFactory();
yamlFactory.configure(JsonParser.Feature.STRICT_DUPLICATE_DETECTION, XContent.isStrictDuplicateDetectionEnabled());
yamlXContent = new YamlXContent();
}

@@ -22,7 +22,7 @@
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.settings.SettingsException;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.common.xcontent.XContent;
import org.elasticsearch.test.ESTestCase;

import static org.hamcrest.CoreMatchers.containsString;
@@ -49,8 +49,8 @@ public void testSimpleJsonSettings() throws Exception {
}

public void testDuplicateKeysThrowsException() {
assumeFalse("Test only makes sense if JSON parser doesn't have strict duplicate checks enabled",
JsonXContent.isStrictDuplicateDetectionEnabled());
assumeFalse("Test only makes sense if XContent parser doesn't have strict duplicate checks enabled",
XContent.isStrictDuplicateDetectionEnabled());
final String json = "{\"foo\":\"bar\",\"foo\":\"baz\"}";
final SettingsException e = expectThrows(SettingsException.class, () -> Settings.builder().loadFromSource(json).build());
assertEquals(e.getCause().getClass(), ElasticsearchParseException.class);
@@ -22,6 +22,7 @@
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.settings.SettingsException;
import org.elasticsearch.common.xcontent.XContent;
import org.elasticsearch.test.ESTestCase;

import java.nio.charset.StandardCharsets;
@@ -68,6 +69,9 @@ public void testIndentationWithExplicitDocumentStart() throws Exception {
}

public void testDuplicateKeysThrowsException() {
assumeFalse("Test only makes sense if XContent parser doesn't have strict duplicate checks enabled",
XContent.isStrictDuplicateDetectionEnabled());

String yaml = "foo: bar\nfoo: baz";
SettingsException e = expectThrows(SettingsException.class, () -> {
Settings.builder().loadFromSource(yaml);
@@ -22,6 +22,7 @@
import com.fasterxml.jackson.core.JsonGenerationException;
import com.fasterxml.jackson.core.JsonGenerator;

import com.fasterxml.jackson.core.JsonParseException;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.Constants;
import org.elasticsearch.cluster.metadata.IndexMetaData;
@@ -66,6 +67,7 @@
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.instanceOf;
import static org.hamcrest.Matchers.notNullValue;
import static org.hamcrest.Matchers.startsWith;

public abstract class BaseXContentTestCase extends ESTestCase {

@@ -984,6 +986,22 @@ public void testSelfReferencingIterableTwoLevels() throws IOException {
assertThat(e.getMessage(), containsString("Object has already been built and is self-referencing itself"));
}

public void testChecksForDuplicates() throws Exception {
assumeTrue("Test only makes sense if XContent parser has strict duplicate checks enabled",
XContent.isStrictDuplicateDetectionEnabled());

BytesReference bytes = builder()
.startObject()
.field("key", 1)
.field("key", 2)
.endObject()
.bytes();

JsonParseException pex = expectThrows(JsonParseException.class, () -> createParser(xcontentType().xContent(), bytes).map());
assertThat(pex.getMessage(), startsWith("Duplicate field 'key'"));
}


private static void expectUnclosedException(ThrowingRunnable runnable) {
IllegalStateException e = expectThrows(IllegalStateException.class, runnable);
assertThat(e.getMessage(), containsString("Failed to close the XContentBuilder"));
@@ -169,8 +169,8 @@ public void testMissingFirstConstructorArgButNotRequired() throws IOException {
}

public void testRepeatedConstructorParam() throws IOException {
assumeFalse("Test only makes sense if JSON parser doesn't have strict duplicate checks enabled",
JsonXContent.isStrictDuplicateDetectionEnabled());
assumeFalse("Test only makes sense if XContent parser doesn't have strict duplicate checks enabled",
XContent.isStrictDuplicateDetectionEnabled());
XContentParser parser = createParser(JsonXContent.jsonXContent,
"{\n"
+ " \"vegetable\": 1,\n"
@@ -22,7 +22,6 @@
import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonGenerator;

import com.fasterxml.jackson.core.JsonParseException;
import org.elasticsearch.common.xcontent.BaseXContentTestCase;
import org.elasticsearch.common.xcontent.XContentType;

@@ -40,13 +39,4 @@ public void testBigInteger() throws Exception {
JsonGenerator generator = new JsonFactory().createGenerator(os);
doTestBigInteger(generator, os);
}

public void testChecksForDuplicates() throws Exception {
assumeTrue("Test only makes sense if JSON parser doesn't have strict duplicate checks enabled",
JsonXContent.isStrictDuplicateDetectionEnabled());

JsonParseException pex = expectThrows(JsonParseException.class,
() -> XContentType.JSON.xContent().createParser("{ \"key\": 1, \"key\": 2 }").map());
assertEquals("Duplicate field 'key'", pex.getMessage());
}
}
@@ -26,6 +26,7 @@
import org.apache.lucene.search.Query;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.xcontent.XContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentType;
@@ -340,8 +341,8 @@ public void testUnknownQueryName() throws IOException {
* test that two queries in object throws error
*/
public void testTooManyQueriesInObject() throws IOException {
assumeFalse("Test only makes sense if JSON parser doesn't have strict duplicate checks enabled",
JsonXContent.isStrictDuplicateDetectionEnabled());
assumeFalse("Test only makes sense if XContent parser doesn't have strict duplicate checks enabled",
XContent.isStrictDuplicateDetectionEnabled());
String clauseType = randomFrom("must", "should", "must_not", "filter");
// should also throw error if invalid query is preceded by a valid one
String query = "{\n" +
@@ -22,6 +22,7 @@
import org.apache.lucene.search.ConstantScoreQuery;
import org.apache.lucene.search.Query;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.xcontent.XContent;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.test.AbstractQueryTestCase;
@@ -66,8 +67,8 @@ public void testFilterElement() throws IOException {
* test that multiple "filter" elements causes {@link ParsingException}
*/
public void testMultipleFilterElements() throws IOException {
assumeFalse("Test only makes sense if JSON parser doesn't have strict duplicate checks enabled",
JsonXContent.isStrictDuplicateDetectionEnabled());
assumeFalse("Test only makes sense if XContent parser doesn't have strict duplicate checks enabled",
XContent.isStrictDuplicateDetectionEnabled());
String queryString = "{ \"" + ConstantScoreQueryBuilder.NAME + "\" : {\n" +
"\"filter\" : { \"term\": { \"foo\": \"a\" } },\n" +
"\"filter\" : { \"term\": { \"foo\": \"x\" } },\n" +
@@ -36,6 +36,7 @@
import org.elasticsearch.common.lucene.search.function.FunctionScoreQuery;
import org.elasticsearch.common.lucene.search.function.WeightFactorFunction;
import org.elasticsearch.common.unit.DistanceUnit;
import org.elasticsearch.common.xcontent.XContent;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.common.xcontent.json.JsonXContent;
@@ -739,8 +740,8 @@ public void testMalformedQueryMultipleQueryObjects() throws IOException {
}

public void testMalformedQueryMultipleQueryElements() throws IOException {
assumeFalse("Test only makes sense if JSON parser doesn't have strict duplicate checks enabled",
JsonXContent.isStrictDuplicateDetectionEnabled());
assumeFalse("Test only makes sense if XContent parser doesn't have strict duplicate checks enabled",
XContent.isStrictDuplicateDetectionEnabled());
String json = "{\n" +
" \"function_score\":{\n" +
" \"query\":{\n" +
@@ -22,6 +22,7 @@
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.json.JsonXContent;
@@ -104,8 +105,8 @@ public void testTwoTypes() throws Exception {
}

public void testTwoAggs() throws Exception {
assumeFalse("Test only makes sense if JSON parser doesn't have strict duplicate checks enabled",
JsonXContent.isStrictDuplicateDetectionEnabled());
assumeFalse("Test only makes sense if XContent parser doesn't have strict duplicate checks enabled",
XContent.isStrictDuplicateDetectionEnabled());
XContentBuilder source = JsonXContent.contentBuilder()
.startObject()
.startObject("by_date")
@@ -180,8 +181,8 @@ public void testInvalidAggregationName() throws Exception {
}

public void testSameAggregationName() throws Exception {
assumeFalse("Test only makes sense if JSON parser doesn't have strict duplicate checks enabled",
JsonXContent.isStrictDuplicateDetectionEnabled());
assumeFalse("Test only makes sense if XContent parser doesn't have strict duplicate checks enabled",
XContent.isStrictDuplicateDetectionEnabled());
final String name = randomAsciiOfLengthBetween(1, 10);
XContentBuilder source = JsonXContent.contentBuilder()
.startObject()
@@ -23,6 +23,7 @@
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
@@ -149,7 +150,7 @@ public void testIllegalXContent() throws IOException {
"Required [field]");

// test two fieldnames
if (JsonXContent.isStrictDuplicateDetectionEnabled()) {
if (XContent.isStrictDuplicateDetectionEnabled()) {
logger.info("Skipping test as it uses a custom duplicate check that is obsolete when strict duplicate checks are enabled.");
} else {
directGenerator = "{ \"field\" : \"f1\", \"field\" : \"f2\" }";
6 changes: 3 additions & 3 deletions docs/reference/migration/migrate_6_0/rest.asciidoc
@@ -8,10 +8,10 @@ This feature was removed in the 5.x series, but a backwards-compatibility layer
system property `elasticsearch.json.allow_unquoted_field_names`. This backwards-compatibility layer
has been removed in Elasticsearch 6.0.0.

==== Duplicate Keys in JSON
==== Duplicate Keys in JSON, CBOR, Yaml and Smile

In previous versions of Elasticsearch, JSON documents were allowed to contain duplicate keys. Elasticsearch 6.0.0
enforces that all keys are unique.
In previous versions of Elasticsearch, documents were allowed to contain duplicate keys. Elasticsearch 6.0.0
enforces that all keys are unique. This applies to all content types: JSON, CBOR, Yaml and Smile.

==== Analyze API changes

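To illustrate the migration note above (the field name is made up for this example, not taken from the PR): a document body like the following was accepted in previous versions but is rejected with a parse error by 6.0.0, whether it is sent as JSON or as the equivalent CBOR, Yaml or Smile.

{
  "tag": "first",
  "tag": "second"
}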
@@ -36,8 +36,11 @@ public abstract class AbstractClientYamlTestFragmentParserTestCase extends ESTestCase {
@After
public void tearDown() throws Exception {
super.tearDown();
//this is the way to make sure that we consumed the whole yaml
assertThat(parser.currentToken(), nullValue());
parser.close();
// test may be skipped so we did not create a parser instance
if (parser != null) {
//this is the way to make sure that we consumed the whole yaml
assertThat(parser.currentToken(), nullValue());
parser.close();
}
}
}