Enable strict duplicate checks for JSON content

With this commit we enable the Jackson feature 'STRICT_DUPLICATE_DETECTION'
by default. This ensures that the JSON parser rejects any document in which
the same key appears more than once. While this has a performance impact,
benchmarking indicates that the typical drop in indexing throughput is
around 1-2%.

As a last resort, users can still disable strict duplicate checks by
setting `-Des.json.strict_duplicate_detection=false`. This system property
is intentionally undocumented.
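
As a sketch of what the parser now does with duplicate keys, here is a
minimal standalone Jackson example (illustrative only; Elasticsearch
configures this feature on its own JsonFactory in JsonXContent, as shown in
the diff below):

    import com.fasterxml.jackson.core.JsonFactory;
    import com.fasterxml.jackson.core.JsonParseException;
    import com.fasterxml.jackson.core.JsonParser;

    public final class DuplicateKeyDemo {
        public static void main(String[] args) throws Exception {
            JsonFactory factory = new JsonFactory();
            // the feature this commit enables by default in Elasticsearch
            factory.configure(JsonParser.Feature.STRICT_DUPLICATE_DETECTION, true);
            try (JsonParser parser = factory.createParser("{\"key\": 1, \"key\": 2}")) {
                while (parser.nextToken() != null) {
                    // advancing past the second "key" throws a JsonParseException
                }
            } catch (JsonParseException e) {
                System.out.println(e.getOriginalMessage()); // Duplicate field 'key'
            }
        }
    }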

Closes #19614
Daniel Mitterdorfer authored 2016-12-14 09:35:53 +01:00, committed by GitHub
parent 49bdd29f91
commit 7e5058037b
19 changed files with 358 additions and 244 deletions

View File

@ -23,6 +23,7 @@ import com.fasterxml.jackson.core.JsonEncoding;
import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.core.JsonParser;
import org.elasticsearch.common.Booleans;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.FastStringReader;
import org.elasticsearch.common.xcontent.XContent;
@ -45,10 +46,31 @@ public class JsonXContent implements XContent {
public static XContentBuilder contentBuilder() throws IOException {
return XContentBuilder.builder(jsonXContent);
}
private static final JsonFactory jsonFactory;
public static final JsonXContent jsonXContent;
/*
* NOTE: This comment is only meant for maintainers of the Elasticsearch code base and is intentionally not a Javadoc comment as it
* describes an undocumented system property.
*
* Determines whether the JSON parser will always check for duplicate keys in JSON content. This behavior is enabled by default but
* can be disabled by setting the otherwise undocumented system property "es.json.strict_duplicate_detection" to "false".
*
* Before we enabled this mode, we had custom duplicate checks in various parts of the code base. As the user can still disable this
* mode and fall back to the legacy duplicate checks, we need to keep both the custom duplicate checks and their tests around.
*
* If this fallback via system property is removed one day in the future, you can remove all tests that call this method and also
* remove the corresponding custom duplicate check code.
*/
public static boolean isStrictDuplicateDetectionEnabled() {
// Don't allow duplicate keys in JSON content by default but let the user opt out
return Booleans.parseBooleanExact(System.getProperty("es.json.strict_duplicate_detection", "true"));
}
static {
jsonFactory = new JsonFactory();
jsonFactory.configure(JsonGenerator.Feature.QUOTE_FIELD_NAMES, true);
@ -56,6 +78,7 @@ public class JsonXContent implements XContent {
jsonFactory.configure(JsonFactory.Feature.FAIL_ON_SYMBOL_HASH_OVERFLOW, false); // this trips on many mappings now...
// Do not automatically close unclosed objects/arrays in com.fasterxml.jackson.core.json.UTF8JsonGenerator#close() method
jsonFactory.configure(JsonGenerator.Feature.AUTO_CLOSE_JSON_CONTENT, false);
jsonFactory.configure(JsonParser.Feature.STRICT_DUPLICATE_DETECTION, isStrictDuplicateDetectionEnabled());
jsonXContent = new JsonXContent();
}
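
A short sketch of how the opt-out interacts with the static initializer above
(hypothetical usage; the property name is real, the surrounding code is not):

    // The factory is configured once, when JsonXContent is first loaded, so in
    // practice the property must be set on the JVM command line, e.g.
    //   -Des.json.strict_duplicate_detection=false
    // Setting it programmatically only has an effect before the class initializes:
    System.setProperty("es.json.strict_duplicate_detection", "false");
    assert JsonXContent.isStrictDuplicateDetectionEnabled() == false;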

View File

@ -67,7 +67,7 @@ public class FieldStatsRequestTests extends ESTestCase {
assertThat(request.getIndexConstraints()[3].getComparison(), equalTo(LTE));
assertThat(request.getIndexConstraints()[4].getField(), equalTo("field5"));
assertThat(request.getIndexConstraints()[4].getValue(), equalTo("2"));
assertThat(request.getIndexConstraints()[4].getProperty(), equalTo(MIN));
assertThat(request.getIndexConstraints()[4].getComparison(), equalTo(GT));
assertThat(request.getIndexConstraints()[5].getField(), equalTo("field5"));
assertThat(request.getIndexConstraints()[5].getValue(), equalTo("9"));

View File

@ -22,6 +22,7 @@ package org.elasticsearch.common.settings.loader;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.settings.SettingsException;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.test.ESTestCase;
import static org.hamcrest.CoreMatchers.containsString;
@ -48,6 +49,8 @@ public class JsonSettingsLoaderTests extends ESTestCase {
}
public void testDuplicateKeysThrowsException() {
assumeFalse("Test only makes sense if JSON parser doesn't have strict duplicate checks enabled",
JsonXContent.isStrictDuplicateDetectionEnabled());
final String json = "{\"foo\":\"bar\",\"foo\":\"baz\"}";
final SettingsException e = expectThrows(SettingsException.class, () -> Settings.builder().loadFromSource(json).build());
assertEquals(e.getCause().getClass(), ElasticsearchParseException.class);

View File

@ -98,7 +98,7 @@ public class ConstructingObjectParserTests extends ESTestCase {
}
public void testMissingAllConstructorArgs() throws IOException {
XContentParser parser = createParser(JsonXContent.jsonXContent,
"{\n"
+ " \"mineral\": 1\n"
+ "}");
@ -113,7 +113,7 @@ public class ConstructingObjectParserTests extends ESTestCase {
}
public void testMissingAllConstructorArgsButNotRequired() throws IOException {
XContentParser parser = createParser(JsonXContent.jsonXContent,
"{\n"
+ " \"mineral\": 1\n"
+ "}");
@ -122,7 +122,7 @@ public class ConstructingObjectParserTests extends ESTestCase {
}
public void testMissingSecondConstructorArg() throws IOException {
XContentParser parser = createParser(JsonXContent.jsonXContent,
"{\n"
+ " \"mineral\": 1,\n"
+ " \"animal\": \"cat\"\n"
@ -133,7 +133,7 @@ public class ConstructingObjectParserTests extends ESTestCase {
}
public void testMissingSecondConstructorArgButNotRequired() throws IOException {
XContentParser parser = createParser(JsonXContent.jsonXContent,
"{\n"
+ " \"mineral\": 1,\n"
+ " \"animal\": \"cat\"\n"
@ -146,7 +146,7 @@ public class ConstructingObjectParserTests extends ESTestCase {
}
public void testMissingFirstConstructorArg() throws IOException {
XContentParser parser = createParser(JsonXContent.jsonXContent,
"{\n"
+ " \"mineral\": 1,\n"
+ " \"vegetable\": 2\n"
@ -158,7 +158,7 @@ public class ConstructingObjectParserTests extends ESTestCase {
}
public void testMissingFirstConstructorArgButNotRequired() throws IOException {
XContentParser parser = createParser(JsonXContent.jsonXContent,
"{\n"
+ " \"mineral\": 1,\n"
+ " \"vegetable\": 2\n"
@ -169,7 +169,9 @@ public class ConstructingObjectParserTests extends ESTestCase {
}
public void testRepeatedConstructorParam() throws IOException {
assumeFalse("Test only makes sense if JSON parser doesn't have strict duplicate checks enabled",
JsonXContent.isStrictDuplicateDetectionEnabled());
XContentParser parser = createParser(JsonXContent.jsonXContent,
"{\n"
+ " \"vegetable\": 1,\n"
+ " \"vegetable\": 2\n"
@ -182,7 +184,7 @@ public class ConstructingObjectParserTests extends ESTestCase {
}
public void testBadParam() throws IOException {
XContentParser parser = createParser(JsonXContent.jsonXContent,
"{\n"
+ " \"animal\": \"cat\",\n"
+ " \"vegetable\": 2,\n"
@ -196,7 +198,7 @@ public class ConstructingObjectParserTests extends ESTestCase {
}
public void testBadParamBeforeObjectBuilt() throws IOException {
XContentParser parser = createParser(JsonXContent.jsonXContent,
"{\n"
+ " \"a\": \"supercalifragilisticexpialidocious\",\n"
+ " \"animal\": \"cat\"\n,"
@ -256,7 +258,7 @@ public class ConstructingObjectParserTests extends ESTestCase {
parser.declareString(ctorArgOptional ? optionalConstructorArg() : constructorArg(), new ParseField("yeah"));
// ctor arg first so we can test for the bug we found one time
XContentParser xcontent = createParser(JsonXContent.jsonXContent,
"{\n"
+ " \"yeah\": \"!\",\n"
+ " \"foo\": \"foo\"\n"
@ -265,7 +267,7 @@ public class ConstructingObjectParserTests extends ESTestCase {
assertTrue(result.fooSet);
// and ctor arg second just in case
xcontent = createParser(JsonXContent.jsonXContent,
"{\n"
+ " \"foo\": \"foo\",\n"
+ " \"yeah\": \"!\"\n"
@ -275,7 +277,7 @@ public class ConstructingObjectParserTests extends ESTestCase {
if (ctorArgOptional) {
// and without the constructor arg if we've made it optional
xcontent = createParser(JsonXContent.jsonXContent,
"{\n"
+ " \"foo\": \"foo\"\n"
+ "}");
@ -285,7 +287,7 @@ public class ConstructingObjectParserTests extends ESTestCase {
}
public void testIgnoreUnknownFields() throws IOException {
XContentParser parser = createParser(JsonXContent.jsonXContent,
"{\n"
+ " \"test\" : \"foo\",\n"
+ " \"junk\" : 2\n"

View File

@ -22,6 +22,7 @@ package org.elasticsearch.common.xcontent.json;
import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.core.JsonParseException;
import org.elasticsearch.common.xcontent.BaseXContentTestCase;
import org.elasticsearch.common.xcontent.XContentType;
@ -39,4 +40,13 @@ public class JsonXContentTests extends BaseXContentTestCase {
JsonGenerator generator = new JsonFactory().createGenerator(os);
doTestBigInteger(generator, os);
}
public void testChecksForDuplicates() throws Exception {
assumeTrue("Test only makes sense if JSON parser doesn't have strict duplicate checks enabled",
JsonXContent.isStrictDuplicateDetectionEnabled());
JsonParseException pex = expectThrows(JsonParseException.class,
() -> XContentType.JSON.xContent().createParser("{ \"key\": 1, \"key\": 2 }").map());
assertEquals("Duplicate field 'key'", pex.getMessage());
}
}

View File

@ -83,8 +83,8 @@ public class CopyToMapperTests extends ESSingleNodeTestCase {
assertThat(copyTestMap.get("type").toString(), is("text"));
List<String> copyToList = (List<String>) copyTestMap.get("copy_to");
assertThat(copyToList.size(), equalTo(2));
assertThat(copyToList.get(0), equalTo("another_field"));
assertThat(copyToList.get(1), equalTo("cyclic_test"));
// Check data parsing
BytesReference json = jsonBuilder().startObject()
@ -312,44 +312,43 @@ public class CopyToMapperTests extends ESSingleNodeTestCase {
public void testCopyToNestedField() throws Exception {
IndexService indexService = createIndex("test");
DocumentMapperParser parser = indexService.mapperService().documentMapperParser();
XContentBuilder mapping = jsonBuilder().startObject()
.startObject("type")
.startObject("properties")
.startObject("target")
.field("type", "long")
.field("doc_values", false)
.endObject()
.startObject("n1")
.field("type", "nested")
.startObject("properties")
.startObject("target")
.field("type", "long")
.field("doc_values", false)
.endObject()
.startObject("n2")
.field("type", "nested")
.startObject("properties")
.startObject("target")
.field("type", "long")
.field("doc_values", false)
.endObject()
.startObject("source")
.field("type", "long")
.field("doc_values", false)
.startArray("copy_to")
.value("target") // should go to the root doc
.value("n1.target") // should go to the parent doc
.value("n1.n2.target") // should go to the current doc
.endArray()
.endObject()
.endObject()
.startObject("n2")
.field("type", "nested")
.startObject("properties")
.startObject("target")
.field("type", "long")
.field("doc_values", false)
.endObject()
.startObject("source")
.field("type", "long")
.field("doc_values", false)
.startArray("copy_to")
.value("target") // should go to the root doc
.value("n1.target") // should go to the parent doc
.value("n1.n2.target") // should go to the current doc
.endArray()
.endObject();
for (int i = 0; i < 3; ++i) {
if (mapped) {
mapping = mapping.startObject("target").field("type", "long").field("doc_values", false).endObject();
}
mapping = mapping.endObject().endObject();
}
mapping = mapping.endObject();
.endObject()
.endObject()
.endObject()
.endObject()
.endObject()
.endObject();
DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping.string()));
@ -376,39 +375,38 @@ public class CopyToMapperTests extends ESSingleNodeTestCase {
.endArray()
.endObject();
ParsedDocument doc = mapper.parse("test", "type", "1", jsonDoc.bytes());
assertEquals(6, doc.docs().size());
Document nested = doc.docs().get(0);
assertFieldValue(nested, "n1.n2.target", 7L);
assertFieldValue(nested, "n1.target");
assertFieldValue(nested, "target");
nested = doc.docs().get(2);
assertFieldValue(nested, "n1.n2.target", 5L);
assertFieldValue(nested, "n1.target");
assertFieldValue(nested, "target");
nested = doc.docs().get(3);
assertFieldValue(nested, "n1.n2.target", 3L);
assertFieldValue(nested, "n1.target");
assertFieldValue(nested, "target");
Document parent = doc.docs().get(1);
assertFieldValue(parent, "target");
assertFieldValue(parent, "n1.target", 7L);
assertFieldValue(parent, "n1.n2.target");
parent = doc.docs().get(4);
assertFieldValue(parent, "target");
assertFieldValue(parent, "n1.target", 3L, 5L);
assertFieldValue(parent, "n1.n2.target");
Document root = doc.docs().get(5);
assertFieldValue(root, "target", 3L, 5L, 7L);
assertFieldValue(root, "n1.target");
assertFieldValue(root, "n1.n2.target");
}
public void testCopyToDynamicNestedObjectParsing() throws Exception {

View File

@ -29,6 +29,7 @@ import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.test.AbstractQueryTestCase;
import org.hamcrest.Matchers;
@ -339,6 +340,8 @@ public class BoolQueryBuilderTests extends AbstractQueryTestCase<BoolQueryBuilde
* test that two queries in object throws error
*/
public void testTooManyQueriesInObject() throws IOException {
assumeFalse("Test only makes sense if JSON parser doesn't have strict duplicate checks enabled",
JsonXContent.isStrictDuplicateDetectionEnabled());
String clauseType = randomFrom("must", "should", "must_not", "filter");
// should also throw error if invalid query is preceded by a valid one
String query = "{\n" +

View File

@ -22,6 +22,7 @@ package org.elasticsearch.index.query;
import org.apache.lucene.search.ConstantScoreQuery;
import org.apache.lucene.search.Query;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.test.AbstractQueryTestCase;
@ -65,6 +66,8 @@ public class ConstantScoreQueryBuilderTests extends AbstractQueryTestCase<Consta
* test that multiple "filter" elements causes {@link ParsingException}
*/
public void testMultipleFilterElements() throws IOException {
assumeFalse("Test only makes sense if JSON parser doesn't have strict duplicate checks enabled",
JsonXContent.isStrictDuplicateDetectionEnabled());
String queryString = "{ \"" + ConstantScoreQueryBuilder.NAME + "\" : {\n" +
"\"filter\" : { \"term\": { \"foo\": \"a\" } },\n" +
"\"filter\" : { \"term\": { \"foo\": \"x\" } },\n" +

View File

@ -38,6 +38,7 @@ import org.elasticsearch.common.lucene.search.function.WeightFactorFunction;
import org.elasticsearch.common.unit.DistanceUnit;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.index.query.AbstractQueryBuilder;
import org.elasticsearch.index.query.MatchAllQueryBuilder;
import org.elasticsearch.index.query.QueryBuilder;
@ -730,6 +731,8 @@ public class FunctionScoreQueryBuilderTests extends AbstractQueryTestCase<Functi
}
public void testMalformedQueryMultipleQueryElements() throws IOException {
assumeFalse("Test only makes sense if JSON parser doesn't have strict duplicate checks enabled",
JsonXContent.isStrictDuplicateDetectionEnabled());
String json = "{\n" +
" \"function_score\":{\n" +
" \"query\":{\n" +

View File

@ -21,23 +21,18 @@ package org.elasticsearch.search.aggregations;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.env.Environment;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.search.SearchModule;
import org.elasticsearch.test.AbstractQueryTestCase;
import org.elasticsearch.test.ESTestCase;
import java.util.Random;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
@ -68,11 +63,7 @@ public class AggregatorParsingTests extends ESTestCase {
Settings settings = Settings.builder().put("node.name", AbstractQueryTestCase.class.toString())
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir())
.put(ScriptService.SCRIPT_AUTO_RELOAD_ENABLED_SETTING.getKey(), false).build();
SearchModule searchModule = new SearchModule(settings, false, emptyList());
List<NamedWriteableRegistry.Entry> entries = new ArrayList<>();
entries.addAll(indicesModule.getNamedWriteables());
entries.addAll(searchModule.getNamedWriteables());
aggParsers = searchModule.getSearchRequestParsers().aggParsers;
// create some random type with some default field, those types will
// stick around for all of the subclasses
@ -113,6 +104,8 @@ public class AggregatorParsingTests extends ESTestCase {
}
public void testTwoAggs() throws Exception {
assumeFalse("Test only makes sense if JSON parser doesn't have strict duplicate checks enabled",
JsonXContent.isStrictDuplicateDetectionEnabled());
XContentBuilder source = JsonXContent.contentBuilder()
.startObject()
.startObject("by_date")
@ -187,6 +180,8 @@ public class AggregatorParsingTests extends ESTestCase {
}
public void testSameAggregationName() throws Exception {
assumeFalse("Test only makes sense if JSON parser doesn't have strict duplicate checks enabled",
JsonXContent.isStrictDuplicateDetectionEnabled());
final String name = randomAsciiOfLengthBetween(1, 10);
XContentBuilder source = JsonXContent.contentBuilder()
.startObject()

View File

@ -506,120 +506,150 @@ public class SimpleNestedIT extends ESIntegTestCase {
public void testSortNestedWithNestedFilter() throws Exception {
assertAcked(prepareCreate("test")
.addMapping("type1", XContentFactory.jsonBuilder().startObject()
.addMapping("type1", XContentFactory.jsonBuilder()
.startObject()
.startObject("type1")
.startObject("properties")
.startObject("grand_parent_values").field("type", "long").endObject()
.startObject("parent").field("type", "nested")
.startObject("properties")
.startObject("parent_values").field("type", "long").endObject()
.startObject("child").field("type", "nested")
.startObject("properties")
.startObject("child_values").field("type", "long").endObject()
.startObject("properties")
.startObject("grand_parent_values")
.field("type", "long")
.endObject()
.startObject("parent")
.field("type", "nested")
.startObject("properties")
.startObject("parent_values")
.field("type", "long")
.endObject()
.startObject("child")
.field("type", "nested")
.startObject("properties")
.startObject("child_values")
.field("type", "long")
.endObject()
.endObject()
.endObject()
.endObject()
.endObject()
.endObject()
.endObject()
.endObject()));
ensureGreen();
// sum: 11
client().prepareIndex("test", "type1", Integer.toString(1)).setSource(jsonBuilder().startObject()
client().prepareIndex("test", "type1", Integer.toString(1)).setSource(jsonBuilder()
.startObject()
.field("grand_parent_values", 1L)
.startObject("parent")
.field("filter", false)
.field("parent_values", 1L)
.startObject("child")
.field("filter", true)
.field("child_values", 1L)
.startObject("child_obj")
.field("value", 1L)
.endObject()
.endObject()
.startObject("child")
.field("filter", false)
.field("child_values", 6L)
.endObject()
.endObject()
.startObject("parent")
.field("filter", true)
.field("parent_values", 2L)
.startObject("child")
.field("filter", false)
.field("child_values", -1L)
.endObject()
.startObject("child")
.field("filter", false)
.field("child_values", 5L)
.endObject()
.endObject()
.endObject()).execute().actionGet();
.startArray("parent")
.startObject()
.field("filter", false)
.field("parent_values", 1L)
.startArray("child")
.startObject()
.field("filter", true)
.field("child_values", 1L)
.startObject("child_obj")
.field("value", 1L)
.endObject()
.endObject()
.startObject()
.field("filter", false)
.field("child_values", 6L)
.endObject()
.endArray()
.endObject()
.startObject()
.field("filter", true)
.field("parent_values", 2L)
.startArray("child")
.startObject()
.field("filter", false)
.field("child_values", -1L)
.endObject()
.startObject()
.field("filter", false)
.field("child_values", 5L)
.endObject()
.endArray()
.endObject()
.endArray()
.endObject()).execute().actionGet();
// sum: 7
client().prepareIndex("test", "type1", Integer.toString(2)).setSource(jsonBuilder().startObject()
client().prepareIndex("test", "type1", Integer.toString(2)).setSource(jsonBuilder()
.startObject()
.field("grand_parent_values", 2L)
.startObject("parent")
.field("filter", false)
.field("parent_values", 2L)
.startObject("child")
.field("filter", true)
.field("child_values", 2L)
.startObject("child_obj")
.field("value", 2L)
.endObject()
.endObject()
.startObject("child")
.field("filter", false)
.field("child_values", 4L)
.endObject()
.endObject()
.startObject("parent")
.field("parent_values", 3L)
.field("filter", true)
.startObject("child")
.field("child_values", -2L)
.field("filter", false)
.endObject()
.startObject("child")
.field("filter", false)
.field("child_values", 3L)
.endObject()
.endObject()
.startArray("parent")
.startObject()
.field("filter", false)
.field("parent_values", 2L)
.startArray("child")
.startObject()
.field("filter", true)
.field("child_values", 2L)
.startObject("child_obj")
.field("value", 2L)
.endObject()
.endObject()
.startObject()
.field("filter", false)
.field("child_values", 4L)
.endObject()
.endArray()
.endObject()
.startObject()
.field("parent_values", 3L)
.field("filter", true)
.startArray("child")
.startObject()
.field("child_values", -2L)
.field("filter", false)
.endObject()
.startObject()
.field("filter", false)
.field("child_values", 3L)
.endObject()
.endArray()
.endObject()
.endArray()
.endObject()).execute().actionGet();
// sum: 2
client().prepareIndex("test", "type1", Integer.toString(3)).setSource(jsonBuilder().startObject()
client().prepareIndex("test", "type1", Integer.toString(3)).setSource(jsonBuilder()
.startObject()
.field("grand_parent_values", 3L)
.startObject("parent")
.field("parent_values", 3L)
.field("filter", false)
.startObject("child")
.field("filter", true)
.field("child_values", 3L)
.startObject("child_obj")
.field("value", 3L)
.endObject()
.endObject()
.startObject("child")
.field("filter", false)
.field("child_values", 1L)
.endObject()
.endObject()
.startObject("parent")
.field("parent_values", 4L)
.field("filter", true)
.startObject("child")
.field("filter", false)
.field("child_values", -3L)
.endObject()
.startObject("child")
.field("filter", false)
.field("child_values", 1L)
.endObject()
.endObject()
.endObject()).execute().actionGet();
.startArray("parent")
.startObject()
.field("parent_values", 3L)
.field("filter", false)
.startArray("child")
.startObject()
.field("filter", true)
.field("child_values", 3L)
.startObject("child_obj")
.field("value", 3L)
.endObject()
.endObject()
.startObject()
.field("filter", false)
.field("child_values", 1L)
.endObject()
.endArray()
.endObject()
.startObject()
.field("parent_values", 4L)
.field("filter", true)
.startArray("child")
.startObject()
.field("filter", false)
.field("child_values", -3L)
.endObject()
.startObject()
.field("filter", false)
.field("child_values", 1L)
.endObject()
.endArray()
.endObject()
.endArray()
.endObject()).execute().actionGet();
refresh();
// Without nested filter

View File

@ -149,9 +149,13 @@ public class DirectCandidateGeneratorTests extends ESTestCase{
"Required [field]");
// test two fieldnames
directGenerator = "{ \"field\" : \"f1\", \"field\" : \"f2\" }";
assertIllegalXContent(directGenerator, ParsingException.class,
if (JsonXContent.isStrictDuplicateDetectionEnabled()) {
logger.info("Skipping test as it uses a custom duplicate check that is obsolete when strict duplicate checks are enabled.");
} else {
directGenerator = "{ \"field\" : \"f1\", \"field\" : \"f2\" }";
assertIllegalXContent(directGenerator, ParsingException.class,
"[direct_generator] failed to parse field [field]");
}
// test unknown field
directGenerator = "{ \"unknown_param\" : \"f1\" }";

View File

@ -22,7 +22,7 @@
}
},
"field5": {
"max_value" : {
"min_value" : {
"gt": 2
},
"max_value" : {

View File

@ -8,6 +8,11 @@ This feature was removed in the 5.x series, but a backwards-compatibility layer
system property `elasticsearch.json.allow_unquoted_field_names`. This backwards-compatibility layer
has been removed in Elasticsearch 6.0.0.

==== Duplicate Keys in JSON

In previous versions of Elasticsearch, JSON documents were allowed to contain duplicate keys. Elasticsearch 6.0.0
enforces that all keys are unique.
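
For example, a document such as the following, which older versions accepted
(typically the last value won), is now rejected with a parse error like
`Duplicate field 'foo'`:

    {
      "foo": "bar",
      "foo": "baz"
    }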

==== Analyze API changes

The deprecated request parameters and plain text in the request body have been removed. Define parameters in the request body instead.

View File

@ -196,10 +196,24 @@ public class MoreExpressionTests extends ESIntegTestCase {
public void testMultiValueMethods() throws Exception {
ElasticsearchAssertions.assertAcked(prepareCreate("test").addMapping("doc", "double0", "type=double", "double1", "type=double", "double2", "type=double"));
ensureGreen("test");
Map<String, Object> doc1 = new HashMap<>();
doc1.put("double0", new Double[]{5.0d, 1.0d, 1.5d});
doc1.put("double1", new Double[]{1.2d, 2.4d});
doc1.put("double2", 3.0d);
Map<String, Object> doc2 = new HashMap<>();
doc2.put("double0", 5.0d);
doc2.put("double1", 3.0d);
Map<String, Object> doc3 = new HashMap<>();
doc3.put("double0", new Double[]{5.0d, 1.0d, 1.5d, -1.5d});
doc3.put("double1", 4.0d);
indexRandom(true,
client().prepareIndex("test", "doc", "1").setSource("double0", "5.0", "double0", "1.0", "double0", "1.5", "double1", "1.2", "double1", "2.4", "double2", "3.0"),
client().prepareIndex("test", "doc", "2").setSource("double0", "5.0", "double1", "3.0"),
client().prepareIndex("test", "doc", "3").setSource("double0", "5.0", "double0", "1.0", "double0", "1.5", "double0", "-1.5", "double1", "4.0"));
client().prepareIndex("test", "doc", "1").setSource(doc1),
client().prepareIndex("test", "doc", "2").setSource(doc2),
client().prepareIndex("test", "doc", "3").setSource(doc3));
SearchResponse rsp = buildRequest("doc['double0'].count() + doc['double1'].count()").get();

View File

@ -184,6 +184,9 @@ public class TransportPercolateAction extends HandledTransportAction<PercolateRe
}
} else if (token.isValue()) {
if ("size".equals(currentFieldName)) {
if (percolateRequest.onlyCount()) {
throw new IllegalArgumentException("Cannot set size if onlyCount == true");
}
searchSource.field("size", parser.intValue());
} else if ("sort".equals(currentFieldName)) {
searchSource.field("sort", parser.text());

View File

@ -94,8 +94,7 @@ public class PercolatorAggregationsIT extends ESIntegTestCase {
PercolateRequestBuilder percolateRequestBuilder = preparePercolate(client())
.setIndices(INDEX_NAME)
.setDocumentType("type")
.setPercolateDoc(docBuilder().setDoc(jsonBuilder().startObject().field("field1", value).endObject()));
SubAggCollectionMode aggCollectionMode = randomFrom(SubAggCollectionMode.values());
percolateRequestBuilder.addAggregation(AggregationBuilders.terms("a").field("field2").collectMode(aggCollectionMode));
@ -103,15 +102,17 @@ public class PercolatorAggregationsIT extends ESIntegTestCase {
if (randomBoolean()) {
percolateRequestBuilder.setPercolateQuery(matchAllQuery());
}
boolean countOnly = randomBoolean();
if (countOnly) {
percolateRequestBuilder.setOnlyCount(countOnly);
} else {
// can only set size if we also keep track of matches (i.e. countOnly == false)
if (randomBoolean()) {
percolateRequestBuilder.setScore(true).setSize(expectedCount[i % numUniqueQueries]);
} else {
percolateRequestBuilder.setSortByScore(true).setSize(numQueries);
}
}
PercolateResponse response = percolateRequestBuilder.execute().actionGet();
@ -161,8 +162,7 @@ public class PercolatorAggregationsIT extends ESIntegTestCase {
PercolateRequestBuilder percolateRequestBuilder = preparePercolate(client())
.setIndices(INDEX_NAME)
.setDocumentType("type")
.setPercolateDoc(docBuilder().setDoc(jsonBuilder().startObject().field("field1", value).endObject()));
SubAggCollectionMode aggCollectionMode = randomFrom(SubAggCollectionMode.values());
percolateRequestBuilder.addAggregation(AggregationBuilders.terms("a").field("field2").collectMode(aggCollectionMode));
@ -170,15 +170,17 @@ public class PercolatorAggregationsIT extends ESIntegTestCase {
if (randomBoolean()) {
percolateRequestBuilder.setPercolateQuery(matchAllQuery());
}
boolean countOnly = randomBoolean();
if (countOnly) {
percolateRequestBuilder.setOnlyCount(countOnly);
} else {
// can only set size if we also keep track of matches (i.e. countOnly == false)
if (randomBoolean()) {
percolateRequestBuilder.setScore(true).setSize(expectedCount[i % numUniqueQueries]);
} else {
percolateRequestBuilder.setSortByScore(true).setSize(numQueries);
}
}
percolateRequestBuilder.addAggregation(PipelineAggregatorBuilders.maxBucket("max_a", "a>_count"));
@ -243,8 +245,7 @@ public class PercolatorAggregationsIT extends ESIntegTestCase {
PercolateRequestBuilder percolateRequestBuilder = preparePercolate(client())
.setIndices(INDEX_NAME)
.setDocumentType("type")
.setPercolateDoc(docBuilder().setDoc(jsonBuilder().startObject().field("field1", value).endObject()));
SubAggCollectionMode aggCollectionMode = randomFrom(SubAggCollectionMode.values());
percolateRequestBuilder.addAggregation(AggregationBuilders.terms("terms").field("field2").collectMode(aggCollectionMode)
@ -253,15 +254,17 @@ public class PercolatorAggregationsIT extends ESIntegTestCase {
if (randomBoolean()) {
percolateRequestBuilder.setPercolateQuery(matchAllQuery());
}
boolean countOnly = randomBoolean();
if (countOnly) {
percolateRequestBuilder.setOnlyCount(countOnly);
} else {
// can only set size if we also keep track of matches (i.e. countOnly == false)
if (randomBoolean()) {
percolateRequestBuilder.setScore(true).setSize(numQueries);
} else {
percolateRequestBuilder.setSortByScore(true).setSize(numQueries);
}
}
percolateRequestBuilder.addAggregation(PipelineAggregatorBuilders.maxBucket("max_terms", "terms>_count"));

View File

@ -1031,24 +1031,28 @@ public class PercolatorIT extends ESIntegTestCase {
refresh();
boolean onlyCount = randomBoolean();
PercolateRequestBuilder builder = preparePercolate(client())
.setIndices(INDEX_NAME).setDocumentType("my-type")
.setOnlyCount(onlyCount)
.setPercolateDoc(docBuilder().setDoc("field", "value"));
if (!onlyCount) {
builder.setSize((int) totalQueries);
}
PercolateResponse response = builder.execute().actionGet();
assertMatchCount(response, totalQueries);
if (!onlyCount) {
assertThat(response.getMatches().length, equalTo((int) totalQueries));
}
int size = randomIntBetween(0, (int) totalQueries - 1);
builder = preparePercolate(client())
.setIndices(INDEX_NAME).setDocumentType("my-type")
.setOnlyCount(onlyCount)
.setPercolateDoc(docBuilder().setDoc("field", "value"));
if (!onlyCount) {
builder.setSize(size);
}
response = builder.execute().actionGet();
assertMatchCount(response, totalQueries);
if (!onlyCount) {
assertThat(response.getMatches().length, equalTo(size));
@ -1060,13 +1064,15 @@ public class PercolatorIT extends ESIntegTestCase {
int runs = randomIntBetween(3, 16);
for (int i = 0; i < runs; i++) {
onlyCount = randomBoolean();
builder = preparePercolate(client())
.setIndices(INDEX_NAME).setDocumentType("my-type")
.setOnlyCount(onlyCount)
.setPercolateDoc(docBuilder().setDoc("field", "value"))
.setPercolateQuery(termQuery("level", 1 + randomInt(numLevels - 1)))
.setSize((int) numQueriesPerLevel)
.execute().actionGet();
.setPercolateQuery(termQuery("level", 1 + randomInt(numLevels - 1)));
if (!onlyCount) {
builder.setSize((int) numQueriesPerLevel);
}
response = builder.execute().actionGet();
assertMatchCount(response, numQueriesPerLevel);
if (!onlyCount) {
assertThat(response.getMatches().length, equalTo((int) numQueriesPerLevel));
@ -1075,13 +1081,15 @@ public class PercolatorIT extends ESIntegTestCase {
for (int i = 0; i < runs; i++) {
onlyCount = randomBoolean();
builder = preparePercolate(client())
.setIndices(INDEX_NAME).setDocumentType("my-type")
.setOnlyCount(onlyCount)
.setPercolateDoc(docBuilder().setDoc("field", "value"))
.setPercolateQuery(termQuery("level", 1 + randomInt(numLevels - 1)));
if (!onlyCount) {
builder.setSize((int) numQueriesPerLevel);
}
response = builder.execute().actionGet();
assertMatchCount(response, numQueriesPerLevel);
if (!onlyCount) {
assertThat(response.getMatches().length, equalTo((int) numQueriesPerLevel));
@ -1091,13 +1099,15 @@ public class PercolatorIT extends ESIntegTestCase {
for (int i = 0; i < runs; i++) {
onlyCount = randomBoolean();
size = randomIntBetween(0, (int) numQueriesPerLevel - 1);
builder = preparePercolate(client())
.setIndices(INDEX_NAME).setDocumentType("my-type")
.setOnlyCount(onlyCount)
.setPercolateDoc(docBuilder().setDoc("field", "value"))
.setPercolateQuery(termQuery("level", 1 + randomInt(numLevels - 1)));
if (!onlyCount) {
builder.setSize(size);
}
response = builder.execute().actionGet();
assertMatchCount(response, numQueriesPerLevel);
if (!onlyCount) {
assertThat(response.getMatches().length, equalTo(size));
@ -1726,7 +1736,7 @@ public class PercolatorIT extends ESIntegTestCase {
.setPercolateDoc(docBuilder().setDoc(doc))
.get();
assertMatchCount(response, 3L);
response = preparePercolate(client()).setScore(randomBoolean()).setSortByScore(randomBoolean()).setOnlyCount(randomBoolean()).setPercolateQuery(QueryBuilders.termQuery("text", "foo"))
.setIndices(INDEX_NAME).setDocumentType("doc")
.setPercolateDoc(docBuilder().setDoc(doc))
.get();

View File

@ -19,6 +19,7 @@
package org.elasticsearch.test.rest.yaml.restspec;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.common.xcontent.yaml.YamlXContent;
import org.elasticsearch.test.ESTestCase;
@ -71,6 +72,8 @@ public class ClientYamlSuiteRestApiParserFailingTests extends ESTestCase {
}
public void testDuplicateParts() throws Exception {
assumeFalse("Test only makes sense if JSON parser doesn't have strict duplicate checks enabled",
JsonXContent.isStrictDuplicateDetectionEnabled());
parseAndExpectFailure("{\n" +
" \"ping\": {" +
" \"documentation\": \"http://www.elasticsearch.org/guide/\"," +
@ -103,6 +106,8 @@ public class ClientYamlSuiteRestApiParserFailingTests extends ESTestCase {
}
public void testDuplicateParams() throws Exception {
assumeFalse("Test only makes sense if JSON parser doesn't have strict duplicate checks enabled",
JsonXContent.isStrictDuplicateDetectionEnabled());
parseAndExpectFailure("{\n" +
" \"ping\": {" +
" \"documentation\": \"http://www.elasticsearch.org/guide/\"," +