Query DSL: Parsers should throw exception on unknown object
This PR adds a randomized test to the query test base class that mutates an otherwise correct query by adding an additional object into the query hierarchy. Doing so makes the query illegal and should trigger some kind of exception. The new test revealed that some query parsers quietly returned queries when called with such an illegal query; those parsers are fixed here as well. Relates to #10974
This commit is contained in:
parent ebec4bdaf6
commit 72463768f5
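The mutation the new base-class test performs is easy to picture: serialize a valid query, then for every opening brace splice in an extra "newField" object wrapper, producing a query the parser must reject. The sketch below is a minimal, standalone illustration of that injection; the class and method names are invented for the example, while the real test is AbstractQueryTestCase#testUnknownObjectException in the diff further down.

// Minimal sketch of the brace-injection mutation used by the new test.
// UnknownObjectMutations and mutate() are illustrative names only; the
// actual logic lives in AbstractQueryTestCase#testUnknownObjectException.
import java.util.ArrayList;
import java.util.List;

public class UnknownObjectMutations {

    /**
     * For every '{' in a valid query JSON string, produce a variant that
     * wraps the object starting there in an extra, unknown object:
     *   { "term" : { ... } }  becomes  { "newField" : { "term" : { ... } } }
     * Each variant is illegal and a query parser should reject it.
     */
    static List<String> mutate(String validQuery) {
        List<String> mutations = new ArrayList<>();
        for (int i = 0; i < validQuery.length(); i++) {
            if (validQuery.charAt(i) == '{') {
                mutations.add(validQuery.substring(0, i)
                        + "{ \"newField\" : "
                        + validQuery.substring(i)
                        + "}");
            }
        }
        return mutations;
    }

    public static void main(String[] args) {
        for (String m : mutate("{ \"term\" : { \"user\" : \"kimchy\" } }")) {
            System.out.println(m);
        }
    }
}

Every such variant is expected to make the parser throw a ParsingException (or a related parse exception), which is exactly what the parser changes below enforce.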
TermVectorsRequest.java:

@@ -19,7 +19,6 @@
package org.elasticsearch.action.termvectors;

import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.DocumentRequest;

@@ -211,7 +210,7 @@ public class TermVectorsRequest extends SingleShardRequest<TermVectorsRequest> i
public String id() {
return id;
}

/**
* Sets the id of document the term vector is requested for.
*/

@@ -651,7 +650,7 @@ public class TermVectorsRequest extends SingleShardRequest<TermVectorsRequest> i
if (e.getValue() instanceof String) {
mapStrStr.put(e.getKey(), (String) e.getValue());
} else {
throw new ElasticsearchException("expecting the analyzer at [{}] to be a String, but found [{}] instead", e.getKey(), e.getValue().getClass());
throw new ElasticsearchParseException("expecting the analyzer at [{}] to be a String, but found [{}] instead", e.getKey(), e.getValue().getClass());
}
}
return mapStrStr;
CommonTermsQueryParser.java:

@@ -39,7 +39,7 @@ public class CommonTermsQueryParser implements QueryParser<CommonTermsQueryBuild
XContentParser parser = parseContext.parser();
XContentParser.Token token = parser.nextToken();
if (token != XContentParser.Token.FIELD_NAME) {
throw new ParsingException(parser.getTokenLocation(), "[common] query malformed, no field");
throw new ParsingException(parser.getTokenLocation(), "[" + CommonTermsQueryBuilder.NAME + "] query malformed, no field");
}
String fieldName = parser.currentName();
Object text = null;

@@ -70,13 +70,16 @@ public class CommonTermsQueryParser implements QueryParser<CommonTermsQueryBuild
} else if ("high_freq".equals(innerFieldName) || "highFreq".equals(innerFieldName)) {
highFreqMinimumShouldMatch = parser.text();
} else {
throw new ParsingException(parser.getTokenLocation(), "[common] query does not support [" + innerFieldName
throw new ParsingException(parser.getTokenLocation(), "[" + CommonTermsQueryBuilder.NAME + "] query does not support [" + innerFieldName
+ "] for [" + currentFieldName + "]");
}
} else {
throw new ParsingException(parser.getTokenLocation(), "[" + CommonTermsQueryBuilder.NAME + "] unexpected token type [" + token
+ "] after [" + innerFieldName + "]");
}
}
} else {
throw new ParsingException(parser.getTokenLocation(), "[common] query does not support [" + currentFieldName + "]");
throw new ParsingException(parser.getTokenLocation(), "[" + CommonTermsQueryBuilder.NAME + "] query does not support [" + currentFieldName + "]");
}
} else if (token.isValue()) {
if ("query".equals(currentFieldName)) {

@@ -98,7 +101,7 @@ public class CommonTermsQueryParser implements QueryParser<CommonTermsQueryBuild
} else if ("_name".equals(currentFieldName)) {
queryName = parser.text();
} else {
throw new ParsingException(parser.getTokenLocation(), "[common] query does not support [" + currentFieldName + "]");
throw new ParsingException(parser.getTokenLocation(), "[" + CommonTermsQueryBuilder.NAME + "] query does not support [" + currentFieldName + "]");
}
}
}
ExistsQueryParser.java:

@@ -55,13 +55,15 @@ public class ExistsQueryParser implements QueryParser<ExistsQueryBuilder> {
} else if ("boost".equals(currentFieldName)) {
boost = parser.floatValue();
} else {
throw new ParsingException(parser.getTokenLocation(), "[exists] query does not support [" + currentFieldName + "]");
throw new ParsingException(parser.getTokenLocation(), "[" + ExistsQueryBuilder.NAME + "] query does not support [" + currentFieldName + "]");
}
} else {
throw new ParsingException(parser.getTokenLocation(), "[" + ExistsQueryBuilder.NAME + "] unknown token [" + token + "] after [" + currentFieldName + "]");
}
}

if (fieldPattern == null) {
throw new ParsingException(parser.getTokenLocation(), "exists must be provided with a [field]");
throw new ParsingException(parser.getTokenLocation(), "[" + ExistsQueryBuilder.NAME + "] must be provided with a [field]");
}

ExistsQueryBuilder builder = new ExistsQueryBuilder(fieldPattern);
GeoShapeQueryParser.java:

@@ -70,8 +70,10 @@ public class GeoShapeQueryParser implements QueryParser<GeoShapeQueryBuilder> {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (token == XContentParser.Token.START_OBJECT) {
if (fieldName != null) {
throw new ParsingException(parser.getTokenLocation(), "[" + GeoShapeQueryBuilder.NAME + "] point specified twice. [" + currentFieldName + "]");
}
fieldName = currentFieldName;

while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();

@@ -104,10 +106,12 @@ public class GeoShapeQueryParser implements QueryParser<GeoShapeQueryBuilder> {
} else if (parseContext.parseFieldMatcher().match(currentFieldName, SHAPE_PATH_FIELD)) {
shapePath = parser.text();
}
} else {
throw new ParsingException(parser.getTokenLocation(), "[" + GeoShapeQueryBuilder.NAME + "] unknown token [" + token + "] after [" + currentFieldName + "]");
}
}
} else {
throw new ParsingException(parser.getTokenLocation(), "[geo_shape] query does not support [" + currentFieldName + "]");
throw new ParsingException(parser.getTokenLocation(), "[" + GeoShapeQueryBuilder.NAME + "] query does not support [" + currentFieldName + "]");
}
}
}

@@ -117,7 +121,7 @@ public class GeoShapeQueryParser implements QueryParser<GeoShapeQueryBuilder> {
} else if (parseContext.parseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.NAME_FIELD)) {
queryName = parser.text();
} else {
throw new ParsingException(parser.getTokenLocation(), "[geo_shape] query does not support [" + currentFieldName + "]");
throw new ParsingException(parser.getTokenLocation(), "[" + GeoShapeQueryBuilder.NAME + "] query does not support [" + currentFieldName + "]");
}
}
}
IdsQueryParser.java:

@@ -79,7 +79,7 @@ public class IdsQueryParser implements QueryParser<IdsQueryBuilder> {
types.add(value);
}
} else {
throw new ParsingException(parser.getTokenLocation(), "[ids] query does not support [" + currentFieldName + "]");
throw new ParsingException(parser.getTokenLocation(), "[" + IdsQueryBuilder.NAME + "] query does not support [" + currentFieldName + "]");
}
} else if (token.isValue()) {
if ("type".equals(currentFieldName) || "_type".equals(currentFieldName)) {

@@ -89,12 +89,14 @@ public class IdsQueryParser implements QueryParser<IdsQueryBuilder> {
} else if ("_name".equals(currentFieldName)) {
queryName = parser.text();
} else {
throw new ParsingException(parser.getTokenLocation(), "[ids] query does not support [" + currentFieldName + "]");
throw new ParsingException(parser.getTokenLocation(), "[" + IdsQueryBuilder.NAME + "] query does not support [" + currentFieldName + "]");
}
} else {
throw new ParsingException(parser.getTokenLocation(), "[" + IdsQueryBuilder.NAME + "] unknown token [" + token + "] after [" + currentFieldName + "]");
}
}
if (!idsProvided) {
throw new ParsingException(parser.getTokenLocation(), "[ids] query, no ids values provided");
throw new ParsingException(parser.getTokenLocation(), "[" + IdsQueryBuilder.NAME + "] query, no ids values provided");
}

IdsQueryBuilder query = new IdsQueryBuilder(types.toArray(new String[types.size()]));
MatchAllQueryParser.java:

@@ -52,8 +52,10 @@ public class MatchAllQueryParser implements QueryParser<MatchAllQueryBuilder> {
} else if ("boost".equals(currentFieldName)) {
boost = parser.floatValue();
} else {
throw new ParsingException(parser.getTokenLocation(), "[match_all] query does not support [" + currentFieldName + "]");
throw new ParsingException(parser.getTokenLocation(), "[" + MatchAllQueryBuilder.NAME + "] query does not support [" + currentFieldName + "]");
}
} else {
throw new ParsingException(parser.getTokenLocation(), "[" + MatchAllQueryBuilder.NAME + "] unknown token [" + token + "] after [" + currentFieldName + "]");
}
}
MatchAllQueryBuilder queryBuilder = new MatchAllQueryBuilder();
MatchNoneQueryParser.java:

@@ -51,6 +51,8 @@ public class MatchNoneQueryParser implements QueryParser<MatchNoneQueryBuilder>
} else {
throw new ParsingException(parser.getTokenLocation(), "["+MatchNoneQueryBuilder.NAME+"] query does not support [" + currentFieldName + "]");
}
} else {
throw new ParsingException(parser.getTokenLocation(), "[" + MatchNoneQueryBuilder.NAME + "] unknown token [" + token + "] after [" + currentFieldName + "]");
}
}
MatchQueryParser.java:

@@ -55,7 +55,7 @@ public class MatchQueryParser implements QueryParser<MatchQueryBuilder> {
XContentParser.Token token = parser.nextToken();
if (token != XContentParser.Token.FIELD_NAME) {
throw new ParsingException(parser.getTokenLocation(), "[match] query malformed, no field");
throw new ParsingException(parser.getTokenLocation(), "[" + MatchQueryBuilder.NAME + "] query malformed, no field");
}
String fieldName = parser.currentName();

@@ -93,7 +93,7 @@ public class MatchQueryParser implements QueryParser<MatchQueryBuilder> {
} else if ("phrase_prefix".equals(tStr) || "phrasePrefix".equals(currentFieldName)) {
type = MatchQuery.Type.PHRASE_PREFIX;
} else {
throw new ParsingException(parser.getTokenLocation(), "[match] query does not support type " + tStr);
throw new ParsingException(parser.getTokenLocation(), "[" + MatchQueryBuilder.NAME + "] query does not support type " + tStr);
}
} else if ("analyzer".equals(currentFieldName)) {
analyzer = parser.text();

@@ -131,8 +131,10 @@ public class MatchQueryParser implements QueryParser<MatchQueryBuilder> {
} else if ("_name".equals(currentFieldName)) {
queryName = parser.text();
} else {
throw new ParsingException(parser.getTokenLocation(), "[match] query does not support [" + currentFieldName + "]");
throw new ParsingException(parser.getTokenLocation(), "[" + MatchQueryBuilder.NAME + "] query does not support [" + currentFieldName + "]");
}
} else {
throw new ParsingException(parser.getTokenLocation(), "[" + MatchQueryBuilder.NAME + "] unknown token [" + token + "] after [" + currentFieldName + "]");
}
}
parser.nextToken();
MissingQueryParser.java:

@@ -61,8 +61,10 @@ public class MissingQueryParser implements QueryParser<MissingQueryBuilder> {
} else if ("boost".equals(currentFieldName)) {
boost = parser.floatValue();
} else {
throw new ParsingException(parser.getTokenLocation(), "[missing] query does not support [" + currentFieldName + "]");
throw new ParsingException(parser.getTokenLocation(), "[" + MissingQueryBuilder.NAME + "] query does not support [" + currentFieldName + "]");
}
} else {
throw new ParsingException(parser.getTokenLocation(), "[" + MissingQueryBuilder.NAME + "] unknown token [" + token + "] after [" + currentFieldName + "]");
}
}
MultiMatchQueryParser.java:

@@ -122,8 +122,10 @@ public class MultiMatchQueryParser implements QueryParser<MultiMatchQueryBuilder
} else if ("_name".equals(currentFieldName)) {
queryName = parser.text();
} else {
throw new ParsingException(parser.getTokenLocation(), "[match] query does not support [" + currentFieldName + "]");
throw new ParsingException(parser.getTokenLocation(), "[" + MultiMatchQueryBuilder.NAME + "] query does not support [" + currentFieldName + "]");
}
} else {
throw new ParsingException(parser.getTokenLocation(), "[" + MultiMatchQueryBuilder.NAME + "] unknown token [" + token + "] after [" + currentFieldName + "]");
}
}
QueryStringQueryParser.java:

@@ -96,7 +96,7 @@ public class QueryStringQueryParser implements QueryParser {
fieldsAndWeights.put(fField, fBoost);
}
} else {
throw new ParsingException(parser.getTokenLocation(), "[query_string] query does not support [" + currentFieldName + "]");
throw new ParsingException(parser.getTokenLocation(), "[" + QueryStringQueryBuilder.NAME + "] query does not support [" + currentFieldName + "]");
}
} else if (token.isValue()) {
if ("query".equals(currentFieldName)) {

@@ -154,17 +154,19 @@ public class QueryStringQueryParser implements QueryParser {
try {
timeZone = parser.text();
} catch (IllegalArgumentException e) {
throw new ParsingException(parser.getTokenLocation(), "[query_string] time_zone [" + parser.text() + "] is unknown");
throw new ParsingException(parser.getTokenLocation(), "[" + QueryStringQueryBuilder.NAME + "] time_zone [" + parser.text() + "] is unknown");
}
} else if ("_name".equals(currentFieldName)) {
queryName = parser.text();
} else {
throw new ParsingException(parser.getTokenLocation(), "[query_string] query does not support [" + currentFieldName + "]");
throw new ParsingException(parser.getTokenLocation(), "[" + QueryStringQueryBuilder.NAME + "] query does not support [" + currentFieldName + "]");
}
} else {
throw new ParsingException(parser.getTokenLocation(), "[" + QueryStringQueryBuilder.NAME + "] unknown token [" + token + "] after [" + currentFieldName + "]");
}
}
if (queryString == null) {
throw new ParsingException(parser.getTokenLocation(), "query_string must be provided with a [query]");
throw new ParsingException(parser.getTokenLocation(), "[" + QueryStringQueryBuilder.NAME + "] must be provided with a [query]");
}

QueryStringQueryBuilder queryStringQuery = new QueryStringQueryBuilder(queryString);
SimpleQueryStringParser.java:

@@ -146,6 +146,8 @@ public class SimpleQueryStringParser implements QueryParser<SimpleQueryStringBui
} else {
throw new ParsingException(parser.getTokenLocation(), "[" + SimpleQueryStringBuilder.NAME + "] unsupported field [" + parser.currentName() + "]");
}
} else {
throw new ParsingException(parser.getTokenLocation(), "[" + SimpleQueryStringBuilder.NAME + "] unknown token [" + token + "] after [" + currentFieldName + "]");
}
}
TermsQueryParser.java:

@@ -62,11 +62,15 @@ public class TermsQueryParser implements QueryParser {
// skip
} else if (token == XContentParser.Token.START_ARRAY) {
if (fieldName != null) {
throw new ParsingException(parser.getTokenLocation(), "[terms] query does not support multiple fields");
throw new ParsingException(parser.getTokenLocation(), "[" + TermsQueryBuilder.NAME + "] query does not support multiple fields");
}
fieldName = currentFieldName;
values = parseValues(parser);
} else if (token == XContentParser.Token.START_OBJECT) {
if (fieldName != null) {
throw new ParsingException(parser.getTokenLocation(), "[" + TermsQueryBuilder.NAME + "] query does not support more than one field. "
+ "Already got: [" + fieldName + "] but also found [" + currentFieldName +"]");
}
fieldName = currentFieldName;
termsLookup = TermsLookup.parseTermsLookup(parser);
} else if (token.isValue()) {

@@ -75,13 +79,15 @@ public class TermsQueryParser implements QueryParser {
} else if ("_name".equals(currentFieldName)) {
queryName = parser.text();
} else {
throw new ParsingException(parser.getTokenLocation(), "[terms] query does not support [" + currentFieldName + "]");
throw new ParsingException(parser.getTokenLocation(), "[" + TermsQueryBuilder.NAME + "] query does not support [" + currentFieldName + "]");
}
} else {
throw new ParsingException(parser.getTokenLocation(), "[" + TermsQueryBuilder.NAME + "] unknown token [" + token + "] after [" + currentFieldName + "]");
}
}

if (fieldName == null) {
throw new ParsingException(parser.getTokenLocation(), "terms query requires a field name, followed by array of terms or a document lookup specification");
throw new ParsingException(parser.getTokenLocation(), "[" + TermsQueryBuilder.NAME + "] query requires a field name, followed by array of terms or a document lookup specification");
}
return new TermsQueryBuilder(fieldName, values, termsLookup)
.boost(boost)
TermsLookup.java:

@@ -26,6 +26,7 @@ import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.TermsQueryBuilder;

import java.io.IOException;
import java.util.Objects;

@@ -49,13 +50,13 @@ public class TermsLookup implements Writeable<TermsLookup>, ToXContent {
public TermsLookup(String index, String type, String id, String path) {
if (id == null) {
throw new IllegalArgumentException("[terms] query lookup element requires specifying the id.");
throw new IllegalArgumentException("[" + TermsQueryBuilder.NAME + "] query lookup element requires specifying the id.");
}
if (type == null) {
throw new IllegalArgumentException("[terms] query lookup element requires specifying the type.");
throw new IllegalArgumentException("[" + TermsQueryBuilder.NAME + "] query lookup element requires specifying the type.");
}
if (path == null) {
throw new IllegalArgumentException("[terms] query lookup element requires specifying the path.");
throw new IllegalArgumentException("[" + TermsQueryBuilder.NAME + "] query lookup element requires specifying the path.");
}
this.index = index;
this.type = type;

@@ -122,9 +123,11 @@ public class TermsLookup implements Writeable<TermsLookup>, ToXContent {
path = parser.text();
break;
default:
throw new ParsingException(parser.getTokenLocation(), "[terms] query does not support [" + currentFieldName
throw new ParsingException(parser.getTokenLocation(), "[" + TermsQueryBuilder.NAME + "] query does not support [" + currentFieldName
+ "] within lookup element");
}
} else {
throw new ParsingException(parser.getTokenLocation(), "[" + TermsQueryBuilder.NAME + "] unknown token [" + token + "] after [" + currentFieldName + "]");
}
}
return new TermsLookup(index, type, id, path).routing(routing);
AbstractQueryTestCase.java:

@@ -20,10 +20,12 @@
package org.elasticsearch.index.query;

import com.carrotsearch.randomizedtesting.generators.CodepointSetGenerator;
import com.fasterxml.jackson.core.JsonParseException;
import com.fasterxml.jackson.core.io.JsonStringEncoder;

import org.apache.lucene.search.Query;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.Version;
import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest;
import org.elasticsearch.action.get.GetRequest;

@@ -76,6 +78,7 @@ import org.elasticsearch.indices.IndicesWarmer;
import org.elasticsearch.indices.breaker.CircuitBreakerService;
import org.elasticsearch.indices.breaker.NoneCircuitBreakerService;
import org.elasticsearch.script.*;
import org.elasticsearch.script.Script.ScriptParseException;
import org.elasticsearch.script.mustache.MustacheScriptEngineService;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.test.ESTestCase;

@@ -344,6 +347,29 @@ public abstract class AbstractQueryTestCase<QB extends AbstractQueryBuilder<QB>>
// we'd like to see the offending field name here
assertThat(e.getMessage(), containsString("bogusField"));
}
}

/**
* Test that adding additional object into otherwise correct query string
* should always trigger some kind of Parsing Exception.
*/
public void testUnknownObjectException() throws IOException {
String validQuery = createTestQueryBuilder().toString();
assertThat(validQuery, containsString("{"));
for (int insertionPosition = 0; insertionPosition < validQuery.length(); insertionPosition++) {
if (validQuery.charAt(insertionPosition) == '{') {
String testQuery = validQuery.substring(0, insertionPosition) + "{ \"newField\" : " + validQuery.substring(insertionPosition) + "}";
try {
parseQuery(testQuery);
fail("some parsing exception expected for query: " + testQuery);
} catch (ParsingException | ScriptParseException | ElasticsearchParseException e) {
// different kinds of exception wordings depending on location
// of mutation, so no simple asserts possible here
} catch (JsonParseException e) {
// mutation produced invalid json
}
}
}
}

/**
HasChildQueryBuilderTests.java:

@@ -20,11 +20,15 @@
package org.elasticsearch.index.query;

import com.carrotsearch.randomizedtesting.generators.RandomPicks;
import com.fasterxml.jackson.core.JsonParseException;

import org.apache.lucene.queries.TermsQuery;
import org.apache.lucene.search.*;
import org.apache.lucene.search.join.ScoreMode;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;

@@ -35,6 +39,7 @@ import org.elasticsearch.index.mapper.Uid;
import org.elasticsearch.index.mapper.internal.TypeFieldMapper;
import org.elasticsearch.index.mapper.internal.UidFieldMapper;
import org.elasticsearch.index.query.support.QueryInnerHits;
import org.elasticsearch.script.Script.ScriptParseException;
import org.elasticsearch.search.fetch.innerhits.InnerHitsBuilder;
import org.elasticsearch.search.fetch.innerhits.InnerHitsContext;
import org.elasticsearch.search.internal.SearchContext;

@@ -44,7 +49,8 @@ import org.elasticsearch.test.TestSearchContext;
import java.io.IOException;
import java.util.Collections;

import static org.elasticsearch.test.StreamsUtils.copyToStringFromClasspath;
import static org.hamcrest.Matchers.containsString;

import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.CoreMatchers.instanceOf;

@@ -52,6 +58,7 @@ public class HasChildQueryBuilderTests extends AbstractQueryTestCase<HasChildQue
protected static final String PARENT_TYPE = "parent";
protected static final String CHILD_TYPE = "child";

@Override
public void setUp() throws Exception {
super.setUp();
MapperService mapperService = queryParserService().mapperService;

@@ -74,6 +81,7 @@ public class HasChildQueryBuilderTests extends AbstractQueryTestCase<HasChildQue
).string()), false, false);
}

@Override
protected void setSearchContext(String[] types) {
final MapperService mapperService = queryParserService().mapperService;
final IndexFieldDataService fieldData = queryParserService().fieldDataService;

@@ -303,4 +311,33 @@ public class HasChildQueryBuilderTests extends AbstractQueryTestCase<HasChildQue
assertThat(typeTermQuery.getTerm().field(), equalTo(TypeFieldMapper.NAME));
assertThat(typeTermQuery.getTerm().text(), equalTo(type));
}

/**
* override superclass test, because here we need to take care that mutation doesn't happen inside
* `inner_hits` structure, because we don't parse them yet and so no exception will be triggered
* for any mutation there.
*/
@Override
public void testUnknownObjectException() throws IOException {
String validQuery = createTestQueryBuilder().toString();
assertThat(validQuery, containsString("{"));
int endPosition = validQuery.indexOf("inner_hits");
if (endPosition == -1) {
endPosition = validQuery.length() - 1;
}
for (int insertionPosition = 0; insertionPosition < endPosition; insertionPosition++) {
if (validQuery.charAt(insertionPosition) == '{') {
String testQuery = validQuery.substring(0, insertionPosition) + "{ \"newField\" : " + validQuery.substring(insertionPosition) + "}";
try {
parseQuery(testQuery);
fail("some parsing exception expected for query: " + testQuery);
} catch (ParsingException | ScriptParseException | ElasticsearchParseException e) {
// different kinds of exception wordings depending on location
// of mutation, so no simple asserts possible here
} catch (JsonParseException e) {
// mutation produced invalid json
}
}
}
}
}
HasParentQueryBuilderTests.java:

@@ -20,10 +20,14 @@
package org.elasticsearch.index.query;

import com.carrotsearch.randomizedtesting.generators.RandomPicks;
import com.fasterxml.jackson.core.JsonParseException;

import org.apache.lucene.search.Query;
import org.apache.lucene.search.join.ScoreMode;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;

@@ -31,6 +35,7 @@ import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.fielddata.IndexFieldDataService;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.query.support.QueryInnerHits;
import org.elasticsearch.script.Script.ScriptParseException;
import org.elasticsearch.search.fetch.innerhits.InnerHitsBuilder;
import org.elasticsearch.search.fetch.innerhits.InnerHitsContext;
import org.elasticsearch.search.internal.SearchContext;

@@ -40,6 +45,8 @@ import org.elasticsearch.test.TestSearchContext;
import java.io.IOException;
import java.util.Arrays;

import static org.hamcrest.Matchers.containsString;

import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.CoreMatchers.instanceOf;

@@ -47,6 +54,7 @@ public class HasParentQueryBuilderTests extends AbstractQueryTestCase<HasParentQ
protected static final String PARENT_TYPE = "parent";
protected static final String CHILD_TYPE = "child";

@Override
public void setUp() throws Exception {
super.setUp();
MapperService mapperService = queryParserService().mapperService;

@@ -69,6 +77,7 @@ public class HasParentQueryBuilderTests extends AbstractQueryTestCase<HasParentQ
).string()), false, false);
}

@Override
protected void setSearchContext(String[] types) {
final MapperService mapperService = queryParserService().mapperService;
final IndexFieldDataService fieldData = queryParserService().fieldDataService;

@@ -204,4 +213,33 @@ public class HasParentQueryBuilderTests extends AbstractQueryTestCase<HasParentQ
assertThat(QueryShardContext.getTypes(), equalTo(searchTypes));
HasChildQueryBuilderTests.assertLateParsingQuery(query, PARENT_TYPE, "id");
}

/**
* override superclass test, because here we need to take care that mutation doesn't happen inside
* `inner_hits` structure, because we don't parse them yet and so no exception will be triggered
* for any mutation there.
*/
@Override
public void testUnknownObjectException() throws IOException {
String validQuery = createTestQueryBuilder().toString();
assertThat(validQuery, containsString("{"));
int endPosition = validQuery.indexOf("inner_hits");
if (endPosition == -1) {
endPosition = validQuery.length() - 1;
}
for (int insertionPosition = 0; insertionPosition < endPosition; insertionPosition++) {
if (validQuery.charAt(insertionPosition) == '{') {
String testQuery = validQuery.substring(0, insertionPosition) + "{ \"newField\" : " + validQuery.substring(insertionPosition) + "}";
try {
parseQuery(testQuery);
fail("some parsing exception expected for query: " + testQuery);
} catch (ParsingException | ScriptParseException | ElasticsearchParseException e) {
// different kinds of exception wordings depending on location
// of mutation, so no simple asserts possible here
} catch (JsonParseException e) {
// mutation produced invalid json
}
}
}
}
}
SimpleQueryStringBuilderTests.java:

@@ -27,9 +27,11 @@ import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;

import java.io.IOException;
import java.util.HashMap;
import java.util.HashSet;

@@ -259,7 +261,7 @@ public class SimpleQueryStringBuilderTests extends AbstractQueryTestCase<SimpleQ
"}";
XContentParser parser = XContentFactory.xContent(contentString).createParser(contentString);
context.reset(parser);
SimpleQueryStringBuilder queryBuilder = new SimpleQueryStringParser().fromXContent(context);
SimpleQueryStringBuilder queryBuilder = (SimpleQueryStringBuilder) parseQuery(parser, ParseFieldMatcher.EMPTY);
assertThat(queryBuilder.value(), equalTo(query));
assertThat(queryBuilder.fields(), notNullValue());
assertThat(queryBuilder.fields().size(), equalTo(0));
TermsQueryBuilderTests.java:

@@ -42,7 +42,10 @@ import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

import static org.hamcrest.Matchers.*;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.instanceOf;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.containsString;

public class TermsQueryBuilderTests extends AbstractQueryTestCase<TermsQueryBuilder> {
private List<Object> randomTerms;

@@ -195,9 +198,9 @@ public class TermsQueryBuilderTests extends AbstractQueryTestCase<TermsQueryBuil
"}";
try {
parseQuery(query);
fail("Expected IllegalArgumentException");
} catch(IllegalArgumentException e) {
assertThat(e.getMessage(), is("Both values and termsLookup specified for terms query"));
fail("Expected ParsingException");
} catch(ParsingException e) {
assertThat(e.getMessage(), containsString("[" + TermsQueryBuilder.NAME + "] query does not support more than one field."));
}
}

@@ -251,7 +254,7 @@ public class TermsQueryBuilderTests extends AbstractQueryTestCase<TermsQueryBuil
parseQuery(query);
fail("parsing should have failed");
} catch (ParsingException ex) {
assertThat(ex.getMessage(), equalTo("[terms] query does not support multiple fields"));
assertThat(ex.getMessage(), equalTo("[" + TermsQueryBuilder.NAME + "] query does not support multiple fields"));
}
}
}