Rename QueryParsingException to a more generic ParsingException
This allows us to reuse the exception in more places rather than adding new ones that are essentially just subclasses.
This commit is contained in:
parent 1a8495d1d6
commit effaaf0566
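Most of the diff below is mechanical: the parsers keep their behaviour and simply declare and throw org.elasticsearch.common.ParsingException instead of the old org.elasticsearch.index.query.QueryParsingException (the exception class itself moves packages and gains a constructor that takes an Index and an XContentParser directly). As a quick orientation, here is a minimal, hypothetical sketch, not part of this commit and with an invented class name, of how a query parser reports an error through the QueryParseContext constructor that the renamed class keeps:

// Hypothetical illustration only; it mirrors the parse(...) signatures changed below.
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.QueryParseContext;

import java.io.IOException;

public class ExampleQueryParser {

    public void parse(QueryParseContext parseContext) throws IOException, ParsingException {
        XContentParser parser = parseContext.parser();
        // The ParsingException(QueryParseContext, String, Object...) constructor records the
        // parser's current line and column, so callers get a located error message.
        if (parser.nextToken() != XContentParser.Token.FIELD_NAME) {
            throw new ParsingException(parseContext, "[example] query malformed, no field");
        }
    }
}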
@@ -19,6 +19,7 @@
 package org.elasticsearch;
+import org.elasticsearch.common.ParsingException;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;

@@ -504,7 +505,7 @@ public class ElasticsearchException extends RuntimeException implements ToXConte
 exceptions.put(org.elasticsearch.index.mapper.MapperException.class, 37);
 exceptions.put(org.elasticsearch.indices.InvalidTypeNameException.class, 38);
 exceptions.put(org.elasticsearch.snapshots.SnapshotRestoreException.class, 39);
-exceptions.put(org.elasticsearch.index.query.QueryParsingException.class, 40);
+exceptions.put(ParsingException.class, 40);
 exceptions.put(org.elasticsearch.index.shard.IndexShardClosedException.class, 41);
 exceptions.put(org.elasticsearch.script.expression.ExpressionScriptCompilationException.class, 42);
 exceptions.put(org.elasticsearch.indices.recovery.RecoverFilesRecoveryException.class, 43);
@@ -42,7 +42,7 @@ import org.elasticsearch.common.util.BigArrays;
 import org.elasticsearch.index.IndexService;
 import org.elasticsearch.index.engine.Engine;
 import org.elasticsearch.index.query.IndexQueryParserService;
-import org.elasticsearch.index.query.QueryParsingException;
+import org.elasticsearch.common.ParsingException;
 import org.elasticsearch.index.shard.IndexShard;
 import org.elasticsearch.indices.IndicesService;
 import org.elasticsearch.script.ScriptService;

@@ -189,7 +189,7 @@ public class TransportValidateQueryAction extends TransportBroadcastAction<Valid
 if (request.rewrite()) {
 explanation = getRewrittenQuery(searcher.searcher(), searchContext.query());
 }
-} catch (QueryParsingException e) {
+} catch (ParsingException e) {
 valid = false;
 error = e.getDetailedMessage();
 } catch (AssertionError|IOException e) {
@@ -17,7 +17,7 @@
 * under the License.
 */
-package org.elasticsearch.index.query;
+package org.elasticsearch.common;
 import org.elasticsearch.ElasticsearchException;
 import org.elasticsearch.common.io.stream.StreamInput;

@@ -26,6 +26,7 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentLocation;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.index.Index;
+import org.elasticsearch.index.query.QueryParseContext;
 import org.elasticsearch.rest.RestStatus;
 import java.io.IOException;

@@ -33,22 +34,25 @@ import java.io.IOException;
 /**
 *
 */
-public class QueryParsingException extends ElasticsearchException {
+public class ParsingException extends ElasticsearchException {
-static final int UNKNOWN_POSITION = -1;
+protected static final int UNKNOWN_POSITION = -1;
 private final int lineNumber;
 private final int columnNumber;
-public QueryParsingException(QueryParseContext parseContext, String msg, Object... args) {
+public ParsingException(QueryParseContext parseContext, String msg, Object... args) {
 this(parseContext, msg, null, args);
 }
-public QueryParsingException(QueryParseContext parseContext, String msg, Throwable cause, Object... args) {
+public ParsingException(QueryParseContext parseContext, String msg, Throwable cause, Object... args) {
+this(parseContext.index(), parseContext.parser(), msg, cause, args);
+}
+public ParsingException(Index index, XContentParser parser, String msg, Throwable cause, Object... args) {
 super(msg, cause, args);
-setIndex(parseContext.index());
+setIndex(index);
 int lineNumber = UNKNOWN_POSITION;
 int columnNumber = UNKNOWN_POSITION;
-XContentParser parser = parseContext.parser();
 if (parser != null) {
 XContentLocation location = parser.getTokenLocation();
 if (location != null) {

@@ -64,7 +68,7 @@ public class QueryParsingException extends ElasticsearchException {
 * This constructor is provided for use in unit tests where a
 * {@link QueryParseContext} may not be available
 */
-public QueryParsingException(Index index, int line, int col, String msg, Throwable cause) {
+public ParsingException(Index index, int line, int col, String msg, Throwable cause) {
 super(msg, cause);
 setIndex(index);
 this.lineNumber = line;

@@ -110,7 +114,7 @@ public class QueryParsingException extends ElasticsearchException {
 out.writeInt(columnNumber);
 }
-public QueryParsingException(StreamInput in) throws IOException{
+public ParsingException(StreamInput in) throws IOException{
 super(in);
 lineNumber = in.readInt();
 columnNumber = in.readInt();
@@ -32,7 +32,6 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.common.xcontent.XContentHelper;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.index.cache.bitset.BitsetFilterCache;
 import org.elasticsearch.index.engine.Engine;
 import org.elasticsearch.index.fielddata.IndexFieldDataService;
 import org.elasticsearch.index.indexing.IndexingOperationListener;

@@ -44,7 +43,7 @@ import org.elasticsearch.index.mapper.internal.TypeFieldMapper;
 import org.elasticsearch.index.percolator.stats.ShardPercolateService;
 import org.elasticsearch.index.query.IndexQueryParserService;
 import org.elasticsearch.index.query.QueryParseContext;
-import org.elasticsearch.index.query.QueryParsingException;
+import org.elasticsearch.common.ParsingException;
 import org.elasticsearch.index.settings.IndexSettings;
 import org.elasticsearch.index.shard.AbstractIndexShardComponent;
 import org.elasticsearch.index.shard.IndexShard;

@@ -209,7 +208,7 @@ public class PercolatorQueriesRegistry extends AbstractIndexShardComponent imple
 context.setMapUnmappedFieldAsString(mapUnmappedFieldsAsString ? true : false);
 return queryParserService.parseInnerQuery(context);
 } catch (IOException e) {
-throw new QueryParsingException(context, "Failed to parse", e);
+throw new ParsingException(context, "Failed to parse", e);
 } finally {
 if (type != null) {
 QueryParseContext.setTypes(previousTypes);
@@ -23,6 +23,7 @@ import org.apache.lucene.search.BooleanClause;
 import org.apache.lucene.search.BooleanQuery;
 import org.apache.lucene.search.MatchAllDocsQuery;
 import org.apache.lucene.search.Query;
+import org.elasticsearch.common.ParsingException;
 import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.lucene.search.Queries;
 import org.elasticsearch.common.settings.Settings;

@@ -52,7 +53,7 @@ public class BoolQueryParser implements QueryParser {
 }
 @Override
-public Query parse(QueryParseContext parseContext) throws IOException, QueryParsingException {
+public Query parse(QueryParseContext parseContext) throws IOException, ParsingException {
 XContentParser parser = parseContext.parser();
 boolean disableCoord = false;

@@ -101,7 +102,7 @@ public class BoolQueryParser implements QueryParser {
 }
 break;
 default:
-throw new QueryParsingException(parseContext, "[bool] query does not support [" + currentFieldName + "]");
+throw new ParsingException(parseContext, "[bool] query does not support [" + currentFieldName + "]");
 }
 } else if (token == XContentParser.Token.START_ARRAY) {
 while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {

@@ -135,7 +136,7 @@ public class BoolQueryParser implements QueryParser {
 }
 break;
 default:
-throw new QueryParsingException(parseContext, "bool query does not support [" + currentFieldName + "]");
+throw new ParsingException(parseContext, "bool query does not support [" + currentFieldName + "]");
 }
 }
 } else if (token.isValue()) {

@@ -152,7 +153,7 @@ public class BoolQueryParser implements QueryParser {
 } else if ("_name".equals(currentFieldName)) {
 queryName = parser.text();
 } else {
-throw new QueryParsingException(parseContext, "[bool] query does not support [" + currentFieldName + "]");
+throw new ParsingException(parseContext, "[bool] query does not support [" + currentFieldName + "]");
 }
 }
 }
@@ -21,6 +21,7 @@ package org.elasticsearch.index.query;
 import org.apache.lucene.queries.BoostingQuery;
 import org.apache.lucene.search.Query;
+import org.elasticsearch.common.ParsingException;
 import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.xcontent.XContentParser;

@@ -43,7 +44,7 @@ public class BoostingQueryParser implements QueryParser {
 }
 @Override
-public Query parse(QueryParseContext parseContext) throws IOException, QueryParsingException {
+public Query parse(QueryParseContext parseContext) throws IOException, ParsingException {
 XContentParser parser = parseContext.parser();
 Query positiveQuery = null;

@@ -66,7 +67,7 @@ public class BoostingQueryParser implements QueryParser {
 negativeQuery = parseContext.parseInnerQuery();
 negativeQueryFound = true;
 } else {
-throw new QueryParsingException(parseContext, "[boosting] query does not support [" + currentFieldName + "]");
+throw new ParsingException(parseContext, "[boosting] query does not support [" + currentFieldName + "]");
 }
 } else if (token.isValue()) {
 if ("negative_boost".equals(currentFieldName) || "negativeBoost".equals(currentFieldName)) {

@@ -74,19 +75,19 @@ public class BoostingQueryParser implements QueryParser {
 } else if ("boost".equals(currentFieldName)) {
 boost = parser.floatValue();
 } else {
-throw new QueryParsingException(parseContext, "[boosting] query does not support [" + currentFieldName + "]");
+throw new ParsingException(parseContext, "[boosting] query does not support [" + currentFieldName + "]");
 }
 }
 }
 if (positiveQuery == null && !positiveQueryFound) {
-throw new QueryParsingException(parseContext, "[boosting] query requires 'positive' query to be set'");
+throw new ParsingException(parseContext, "[boosting] query requires 'positive' query to be set'");
 }
 if (negativeQuery == null && !negativeQueryFound) {
-throw new QueryParsingException(parseContext, "[boosting] query requires 'negative' query to be set'");
+throw new ParsingException(parseContext, "[boosting] query requires 'negative' query to be set'");
 }
 if (negativeBoost == -1) {
-throw new QueryParsingException(parseContext, "[boosting] query requires 'negative_boost' to be set'");
+throw new ParsingException(parseContext, "[boosting] query requires 'negative_boost' to be set'");
 }
 // parsers returned null
@@ -28,6 +28,7 @@ import org.apache.lucene.search.BooleanClause;
 import org.apache.lucene.search.BooleanClause.Occur;
 import org.apache.lucene.search.Query;
 import org.apache.lucene.util.BytesRefBuilder;
+import org.elasticsearch.common.ParsingException;
 import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.index.mapper.MappedFieldType;

@@ -60,11 +61,11 @@ public class CommonTermsQueryParser implements QueryParser {
 }
 @Override
-public Query parse(QueryParseContext parseContext) throws IOException, QueryParsingException {
+public Query parse(QueryParseContext parseContext) throws IOException, ParsingException {
 XContentParser parser = parseContext.parser();
 XContentParser.Token token = parser.nextToken();
 if (token != XContentParser.Token.FIELD_NAME) {
-throw new QueryParsingException(parseContext, "[common] query malformed, no field");
+throw new ParsingException(parseContext, "[common] query malformed, no field");
 }
 String fieldName = parser.currentName();
 Object value = null;

@@ -95,13 +96,13 @@ public class CommonTermsQueryParser implements QueryParser {
 } else if ("high_freq".equals(innerFieldName) || "highFreq".equals(innerFieldName)) {
 highFreqMinimumShouldMatch = parser.text();
 } else {
-throw new QueryParsingException(parseContext, "[common] query does not support [" + innerFieldName
+throw new ParsingException(parseContext, "[common] query does not support [" + innerFieldName
 + "] for [" + currentFieldName + "]");
 }
 }
 }
 } else {
-throw new QueryParsingException(parseContext, "[common] query does not support [" + currentFieldName + "]");
+throw new ParsingException(parseContext, "[common] query does not support [" + currentFieldName + "]");
 }
 } else if (token.isValue()) {
 if ("query".equals(currentFieldName)) {

@@ -109,7 +110,7 @@ public class CommonTermsQueryParser implements QueryParser {
 } else if ("analyzer".equals(currentFieldName)) {
 String analyzer = parser.text();
 if (parseContext.analysisService().analyzer(analyzer) == null) {
-throw new QueryParsingException(parseContext, "[common] analyzer [" + parser.text() + "] not found");
+throw new ParsingException(parseContext, "[common] analyzer [" + parser.text() + "] not found");
 }
 queryAnalyzer = analyzer;
 } else if ("disable_coord".equals(currentFieldName) || "disableCoord".equals(currentFieldName)) {

@@ -123,7 +124,7 @@ public class CommonTermsQueryParser implements QueryParser {
 } else if ("and".equalsIgnoreCase(op)) {
 highFreqOccur = BooleanClause.Occur.MUST;
 } else {
-throw new QueryParsingException(parseContext,
+throw new ParsingException(parseContext,
 "[common] query requires operator to be either 'and' or 'or', not [" + op + "]");
 }
 } else if ("low_freq_operator".equals(currentFieldName) || "lowFreqOperator".equals(currentFieldName)) {

@@ -133,7 +134,7 @@ public class CommonTermsQueryParser implements QueryParser {
 } else if ("and".equalsIgnoreCase(op)) {
 lowFreqOccur = BooleanClause.Occur.MUST;
 } else {
-throw new QueryParsingException(parseContext,
+throw new ParsingException(parseContext,
 "[common] query requires operator to be either 'and' or 'or', not [" + op + "]");
 }
 } else if ("minimum_should_match".equals(currentFieldName) || "minimumShouldMatch".equals(currentFieldName)) {

@@ -143,7 +144,7 @@ public class CommonTermsQueryParser implements QueryParser {
 } else if ("_name".equals(currentFieldName)) {
 queryName = parser.text();
 } else {
-throw new QueryParsingException(parseContext, "[common] query does not support [" + currentFieldName + "]");
+throw new ParsingException(parseContext, "[common] query does not support [" + currentFieldName + "]");
 }
 }
 }

@@ -153,14 +154,14 @@ public class CommonTermsQueryParser implements QueryParser {
 // move to the next token
 token = parser.nextToken();
 if (token != XContentParser.Token.END_OBJECT) {
-throw new QueryParsingException(
+throw new ParsingException(
 parseContext,
 "[common] query parsed in simplified form, with direct field name, but included more options than just the field name, possibly use its 'options' form, with 'query' element?");
 }
 }
 if (value == null) {
-throw new QueryParsingException(parseContext, "No text specified for text query");
+throw new ParsingException(parseContext, "No text specified for text query");
 }
 String field;
 MappedFieldType fieldType = parseContext.fieldMapper(fieldName);
@@ -22,6 +22,7 @@ package org.elasticsearch.index.query;
 import org.apache.lucene.search.ConstantScoreQuery;
 import org.apache.lucene.search.Query;
 import org.elasticsearch.common.ParseField;
+import org.elasticsearch.common.ParsingException;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.xcontent.XContentParser;

@@ -46,7 +47,7 @@ public class ConstantScoreQueryParser implements QueryParser {
 }
 @Override
-public Query parse(QueryParseContext parseContext) throws IOException, QueryParsingException {
+public Query parse(QueryParseContext parseContext) throws IOException, ParsingException {
 XContentParser parser = parseContext.parser();
 Query filter = null;

@@ -65,18 +66,18 @@ public class ConstantScoreQueryParser implements QueryParser {
 filter = parseContext.parseInnerFilter();
 queryFound = true;
 } else {
-throw new QueryParsingException(parseContext, "[constant_score] query does not support [" + currentFieldName + "]");
+throw new ParsingException(parseContext, "[constant_score] query does not support [" + currentFieldName + "]");
 }
 } else if (token.isValue()) {
 if ("boost".equals(currentFieldName)) {
 boost = parser.floatValue();
 } else {
-throw new QueryParsingException(parseContext, "[constant_score] query does not support [" + currentFieldName + "]");
+throw new ParsingException(parseContext, "[constant_score] query does not support [" + currentFieldName + "]");
 }
 }
 }
 if (!queryFound) {
-throw new QueryParsingException(parseContext, "[constant_score] requires a 'filter' element");
+throw new ParsingException(parseContext, "[constant_score] requires a 'filter' element");
 }
 if (filter == null) {
@@ -21,6 +21,7 @@ package org.elasticsearch.index.query;
 import org.apache.lucene.search.DisjunctionMaxQuery;
 import org.apache.lucene.search.Query;
+import org.elasticsearch.common.ParsingException;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.xcontent.XContentParser;

@@ -46,7 +47,7 @@ public class DisMaxQueryParser implements QueryParser {
 }
 @Override
-public Query parse(QueryParseContext parseContext) throws IOException, QueryParsingException {
+public Query parse(QueryParseContext parseContext) throws IOException, ParsingException {
 XContentParser parser = parseContext.parser();
 float boost = 1.0f;

@@ -69,7 +70,7 @@ public class DisMaxQueryParser implements QueryParser {
 queries.add(query);
 }
 } else {
-throw new QueryParsingException(parseContext, "[dis_max] query does not support [" + currentFieldName + "]");
+throw new ParsingException(parseContext, "[dis_max] query does not support [" + currentFieldName + "]");
 }
 } else if (token == XContentParser.Token.START_ARRAY) {
 if ("queries".equals(currentFieldName)) {

@@ -82,7 +83,7 @@ public class DisMaxQueryParser implements QueryParser {
 token = parser.nextToken();
 }
 } else {
-throw new QueryParsingException(parseContext, "[dis_max] query does not support [" + currentFieldName + "]");
+throw new ParsingException(parseContext, "[dis_max] query does not support [" + currentFieldName + "]");
 }
 } else {
 if ("boost".equals(currentFieldName)) {

@@ -92,13 +93,13 @@ public class DisMaxQueryParser implements QueryParser {
 } else if ("_name".equals(currentFieldName)) {
 queryName = parser.text();
 } else {
-throw new QueryParsingException(parseContext, "[dis_max] query does not support [" + currentFieldName + "]");
+throw new ParsingException(parseContext, "[dis_max] query does not support [" + currentFieldName + "]");
 }
 }
 }
 if (!queriesFound) {
-throw new QueryParsingException(parseContext, "[dis_max] requires 'queries' field");
+throw new ParsingException(parseContext, "[dis_max] requires 'queries' field");
 }
 if (queries.isEmpty()) {
@@ -20,6 +20,7 @@
 package org.elasticsearch.index.query;
 import org.apache.lucene.search.*;
+import org.elasticsearch.common.ParsingException;
 import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.lucene.search.Queries;
 import org.elasticsearch.common.xcontent.XContentParser;

@@ -47,7 +48,7 @@ public class ExistsQueryParser implements QueryParser {
 }
 @Override
-public Query parse(QueryParseContext parseContext) throws IOException, QueryParsingException {
+public Query parse(QueryParseContext parseContext) throws IOException, ParsingException {
 XContentParser parser = parseContext.parser();
 String fieldPattern = null;

@@ -64,13 +65,13 @@ public class ExistsQueryParser implements QueryParser {
 } else if ("_name".equals(currentFieldName)) {
 queryName = parser.text();
 } else {
-throw new QueryParsingException(parseContext, "[exists] query does not support [" + currentFieldName + "]");
+throw new ParsingException(parseContext, "[exists] query does not support [" + currentFieldName + "]");
 }
 }
 }
 if (fieldPattern == null) {
-throw new QueryParsingException(parseContext, "exists must be provided with a [field]");
+throw new ParsingException(parseContext, "exists must be provided with a [field]");
 }
 return newFilter(parseContext, fieldPattern, queryName);
@@ -22,10 +22,10 @@ package org.elasticsearch.index.query;
 import org.apache.lucene.search.Query;
 import org.apache.lucene.search.spans.FieldMaskingSpanQuery;
 import org.apache.lucene.search.spans.SpanQuery;
+import org.elasticsearch.common.ParsingException;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.index.mapper.FieldMapper;
 import org.elasticsearch.index.mapper.MappedFieldType;
 import java.io.IOException;

@@ -47,7 +47,7 @@ public class FieldMaskingSpanQueryParser implements QueryParser {
 }
 @Override
-public Query parse(QueryParseContext parseContext) throws IOException, QueryParsingException {
+public Query parse(QueryParseContext parseContext) throws IOException, ParsingException {
 XContentParser parser = parseContext.parser();
 float boost = 1.0f;

@@ -65,11 +65,11 @@ public class FieldMaskingSpanQueryParser implements QueryParser {
 if ("query".equals(currentFieldName)) {
 Query query = parseContext.parseInnerQuery();
 if (!(query instanceof SpanQuery)) {
-throw new QueryParsingException(parseContext, "[field_masking_span] query] must be of type span query");
+throw new ParsingException(parseContext, "[field_masking_span] query] must be of type span query");
 }
 inner = (SpanQuery) query;
 } else {
-throw new QueryParsingException(parseContext, "[field_masking_span] query does not support ["
+throw new ParsingException(parseContext, "[field_masking_span] query does not support ["
 + currentFieldName + "]");
 }
 } else {

@@ -80,15 +80,15 @@ public class FieldMaskingSpanQueryParser implements QueryParser {
 } else if ("_name".equals(currentFieldName)) {
 queryName = parser.text();
 } else {
-throw new QueryParsingException(parseContext, "[field_masking_span] query does not support [" + currentFieldName + "]");
+throw new ParsingException(parseContext, "[field_masking_span] query does not support [" + currentFieldName + "]");
 }
 }
 }
 if (inner == null) {
-throw new QueryParsingException(parseContext, "field_masking_span must have [query] span query clause");
+throw new ParsingException(parseContext, "field_masking_span must have [query] span query clause");
 }
 if (field == null) {
-throw new QueryParsingException(parseContext, "field_masking_span must have [field] set for it");
+throw new ParsingException(parseContext, "field_masking_span must have [field] set for it");
 }
 MappedFieldType fieldType = parseContext.fieldMapper(field);
@@ -24,6 +24,7 @@ import org.apache.lucene.search.FuzzyQuery;
 import org.apache.lucene.search.MultiTermQuery;
 import org.apache.lucene.search.Query;
 import org.elasticsearch.common.ParseField;
+import org.elasticsearch.common.ParsingException;
 import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.lucene.BytesRefs;
 import org.elasticsearch.common.unit.Fuzziness;

@@ -53,12 +54,12 @@ public class FuzzyQueryParser implements QueryParser {
 }
 @Override
-public Query parse(QueryParseContext parseContext) throws IOException, QueryParsingException {
+public Query parse(QueryParseContext parseContext) throws IOException, ParsingException {
 XContentParser parser = parseContext.parser();
 XContentParser.Token token = parser.nextToken();
 if (token != XContentParser.Token.FIELD_NAME) {
-throw new QueryParsingException(parseContext, "[fuzzy] query malformed, no field");
+throw new ParsingException(parseContext, "[fuzzy] query malformed, no field");
 }
 String fieldName = parser.currentName();

@@ -99,7 +100,7 @@ public class FuzzyQueryParser implements QueryParser {
 } else if ("_name".equals(currentFieldName)) {
 queryName = parser.text();
 } else {
-throw new QueryParsingException(parseContext, "[fuzzy] query does not support [" + currentFieldName + "]");
+throw new ParsingException(parseContext, "[fuzzy] query does not support [" + currentFieldName + "]");
 }
 }
 }

@@ -111,7 +112,7 @@ public class FuzzyQueryParser implements QueryParser {
 }
 if (value == null) {
-throw new QueryParsingException(parseContext, "No value specified for fuzzy query");
+throw new ParsingException(parseContext, "No value specified for fuzzy query");
 }
 Query query = null;
@@ -22,6 +22,7 @@ package org.elasticsearch.index.query;
 import org.apache.lucene.search.Query;
 import org.elasticsearch.ElasticsearchParseException;
 import org.elasticsearch.Version;
+import org.elasticsearch.common.ParsingException;
 import org.elasticsearch.common.geo.GeoPoint;
 import org.elasticsearch.common.geo.GeoUtils;
 import org.elasticsearch.common.inject.Inject;

@@ -68,7 +69,7 @@ public class GeoBoundingBoxQueryParser implements QueryParser {
 }
 @Override
-public Query parse(QueryParseContext parseContext) throws IOException, QueryParsingException {
+public Query parse(QueryParseContext parseContext) throws IOException, ParsingException {
 XContentParser parser = parseContext.parser();
 String fieldName = null;

@@ -149,7 +150,7 @@ public class GeoBoundingBoxQueryParser implements QueryParser {
 } else if ("ignore_malformed".equals(currentFieldName) && coerce == false) {
 ignoreMalformed = parser.booleanValue();
 } else {
-throw new QueryParsingException(parseContext, "failed to parse [{}] query. unexpected field [{}]", NAME, currentFieldName);
+throw new ParsingException(parseContext, "failed to parse [{}] query. unexpected field [{}]", NAME, currentFieldName);
 }
 }
 }

@@ -160,16 +161,16 @@ public class GeoBoundingBoxQueryParser implements QueryParser {
 // validation was not available prior to 2.x, so to support bwc percolation queries we only ignore_malformed on 2.x created indexes
 if (!indexCreatedBeforeV2_0 && !ignoreMalformed) {
 if (topLeft.lat() > 90.0 || topLeft.lat() < -90.0) {
-throw new QueryParsingException(parseContext, "illegal latitude value [{}] for [{}]", topLeft.lat(), NAME);
+throw new ParsingException(parseContext, "illegal latitude value [{}] for [{}]", topLeft.lat(), NAME);
 }
 if (topLeft.lon() > 180.0 || topLeft.lon() < -180) {
-throw new QueryParsingException(parseContext, "illegal longitude value [{}] for [{}]", topLeft.lon(), NAME);
+throw new ParsingException(parseContext, "illegal longitude value [{}] for [{}]", topLeft.lon(), NAME);
 }
 if (bottomRight.lat() > 90.0 || bottomRight.lat() < -90.0) {
-throw new QueryParsingException(parseContext, "illegal latitude value [{}] for [{}]", bottomRight.lat(), NAME);
+throw new ParsingException(parseContext, "illegal latitude value [{}] for [{}]", bottomRight.lat(), NAME);
 }
 if (bottomRight.lon() > 180.0 || bottomRight.lon() < -180) {
-throw new QueryParsingException(parseContext, "illegal longitude value [{}] for [{}]", bottomRight.lon(), NAME);
+throw new ParsingException(parseContext, "illegal longitude value [{}] for [{}]", bottomRight.lon(), NAME);
 }
 }

@@ -187,10 +188,10 @@ public class GeoBoundingBoxQueryParser implements QueryParser {
 MappedFieldType fieldType = parseContext.fieldMapper(fieldName);
 if (fieldType == null) {
-throw new QueryParsingException(parseContext, "failed to parse [{}] query. could not find [{}] field [{}]", NAME, GeoPointFieldMapper.CONTENT_TYPE, fieldName);
+throw new ParsingException(parseContext, "failed to parse [{}] query. could not find [{}] field [{}]", NAME, GeoPointFieldMapper.CONTENT_TYPE, fieldName);
 }
 if (!(fieldType instanceof GeoPointFieldMapper.GeoPointFieldType)) {
-throw new QueryParsingException(parseContext, "failed to parse [{}] query. field [{}] is expected to be of type [{}], but is of [{}] type instead", NAME, fieldName, GeoPointFieldMapper.CONTENT_TYPE, fieldType.typeName());
+throw new ParsingException(parseContext, "failed to parse [{}] query. field [{}] is expected to be of type [{}], but is of [{}] type instead", NAME, fieldName, GeoPointFieldMapper.CONTENT_TYPE, fieldType.typeName());
 }
 GeoPointFieldMapper.GeoPointFieldType geoFieldType = ((GeoPointFieldMapper.GeoPointFieldType) fieldType);

@@ -201,7 +202,7 @@ public class GeoBoundingBoxQueryParser implements QueryParser {
 IndexGeoPointFieldData indexFieldData = parseContext.getForField(fieldType);
 filter = new InMemoryGeoBoundingBoxQuery(topLeft, bottomRight, indexFieldData);
 } else {
-throw new QueryParsingException(parseContext, "failed to parse [{}] query. geo bounding box type [{}] is not supported. either [indexed] or [memory] are allowed", NAME, type);
+throw new ParsingException(parseContext, "failed to parse [{}] query. geo bounding box type [{}] is not supported. either [indexed] or [memory] are allowed", NAME, type);
 }
 if (queryName != null) {
@@ -21,6 +21,7 @@ package org.elasticsearch.index.query;
 import org.apache.lucene.search.Query;
 import org.elasticsearch.Version;
+import org.elasticsearch.common.ParsingException;
 import org.elasticsearch.common.geo.GeoDistance;
 import org.elasticsearch.common.geo.GeoPoint;
 import org.elasticsearch.common.geo.GeoUtils;

@@ -56,7 +57,7 @@ public class GeoDistanceQueryParser implements QueryParser {
 }
 @Override
-public Query parse(QueryParseContext parseContext) throws IOException, QueryParsingException {
+public Query parse(QueryParseContext parseContext) throws IOException, ParsingException {
 XContentParser parser = parseContext.parser();
 XContentParser.Token token;

@@ -96,7 +97,7 @@ public class GeoDistanceQueryParser implements QueryParser {
 } else if (currentName.equals(GeoPointFieldMapper.Names.GEOHASH)) {
 point.resetFromGeoHash(parser.text());
 } else {
-throw new QueryParsingException(parseContext, "[geo_distance] query does not support [" + currentFieldName
+throw new ParsingException(parseContext, "[geo_distance] query does not support [" + currentFieldName
 + "]");
 }
 }

@@ -142,10 +143,10 @@ public class GeoDistanceQueryParser implements QueryParser {
 // validation was not available prior to 2.x, so to support bwc percolation queries we only ignore_malformed on 2.x created indexes
 if (!indexCreatedBeforeV2_0 && !ignoreMalformed) {
 if (point.lat() > 90.0 || point.lat() < -90.0) {
-throw new QueryParsingException(parseContext, "illegal latitude value [{}] for [{}]", point.lat(), NAME);
+throw new ParsingException(parseContext, "illegal latitude value [{}] for [{}]", point.lat(), NAME);
 }
 if (point.lon() > 180.0 || point.lon() < -180) {
-throw new QueryParsingException(parseContext, "illegal longitude value [{}] for [{}]", point.lon(), NAME);
+throw new ParsingException(parseContext, "illegal longitude value [{}] for [{}]", point.lon(), NAME);
 }
 }

@@ -154,7 +155,7 @@ public class GeoDistanceQueryParser implements QueryParser {
 }
 if (vDistance == null) {
-throw new QueryParsingException(parseContext, "geo_distance requires 'distance' to be specified");
+throw new ParsingException(parseContext, "geo_distance requires 'distance' to be specified");
 } else if (vDistance instanceof Number) {
 distance = DistanceUnit.DEFAULT.convert(((Number) vDistance).doubleValue(), unit);
 } else {

@@ -164,10 +165,10 @@ public class GeoDistanceQueryParser implements QueryParser {
 MappedFieldType fieldType = parseContext.fieldMapper(fieldName);
 if (fieldType == null) {
-throw new QueryParsingException(parseContext, "failed to find geo_point field [" + fieldName + "]");
+throw new ParsingException(parseContext, "failed to find geo_point field [" + fieldName + "]");
 }
 if (!(fieldType instanceof GeoPointFieldMapper.GeoPointFieldType)) {
-throw new QueryParsingException(parseContext, "field [" + fieldName + "] is not a geo_point field");
+throw new ParsingException(parseContext, "field [" + fieldName + "] is not a geo_point field");
 }
 GeoPointFieldMapper.GeoPointFieldType geoFieldType = ((GeoPointFieldMapper.GeoPointFieldType) fieldType);
@@ -21,6 +21,7 @@ package org.elasticsearch.index.query;
 import org.apache.lucene.search.Query;
 import org.elasticsearch.Version;
+import org.elasticsearch.common.ParsingException;
 import org.elasticsearch.common.geo.GeoDistance;
 import org.elasticsearch.common.geo.GeoPoint;
 import org.elasticsearch.common.geo.GeoUtils;

@@ -56,7 +57,7 @@ public class GeoDistanceRangeQueryParser implements QueryParser {
 }
 @Override
-public Query parse(QueryParseContext parseContext) throws IOException, QueryParsingException {
+public Query parse(QueryParseContext parseContext) throws IOException, ParsingException {
 XContentParser parser = parseContext.parser();
 XContentParser.Token token;

@@ -172,10 +173,10 @@ public class GeoDistanceRangeQueryParser implements QueryParser {
 // validation was not available prior to 2.x, so to support bwc percolation queries we only ignore_malformed on 2.x created indexes
 if (!indexCreatedBeforeV2_0 && !ignoreMalformed) {
 if (point.lat() > 90.0 || point.lat() < -90.0) {
-throw new QueryParsingException(parseContext, "illegal latitude value [{}] for [{}]", point.lat(), NAME);
+throw new ParsingException(parseContext, "illegal latitude value [{}] for [{}]", point.lat(), NAME);
 }
 if (point.lon() > 180.0 || point.lon() < -180) {
-throw new QueryParsingException(parseContext, "illegal longitude value [{}] for [{}]", point.lon(), NAME);
+throw new ParsingException(parseContext, "illegal longitude value [{}] for [{}]", point.lon(), NAME);
 }
 }

@@ -204,10 +205,10 @@ public class GeoDistanceRangeQueryParser implements QueryParser {
 MappedFieldType fieldType = parseContext.fieldMapper(fieldName);
 if (fieldType == null) {
-throw new QueryParsingException(parseContext, "failed to find geo_point field [" + fieldName + "]");
+throw new ParsingException(parseContext, "failed to find geo_point field [" + fieldName + "]");
 }
 if (!(fieldType instanceof GeoPointFieldMapper.GeoPointFieldType)) {
-throw new QueryParsingException(parseContext, "field [" + fieldName + "] is not a geo_point field");
+throw new ParsingException(parseContext, "field [" + fieldName + "] is not a geo_point field");
 }
 GeoPointFieldMapper.GeoPointFieldType geoFieldType = ((GeoPointFieldMapper.GeoPointFieldType) fieldType);
@@ -21,6 +21,7 @@ package org.elasticsearch.index.query;
 import org.apache.lucene.search.Query;
 import org.elasticsearch.Version;
+import org.elasticsearch.common.ParsingException;
 import org.elasticsearch.common.geo.GeoPoint;
 import org.elasticsearch.common.geo.GeoUtils;
 import org.elasticsearch.common.inject.Inject;

@@ -62,7 +63,7 @@ public class GeoPolygonQueryParser implements QueryParser {
 }
 @Override
-public Query parse(QueryParseContext parseContext) throws IOException, QueryParsingException {
+public Query parse(QueryParseContext parseContext) throws IOException, ParsingException {
 XContentParser parser = parseContext.parser();
 String fieldName = null;

@@ -96,11 +97,11 @@ public class GeoPolygonQueryParser implements QueryParser {
 shell.add(shell.get(0));
 }
 } else {
-throw new QueryParsingException(parseContext, "[geo_polygon] query does not support [" + currentFieldName
+throw new ParsingException(parseContext, "[geo_polygon] query does not support [" + currentFieldName
 + "]");
 }
 } else {
-throw new QueryParsingException(parseContext, "[geo_polygon] query does not support token type [" + token.name()
+throw new ParsingException(parseContext, "[geo_polygon] query does not support token type [" + token.name()
 + "] under [" + currentFieldName + "]");
 }
 }

@@ -115,25 +116,25 @@ public class GeoPolygonQueryParser implements QueryParser {
 } else if ("ignore_malformed".equals(currentFieldName) && coerce == false) {
 ignoreMalformed = parser.booleanValue();
 } else {
-throw new QueryParsingException(parseContext, "[geo_polygon] query does not support [" + currentFieldName + "]");
+throw new ParsingException(parseContext, "[geo_polygon] query does not support [" + currentFieldName + "]");
 }
 } else {
-throw new QueryParsingException(parseContext, "[geo_polygon] unexpected token type [" + token.name() + "]");
+throw new ParsingException(parseContext, "[geo_polygon] unexpected token type [" + token.name() + "]");
 }
 }
 if (shell.isEmpty()) {
-throw new QueryParsingException(parseContext, "no points defined for geo_polygon query");
+throw new ParsingException(parseContext, "no points defined for geo_polygon query");
 } else {
 if (shell.size() < 3) {
-throw new QueryParsingException(parseContext, "too few points defined for geo_polygon query");
+throw new ParsingException(parseContext, "too few points defined for geo_polygon query");
 }
 GeoPoint start = shell.get(0);
 if (!start.equals(shell.get(shell.size() - 1))) {
 shell.add(start);
 }
 if (shell.size() < 4) {
-throw new QueryParsingException(parseContext, "too few points defined for geo_polygon query");
+throw new ParsingException(parseContext, "too few points defined for geo_polygon query");
 }
 }

@@ -141,10 +142,10 @@ public class GeoPolygonQueryParser implements QueryParser {
 if (!indexCreatedBeforeV2_0 && !ignoreMalformed) {
 for (GeoPoint point : shell) {
 if (point.lat() > 90.0 || point.lat() < -90.0) {
-throw new QueryParsingException(parseContext, "illegal latitude value [{}] for [{}]", point.lat(), NAME);
+throw new ParsingException(parseContext, "illegal latitude value [{}] for [{}]", point.lat(), NAME);
 }
 if (point.lon() > 180.0 || point.lon() < -180) {
-throw new QueryParsingException(parseContext, "illegal longitude value [{}] for [{}]", point.lon(), NAME);
+throw new ParsingException(parseContext, "illegal longitude value [{}] for [{}]", point.lon(), NAME);
 }
 }
 }

@@ -157,10 +158,10 @@ public class GeoPolygonQueryParser implements QueryParser {
 MappedFieldType fieldType = parseContext.fieldMapper(fieldName);
 if (fieldType == null) {
-throw new QueryParsingException(parseContext, "failed to find geo_point field [" + fieldName + "]");
+throw new ParsingException(parseContext, "failed to find geo_point field [" + fieldName + "]");
 }
 if (!(fieldType instanceof GeoPointFieldMapper.GeoPointFieldType)) {
-throw new QueryParsingException(parseContext, "field [" + fieldName + "] is not a geo_point field");
+throw new ParsingException(parseContext, "field [" + fieldName + "] is not a geo_point field");
 }
 IndexGeoPointFieldData indexFieldData = parseContext.getForField(fieldType);
@@ -26,12 +26,12 @@ import org.apache.lucene.spatial.query.SpatialArgs;
 import org.apache.lucene.spatial.query.SpatialOperation;
 import org.elasticsearch.action.get.GetRequest;
 import org.elasticsearch.common.Nullable;
+import org.elasticsearch.common.ParsingException;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.geo.ShapeRelation;
 import org.elasticsearch.common.geo.builders.ShapeBuilder;
 import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.index.mapper.FieldMapper;
 import org.elasticsearch.index.mapper.MappedFieldType;
 import org.elasticsearch.index.mapper.geo.GeoShapeFieldMapper;
 import org.elasticsearch.index.search.shape.ShapeFetchService;

@@ -56,7 +56,7 @@ public class GeoShapeQueryParser implements QueryParser {
 }
 @Override
-public Query parse(QueryParseContext parseContext) throws IOException, QueryParsingException {
+public Query parse(QueryParseContext parseContext) throws IOException, ParsingException {
 XContentParser parser = parseContext.parser();
 String fieldName = null;

@@ -91,7 +91,7 @@ public class GeoShapeQueryParser implements QueryParser {
 } else if ("relation".equals(currentFieldName)) {
 shapeRelation = ShapeRelation.getRelationByName(parser.text());
 if (shapeRelation == null) {
-throw new QueryParsingException(parseContext, "Unknown shape operation [" + parser.text() + " ]");
+throw new ParsingException(parseContext, "Unknown shape operation [" + parser.text() + " ]");
 }
 } else if ("indexed_shape".equals(currentFieldName) || "indexedShape".equals(currentFieldName)) {
 while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {

@@ -110,15 +110,15 @@ public class GeoShapeQueryParser implements QueryParser {
 }
 }
 if (id == null) {
-throw new QueryParsingException(parseContext, "ID for indexed shape not provided");
+throw new ParsingException(parseContext, "ID for indexed shape not provided");
 } else if (type == null) {
-throw new QueryParsingException(parseContext, "Type for indexed shape not provided");
+throw new ParsingException(parseContext, "Type for indexed shape not provided");
 }
 GetRequest getRequest = new GetRequest(index, type, id);
 getRequest.copyContextAndHeadersFrom(SearchContext.current());
 shape = fetchService.fetch(getRequest, shapePath);
 } else {
-throw new QueryParsingException(parseContext, "[geo_shape] query does not support [" + currentFieldName + "]");
+throw new ParsingException(parseContext, "[geo_shape] query does not support [" + currentFieldName + "]");
 }
 }
 }

@@ -128,25 +128,25 @@ public class GeoShapeQueryParser implements QueryParser {
 } else if ("_name".equals(currentFieldName)) {
 queryName = parser.text();
 } else {
-throw new QueryParsingException(parseContext, "[geo_shape] query does not support [" + currentFieldName + "]");
+throw new ParsingException(parseContext, "[geo_shape] query does not support [" + currentFieldName + "]");
 }
 }
 }
 if (shape == null) {
-throw new QueryParsingException(parseContext, "No Shape defined");
+throw new ParsingException(parseContext, "No Shape defined");
 } else if (shapeRelation == null) {
-throw new QueryParsingException(parseContext, "No Shape Relation defined");
+throw new ParsingException(parseContext, "No Shape Relation defined");
 }
 MappedFieldType fieldType = parseContext.fieldMapper(fieldName);
 if (fieldType == null) {
-throw new QueryParsingException(parseContext, "Failed to find geo_shape field [" + fieldName + "]");
+throw new ParsingException(parseContext, "Failed to find geo_shape field [" + fieldName + "]");
 }
 // TODO: This isn't the nicest way to check this
 if (!(fieldType instanceof GeoShapeFieldMapper.GeoShapeFieldType)) {
-throw new QueryParsingException(parseContext, "Field [" + fieldName + "] is not a geo_shape");
+throw new ParsingException(parseContext, "Field [" + fieldName + "] is not a geo_shape");
 }
 GeoShapeFieldMapper.GeoShapeFieldType shapeFieldType = (GeoShapeFieldMapper.GeoShapeFieldType) fieldType;
@@ -23,6 +23,7 @@ import org.apache.lucene.search.Query;
 import org.apache.lucene.util.XGeoHashUtils;
 import org.elasticsearch.ElasticsearchParseException;
 import org.elasticsearch.common.Nullable;
+import org.elasticsearch.common.ParsingException;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.geo.GeoPoint;
 import org.elasticsearch.common.geo.GeoUtils;

@@ -180,7 +181,7 @@ public class GeohashCellQuery {
 }
 @Override
-public Query parse(QueryParseContext parseContext) throws IOException, QueryParsingException {
+public Query parse(QueryParseContext parseContext) throws IOException, ParsingException {
 XContentParser parser = parseContext.parser();
 String fieldName = null;

@@ -232,21 +233,21 @@ public class GeohashCellQuery {
 }
 if (geohash == null) {
-throw new QueryParsingException(parseContext, "failed to parse [{}] query. missing geohash value", NAME);
+throw new ParsingException(parseContext, "failed to parse [{}] query. missing geohash value", NAME);
 }
 MappedFieldType fieldType = parseContext.fieldMapper(fieldName);
 if (fieldType == null) {
-throw new QueryParsingException(parseContext, "failed to parse [{}] query. missing [{}] field [{}]", NAME, GeoPointFieldMapper.CONTENT_TYPE, fieldName);
+throw new ParsingException(parseContext, "failed to parse [{}] query. missing [{}] field [{}]", NAME, GeoPointFieldMapper.CONTENT_TYPE, fieldName);
 }
 if (!(fieldType instanceof GeoPointFieldMapper.GeoPointFieldType)) {
-throw new QueryParsingException(parseContext, "failed to parse [{}] query. field [{}] is not a geo_point field", NAME, fieldName);
+throw new ParsingException(parseContext, "failed to parse [{}] query. field [{}] is not a geo_point field", NAME, fieldName);
 }
 GeoPointFieldMapper.GeoPointFieldType geoFieldType = ((GeoPointFieldMapper.GeoPointFieldType) fieldType);
 if (!geoFieldType.isGeohashPrefixEnabled()) {
-throw new QueryParsingException(parseContext, "failed to parse [{}] query. [geohash_prefix] is not enabled for field [{}]", NAME, fieldName);
+throw new ParsingException(parseContext, "failed to parse [{}] query. [geohash_prefix] is not enabled for field [{}]", NAME, fieldName);
 }
 if(levels > 0) {
@@ -26,6 +26,7 @@ import org.apache.lucene.search.Query;
 import org.apache.lucene.search.join.JoinUtil;
 import org.apache.lucene.search.join.ScoreMode;
 import org.elasticsearch.common.ParseField;
+import org.elasticsearch.common.ParsingException;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.lucene.search.Queries;

@@ -62,7 +63,7 @@ public class HasChildQueryParser implements QueryParser {
 }
 @Override
-public Query parse(QueryParseContext parseContext) throws IOException, QueryParsingException {
+public Query parse(QueryParseContext parseContext) throws IOException, ParsingException {
 XContentParser parser = parseContext.parser();
 boolean queryFound = false;

@@ -93,7 +94,7 @@ public class HasChildQueryParser implements QueryParser {
 } else if ("inner_hits".equals(currentFieldName)) {
 innerHits = innerHitsQueryParserHelper.parse(parseContext);
 } else {
-throw new QueryParsingException(parseContext, "[has_child] query does not support [" + currentFieldName + "]");
+throw new ParsingException(parseContext, "[has_child] query does not support [" + currentFieldName + "]");
 }
 } else if (token.isValue()) {
 if ("type".equals(currentFieldName) || "child_type".equals(currentFieldName) || "childType".equals(currentFieldName)) {

@@ -109,15 +110,15 @@ public class HasChildQueryParser implements QueryParser {
 } else if ("_name".equals(currentFieldName)) {
 queryName = parser.text();
 } else {
-throw new QueryParsingException(parseContext, "[has_child] query does not support [" + currentFieldName + "]");
+throw new ParsingException(parseContext, "[has_child] query does not support [" + currentFieldName + "]");
 }
 }
 }
 if (!queryFound) {
-throw new QueryParsingException(parseContext, "[has_child] requires 'query' field");
+throw new ParsingException(parseContext, "[has_child] requires 'query' field");
 }
 if (childType == null) {
-throw new QueryParsingException(parseContext, "[has_child] requires 'type' field");
+throw new ParsingException(parseContext, "[has_child] requires 'type' field");
 }
 Query innerQuery = iq.asQuery(childType);

@@ -129,11 +130,11 @@ public class HasChildQueryParser implements QueryParser {
 DocumentMapper childDocMapper = parseContext.mapperService().documentMapper(childType);
 if (childDocMapper == null) {
-throw new QueryParsingException(parseContext, "[has_child] No mapping for for type [" + childType + "]");
+throw new ParsingException(parseContext, "[has_child] No mapping for for type [" + childType + "]");
 }
 ParentFieldMapper parentFieldMapper = childDocMapper.parentFieldMapper();
 if (parentFieldMapper.active() == false) {
-throw new QueryParsingException(parseContext, "[has_child] _parent field has no parent type configured");
+throw new ParsingException(parseContext, "[has_child] _parent field has no parent type configured");
 }
 if (innerHits != null) {

@@ -146,12 +147,12 @@ public class HasChildQueryParser implements QueryParser {
 String parentType = parentFieldMapper.type();
 DocumentMapper parentDocMapper = parseContext.mapperService().documentMapper(parentType);
 if (parentDocMapper == null) {
-throw new QueryParsingException(parseContext, "[has_child] Type [" + childType + "] points to a non existent parent type ["
+throw new ParsingException(parseContext, "[has_child] Type [" + childType + "] points to a non existent parent type ["
 + parentType + "]");
 }
 if (maxChildren > 0 && maxChildren < minChildren) {
-throw new QueryParsingException(parseContext, "[has_child] 'max_children' is less than 'min_children'");
+throw new ParsingException(parseContext, "[has_child] 'max_children' is less than 'min_children'");
 }
 // wrap the query with type query
@@ -21,6 +21,7 @@ package org.elasticsearch.index.query;
 import org.apache.lucene.search.*;
 import org.apache.lucene.search.join.ScoreMode;
 import org.elasticsearch.common.ParseField;
+import org.elasticsearch.common.ParsingException;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.lucene.search.Queries;

@@ -57,7 +58,7 @@ public class HasParentQueryParser implements QueryParser {
 }
 @Override
-public Query parse(QueryParseContext parseContext) throws IOException, QueryParsingException {
+public Query parse(QueryParseContext parseContext) throws IOException, ParsingException {
 XContentParser parser = parseContext.parser();
 boolean queryFound = false;

@@ -84,7 +85,7 @@ public class HasParentQueryParser implements QueryParser {
 } else if ("inner_hits".equals(currentFieldName)) {
 innerHits = innerHitsQueryParserHelper.parse(parseContext);
 } else {
-throw new QueryParsingException(parseContext, "[has_parent] query does not support [" + currentFieldName + "]");
+throw new ParsingException(parseContext, "[has_parent] query does not support [" + currentFieldName + "]");
 }
 } else if (token.isValue()) {
 if ("type".equals(currentFieldName) || "parent_type".equals(currentFieldName) || "parentType".equals(currentFieldName)) {

@@ -101,15 +102,15 @@ public class HasParentQueryParser implements QueryParser {
 } else if ("_name".equals(currentFieldName)) {
 queryName = parser.text();
 } else {
-throw new QueryParsingException(parseContext, "[has_parent] query does not support [" + currentFieldName + "]");
+throw new ParsingException(parseContext, "[has_parent] query does not support [" + currentFieldName + "]");
 }
 }
 }
 if (!queryFound) {
-throw new QueryParsingException(parseContext, "[has_parent] query requires 'query' field");
+throw new ParsingException(parseContext, "[has_parent] query requires 'query' field");
 }
 if (parentType == null) {
-throw new QueryParsingException(parseContext, "[has_parent] query requires 'parent_type' field");
+throw new ParsingException(parseContext, "[has_parent] query requires 'parent_type' field");
 }
 Query innerQuery = iq.asQuery(parentType);

@@ -134,7 +135,7 @@ public class HasParentQueryParser implements QueryParser {
 static Query createParentQuery(Query innerQuery, String parentType, boolean score, QueryParseContext parseContext, InnerHitsSubSearchContext innerHits) throws IOException {
 DocumentMapper parentDocMapper = parseContext.mapperService().documentMapper(parentType);
 if (parentDocMapper == null) {
-throw new QueryParsingException(parseContext, "[has_parent] query configured 'parent_type' [" + parentType
+throw new ParsingException(parseContext, "[has_parent] query configured 'parent_type' [" + parentType
 + "] is not a valid type");
 }

@@ -160,7 +161,7 @@ public class HasParentQueryParser implements QueryParser {
 }
 }
 if (parentChildIndexFieldData == null) {
-throw new QueryParsingException(parseContext, "[has_parent] no _parent field configured");
+throw new ParsingException(parseContext, "[has_parent] no _parent field configured");
 }
 Query parentTypeQuery = null;
@@ -22,6 +22,7 @@ package org.elasticsearch.index.query;
import org.apache.lucene.queries.TermsQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.common.util.iterable.Iterables;
@@ -52,7 +53,7 @@ public class IdsQueryParser implements QueryParser {
}

@Override
public Query parse(QueryParseContext parseContext) throws IOException, QueryParsingException {
public Query parse(QueryParseContext parseContext) throws IOException, ParsingException {
XContentParser parser = parseContext.parser();

List<BytesRef> ids = new ArrayList<>();
@@ -73,11 +74,11 @@ public class IdsQueryParser implements QueryParser {
(token == XContentParser.Token.VALUE_NUMBER)) {
BytesRef value = parser.utf8BytesOrNull();
if (value == null) {
throw new QueryParsingException(parseContext, "No value specified for term filter");
throw new ParsingException(parseContext, "No value specified for term filter");
}
ids.add(value);
} else {
throw new QueryParsingException(parseContext, "Illegal value for id, expecting a string or number, got: "
throw new ParsingException(parseContext, "Illegal value for id, expecting a string or number, got: "
+ token);
}
}
@@ -86,12 +87,12 @@ public class IdsQueryParser implements QueryParser {
while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
String value = parser.textOrNull();
if (value == null) {
throw new QueryParsingException(parseContext, "No type specified for term filter");
throw new ParsingException(parseContext, "No type specified for term filter");
}
types.add(value);
}
} else {
throw new QueryParsingException(parseContext, "[ids] query does not support [" + currentFieldName + "]");
throw new ParsingException(parseContext, "[ids] query does not support [" + currentFieldName + "]");
}
} else if (token.isValue()) {
if ("type".equals(currentFieldName) || "_type".equals(currentFieldName)) {
@@ -101,13 +102,13 @@ public class IdsQueryParser implements QueryParser {
} else if ("_name".equals(currentFieldName)) {
queryName = parser.text();
} else {
throw new QueryParsingException(parseContext, "[ids] query does not support [" + currentFieldName + "]");
throw new ParsingException(parseContext, "[ids] query does not support [" + currentFieldName + "]");
}
}
}

if (!idsProvided) {
throw new QueryParsingException(parseContext, "[ids] query, no ids values provided");
throw new ParsingException(parseContext, "[ids] query, no ids values provided");
}

if (ids.isEmpty()) {

@ -24,6 +24,7 @@ import org.apache.lucene.util.CloseableThreadLocal;
|
|||
import org.elasticsearch.Version;
|
||||
import org.elasticsearch.common.Nullable;
|
||||
import org.elasticsearch.common.ParseFieldMatcher;
|
||||
import org.elasticsearch.common.ParsingException;
|
||||
import org.elasticsearch.common.bytes.BytesReference;
|
||||
import org.elasticsearch.common.inject.Inject;
|
||||
import org.elasticsearch.common.lucene.search.Queries;
|
||||
|
@ -126,10 +127,10 @@ public class IndexQueryParserService extends AbstractIndexComponent {
|
|||
BytesReference bytes = queryBuilder.buildAsBytes();
|
||||
parser = XContentFactory.xContent(bytes).createParser(bytes);
|
||||
return parse(cache.get(), parser);
|
||||
} catch (QueryParsingException e) {
|
||||
} catch (ParsingException e) {
|
||||
throw e;
|
||||
} catch (Exception e) {
|
||||
throw new QueryParsingException(getParseContext(), "Failed to parse", e);
|
||||
throw new ParsingException(getParseContext(), "Failed to parse", e);
|
||||
} finally {
|
||||
if (parser != null) {
|
||||
parser.close();
|
||||
|
@ -146,10 +147,10 @@ public class IndexQueryParserService extends AbstractIndexComponent {
|
|||
try {
|
||||
parser = XContentFactory.xContent(source, offset, length).createParser(source, offset, length);
|
||||
return parse(cache.get(), parser);
|
||||
} catch (QueryParsingException e) {
|
||||
} catch (ParsingException e) {
|
||||
throw e;
|
||||
} catch (Exception e) {
|
||||
throw new QueryParsingException(getParseContext(), "Failed to parse", e);
|
||||
throw new ParsingException(getParseContext(), "Failed to parse", e);
|
||||
} finally {
|
||||
if (parser != null) {
|
||||
parser.close();
|
||||
|
@ -166,10 +167,10 @@ public class IndexQueryParserService extends AbstractIndexComponent {
|
|||
try {
|
||||
parser = XContentFactory.xContent(source).createParser(source);
|
||||
return innerParse(context, parser);
|
||||
} catch (QueryParsingException e) {
|
||||
} catch (ParsingException e) {
|
||||
throw e;
|
||||
} catch (Exception e) {
|
||||
throw new QueryParsingException(context, "Failed to parse", e);
|
||||
throw new ParsingException(context, "Failed to parse", e);
|
||||
} finally {
|
||||
if (parser != null) {
|
||||
parser.close();
|
||||
|
@ -177,15 +178,15 @@ public class IndexQueryParserService extends AbstractIndexComponent {
|
|||
}
|
||||
}
|
||||
|
||||
public ParsedQuery parse(String source) throws QueryParsingException {
|
||||
public ParsedQuery parse(String source) throws ParsingException {
|
||||
XContentParser parser = null;
|
||||
try {
|
||||
parser = XContentFactory.xContent(source).createParser(source);
|
||||
return innerParse(cache.get(), parser);
|
||||
} catch (QueryParsingException e) {
|
||||
} catch (ParsingException e) {
|
||||
throw e;
|
||||
} catch (Exception e) {
|
||||
throw new QueryParsingException(getParseContext(), "Failed to parse [" + source + "]", e);
|
||||
throw new ParsingException(getParseContext(), "Failed to parse [" + source + "]", e);
|
||||
} finally {
|
||||
if (parser != null) {
|
||||
parser.close();
|
||||
|
@ -201,7 +202,7 @@ public class IndexQueryParserService extends AbstractIndexComponent {
|
|||
try {
|
||||
return innerParse(context, parser);
|
||||
} catch (IOException e) {
|
||||
throw new QueryParsingException(context, "Failed to parse", e);
|
||||
throw new ParsingException(context, "Failed to parse", e);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -276,23 +277,23 @@ public class IndexQueryParserService extends AbstractIndexComponent {
|
|||
XContentParser qSourceParser = XContentFactory.xContent(querySource).createParser(querySource);
|
||||
parsedQuery = parse(qSourceParser);
|
||||
} else {
|
||||
throw new QueryParsingException(getParseContext(), "request does not support [" + fieldName + "]");
|
||||
throw new ParsingException(getParseContext(), "request does not support [" + fieldName + "]");
|
||||
}
|
||||
}
|
||||
}
|
||||
if (parsedQuery != null) {
|
||||
return parsedQuery;
|
||||
}
|
||||
} catch (QueryParsingException e) {
|
||||
} catch (ParsingException e) {
|
||||
throw e;
|
||||
} catch (Throwable e) {
|
||||
throw new QueryParsingException(getParseContext(), "Failed to parse", e);
|
||||
throw new ParsingException(getParseContext(), "Failed to parse", e);
|
||||
}
|
||||
|
||||
throw new QueryParsingException(getParseContext(), "Required query is missing");
|
||||
throw new ParsingException(getParseContext(), "Required query is missing");
|
||||
}
|
||||
|
||||
private ParsedQuery innerParse(QueryParseContext parseContext, XContentParser parser) throws IOException, QueryParsingException {
|
||||
private ParsedQuery innerParse(QueryParseContext parseContext, XContentParser parser) throws IOException, ParsingException {
|
||||
parseContext.reset(parser);
|
||||
try {
|
||||
parseContext.parseFieldMatcher(parseFieldMatcher);
|
||||
|
|
|
@ -25,6 +25,7 @@ import org.elasticsearch.cluster.ClusterService;
|
|||
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
|
||||
import org.elasticsearch.common.Nullable;
|
||||
import org.elasticsearch.common.ParseField;
|
||||
import org.elasticsearch.common.ParsingException;
|
||||
import org.elasticsearch.common.inject.Inject;
|
||||
import org.elasticsearch.common.lucene.search.Queries;
|
||||
import org.elasticsearch.common.regex.Regex;
|
||||
|
@ -59,7 +60,7 @@ public class IndicesQueryParser implements QueryParser {
|
|||
}
|
||||
|
||||
@Override
|
||||
public Query parse(QueryParseContext parseContext) throws IOException, QueryParsingException {
|
||||
public Query parse(QueryParseContext parseContext) throws IOException, ParsingException {
|
||||
XContentParser parser = parseContext.parser();
|
||||
|
||||
Query noMatchQuery = null;
|
||||
|
@ -82,30 +83,30 @@ public class IndicesQueryParser implements QueryParser {
|
|||
} else if (parseContext.parseFieldMatcher().match(currentFieldName, NO_MATCH_QUERY)) {
|
||||
innerNoMatchQuery = new XContentStructure.InnerQuery(parseContext, (String[])null);
|
||||
} else {
|
||||
throw new QueryParsingException(parseContext, "[indices] query does not support [" + currentFieldName + "]");
|
||||
throw new ParsingException(parseContext, "[indices] query does not support [" + currentFieldName + "]");
|
||||
}
|
||||
} else if (token == XContentParser.Token.START_ARRAY) {
|
||||
if ("indices".equals(currentFieldName)) {
|
||||
if (indicesFound) {
|
||||
throw new QueryParsingException(parseContext, "[indices] indices or index already specified");
|
||||
throw new ParsingException(parseContext, "[indices] indices or index already specified");
|
||||
}
|
||||
indicesFound = true;
|
||||
Collection<String> indices = new ArrayList<>();
|
||||
while (parser.nextToken() != XContentParser.Token.END_ARRAY) {
|
||||
String value = parser.textOrNull();
|
||||
if (value == null) {
|
||||
throw new QueryParsingException(parseContext, "[indices] no value specified for 'indices' entry");
|
||||
throw new ParsingException(parseContext, "[indices] no value specified for 'indices' entry");
|
||||
}
|
||||
indices.add(value);
|
||||
}
|
||||
currentIndexMatchesIndices = matchesIndices(parseContext.index().name(), indices.toArray(new String[indices.size()]));
|
||||
} else {
|
||||
throw new QueryParsingException(parseContext, "[indices] query does not support [" + currentFieldName + "]");
|
||||
throw new ParsingException(parseContext, "[indices] query does not support [" + currentFieldName + "]");
|
||||
}
|
||||
} else if (token.isValue()) {
|
||||
if ("index".equals(currentFieldName)) {
|
||||
if (indicesFound) {
|
||||
throw new QueryParsingException(parseContext, "[indices] indices or index already specified");
|
||||
throw new ParsingException(parseContext, "[indices] indices or index already specified");
|
||||
}
|
||||
indicesFound = true;
|
||||
currentIndexMatchesIndices = matchesIndices(parseContext.index().name(), parser.text());
|
||||
|
@ -119,15 +120,15 @@ public class IndicesQueryParser implements QueryParser {
|
|||
} else if ("_name".equals(currentFieldName)) {
|
||||
queryName = parser.text();
|
||||
} else {
|
||||
throw new QueryParsingException(parseContext, "[indices] query does not support [" + currentFieldName + "]");
|
||||
throw new ParsingException(parseContext, "[indices] query does not support [" + currentFieldName + "]");
|
||||
}
|
||||
}
|
||||
}
|
||||
if (!queryFound) {
|
||||
throw new QueryParsingException(parseContext, "[indices] requires 'query' element");
|
||||
throw new ParsingException(parseContext, "[indices] requires 'query' element");
|
||||
}
|
||||
if (!indicesFound) {
|
||||
throw new QueryParsingException(parseContext, "[indices] requires 'indices' or 'index' element");
|
||||
throw new ParsingException(parseContext, "[indices] requires 'indices' or 'index' element");
|
||||
}
|
||||
|
||||
Query chosenQuery;
|
||||
|
|
|
@@ -21,6 +21,7 @@ package org.elasticsearch.index.query;

import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.search.Query;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.lucene.search.Queries;
@@ -45,7 +46,7 @@ public class MatchAllQueryParser implements QueryParser {
}

@Override
public Query parse(QueryParseContext parseContext) throws IOException, QueryParsingException {
public Query parse(QueryParseContext parseContext) throws IOException, ParsingException {
XContentParser parser = parseContext.parser();

float boost = 1.0f;
@@ -59,7 +60,7 @@ public class MatchAllQueryParser implements QueryParser {
if ("boost".equals(currentFieldName)) {
boost = parser.floatValue();
} else {
throw new QueryParsingException(parseContext, "[match_all] query does not support [" + currentFieldName + "]");
throw new ParsingException(parseContext, "[match_all] query does not support [" + currentFieldName + "]");
}
}
}

@ -23,6 +23,7 @@ import org.apache.lucene.queries.ExtendedCommonTermsQuery;
|
|||
import org.apache.lucene.search.BooleanClause;
|
||||
import org.apache.lucene.search.BooleanQuery;
|
||||
import org.apache.lucene.search.Query;
|
||||
import org.elasticsearch.common.ParsingException;
|
||||
import org.elasticsearch.common.inject.Inject;
|
||||
import org.elasticsearch.common.lucene.search.Queries;
|
||||
import org.elasticsearch.common.unit.Fuzziness;
|
||||
|
@ -51,7 +52,7 @@ public class MatchQueryParser implements QueryParser {
|
|||
}
|
||||
|
||||
@Override
|
||||
public Query parse(QueryParseContext parseContext) throws IOException, QueryParsingException {
|
||||
public Query parse(QueryParseContext parseContext) throws IOException, ParsingException {
|
||||
XContentParser parser = parseContext.parser();
|
||||
|
||||
MatchQuery.Type type = MatchQuery.Type.BOOLEAN;
|
||||
|
@ -65,7 +66,7 @@ public class MatchQueryParser implements QueryParser {
|
|||
|
||||
XContentParser.Token token = parser.nextToken();
|
||||
if (token != XContentParser.Token.FIELD_NAME) {
|
||||
throw new QueryParsingException(parseContext, "[match] query malformed, no field");
|
||||
throw new ParsingException(parseContext, "[match] query malformed, no field");
|
||||
}
|
||||
String fieldName = parser.currentName();
|
||||
|
||||
|
@ -93,12 +94,12 @@ public class MatchQueryParser implements QueryParser {
|
|||
} else if ("phrase_prefix".equals(tStr) || "phrasePrefix".equals(currentFieldName)) {
|
||||
type = MatchQuery.Type.PHRASE_PREFIX;
|
||||
} else {
|
||||
throw new QueryParsingException(parseContext, "[match] query does not support type " + tStr);
|
||||
throw new ParsingException(parseContext, "[match] query does not support type " + tStr);
|
||||
}
|
||||
} else if ("analyzer".equals(currentFieldName)) {
|
||||
String analyzer = parser.text();
|
||||
if (parseContext.analysisService().analyzer(analyzer) == null) {
|
||||
throw new QueryParsingException(parseContext, "[match] analyzer [" + parser.text() + "] not found");
|
||||
throw new ParsingException(parseContext, "[match] analyzer [" + parser.text() + "] not found");
|
||||
}
|
||||
matchQuery.setAnalyzer(analyzer);
|
||||
} else if ("boost".equals(currentFieldName)) {
|
||||
|
@ -118,7 +119,7 @@ public class MatchQueryParser implements QueryParser {
|
|||
} else if ("and".equalsIgnoreCase(op)) {
|
||||
matchQuery.setOccur(BooleanClause.Occur.MUST);
|
||||
} else {
|
||||
throw new QueryParsingException(parseContext, "text query requires operator to be either 'and' or 'or', not ["
|
||||
throw new ParsingException(parseContext, "text query requires operator to be either 'and' or 'or', not ["
|
||||
+ op + "]");
|
||||
}
|
||||
} else if ("minimum_should_match".equals(currentFieldName) || "minimumShouldMatch".equals(currentFieldName)) {
|
||||
|
@ -138,12 +139,12 @@ public class MatchQueryParser implements QueryParser {
|
|||
} else if ("all".equalsIgnoreCase(zeroTermsDocs)) {
|
||||
matchQuery.setZeroTermsQuery(MatchQuery.ZeroTermsQuery.ALL);
|
||||
} else {
|
||||
throw new QueryParsingException(parseContext, "Unsupported zero_terms_docs value [" + zeroTermsDocs + "]");
|
||||
throw new ParsingException(parseContext, "Unsupported zero_terms_docs value [" + zeroTermsDocs + "]");
|
||||
}
|
||||
} else if ("_name".equals(currentFieldName)) {
|
||||
queryName = parser.text();
|
||||
} else {
|
||||
throw new QueryParsingException(parseContext, "[match] query does not support [" + currentFieldName + "]");
|
||||
throw new ParsingException(parseContext, "[match] query does not support [" + currentFieldName + "]");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -153,13 +154,13 @@ public class MatchQueryParser implements QueryParser {
|
|||
// move to the next token
|
||||
token = parser.nextToken();
|
||||
if (token != XContentParser.Token.END_OBJECT) {
|
||||
throw new QueryParsingException(parseContext,
|
||||
throw new ParsingException(parseContext,
|
||||
"[match] query parsed in simplified form, with direct field name, but included more options than just the field name, possibly use its 'options' form, with 'query' element?");
|
||||
}
|
||||
}
|
||||
|
||||
if (value == null) {
|
||||
throw new QueryParsingException(parseContext, "No text specified for text query");
|
||||
throw new ParsingException(parseContext, "No text specified for text query");
|
||||
}
|
||||
|
||||
Query query = matchQuery.parse(type, fieldName, value);
|
||||
|
|
|
@ -24,6 +24,7 @@ import org.apache.lucene.search.BooleanQuery;
|
|||
import org.apache.lucene.search.ConstantScoreQuery;
|
||||
import org.apache.lucene.search.Query;
|
||||
import org.apache.lucene.search.TermRangeQuery;
|
||||
import org.elasticsearch.common.ParsingException;
|
||||
import org.elasticsearch.common.inject.Inject;
|
||||
import org.elasticsearch.common.lucene.search.Queries;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
|
@ -53,7 +54,7 @@ public class MissingQueryParser implements QueryParser {
|
|||
}
|
||||
|
||||
@Override
|
||||
public Query parse(QueryParseContext parseContext) throws IOException, QueryParsingException {
|
||||
public Query parse(QueryParseContext parseContext) throws IOException, ParsingException {
|
||||
XContentParser parser = parseContext.parser();
|
||||
|
||||
String fieldPattern = null;
|
||||
|
@ -76,13 +77,13 @@ public class MissingQueryParser implements QueryParser {
|
|||
} else if ("_name".equals(currentFieldName)) {
|
||||
queryName = parser.text();
|
||||
} else {
|
||||
throw new QueryParsingException(parseContext, "[missing] query does not support [" + currentFieldName + "]");
|
||||
throw new ParsingException(parseContext, "[missing] query does not support [" + currentFieldName + "]");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (fieldPattern == null) {
|
||||
throw new QueryParsingException(parseContext, "missing must be provided with a [field]");
|
||||
throw new ParsingException(parseContext, "missing must be provided with a [field]");
|
||||
}
|
||||
|
||||
return newFilter(parseContext, fieldPattern, existence, nullValue, queryName);
|
||||
|
@ -90,7 +91,7 @@ public class MissingQueryParser implements QueryParser {
|
|||
|
||||
public static Query newFilter(QueryParseContext parseContext, String fieldPattern, boolean existence, boolean nullValue, String queryName) {
|
||||
if (!existence && !nullValue) {
|
||||
throw new QueryParsingException(parseContext, "missing must have either existence, or null_value, or both set to true");
|
||||
throw new ParsingException(parseContext, "missing must have either existence, or null_value, or both set to true");
|
||||
}
|
||||
|
||||
final FieldNamesFieldMapper.FieldNamesFieldType fieldNamesFieldType = (FieldNamesFieldMapper.FieldNamesFieldType)parseContext.mapperService().fullName(FieldNamesFieldMapper.NAME);
|
||||
|
|
|
@ -28,6 +28,7 @@ import org.apache.lucene.util.BytesRef;
|
|||
import org.elasticsearch.action.termvectors.MultiTermVectorsResponse;
|
||||
import org.elasticsearch.common.Nullable;
|
||||
import org.elasticsearch.common.ParseField;
|
||||
import org.elasticsearch.common.ParsingException;
|
||||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.common.inject.Inject;
|
||||
import org.elasticsearch.common.lucene.search.MoreLikeThisQuery;
|
||||
|
@ -96,7 +97,7 @@ public class MoreLikeThisQueryParser implements QueryParser {
|
|||
}
|
||||
|
||||
@Override
|
||||
public Query parse(QueryParseContext parseContext) throws IOException, QueryParsingException {
|
||||
public Query parse(QueryParseContext parseContext) throws IOException, ParsingException {
|
||||
XContentParser parser = parseContext.parser();
|
||||
|
||||
MoreLikeThisQuery mltQuery = new MoreLikeThisQuery();
|
||||
|
@ -157,7 +158,7 @@ public class MoreLikeThisQueryParser implements QueryParser {
|
|||
} else if ("_name".equals(currentFieldName)) {
|
||||
queryName = parser.text();
|
||||
} else {
|
||||
throw new QueryParsingException(parseContext, "[mlt] query does not support [" + currentFieldName + "]");
|
||||
throw new ParsingException(parseContext, "[mlt] query does not support [" + currentFieldName + "]");
|
||||
}
|
||||
} else if (token == XContentParser.Token.START_ARRAY) {
|
||||
if (parseContext.parseFieldMatcher().match(currentFieldName, Field.FIELDS)) {
|
||||
|
@ -196,7 +197,7 @@ public class MoreLikeThisQueryParser implements QueryParser {
|
|||
}
|
||||
mltQuery.setStopWords(stopWords);
|
||||
} else {
|
||||
throw new QueryParsingException(parseContext, "[mlt] query does not support [" + currentFieldName + "]");
|
||||
throw new ParsingException(parseContext, "[mlt] query does not support [" + currentFieldName + "]");
|
||||
}
|
||||
} else if (token == XContentParser.Token.START_OBJECT) {
|
||||
if (parseContext.parseFieldMatcher().match(currentFieldName, Field.LIKE)) {
|
||||
|
@ -204,16 +205,16 @@ public class MoreLikeThisQueryParser implements QueryParser {
|
|||
} else if (parseContext.parseFieldMatcher().match(currentFieldName, Field.UNLIKE)) {
|
||||
parseLikeField(parseContext, unlikeTexts, unlikeItems);
|
||||
} else {
|
||||
throw new QueryParsingException(parseContext, "[mlt] query does not support [" + currentFieldName + "]");
|
||||
throw new ParsingException(parseContext, "[mlt] query does not support [" + currentFieldName + "]");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (likeTexts.isEmpty() && likeItems.isEmpty()) {
|
||||
throw new QueryParsingException(parseContext, "more_like_this requires 'like' to be specified");
|
||||
throw new ParsingException(parseContext, "more_like_this requires 'like' to be specified");
|
||||
}
|
||||
if (moreLikeFields != null && moreLikeFields.isEmpty()) {
|
||||
throw new QueryParsingException(parseContext, "more_like_this requires 'fields' to be non-empty");
|
||||
throw new ParsingException(parseContext, "more_like_this requires 'fields' to be non-empty");
|
||||
}
|
||||
|
||||
// set analyzer
|
||||
|
@ -322,7 +323,7 @@ public class MoreLikeThisQueryParser implements QueryParser {
|
|||
}
|
||||
if (item.type() == null) {
|
||||
if (parseContext.queryTypes().size() > 1) {
|
||||
throw new QueryParsingException(parseContext,
|
||||
throw new ParsingException(parseContext,
|
||||
"ambiguous type for item with id: " + item.id() + " and index: " + item.index());
|
||||
} else {
|
||||
item.type(parseContext.queryTypes().iterator().next());
|
||||
|
|
|
@ -21,6 +21,7 @@ package org.elasticsearch.index.query;
|
|||
|
||||
import org.apache.lucene.search.BooleanClause;
|
||||
import org.apache.lucene.search.Query;
|
||||
import org.elasticsearch.common.ParsingException;
|
||||
import org.elasticsearch.common.inject.Inject;
|
||||
import org.elasticsearch.common.regex.Regex;
|
||||
import org.elasticsearch.common.unit.Fuzziness;
|
||||
|
@ -52,7 +53,7 @@ public class MultiMatchQueryParser implements QueryParser {
|
|||
}
|
||||
|
||||
@Override
|
||||
public Query parse(QueryParseContext parseContext) throws IOException, QueryParsingException {
|
||||
public Query parse(QueryParseContext parseContext) throws IOException, ParsingException {
|
||||
XContentParser parser = parseContext.parser();
|
||||
|
||||
Object value = null;
|
||||
|
@ -77,7 +78,7 @@ public class MultiMatchQueryParser implements QueryParser {
|
|||
} else if (token.isValue()) {
|
||||
extractFieldAndBoost(parseContext, parser, fieldNameWithBoosts);
|
||||
} else {
|
||||
throw new QueryParsingException(parseContext, "[" + NAME + "] query does not support [" + currentFieldName + "]");
|
||||
throw new ParsingException(parseContext, "[" + NAME + "] query does not support [" + currentFieldName + "]");
|
||||
}
|
||||
} else if (token.isValue()) {
|
||||
if ("query".equals(currentFieldName)) {
|
||||
|
@ -87,7 +88,7 @@ public class MultiMatchQueryParser implements QueryParser {
|
|||
} else if ("analyzer".equals(currentFieldName)) {
|
||||
String analyzer = parser.text();
|
||||
if (parseContext.analysisService().analyzer(analyzer) == null) {
|
||||
throw new QueryParsingException(parseContext, "[" + NAME + "] analyzer [" + parser.text() + "] not found");
|
||||
throw new ParsingException(parseContext, "[" + NAME + "] analyzer [" + parser.text() + "] not found");
|
||||
}
|
||||
multiMatchQuery.setAnalyzer(analyzer);
|
||||
} else if ("boost".equals(currentFieldName)) {
|
||||
|
@ -107,7 +108,7 @@ public class MultiMatchQueryParser implements QueryParser {
|
|||
} else if ("and".equalsIgnoreCase(op)) {
|
||||
multiMatchQuery.setOccur(BooleanClause.Occur.MUST);
|
||||
} else {
|
||||
throw new QueryParsingException(parseContext, "text query requires operator to be either 'and' or 'or', not [" + op
|
||||
throw new ParsingException(parseContext, "text query requires operator to be either 'and' or 'or', not [" + op
|
||||
+ "]");
|
||||
}
|
||||
} else if ("minimum_should_match".equals(currentFieldName) || "minimumShouldMatch".equals(currentFieldName)) {
|
||||
|
@ -129,22 +130,22 @@ public class MultiMatchQueryParser implements QueryParser {
|
|||
} else if ("all".equalsIgnoreCase(zeroTermsDocs)) {
|
||||
multiMatchQuery.setZeroTermsQuery(MatchQuery.ZeroTermsQuery.ALL);
|
||||
} else {
|
||||
throw new QueryParsingException(parseContext, "Unsupported zero_terms_docs value [" + zeroTermsDocs + "]");
|
||||
throw new ParsingException(parseContext, "Unsupported zero_terms_docs value [" + zeroTermsDocs + "]");
|
||||
}
|
||||
} else if ("_name".equals(currentFieldName)) {
|
||||
queryName = parser.text();
|
||||
} else {
|
||||
throw new QueryParsingException(parseContext, "[match] query does not support [" + currentFieldName + "]");
|
||||
throw new ParsingException(parseContext, "[match] query does not support [" + currentFieldName + "]");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (value == null) {
|
||||
throw new QueryParsingException(parseContext, "No text specified for multi_match query");
|
||||
throw new ParsingException(parseContext, "No text specified for multi_match query");
|
||||
}
|
||||
|
||||
if (fieldNameWithBoosts.isEmpty()) {
|
||||
throw new QueryParsingException(parseContext, "No fields specified for multi_match query");
|
||||
throw new ParsingException(parseContext, "No fields specified for multi_match query");
|
||||
}
|
||||
if (type == null) {
|
||||
type = MultiMatchQueryBuilder.Type.BEST_FIELDS;
|
||||
|
|
|
@ -25,6 +25,7 @@ import org.apache.lucene.search.join.ScoreMode;
|
|||
import org.apache.lucene.search.join.ToParentBlockJoinQuery;
|
||||
import org.elasticsearch.common.Nullable;
|
||||
import org.elasticsearch.common.ParseField;
|
||||
import org.elasticsearch.common.ParsingException;
|
||||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.common.inject.Inject;
|
||||
import org.elasticsearch.common.lucene.search.Queries;
|
||||
|
@ -54,7 +55,7 @@ public class NestedQueryParser implements QueryParser {
|
|||
}
|
||||
|
||||
@Override
|
||||
public Query parse(QueryParseContext parseContext) throws IOException, QueryParsingException {
|
||||
public Query parse(QueryParseContext parseContext) throws IOException, ParsingException {
|
||||
XContentParser parser = parseContext.parser();
|
||||
final ToBlockJoinQueryBuilder builder = new ToBlockJoinQueryBuilder(parseContext);
|
||||
|
||||
|
@ -75,7 +76,7 @@ public class NestedQueryParser implements QueryParser {
|
|||
} else if ("inner_hits".equals(currentFieldName)) {
|
||||
builder.setInnerHits(innerHitsQueryParserHelper.parse(parseContext));
|
||||
} else {
|
||||
throw new QueryParsingException(parseContext, "[nested] query does not support [" + currentFieldName + "]");
|
||||
throw new ParsingException(parseContext, "[nested] query does not support [" + currentFieldName + "]");
|
||||
}
|
||||
} else if (token.isValue()) {
|
||||
if ("path".equals(currentFieldName)) {
|
||||
|
@ -95,12 +96,12 @@ public class NestedQueryParser implements QueryParser {
|
|||
} else if ("none".equals(sScoreMode)) {
|
||||
scoreMode = ScoreMode.None;
|
||||
} else {
|
||||
throw new QueryParsingException(parseContext, "illegal score_mode for nested query [" + sScoreMode + "]");
|
||||
throw new ParsingException(parseContext, "illegal score_mode for nested query [" + sScoreMode + "]");
|
||||
}
|
||||
} else if ("_name".equals(currentFieldName)) {
|
||||
queryName = parser.text();
|
||||
} else {
|
||||
throw new QueryParsingException(parseContext, "[nested] query does not support [" + currentFieldName + "]");
|
||||
throw new ParsingException(parseContext, "[nested] query does not support [" + currentFieldName + "]");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -146,7 +147,7 @@ public class NestedQueryParser implements QueryParser {
|
|||
innerQuery = null;
|
||||
}
|
||||
} else {
|
||||
throw new QueryParsingException(parseContext, "[nested] requires either 'query' or 'filter' field");
|
||||
throw new ParsingException(parseContext, "[nested] requires either 'query' or 'filter' field");
|
||||
}
|
||||
|
||||
if (innerHits != null) {
|
||||
|
|
|
@@ -21,6 +21,7 @@ package org.elasticsearch.index.query;

import org.apache.lucene.search.Query;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.common.xcontent.XContentParser;
@@ -45,7 +46,7 @@ public class NotQueryParser implements QueryParser {
}

@Override
public Query parse(QueryParseContext parseContext) throws IOException, QueryParsingException {
public Query parse(QueryParseContext parseContext) throws IOException, ParsingException {
XContentParser parser = parseContext.parser();

Query query = null;
@@ -72,13 +73,13 @@ public class NotQueryParser implements QueryParser {
if ("_name".equals(currentFieldName)) {
queryName = parser.text();
} else {
throw new QueryParsingException(parseContext, "[not] query does not support [" + currentFieldName + "]");
throw new ParsingException(parseContext, "[not] query does not support [" + currentFieldName + "]");
}
}
}

if (!queryFound) {
throw new QueryParsingException(parseContext, "filter is required when using `not` query");
throw new ParsingException(parseContext, "filter is required when using `not` query");
}

if (query == null) {

@ -24,6 +24,7 @@ import org.apache.lucene.search.MultiTermQuery;
|
|||
import org.apache.lucene.search.PrefixQuery;
|
||||
import org.apache.lucene.search.Query;
|
||||
import org.elasticsearch.common.ParseField;
|
||||
import org.elasticsearch.common.ParsingException;
|
||||
import org.elasticsearch.common.inject.Inject;
|
||||
import org.elasticsearch.common.lucene.BytesRefs;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
|
@ -51,7 +52,7 @@ public class PrefixQueryParser implements QueryParser {
|
|||
}
|
||||
|
||||
@Override
|
||||
public Query parse(QueryParseContext parseContext) throws IOException, QueryParsingException {
|
||||
public Query parse(QueryParseContext parseContext) throws IOException, ParsingException {
|
||||
XContentParser parser = parseContext.parser();
|
||||
|
||||
String fieldName = parser.currentName();
|
||||
|
@ -82,7 +83,7 @@ public class PrefixQueryParser implements QueryParser {
|
|||
} else if ("rewrite".equals(currentFieldName)) {
|
||||
rewriteMethod = parser.textOrNull();
|
||||
} else {
|
||||
throw new QueryParsingException(parseContext, "[regexp] query does not support [" + currentFieldName + "]");
|
||||
throw new ParsingException(parseContext, "[regexp] query does not support [" + currentFieldName + "]");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -97,7 +98,7 @@ public class PrefixQueryParser implements QueryParser {
|
|||
}
|
||||
|
||||
if (value == null) {
|
||||
throw new QueryParsingException(parseContext, "No value specified for prefix query");
|
||||
throw new ParsingException(parseContext, "No value specified for prefix query");
|
||||
}
|
||||
|
||||
MultiTermQuery.RewriteMethod method = QueryParsers.parseRewriteMethod(parseContext.parseFieldMatcher(), rewriteMethod, null);
|
||||
|
|
|
@@ -20,6 +20,7 @@
package org.elasticsearch.index.query;

import org.apache.lucene.search.Query;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.inject.Inject;

import java.io.IOException;
@@ -40,7 +41,7 @@ public class QueryFilterParser implements QueryParser {
}

@Override
public Query parse(QueryParseContext parseContext) throws IOException, QueryParsingException {
public Query parse(QueryParseContext parseContext) throws IOException, ParsingException {
return parseContext.parseInnerQuery();
}
}

@ -32,6 +32,7 @@ import org.elasticsearch.cluster.metadata.IndexMetaData;
|
|||
import org.elasticsearch.common.Nullable;
|
||||
import org.elasticsearch.common.ParseField;
|
||||
import org.elasticsearch.common.ParseFieldMatcher;
|
||||
import org.elasticsearch.common.ParsingException;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.index.Index;
|
||||
|
@ -210,7 +211,7 @@ public class QueryParseContext {
|
|||
public void addInnerHits(String name, InnerHitsContext.BaseInnerHits context) {
|
||||
SearchContext sc = SearchContext.current();
|
||||
if (sc == null) {
|
||||
throw new QueryParsingException(this, "inner_hits unsupported");
|
||||
throw new ParsingException(this, "inner_hits unsupported");
|
||||
}
|
||||
|
||||
InnerHitsContext innerHitsContext;
|
||||
|
@ -224,13 +225,13 @@ public class QueryParseContext {
|
|||
}
|
||||
|
||||
@Nullable
|
||||
public Query parseInnerQuery() throws QueryParsingException, IOException {
|
||||
public Query parseInnerQuery() throws ParsingException, IOException {
|
||||
// move to START object
|
||||
XContentParser.Token token;
|
||||
if (parser.currentToken() != XContentParser.Token.START_OBJECT) {
|
||||
token = parser.nextToken();
|
||||
if (token != XContentParser.Token.START_OBJECT) {
|
||||
throw new QueryParsingException(this, "[_na] query malformed, must start with start_object");
|
||||
throw new ParsingException(this, "[_na] query malformed, must start with start_object");
|
||||
}
|
||||
}
|
||||
token = parser.nextToken();
|
||||
|
@ -239,18 +240,18 @@ public class QueryParseContext {
|
|||
return null;
|
||||
}
|
||||
if (token != XContentParser.Token.FIELD_NAME) {
|
||||
throw new QueryParsingException(this, "[_na] query malformed, no field after start_object");
|
||||
throw new ParsingException(this, "[_na] query malformed, no field after start_object");
|
||||
}
|
||||
String queryName = parser.currentName();
|
||||
// move to the next START_OBJECT
|
||||
token = parser.nextToken();
|
||||
if (token != XContentParser.Token.START_OBJECT && token != XContentParser.Token.START_ARRAY) {
|
||||
throw new QueryParsingException(this, "[_na] query malformed, no field after start_object");
|
||||
throw new ParsingException(this, "[_na] query malformed, no field after start_object");
|
||||
}
|
||||
|
||||
QueryParser queryParser = indexQueryParser.queryParser(queryName);
|
||||
if (queryParser == null) {
|
||||
throw new QueryParsingException(this, "No query registered for [" + queryName + "]");
|
||||
throw new ParsingException(this, "No query registered for [" + queryName + "]");
|
||||
}
|
||||
Query result = queryParser.parse(this);
|
||||
if (parser.currentToken() == XContentParser.Token.END_OBJECT || parser.currentToken() == XContentParser.Token.END_ARRAY) {
|
||||
|
@ -261,7 +262,7 @@ public class QueryParseContext {
|
|||
}
|
||||
|
||||
@Nullable
|
||||
public Query parseInnerFilter() throws QueryParsingException, IOException {
|
||||
public Query parseInnerFilter() throws ParsingException, IOException {
|
||||
final boolean originalIsFilter = isFilter;
|
||||
try {
|
||||
isFilter = true;
|
||||
|
@ -271,13 +272,13 @@ public class QueryParseContext {
|
|||
}
|
||||
}
|
||||
|
||||
public Query parseInnerFilter(String queryName) throws IOException, QueryParsingException {
|
||||
public Query parseInnerFilter(String queryName) throws IOException, ParsingException {
|
||||
final boolean originalIsFilter = isFilter;
|
||||
try {
|
||||
isFilter = true;
|
||||
QueryParser queryParser = indexQueryParser.queryParser(queryName);
|
||||
if (queryParser == null) {
|
||||
throw new QueryParsingException(this, "No query registered for [" + queryName + "]");
|
||||
throw new ParsingException(this, "No query registered for [" + queryName + "]");
|
||||
}
|
||||
return queryParser.parse(this);
|
||||
} finally {
|
||||
|
@ -336,7 +337,7 @@ public class QueryParseContext {
|
|||
} else {
|
||||
Version indexCreatedVersion = indexQueryParser.getIndexCreatedVersion();
|
||||
if (fieldMapping == null && indexCreatedVersion.onOrAfter(Version.V_1_4_0_Beta1)) {
|
||||
throw new QueryParsingException(this, "Strict field resolution and no field mapping can be found for the field with name ["
|
||||
throw new ParsingException(this, "Strict field resolution and no field mapping can be found for the field with name ["
|
||||
+ name + "]");
|
||||
} else {
|
||||
return fieldMapping;
|
||||
|
|
|
@@ -21,6 +21,7 @@ package org.elasticsearch.index.query;

import org.apache.lucene.search.Query;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParsingException;

import java.io.IOException;

@@ -41,5 +42,5 @@ public interface QueryParser {
* Returns <tt>null</tt> if this query should be ignored in the context of the DSL.
*/
@Nullable
Query parse(QueryParseContext parseContext) throws IOException, QueryParsingException;
Query parse(QueryParseContext parseContext) throws IOException, ParsingException;
}

@ -25,6 +25,7 @@ import org.apache.lucene.queryparser.classic.QueryParserSettings;
|
|||
import org.apache.lucene.search.BooleanQuery;
|
||||
import org.apache.lucene.search.Query;
|
||||
import org.elasticsearch.common.ParseField;
|
||||
import org.elasticsearch.common.ParsingException;
|
||||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.common.inject.Inject;
|
||||
import org.elasticsearch.common.lucene.search.Queries;
|
||||
|
@ -66,7 +67,7 @@ public class QueryStringQueryParser implements QueryParser {
|
|||
}
|
||||
|
||||
@Override
|
||||
public Query parse(QueryParseContext parseContext) throws IOException, QueryParsingException {
|
||||
public Query parse(QueryParseContext parseContext) throws IOException, ParsingException {
|
||||
XContentParser parser = parseContext.parser();
|
||||
|
||||
String queryName = null;
|
||||
|
@ -125,7 +126,7 @@ public class QueryStringQueryParser implements QueryParser {
|
|||
}
|
||||
}
|
||||
} else {
|
||||
throw new QueryParsingException(parseContext, "[query_string] query does not support [" + currentFieldName
|
||||
throw new ParsingException(parseContext, "[query_string] query does not support [" + currentFieldName
|
||||
+ "]");
|
||||
}
|
||||
} else if (token.isValue()) {
|
||||
|
@ -140,18 +141,18 @@ public class QueryStringQueryParser implements QueryParser {
|
|||
} else if ("and".equalsIgnoreCase(op)) {
|
||||
qpSettings.defaultOperator(org.apache.lucene.queryparser.classic.QueryParser.Operator.AND);
|
||||
} else {
|
||||
throw new QueryParsingException(parseContext, "Query default operator [" + op + "] is not allowed");
|
||||
throw new ParsingException(parseContext, "Query default operator [" + op + "] is not allowed");
|
||||
}
|
||||
} else if ("analyzer".equals(currentFieldName)) {
|
||||
NamedAnalyzer analyzer = parseContext.analysisService().analyzer(parser.text());
|
||||
if (analyzer == null) {
|
||||
throw new QueryParsingException(parseContext, "[query_string] analyzer [" + parser.text() + "] not found");
|
||||
throw new ParsingException(parseContext, "[query_string] analyzer [" + parser.text() + "] not found");
|
||||
}
|
||||
qpSettings.forcedAnalyzer(analyzer);
|
||||
} else if ("quote_analyzer".equals(currentFieldName) || "quoteAnalyzer".equals(currentFieldName)) {
|
||||
NamedAnalyzer analyzer = parseContext.analysisService().analyzer(parser.text());
|
||||
if (analyzer == null) {
|
||||
throw new QueryParsingException(parseContext, "[query_string] quote_analyzer [" + parser.text()
|
||||
throw new ParsingException(parseContext, "[query_string] quote_analyzer [" + parser.text()
|
||||
+ "] not found");
|
||||
}
|
||||
qpSettings.forcedQuoteAnalyzer(analyzer);
|
||||
|
@ -200,19 +201,19 @@ public class QueryStringQueryParser implements QueryParser {
|
|||
try {
|
||||
qpSettings.timeZone(DateTimeZone.forID(parser.text()));
|
||||
} catch (IllegalArgumentException e) {
|
||||
throw new QueryParsingException(parseContext,
|
||||
throw new ParsingException(parseContext,
|
||||
"[query_string] time_zone [" + parser.text() + "] is unknown");
|
||||
}
|
||||
} else if ("_name".equals(currentFieldName)) {
|
||||
queryName = parser.text();
|
||||
} else {
|
||||
throw new QueryParsingException(parseContext, "[query_string] query does not support [" + currentFieldName
|
||||
throw new ParsingException(parseContext, "[query_string] query does not support [" + currentFieldName
|
||||
+ "]");
|
||||
}
|
||||
}
|
||||
}
|
||||
if (qpSettings.queryString() == null) {
|
||||
throw new QueryParsingException(parseContext, "query_string must be provided with a [query]");
|
||||
throw new ParsingException(parseContext, "query_string must be provided with a [query]");
|
||||
}
|
||||
qpSettings.defaultAnalyzer(parseContext.mapperService().searchAnalyzer());
|
||||
qpSettings.defaultQuoteAnalyzer(parseContext.mapperService().searchQuoteAnalyzer());
|
||||
|
@ -240,7 +241,7 @@ public class QueryStringQueryParser implements QueryParser {
|
|||
}
|
||||
return query;
|
||||
} catch (org.apache.lucene.queryparser.classic.ParseException e) {
|
||||
throw new QueryParsingException(parseContext, "Failed to parse query [" + qpSettings.queryString() + "]", e);
|
||||
throw new ParsingException(parseContext, "Failed to parse query [" + qpSettings.queryString() + "]", e);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -22,6 +22,7 @@ package org.elasticsearch.index.query;
|
|||
import org.apache.lucene.search.Query;
|
||||
import org.apache.lucene.search.TermRangeQuery;
|
||||
import org.elasticsearch.common.ParseField;
|
||||
import org.elasticsearch.common.ParsingException;
|
||||
import org.elasticsearch.common.inject.Inject;
|
||||
import org.elasticsearch.common.joda.DateMathParser;
|
||||
import org.elasticsearch.common.joda.Joda;
|
||||
|
@ -52,7 +53,7 @@ public class RangeQueryParser implements QueryParser {
|
|||
}
|
||||
|
||||
@Override
|
||||
public Query parse(QueryParseContext parseContext) throws IOException, QueryParsingException {
|
||||
public Query parse(QueryParseContext parseContext) throws IOException, ParsingException {
|
||||
XContentParser parser = parseContext.parser();
|
||||
|
||||
String fieldName = null;
|
||||
|
@ -105,7 +106,7 @@ public class RangeQueryParser implements QueryParser {
|
|||
} else if ("format".equals(currentFieldName)) {
|
||||
forcedDateParser = new DateMathParser(Joda.forPattern(parser.text()));
|
||||
} else {
|
||||
throw new QueryParsingException(parseContext, "[range] query does not support [" + currentFieldName + "]");
|
||||
throw new ParsingException(parseContext, "[range] query does not support [" + currentFieldName + "]");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -115,7 +116,7 @@ public class RangeQueryParser implements QueryParser {
|
|||
} else if (parseContext.parseFieldMatcher().match(currentFieldName, FIELDDATA_FIELD)) {
|
||||
// ignore
|
||||
} else {
|
||||
throw new QueryParsingException(parseContext, "[range] query does not support [" + currentFieldName + "]");
|
||||
throw new ParsingException(parseContext, "[range] query does not support [" + currentFieldName + "]");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -127,7 +128,7 @@ public class RangeQueryParser implements QueryParser {
|
|||
query = ((DateFieldMapper.DateFieldType) mapper).rangeQuery(from, to, includeLower, includeUpper, timeZone, forcedDateParser);
|
||||
} else {
|
||||
if (timeZone != null) {
|
||||
throw new QueryParsingException(parseContext, "[range] time_zone can not be applied to non date field ["
|
||||
throw new ParsingException(parseContext, "[range] time_zone can not be applied to non date field ["
|
||||
+ fieldName + "]");
|
||||
}
|
||||
//LUCENE 4 UPGRADE Mapper#rangeQuery should use bytesref as well?
|
||||
|
|
|
@@ -25,6 +25,7 @@ import org.apache.lucene.search.Query;
import org.apache.lucene.search.RegexpQuery;
import org.apache.lucene.util.automaton.Operations;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.lucene.BytesRefs;
import org.elasticsearch.common.xcontent.XContentParser;
@@ -54,7 +55,7 @@ public class RegexpQueryParser implements QueryParser {
}

@Override
public Query parse(QueryParseContext parseContext) throws IOException, QueryParsingException {
public Query parse(QueryParseContext parseContext) throws IOException, ParsingException {
XContentParser parser = parseContext.parser();

String fieldName = parser.currentName();
@@ -94,7 +95,7 @@ public class RegexpQueryParser implements QueryParser {
} else if ("_name".equals(currentFieldName)) {
queryName = parser.text();
} else {
throw new QueryParsingException(parseContext, "[regexp] query does not support [" + currentFieldName + "]");
throw new ParsingException(parseContext, "[regexp] query does not support [" + currentFieldName + "]");
}
}
}
@@ -109,7 +110,7 @@ public class RegexpQueryParser implements QueryParser {
}

if (value == null) {
throw new QueryParsingException(parseContext, "No value specified for regexp query");
throw new ParsingException(parseContext, "No value specified for regexp query");
}

MultiTermQuery.RewriteMethod method = QueryParsers.parseRewriteMethod(parseContext.parseFieldMatcher(), rewriteMethod, null);

@ -25,6 +25,7 @@ import org.apache.lucene.search.Query;
|
|||
import org.apache.lucene.search.RandomAccessWeight;
|
||||
import org.apache.lucene.search.Weight;
|
||||
import org.apache.lucene.util.Bits;
|
||||
import org.elasticsearch.common.ParsingException;
|
||||
import org.elasticsearch.common.inject.Inject;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.script.LeafSearchScript;
|
||||
|
@ -59,7 +60,7 @@ public class ScriptQueryParser implements QueryParser {
|
|||
}
|
||||
|
||||
@Override
|
||||
public Query parse(QueryParseContext parseContext) throws IOException, QueryParsingException {
|
||||
public Query parse(QueryParseContext parseContext) throws IOException, ParsingException {
|
||||
XContentParser parser = parseContext.parser();
|
||||
ScriptParameterParser scriptParameterParser = new ScriptParameterParser();
|
||||
|
||||
|
@ -83,13 +84,13 @@ public class ScriptQueryParser implements QueryParser {
|
|||
} else if ("params".equals(currentFieldName)) { // TODO remove in 3.0 (here to support old script APIs)
|
||||
params = parser.map();
|
||||
} else {
|
||||
throw new QueryParsingException(parseContext, "[script] query does not support [" + currentFieldName + "]");
|
||||
throw new ParsingException(parseContext, "[script] query does not support [" + currentFieldName + "]");
|
||||
}
|
||||
} else if (token.isValue()) {
|
||||
if ("_name".equals(currentFieldName)) {
|
||||
queryName = parser.text();
|
||||
} else if (!scriptParameterParser.token(currentFieldName, token, parser, parseContext.parseFieldMatcher())) {
|
||||
throw new QueryParsingException(parseContext, "[script] query does not support [" + currentFieldName + "]");
|
||||
throw new ParsingException(parseContext, "[script] query does not support [" + currentFieldName + "]");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -103,11 +104,11 @@ public class ScriptQueryParser implements QueryParser {
|
|||
script = new Script(scriptValue.script(), scriptValue.scriptType(), scriptParameterParser.lang(), params);
|
||||
}
|
||||
} else if (params != null) {
|
||||
throw new QueryParsingException(parseContext, "script params must be specified inside script object in a [script] filter");
|
||||
throw new ParsingException(parseContext, "script params must be specified inside script object in a [script] filter");
|
||||
}
|
||||
|
||||
if (script == null) {
|
||||
throw new QueryParsingException(parseContext, "script must be provided with a [script] filter");
|
||||
throw new ParsingException(parseContext, "script must be provided with a [script] filter");
|
||||
}
|
||||
|
||||
Query query = new ScriptQuery(script, parseContext.scriptService(), parseContext.lookup());
|
||||
|
|
|
@ -23,6 +23,7 @@ import org.apache.lucene.analysis.Analyzer;
|
|||
import org.apache.lucene.search.BooleanClause;
|
||||
import org.apache.lucene.search.BooleanQuery;
|
||||
import org.apache.lucene.search.Query;
|
||||
import org.elasticsearch.common.ParsingException;
|
||||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.common.inject.Inject;
|
||||
import org.elasticsearch.common.lucene.search.Queries;
|
||||
|
@ -83,7 +84,7 @@ public class SimpleQueryStringParser implements QueryParser {
|
|||
}
|
||||
|
||||
@Override
|
||||
public Query parse(QueryParseContext parseContext) throws IOException, QueryParsingException {
|
||||
public Query parse(QueryParseContext parseContext) throws IOException, ParsingException {
|
||||
XContentParser parser = parseContext.parser();
|
||||
|
||||
String currentFieldName = null;
|
||||
|
@ -138,7 +139,7 @@ public class SimpleQueryStringParser implements QueryParser {
|
|||
}
|
||||
}
|
||||
} else {
|
||||
throw new QueryParsingException(parseContext, "[" + NAME + "] query does not support [" + currentFieldName + "]");
|
||||
throw new ParsingException(parseContext, "[" + NAME + "] query does not support [" + currentFieldName + "]");
|
||||
}
|
||||
} else if (token.isValue()) {
|
||||
if ("query".equals(currentFieldName)) {
|
||||
|
@ -148,7 +149,7 @@ public class SimpleQueryStringParser implements QueryParser {
|
|||
} else if ("analyzer".equals(currentFieldName)) {
|
||||
analyzer = parseContext.analysisService().analyzer(parser.text());
|
||||
if (analyzer == null) {
|
||||
throw new QueryParsingException(parseContext, "[" + NAME + "] analyzer [" + parser.text() + "] not found");
|
||||
throw new ParsingException(parseContext, "[" + NAME + "] analyzer [" + parser.text() + "] not found");
|
||||
}
|
||||
} else if ("default_operator".equals(currentFieldName) || "defaultOperator".equals(currentFieldName)) {
|
||||
String op = parser.text();
|
||||
|
@ -157,7 +158,7 @@ public class SimpleQueryStringParser implements QueryParser {
|
|||
} else if ("and".equalsIgnoreCase(op)) {
|
||||
defaultOperator = BooleanClause.Occur.MUST;
|
||||
} else {
|
||||
throw new QueryParsingException(parseContext, "[" + NAME + "] default operator [" + op + "] is not allowed");
|
||||
throw new ParsingException(parseContext, "[" + NAME + "] default operator [" + op + "] is not allowed");
|
||||
}
|
||||
} else if ("flags".equals(currentFieldName)) {
|
||||
if (parser.currentToken() != XContentParser.Token.VALUE_NUMBER) {
|
||||
|
@ -185,14 +186,14 @@ public class SimpleQueryStringParser implements QueryParser {
|
|||
} else if ("minimum_should_match".equals(currentFieldName)) {
|
||||
minimumShouldMatch = parser.textOrNull();
|
||||
} else {
|
||||
throw new QueryParsingException(parseContext, "[" + NAME + "] unsupported field [" + parser.currentName() + "]");
|
||||
throw new ParsingException(parseContext, "[" + NAME + "] unsupported field [" + parser.currentName() + "]");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Query text is required
|
||||
if (queryBody == null) {
|
||||
throw new QueryParsingException(parseContext, "[" + NAME + "] query text missing");
|
||||
throw new ParsingException(parseContext, "[" + NAME + "] query text missing");
|
||||
}
|
||||
|
||||
// Use standard analyzer by default
|
||||
|
|
|
@ -22,6 +22,7 @@ package org.elasticsearch.index.query;
|
|||
import org.apache.lucene.search.Query;
|
||||
import org.apache.lucene.search.spans.SpanContainingQuery;
|
||||
import org.apache.lucene.search.spans.SpanQuery;
|
||||
import org.elasticsearch.common.ParsingException;
|
||||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.common.inject.Inject;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
|
@ -45,7 +46,7 @@ public class SpanContainingQueryParser implements QueryParser {
|
|||
}
|
||||
|
||||
@Override
|
||||
public Query parse(QueryParseContext parseContext) throws IOException, QueryParsingException {
|
||||
public Query parse(QueryParseContext parseContext) throws IOException, ParsingException {
|
||||
XContentParser parser = parseContext.parser();
|
||||
|
||||
float boost = 1.0f;
|
||||
|
@ -62,32 +63,32 @@ public class SpanContainingQueryParser implements QueryParser {
|
|||
if ("big".equals(currentFieldName)) {
|
||||
Query query = parseContext.parseInnerQuery();
|
||||
if (!(query instanceof SpanQuery)) {
|
||||
throw new QueryParsingException(parseContext, "span_containing [big] must be of type span query");
|
||||
throw new ParsingException(parseContext, "span_containing [big] must be of type span query");
|
||||
}
|
||||
big = (SpanQuery) query;
|
||||
} else if ("little".equals(currentFieldName)) {
|
||||
Query query = parseContext.parseInnerQuery();
|
||||
if (!(query instanceof SpanQuery)) {
|
||||
throw new QueryParsingException(parseContext, "span_containing [little] must be of type span query");
|
||||
throw new ParsingException(parseContext, "span_containing [little] must be of type span query");
|
||||
}
|
||||
little = (SpanQuery) query;
|
||||
} else {
|
||||
throw new QueryParsingException(parseContext, "[span_containing] query does not support [" + currentFieldName + "]");
|
||||
throw new ParsingException(parseContext, "[span_containing] query does not support [" + currentFieldName + "]");
|
||||
}
|
||||
} else if ("boost".equals(currentFieldName)) {
|
||||
boost = parser.floatValue();
|
||||
} else if ("_name".equals(currentFieldName)) {
|
||||
queryName = parser.text();
|
||||
} else {
|
||||
throw new QueryParsingException(parseContext, "[span_containing] query does not support [" + currentFieldName + "]");
|
||||
throw new ParsingException(parseContext, "[span_containing] query does not support [" + currentFieldName + "]");
|
||||
}
|
||||
}
|
||||
|
||||
if (big == null) {
|
||||
throw new QueryParsingException(parseContext, "span_containing must include [big]");
|
||||
throw new ParsingException(parseContext, "span_containing must include [big]");
|
||||
}
|
||||
if (little == null) {
|
||||
throw new QueryParsingException(parseContext, "span_containing must include [little]");
|
||||
throw new ParsingException(parseContext, "span_containing must include [little]");
|
||||
}
|
||||
|
||||
Query query = new SpanContainingQuery(big, little);
|
||||
|
|
|
@ -22,6 +22,7 @@ package org.elasticsearch.index.query;
|
|||
import org.apache.lucene.search.Query;
|
||||
import org.apache.lucene.search.spans.SpanFirstQuery;
|
||||
import org.apache.lucene.search.spans.SpanQuery;
|
||||
import org.elasticsearch.common.ParsingException;
|
||||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.common.inject.Inject;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
|
@ -45,7 +46,7 @@ public class SpanFirstQueryParser implements QueryParser {
|
|||
}
|
||||
|
||||
@Override
|
||||
public Query parse(QueryParseContext parseContext) throws IOException, QueryParsingException {
|
||||
public Query parse(QueryParseContext parseContext) throws IOException, ParsingException {
|
||||
XContentParser parser = parseContext.parser();
|
||||
|
||||
float boost = 1.0f;
|
||||
|
@ -63,11 +64,11 @@ public class SpanFirstQueryParser implements QueryParser {
|
|||
if ("match".equals(currentFieldName)) {
|
||||
Query query = parseContext.parseInnerQuery();
|
||||
if (!(query instanceof SpanQuery)) {
|
||||
throw new QueryParsingException(parseContext, "spanFirst [match] must be of type span query");
|
||||
throw new ParsingException(parseContext, "spanFirst [match] must be of type span query");
|
||||
}
|
||||
match = (SpanQuery) query;
|
||||
} else {
|
||||
throw new QueryParsingException(parseContext, "[span_first] query does not support [" + currentFieldName + "]");
|
||||
throw new ParsingException(parseContext, "[span_first] query does not support [" + currentFieldName + "]");
|
||||
}
|
||||
} else {
|
||||
if ("boost".equals(currentFieldName)) {
|
||||
|
@ -77,15 +78,15 @@ public class SpanFirstQueryParser implements QueryParser {
|
|||
} else if ("_name".equals(currentFieldName)) {
|
||||
queryName = parser.text();
|
||||
} else {
|
||||
throw new QueryParsingException(parseContext, "[span_first] query does not support [" + currentFieldName + "]");
|
||||
throw new ParsingException(parseContext, "[span_first] query does not support [" + currentFieldName + "]");
|
||||
}
|
||||
}
|
||||
}
|
||||
if (match == null) {
|
||||
throw new QueryParsingException(parseContext, "spanFirst must have [match] span query clause");
|
||||
throw new ParsingException(parseContext, "spanFirst must have [match] span query clause");
|
||||
}
|
||||
if (end == -1) {
|
||||
throw new QueryParsingException(parseContext, "spanFirst must have [end] set for it");
|
||||
throw new ParsingException(parseContext, "spanFirst must have [end] set for it");
|
||||
}
|
||||
|
||||
SpanFirstQuery query = new SpanFirstQuery(match, end);
|
||||
|
|
|
@ -21,6 +21,7 @@ package org.elasticsearch.index.query;
|
|||
import org.apache.lucene.search.MultiTermQuery;
|
||||
import org.apache.lucene.search.Query;
|
||||
import org.apache.lucene.search.spans.SpanMultiTermQueryWrapper;
|
||||
import org.elasticsearch.common.ParsingException;
|
||||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.common.inject.Inject;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
|
@ -46,22 +47,22 @@ public class SpanMultiTermQueryParser implements QueryParser {
|
|||
}
|
||||
|
||||
@Override
|
||||
public Query parse(QueryParseContext parseContext) throws IOException, QueryParsingException {
|
||||
public Query parse(QueryParseContext parseContext) throws IOException, ParsingException {
|
||||
XContentParser parser = parseContext.parser();
|
||||
|
||||
Token token = parser.nextToken();
|
||||
if (!MATCH_NAME.equals(parser.currentName()) || token != XContentParser.Token.FIELD_NAME) {
|
||||
throw new QueryParsingException(parseContext, "spanMultiTerm must have [" + MATCH_NAME + "] multi term query clause");
|
||||
throw new ParsingException(parseContext, "spanMultiTerm must have [" + MATCH_NAME + "] multi term query clause");
|
||||
}
|
||||
|
||||
token = parser.nextToken();
|
||||
if (token != XContentParser.Token.START_OBJECT) {
|
||||
throw new QueryParsingException(parseContext, "spanMultiTerm must have [" + MATCH_NAME + "] multi term query clause");
|
||||
throw new ParsingException(parseContext, "spanMultiTerm must have [" + MATCH_NAME + "] multi term query clause");
|
||||
}
|
||||
|
||||
Query subQuery = parseContext.parseInnerQuery();
|
||||
if (!(subQuery instanceof MultiTermQuery)) {
|
||||
throw new QueryParsingException(parseContext, "spanMultiTerm [" + MATCH_NAME + "] must be of type multi term query");
|
||||
throw new ParsingException(parseContext, "spanMultiTerm [" + MATCH_NAME + "] must be of type multi term query");
|
||||
}
|
||||
|
||||
parser.nextToken();
|
||||
|
|
|
@ -22,6 +22,7 @@ package org.elasticsearch.index.query;
|
|||
import org.apache.lucene.search.Query;
|
||||
import org.apache.lucene.search.spans.SpanNearQuery;
|
||||
import org.apache.lucene.search.spans.SpanQuery;
|
||||
import org.elasticsearch.common.ParsingException;
|
||||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.common.inject.Inject;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
|
@ -47,7 +48,7 @@ public class SpanNearQueryParser implements QueryParser {
|
|||
}
|
||||
|
||||
@Override
|
||||
public Query parse(QueryParseContext parseContext) throws IOException, QueryParsingException {
|
||||
public Query parse(QueryParseContext parseContext) throws IOException, ParsingException {
|
||||
XContentParser parser = parseContext.parser();
|
||||
|
||||
float boost = 1.0f;
|
||||
|
@ -68,12 +69,12 @@ public class SpanNearQueryParser implements QueryParser {
|
|||
while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
|
||||
Query query = parseContext.parseInnerQuery();
|
||||
if (!(query instanceof SpanQuery)) {
|
||||
throw new QueryParsingException(parseContext, "spanNear [clauses] must be of type span query");
|
||||
throw new ParsingException(parseContext, "spanNear [clauses] must be of type span query");
|
||||
}
|
||||
clauses.add((SpanQuery) query);
|
||||
}
|
||||
} else {
|
||||
throw new QueryParsingException(parseContext, "[span_near] query does not support [" + currentFieldName + "]");
|
||||
throw new ParsingException(parseContext, "[span_near] query does not support [" + currentFieldName + "]");
|
||||
}
|
||||
} else if (token.isValue()) {
|
||||
if ("in_order".equals(currentFieldName) || "inOrder".equals(currentFieldName)) {
|
||||
|
@ -87,17 +88,17 @@ public class SpanNearQueryParser implements QueryParser {
|
|||
} else if ("_name".equals(currentFieldName)) {
|
||||
queryName = parser.text();
|
||||
} else {
|
||||
throw new QueryParsingException(parseContext, "[span_near] query does not support [" + currentFieldName + "]");
|
||||
throw new ParsingException(parseContext, "[span_near] query does not support [" + currentFieldName + "]");
|
||||
}
|
||||
} else {
|
||||
throw new QueryParsingException(parseContext, "[span_near] query does not support [" + currentFieldName + "]");
|
||||
throw new ParsingException(parseContext, "[span_near] query does not support [" + currentFieldName + "]");
|
||||
}
|
||||
}
|
||||
if (clauses.isEmpty()) {
|
||||
throw new QueryParsingException(parseContext, "span_near must include [clauses]");
|
||||
throw new ParsingException(parseContext, "span_near must include [clauses]");
|
||||
}
|
||||
if (slop == null) {
|
||||
throw new QueryParsingException(parseContext, "span_near must include [slop]");
|
||||
throw new ParsingException(parseContext, "span_near must include [slop]");
|
||||
}
|
||||
|
||||
SpanNearQuery query = new SpanNearQuery(clauses.toArray(new SpanQuery[clauses.size()]), slop.intValue(), inOrder, collectPayloads);
|
||||
|
|
|
@ -22,6 +22,7 @@ package org.elasticsearch.index.query;
|
|||
import org.apache.lucene.search.Query;
|
||||
import org.apache.lucene.search.spans.SpanNotQuery;
|
||||
import org.apache.lucene.search.spans.SpanQuery;
|
||||
import org.elasticsearch.common.ParsingException;
|
||||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.common.inject.Inject;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
|
@ -45,7 +46,7 @@ public class SpanNotQueryParser implements QueryParser {
|
|||
}
|
||||
|
||||
@Override
|
||||
public Query parse(QueryParseContext parseContext) throws IOException, QueryParsingException {
|
||||
public Query parse(QueryParseContext parseContext) throws IOException, ParsingException {
|
||||
XContentParser parser = parseContext.parser();
|
||||
|
||||
float boost = 1.0f;
|
||||
|
@ -68,17 +69,17 @@ public class SpanNotQueryParser implements QueryParser {
|
|||
if ("include".equals(currentFieldName)) {
|
||||
Query query = parseContext.parseInnerQuery();
|
||||
if (!(query instanceof SpanQuery)) {
|
||||
throw new QueryParsingException(parseContext, "spanNot [include] must be of type span query");
|
||||
throw new ParsingException(parseContext, "spanNot [include] must be of type span query");
|
||||
}
|
||||
include = (SpanQuery) query;
|
||||
} else if ("exclude".equals(currentFieldName)) {
|
||||
Query query = parseContext.parseInnerQuery();
|
||||
if (!(query instanceof SpanQuery)) {
|
||||
throw new QueryParsingException(parseContext, "spanNot [exclude] must be of type span query");
|
||||
throw new ParsingException(parseContext, "spanNot [exclude] must be of type span query");
|
||||
}
|
||||
exclude = (SpanQuery) query;
|
||||
} else {
|
||||
throw new QueryParsingException(parseContext, "[span_not] query does not support [" + currentFieldName + "]");
|
||||
throw new ParsingException(parseContext, "[span_not] query does not support [" + currentFieldName + "]");
|
||||
}
|
||||
} else {
|
||||
if ("dist".equals(currentFieldName)) {
|
||||
|
@ -92,18 +93,18 @@ public class SpanNotQueryParser implements QueryParser {
|
|||
} else if ("_name".equals(currentFieldName)) {
|
||||
queryName = parser.text();
|
||||
} else {
|
||||
throw new QueryParsingException(parseContext, "[span_not] query does not support [" + currentFieldName + "]");
|
||||
throw new ParsingException(parseContext, "[span_not] query does not support [" + currentFieldName + "]");
|
||||
}
|
||||
}
|
||||
}
|
||||
if (include == null) {
|
||||
throw new QueryParsingException(parseContext, "spanNot must have [include] span query clause");
|
||||
throw new ParsingException(parseContext, "spanNot must have [include] span query clause");
|
||||
}
|
||||
if (exclude == null) {
|
||||
throw new QueryParsingException(parseContext, "spanNot must have [exclude] span query clause");
|
||||
throw new ParsingException(parseContext, "spanNot must have [exclude] span query clause");
|
||||
}
|
||||
if (dist != null && (pre != null || post != null)) {
|
||||
throw new QueryParsingException(parseContext, "spanNot can either use [dist] or [pre] & [post] (or none)");
|
||||
throw new ParsingException(parseContext, "spanNot can either use [dist] or [pre] & [post] (or none)");
|
||||
}
|
||||
|
||||
// set appropriate defaults
|
||||
|
|
|
@ -22,6 +22,7 @@ package org.elasticsearch.index.query;
|
|||
import org.apache.lucene.search.Query;
|
||||
import org.apache.lucene.search.spans.SpanOrQuery;
|
||||
import org.apache.lucene.search.spans.SpanQuery;
|
||||
import org.elasticsearch.common.ParsingException;
|
||||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.common.inject.Inject;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
|
@ -47,7 +48,7 @@ public class SpanOrQueryParser implements QueryParser {
|
|||
}
|
||||
|
||||
@Override
|
||||
public Query parse(QueryParseContext parseContext) throws IOException, QueryParsingException {
|
||||
public Query parse(QueryParseContext parseContext) throws IOException, ParsingException {
|
||||
XContentParser parser = parseContext.parser();
|
||||
|
||||
float boost = 1.0f;
|
||||
|
@ -65,12 +66,12 @@ public class SpanOrQueryParser implements QueryParser {
|
|||
while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
|
||||
Query query = parseContext.parseInnerQuery();
|
||||
if (!(query instanceof SpanQuery)) {
|
||||
throw new QueryParsingException(parseContext, "spanOr [clauses] must be of type span query");
|
||||
throw new ParsingException(parseContext, "spanOr [clauses] must be of type span query");
|
||||
}
|
||||
clauses.add((SpanQuery) query);
|
||||
}
|
||||
} else {
|
||||
throw new QueryParsingException(parseContext, "[span_or] query does not support [" + currentFieldName + "]");
|
||||
throw new ParsingException(parseContext, "[span_or] query does not support [" + currentFieldName + "]");
|
||||
}
|
||||
} else {
|
||||
if ("boost".equals(currentFieldName)) {
|
||||
|
@ -78,12 +79,12 @@ public class SpanOrQueryParser implements QueryParser {
|
|||
} else if ("_name".equals(currentFieldName)) {
|
||||
queryName = parser.text();
|
||||
} else {
|
||||
throw new QueryParsingException(parseContext, "[span_or] query does not support [" + currentFieldName + "]");
|
||||
throw new ParsingException(parseContext, "[span_or] query does not support [" + currentFieldName + "]");
|
||||
}
|
||||
}
|
||||
}
|
||||
if (clauses.isEmpty()) {
|
||||
throw new QueryParsingException(parseContext, "spanOr must include [clauses]");
|
||||
throw new ParsingException(parseContext, "spanOr must include [clauses]");
|
||||
}
|
||||
|
||||
SpanOrQuery query = new SpanOrQuery(clauses.toArray(new SpanQuery[clauses.size()]));
|
||||
|
|
|
@ -23,6 +23,7 @@ import org.apache.lucene.index.Term;
|
|||
import org.apache.lucene.search.Query;
|
||||
import org.apache.lucene.search.spans.SpanTermQuery;
|
||||
import org.apache.lucene.util.BytesRef;
|
||||
import org.elasticsearch.common.ParsingException;
|
||||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.common.inject.Inject;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
|
@ -47,7 +48,7 @@ public class SpanTermQueryParser implements QueryParser {
|
|||
}
|
||||
|
||||
@Override
|
||||
public Query parse(QueryParseContext parseContext) throws IOException, QueryParsingException {
|
||||
public Query parse(QueryParseContext parseContext) throws IOException, ParsingException {
|
||||
XContentParser parser = parseContext.parser();
|
||||
|
||||
XContentParser.Token token = parser.currentToken();
|
||||
|
@ -77,7 +78,7 @@ public class SpanTermQueryParser implements QueryParser {
|
|||
} else if ("_name".equals(currentFieldName)) {
|
||||
queryName = parser.text();
|
||||
} else {
|
||||
throw new QueryParsingException(parseContext, "[span_term] query does not support [" + currentFieldName + "]");
|
||||
throw new ParsingException(parseContext, "[span_term] query does not support [" + currentFieldName + "]");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -89,7 +90,7 @@ public class SpanTermQueryParser implements QueryParser {
|
|||
}
|
||||
|
||||
if (value == null) {
|
||||
throw new QueryParsingException(parseContext, "No value specified for term query");
|
||||
throw new ParsingException(parseContext, "No value specified for term query");
|
||||
}
|
||||
|
||||
BytesRef valueBytes = null;
|
||||
|
|
|
@ -22,6 +22,7 @@ package org.elasticsearch.index.query;
|
|||
import org.apache.lucene.search.Query;
|
||||
import org.apache.lucene.search.spans.SpanQuery;
|
||||
import org.apache.lucene.search.spans.SpanWithinQuery;
|
||||
import org.elasticsearch.common.ParsingException;
|
||||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.common.inject.Inject;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
|
@ -45,7 +46,7 @@ public class SpanWithinQueryParser implements QueryParser {
|
|||
}
|
||||
|
||||
@Override
|
||||
public Query parse(QueryParseContext parseContext) throws IOException, QueryParsingException {
|
||||
public Query parse(QueryParseContext parseContext) throws IOException, ParsingException {
|
||||
XContentParser parser = parseContext.parser();
|
||||
|
||||
float boost = 1.0f;
|
||||
|
@ -62,32 +63,32 @@ public class SpanWithinQueryParser implements QueryParser {
|
|||
if ("big".equals(currentFieldName)) {
|
||||
Query query = parseContext.parseInnerQuery();
|
||||
if (query instanceof SpanQuery == false) {
|
||||
throw new QueryParsingException(parseContext, "span_within [big] must be of type span query");
|
||||
throw new ParsingException(parseContext, "span_within [big] must be of type span query");
|
||||
}
|
||||
big = (SpanQuery) query;
|
||||
} else if ("little".equals(currentFieldName)) {
|
||||
Query query = parseContext.parseInnerQuery();
|
||||
if (query instanceof SpanQuery == false) {
|
||||
throw new QueryParsingException(parseContext, "span_within [little] must be of type span query");
|
||||
throw new ParsingException(parseContext, "span_within [little] must be of type span query");
|
||||
}
|
||||
little = (SpanQuery) query;
|
||||
} else {
|
||||
throw new QueryParsingException(parseContext, "[span_within] query does not support [" + currentFieldName + "]");
|
||||
throw new ParsingException(parseContext, "[span_within] query does not support [" + currentFieldName + "]");
|
||||
}
|
||||
} else if ("boost".equals(currentFieldName)) {
|
||||
boost = parser.floatValue();
|
||||
} else if ("_name".equals(currentFieldName)) {
|
||||
queryName = parser.text();
|
||||
} else {
|
||||
throw new QueryParsingException(parseContext, "[span_within] query does not support [" + currentFieldName + "]");
|
||||
throw new ParsingException(parseContext, "[span_within] query does not support [" + currentFieldName + "]");
|
||||
}
|
||||
}
|
||||
|
||||
if (big == null) {
|
||||
throw new QueryParsingException(parseContext, "span_within must include [big]");
|
||||
throw new ParsingException(parseContext, "span_within must include [big]");
|
||||
}
|
||||
if (little == null) {
|
||||
throw new QueryParsingException(parseContext, "span_within must include [little]");
|
||||
throw new ParsingException(parseContext, "span_within must include [little]");
|
||||
}
|
||||
|
||||
Query query = new SpanWithinQuery(big, little);
|
||||
|
|
|
@ -23,6 +23,7 @@ import org.apache.lucene.index.Term;
|
|||
import org.apache.lucene.search.Query;
|
||||
import org.apache.lucene.search.TermQuery;
|
||||
import org.elasticsearch.common.ParseField;
|
||||
import org.elasticsearch.common.ParsingException;
|
||||
import org.elasticsearch.common.inject.Inject;
|
||||
import org.elasticsearch.common.lucene.BytesRefs;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
|
@ -50,7 +51,7 @@ public class TermQueryParser implements QueryParser {
|
|||
}
|
||||
|
||||
@Override
|
||||
public Query parse(QueryParseContext parseContext) throws IOException, QueryParsingException {
|
||||
public Query parse(QueryParseContext parseContext) throws IOException, ParsingException {
|
||||
XContentParser parser = parseContext.parser();
|
||||
|
||||
String queryName = null;
|
||||
|
@ -67,7 +68,7 @@ public class TermQueryParser implements QueryParser {
|
|||
} else if (token == XContentParser.Token.START_OBJECT) {
|
||||
// also support a format of "term" : {"field_name" : { ... }}
|
||||
if (fieldName != null) {
|
||||
throw new QueryParsingException(parseContext, "[term] query does not support different field names, use [bool] query instead");
|
||||
throw new ParsingException(parseContext, "[term] query does not support different field names, use [bool] query instead");
|
||||
}
|
||||
fieldName = currentFieldName;
|
||||
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
|
||||
|
@ -83,7 +84,7 @@ public class TermQueryParser implements QueryParser {
|
|||
} else if ("boost".equals(currentFieldName)) {
|
||||
boost = parser.floatValue();
|
||||
} else {
|
||||
throw new QueryParsingException(parseContext, "[term] query does not support [" + currentFieldName + "]");
|
||||
throw new ParsingException(parseContext, "[term] query does not support [" + currentFieldName + "]");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -94,18 +95,18 @@ public class TermQueryParser implements QueryParser {
|
|||
boost = parser.floatValue();
|
||||
} else {
|
||||
if (fieldName != null) {
|
||||
throw new QueryParsingException(parseContext, "[term] query does not support different field names, use [bool] query instead");
|
||||
throw new ParsingException(parseContext, "[term] query does not support different field names, use [bool] query instead");
|
||||
}
|
||||
fieldName = currentFieldName;
|
||||
value = parser.objectBytes();
|
||||
}
|
||||
} else if (token == XContentParser.Token.START_ARRAY) {
|
||||
throw new QueryParsingException(parseContext, "[term] query does not support array of values");
|
||||
throw new ParsingException(parseContext, "[term] query does not support array of values");
|
||||
}
|
||||
}
|
||||
|
||||
if (value == null) {
|
||||
throw new QueryParsingException(parseContext, "No value specified for term query");
|
||||
throw new ParsingException(parseContext, "No value specified for term query");
|
||||
}
|
||||
|
||||
Query query = null;
|
||||
|
|
|
@ -30,6 +30,7 @@ import org.elasticsearch.action.get.GetRequest;
|
|||
import org.elasticsearch.action.get.GetResponse;
|
||||
import org.elasticsearch.client.Client;
|
||||
import org.elasticsearch.common.ParseField;
|
||||
import org.elasticsearch.common.ParsingException;
|
||||
import org.elasticsearch.common.inject.Inject;
|
||||
import org.elasticsearch.common.lucene.BytesRefs;
|
||||
import org.elasticsearch.common.lucene.search.Queries;
|
||||
|
@ -69,7 +70,7 @@ public class TermsQueryParser implements QueryParser {
|
|||
}
|
||||
|
||||
@Override
|
||||
public Query parse(QueryParseContext parseContext) throws IOException, QueryParsingException {
|
||||
public Query parse(QueryParseContext parseContext) throws IOException, ParsingException {
|
||||
XContentParser parser = parseContext.parser();
|
||||
|
||||
String queryName = null;
|
||||
|
@ -95,14 +96,14 @@ public class TermsQueryParser implements QueryParser {
|
|||
// skip
|
||||
} else if (token == XContentParser.Token.START_ARRAY) {
|
||||
if (fieldName != null) {
|
||||
throw new QueryParsingException(parseContext, "[terms] query does not support multiple fields");
|
||||
throw new ParsingException(parseContext, "[terms] query does not support multiple fields");
|
||||
}
|
||||
fieldName = currentFieldName;
|
||||
|
||||
while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
|
||||
Object value = parser.objectBytes();
|
||||
if (value == null) {
|
||||
throw new QueryParsingException(parseContext, "No value specified for terms query");
|
||||
throw new ParsingException(parseContext, "No value specified for terms query");
|
||||
}
|
||||
terms.add(value);
|
||||
}
|
||||
|
@ -123,19 +124,19 @@ public class TermsQueryParser implements QueryParser {
|
|||
} else if ("routing".equals(currentFieldName)) {
|
||||
lookupRouting = parser.textOrNull();
|
||||
} else {
|
||||
throw new QueryParsingException(parseContext, "[terms] query does not support [" + currentFieldName
|
||||
throw new ParsingException(parseContext, "[terms] query does not support [" + currentFieldName
|
||||
+ "] within lookup element");
|
||||
}
|
||||
}
|
||||
}
|
||||
if (lookupType == null) {
|
||||
throw new QueryParsingException(parseContext, "[terms] query lookup element requires specifying the type");
|
||||
throw new ParsingException(parseContext, "[terms] query lookup element requires specifying the type");
|
||||
}
|
||||
if (lookupId == null) {
|
||||
throw new QueryParsingException(parseContext, "[terms] query lookup element requires specifying the id");
|
||||
throw new ParsingException(parseContext, "[terms] query lookup element requires specifying the id");
|
||||
}
|
||||
if (lookupPath == null) {
|
||||
throw new QueryParsingException(parseContext, "[terms] query lookup element requires specifying the path");
|
||||
throw new ParsingException(parseContext, "[terms] query lookup element requires specifying the path");
|
||||
}
|
||||
} else if (token.isValue()) {
|
||||
if (parseContext.parseFieldMatcher().match(currentFieldName, EXECUTION_FIELD)) {
|
||||
|
@ -152,13 +153,13 @@ public class TermsQueryParser implements QueryParser {
|
|||
} else if ("_name".equals(currentFieldName)) {
|
||||
queryName = parser.text();
|
||||
} else {
|
||||
throw new QueryParsingException(parseContext, "[terms] query does not support [" + currentFieldName + "]");
|
||||
throw new ParsingException(parseContext, "[terms] query does not support [" + currentFieldName + "]");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (fieldName == null) {
|
||||
throw new QueryParsingException(parseContext, "terms query requires a field name, followed by array of terms");
|
||||
throw new ParsingException(parseContext, "terms query requires a field name, followed by array of terms");
|
||||
}
|
||||
|
||||
MappedFieldType fieldType = parseContext.fieldMapper(fieldName);
|
||||
|
|
|
@ -23,6 +23,7 @@ import org.apache.lucene.index.Term;
|
|||
import org.apache.lucene.search.Query;
|
||||
import org.apache.lucene.search.TermQuery;
|
||||
import org.apache.lucene.util.BytesRef;
|
||||
import org.elasticsearch.common.ParsingException;
|
||||
import org.elasticsearch.common.inject.Inject;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.index.mapper.DocumentMapper;
|
||||
|
@ -44,20 +45,20 @@ public class TypeQueryParser implements QueryParser {
|
|||
}
|
||||
|
||||
@Override
|
||||
public Query parse(QueryParseContext parseContext) throws IOException, QueryParsingException {
|
||||
public Query parse(QueryParseContext parseContext) throws IOException, ParsingException {
|
||||
XContentParser parser = parseContext.parser();
|
||||
|
||||
XContentParser.Token token = parser.nextToken();
|
||||
if (token != XContentParser.Token.FIELD_NAME) {
|
||||
throw new QueryParsingException(parseContext, "[type] filter should have a value field, and the type name");
|
||||
throw new ParsingException(parseContext, "[type] filter should have a value field, and the type name");
|
||||
}
|
||||
String fieldName = parser.currentName();
|
||||
if (!fieldName.equals("value")) {
|
||||
throw new QueryParsingException(parseContext, "[type] filter should have a value field, and the type name");
|
||||
throw new ParsingException(parseContext, "[type] filter should have a value field, and the type name");
|
||||
}
|
||||
token = parser.nextToken();
|
||||
if (token != XContentParser.Token.VALUE_STRING) {
|
||||
throw new QueryParsingException(parseContext, "[type] filter should have a value field, and the type name");
|
||||
throw new ParsingException(parseContext, "[type] filter should have a value field, and the type name");
|
||||
}
|
||||
BytesRef type = parser.utf8Bytes();
|
||||
// move to the next token
|
||||
|
|
|
@ -23,6 +23,7 @@ import org.apache.lucene.index.Term;
|
|||
import org.apache.lucene.search.Query;
|
||||
import org.apache.lucene.search.WildcardQuery;
|
||||
import org.apache.lucene.util.BytesRef;
|
||||
import org.elasticsearch.common.ParsingException;
|
||||
import org.elasticsearch.common.inject.Inject;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.index.mapper.MappedFieldType;
|
||||
|
@ -47,12 +48,12 @@ public class WildcardQueryParser implements QueryParser {
|
|||
}
|
||||
|
||||
@Override
|
||||
public Query parse(QueryParseContext parseContext) throws IOException, QueryParsingException {
|
||||
public Query parse(QueryParseContext parseContext) throws IOException, ParsingException {
|
||||
XContentParser parser = parseContext.parser();
|
||||
|
||||
XContentParser.Token token = parser.nextToken();
|
||||
if (token != XContentParser.Token.FIELD_NAME) {
|
||||
throw new QueryParsingException(parseContext, "[wildcard] query malformed, no field");
|
||||
throw new ParsingException(parseContext, "[wildcard] query malformed, no field");
|
||||
}
|
||||
String fieldName = parser.currentName();
|
||||
String rewriteMethod = null;
|
||||
|
@ -78,7 +79,7 @@ public class WildcardQueryParser implements QueryParser {
|
|||
} else if ("_name".equals(currentFieldName)) {
|
||||
queryName = parser.text();
|
||||
} else {
|
||||
throw new QueryParsingException(parseContext, "[wildcard] query does not support [" + currentFieldName + "]");
|
||||
throw new ParsingException(parseContext, "[wildcard] query does not support [" + currentFieldName + "]");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -89,7 +90,7 @@ public class WildcardQueryParser implements QueryParser {
|
|||
}
|
||||
|
||||
if (value == null) {
|
||||
throw new QueryParsingException(parseContext, "No value specified for prefix query");
|
||||
throw new ParsingException(parseContext, "No value specified for prefix query");
|
||||
}
|
||||
|
||||
BytesRef valueBytes;
|
||||
|
|
|
@ -20,6 +20,7 @@
|
|||
package org.elasticsearch.index.query;
|
||||
|
||||
import org.apache.lucene.search.Query;
|
||||
import org.elasticsearch.common.ParsingException;
|
||||
import org.elasticsearch.common.inject.Inject;
|
||||
import org.elasticsearch.common.xcontent.XContentFactory;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
|
@ -43,16 +44,16 @@ public class WrapperQueryParser implements QueryParser {
|
|||
}
|
||||
|
||||
@Override
|
||||
public Query parse(QueryParseContext parseContext) throws IOException, QueryParsingException {
|
||||
public Query parse(QueryParseContext parseContext) throws IOException, ParsingException {
|
||||
XContentParser parser = parseContext.parser();
|
||||
|
||||
XContentParser.Token token = parser.nextToken();
|
||||
if (token != XContentParser.Token.FIELD_NAME) {
|
||||
throw new QueryParsingException(parseContext, "[wrapper] query malformed");
|
||||
throw new ParsingException(parseContext, "[wrapper] query malformed");
|
||||
}
|
||||
String fieldName = parser.currentName();
|
||||
if (!fieldName.equals("query")) {
|
||||
throw new QueryParsingException(parseContext, "[wrapper] query malformed");
|
||||
throw new ParsingException(parseContext, "[wrapper] query malformed");
|
||||
}
|
||||
parser.nextToken();
|
||||
|
||||
|
|
|
@ -44,7 +44,7 @@ import org.elasticsearch.index.mapper.core.DateFieldMapper;
|
|||
import org.elasticsearch.index.mapper.core.NumberFieldMapper;
|
||||
import org.elasticsearch.index.mapper.geo.GeoPointFieldMapper;
|
||||
import org.elasticsearch.index.query.QueryParseContext;
|
||||
import org.elasticsearch.index.query.QueryParsingException;
|
||||
import org.elasticsearch.common.ParsingException;
|
||||
import org.elasticsearch.index.query.functionscore.gauss.GaussDecayFunctionBuilder;
|
||||
import org.elasticsearch.index.query.functionscore.gauss.GaussDecayFunctionParser;
|
||||
import org.elasticsearch.search.MultiValueMode;
|
||||
|
@ -118,7 +118,7 @@ public abstract class DecayFunctionParser implements ScoreFunctionParser {
|
|||
*
|
||||
* */
|
||||
@Override
|
||||
public ScoreFunction parse(QueryParseContext parseContext, XContentParser parser) throws IOException, QueryParsingException {
|
||||
public ScoreFunction parse(QueryParseContext parseContext, XContentParser parser) throws IOException, ParsingException {
|
||||
String currentFieldName;
|
||||
XContentParser.Token token;
|
||||
AbstractDistanceScoreFunction scoreFunction;
|
||||
|
@ -152,7 +152,7 @@ public abstract class DecayFunctionParser implements ScoreFunctionParser {
|
|||
// the doc later
|
||||
MappedFieldType fieldType = parseContext.fieldMapper(fieldName);
|
||||
if (fieldType == null) {
|
||||
throw new QueryParsingException(parseContext, "unknown field [{}]", fieldName);
|
||||
throw new ParsingException(parseContext, "unknown field [{}]", fieldName);
|
||||
}
|
||||
|
||||
// dates and time need special handling
|
||||
|
@ -164,7 +164,7 @@ public abstract class DecayFunctionParser implements ScoreFunctionParser {
|
|||
} else if (fieldType instanceof NumberFieldMapper.NumberFieldType) {
|
||||
return parseNumberVariable(fieldName, parser, parseContext, (NumberFieldMapper.NumberFieldType) fieldType, mode);
|
||||
} else {
|
||||
throw new QueryParsingException(parseContext, "field [{}] is of type [{}], but only numeric types are supported.", fieldName, fieldType);
|
||||
throw new ParsingException(parseContext, "field [{}] is of type [{}], but only numeric types are supported.", fieldName, fieldType);
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -34,7 +34,7 @@ import org.elasticsearch.common.lucene.search.function.*;
|
|||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.index.query.QueryParseContext;
|
||||
import org.elasticsearch.index.query.QueryParser;
|
||||
import org.elasticsearch.index.query.QueryParsingException;
|
||||
import org.elasticsearch.common.ParsingException;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
|
@ -76,7 +76,7 @@ public class FunctionScoreQueryParser implements QueryParser {
|
|||
}
|
||||
|
||||
@Override
|
||||
public Query parse(QueryParseContext parseContext) throws IOException, QueryParsingException {
|
||||
public Query parse(QueryParseContext parseContext) throws IOException, ParsingException {
|
||||
XContentParser parser = parseContext.parser();
|
||||
|
||||
Query query = null;
|
||||
|
@ -195,7 +195,7 @@ public class FunctionScoreQueryParser implements QueryParser {
|
|||
ScoreFunction scoreFunction = null;
|
||||
Float functionWeight = null;
|
||||
if (token != XContentParser.Token.START_OBJECT) {
|
||||
throw new QueryParsingException(parseContext, "failed to parse [{}]. malformed query, expected a [{}] while parsing functions but got a [{}] instead", XContentParser.Token.START_OBJECT, token, NAME);
|
||||
throw new ParsingException(parseContext, "failed to parse [{}]. malformed query, expected a [{}] while parsing functions but got a [{}] instead", XContentParser.Token.START_OBJECT, token, NAME);
|
||||
} else {
|
||||
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
|
||||
if (token == XContentParser.Token.FIELD_NAME) {
|
||||
|
@ -245,7 +245,7 @@ public class FunctionScoreQueryParser implements QueryParser {
|
|||
} else if ("first".equals(scoreMode)) {
|
||||
return FiltersFunctionScoreQuery.ScoreMode.First;
|
||||
} else {
|
||||
throw new QueryParsingException(parseContext, "failed to parse [{}] query. illegal score_mode [{}]", NAME, scoreMode);
|
||||
throw new ParsingException(parseContext, "failed to parse [{}] query. illegal score_mode [{}]", NAME, scoreMode);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -253,7 +253,7 @@ public class FunctionScoreQueryParser implements QueryParser {
|
|||
String boostMode = parser.text();
|
||||
CombineFunction cf = combineFunctionsMap.get(boostMode);
|
||||
if (cf == null) {
|
||||
throw new QueryParsingException(parseContext, "failed to parse [{}] query. illegal boost_mode [{}]", NAME, boostMode);
|
||||
throw new ParsingException(parseContext, "failed to parse [{}] query. illegal boost_mode [{}]", NAME, boostMode);
|
||||
}
|
||||
return cf;
|
||||
}
|
||||
|
|
|
@ -22,13 +22,13 @@ package org.elasticsearch.index.query.functionscore;
|
|||
import org.elasticsearch.common.lucene.search.function.ScoreFunction;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.index.query.QueryParseContext;
|
||||
import org.elasticsearch.index.query.QueryParsingException;
|
||||
import org.elasticsearch.common.ParsingException;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
public interface ScoreFunctionParser {
|
||||
|
||||
ScoreFunction parse(QueryParseContext parseContext, XContentParser parser) throws IOException, QueryParsingException;
|
||||
ScoreFunction parse(QueryParseContext parseContext, XContentParser parser) throws IOException, ParsingException;
|
||||
|
||||
/**
|
||||
* Returns the name of the function, for example "linear", "gauss" etc. This
|
||||
|
|
|
@ -21,7 +21,7 @@ package org.elasticsearch.index.query.functionscore;
|
|||
|
||||
import org.elasticsearch.common.inject.Inject;
|
||||
import org.elasticsearch.index.query.QueryParseContext;
|
||||
import org.elasticsearch.index.query.QueryParsingException;
|
||||
import org.elasticsearch.common.ParsingException;
|
||||
import org.elasticsearch.index.query.functionscore.exp.ExponentialDecayFunctionParser;
|
||||
import org.elasticsearch.index.query.functionscore.fieldvaluefactor.FieldValueFactorFunctionParser;
|
||||
import org.elasticsearch.index.query.functionscore.gauss.GaussDecayFunctionParser;
|
||||
|
@ -57,7 +57,7 @@ public class ScoreFunctionParserMapper {
|
|||
public ScoreFunctionParser get(QueryParseContext parseContext, String parserName) {
|
||||
ScoreFunctionParser functionParser = get(parserName);
|
||||
if (functionParser == null) {
|
||||
throw new QueryParsingException(parseContext, "No function with the name [" + parserName + "] is registered.");
|
||||
throw new ParsingException(parseContext, "No function with the name [" + parserName + "] is registered.");
|
||||
}
|
||||
return functionParser;
|
||||
}
|
||||
|
|
|
@ -19,17 +19,14 @@
|
|||
|
||||
package org.elasticsearch.index.query.functionscore.fieldvaluefactor;
|
||||
|
||||
import org.apache.lucene.document.FieldType;
|
||||
import org.elasticsearch.ElasticsearchException;
|
||||
import org.elasticsearch.common.lucene.search.function.FieldValueFactorFunction;
|
||||
import org.elasticsearch.common.lucene.search.function.ScoreFunction;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.index.fielddata.IndexNumericFieldData;
|
||||
import org.elasticsearch.index.fielddata.plain.DoubleArrayIndexFieldData;
|
||||
import org.elasticsearch.index.mapper.FieldMapper;
|
||||
import org.elasticsearch.index.mapper.MappedFieldType;
|
||||
import org.elasticsearch.index.query.QueryParseContext;
|
||||
import org.elasticsearch.index.query.QueryParsingException;
|
||||
import org.elasticsearch.common.ParsingException;
|
||||
import org.elasticsearch.index.query.functionscore.ScoreFunctionParser;
|
||||
import org.elasticsearch.search.internal.SearchContext;
|
||||
|
||||
|
@ -54,7 +51,7 @@ public class FieldValueFactorFunctionParser implements ScoreFunctionParser {
|
|||
public static String[] NAMES = { "field_value_factor", "fieldValueFactor" };
|
||||
|
||||
@Override
|
||||
public ScoreFunction parse(QueryParseContext parseContext, XContentParser parser) throws IOException, QueryParsingException {
|
||||
public ScoreFunction parse(QueryParseContext parseContext, XContentParser parser) throws IOException, ParsingException {
|
||||
|
||||
String currentFieldName = null;
|
||||
String field = null;
|
||||
|
@ -75,15 +72,15 @@ public class FieldValueFactorFunctionParser implements ScoreFunctionParser {
|
|||
} else if ("missing".equals(currentFieldName)) {
|
||||
missing = parser.doubleValue();
|
||||
} else {
|
||||
throw new QueryParsingException(parseContext, NAMES[0] + " query does not support [" + currentFieldName + "]");
|
||||
throw new ParsingException(parseContext, NAMES[0] + " query does not support [" + currentFieldName + "]");
|
||||
}
|
||||
} else if("factor".equals(currentFieldName) && (token == XContentParser.Token.START_ARRAY || token == XContentParser.Token.START_OBJECT)) {
|
||||
throw new QueryParsingException(parseContext, "[" + NAMES[0] + "] field 'factor' does not support lists or objects");
|
||||
throw new ParsingException(parseContext, "[" + NAMES[0] + "] field 'factor' does not support lists or objects");
|
||||
}
|
||||
}
|
||||
|
||||
if (field == null) {
|
||||
throw new QueryParsingException(parseContext, "[" + NAMES[0] + "] required field 'field' missing");
|
||||
throw new ParsingException(parseContext, "[" + NAMES[0] + "] required field 'field' missing");
|
||||
}
|
||||
|
||||
SearchContext searchContext = SearchContext.current();
|
||||
|
|
|
@ -28,7 +28,7 @@ import org.elasticsearch.common.xcontent.XContentParser;
|
|||
import org.elasticsearch.index.fielddata.IndexFieldData;
|
||||
import org.elasticsearch.index.mapper.MappedFieldType;
|
||||
import org.elasticsearch.index.query.QueryParseContext;
|
||||
import org.elasticsearch.index.query.QueryParsingException;
|
||||
import org.elasticsearch.common.ParsingException;
|
||||
import org.elasticsearch.index.query.functionscore.ScoreFunctionParser;
|
||||
import org.elasticsearch.index.shard.ShardId;
|
||||
import org.elasticsearch.search.internal.SearchContext;
|
||||
|
@ -49,7 +49,7 @@ public class RandomScoreFunctionParser implements ScoreFunctionParser {
|
|||
}
|
||||
|
||||
@Override
|
||||
public ScoreFunction parse(QueryParseContext parseContext, XContentParser parser) throws IOException, QueryParsingException {
|
||||
public ScoreFunction parse(QueryParseContext parseContext, XContentParser parser) throws IOException, ParsingException {
|
||||
|
||||
int seed = -1;
|
||||
|
||||
|
@ -66,17 +66,17 @@ public class RandomScoreFunctionParser implements ScoreFunctionParser {
|
|||
} else if (parser.numberType() == XContentParser.NumberType.LONG) {
|
||||
seed = hash(parser.longValue());
|
||||
} else {
|
||||
throw new QueryParsingException(parseContext, "random_score seed must be an int, long or string, not '"
|
||||
throw new ParsingException(parseContext, "random_score seed must be an int, long or string, not '"
|
||||
+ token.toString() + "'");
|
||||
}
|
||||
} else if (token == XContentParser.Token.VALUE_STRING) {
|
||||
seed = parser.text().hashCode();
|
||||
} else {
|
||||
throw new QueryParsingException(parseContext, "random_score seed must be an int/long or string, not '"
|
||||
throw new ParsingException(parseContext, "random_score seed must be an int/long or string, not '"
|
||||
+ token.toString() + "'");
|
||||
}
|
||||
} else {
|
||||
throw new QueryParsingException(parseContext, NAMES[0] + " query does not support [" + currentFieldName + "]");
|
||||
throw new ParsingException(parseContext, NAMES[0] + " query does not support [" + currentFieldName + "]");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -26,7 +26,7 @@ import org.elasticsearch.common.lucene.search.function.ScoreFunction;
|
|||
import org.elasticsearch.common.lucene.search.function.ScriptScoreFunction;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.index.query.QueryParseContext;
|
||||
import org.elasticsearch.index.query.QueryParsingException;
|
||||
import org.elasticsearch.common.ParsingException;
|
||||
import org.elasticsearch.index.query.functionscore.ScoreFunctionParser;
|
||||
import org.elasticsearch.script.Script;
|
||||
import org.elasticsearch.script.Script.ScriptField;
|
||||
|
@ -56,7 +56,7 @@ public class ScriptScoreFunctionParser implements ScoreFunctionParser {
|
|||
}
|
||||
|
||||
@Override
|
||||
public ScoreFunction parse(QueryParseContext parseContext, XContentParser parser) throws IOException, QueryParsingException {
|
||||
public ScoreFunction parse(QueryParseContext parseContext, XContentParser parser) throws IOException, ParsingException {
|
||||
ScriptParameterParser scriptParameterParser = new ScriptParameterParser();
|
||||
Script script = null;
|
||||
Map<String, Object> vars = null;
|
||||
|
@ -71,11 +71,11 @@ public class ScriptScoreFunctionParser implements ScoreFunctionParser {
|
|||
} else if ("params".equals(currentFieldName)) { // TODO remove in 3.0 (here to support old script APIs)
|
||||
vars = parser.map();
|
||||
} else {
|
||||
throw new QueryParsingException(parseContext, NAMES[0] + " query does not support [" + currentFieldName + "]");
|
||||
throw new ParsingException(parseContext, NAMES[0] + " query does not support [" + currentFieldName + "]");
|
||||
}
|
||||
} else if (token.isValue()) {
|
||||
if (!scriptParameterParser.token(currentFieldName, token, parser, parseContext.parseFieldMatcher())) {
|
||||
throw new QueryParsingException(parseContext, NAMES[0] + " query does not support [" + currentFieldName + "]");
|
||||
throw new ParsingException(parseContext, NAMES[0] + " query does not support [" + currentFieldName + "]");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -89,11 +89,11 @@ public class ScriptScoreFunctionParser implements ScoreFunctionParser {
|
|||
script = new Script(scriptValue.script(), scriptValue.scriptType(), scriptParameterParser.lang(), vars);
|
||||
}
|
||||
} else if (vars != null) {
|
||||
throw new QueryParsingException(parseContext, "script params must be specified inside script object");
|
||||
throw new ParsingException(parseContext, "script params must be specified inside script object");
|
||||
}
|
||||
|
||||
if (script == null) {
|
||||
throw new QueryParsingException(parseContext, NAMES[0] + " requires 'script' field");
|
||||
throw new ParsingException(parseContext, NAMES[0] + " requires 'script' field");
|
||||
}
|
||||
|
||||
SearchScript searchScript;
|
||||
|
@ -101,7 +101,7 @@ public class ScriptScoreFunctionParser implements ScoreFunctionParser {
|
|||
searchScript = parseContext.scriptService().search(parseContext.lookup(), script, ScriptContext.Standard.SEARCH);
|
||||
return new ScriptScoreFunction(script, searchScript);
|
||||
} catch (Exception e) {
|
||||
throw new QueryParsingException(parseContext, NAMES[0] + " the script could not be loaded", e);
|
||||
throw new ParsingException(parseContext, NAMES[0] + " the script could not be loaded", e);
|
||||
}
|
||||
}
|
||||
}
|
|
@ -22,7 +22,7 @@ package org.elasticsearch.index.query.support;
|
|||
import org.elasticsearch.common.inject.Inject;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.index.query.QueryParseContext;
|
||||
import org.elasticsearch.index.query.QueryParsingException;
|
||||
import org.elasticsearch.common.ParsingException;
|
||||
import org.elasticsearch.search.fetch.fielddata.FieldDataFieldsParseElement;
|
||||
import org.elasticsearch.search.fetch.innerhits.InnerHitsSubSearchContext;
|
||||
import org.elasticsearch.search.fetch.script.ScriptFieldsParseElement;
|
||||
|
@ -51,7 +51,7 @@ public class InnerHitsQueryParserHelper {
|
|||
this.fieldDataFieldsParseElement = fieldDataFieldsParseElement;
|
||||
}
|
||||
|
||||
public InnerHitsSubSearchContext parse(QueryParseContext parserContext) throws IOException, QueryParsingException {
|
||||
public InnerHitsSubSearchContext parse(QueryParseContext parserContext) throws IOException, ParsingException {
|
||||
String fieldName = null;
|
||||
XContentParser.Token token;
|
||||
String innerHitName = null;
|
||||
|
@ -72,7 +72,7 @@ public class InnerHitsQueryParserHelper {
|
|||
}
|
||||
}
|
||||
} catch (Exception e) {
|
||||
throw new QueryParsingException(parserContext, "Failed to parse [_inner_hits]", e);
|
||||
throw new ParsingException(parserContext, "Failed to parse [_inner_hits]", e);
|
||||
}
|
||||
return new InnerHitsSubSearchContext(innerHitName, subSearchContext);
|
||||
}
|
||||
|
|
|
@ -29,7 +29,7 @@ import org.elasticsearch.common.xcontent.XContentHelper;
|
|||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.index.mapper.object.ObjectMapper;
|
||||
import org.elasticsearch.index.query.QueryParseContext;
|
||||
import org.elasticsearch.index.query.QueryParsingException;
|
||||
import org.elasticsearch.common.ParsingException;
|
||||
import org.elasticsearch.search.internal.SearchContext;
|
||||
|
||||
import java.io.IOException;
|
||||
|
@ -103,10 +103,10 @@ public class NestedInnerQueryParseSupport {
|
|||
return innerQuery;
|
||||
} else {
|
||||
if (path == null) {
|
||||
throw new QueryParsingException(parseContext, "[nested] requires 'path' field");
|
||||
throw new ParsingException(parseContext, "[nested] requires 'path' field");
|
||||
}
|
||||
if (!queryFound) {
|
||||
throw new QueryParsingException(parseContext, "[nested] requires either 'query' or 'filter' field");
|
||||
throw new ParsingException(parseContext, "[nested] requires either 'query' or 'filter' field");
|
||||
}
|
||||
|
||||
XContentParser old = parseContext.parser();
|
||||
|
@ -132,10 +132,10 @@ public class NestedInnerQueryParseSupport {
|
|||
return innerFilter;
|
||||
} else {
|
||||
if (path == null) {
|
||||
throw new QueryParsingException(parseContext, "[nested] requires 'path' field");
|
||||
throw new ParsingException(parseContext, "[nested] requires 'path' field");
|
||||
}
|
||||
if (!filterFound) {
|
||||
throw new QueryParsingException(parseContext, "[nested] requires either 'query' or 'filter' field");
|
||||
throw new ParsingException(parseContext, "[nested] requires either 'query' or 'filter' field");
|
||||
}
|
||||
|
||||
setPathLevel();
|
||||
|
@ -157,10 +157,10 @@ public class NestedInnerQueryParseSupport {
|
|||
this.path = path;
|
||||
nestedObjectMapper = parseContext.getObjectMapper(path);
|
||||
if (nestedObjectMapper == null) {
|
||||
throw new QueryParsingException(parseContext, "[nested] failed to find nested object under path [" + path + "]");
|
||||
throw new ParsingException(parseContext, "[nested] failed to find nested object under path [" + path + "]");
|
||||
}
|
||||
if (!nestedObjectMapper.nested().isNested()) {
|
||||
throw new QueryParsingException(parseContext, "[nested] nested object under path [" + path + "] is not of nested type");
|
||||
throw new ParsingException(parseContext, "[nested] nested object under path [" + path + "] is not of nested type");
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -38,7 +38,7 @@ import org.elasticsearch.index.engine.IgnoreOnRecoveryEngineException;
|
|||
import org.elasticsearch.index.mapper.*;
|
||||
import org.elasticsearch.index.query.IndexQueryParserService;
|
||||
import org.elasticsearch.index.query.ParsedQuery;
|
||||
import org.elasticsearch.index.query.QueryParsingException;
|
||||
import org.elasticsearch.common.ParsingException;
|
||||
import org.elasticsearch.index.translog.Translog;
|
||||
|
||||
import java.io.IOException;
|
||||
|
@ -214,7 +214,7 @@ public class TranslogRecoveryPerformer {
|
|||
Query query;
|
||||
try {
|
||||
query = queryParserService.parseQuery(source).query();
|
||||
} catch (QueryParsingException ex) {
|
||||
} catch (ParsingException ex) {
|
||||
// for BWC we try to parse directly the query since pre 1.0.0.Beta2 we didn't require a top level query field
|
||||
if (queryParserService.getIndexCreatedVersion().onOrBefore(Version.V_1_0_0_Beta2)) {
|
||||
try {
|
||||
|
|
|
@ -28,7 +28,7 @@ import org.elasticsearch.common.io.stream.StreamInput;
|
|||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.index.query.QueryParsingException;
|
||||
import org.elasticsearch.common.ParsingException;
|
||||
import org.elasticsearch.search.internal.SearchContext;
|
||||
|
||||
import java.io.IOException;
|
||||
|
@ -117,7 +117,7 @@ public class GND extends NXYSignificanceHeuristic {
|
|||
|
||||
@Override
|
||||
public SignificanceHeuristic parse(XContentParser parser, ParseFieldMatcher parseFieldMatcher, SearchContext context)
|
||||
throws IOException, QueryParsingException {
|
||||
throws IOException, ParsingException {
|
||||
String givenName = parser.currentName();
|
||||
boolean backgroundIsSuperset = true;
|
||||
XContentParser.Token token = parser.nextToken();
|
||||
|
|
|
@ -27,7 +27,7 @@ import org.elasticsearch.common.io.stream.StreamInput;
|
|||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.index.query.QueryParsingException;
|
||||
import org.elasticsearch.common.ParsingException;
|
||||
import org.elasticsearch.search.internal.SearchContext;
|
||||
|
||||
import java.io.IOException;
|
||||
|
@ -110,7 +110,7 @@ public class JLHScore extends SignificanceHeuristic {
|
|||
|
||||
@Override
|
||||
public SignificanceHeuristic parse(XContentParser parser, ParseFieldMatcher parseFieldMatcher, SearchContext context)
|
||||
throws IOException, QueryParsingException {
|
||||
throws IOException, ParsingException {
|
||||
// move to the closing bracket
|
||||
if (!parser.nextToken().equals(XContentParser.Token.END_OBJECT)) {
|
||||
throw new ElasticsearchParseException("failed to parse [jhl] significance heuristic. expected an empty object, but found [{}] instead", parser.currentToken());
|
||||
|
|
|
@ -27,7 +27,7 @@ import org.elasticsearch.common.ParseFieldMatcher;
|
|||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.index.query.QueryParsingException;
|
||||
import org.elasticsearch.common.ParsingException;
|
||||
import org.elasticsearch.search.internal.SearchContext;
|
||||
|
||||
import java.io.IOException;
|
||||
|
@ -140,7 +140,7 @@ public abstract class NXYSignificanceHeuristic extends SignificanceHeuristic {
|
|||
|
||||
@Override
|
||||
public SignificanceHeuristic parse(XContentParser parser, ParseFieldMatcher parseFieldMatcher, SearchContext context)
|
||||
throws IOException, QueryParsingException {
|
||||
throws IOException, ParsingException {
|
||||
String givenName = parser.currentName();
|
||||
boolean includeNegatives = false;
|
||||
boolean backgroundIsSuperset = true;
|
||||
|
|
|
@ -27,7 +27,7 @@ import org.elasticsearch.common.io.stream.StreamInput;
|
|||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.index.query.QueryParsingException;
|
||||
import org.elasticsearch.common.ParsingException;
|
||||
import org.elasticsearch.search.internal.SearchContext;
|
||||
|
||||
import java.io.IOException;
|
||||
|
@ -79,7 +79,7 @@ public class PercentageScore extends SignificanceHeuristic {
|
|||
|
||||
@Override
|
||||
public SignificanceHeuristic parse(XContentParser parser, ParseFieldMatcher parseFieldMatcher, SearchContext context)
|
||||
throws IOException, QueryParsingException {
|
||||
throws IOException, ParsingException {
|
||||
// move to the closing bracket
|
||||
if (!parser.nextToken().equals(XContentParser.Token.END_OBJECT)) {
|
||||
throw new ElasticsearchParseException("failed to parse [percentage] significance heuristic. expected an empty object, but got [{}] instead", parser.currentToken());
|
||||
|
|
|
@ -29,7 +29,7 @@ import org.elasticsearch.common.io.stream.StreamOutput;
|
|||
import org.elasticsearch.common.logging.ESLoggerFactory;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.index.query.QueryParsingException;
|
||||
import org.elasticsearch.common.ParsingException;
|
||||
import org.elasticsearch.script.ExecutableScript;
|
||||
import org.elasticsearch.script.Script;
|
||||
import org.elasticsearch.script.Script.ScriptField;
|
||||
|
@ -134,7 +134,7 @@ public class ScriptHeuristic extends SignificanceHeuristic {
|
|||
|
||||
@Override
|
||||
public SignificanceHeuristic parse(XContentParser parser, ParseFieldMatcher parseFieldMatcher, SearchContext context)
|
||||
throws IOException, QueryParsingException {
|
||||
throws IOException, ParsingException {
|
||||
String heuristicName = parser.currentName();
|
||||
Script script = null;
|
||||
XContentParser.Token token;
|
||||
|
|
|
@@ -22,7 +22,7 @@ package org.elasticsearch.search.aggregations.bucket.significant.heuristics;

 import org.elasticsearch.common.ParseFieldMatcher;
 import org.elasticsearch.common.xcontent.XContentParser;
-import org.elasticsearch.index.query.QueryParsingException;
+import org.elasticsearch.common.ParsingException;
 import org.elasticsearch.search.internal.SearchContext;

 import java.io.IOException;

@@ -30,7 +30,7 @@ import java.io.IOException;
 public interface SignificanceHeuristicParser {

     SignificanceHeuristic parse(XContentParser parser, ParseFieldMatcher parseFieldMatcher, SearchContext context) throws IOException,
-            QueryParsingException;
+            ParsingException;

     String[] getNames();
 }
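To make the new contract concrete, here is a minimal, hypothetical implementation of the interface above written against the renamed exception. Only calls already visible in this commit are used (the `ParsingException(Index, line, column, message, cause)` constructor, `parser.nextToken()`, `parser.currentToken()`); the class name `FixedHeuristicParser`, the heuristic name `fixed`, and the `_na_` index are illustrative and not part of the change.

```java
import java.io.IOException;

import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.Index;
import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristic;
import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristicParser;
import org.elasticsearch.search.internal.SearchContext;

/** Hypothetical parser that always returns a pre-built heuristic; it only demonstrates the renamed signature. */
public class FixedHeuristicParser implements SignificanceHeuristicParser {

    private final SignificanceHeuristic heuristic;

    public FixedHeuristicParser(SignificanceHeuristic heuristic) {
        this.heuristic = heuristic;
    }

    @Override
    public SignificanceHeuristic parse(XContentParser parser, ParseFieldMatcher parseFieldMatcher, SearchContext context)
            throws IOException, ParsingException {
        // Like PercentageScore.parse above, this heuristic takes no options, so the body must be an empty object.
        if (!parser.nextToken().equals(XContentParser.Token.END_OBJECT)) {
            // The generic ParsingException replaces QueryParsingException here; index and position are placeholders.
            throw new ParsingException(new Index("_na_"), 1, 1,
                    "expected an empty object for [fixed] heuristic, got [" + parser.currentToken() + "]", null);
        }
        return heuristic;
    }

    @Override
    public String[] getNames() {
        return new String[] { "fixed" };
    }
}
```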
@@ -36,8 +36,8 @@ import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.common.xcontent.XContentLocation;
 import org.elasticsearch.index.Index;
 import org.elasticsearch.index.IndexNotFoundException;
-import org.elasticsearch.index.query.QueryParsingException;
-import org.elasticsearch.index.query.TestQueryParsingException;
+import org.elasticsearch.common.ParsingException;
+import org.elasticsearch.index.query.TestParsingException;
 import org.elasticsearch.rest.RestStatus;
 import org.elasticsearch.search.SearchParseException;
 import org.elasticsearch.search.SearchShardTarget;

@@ -53,7 +53,6 @@ import java.io.EOFException;
 import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.nio.file.NoSuchFileException;
 import java.util.Collections;

 import static org.hamcrest.Matchers.equalTo;

@@ -88,9 +87,9 @@ public class ESExceptionTests extends ESTestCase {
         assertEquals(rootCauses.length, 1);
         assertEquals(ElasticsearchException.getExceptionName(rootCauses[0]), "index_not_found_exception");
         assertEquals(rootCauses[0].getMessage(), "no such index");
-        ShardSearchFailure failure = new ShardSearchFailure(new TestQueryParsingException(new Index("foo"), "foobar", null),
+        ShardSearchFailure failure = new ShardSearchFailure(new TestParsingException(new Index("foo"), "foobar", null),
                 new SearchShardTarget("node_1", "foo", 1));
-        ShardSearchFailure failure1 = new ShardSearchFailure(new TestQueryParsingException(new Index("foo"), "foobar", null),
+        ShardSearchFailure failure1 = new ShardSearchFailure(new TestParsingException(new Index("foo"), "foobar", null),
                 new SearchShardTarget("node_1", "foo", 2));
         SearchPhaseExecutionException ex = new SearchPhaseExecutionException("search", "all shards failed", new ShardSearchFailure[]{failure, failure1});
         if (randomBoolean()) {

@@ -108,22 +107,22 @@ public class ESExceptionTests extends ESTestCase {
         }
         {
             ShardSearchFailure failure = new ShardSearchFailure(
-                    new TestQueryParsingException(new Index("foo"), 1, 2, "foobar", null),
+                    new TestParsingException(new Index("foo"), 1, 2, "foobar", null),
                     new SearchShardTarget("node_1", "foo", 1));
-            ShardSearchFailure failure1 = new ShardSearchFailure(new TestQueryParsingException(new Index("foo1"), 1, 2, "foobar", null),
+            ShardSearchFailure failure1 = new ShardSearchFailure(new TestParsingException(new Index("foo1"), 1, 2, "foobar", null),
                     new SearchShardTarget("node_1", "foo1", 1));
-            ShardSearchFailure failure2 = new ShardSearchFailure(new TestQueryParsingException(new Index("foo1"), 1, 2, "foobar", null),
+            ShardSearchFailure failure2 = new ShardSearchFailure(new TestParsingException(new Index("foo1"), 1, 2, "foobar", null),
                     new SearchShardTarget("node_1", "foo1", 2));
             SearchPhaseExecutionException ex = new SearchPhaseExecutionException("search", "all shards failed", new ShardSearchFailure[]{failure, failure1, failure2});
             final ElasticsearchException[] rootCauses = ex.guessRootCauses();
             assertEquals(rootCauses.length, 2);
             assertEquals(ElasticsearchException.getExceptionName(rootCauses[0]), "test_query_parsing_exception");
             assertEquals(rootCauses[0].getMessage(), "foobar");
-            assertEquals(((QueryParsingException)rootCauses[0]).getIndex(), "foo");
+            assertEquals(((ParsingException)rootCauses[0]).getIndex(), "foo");
             assertEquals(ElasticsearchException.getExceptionName(rootCauses[1]), "test_query_parsing_exception");
             assertEquals(rootCauses[1].getMessage(), "foobar");
-            assertEquals(((QueryParsingException) rootCauses[1]).getLineNumber(), 1);
-            assertEquals(((QueryParsingException) rootCauses[1]).getColumnNumber(), 2);
+            assertEquals(((ParsingException) rootCauses[1]).getLineNumber(), 1);
+            assertEquals(((ParsingException) rootCauses[1]).getColumnNumber(), 2);

         }

@@ -140,9 +139,9 @@ public class ESExceptionTests extends ESTestCase {

     public void testDeduplicate() throws IOException {
         {
-            ShardSearchFailure failure = new ShardSearchFailure(new TestQueryParsingException(new Index("foo"), "foobar", null),
+            ShardSearchFailure failure = new ShardSearchFailure(new TestParsingException(new Index("foo"), "foobar", null),
                     new SearchShardTarget("node_1", "foo", 1));
-            ShardSearchFailure failure1 = new ShardSearchFailure(new TestQueryParsingException(new Index("foo"), "foobar", null),
+            ShardSearchFailure failure1 = new ShardSearchFailure(new TestParsingException(new Index("foo"), "foobar", null),
                     new SearchShardTarget("node_1", "foo", 2));
             SearchPhaseExecutionException ex = new SearchPhaseExecutionException("search", "all shards failed", new ShardSearchFailure[]{failure, failure1});
             XContentBuilder builder = XContentFactory.jsonBuilder();

@@ -153,11 +152,11 @@ public class ESExceptionTests extends ESTestCase {
             assertEquals(expected, builder.string());
         }
         {
-            ShardSearchFailure failure = new ShardSearchFailure(new TestQueryParsingException(new Index("foo"), "foobar", null),
+            ShardSearchFailure failure = new ShardSearchFailure(new TestParsingException(new Index("foo"), "foobar", null),
                     new SearchShardTarget("node_1", "foo", 1));
-            ShardSearchFailure failure1 = new ShardSearchFailure(new TestQueryParsingException(new Index("foo1"), "foobar", null),
+            ShardSearchFailure failure1 = new ShardSearchFailure(new TestParsingException(new Index("foo1"), "foobar", null),
                     new SearchShardTarget("node_1", "foo1", 1));
-            ShardSearchFailure failure2 = new ShardSearchFailure(new TestQueryParsingException(new Index("foo1"), "foobar", null),
+            ShardSearchFailure failure2 = new ShardSearchFailure(new TestParsingException(new Index("foo1"), "foobar", null),
                     new SearchShardTarget("node_1", "foo1", 2));
             SearchPhaseExecutionException ex = new SearchPhaseExecutionException("search", "all shards failed", new ShardSearchFailure[]{failure, failure1, failure2});
             XContentBuilder builder = XContentFactory.jsonBuilder();

@@ -220,7 +219,7 @@ public class ESExceptionTests extends ESTestCase {
         }

         {
-            QueryParsingException ex = new TestQueryParsingException(new Index("foo"), 1, 2, "foobar", null);
+            ParsingException ex = new TestParsingException(new Index("foo"), 1, 2, "foobar", null);
             XContentBuilder builder = XContentFactory.jsonBuilder();
             builder.startObject();
             ElasticsearchException.toXContent(builder, PARAMS, ex);

@@ -246,7 +245,7 @@ public class ESExceptionTests extends ESTestCase {
         }

         { // render header
-            QueryParsingException ex = new TestQueryParsingException(new Index("foo"), 1, 2, "foobar", null);
+            ParsingException ex = new TestParsingException(new Index("foo"), 1, 2, "foobar", null);
             ex.addHeader("test", "some value");
             ex.addHeader("test_multi", "some value", "another value");
             XContentBuilder builder = XContentFactory.jsonBuilder();

@@ -262,11 +261,11 @@ public class ESExceptionTests extends ESTestCase {

     public void testSerializeElasticsearchException() throws IOException {
         BytesStreamOutput out = new BytesStreamOutput();
-        QueryParsingException ex = new QueryParsingException(new Index("foo"), 1, 2, "foobar", null);
+        ParsingException ex = new ParsingException(new Index("foo"), 1, 2, "foobar", null);
         out.writeThrowable(ex);

         StreamInput in = StreamInput.wrap(out.bytes());
-        QueryParsingException e = in.readThrowable();
+        ParsingException e = in.readThrowable();
         assertEquals(ex.getIndex(), e.getIndex());
         assertEquals(ex.getMessage(), e.getMessage());
         assertEquals(ex.getLineNumber(), e.getLineNumber());

@@ -275,19 +274,19 @@ public class ESExceptionTests extends ESTestCase {

     public void testSerializeUnknownException() throws IOException {
         BytesStreamOutput out = new BytesStreamOutput();
-        QueryParsingException queryParsingException = new QueryParsingException(new Index("foo"), 1, 2, "foobar", null);
-        Throwable ex = new Throwable("wtf", queryParsingException);
+        ParsingException parsingException = new ParsingException(new Index("foo"), 1, 2, "foobar", null);
+        Throwable ex = new Throwable("wtf", parsingException);
         out.writeThrowable(ex);

         StreamInput in = StreamInput.wrap(out.bytes());
         Throwable throwable = in.readThrowable();
         assertEquals("wtf", throwable.getMessage());
         assertTrue(throwable instanceof ElasticsearchException);
-        QueryParsingException e = (QueryParsingException)throwable.getCause();
-        assertEquals(queryParsingException.getIndex(), e.getIndex());
-        assertEquals(queryParsingException.getMessage(), e.getMessage());
-        assertEquals(queryParsingException.getLineNumber(), e.getLineNumber());
-        assertEquals(queryParsingException.getColumnNumber(), e.getColumnNumber());
+        ParsingException e = (ParsingException)throwable.getCause();
+        assertEquals(parsingException.getIndex(), e.getIndex());
+        assertEquals(parsingException.getMessage(), e.getMessage());
+        assertEquals(parsingException.getLineNumber(), e.getLineNumber());
+        assertEquals(parsingException.getColumnNumber(), e.getColumnNumber());
     }

     public void testWriteThrowable() throws IOException {

@@ -310,7 +309,7 @@ public class ESExceptionTests extends ESTestCase {
                 new OutOfMemoryError("no memory left"),
                 new AlreadyClosedException("closed!!", new NullPointerException()),
                 new LockObtainFailedException("can't lock directory", new NullPointerException()),
-                new Throwable("this exception is unknown", new QueryParsingException(new Index("foo"), 1, 2, "foobar", null) ), // somethin unknown
+                new Throwable("this exception is unknown", new ParsingException(new Index("foo"), 1, 2, "foobar", null) ), // somethin unknown
         };
         for (Throwable t : causes) {
             BytesStreamOutput out = new BytesStreamOutput();
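The serialization behaviour that testSerializeElasticsearchException relies on can be reduced to the following standalone sketch. The stream calls and accessors are exactly the ones visible in the hunks above; the wrapper class and `main` method are illustrative only.

```java
import java.io.IOException;

import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.index.Index;

public class ParsingExceptionRoundTrip {
    public static void main(String[] args) throws IOException {
        // Build the renamed exception the same way the test does.
        ParsingException original = new ParsingException(new Index("foo"), 1, 2, "foobar", null);

        // Write it out through the stream API used by the test.
        BytesStreamOutput out = new BytesStreamOutput();
        out.writeThrowable(original);

        // Read it back and confirm that index and position information survive the round trip.
        StreamInput in = StreamInput.wrap(out.bytes());
        ParsingException copy = in.readThrowable();
        System.out.println(original.getIndex().equals(copy.getIndex()));          // true
        System.out.println(original.getLineNumber() == copy.getLineNumber());     // true
        System.out.println(original.getColumnNumber() == copy.getColumnNumber()); // true
    }
}
```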
@@ -60,7 +60,8 @@ import org.elasticsearch.index.engine.CreateFailedEngineException;
 import org.elasticsearch.index.engine.IndexFailedEngineException;
 import org.elasticsearch.index.engine.RecoveryEngineException;
 import org.elasticsearch.index.mapper.MergeMappingException;
-import org.elasticsearch.index.query.QueryParsingException;
+import org.elasticsearch.common.ParsingException;
+import org.elasticsearch.index.query.TestParsingException;
 import org.elasticsearch.index.shard.IllegalIndexShardStateException;
 import org.elasticsearch.index.shard.IndexShardState;
 import org.elasticsearch.index.shard.ShardId;

@@ -110,7 +111,7 @@ public class ExceptionSerializationTests extends ESTestCase {
         final Path startPath = PathUtils.get(ElasticsearchException.class.getProtectionDomain().getCodeSource().getLocation().toURI()).resolve("org").resolve("elasticsearch");
         final Set<? extends Class> ignore = Sets.newHashSet(
                 org.elasticsearch.test.rest.parser.RestTestParseException.class,
-                org.elasticsearch.index.query.TestQueryParsingException.class,
+                TestParsingException.class,
                 org.elasticsearch.test.rest.client.RestException.class,
                 CancellableThreadsTests.CustomException.class,
                 org.elasticsearch.rest.BytesRestResponseTests.WithHeadersException.class,

@@ -225,14 +226,14 @@ public class ExceptionSerializationTests extends ESTestCase {
         assertNull(serialize.getCause());
     }

-    public void testQueryParsingException() throws IOException {
-        QueryParsingException ex = serialize(new QueryParsingException(new Index("foo"), 1, 2, "fobar", null));
+    public void testParsingException() throws IOException {
+        ParsingException ex = serialize(new ParsingException(new Index("foo"), 1, 2, "fobar", null));
         assertEquals(ex.getIndex(), "foo");
         assertEquals(ex.getMessage(), "fobar");
         assertEquals(ex.getLineNumber(),1);
         assertEquals(ex.getColumnNumber(), 2);

-        ex = serialize(new QueryParsingException(null, 1, 2, null, null));
+        ex = serialize(new ParsingException(null, 1, 2, null, null));
         assertNull(ex.getIndex());
         assertNull(ex.getMessage());
         assertEquals(ex.getLineNumber(),1);
@@ -22,6 +22,7 @@ package org.elasticsearch.index.query;

 import org.apache.lucene.search.NumericRangeQuery;
 import org.apache.lucene.search.Query;
+import org.elasticsearch.common.ParsingException;
 import org.elasticsearch.common.bytes.BytesArray;
 import org.elasticsearch.common.compress.CompressedXContent;
 import org.elasticsearch.common.inject.Injector;

@@ -83,7 +84,7 @@ public class IndexQueryParserFilterDateRangeTimezoneTests extends ESSingleNodeTe
             SearchContext.setCurrent(new TestSearchContext());
             queryParser.parse(query).query();
             fail("A Range Filter on a numeric field with a TimeZone should raise a QueryParsingException");
-        } catch (QueryParsingException e) {
+        } catch (ParsingException e) {
             // We expect it
         } finally {
             SearchContext.removeCurrent();

@@ -120,7 +121,7 @@ public class IndexQueryParserFilterDateRangeTimezoneTests extends ESSingleNodeTe
             SearchContext.setCurrent(new TestSearchContext());
             queryParser.parse(query).query();
             fail("A Range Query on a numeric field with a TimeZone should raise a QueryParsingException");
-        } catch (QueryParsingException e) {
+        } catch (ParsingException e) {
             // We expect it
         } finally {
             SearchContext.removeCurrent();
@@ -66,6 +66,7 @@ import org.elasticsearch.action.termvectors.MultiTermVectorsResponse;
 import org.elasticsearch.action.termvectors.TermVectorsRequest;
 import org.elasticsearch.action.termvectors.TermVectorsResponse;
 import org.elasticsearch.cluster.metadata.MetaData;
+import org.elasticsearch.common.ParsingException;
 import org.elasticsearch.common.bytes.BytesArray;
 import org.elasticsearch.common.compress.CompressedXContent;
 import org.elasticsearch.common.lucene.search.MoreLikeThisQuery;

@@ -320,7 +321,7 @@ public class SimpleIndexQueryParserTests extends ESSingleNodeTestCase {
         try {
             queryParser.parse(copyToStringFromClasspath("/org/elasticsearch/index/query/query-timezone-incorrect.json"));
             fail("we expect a QueryParsingException as we are providing an unknown time_zome");
-        } catch (QueryParsingException e) {
+        } catch (ParsingException e) {
             // We expect this one
         }
     }

@@ -342,7 +343,7 @@ public class SimpleIndexQueryParserTests extends ESSingleNodeTestCase {
         try {
             queryParser.parse(query).query();
             fail("did not hit exception");
-        } catch (QueryParsingException qpe) {
+        } catch (ParsingException qpe) {
             // expected
             assertTrue(qpe.getCause() instanceof TooComplexToDeterminizeException);
         }

@@ -468,7 +469,7 @@ public class SimpleIndexQueryParserTests extends ESSingleNodeTestCase {
         assertThat(fieldQuery.getTerm().bytes(), equalTo(indexedValueForSearch(34l)));
     }

-    @Test(expected = QueryParsingException.class)
+    @Test(expected = ParsingException.class)
     public void testTermQueryArrayInvalid() throws IOException {
         IndexQueryParserService queryParser = queryParser();
         String query = copyToStringFromClasspath("/org/elasticsearch/index/query/term-array-invalid.json");

@@ -921,7 +922,7 @@ public class SimpleIndexQueryParserTests extends ESSingleNodeTestCase {
         try {
             queryParser.parse(query).query();
             fail();
-        } catch (QueryParsingException ex) {
+        } catch (ParsingException ex) {
             assertThat(ex.getMessage(), equalTo("[terms] query does not support multiple fields"));
         }
     }

@@ -935,7 +936,7 @@ public class SimpleIndexQueryParserTests extends ESSingleNodeTestCase {
         try {
             queryParser.parse(query).query();
             fail();
-        } catch (QueryParsingException ex) {
+        } catch (ParsingException ex) {
             assertThat(ex.getMessage(), equalTo("[terms] query does not support multiple fields"));
         }
     }

@@ -967,7 +968,7 @@ public class SimpleIndexQueryParserTests extends ESSingleNodeTestCase {
         try {
             queryParser.parse(query);
             fail("Expected Query Parsing Exception but did not happen");
-        } catch (QueryParsingException e) {
+        } catch (ParsingException e) {
             assertThat(e.getMessage(), containsString("[term] query does not support different field names, use [bool] query instead"));
         }
     }

@@ -1747,7 +1748,7 @@ public class SimpleIndexQueryParserTests extends ESSingleNodeTestCase {
             try {
                 queryParser.parse(query).query();
                 fail("parsing a broken geo_polygon filter didn't fail as expected while parsing: " + brokenFile);
-            } catch (QueryParsingException e) {
+            } catch (ParsingException e) {
                 // success!
             }
         }

@@ -1883,7 +1884,7 @@ public class SimpleIndexQueryParserTests extends ESSingleNodeTestCase {
         assertTrue(ectQuery.isCoordDisabled());
     }

-    @Test(expected = QueryParsingException.class)
+    @Test(expected = ParsingException.class)
     public void assureMalformedThrowsException() throws IOException {
         IndexQueryParserService queryParser;
         queryParser = queryParser();

@@ -1906,10 +1907,10 @@ public class SimpleIndexQueryParserTests extends ESSingleNodeTestCase {
     public void testBadTypeMatchQuery() throws Exception {
         IndexQueryParserService queryParser = queryParser();
         String query = copyToStringFromClasspath("/org/elasticsearch/index/query/match-query-bad-type.json");
-        QueryParsingException expectedException = null;
+        ParsingException expectedException = null;
         try {
             queryParser.parse(query).query();
-        } catch (QueryParsingException qpe) {
+        } catch (ParsingException qpe) {
             expectedException = qpe;
         }
         assertThat(expectedException, notNullValue());

@@ -1927,10 +1928,10 @@ public class SimpleIndexQueryParserTests extends ESSingleNodeTestCase {
     public void testBadTypeMultiMatchQuery() throws Exception {
         IndexQueryParserService queryParser = queryParser();
         String query = copyToStringFromClasspath("/org/elasticsearch/index/query/multiMatch-query-bad-type.json");
-        QueryParsingException expectedException = null;
+        ParsingException expectedException = null;
         try {
             queryParser.parse(query).query();
-        } catch (QueryParsingException qpe) {
+        } catch (ParsingException qpe) {
             expectedException = qpe;
         }
         assertThat(expectedException, notNullValue());

@@ -2018,7 +2019,7 @@ public class SimpleIndexQueryParserTests extends ESSingleNodeTestCase {
         try {
             queryParser.parse(query).query();
             fail("FunctionScoreQueryParser should throw an exception here because two functions in body are not allowed.");
-        } catch (QueryParsingException e) {
+        } catch (ParsingException e) {
             assertThat(e.getDetailedMessage(), containsString("use [functions] array if you want to define several functions."));
         }
     }

@@ -2063,7 +2064,7 @@ public class SimpleIndexQueryParserTests extends ESSingleNodeTestCase {
         try {
             queryParser.parse(query).query();
             fail("Expect exception here because array of functions and one weight in body is not allowed.");
-        } catch (QueryParsingException e) {
+        } catch (ParsingException e) {
             assertThat(e.getDetailedMessage(), containsString("you can either define [functions] array or a single function, not both. already found [functions] array, now encountering [weight]."));
         }
         query = jsonBuilder().startObject().startObject("function_score")

@@ -2075,7 +2076,7 @@ public class SimpleIndexQueryParserTests extends ESSingleNodeTestCase {
         try {
             queryParser.parse(query).query();
             fail("Expect exception here because array of functions and one weight in body is not allowed.");
-        } catch (QueryParsingException e) {
+        } catch (ParsingException e) {
             assertThat(e.getDetailedMessage(), containsString("you can either define [functions] array or a single function, not both. already found [weight], now encountering [functions]."));
         }
     }
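A short sketch of the two accessors these assertions distinguish: `getMessage()` is compared with `equalTo(...)`, while `getDetailedMessage()` (a longer rendering inherited from ElasticsearchException) is only matched with `containsString(...)`. The message text is taken from the tests above; the wrapper class and the chosen line/column values are illustrative.

```java
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.index.Index;

public class ParsingExceptionMessages {
    public static void main(String[] args) {
        ParsingException e = new ParsingException(new Index("test"), 1, 2,
                "[terms] query does not support multiple fields", null);

        System.out.println(e.getMessage());         // the bare message the tests match with equalTo(...)
        System.out.println(e.getDetailedMessage()); // a longer rendering, matched with containsString(...)
        System.out.println(e.getLineNumber() + ":" + e.getColumnNumber()); // 1:2
    }
}
```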
@@ -23,6 +23,7 @@ import org.apache.lucene.search.Query;
 import org.elasticsearch.Version;
 import org.elasticsearch.cluster.ClusterService;
 import org.elasticsearch.cluster.metadata.IndexMetaData;
+import org.elasticsearch.common.ParsingException;
 import org.elasticsearch.common.inject.AbstractModule;
 import org.elasticsearch.common.inject.Injector;
 import org.elasticsearch.common.inject.ModulesBuilder;

@@ -142,7 +143,7 @@ public class TemplateQueryParserTests extends ESTestCase {
      * expressed as a single string but still it expects only the query
      * specification (thus this test should fail with specific exception).
      */
-    @Test(expected = QueryParsingException.class)
+    @Test(expected = ParsingException.class)
     public void testParseTemplateFailsToParseCompleteQueryAsSingleString() throws IOException {
         String templateString = "{" + " \"inline\" : \"{ \\\"size\\\": \\\"{{size}}\\\", \\\"query\\\":{\\\"match_all\\\":{}}}\","
                 + " \"params\":{" + " \"size\":2" + " }\n" + "}";
@@ -19,19 +19,20 @@

 package org.elasticsearch.index.query;

+import org.elasticsearch.common.ParsingException;
 import org.elasticsearch.index.Index;

 /**
  * Class used to avoid dragging QueryContext into unit testing framework for
  * basic exception handling
  */
-public class TestQueryParsingException extends QueryParsingException {
+public class TestParsingException extends ParsingException {

-    public TestQueryParsingException(Index index, int line, int col, String msg, Throwable cause) {
+    public TestParsingException(Index index, int line, int col, String msg, Throwable cause) {
         super(index, line, col, msg, cause);
     }

-    public TestQueryParsingException(Index index, String msg, Throwable cause) {
+    public TestParsingException(Index index, String msg, Throwable cause) {
         super(index, UNKNOWN_POSITION, UNKNOWN_POSITION, msg, cause);
     }
 }
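As a usage sketch (not part of the commit), this is how the renamed test helper is typically consumed by the tests in this change: wrapped into a ShardSearchFailure. The constructor arguments mirror the test code above; the wrapper class and `main` method are made up.

```java
import org.elasticsearch.action.search.ShardSearchFailure;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.query.TestParsingException;
import org.elasticsearch.search.SearchShardTarget;

public class TestParsingExceptionDemo {
    public static void main(String[] args) {
        // No line/column available, so the (Index, String, Throwable) overload falls back to UNKNOWN_POSITION.
        TestParsingException cause = new TestParsingException(new Index("foo"), "foobar", null);

        // ESExceptionTests and BytesRestResponseTests wrap the exception exactly like this.
        ShardSearchFailure failure = new ShardSearchFailure(cause,
                new SearchShardTarget("node_1", "foo", 1));
        System.out.println(failure);
    }
}
```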
@@ -28,7 +28,7 @@ import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.index.query.QueryBuilder;
 import org.elasticsearch.index.query.QueryParseContext;
 import org.elasticsearch.index.query.QueryParser;
-import org.elasticsearch.index.query.QueryParsingException;
+import org.elasticsearch.common.ParsingException;
 import org.elasticsearch.indices.IndicesModule;
 import org.elasticsearch.plugins.Plugin;

@@ -64,7 +64,7 @@ public class DummyQueryParserPlugin extends Plugin {
         }

         @Override
-        public Query parse(QueryParseContext parseContext) throws IOException, QueryParsingException {
+        public Query parse(QueryParseContext parseContext) throws IOException, ParsingException {
             XContentParser.Token token = parseContext.parser().nextToken();
             assert token == XContentParser.Token.END_OBJECT;
             return new DummyQuery(parseContext.isFilter());
@@ -59,7 +59,7 @@ import org.elasticsearch.index.mapper.Mapping;
 import org.elasticsearch.index.mapper.ParseContext;
 import org.elasticsearch.index.mapper.ParsedDocument;
 import org.elasticsearch.index.mapper.internal.UidFieldMapper;
-import org.elasticsearch.index.query.QueryParsingException;
+import org.elasticsearch.common.ParsingException;
 import org.elasticsearch.index.settings.IndexSettingsService;
 import org.elasticsearch.index.store.Store;
 import org.elasticsearch.index.translog.Translog;

@@ -75,7 +75,6 @@ import java.nio.file.Files;
 import java.nio.file.Path;
 import java.nio.file.StandardCopyOption;
 import java.util.Arrays;
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.Set;
 import java.util.concurrent.ExecutionException;

@@ -406,7 +405,7 @@ public class IndexShardTests extends ESSingleNodeTestCase {
             shard.engine().config().getTranslogRecoveryPerformer().performRecoveryOperation(shard.engine(), new Translog.DeleteByQuery(new Engine.DeleteByQuery(null, new BytesArray("{\"term\" : { \"user\" : \"kimchy\" }}"), null, null, null, Engine.Operation.Origin.RECOVERY, 0, "person")), false);
             assertTrue(version.onOrBefore(Version.V_1_0_0_Beta2));
             numDocs = 0;
-        } catch (QueryParsingException ex) {
+        } catch (ParsingException ex) {
             assertTrue(version.after(Version.V_1_0_0_Beta2));
         } finally {
             shard.state = IndexShardState.STARTED;
@@ -25,7 +25,7 @@ import org.elasticsearch.common.inject.ModuleTestCase;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.index.query.QueryParseContext;
 import org.elasticsearch.index.query.QueryParser;
-import org.elasticsearch.index.query.QueryParsingException;
+import org.elasticsearch.common.ParsingException;
 import org.elasticsearch.index.query.TermQueryParser;

 import java.io.IOException;

@@ -40,7 +40,7 @@ public class IndicesModuleTests extends ModuleTestCase {
             return new String[] {"fake-query-parser"};
         }
         @Override
-        public Query parse(QueryParseContext parseContext) throws IOException, QueryParsingException {
+        public Query parse(QueryParseContext parseContext) throws IOException, ParsingException {
             return null;
         }
     }
@@ -32,7 +32,7 @@ import org.elasticsearch.cluster.metadata.AliasMetaData;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.index.query.QueryBuilders;
-import org.elasticsearch.index.query.QueryParsingException;
+import org.elasticsearch.common.ParsingException;
 import org.elasticsearch.indices.IndexTemplateAlreadyExistsException;
 import org.elasticsearch.indices.InvalidAliasNameException;
 import org.elasticsearch.search.SearchHit;

@@ -505,7 +505,7 @@ public class SimpleIndexTemplateIT extends ESIntegTestCase {
             fail("index creation should have failed due to invalid alias filter in matching index template");
         } catch(IllegalArgumentException e) {
             assertThat(e.getMessage(), equalTo("failed to parse filter for alias [invalid_alias]"));
-            assertThat(e.getCause(), instanceOf(QueryParsingException.class));
+            assertThat(e.getCause(), instanceOf(ParsingException.class));
             assertThat(e.getCause().getMessage(), equalTo("No query registered for [invalid]"));
         }
     }
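The alias assertion above expects the parse failure to arrive as the cause of an IllegalArgumentException. A minimal sketch of that shape, with the index name and position values made up for illustration (the two messages are the ones asserted in the test):

```java
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.index.Index;

public class AliasFilterFailureShape {
    public static void main(String[] args) {
        // The alias-level failure wraps the generic ParsingException as its cause.
        Throwable failure = new IllegalArgumentException("failed to parse filter for alias [invalid_alias]",
                new ParsingException(new Index("test_index"), 1, 2, "No query registered for [invalid]", null));

        if (failure.getCause() instanceof ParsingException) {
            System.out.println("parse failure: " + failure.getCause().getMessage());
        }
    }
}
```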
@@ -23,7 +23,7 @@ import org.elasticsearch.action.index.IndexRequestBuilder;
 import org.elasticsearch.action.percolate.PercolateResponse;
 import org.elasticsearch.action.percolate.PercolateSourceBuilder;
 import org.elasticsearch.index.percolator.PercolatorException;
-import org.elasticsearch.index.query.QueryParsingException;
+import org.elasticsearch.common.ParsingException;
 import org.elasticsearch.test.ESIntegTestCase;
 import org.junit.Test;

@@ -67,7 +67,7 @@ public class PercolatorBackwardsCompatibilityIT extends ESIntegTestCase {
             fail();
         } catch (PercolatorException e) {
             e.printStackTrace();
-            assertThat(e.getRootCause(), instanceOf(QueryParsingException.class));
+            assertThat(e.getRootCause(), instanceOf(ParsingException.class));
         }
     }
@@ -43,7 +43,7 @@ import org.elasticsearch.index.engine.VersionConflictEngineException;
 import org.elasticsearch.index.percolator.PercolatorException;
 import org.elasticsearch.index.query.MatchQueryBuilder;
 import org.elasticsearch.index.query.QueryBuilders;
-import org.elasticsearch.index.query.QueryParsingException;
+import org.elasticsearch.common.ParsingException;
 import org.elasticsearch.index.query.functionscore.weight.WeightBuilder;
 import org.elasticsearch.index.query.support.QueryInnerHitBuilder;
 import org.elasticsearch.rest.RestStatus;

@@ -1737,7 +1737,7 @@ public class PercolatorIT extends ESIntegTestCase {
                     .get();
             fail();
         } catch (PercolatorException e) {
-            assertThat(e.getRootCause(), instanceOf(QueryParsingException.class));
+            assertThat(e.getRootCause(), instanceOf(ParsingException.class));
         }

         try {

@@ -1746,7 +1746,7 @@ public class PercolatorIT extends ESIntegTestCase {
                     .get();
             fail();
         } catch (PercolatorException e) {
-            assertThat(e.getRootCause(), instanceOf(QueryParsingException.class));
+            assertThat(e.getRootCause(), instanceOf(ParsingException.class));
         }
     }

@@ -1989,7 +1989,7 @@ public class PercolatorIT extends ESIntegTestCase {
                     .execute().actionGet();
             fail("Expected a parse error, because inner_hits isn't supported in the percolate api");
         } catch (Exception e) {
-            assertThat(e.getCause(), instanceOf(QueryParsingException.class));
+            assertThat(e.getCause(), instanceOf(ParsingException.class));
             assertThat(e.getCause().getMessage(), containsString("inner_hits unsupported"));
         }
     }
@@ -24,7 +24,7 @@ import org.elasticsearch.ExceptionsHelper;
 import org.elasticsearch.action.search.SearchPhaseExecutionException;
 import org.elasticsearch.action.search.ShardSearchFailure;
 import org.elasticsearch.index.Index;
-import org.elasticsearch.index.query.TestQueryParsingException;
+import org.elasticsearch.index.query.TestParsingException;
 import org.elasticsearch.search.SearchShardTarget;
 import org.elasticsearch.test.ESTestCase;
 import org.elasticsearch.test.rest.FakeRestRequest;

@@ -142,9 +142,9 @@ public class BytesRestResponseTests extends ESTestCase {
     public void testConvert() throws IOException {
         RestRequest request = new FakeRestRequest();
         RestChannel channel = new DetailedExceptionRestChannel(request);
-        ShardSearchFailure failure = new ShardSearchFailure(new TestQueryParsingException(new Index("foo"), "foobar", null),
+        ShardSearchFailure failure = new ShardSearchFailure(new TestParsingException(new Index("foo"), "foobar", null),
                 new SearchShardTarget("node_1", "foo", 1));
-        ShardSearchFailure failure1 = new ShardSearchFailure(new TestQueryParsingException(new Index("foo"), "foobar", null),
+        ShardSearchFailure failure1 = new ShardSearchFailure(new TestParsingException(new Index("foo"), "foobar", null),
                 new SearchShardTarget("node_1", "foo", 2));
         SearchPhaseExecutionException ex = new SearchPhaseExecutionException("search", "all shards failed", new ShardSearchFailure[] {failure, failure1});
         BytesRestResponse response = new BytesRestResponse(channel, new RemoteTransportException("foo", ex));
@@ -29,7 +29,7 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.index.query.QueryBuilders;
-import org.elasticsearch.index.query.QueryParsingException;
+import org.elasticsearch.common.ParsingException;
 import org.elasticsearch.plugins.Plugin;
 import org.elasticsearch.script.Script;
 import org.elasticsearch.script.ScriptModule;

@@ -237,7 +237,7 @@ public class SignificantTermsSignificanceScoreIT extends ESIntegTestCase {

         @Override
         public SignificanceHeuristic parse(XContentParser parser, ParseFieldMatcher parseFieldMatcher, SearchContext context)
-                throws IOException, QueryParsingException {
+                throws IOException, ParsingException {
             parser.nextToken();
             return new SimpleHeuristic();
         }