Merge branch 'feature/query-refactoring' into feature/search-request-refactoring

# Conflicts:
#	core/src/main/java/org/elasticsearch/index/query/IndexQueryParserService.java
#	core/src/main/java/org/elasticsearch/index/query/TemplateQueryParser.java
Colin Goodheart-Smithe 2015-09-24 14:33:35 +01:00
commit a9c6e4c051
200 changed files with 1129 additions and 3901 deletions

View File

@@ -145,7 +145,7 @@ public class AliasValidator extends AbstractComponent {
         QueryShardContext context = indexQueryParserService.getShardContext();
         try {
             context.reset(parser);
-            context.parseContext().parseInnerFilter();
+            context.parseContext().parseInnerQueryBuilder().toFilter(context);
         } finally {
             context.reset(null);
             parser.close();

View File

@@ -109,10 +109,7 @@ public abstract class AbstractQueryBuilder<QB extends AbstractQueryBuilder> exte
         return result;
     }
 
-    //norelease to be made abstract once all query builders override doToQuery providing their own specific implementation.
-    protected Query doToQuery(QueryShardContext context) throws IOException {
-        return context.indexQueryParserService().indicesQueriesRegistry().queryParsers().get(getName()).parse(context);
-    }
+    protected abstract Query doToQuery(QueryShardContext context) throws IOException;
 
     /**
      * Returns the query name for the query.

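For illustration only, not part of this commit: with doToQuery now abstract, each concrete builder produces its Lucene query itself instead of delegating back to the registered parser. A minimal sketch of a hypothetical builder (other required AbstractQueryBuilder methods are deliberately omitted):

import java.io.IOException;

import org.apache.lucene.index.Term;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;

// Hypothetical example builder; the name, fields and constructor are illustrative only.
public class ExampleTermQueryBuilder extends AbstractQueryBuilder<ExampleTermQueryBuilder> {

    private final String fieldName;
    private final String value;

    public ExampleTermQueryBuilder(String fieldName, String value) {
        this.fieldName = fieldName;
        this.value = value;
    }

    @Override
    protected Query doToQuery(QueryShardContext context) throws IOException {
        // Build the Lucene query directly in the builder.
        return new TermQuery(new Term(fieldName, value));
    }

    // doXContent, getWriteableName, serialization and equality methods omitted for brevity.
}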
View File

@ -1,39 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.query;
import org.apache.lucene.search.Query;
import java.io.IOException;
/**
* Class used during the query parsers refactoring. Will be removed once we can parse search requests on the coordinating node.
* All query parsers that have a refactored "fromXContent" method can be changed to extend this instead of {@link BaseQueryParserTemp}.
* Keeps old {@link QueryParser#parse(QueryShardContext)} method as a stub delegating to
* {@link QueryParser#fromXContent(QueryParseContext)} and {@link QueryBuilder#toQuery(QueryShardContext)}}
*/
//norelease needs to be removed once we parse search requests on the coordinating node, as the parse method is not needed anymore at that point.
public abstract class BaseQueryParser<QB extends QueryBuilder<QB>> implements QueryParser<QB> {
@Override
public final Query parse(QueryShardContext context) throws IOException {
return fromXContent(context.parseContext()).toQuery(context);
}
}

View File

@ -1,39 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.query;
import org.apache.lucene.search.Query;
import java.io.IOException;
/**
* This class with method impl is an intermediate step in the query parsers refactoring.
* Provides a fromXContent default implementation for query parsers that don't have yet a
* specific fromXContent implementation that returns a QueryBuilder.
*/
//norelease to be removed once all queries are moved over to extend BaseQueryParser
public abstract class BaseQueryParserTemp implements QueryParser {
@Override
public QueryBuilder fromXContent(QueryParseContext parseContext) throws IOException {
Query query = parse(parseContext.shardContext());
return new QueryWrappingQueryBuilder(query);
}
}

View File

@@ -191,7 +191,7 @@ public class BoolQueryBuilder extends AbstractQueryBuilder<BoolQueryBuilder> {
     /**
      * @return the string representation of the minimumShouldMatch settings for this query
      */
-    public String minimumNumberShouldMatch() {
+    public String minimumShouldMatch() {
         return this.minimumShouldMatch;
     }

View File

@@ -32,7 +32,7 @@ import java.util.List;
 /**
  * Parser for bool query
  */
-public class BoolQueryParser extends BaseQueryParser<BoolQueryBuilder> {
+public class BoolQueryParser implements QueryParser<BoolQueryBuilder> {
 
     @Inject
     public BoolQueryParser(Settings settings) {
@@ -78,12 +78,12 @@ public class BoolQueryParser extends BaseQueryParser<BoolQueryBuilder> {
                         shouldClauses.add(query);
                         break;
                     case "filter":
-                        query = parseContext.parseInnerFilterToQueryBuilder();
+                        query = parseContext.parseInnerQueryBuilder();
                         filterClauses.add(query);
                         break;
                     case "must_not":
                     case "mustNot":
-                        query = parseContext.parseInnerFilterToQueryBuilder();
+                        query = parseContext.parseInnerQueryBuilder();
                         mustNotClauses.add(query);
                         break;
                     default:
@@ -101,12 +101,12 @@ public class BoolQueryParser extends BaseQueryParser<BoolQueryBuilder> {
                         shouldClauses.add(query);
                         break;
                     case "filter":
-                        query = parseContext.parseInnerFilterToQueryBuilder();
+                        query = parseContext.parseInnerQueryBuilder();
                         filterClauses.add(query);
                         break;
                     case "must_not":
                     case "mustNot":
-                        query = parseContext.parseInnerFilterToQueryBuilder();
+                        query = parseContext.parseInnerQueryBuilder();
                         mustNotClauses.add(query);
                         break;
                     default:

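For illustration only, not part of this commit: the parsers touched in this change now implement QueryParser<QB> directly and build a typed QueryBuilder in fromXContent. A stripped-down hypothetical parser, reusing the ExampleTermQueryBuilder sketched earlier:

import java.io.IOException;

import org.elasticsearch.common.xcontent.XContentParser;

// Hypothetical example parser; real parsers validate tokens and support many more options.
public class ExampleTermQueryParser implements QueryParser<ExampleTermQueryBuilder> {

    @Override
    public String[] names() {
        return new String[]{"example_term"};
    }

    @Override
    public ExampleTermQueryBuilder fromXContent(QueryParseContext parseContext) throws IOException {
        XContentParser parser = parseContext.parser();
        String fieldName = null;
        String value = null;
        String currentFieldName = null;
        XContentParser.Token token;
        while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
            if (token == XContentParser.Token.FIELD_NAME) {
                currentFieldName = parser.currentName();
            } else if (token.isValue()) {
                fieldName = currentFieldName;
                value = parser.text();
            }
        }
        return new ExampleTermQueryBuilder(fieldName, value);
    }

    // Any further QueryParser methods (for example a prototype accessor) are omitted here.
}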
View File

@@ -20,7 +20,6 @@
 package org.elasticsearch.index.query;
 
 import org.elasticsearch.common.ParsingException;
-import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.xcontent.XContentParser;
 
 import java.io.IOException;
@@ -28,7 +27,7 @@ import java.io.IOException;
 /**
  * Parser for boosting query
  */
-public class BoostingQueryParser extends BaseQueryParser<BoostingQueryBuilder> {
+public class BoostingQueryParser implements QueryParser<BoostingQueryBuilder> {
 
     @Override
     public String[] names() {

View File

@@ -27,7 +27,7 @@ import java.io.IOException;
 /**
  * Parser for common terms query
  */
-public class CommonTermsQueryParser extends BaseQueryParser<CommonTermsQueryBuilder> {
+public class CommonTermsQueryParser implements QueryParser<CommonTermsQueryBuilder> {
 
     @Override
     public String[] names() {

View File

@@ -29,7 +29,7 @@ import java.io.IOException;
 /**
  * Parser for constant_score query
  */
-public class ConstantScoreQueryParser extends BaseQueryParser<ConstantScoreQueryBuilder> {
+public class ConstantScoreQueryParser implements QueryParser<ConstantScoreQueryBuilder> {
 
     private static final ParseField INNER_QUERY_FIELD = new ParseField("filter", "query");
@@ -56,7 +56,7 @@ public class ConstantScoreQueryParser extends BaseQueryParser<ConstantScoreQuery
                 // skip
             } else if (token == XContentParser.Token.START_OBJECT) {
                 if (parseContext.parseFieldMatcher().match(currentFieldName, INNER_QUERY_FIELD)) {
-                    query = parseContext.parseInnerFilterToQueryBuilder();
+                    query = parseContext.parseInnerQueryBuilder();
                     queryFound = true;
                 } else {
                     throw new ParsingException(parser.getTokenLocation(), "[constant_score] query does not support [" + currentFieldName + "]");

View File

@@ -21,7 +21,6 @@ package org.elasticsearch.index.query;
 import org.elasticsearch.common.ParsingException;
 import org.elasticsearch.common.Strings;
-import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.xcontent.XContentParser;
 
 import java.io.IOException;
@@ -31,7 +30,7 @@ import java.util.List;
 /**
  * Parser for dis_max query
  */
-public class DisMaxQueryParser extends BaseQueryParser<DisMaxQueryBuilder> {
+public class DisMaxQueryParser implements QueryParser<DisMaxQueryBuilder> {
 
     @Override
     public String[] names() {

View File

@@ -20,7 +20,6 @@
 package org.elasticsearch.index.query;
 
 import org.elasticsearch.common.ParsingException;
-import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.xcontent.XContentParser;
 
 import java.io.IOException;
@@ -28,7 +27,7 @@ import java.io.IOException;
 /**
  * Parser for exists query
  */
-public class ExistsQueryParser extends BaseQueryParser<ExistsQueryBuilder> {
+public class ExistsQueryParser implements QueryParser<ExistsQueryBuilder> {
 
     @Override
     public String[] names() {

View File

@@ -21,14 +21,13 @@ package org.elasticsearch.index.query;
 import org.elasticsearch.common.ParsingException;
 import org.elasticsearch.common.Strings;
-import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.xcontent.XContentParser;
 
 import java.io.IOException;
 
 /**
  * Parser for field_masking_span query
  */
-public class FieldMaskingSpanQueryParser extends BaseQueryParser<FieldMaskingSpanQueryBuilder> {
+public class FieldMaskingSpanQueryParser implements QueryParser<FieldMaskingSpanQueryBuilder> {
 
     @Override
     public String[] names() {

View File

@@ -21,14 +21,11 @@ package org.elasticsearch.index.query;
 import org.elasticsearch.common.ParseField;
 import org.elasticsearch.common.ParsingException;
-import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.unit.Fuzziness;
 import org.elasticsearch.common.xcontent.XContentParser;
-import org.elasticsearch.index.query.support.QueryParsers;
 
 import java.io.IOException;
 
-public class FuzzyQueryParser extends BaseQueryParser {
+public class FuzzyQueryParser implements QueryParser<FuzzyQueryBuilder> {
 
     private static final ParseField FUZZINESS = Fuzziness.FIELD.withDeprecation("min_similarity");
@@ -38,7 +35,7 @@ public class FuzzyQueryParser extends BaseQueryParser {
     }
 
     @Override
-    public QueryBuilder fromXContent(QueryParseContext parseContext) throws IOException {
+    public FuzzyQueryBuilder fromXContent(QueryParseContext parseContext) throws IOException {
         XContentParser parser = parseContext.parser();
         XContentParser.Token token = parser.nextToken();

View File

@@ -27,7 +27,7 @@ import org.elasticsearch.common.xcontent.XContentParser;
 import java.io.IOException;
 
-public class GeoBoundingBoxQueryParser extends BaseQueryParser<GeoBoundingBoxQueryBuilder> {
+public class GeoBoundingBoxQueryParser implements QueryParser<GeoBoundingBoxQueryBuilder> {
 
     public static final String NAME = "geo_bbox";

View File

@@ -39,7 +39,7 @@ import java.io.IOException;
  * }
  * </pre>
  */
-public class GeoDistanceQueryParser extends BaseQueryParser {
+public class GeoDistanceQueryParser implements QueryParser<GeoDistanceQueryBuilder> {
 
     @Override
     public String[] names() {
@@ -47,7 +47,7 @@ public class GeoDistanceQueryParser extends BaseQueryParser {
     }
 
     @Override
-    public QueryBuilder fromXContent(QueryParseContext parseContext) throws IOException {
+    public GeoDistanceQueryBuilder fromXContent(QueryParseContext parseContext) throws IOException {
         XContentParser parser = parseContext.parser();
 
         XContentParser.Token token;

View File

@@ -37,7 +37,7 @@ import java.io.IOException;
  * }
  * </pre>
  */
-public class GeoDistanceRangeQueryParser extends BaseQueryParser<GeoDistanceRangeQueryBuilder> {
+public class GeoDistanceRangeQueryParser implements QueryParser<GeoDistanceRangeQueryBuilder> {
 
     public static final ParseField FROM_FIELD = new ParseField("from");
     public static final ParseField TO_FIELD = new ParseField("to");

View File

@@ -115,7 +115,7 @@ public class GeoPolygonQueryBuilder extends AbstractQueryBuilder<GeoPolygonQuery
             shell.add(shell.get(0));
         }
 
-        final boolean indexCreatedBeforeV2_0 = context.parseContext().shardContext().indexVersionCreated().before(Version.V_2_0_0);
+        final boolean indexCreatedBeforeV2_0 = context.indexVersionCreated().before(Version.V_2_0_0);
         // validation was not available prior to 2.x, so to support bwc
         // percolation queries we only ignore_malformed on 2.x created indexes
         if (!indexCreatedBeforeV2_0 && !ignoreMalformed) {

View File

@@ -23,7 +23,6 @@ import org.elasticsearch.common.ParseField;
 import org.elasticsearch.common.ParsingException;
 import org.elasticsearch.common.geo.GeoPoint;
 import org.elasticsearch.common.geo.GeoUtils;
-import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.common.xcontent.XContentParser.Token;
@@ -43,7 +42,7 @@ import java.util.List;
  * }
  * </pre>
  */
-public class GeoPolygonQueryParser extends BaseQueryParser<GeoPolygonQueryBuilder> {
+public class GeoPolygonQueryParser implements QueryParser<GeoPolygonQueryBuilder> {
 
     public static final ParseField COERCE_FIELD = new ParseField("coerce", "normalize");
     public static final ParseField IGNORE_MALFORMED_FIELD = new ParseField("ignore_malformed");

View File

@@ -31,7 +31,7 @@ import org.elasticsearch.common.xcontent.XContentParser;
 import java.io.IOException;
 
-public class GeoShapeQueryParser extends BaseQueryParser<GeoShapeQueryBuilder> {
+public class GeoShapeQueryParser implements QueryParser<GeoShapeQueryBuilder> {
 
     public static final ParseField SHAPE_FIELD = new ParseField("shape");
     public static final ParseField STRATEGY_FIELD = new ParseField("strategy");

View File

@@ -266,7 +266,7 @@ public class GeohashCellQuery {
         }
     }
 
-    public static class Parser extends BaseQueryParser<Builder> {
+    public static class Parser implements QueryParser<Builder> {
 
         @Inject
         public Parser() {

View File

@@ -31,7 +31,7 @@ import java.io.IOException;
 /**
  * A query parser for <tt>has_child</tt> queries.
  */
-public class HasChildQueryParser extends BaseQueryParser {
+public class HasChildQueryParser implements QueryParser<HasChildQueryBuilder> {
 
     private static final ParseField QUERY_FIELD = new ParseField("query", "filter");
@@ -41,7 +41,7 @@ public class HasChildQueryParser extends BaseQueryParser {
     }
 
     @Override
-    public QueryBuilder fromXContent(QueryParseContext parseContext) throws IOException {
+    public HasChildQueryBuilder fromXContent(QueryParseContext parseContext) throws IOException {
         XContentParser parser = parseContext.parser();
         float boost = AbstractQueryBuilder.DEFAULT_BOOST;
         String childType = null;

View File

@@ -27,7 +27,7 @@ import org.elasticsearch.index.query.support.QueryInnerHits;
 import java.io.IOException;
 
-public class HasParentQueryParser extends BaseQueryParser {
+public class HasParentQueryParser implements QueryParser<HasParentQueryBuilder> {
 
     private static final HasParentQueryBuilder PROTOTYPE = new HasParentQueryBuilder("", EmptyQueryBuilder.PROTOTYPE);
     private static final ParseField QUERY_FIELD = new ParseField("query", "filter");
@@ -40,7 +40,7 @@ public class HasParentQueryParser extends BaseQueryParser {
     }
 
     @Override
-    public QueryBuilder fromXContent(QueryParseContext parseContext) throws IOException {
+    public HasParentQueryBuilder fromXContent(QueryParseContext parseContext) throws IOException {
         XContentParser parser = parseContext.parser();
         float boost = AbstractQueryBuilder.DEFAULT_BOOST;

View File

@@ -30,7 +30,7 @@ import java.util.List;
 /**
  * Parser for ids query
  */
-public class IdsQueryParser extends BaseQueryParser<IdsQueryBuilder> {
+public class IdsQueryParser implements QueryParser<IdsQueryBuilder> {
 
     @Override
     public String[] names() {

View File

@@ -188,22 +188,6 @@ public class IndexQueryParserService extends AbstractIndexComponent {
         }
     }
 
-    public ParsedQuery parse(String source) throws ParsingException, QueryShardException {
-        XContentParser parser = null;
-        try {
-            parser = XContentFactory.xContent(source).createParser(source);
-            return innerParse(cache.get(), parser);
-        } catch (QueryShardException|ParsingException e) {
-            throw e;
-        } catch (Exception e) {
-            throw new ParsingException(parser == null ? null : parser.getTokenLocation(), "Failed to parse [" + source + "]", e);
-        } finally {
-            if (parser != null) {
-                parser.close();
-            }
-        }
-    }
-
     public ParsedQuery parse(XContentParser parser) {
         try {
             return innerParse(cache.get(), parser);
@@ -221,7 +205,7 @@ public class IndexQueryParserService extends AbstractIndexComponent {
         QueryShardContext context = cache.get();
         context.reset(parser);
         try {
-            Query filter = context.parseContext().parseInnerFilter();
+            Query filter = context.parseContext().parseInnerQueryBuilder().toFilter(context);
             if (filter == null) {
                 return null;
             }

View File

@@ -21,7 +21,6 @@ package org.elasticsearch.index.query;
 import org.elasticsearch.common.ParseField;
 import org.elasticsearch.common.ParsingException;
-import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.xcontent.XContentParser;
 
 import java.io.IOException;
@@ -31,7 +30,7 @@ import java.util.Collection;
 /**
  * Parser for {@link IndicesQueryBuilder}.
  */
-public class IndicesQueryParser extends BaseQueryParser {
+public class IndicesQueryParser implements QueryParser {
 
     private static final ParseField QUERY_FIELD = new ParseField("query", "filter");
     private static final ParseField NO_MATCH_QUERY = new ParseField("no_match_query", "no_match_filter");

View File

@@ -21,7 +21,6 @@ package org.elasticsearch.index.query;
 import org.elasticsearch.common.ParsingException;
 import org.elasticsearch.common.Strings;
-import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.xcontent.XContentParser;
 
 import java.io.IOException;
@@ -29,7 +28,7 @@ import java.io.IOException;
 /**
  * Parser for match_all query
  */
-public class MatchAllQueryParser extends BaseQueryParser<MatchAllQueryBuilder> {
+public class MatchAllQueryParser implements QueryParser<MatchAllQueryBuilder> {
 
     @Override
     public String[] names() {

View File

@@ -21,12 +21,11 @@ package org.elasticsearch.index.query;
 import org.elasticsearch.common.ParsingException;
 import org.elasticsearch.common.Strings;
-import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.xcontent.XContentParser;
 
 import java.io.IOException;
 
-public class MatchNoneQueryParser extends BaseQueryParser {
+public class MatchNoneQueryParser implements QueryParser<MatchNoneQueryBuilder> {
 
     @Override
     public String[] names() {

View File

@@ -21,7 +21,6 @@ package org.elasticsearch.index.query;
 import org.apache.lucene.search.FuzzyQuery;
 import org.elasticsearch.common.ParsingException;
-import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.unit.Fuzziness;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.index.search.MatchQuery;
@@ -32,7 +31,7 @@ import java.io.IOException;
 /**
  *
  */
-public class MatchQueryParser extends BaseQueryParser {
+public class MatchQueryParser implements QueryParser<MatchQueryBuilder> {
 
     @Override
     public String[] names() {

View File

@@ -27,7 +27,7 @@ import java.io.IOException;
 /**
  * Parser for missing query
  */
-public class MissingQueryParser extends BaseQueryParser<MissingQueryBuilder> {
+public class MissingQueryParser implements QueryParser<MissingQueryBuilder> {
 
     @Override
     public String[] names() {

View File

@@ -33,7 +33,7 @@ import java.util.List;
  *
  * The documents are provided as a set of strings and/or a list of {@link Item}.
  */
-public class MoreLikeThisQueryParser extends BaseQueryParser<MoreLikeThisQueryBuilder> {
+public class MoreLikeThisQueryParser implements QueryParser<MoreLikeThisQueryBuilder> {
 
     public interface Field {
         ParseField FIELDS = new ParseField("fields");

View File

@@ -31,7 +31,7 @@ import java.util.Map;
 /**
  * Same as {@link MatchQueryParser} but has support for multiple fields.
  */
-public class MultiMatchQueryParser extends BaseQueryParser<MultiMatchQueryBuilder> {
+public class MultiMatchQueryParser implements QueryParser<MultiMatchQueryBuilder> {
 
     @Override
     public String[] names() {

View File

@@ -19,26 +19,15 @@
 package org.elasticsearch.index.query;
 
-import org.apache.lucene.search.ConstantScoreQuery;
-import org.apache.lucene.search.Query;
 import org.apache.lucene.search.join.ScoreMode;
-import org.apache.lucene.search.join.ToParentBlockJoinQuery;
-import org.elasticsearch.common.Nullable;
 import org.elasticsearch.common.ParseField;
 import org.elasticsearch.common.ParsingException;
 import org.elasticsearch.common.Strings;
-import org.elasticsearch.common.inject.Inject;
-import org.elasticsearch.common.lucene.search.Queries;
 import org.elasticsearch.common.xcontent.XContentParser;
-import org.elasticsearch.index.query.support.InnerHitsQueryParserHelper;
-import org.elasticsearch.index.query.support.NestedInnerQueryParseSupport;
 import org.elasticsearch.index.query.support.QueryInnerHits;
-import org.elasticsearch.search.fetch.innerhits.InnerHitsContext;
-import org.elasticsearch.search.fetch.innerhits.InnerHitsSubSearchContext;
 
 import java.io.IOException;
 
-public class NestedQueryParser extends BaseQueryParser<NestedQueryBuilder> {
+public class NestedQueryParser implements QueryParser<NestedQueryBuilder> {
 
     private static final ParseField FILTER_FIELD = new ParseField("filter").withAllDeprecated("query");
     private static final NestedQueryBuilder PROTOTYPE = new NestedQueryBuilder("", EmptyQueryBuilder.PROTOTYPE);
@@ -66,7 +55,7 @@ public class NestedQueryParser extends BaseQueryParser<NestedQueryBuilder> {
                 if ("query".equals(currentFieldName)) {
                     query = parseContext.parseInnerQueryBuilder();
                 } else if (parseContext.parseFieldMatcher().match(currentFieldName, FILTER_FIELD)) {
-                    query = parseContext.parseInnerFilterToQueryBuilder();
+                    query = parseContext.parseInnerQueryBuilder();
                 } else if ("inner_hits".equals(currentFieldName)) {
                     queryInnerHits = new QueryInnerHits(parser);
                 } else {

View File

@@ -21,7 +21,6 @@ package org.elasticsearch.index.query;
 import org.elasticsearch.common.ParseField;
 import org.elasticsearch.common.ParsingException;
-import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.xcontent.XContentParser;
 
 import java.io.IOException;
@@ -29,7 +28,7 @@ import java.io.IOException;
 /**
  * Parser for not query
  */
-public class NotQueryParser extends BaseQueryParser<NotQueryBuilder> {
+public class NotQueryParser implements QueryParser<NotQueryBuilder> {
 
     private static final ParseField QUERY_FIELD = new ParseField("query", "filter");
@@ -56,12 +55,12 @@ public class NotQueryParser extends BaseQueryParser<NotQueryBuilder> {
                 // skip
             } else if (token == XContentParser.Token.START_OBJECT) {
                 if (parseContext.parseFieldMatcher().match(currentFieldName, QUERY_FIELD)) {
-                    query = parseContext.parseInnerFilterToQueryBuilder();
+                    query = parseContext.parseInnerQueryBuilder();
                     queryFound = true;
                 } else {
                     queryFound = true;
                     // its the filter, and the name is the field
-                    query = parseContext.parseInnerFilterToQueryBuilder(currentFieldName);
+                    query = parseContext.parseInnerQueryBuilderByName(currentFieldName);
                 }
             } else if (token.isValue()) {
                 if ("_name".equals(currentFieldName)) {

View File

@@ -21,7 +21,6 @@ package org.elasticsearch.index.query;
 import org.elasticsearch.common.ParseField;
 import org.elasticsearch.common.ParsingException;
-import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.xcontent.XContentParser;
 
 import java.io.IOException;
@@ -29,7 +28,7 @@ import java.io.IOException;
 /**
  * Parser for prefix query
  */
-public class PrefixQueryParser extends BaseQueryParser<PrefixQueryBuilder> {
+public class PrefixQueryParser implements QueryParser<PrefixQueryBuilder> {
 
     private static final ParseField NAME_FIELD = new ParseField("_name").withAllDeprecated("query name is not supported in short version of prefix query");

View File

@ -19,8 +19,6 @@
package org.elasticsearch.index.query; package org.elasticsearch.index.query;
import org.elasticsearch.common.ParsingException;
import java.io.IOException; import java.io.IOException;
/** /**
@ -29,7 +27,7 @@ import java.io.IOException;
*/ */
// TODO: remove when https://github.com/elastic/elasticsearch/issues/13326 is fixed // TODO: remove when https://github.com/elastic/elasticsearch/issues/13326 is fixed
@Deprecated @Deprecated
public class QueryFilterParser extends BaseQueryParser<QueryFilterBuilder> { public class QueryFilterParser implements QueryParser<QueryFilterBuilder> {
@Override @Override
public String[] names() { public String[] names() {

View File

@ -19,8 +19,6 @@
package org.elasticsearch.index.query; package org.elasticsearch.index.query;
import org.apache.lucene.search.Query;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParseField; import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.ParsingException;
@ -60,12 +58,6 @@ public class QueryParseContext {
} }
} }
//norelease this is still used in BaseQueryParserTemp and FunctionScoreQueryParser, remove if not needed there anymore
@Deprecated
public QueryShardContext shardContext() {
return this.shardContext;
}
public XContentParser parser() { public XContentParser parser() {
return this.parser; return this.parser;
} }
@ -81,37 +73,6 @@ public class QueryParseContext {
return parseFieldMatcher.match(setting, CACHE) || parseFieldMatcher.match(setting, CACHE_KEY); return parseFieldMatcher.match(setting, CACHE) || parseFieldMatcher.match(setting, CACHE_KEY);
} }
/**
* @deprecated replaced by calls to parseInnerFilterToQueryBuilder() for the resulting queries
*/
@Nullable
@Deprecated
//norelease should be possible to remove after refactoring all queries
public Query parseInnerFilter() throws QueryShardException, IOException {
assert this.shardContext != null;
QueryBuilder builder = parseInnerFilterToQueryBuilder();
Query result = null;
if (builder != null) {
result = builder.toQuery(this.shardContext);
}
return result;
}
/**
* @deprecated replaced by calls to parseInnerQueryBuilder() for the resulting queries
*/
@Nullable
@Deprecated
//norelease this method will be removed once all queries are refactored
public Query parseInnerQuery() throws IOException, QueryShardException {
QueryBuilder builder = parseInnerQueryBuilder();
Query result = null;
if (builder != null) {
result = builder.toQuery(this.shardContext);
}
return result;
}
/** /**
* @return a new QueryBuilder based on the current state of the parser * @return a new QueryBuilder based on the current state of the parser
*/ */
@ -139,11 +100,7 @@ public class QueryParseContext {
throw new ParsingException(parser.getTokenLocation(), "[_na] query malformed, no field after start_object"); throw new ParsingException(parser.getTokenLocation(), "[_na] query malformed, no field after start_object");
} }
QueryParser queryParser = queryParser(queryName); QueryBuilder result = parseInnerQueryBuilderByName(queryName);
if (queryParser == null) {
throw new ParsingException(parser.getTokenLocation(), "No query registered for [" + queryName + "]");
}
QueryBuilder result = queryParser.fromXContent(this);
if (parser.currentToken() == XContentParser.Token.END_OBJECT || parser.currentToken() == XContentParser.Token.END_ARRAY) { if (parser.currentToken() == XContentParser.Token.END_OBJECT || parser.currentToken() == XContentParser.Token.END_ARRAY) {
// if we are at END_OBJECT, move to the next one... // if we are at END_OBJECT, move to the next one...
parser.nextToken(); parser.nextToken();
@ -151,34 +108,12 @@ public class QueryParseContext {
return result; return result;
} }
/** public QueryBuilder parseInnerQueryBuilderByName(String queryName) throws IOException {
* @return a new QueryBuilder based on the current state of the parser, but does so that the inner query QueryParser queryParser = queryParser(queryName);
* is parsed to a filter if (queryParser == null) {
*/ throw new ParsingException(parser.getTokenLocation(), "No query registered for [" + queryName + "]");
//norelease setting and checking the isFilter Flag should completely be moved to toQuery/toFilter after query refactoring
public QueryBuilder parseInnerFilterToQueryBuilder() throws IOException {
final boolean originalIsFilter = this.shardContext.isFilter;
try {
this.shardContext.isFilter = true;
return parseInnerQueryBuilder();
} finally {
this.shardContext.isFilter = originalIsFilter;
}
}
//norelease setting and checking the isFilter Flag should completely be moved to toQuery/toFilter after query refactoring
public QueryBuilder parseInnerFilterToQueryBuilder(String queryName) throws IOException {
final boolean originalIsFilter = this.shardContext.isFilter;
try {
this.shardContext.isFilter = true;
QueryParser queryParser = queryParser(queryName);
if (queryParser == null) {
throw new ParsingException(parser.getTokenLocation(), "No query registered for [" + queryName + "]");
}
return queryParser.fromXContent(this);
} finally {
this.shardContext.isFilter = originalIsFilter;
} }
return queryParser.fromXContent(this);
} }
public ParseFieldMatcher parseFieldMatcher() { public ParseFieldMatcher parseFieldMatcher() {
@ -189,6 +124,11 @@ public class QueryParseContext {
this.parser = innerParser; this.parser = innerParser;
} }
/**
* Get the query parser for a specific type of query registered under its name
* @param name the name of the parser to retrieve
* @return the query parser
*/
QueryParser queryParser(String name) { QueryParser queryParser(String name) {
return indicesQueriesRegistry.queryParsers().get(name); return indicesQueriesRegistry.queryParsers().get(name);
} }

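For illustration only, not part of this commit: with parseInnerFilter() and parseInnerQuery() removed, callers first parse to a QueryBuilder and then convert it against the shard context, as the hunks elsewhere in this change do. A hypothetical caller sketch:

import java.io.IOException;

import org.apache.lucene.search.Query;

// Hypothetical helper showing the two-step parse-then-convert pattern.
public final class InnerQueryParsingExample {

    private InnerQueryParsingExample() {}

    static Query parseAsQuery(QueryShardContext context) throws IOException {
        QueryBuilder builder = context.parseContext().parseInnerQueryBuilder();
        return builder == null ? null : builder.toQuery(context);
    }

    static Query parseAsFilter(QueryShardContext context) throws IOException {
        QueryBuilder builder = context.parseContext().parseInnerQueryBuilder();
        return builder == null ? null : builder.toFilter(context);
    }
}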
View File

@ -19,9 +19,6 @@
package org.elasticsearch.index.query; package org.elasticsearch.index.query;
import org.apache.lucene.search.Query;
import org.elasticsearch.common.Nullable;
import java.io.IOException; import java.io.IOException;
/** /**
@ -35,18 +32,6 @@ public interface QueryParser<QB extends QueryBuilder<QB>> {
*/ */
String[] names(); String[] names();
/**
* Parses the into a query from the current parser location. Will be at
* "START_OBJECT" location, and should end when the token is at the matching
* "END_OBJECT".
* <p>
* Returns <tt>null</tt> if this query should be ignored in the context of
* the DSL.
*/
//norelease can be removed in favour of fromXContent once search requests can be parsed on the coordinating node
@Nullable
Query parse(QueryShardContext context) throws IOException;
/** /**
* Creates a new {@link QueryBuilder} from the query held by the {@link QueryShardContext} * Creates a new {@link QueryBuilder} from the query held by the {@link QueryShardContext}
* in {@link org.elasticsearch.common.xcontent.XContent} format * in {@link org.elasticsearch.common.xcontent.XContent} format

View File

@@ -22,7 +22,6 @@ package org.elasticsearch.index.query;
 import org.elasticsearch.common.ParseField;
 import org.elasticsearch.common.ParsingException;
 import org.elasticsearch.common.Strings;
-import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.unit.Fuzziness;
 import org.elasticsearch.common.xcontent.XContentParser;
@@ -34,7 +33,7 @@ import java.util.Map;
 /**
  * Parser for query_string query
  */
-public class QueryStringQueryParser extends BaseQueryParser {
+public class QueryStringQueryParser implements QueryParser {
 
     private static final ParseField FUZZINESS = Fuzziness.FIELD.withDeprecation("fuzzy_min_sim");

View File

@ -1,60 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.query;
import org.apache.lucene.search.Query;
import org.elasticsearch.common.xcontent.XContentBuilder;
import java.io.IOException;
/**
* QueryBuilder implementation that holds a lucene query, which can be returned by {@link QueryBuilder#toQuery(QueryShardContext)}.
* Doesn't support conversion to {@link org.elasticsearch.common.xcontent.XContent} via {@link #doXContent(XContentBuilder, Params)}.
*/
//norelease to be removed once all queries support separate fromXContent and toQuery methods. Make AbstractQueryBuilder#toQuery final as well then.
public class QueryWrappingQueryBuilder extends AbstractQueryBuilder<QueryWrappingQueryBuilder> implements SpanQueryBuilder<QueryWrappingQueryBuilder>, MultiTermQueryBuilder<QueryWrappingQueryBuilder>{
private Query query;
public QueryWrappingQueryBuilder(Query query) {
this.query = query;
}
@Override
protected void doXContent(XContentBuilder builder, Params params) throws IOException {
throw new UnsupportedOperationException();
}
@Override
protected Query doToQuery(QueryShardContext context) throws IOException {
return query;
}
@Override
public String getWriteableName() {
// this should not be called since we overwrite BaseQueryBuilder#toQuery() in this class
throw new UnsupportedOperationException();
}
@Override
protected void setFinalBoost(Query query) {
//no-op the wrapper lucene query has already its boost set
}
}

View File

@@ -28,7 +28,7 @@ import java.io.IOException;
 /**
  * Parser for range query
  */
-public class RangeQueryParser extends BaseQueryParser<RangeQueryBuilder> {
+public class RangeQueryParser implements QueryParser<RangeQueryBuilder> {
 
     private static final ParseField FIELDDATA_FIELD = new ParseField("fielddata").withAllDeprecated("[no replacement]");
     private static final ParseField NAME_FIELD = new ParseField("_name").withAllDeprecated("query name is not supported in short version of range query");

View File

@@ -21,7 +21,6 @@ package org.elasticsearch.index.query;
 import org.elasticsearch.common.ParseField;
 import org.elasticsearch.common.ParsingException;
-import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.xcontent.XContentParser;
 
 import java.io.IOException;
@@ -29,7 +28,7 @@ import java.io.IOException;
 /**
  * Parser for regexp query
  */
-public class RegexpQueryParser extends BaseQueryParser<RegexpQueryBuilder> {
+public class RegexpQueryParser implements QueryParser<RegexpQueryBuilder> {
 
     private static final ParseField NAME_FIELD = new ParseField("_name").withAllDeprecated("query name is not supported in short version of regexp query");

View File

@@ -20,7 +20,6 @@
 package org.elasticsearch.index.query;
 
 import org.elasticsearch.common.ParsingException;
-import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.script.Script;
 import org.elasticsearch.script.Script.ScriptField;
@@ -34,7 +33,7 @@ import java.util.Map;
 /**
  * Parser for script query
  */
-public class ScriptQueryParser extends BaseQueryParser<ScriptQueryBuilder> {
+public class ScriptQueryParser implements QueryParser<ScriptQueryBuilder> {
 
     @Override
     public String[] names() {

View File

@@ -19,10 +19,8 @@
 package org.elasticsearch.index.query;
 
-import org.apache.lucene.search.BooleanQuery;
 import org.elasticsearch.common.ParsingException;
 import org.elasticsearch.common.Strings;
-import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.xcontent.XContentParser;
 
 import java.io.IOException;
@@ -60,7 +58,7 @@ import java.util.Map;
  * {@code fields} - fields to search, defaults to _all if not set, allows
  * boosting a field with ^n
  */
-public class SimpleQueryStringParser extends BaseQueryParser<SimpleQueryStringBuilder> {
+public class SimpleQueryStringParser implements QueryParser<SimpleQueryStringBuilder> {
 
     @Override
     public String[] names() {

View File

@@ -21,7 +21,6 @@ package org.elasticsearch.index.query;
 import org.elasticsearch.common.ParsingException;
 import org.elasticsearch.common.Strings;
-import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.xcontent.XContentParser;
 
 import java.io.IOException;
@@ -29,7 +28,7 @@ import java.io.IOException;
 /**
  * Parser for span_containing query
  */
-public class SpanContainingQueryParser extends BaseQueryParser<SpanContainingQueryBuilder> {
+public class SpanContainingQueryParser implements QueryParser<SpanContainingQueryBuilder> {
 
     @Override
     public String[] names() {

View File

@@ -28,7 +28,7 @@ import java.io.IOException;
 /**
  * Parser for span_first query
  */
-public class SpanFirstQueryParser extends BaseQueryParser<SpanFirstQueryBuilder> {
+public class SpanFirstQueryParser implements QueryParser<SpanFirstQueryBuilder> {
 
     @Override
     public String[] names() {

View File

@@ -20,7 +20,6 @@
 package org.elasticsearch.index.query;
 
 import org.elasticsearch.common.ParsingException;
 import org.elasticsearch.common.Strings;
-import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.xcontent.XContentParser;
 
 import java.io.IOException;
@@ -28,7 +27,7 @@ import java.io.IOException;
 /**
  * Parser for span_multi query
  */
-public class SpanMultiTermQueryParser extends BaseQueryParser<SpanMultiTermQueryBuilder> {
+public class SpanMultiTermQueryParser implements QueryParser<SpanMultiTermQueryBuilder> {
 
     public static final String MATCH_NAME = "match";

View File

@@ -30,7 +30,7 @@ import java.util.List;
 /**
  * Parser for span_near query
  */
-public class SpanNearQueryParser extends BaseQueryParser<SpanNearQueryBuilder> {
+public class SpanNearQueryParser implements QueryParser<SpanNearQueryBuilder> {
 
     @Override
     public String[] names() {

View File

@@ -21,7 +21,6 @@ package org.elasticsearch.index.query;
 import org.elasticsearch.common.ParsingException;
 import org.elasticsearch.common.Strings;
-import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.xcontent.XContentParser;
 
 import java.io.IOException;
@@ -29,7 +28,7 @@ import java.io.IOException;
 /**
  * Parser for span_not query
  */
-public class SpanNotQueryParser extends BaseQueryParser<SpanNotQueryBuilder> {
+public class SpanNotQueryParser implements QueryParser<SpanNotQueryBuilder> {
 
     @Override
     public String[] names() {

View File

@@ -30,7 +30,7 @@ import java.util.List;
 /**
  * Parser for span_or query
  */
-public class SpanOrQueryParser extends BaseQueryParser<SpanOrQueryBuilder> {
+public class SpanOrQueryParser implements QueryParser<SpanOrQueryBuilder> {
 
     @Override
     public String[] names() {

View File

@@ -21,7 +21,6 @@ package org.elasticsearch.index.query;
 import org.elasticsearch.common.ParsingException;
 import org.elasticsearch.common.Strings;
-import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.xcontent.XContentParser;
 
 import java.io.IOException;
@@ -29,7 +28,7 @@ import java.io.IOException;
 /**
  * Parser for span_term query
  */
-public class SpanTermQueryParser extends BaseQueryParser<SpanTermQueryBuilder> {
+public class SpanTermQueryParser implements QueryParser<SpanTermQueryBuilder> {
 
     @Override
     public String[] names() {

View File

@@ -28,7 +28,7 @@ import java.io.IOException;
 /**
  * Parser for span_within query
  */
-public class SpanWithinQueryParser extends BaseQueryParser<SpanWithinQueryBuilder> {
+public class SpanWithinQueryParser implements QueryParser<SpanWithinQueryBuilder> {
 
     @Override
     public String[] names() {

View File

@@ -22,7 +22,6 @@ import org.elasticsearch.ElasticsearchParseException;
 import org.elasticsearch.common.HasContextAndHeaders;
 import org.elasticsearch.common.Nullable;
 import org.elasticsearch.common.ParseFieldMatcher;
-import org.elasticsearch.common.ParsingException;
 import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.lease.Releasables;
 import org.elasticsearch.common.xcontent.XContent;
@@ -42,7 +41,7 @@ import static org.elasticsearch.common.Strings.hasLength;
 * In the simplest case, parse template string and variables from the request,
 * compile the template and execute the template against the given variables.
 * */
-public class TemplateQueryParser extends BaseQueryParser<TemplateQueryBuilder> {
+public class TemplateQueryParser implements QueryParser<TemplateQueryBuilder> {
 
     private final static Map<String, ScriptService.ScriptType> parametersToTypes = new HashMap<>();
 
     static {

View File

@@ -21,7 +21,6 @@ package org.elasticsearch.index.query;
 import org.elasticsearch.common.ParseField;
 import org.elasticsearch.common.ParsingException;
-import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.xcontent.XContentParser;
 
 import java.io.IOException;
@@ -29,7 +28,7 @@ import java.io.IOException;
 /**
  * Parser for the term query
  */
-public class TermQueryParser extends BaseQueryParser<TermQueryBuilder> {
+public class TermQueryParser implements QueryParser<TermQueryBuilder> {
 
     private static final ParseField NAME_FIELD = new ParseField("_name").withAllDeprecated("query name is not supported in short version of term query");
     private static final ParseField BOOST_FIELD = new ParseField("boost").withAllDeprecated("boost is not supported in short version of term query");

View File

@@ -36,7 +36,7 @@ import java.util.List;
  * It also supports a terms lookup mechanism which can be used to fetch the term values from
  * a document in an index.
  */
-public class TermsQueryParser extends BaseQueryParser {
+public class TermsQueryParser implements QueryParser {
 
     private static final ParseField MIN_SHOULD_MATCH_FIELD = new ParseField("min_match", "min_should_match", "minimum_should_match")
             .withAllDeprecated("Use [bool] query instead");

View File

@@ -21,7 +21,6 @@ package org.elasticsearch.index.query;
 import org.apache.lucene.util.BytesRef;
 import org.elasticsearch.common.ParsingException;
-import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.xcontent.XContentParser;
 
 import java.io.IOException;
@@ -29,7 +28,7 @@ import java.io.IOException;
 /**
  * Parser for type query
  */
-public class TypeQueryParser extends BaseQueryParser<TypeQueryBuilder> {
+public class TypeQueryParser implements QueryParser<TypeQueryBuilder> {
 
     @Override
     public String[] names() {

View File

@@ -20,7 +20,6 @@
 package org.elasticsearch.index.query;
 
 import org.elasticsearch.common.ParsingException;
-import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.xcontent.XContentParser;
 
 import java.io.IOException;
@@ -28,7 +27,7 @@ import java.io.IOException;
 /**
  * Parser for wildcard query
  */
-public class WildcardQueryParser extends BaseQueryParser<WildcardQueryBuilder> {
+public class WildcardQueryParser implements QueryParser<WildcardQueryBuilder> {
 
     @Override
     public String[] names() {

View File

@@ -20,7 +20,6 @@
 package org.elasticsearch.index.query;
 
 import org.elasticsearch.common.ParsingException;
-import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.xcontent.XContentParser;
 
 import java.io.IOException;
@@ -28,7 +27,7 @@ import java.io.IOException;
 /**
  * Query parser for JSON Queries.
  */
-public class WrapperQueryParser extends BaseQueryParser {
+public class WrapperQueryParser implements QueryParser {
 
     @Override
     public String[] names() {

View File

@@ -38,7 +38,7 @@ import java.util.List;
 /**
  * Parser for function_score query
  */
-public class FunctionScoreQueryParser extends BaseQueryParser<FunctionScoreQueryBuilder> {
+public class FunctionScoreQueryParser implements QueryParser<FunctionScoreQueryBuilder> {
     private static final FunctionScoreQueryBuilder PROTOTYPE = new FunctionScoreQueryBuilder(EmptyQueryBuilder.PROTOTYPE, new FunctionScoreQueryBuilder.FilterFunctionBuilder[0]);
@@ -88,7 +88,7 @@ public class FunctionScoreQueryParser extends BaseQueryParser<FunctionScoreQuery
             } else if ("query".equals(currentFieldName)) {
                 query = parseContext.parseInnerQueryBuilder();
             } else if (parseContext.parseFieldMatcher().match(currentFieldName, FILTER_FIELD)) {
-                filter = parseContext.parseInnerFilterToQueryBuilder();
+                filter = parseContext.parseInnerQueryBuilder();
             } else if ("score_mode".equals(currentFieldName) || "scoreMode".equals(currentFieldName)) {
                 scoreMode = FiltersFunctionScoreQuery.ScoreMode.fromString(parser.text());
             } else if ("boost_mode".equals(currentFieldName) || "boostMode".equals(currentFieldName)) {
@@ -179,7 +179,7 @@ public class FunctionScoreQueryParser extends BaseQueryParser<FunctionScoreQuery
                 functionWeight = parser.floatValue();
             } else {
                 if ("filter".equals(currentFieldName)) {
-                    filter = parseContext.parseInnerFilterToQueryBuilder();
+                    filter = parseContext.parseInnerQueryBuilder();
                 } else {
                     if (scoreFunction != null) {
                         throw new ParsingException(parser.getTokenLocation(), "failed to parse function_score functions. already found [{}], now encountering [{}].", scoreFunction.getName(), currentFieldName);
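The parser above handles "query", "filter", "score_mode", "boost_mode" and, inside each function, a "filter"/"weight" pair. For orientation, a request exercising those fields typically has the shape below; only the field names come from the parser, the tag/value content is invented.

    // Illustrative only: a function_score query using the fields the parser above recognizes.
    String functionScoreQuery = "{\n" +
            "  \"function_score\" : {\n" +
            "    \"query\" : { \"match_all\" : {} },\n" +
            "    \"functions\" : [\n" +
            "      { \"filter\" : { \"term\" : { \"tag\" : \"wow\" } }, \"weight\" : 2 }\n" +  // per-function filter + weight
            "    ],\n" +
            "    \"score_mode\" : \"sum\",\n" +        // how the individual function scores are combined
            "    \"boost_mode\" : \"multiply\"\n" +    // how the combined score is merged with the query score
            "  }\n" +
            "}";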

View File

@@ -77,7 +77,7 @@ public class NestedInnerQueryParseSupport {
         if (path != null) {
             setPathLevel();
             try {
-                innerQuery = parseContext.parseInnerQuery();
+                innerQuery = parseContext.parseInnerQueryBuilder().toQuery(this.shardContext);
             } finally {
                 resetPathLevel();
             }
@@ -92,7 +92,7 @@ public class NestedInnerQueryParseSupport {
         if (path != null) {
             setPathLevel();
             try {
-                innerFilter = parseContext.parseInnerFilter();
+                innerFilter = parseContext.parseInnerQueryBuilder().toFilter(this.shardContext);
             } finally {
                 resetPathLevel();
             }
@@ -120,7 +120,7 @@ public class NestedInnerQueryParseSupport {
             parseContext.parser(innerParser);
             setPathLevel();
             try {
-                innerQuery = parseContext.parseInnerQuery();
+                innerQuery = parseContext.parseInnerQueryBuilder().toQuery(this.shardContext);
             } finally {
                 resetPathLevel();
             }
@@ -148,7 +148,7 @@ public class NestedInnerQueryParseSupport {
             try {
                 XContentParser innerParser = XContentHelper.createParser(source);
                 parseContext.parser(innerParser);
-                innerFilter = parseContext.parseInnerFilter();
+                innerFilter = parseContext.parseInnerQueryBuilder().toFilter(this.shardContext);
                 filterParsed = true;
                 return innerFilter;
             } finally {
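The recurring change in this class is the move from parsing XContent straight into a Lucene query (parseInnerQuery/parseInnerFilter) to first building an intermediate QueryBuilder and only converting it on the shard. A minimal sketch of that two-step flow, using the names that appear in the hunks above; parseContext and shardContext are assumed to be in scope.

    // Sketch of the two-phase flow this refactoring moves towards.
    QueryBuilder innerBuilder = parseContext.parseInnerQueryBuilder();   // step 1: parse the request body into an intermediate builder
    Query innerQuery = innerBuilder.toQuery(shardContext);              // step 2a: build the Lucene query on the shard
    Query innerFilter = innerBuilder.toFilter(shardContext);            // step 2b: or build the non-scoring (filter) form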

View File

@@ -23,19 +23,8 @@ import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.document.FieldType;
 import org.apache.lucene.document.TextField;
-import org.apache.lucene.index.DirectoryReader;
-import org.apache.lucene.index.IndexOptions;
-import org.apache.lucene.index.IndexWriter;
-import org.apache.lucene.index.MultiReader;
-import org.apache.lucene.index.Term;
-import org.apache.lucene.search.BooleanClause;
-import org.apache.lucene.search.BooleanQuery;
-import org.apache.lucene.search.DisjunctionMaxQuery;
-import org.apache.lucene.search.IndexSearcher;
-import org.apache.lucene.search.QueryUtils;
-import org.apache.lucene.search.ScoreDoc;
-import org.apache.lucene.search.TermQuery;
-import org.apache.lucene.search.TopDocs;
+import org.apache.lucene.index.*;
+import org.apache.lucene.search.*;
 import org.apache.lucene.search.similarities.BM25Similarity;
 import org.apache.lucene.search.similarities.DefaultSimilarity;
 import org.apache.lucene.search.similarities.Similarity;
@@ -45,11 +34,7 @@ import org.elasticsearch.test.ESTestCase;
 import org.junit.Test;
 import java.io.IOException;
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Set;
+import java.util.*;
 import static org.hamcrest.Matchers.containsInAnyOrder;
 import static org.hamcrest.Matchers.equalTo;

View File

@@ -404,6 +404,7 @@ public abstract class AbstractQueryTestCase<QB extends AbstractQueryBuilder<QB>>
     /**
      * Serialize the given query builder and asserts that both are equal
      */
+    @SuppressWarnings("unchecked")
    protected QB assertSerialization(QB testQuery) throws IOException {
        try (BytesStreamOutput output = new BytesStreamOutput()) {
            testQuery.writeTo(output);

View File

@@ -19,15 +19,17 @@
 package org.elasticsearch.index.query;
-import org.apache.lucene.search.BooleanClause;
-import org.apache.lucene.search.BooleanQuery;
-import org.apache.lucene.search.MatchAllDocsQuery;
-import org.apache.lucene.search.Query;
+import org.apache.lucene.search.*;
+import org.hamcrest.Matchers;
 import org.junit.Test;
 import java.io.IOException;
 import java.util.*;
+import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
+import static org.elasticsearch.index.query.QueryBuilders.boolQuery;
+import static org.elasticsearch.index.query.QueryBuilders.constantScoreQuery;
+import static org.elasticsearch.index.query.QueryBuilders.termQuery;
 import static org.hamcrest.CoreMatchers.equalTo;
 import static org.hamcrest.CoreMatchers.instanceOf;
@@ -80,6 +82,7 @@ public class BoolQueryBuilderTests extends AbstractQueryTestCase<BoolQueryBuilde
         } else {
             assertThat(query, instanceOf(BooleanQuery.class));
             BooleanQuery booleanQuery = (BooleanQuery) query;
+            assertThat(booleanQuery.isCoordDisabled(), equalTo(queryBuilder.disableCoord()));
             if (queryBuilder.adjustPureNegative()) {
                 boolean isNegative = true;
                 for (BooleanClause clause : clauses) {
@@ -173,4 +176,30 @@ public class BoolQueryBuilderTests extends AbstractQueryTestCase<BoolQueryBuilde
         } catch (IllegalArgumentException e) {
         }
     }
// https://github.com/elasticsearch/elasticsearch/issues/7240
@Test
public void testEmptyBooleanQuery() throws Exception {
String query = jsonBuilder().startObject().startObject("bool").endObject().endObject().string();
Query parsedQuery = parseQuery(query).toQuery(createShardContext());
assertThat(parsedQuery, Matchers.instanceOf(MatchAllDocsQuery.class));
}
public void testDefaultMinShouldMatch() throws Exception {
// Queries have a minShouldMatch of 0
BooleanQuery bq = (BooleanQuery) parseQuery(boolQuery().must(termQuery("foo", "bar")).buildAsBytes()).toQuery(createShardContext());
assertEquals(0, bq.getMinimumNumberShouldMatch());
bq = (BooleanQuery) parseQuery(boolQuery().should(termQuery("foo", "bar")).buildAsBytes()).toQuery(createShardContext());
assertEquals(0, bq.getMinimumNumberShouldMatch());
// Filters have a minShouldMatch of 0/1
ConstantScoreQuery csq = (ConstantScoreQuery) parseQuery(constantScoreQuery(boolQuery().must(termQuery("foo", "bar"))).buildAsBytes()).toQuery(createShardContext());
bq = (BooleanQuery) csq.getQuery();
assertEquals(0, bq.getMinimumNumberShouldMatch());
csq = (ConstantScoreQuery) parseQuery(constantScoreQuery(boolQuery().should(termQuery("foo", "bar"))).buildAsBytes()).toQuery(createShardContext());
bq = (BooleanQuery) csq.getQuery();
assertEquals(1, bq.getMinimumNumberShouldMatch());
}
}
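The new testDefaultMinShouldMatch above pins the defaults down: in query context a bool keeps minimum_should_match at 0, while a should-only bool wrapped in constant_score gets 1. An explicit setting, which the deprecation note on the terms query's min_should_match option points users towards, would look roughly like this; the field names are invented for illustration.

    // Illustrative only: a bool query with an explicit minimum_should_match.
    String boolQuery = "{\n" +
            "  \"bool\" : {\n" +
            "    \"should\" : [\n" +
            "      { \"term\" : { \"tag\" : \"wow\" } },\n" +
            "      { \"term\" : { \"tag\" : \"elasticsearch\" } }\n" +
            "    ],\n" +
            "    \"minimum_should_match\" : 1\n" +    // at least one of the should clauses must match
            "  }\n" +
            "}";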

View File

@@ -25,8 +25,11 @@ import org.junit.Test;
 import java.io.IOException;
+import static org.elasticsearch.index.query.QueryBuilders.commonTermsQuery;
+import static org.elasticsearch.test.StreamsUtils.copyToStringFromClasspath;
 import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.instanceOf;
+import static org.hamcrest.Matchers.nullValue;
 public class CommonTermsQueryBuilderTests extends AbstractQueryTestCase<CommonTermsQueryBuilder> {
@@ -110,4 +113,44 @@ public class CommonTermsQueryBuilderTests extends AbstractQueryTestCase<CommonTe
         context.setAllowUnmappedFields(true);
         assertNull(builder.toQuery(context));
     }
@Test
public void testCommonTermsQuery1() throws IOException {
String query = copyToStringFromClasspath("/org/elasticsearch/index/query/commonTerms-query1.json");
Query parsedQuery = parseQuery(query).toQuery(createShardContext());
assertThat(parsedQuery, instanceOf(ExtendedCommonTermsQuery.class));
ExtendedCommonTermsQuery ectQuery = (ExtendedCommonTermsQuery) parsedQuery;
assertThat(ectQuery.getHighFreqMinimumNumberShouldMatchSpec(), nullValue());
assertThat(ectQuery.getLowFreqMinimumNumberShouldMatchSpec(), equalTo("2"));
}
@Test
public void testCommonTermsQuery2() throws IOException {
String query = copyToStringFromClasspath("/org/elasticsearch/index/query/commonTerms-query2.json");
Query parsedQuery = parseQuery(query).toQuery(createShardContext());
assertThat(parsedQuery, instanceOf(ExtendedCommonTermsQuery.class));
ExtendedCommonTermsQuery ectQuery = (ExtendedCommonTermsQuery) parsedQuery;
assertThat(ectQuery.getHighFreqMinimumNumberShouldMatchSpec(), equalTo("50%"));
assertThat(ectQuery.getLowFreqMinimumNumberShouldMatchSpec(), equalTo("5<20%"));
}
@Test
public void testCommonTermsQuery3() throws IOException {
String query = copyToStringFromClasspath("/org/elasticsearch/index/query/commonTerms-query3.json");
Query parsedQuery = parseQuery(query).toQuery(createShardContext());
assertThat(parsedQuery, instanceOf(ExtendedCommonTermsQuery.class));
ExtendedCommonTermsQuery ectQuery = (ExtendedCommonTermsQuery) parsedQuery;
assertThat(ectQuery.getHighFreqMinimumNumberShouldMatchSpec(), nullValue());
assertThat(ectQuery.getLowFreqMinimumNumberShouldMatchSpec(), equalTo("2"));
}
@Test // see #11730
public void testCommonTermsQuery4() throws IOException {
boolean disableCoord = randomBoolean();
Query parsedQuery = parseQuery(commonTermsQuery("field", "text").disableCoord(disableCoord).buildAsBytes()).toQuery(createShardContext());
assertThat(parsedQuery, instanceOf(ExtendedCommonTermsQuery.class));
ExtendedCommonTermsQuery ectQuery = (ExtendedCommonTermsQuery) parsedQuery;
assertThat(ectQuery.isCoordDisabled(), equalTo(disableCoord));
}
}
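The commonTerms-query*.json classpath resources referenced by these tests are not shown in the diff. Based only on the assertions for query2 (a high_freq spec of "50%" and a low_freq spec of "5<20%"), such a file plausibly has the shape below; the field name, query text, and cutoff value are assumptions for illustration.

    // Hypothetical reconstruction of a commonTerms-query2.json-style body; only the minimum_should_match
    // specs are implied by the assertions above, the rest is illustrative.
    String commonTermsQuery = "{\n" +
            "  \"common\" : {\n" +
            "    \"body\" : {\n" +
            "      \"query\" : \"nelly the elephant as a cartoon\",\n" +
            "      \"cutoff_frequency\" : 0.001,\n" +
            "      \"minimum_should_match\" : {\n" +
            "        \"low_freq\" : \"5<20%\",\n" +
            "        \"high_freq\" : \"50%\"\n" +
            "      }\n" +
            "    }\n" +
            "  }\n" +
            "}";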

View File

@@ -19,17 +19,17 @@
 package org.elasticsearch.index.query;
+import org.apache.lucene.index.Term;
 import org.apache.lucene.search.DisjunctionMaxQuery;
+import org.apache.lucene.search.PrefixQuery;
 import org.apache.lucene.search.Query;
 import org.junit.Test;
 import java.io.IOException;
-import java.util.Collection;
-import java.util.HashMap;
-import java.util.Iterator;
-import java.util.Map;
+import java.util.*;
-import static org.hamcrest.CoreMatchers.*;
+import static org.hamcrest.CoreMatchers.nullValue;
+import static org.hamcrest.Matchers.*;
 public class DisMaxQueryBuilderTests extends AbstractQueryTestCase<DisMaxQueryBuilder> {
@@ -113,4 +113,34 @@ public class DisMaxQueryBuilderTests extends AbstractQueryTestCase<DisMaxQueryBu
             // expected
         }
     }
@Test
public void testToQueryInnerPrefixQuery() throws Exception {
assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
String queryAsString = "{\n" +
" \"dis_max\":{\n" +
" \"queries\":[\n" +
" {\n" +
" \"prefix\":{\n" +
" \"" + STRING_FIELD_NAME + "\":{\n" +
" \"value\":\"sh\",\n" +
" \"boost\":1.2\n" +
" }\n" +
" }\n" +
" }\n" +
" ]\n" +
" }\n" +
"}";
Query query = parseQuery(queryAsString).toQuery(createShardContext());
assertThat(query, instanceOf(DisjunctionMaxQuery.class));
DisjunctionMaxQuery disjunctionMaxQuery = (DisjunctionMaxQuery) query;
List<Query> disjuncts = disjunctionMaxQuery.getDisjuncts();
assertThat(disjuncts.size(), equalTo(1));
PrefixQuery firstQ = (PrefixQuery) disjuncts.get(0);
// since age is automatically registered in data, we encode it as numeric
assertThat(firstQ.getPrefix(), equalTo(new Term(STRING_FIELD_NAME, "sh")));
assertThat((double) firstQ.getBoost(), closeTo(1.2, 0.00001));
}
}

View File

@@ -19,6 +19,7 @@
 package org.elasticsearch.index.query;
+import org.apache.lucene.index.Term;
 import org.apache.lucene.search.FuzzyQuery;
 import org.apache.lucene.search.NumericRangeQuery;
 import org.apache.lucene.search.Query;
@@ -29,6 +30,7 @@ import org.junit.Test;
 import java.io.IOException;
+import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.instanceOf;
 public class FuzzyQueryBuilderTests extends AbstractQueryTestCase<FuzzyQueryBuilder> {
@@ -103,4 +105,45 @@ public class FuzzyQueryBuilderTests extends AbstractQueryTestCase<FuzzyQueryBuil
             assertThat(e.getMessage(), Matchers.containsString("For input string"));
         }
     }
@Test
public void testToQueryWithStringField() throws IOException {
assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
String query = "{\n" +
" \"fuzzy\":{\n" +
" \"" + STRING_FIELD_NAME + "\":{\n" +
" \"value\":\"sh\",\n" +
" \"fuzziness\": \"AUTO\",\n" +
" \"prefix_length\":1,\n" +
" \"boost\":2.0\n" +
" }\n" +
" }\n" +
"}";
Query parsedQuery = parseQuery(query).toQuery(createShardContext());
assertThat(parsedQuery, instanceOf(FuzzyQuery.class));
FuzzyQuery fuzzyQuery = (FuzzyQuery) parsedQuery;
assertThat(fuzzyQuery.getTerm(), equalTo(new Term(STRING_FIELD_NAME, "sh")));
assertThat(fuzzyQuery.getMaxEdits(), equalTo(Fuzziness.AUTO.asDistance("sh")));
assertThat(fuzzyQuery.getPrefixLength(), equalTo(1));
assertThat(fuzzyQuery.getBoost(), equalTo(2.0f));
}
@Test
public void testToQueryWithNumericField() throws IOException {
assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
String query = "{\n" +
" \"fuzzy\":{\n" +
" \"" + INT_FIELD_NAME + "\":{\n" +
" \"value\":12,\n" +
" \"fuzziness\":5,\n" +
" \"boost\":2.0\n" +
" }\n" +
" }\n" +
"}\n";
Query parsedQuery = parseQuery(query).toQuery(createShardContext());
assertThat(parsedQuery, instanceOf(NumericRangeQuery.class));
NumericRangeQuery fuzzyQuery = (NumericRangeQuery) parsedQuery;
assertThat(fuzzyQuery.getMin().longValue(), equalTo(7l));
assertThat(fuzzyQuery.getMax().longValue(), equalTo(17l));
}
}

View File

@@ -21,12 +21,7 @@ package org.elasticsearch.index.query;
 import com.spatial4j.core.io.GeohashUtils;
 import com.spatial4j.core.shape.Rectangle;
-import org.apache.lucene.search.BooleanClause;
-import org.apache.lucene.search.BooleanQuery;
-import org.apache.lucene.search.ConstantScoreQuery;
-import org.apache.lucene.search.NumericRangeQuery;
-import org.apache.lucene.search.Query;
+import org.apache.lucene.search.*;
 import org.elasticsearch.common.geo.GeoPoint;
 import org.elasticsearch.common.geo.GeoUtils;
 import org.elasticsearch.index.search.geo.InMemoryGeoBoundingBoxQuery;
@@ -35,6 +30,9 @@ import org.junit.Test;
 import java.io.IOException;
+import static org.hamcrest.Matchers.closeTo;
+import static org.hamcrest.Matchers.equalTo;
 public class GeoBoundingBoxQueryBuilderTests extends AbstractQueryTestCase<GeoBoundingBoxQueryBuilder> {
     /** Randomly generate either NaN or one of the two infinity values. */
     private static Double[] brokenDoubles = {Double.NaN, Double.POSITIVE_INFINITY, Double.NEGATIVE_INFINITY};
@@ -258,7 +256,6 @@
         }
     }
-    // Java really could do with function pointers - is there any Java8 feature that would help me here which I don't know of?
     public abstract class PointTester {
         private double brokenCoordinate = randomFrom(brokenDoubles);
         private double invalidCoordinate;
@@ -319,4 +316,106 @@ public class GeoBoundingBoxQueryBuilderTests extends AbstractQueryTestCase<GeoBo
             qb.setCorners(qb.topLeft().getLat(), qb.topLeft().getLon(), qb.topLeft().getLat(), coordinate);
         }
     }
@Test
public void testParsingAndToQuery1() throws IOException {
assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
String query = "{\n" +
" \"geo_bounding_box\":{\n" +
" \"" + GEO_POINT_FIELD_NAME+ "\":{\n" +
" \"top_left\":[-70, 40],\n" +
" \"bottom_right\":[-80, 30]\n" +
" }\n" +
" }\n" +
"}\n";
assertGeoBoundingBoxQuery(query);
}
@Test
public void testParsingAndToQuery2() throws IOException {
assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
String query = "{\n" +
" \"geo_bounding_box\":{\n" +
" \"" + GEO_POINT_FIELD_NAME+ "\":{\n" +
" \"top_left\":{\n" +
" \"lat\":40,\n" +
" \"lon\":-70\n" +
" },\n" +
" \"bottom_right\":{\n" +
" \"lat\":30,\n" +
" \"lon\":-80\n" +
" }\n" +
" }\n" +
" }\n" +
"}\n";
assertGeoBoundingBoxQuery(query);
}
@Test
public void testParsingAndToQuery3() throws IOException {
assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
String query = "{\n" +
" \"geo_bounding_box\":{\n" +
" \"" + GEO_POINT_FIELD_NAME+ "\":{\n" +
" \"top_left\":\"40, -70\",\n" +
" \"bottom_right\":\"30, -80\"\n" +
" }\n" +
" }\n" +
"}\n";
assertGeoBoundingBoxQuery(query);
}
@Test
public void testParsingAndToQuery4() throws IOException {
assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
String query = "{\n" +
" \"geo_bounding_box\":{\n" +
" \"" + GEO_POINT_FIELD_NAME+ "\":{\n" +
" \"top_left\":\"drn5x1g8cu2y\",\n" +
" \"bottom_right\":\"30, -80\"\n" +
" }\n" +
" }\n" +
"}\n";
assertGeoBoundingBoxQuery(query);
}
@Test
public void testParsingAndToQuery5() throws IOException {
assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
String query = "{\n" +
" \"geo_bounding_box\":{\n" +
" \"" + GEO_POINT_FIELD_NAME+ "\":{\n" +
" \"top_right\":\"40, -80\",\n" +
" \"bottom_left\":\"30, -70\"\n" +
" }\n" +
" }\n" +
"}\n";
assertGeoBoundingBoxQuery(query);
}
@Test
public void testParsingAndToQuery6() throws IOException {
assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
String query = "{\n" +
" \"geo_bounding_box\":{\n" +
" \"" + GEO_POINT_FIELD_NAME+ "\":{\n" +
" \"right\": -80,\n" +
" \"top\": 40,\n" +
" \"left\": -70,\n" +
" \"bottom\": 30\n" +
" }\n" +
" }\n" +
"}\n";
assertGeoBoundingBoxQuery(query);
}
private void assertGeoBoundingBoxQuery(String query) throws IOException {
Query parsedQuery = parseQuery(query).toQuery(createShardContext());
InMemoryGeoBoundingBoxQuery filter = (InMemoryGeoBoundingBoxQuery) parsedQuery;
assertThat(filter.fieldName(), equalTo(GEO_POINT_FIELD_NAME));
assertThat(filter.topLeft().lat(), closeTo(40, 0.00001));
assertThat(filter.topLeft().lon(), closeTo(-70, 0.00001));
assertThat(filter.bottomRight().lat(), closeTo(30, 0.00001));
assertThat(filter.bottomRight().lon(), closeTo(-80, 0.00001));
}
}

View File

@@ -20,7 +20,6 @@
 package org.elasticsearch.index.query;
-import com.spatial4j.core.shape.Point;
 import org.apache.lucene.search.Query;
 import org.elasticsearch.common.geo.GeoDistance;
 import org.elasticsearch.common.geo.GeoPoint;
@@ -31,9 +30,7 @@ import org.junit.Test;
 import java.io.IOException;
-import static org.hamcrest.Matchers.closeTo;
-import static org.hamcrest.Matchers.equalTo;
-import static org.hamcrest.Matchers.instanceOf;
+import static org.hamcrest.Matchers.*;
 public class GeoDistanceQueryBuilderTests extends AbstractQueryTestCase<GeoDistanceQueryBuilder> {
@@ -180,4 +177,202 @@ public class GeoDistanceQueryBuilderTests extends AbstractQueryTestCase<GeoDista
         assertThat(geoQuery.maxInclusiveDistance(), closeTo(distance, Math.abs(distance) / 1000));
     }
@Test
public void testParsingAndToQuery1() throws IOException {
assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
String query = "{\n" +
" \"geo_distance\":{\n" +
" \"distance\":\"12mi\",\n" +
" \"" + GEO_POINT_FIELD_NAME + "\":{\n" +
" \"lat\":40,\n" +
" \"lon\":-70\n" +
" }\n" +
" }\n" +
"}\n";
assertGeoDistanceRangeQuery(query);
}
@Test
public void testParsingAndToQuery2() throws IOException {
assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
String query = "{\n" +
" \"geo_distance\":{\n" +
" \"distance\":\"12mi\",\n" +
" \"" + GEO_POINT_FIELD_NAME + "\":[-70, 40]\n" +
" }\n" +
"}\n";
assertGeoDistanceRangeQuery(query);
}
@Test
public void testParsingAndToQuery3() throws IOException {
assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
String query = "{\n" +
" \"geo_distance\":{\n" +
" \"distance\":\"12mi\",\n" +
" \"" + GEO_POINT_FIELD_NAME + "\":\"40, -70\"\n" +
" }\n" +
"}\n";
assertGeoDistanceRangeQuery(query);
}
@Test
public void testParsingAndToQuery4() throws IOException {
assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
String query = "{\n" +
" \"geo_distance\":{\n" +
" \"distance\":\"12mi\",\n" +
" \"" + GEO_POINT_FIELD_NAME + "\":\"drn5x1g8cu2y\"\n" +
" }\n" +
"}\n";
assertGeoDistanceRangeQuery(query);
}
@Test
public void testParsingAndToQuery5() throws IOException {
assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
String query = "{\n" +
" \"geo_distance\":{\n" +
" \"distance\":12,\n" +
" \"unit\":\"mi\",\n" +
" \"" + GEO_POINT_FIELD_NAME + "\":{\n" +
" \"lat\":40,\n" +
" \"lon\":-70\n" +
" }\n" +
" }\n" +
"}\n";
assertGeoDistanceRangeQuery(query);
}
@Test
public void testParsingAndToQuery6() throws IOException {
assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
String query = "{\n" +
" \"geo_distance\":{\n" +
" \"distance\":\"12\",\n" +
" \"unit\":\"mi\",\n" +
" \"" + GEO_POINT_FIELD_NAME + "\":{\n" +
" \"lat\":40,\n" +
" \"lon\":-70\n" +
" }\n" +
" }\n" +
"}\n";
assertGeoDistanceRangeQuery(query);
}
@Test
public void testParsingAndToQuery7() throws IOException {
assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
String query = "{\n" +
" \"geo_distance\":{\n" +
" \"distance\":\"19.312128\",\n" +
" \"" + GEO_POINT_FIELD_NAME + "\":{\n" +
" \"lat\":40,\n" +
" \"lon\":-70\n" +
" }\n" +
" }\n" +
"}\n";
Query parsedQuery = parseQuery(query).toQuery(createShardContext());
GeoDistanceRangeQuery filter = (GeoDistanceRangeQuery) parsedQuery;
assertThat(filter.fieldName(), equalTo(GEO_POINT_FIELD_NAME));
assertThat(filter.lat(), closeTo(40, 0.00001));
assertThat(filter.lon(), closeTo(-70, 0.00001));
assertThat(filter.minInclusiveDistance(), equalTo(Double.NEGATIVE_INFINITY));
assertThat(filter.maxInclusiveDistance(), closeTo(DistanceUnit.DEFAULT.convert(0.012, DistanceUnit.MILES), 0.00001));
}
@Test
public void testParsingAndToQuery8() throws IOException {
assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
String query = "{\n" +
" \"geo_distance\":{\n" +
" \"distance\":19.312128,\n" +
" \"" + GEO_POINT_FIELD_NAME + "\":{\n" +
" \"lat\":40,\n" +
" \"lon\":-70\n" +
" }\n" +
" }\n" +
"}\n";
Query parsedQuery = parseQuery(query).toQuery(createShardContext());
GeoDistanceRangeQuery filter = (GeoDistanceRangeQuery) parsedQuery;
assertThat(filter.fieldName(), equalTo(GEO_POINT_FIELD_NAME));
assertThat(filter.lat(), closeTo(40, 0.00001));
assertThat(filter.lon(), closeTo(-70, 0.00001));
assertThat(filter.minInclusiveDistance(), equalTo(Double.NEGATIVE_INFINITY));
assertThat(filter.maxInclusiveDistance(), closeTo(DistanceUnit.KILOMETERS.convert(12, DistanceUnit.MILES), 0.00001));
}
@Test
public void testParsingAndToQuery9() throws IOException {
assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
String query = "{\n" +
" \"geo_distance\":{\n" +
" \"distance\":\"19.312128\",\n" +
" \"unit\":\"km\",\n" +
" \"" + GEO_POINT_FIELD_NAME + "\":{\n" +
" \"lat\":40,\n" +
" \"lon\":-70\n" +
" }\n" +
" }\n" +
"}\n";
assertGeoDistanceRangeQuery(query);
}
@Test
public void testParsingAndToQuery10() throws IOException {
assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
String query = "{\n" +
" \"geo_distance\":{\n" +
" \"distance\":19.312128,\n" +
" \"unit\":\"km\",\n" +
" \"" + GEO_POINT_FIELD_NAME + "\":{\n" +
" \"lat\":40,\n" +
" \"lon\":-70\n" +
" }\n" +
" }\n" +
"}\n";
assertGeoDistanceRangeQuery(query);
}
@Test
public void testParsingAndToQuery11() throws IOException {
assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
String query = "{\n" +
" \"geo_distance\":{\n" +
" \"distance\":\"19.312128km\",\n" +
" \"" + GEO_POINT_FIELD_NAME + "\":{\n" +
" \"lat\":40,\n" +
" \"lon\":-70\n" +
" }\n" +
" }\n" +
"}\n";
assertGeoDistanceRangeQuery(query);
}
@Test
public void testParsingAndToQuery12() throws IOException {
assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
String query = "{\n" +
" \"geo_distance\":{\n" +
" \"distance\":\"12mi\",\n" +
" \"unit\":\"km\",\n" +
" \"" + GEO_POINT_FIELD_NAME + "\":{\n" +
" \"lat\":40,\n" +
" \"lon\":-70\n" +
" }\n" +
" }\n" +
"}\n";
assertGeoDistanceRangeQuery(query);
}
private void assertGeoDistanceRangeQuery(String query) throws IOException {
assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
Query parsedQuery = parseQuery(query).toQuery(createShardContext());
GeoDistanceRangeQuery filter = (GeoDistanceRangeQuery) parsedQuery;
assertThat(filter.fieldName(), equalTo(GEO_POINT_FIELD_NAME));
assertThat(filter.lat(), closeTo(40, 0.00001));
assertThat(filter.lon(), closeTo(-70, 0.00001));
assertThat(filter.minInclusiveDistance(), equalTo(Double.NEGATIVE_INFINITY));
assertThat(filter.maxInclusiveDistance(), closeTo(DistanceUnit.DEFAULT.convert(12, DistanceUnit.MILES), 0.00001));
}
}

View File

@@ -23,6 +23,7 @@ import com.spatial4j.core.shape.jts.JtsGeometry;
 import com.vividsolutions.jts.geom.Coordinate;
 import org.apache.lucene.search.Query;
+import org.elasticsearch.common.ParsingException;
 import org.elasticsearch.common.geo.GeoPoint;
 import org.elasticsearch.common.geo.GeoUtils;
 import org.elasticsearch.common.geo.builders.ShapeBuilder;
@@ -37,6 +38,8 @@ import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
+import static org.elasticsearch.test.StreamsUtils.copyToStringFromClasspath;
+import static org.hamcrest.Matchers.closeTo;
 import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.instanceOf;
@@ -70,7 +73,6 @@ public class GeoPolygonQueryBuilderTests extends AbstractQueryTestCase<GeoPolygo
             assertThat(queryPoints[i], equalTo(queryBuilderPoints.get(i)));
         }
     }
 }
 /**
@@ -155,4 +157,114 @@ public class GeoPolygonQueryBuilderTests extends AbstractQueryTestCase<GeoPolygo
         assertEquals("Deprecated field [normalize] used, expected [coerce] instead", ex.getMessage());
     }
 }
@Test
public void testParsingAndToQueryParsingExceptions() throws IOException {
String[] brokenFiles = new String[]{
"/org/elasticsearch/index/query/geo_polygon_exception_1.json",
"/org/elasticsearch/index/query/geo_polygon_exception_2.json",
"/org/elasticsearch/index/query/geo_polygon_exception_3.json",
"/org/elasticsearch/index/query/geo_polygon_exception_4.json",
"/org/elasticsearch/index/query/geo_polygon_exception_5.json"
};
for (String brokenFile : brokenFiles) {
String query = copyToStringFromClasspath(brokenFile);
try {
parseQuery(query);
fail("parsing a broken geo_polygon filter didn't fail as expected while parsing: " + brokenFile);
} catch (ParsingException e) {
// success!
}
}
}
@Test
public void testParsingAndToQuery1() throws IOException {
assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
String query = "{\n" +
" \"geo_polygon\":{\n" +
" \"" + GEO_POINT_FIELD_NAME + "\":{\n" +
" \"points\":[\n" +
" [-70, 40],\n" +
" [-80, 30],\n" +
" [-90, 20]\n" +
" ]\n" +
" }\n" +
" }\n" +
"}\n";
assertGeoPolygonQuery(query);
}
@Test
public void testParsingAndToQuery2() throws IOException {
assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
String query = "{\n" +
" \"geo_polygon\":{\n" +
" \"" + GEO_POINT_FIELD_NAME + "\":{\n" +
" \"points\":[\n" +
" {\n" +
" \"lat\":40,\n" +
" \"lon\":-70\n" +
" },\n" +
" {\n" +
" \"lat\":30,\n" +
" \"lon\":-80\n" +
" },\n" +
" {\n" +
" \"lat\":20,\n" +
" \"lon\":-90\n" +
" }\n" +
" ]\n" +
" }\n" +
" }\n" +
"}\n";
assertGeoPolygonQuery(query);
}
@Test
public void testParsingAndToQuery3() throws IOException {
assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
String query = "{\n" +
" \"geo_polygon\":{\n" +
" \"" + GEO_POINT_FIELD_NAME + "\":{\n" +
" \"points\":[\n" +
" \"40, -70\",\n" +
" \"30, -80\",\n" +
" \"20, -90\"\n" +
" ]\n" +
" }\n" +
" }\n" +
"}\n";
assertGeoPolygonQuery(query);
}
@Test
public void testParsingAndToQuery4() throws IOException {
assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
String query = "{\n" +
" \"geo_polygon\":{\n" +
" \"" + GEO_POINT_FIELD_NAME + "\":{\n" +
" \"points\":[\n" +
" \"drn5x1g8cu2y\",\n" +
" \"30, -80\",\n" +
" \"20, -90\"\n" +
" ]\n" +
" }\n" +
" }\n" +
"}\n";
assertGeoPolygonQuery(query);
}
private void assertGeoPolygonQuery(String query) throws IOException {
Query parsedQuery = parseQuery(query).toQuery(createShardContext());
GeoPolygonQuery filter = (GeoPolygonQuery) parsedQuery;
assertThat(filter.fieldName(), equalTo(GEO_POINT_FIELD_NAME));
assertThat(filter.points().length, equalTo(4));
assertThat(filter.points()[0].lat(), closeTo(40, 0.00001));
assertThat(filter.points()[0].lon(), closeTo(-70, 0.00001));
assertThat(filter.points()[1].lat(), closeTo(30, 0.00001));
assertThat(filter.points()[1].lon(), closeTo(-80, 0.00001));
assertThat(filter.points()[2].lat(), closeTo(20, 0.00001));
assertThat(filter.points()[2].lon(), closeTo(-90, 0.00001));
}
}

View File

@@ -1,172 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.query;
import org.apache.lucene.search.NumericRangeQuery;
import org.apache.lucene.search.Query;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.inject.Injector;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.ParsedDocument;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.test.ESSingleNodeTestCase;
import org.elasticsearch.test.TestSearchContext;
import org.joda.time.DateTime;
import org.junit.Before;
import org.junit.Test;
import java.io.IOException;
import static org.elasticsearch.test.StreamsUtils.copyToBytesFromClasspath;
import static org.elasticsearch.test.StreamsUtils.copyToStringFromClasspath;
import static org.hamcrest.Matchers.instanceOf;
import static org.hamcrest.Matchers.is;
/**
*
*/
public class IndexQueryParserFilterDateRangeFormatTests extends ESSingleNodeTestCase {
private Injector injector;
private IndexQueryParserService queryParser;
@Before
public void setup() throws IOException {
IndexService indexService = createIndex("test");
injector = indexService.injector();
MapperService mapperService = indexService.mapperService();
String mapping = copyToStringFromClasspath("/org/elasticsearch/index/query/mapping.json");
mapperService.merge("person", new CompressedXContent(mapping), true, false);
ParsedDocument doc = mapperService.documentMapper("person").parse("test", "person", "1", new BytesArray(copyToBytesFromClasspath("/org/elasticsearch/index/query/data.json")));
assertNotNull(doc.dynamicMappingsUpdate());
client().admin().indices().preparePutMapping("test").setType("person").setSource(doc.dynamicMappingsUpdate().toString()).get();
queryParser = injector.getInstance(IndexQueryParserService.class);
}
private IndexQueryParserService queryParser() throws IOException {
return this.queryParser;
}
@Test
public void testDateRangeFilterFormat() throws IOException {
IndexQueryParserService queryParser = queryParser();
String query = copyToStringFromClasspath("/org/elasticsearch/index/query/date_range_filter_format.json");
queryParser.parse(query).query();
// Sadly from NoCacheFilter, we can not access to the delegate filter so we can not check
// it's the one we are expecting
// Test Invalid format
query = copyToStringFromClasspath("/org/elasticsearch/index/query/date_range_filter_format_invalid.json");
try {
SearchContext.setCurrent(new TestSearchContext());
// We need to rewrite, because range on date field initially returns LateParsingQuery
queryParser.parse(query).query().rewrite(null);
fail("A Range Filter with a specific format but with an unexpected date should raise a ParsingException");
} catch (ElasticsearchParseException e) {
// We expect it
} finally {
SearchContext.removeCurrent();
}
}
@Test
public void testDateRangeQueryFormat() throws IOException {
IndexQueryParserService queryParser = queryParser();
// We test 01/01/2012 from gte and 2030 for lt
String query = copyToStringFromClasspath("/org/elasticsearch/index/query/date_range_query_format.json");
Query parsedQuery;
try {
SearchContext.setCurrent(new TestSearchContext());
// We need to rewrite, because range on date field initially returns LateParsingQuery
parsedQuery = queryParser.parse(query).query().rewrite(null);
} finally {
SearchContext.removeCurrent();;
}
assertThat(parsedQuery, instanceOf(NumericRangeQuery.class));
// Min value was 01/01/2012 (dd/MM/yyyy)
DateTime min = DateTime.parse("2012-01-01T00:00:00.000+00");
assertThat(((NumericRangeQuery) parsedQuery).getMin().longValue(), is(min.getMillis()));
// Max value was 2030 (yyyy)
DateTime max = DateTime.parse("2030-01-01T00:00:00.000+00");
assertThat(((NumericRangeQuery) parsedQuery).getMax().longValue(), is(max.getMillis()));
// Test Invalid format
query = copyToStringFromClasspath("/org/elasticsearch/index/query/date_range_query_format_invalid.json");
try {
SearchContext.setCurrent(new TestSearchContext());
queryParser.parse(query).query().rewrite(null);
fail("A Range Query with a specific format but with an unexpected date should raise a ParsingException");
} catch (ElasticsearchParseException e) {
// We expect it
} finally {
SearchContext.removeCurrent();
}
}
@Test
public void testDateRangeBoundaries() throws IOException {
IndexQueryParserService queryParser = queryParser();
String query = copyToStringFromClasspath("/org/elasticsearch/index/query/date_range_query_boundaries_inclusive.json");
Query parsedQuery;
try {
SearchContext.setCurrent(new TestSearchContext());
// We need to rewrite, because range on date field initially returns LateParsingQuery
parsedQuery = queryParser.parse(query).query().rewrite(null);
} finally {
SearchContext.removeCurrent();
}
assertThat(parsedQuery, instanceOf(NumericRangeQuery.class));
NumericRangeQuery rangeQuery = (NumericRangeQuery) parsedQuery;
DateTime min = DateTime.parse("2014-11-01T00:00:00.000+00");
assertThat(rangeQuery.getMin().longValue(), is(min.getMillis()));
assertTrue(rangeQuery.includesMin());
DateTime max = DateTime.parse("2014-12-08T23:59:59.999+00");
assertThat(rangeQuery.getMax().longValue(), is(max.getMillis()));
assertTrue(rangeQuery.includesMax());
query = copyToStringFromClasspath("/org/elasticsearch/index/query/date_range_query_boundaries_exclusive.json");
try {
SearchContext.setCurrent(new TestSearchContext());
// We need to rewrite, because range on date field initially returns LateParsingQuery
parsedQuery = queryParser.parse(query).query().rewrite(null);
} finally {
SearchContext.removeCurrent();
}
assertThat(parsedQuery, instanceOf(NumericRangeQuery.class));
rangeQuery = (NumericRangeQuery) parsedQuery;
min = DateTime.parse("2014-11-30T23:59:59.999+00");
assertThat(rangeQuery.getMin().longValue(), is(min.getMillis()));
assertFalse(rangeQuery.includesMin());
max = DateTime.parse("2014-12-08T00:00:00.000+00");
assertThat(rangeQuery.getMax().longValue(), is(max.getMillis()));
assertFalse(rangeQuery.includesMax());
}
}

View File

@@ -1,129 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.query;
import org.apache.lucene.search.NumericRangeQuery;
import org.apache.lucene.search.Query;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.inject.Injector;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.ParsedDocument;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.test.ESSingleNodeTestCase;
import org.elasticsearch.test.TestSearchContext;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.junit.Before;
import org.junit.Test;
import java.io.IOException;
import static org.elasticsearch.test.StreamsUtils.copyToBytesFromClasspath;
import static org.elasticsearch.test.StreamsUtils.copyToStringFromClasspath;
import static org.hamcrest.Matchers.instanceOf;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.lessThanOrEqualTo;
/**
*
*/
public class IndexQueryParserFilterDateRangeTimezoneTests extends ESSingleNodeTestCase {
private Injector injector;
private IndexQueryParserService queryParser;
@Before
public void setup() throws IOException {
IndexService indexService = createIndex("test");
injector = indexService.injector();
MapperService mapperService = indexService.mapperService();
String mapping = copyToStringFromClasspath("/org/elasticsearch/index/query/mapping.json");
mapperService.merge("person", new CompressedXContent(mapping), true, false);
ParsedDocument doc = mapperService.documentMapper("person").parse("test", "person", "1", new BytesArray(copyToBytesFromClasspath("/org/elasticsearch/index/query/data.json")));
assertNotNull(doc.dynamicMappingsUpdate());
client().admin().indices().preparePutMapping("test").setType("person").setSource(doc.dynamicMappingsUpdate().toString()).get();
queryParser = injector.getInstance(IndexQueryParserService.class);
}
private IndexQueryParserService queryParser() throws IOException {
return this.queryParser;
}
@Test
public void testDateRangeFilterTimezone() throws IOException {
IndexQueryParserService queryParser = queryParser();
String query = copyToStringFromClasspath("/org/elasticsearch/index/query/date_range_filter_timezone.json");
queryParser.parse(query).query();
// Sadly from NoCacheFilter, we can not access to the delegate filter so we can not check
// it's the one we are expecting
query = copyToStringFromClasspath("/org/elasticsearch/index/query/date_range_filter_timezone_numeric_field.json");
try {
SearchContext.setCurrent(new TestSearchContext());
queryParser.parse(query).query();
fail("A Range Filter on a numeric field with a TimeZone should raise a ParsingException");
} catch (QueryShardException e) {
// We expect it
} finally {
SearchContext.removeCurrent();
}
}
@Test
public void testDateRangeQueryTimezone() throws IOException {
long startDate = System.currentTimeMillis();
IndexQueryParserService queryParser = queryParser();
String query = copyToStringFromClasspath("/org/elasticsearch/index/query/date_range_query_timezone.json");
Query parsedQuery;
try {
SearchContext.setCurrent(new TestSearchContext());
parsedQuery = queryParser.parse(query).query().rewrite(null);
} finally {
SearchContext.removeCurrent();
}
assertThat(parsedQuery, instanceOf(NumericRangeQuery.class));
// Min value was 2012-01-01 (UTC) so we need to remove one hour
DateTime min = DateTime.parse("2012-01-01T00:00:00.000+01:00");
// Max value is when we started the test. So it should be some ms from now
DateTime max = new DateTime(startDate, DateTimeZone.UTC);
assertThat(((NumericRangeQuery) parsedQuery).getMin().longValue(), is(min.getMillis()));
// We should not have a big difference here (should be some ms)
assertThat(((NumericRangeQuery) parsedQuery).getMax().longValue() - max.getMillis(), lessThanOrEqualTo(60000L));
query = copyToStringFromClasspath("/org/elasticsearch/index/query/date_range_query_timezone_numeric_field.json");
try {
SearchContext.setCurrent(new TestSearchContext());
queryParser.parse(query).query();
fail("A Range Query on a numeric field with a TimeZone should raise a ParsingException");
} catch (QueryShardException e) {
// We expect it
} finally {
SearchContext.removeCurrent();
}
}
}

View File

@@ -23,6 +23,8 @@ import org.apache.lucene.search.MatchAllDocsQuery;
 import org.apache.lucene.search.Query;
 import java.io.IOException;
+import java.util.HashMap;
+import java.util.Map;
 import static org.hamcrest.CoreMatchers.instanceOf;
@@ -33,6 +35,16 @@ public class MatchAllQueryBuilderTests extends AbstractQueryTestCase<MatchAllQue
         return new MatchAllQueryBuilder();
     }
@Override
protected Map<String, MatchAllQueryBuilder> getAlternateVersions() {
Map<String, MatchAllQueryBuilder> alternateVersions = new HashMap<>();
String queryAsString = "{\n" +
" \"match_all\": []\n" +
"}";
alternateVersions.put(queryAsString, new MatchAllQueryBuilder());
return alternateVersions;
}
    @Override
    protected void doAssertLuceneQuery(MatchAllQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException {
        assertThat(query, instanceOf(MatchAllDocsQuery.class));

View File

@@ -21,12 +21,7 @@ package org.elasticsearch.index.query;
 import org.apache.lucene.index.Term;
 import org.apache.lucene.queries.ExtendedCommonTermsQuery;
-import org.apache.lucene.search.BooleanQuery;
-import org.apache.lucene.search.FuzzyQuery;
-import org.apache.lucene.search.MatchAllDocsQuery;
-import org.apache.lucene.search.PhraseQuery;
-import org.apache.lucene.search.Query;
-import org.apache.lucene.search.TermQuery;
+import org.apache.lucene.search.*;
 import org.elasticsearch.common.lucene.BytesRefs;
 import org.elasticsearch.common.lucene.search.MultiPhrasePrefixQuery;
 import org.elasticsearch.index.mapper.MappedFieldType;
@@ -40,6 +35,7 @@ import java.util.Locale;
 import static org.hamcrest.CoreMatchers.either;
 import static org.hamcrest.CoreMatchers.instanceOf;
 import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.greaterThan;
 import static org.hamcrest.Matchers.notNullValue;
 public class MatchQueryBuilderTests extends AbstractQueryTestCase<MatchQueryBuilder> {
@@ -147,6 +143,9 @@ public class MatchQueryBuilderTests extends AbstractQueryTestCase<MatchQueryBuil
         if (query instanceof BooleanQuery) {
             BooleanQuery bq = (BooleanQuery) query;
+            if (queryBuilder.minimumShouldMatch() != null) {
+                assertThat(bq.getMinimumNumberShouldMatch(), greaterThan(0));
+            }
             if (queryBuilder.analyzer() == null && queryBuilder.value().toString().length() > 0) {
                 assertEquals(bq.clauses().size(), queryBuilder.value().toString().split(" ").length);
             }
@@ -155,13 +154,13 @@ public class MatchQueryBuilderTests extends AbstractQueryTestCase<MatchQueryBuil
         if (query instanceof ExtendedCommonTermsQuery) {
             assertTrue(queryBuilder.cutoffFrequency() != null);
             ExtendedCommonTermsQuery ectq = (ExtendedCommonTermsQuery) query;
-            assertEquals((float) queryBuilder.cutoffFrequency(), ectq.getMaxTermFrequency(), Float.MIN_VALUE);
+            assertEquals(queryBuilder.cutoffFrequency(), ectq.getMaxTermFrequency(), Float.MIN_VALUE);
         }
         if (query instanceof FuzzyQuery) {
             assertTrue(queryBuilder.fuzziness() != null);
             FuzzyQuery fuzzyQuery = (FuzzyQuery) query;
-            fuzzyQuery.getTerm().equals(new Term(STRING_FIELD_NAME, BytesRefs.toBytesRef(queryBuilder.value())));
+            assertThat(fuzzyQuery.getTerm(), equalTo(new Term(STRING_FIELD_NAME, BytesRefs.toBytesRef(queryBuilder.value()))));
             assertThat(queryBuilder.prefixLength(), equalTo(fuzzyQuery.getPrefixLength()));
             assertThat(queryBuilder.fuzzyTranspositions(), equalTo(fuzzyQuery.getTranspositions()));
         }
@@ -223,6 +222,6 @@ public class MatchQueryBuilderTests extends AbstractQueryTestCase<MatchQueryBuil
     public void testBadAnalyzer() throws IOException {
         MatchQueryBuilder matchQuery = new MatchQueryBuilder("fieldName", "text");
         matchQuery.analyzer("bogusAnalyzer");
-        matchQuery.doToQuery(createShardContext());
+        matchQuery.toQuery(createShardContext());
     }
 }

View File

@@ -41,7 +41,6 @@ import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.index.VersionType;
 import org.elasticsearch.index.query.MoreLikeThisQueryBuilder.Item;
-import org.hamcrest.Matchers;
 import org.junit.Before;
 import org.junit.Test;
@@ -52,6 +51,9 @@ import java.util.HashMap;
 import java.util.Map;
 import java.util.stream.Stream;
+import static org.elasticsearch.index.query.QueryBuilders.moreLikeThisQuery;
+import static org.hamcrest.Matchers.*;
 public class MoreLikeThisQueryBuilderTests extends AbstractQueryTestCase<MoreLikeThisQueryBuilder> {
     private static String[] randomFields;
@@ -205,7 +207,7 @@ public class MoreLikeThisQueryBuilderTests extends AbstractQueryTestCase<MoreLik
         if (request.doc() != null) {
             generatedFields = generateFields(randomFields, request.doc().toUtf8());
         } else {
-            generatedFields = generateFields(request.selectedFields().toArray(new String[0]), request.id());
+            generatedFields = generateFields(request.selectedFields().toArray(new String[request.selectedFields().size()]), request.id());
         }
         EnumSet<TermVectorsRequest.Flag> flags = EnumSet.of(TermVectorsRequest.Flag.Positions, TermVectorsRequest.Flag.Offsets);
         response.setFields(generatedFields, request.selectedFields(), flags, generatedFields);
@@ -233,10 +235,10 @@ public class MoreLikeThisQueryBuilderTests extends AbstractQueryTestCase<MoreLik
     @Override
     protected void doAssertLuceneQuery(MoreLikeThisQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException {
         if (queryBuilder.likeItems() != null && queryBuilder.likeItems().length > 0) {
-            assertThat(query, Matchers.instanceOf(BooleanQuery.class));
+            assertThat(query, instanceOf(BooleanQuery.class));
         } else {
             // we rely on integration tests for a deeper check here
-            assertThat(query, Matchers.instanceOf(MoreLikeThisQuery.class));
+            assertThat(query, instanceOf(MoreLikeThisQuery.class));
         }
     }
@@ -262,10 +264,21 @@ public class MoreLikeThisQueryBuilderTests extends AbstractQueryTestCase<MoreLik
             queryBuilder.toQuery(createShardContext());
             fail("should have failed with IllegalArgumentException for field: " + unsupportedField);
         } catch (IllegalArgumentException e) {
-            assertThat(e.getMessage(), Matchers.containsString("more_like_this doesn't support binary/numeric fields"));
+            assertThat(e.getMessage(), containsString("more_like_this doesn't support binary/numeric fields"));
         }
     }
@Test
public void testMoreLikeThisBuilder() throws Exception {
Query parsedQuery = parseQuery(moreLikeThisQuery(new String[]{"name.first", "name.last"}, new String[]{"something"}, null).minTermFreq(1).maxQueryTerms(12).buildAsBytes()).toQuery(createShardContext());
assertThat(parsedQuery, instanceOf(MoreLikeThisQuery.class));
MoreLikeThisQuery mltQuery = (MoreLikeThisQuery) parsedQuery;
assertThat(mltQuery.getMoreLikeFields()[0], equalTo("name.first"));
assertThat(mltQuery.getLikeText(), equalTo("something"));
assertThat(mltQuery.getMinTermFrequency(), equalTo(1));
assertThat(mltQuery.getMaxQueryTerms(), equalTo(12));
}
    @Test
    public void testItemSerialization() throws IOException {
        Item expectedItem = generateRandomItem();

View File

@@ -28,7 +28,9 @@ import org.elasticsearch.index.search.MatchQuery;
 import org.junit.Test;
 import java.io.IOException;
+import java.util.HashMap;
 import java.util.List;
+import java.util.Map;
 import static org.elasticsearch.index.query.QueryBuilders.multiMatchQuery;
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertBooleanSubQuery;
@@ -101,6 +103,19 @@ public class MultiMatchQueryBuilderTests extends AbstractQueryTestCase<MultiMatc
         return query;
     }
@Override
protected Map<String, MultiMatchQueryBuilder> getAlternateVersions() {
Map<String, MultiMatchQueryBuilder> alternateVersions = new HashMap<>();
String query = "{\n" +
" \"multi_match\": {\n" +
" \"query\": \"foo bar\",\n" +
" \"fields\": \"myField\"\n" +
" }\n" +
"}";
alternateVersions.put(query, new MultiMatchQueryBuilder("foo bar", "myField"));
return alternateVersions;
}
@Override
protected void doAssertLuceneQuery(MultiMatchQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException {
// we rely on integration tests for deeper checks here
@ -133,6 +148,13 @@ public class MultiMatchQueryBuilderTests extends AbstractQueryTestCase<MultiMatc
} catch (IllegalArgumentException e) {
// expected
}
try {
new MultiMatchQueryBuilder("value", "field").type(null);
fail("type must not be null");
} catch (IllegalArgumentException e) {
// expected
}
}
@Override

View File

@ -19,12 +19,16 @@
package org.elasticsearch.index.query;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.MultiTermQuery;
import org.apache.lucene.search.PrefixQuery;
import org.apache.lucene.search.Query;
import org.junit.Test;
import java.io.IOException;
import java.util.Arrays;
import static org.elasticsearch.index.query.QueryBuilders.prefixQuery;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.instanceOf;
@ -47,6 +51,7 @@ public class PrefixQueryBuilderTests extends AbstractQueryTestCase<PrefixQueryBu
assertThat(query, instanceOf(PrefixQuery.class));
PrefixQuery prefixQuery = (PrefixQuery) query;
assertThat(prefixQuery.getPrefix().field(), equalTo(queryBuilder.fieldName()));
assertThat(prefixQuery.getPrefix().text(), equalTo(queryBuilder.value()));
}
@Test
@ -69,4 +74,15 @@ public class PrefixQueryBuilderTests extends AbstractQueryTestCase<PrefixQueryBu
// expected
}
}
@Test
public void testBlendedRewriteMethod() throws IOException {
for (String rewrite : Arrays.asList("top_terms_blended_freqs_10", "topTermsBlendedFreqs10")) {
Query parsedQuery = parseQuery(prefixQuery("field", "val").rewrite(rewrite).buildAsBytes()).toQuery(createShardContext());
assertThat(parsedQuery, instanceOf(PrefixQuery.class));
PrefixQuery prefixQuery = (PrefixQuery) parsedQuery;
assertThat(prefixQuery.getPrefix(), equalTo(new Term("field", "val")));
assertThat(prefixQuery.getRewriteMethod(), instanceOf(MultiTermQuery.TopTermsBlendedFreqScoringRewrite.class));
}
}
}

View File

@ -24,6 +24,7 @@ import org.apache.lucene.search.*;
import org.apache.lucene.util.automaton.TooComplexToDeterminizeException;
import org.elasticsearch.common.lucene.all.AllTermQuery;
import org.hamcrest.Matchers;
import org.joda.time.DateTimeZone;
import org.junit.Test;
import java.io.IOException;
@ -296,7 +297,37 @@ public class QueryStringQueryBuilderTests extends AbstractQueryTestCase<QueryStr
public void testToQueryNumericRangeQuery() throws Exception {
assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
Query query = queryStringQuery("12~0.2").defaultField(INT_FIELD_NAME).toQuery(createShardContext());
- assertThat(query, instanceOf(NumericRangeQuery.class));
NumericRangeQuery fuzzyQuery = (NumericRangeQuery) query;
assertThat(fuzzyQuery.getMin().longValue(), equalTo(12l));
assertThat(fuzzyQuery.getMax().longValue(), equalTo(12l));
}
@Test
public void testTimezone() throws Exception {
assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
String queryAsString = "{\n" +
" \"query_string\":{\n" +
" \"time_zone\":\"Europe/Paris\",\n" +
" \"query\":\"" + DATE_FIELD_NAME + ":[2012 TO 2014]\"\n" +
" }\n" +
"}";
QueryBuilder<?> queryBuilder = parseQuery(queryAsString);
assertThat(queryBuilder, instanceOf(QueryStringQueryBuilder.class));
QueryStringQueryBuilder queryStringQueryBuilder = (QueryStringQueryBuilder) queryBuilder;
assertThat(queryStringQueryBuilder.timeZone(), equalTo(DateTimeZone.forID("Europe/Paris")));
try {
queryAsString = "{\n" +
" \"query_string\":{\n" +
" \"time_zone\":\"This timezone does not exist\",\n" +
" \"query\":\"" + DATE_FIELD_NAME + ":[2012 TO 2014]\"\n" +
" }\n" +
"}";
parseQuery(queryAsString);
fail("we expect a ParsingException as we are providing an unknown time_zome");
} catch (IllegalArgumentException e) {
// We expect this one
}
}
}

View File

@ -61,10 +61,27 @@ public class RandomQueryBuilder {
public static MultiTermQueryBuilder createMultiTermQuery(Random r) {
// for now, only use String Rangequeries for MultiTerm test, numeric and date makes little sense
// see issue #12123 for discussion
- // Prefix / Fuzzy / RegEx / Wildcard can go here later once refactored and they have random query generators
- RangeQueryBuilder query = new RangeQueryBuilder(AbstractQueryTestCase.STRING_FIELD_NAME);
- query.from("a" + RandomStrings.randomAsciiOfLengthBetween(r, 1, 10));
- query.to("z" + RandomStrings.randomAsciiOfLengthBetween(r, 1, 10));
- return query;
switch(RandomInts.randomIntBetween(r, 0, 5)) {
case 0:
RangeQueryBuilder stringRangeQuery = new RangeQueryBuilder(AbstractQueryTestCase.STRING_FIELD_NAME);
stringRangeQuery.from("a" + RandomStrings.randomAsciiOfLengthBetween(r, 1, 10));
stringRangeQuery.to("z" + RandomStrings.randomAsciiOfLengthBetween(r, 1, 10));
return stringRangeQuery;
case 1:
RangeQueryBuilder numericRangeQuery = new RangeQueryBuilder(AbstractQueryTestCase.INT_FIELD_NAME);
numericRangeQuery.from(RandomInts.randomIntBetween(r, 1, 100));
numericRangeQuery.to(RandomInts.randomIntBetween(r, 101, 200));
return numericRangeQuery;
case 2:
return new FuzzyQueryBuilder(AbstractQueryTestCase.INT_FIELD_NAME, RandomInts.randomInt(r, 1000));
case 3:
return new FuzzyQueryBuilder(AbstractQueryTestCase.STRING_FIELD_NAME, RandomStrings.randomAsciiOfLengthBetween(r, 1, 10));
case 4:
return new PrefixQueryBuilderTests().createTestQueryBuilder();
case 5:
return new WildcardQueryBuilderTests().createTestQueryBuilder();
default:
throw new UnsupportedOperationException();
}
}
}

View File

@ -22,13 +22,18 @@ package org.elasticsearch.index.query;
import org.apache.lucene.search.NumericRangeQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermRangeQuery;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.common.lucene.BytesRefs;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.junit.Test;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
- import static org.hamcrest.Matchers.instanceOf;
import static org.elasticsearch.index.query.QueryBuilders.rangeQuery;
import static org.hamcrest.Matchers.*;
public class RangeQueryBuilderTests extends AbstractQueryTestCase<RangeQueryBuilder> {
@ -38,17 +43,10 @@ public class RangeQueryBuilderTests extends AbstractQueryTestCase<RangeQueryBuil
// switch between numeric and date ranges
switch (randomIntBetween(0, 2)) {
case 0:
- if (randomBoolean()) {
- // use mapped integer field for numeric range queries
- query = new RangeQueryBuilder(INT_FIELD_NAME);
- query.from(randomIntBetween(1, 100));
- query.to(randomIntBetween(101, 200));
- } else {
- // use unmapped field for numeric range queries
- query = new RangeQueryBuilder(randomAsciiOfLengthBetween(1, 10));
- query.from(0.0 - randomDouble());
- query.to(randomDouble());
- }
// use mapped integer field for numeric range queries
query = new RangeQueryBuilder(INT_FIELD_NAME);
query.from(randomIntBetween(1, 100));
query.to(randomIntBetween(101, 200));
break;
case 1:
// use mapped date field, using date string representation
@ -83,14 +81,46 @@ public class RangeQueryBuilderTests extends AbstractQueryTestCase<RangeQueryBuil
return query;
}
@Override
protected Map<String, RangeQueryBuilder> getAlternateVersions() {
Map<String, RangeQueryBuilder> alternateVersions = new HashMap<>();
RangeQueryBuilder rangeQueryBuilder = new RangeQueryBuilder(INT_FIELD_NAME);
rangeQueryBuilder.from(randomIntBetween(1, 100)).to(randomIntBetween(101, 200));
rangeQueryBuilder.includeLower(randomBoolean());
rangeQueryBuilder.includeUpper(randomBoolean());
String query =
"{\n" +
" \"range\":{\n" +
" \"" + INT_FIELD_NAME + "\": {\n" +
" \"" + (rangeQueryBuilder.includeLower() ? "gte" : "gt") + "\": " + rangeQueryBuilder.from() + ",\n" +
" \"" + (rangeQueryBuilder.includeUpper() ? "lte" : "lt") + "\": " + rangeQueryBuilder.to() + "\n" +
" }\n" +
" }\n" +
"}";
alternateVersions.put(query, rangeQueryBuilder);
return alternateVersions;
}
@Override
protected void doAssertLuceneQuery(RangeQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException {
if (getCurrentTypes().length == 0 || (queryBuilder.fieldName().equals(DATE_FIELD_NAME) == false && queryBuilder.fieldName().equals(INT_FIELD_NAME) == false)) {
assertThat(query, instanceOf(TermRangeQuery.class));
TermRangeQuery termRangeQuery = (TermRangeQuery) query;
assertThat(termRangeQuery.getField(), equalTo(queryBuilder.fieldName()));
assertThat(termRangeQuery.getLowerTerm(), equalTo(BytesRefs.toBytesRef(queryBuilder.from())));
assertThat(termRangeQuery.getUpperTerm(), equalTo(BytesRefs.toBytesRef(queryBuilder.to())));
assertThat(termRangeQuery.includesLower(), equalTo(queryBuilder.includeLower()));
assertThat(termRangeQuery.includesUpper(), equalTo(queryBuilder.includeUpper()));
} else if (queryBuilder.fieldName().equals(DATE_FIELD_NAME)) {
//we can't properly test unmapped dates because LateParsingQuery is package private
} else if (queryBuilder.fieldName().equals(INT_FIELD_NAME)) {
assertThat(query, instanceOf(NumericRangeQuery.class));
NumericRangeQuery numericRangeQuery = (NumericRangeQuery) query;
assertThat(numericRangeQuery.getField(), equalTo(queryBuilder.fieldName()));
assertThat(numericRangeQuery.getMin(), equalTo(queryBuilder.from()));
assertThat(numericRangeQuery.getMax(), equalTo(queryBuilder.to()));
assertThat(numericRangeQuery.includesMin(), equalTo(queryBuilder.includeLower()));
assertThat(numericRangeQuery.includesMax(), equalTo(queryBuilder.includeUpper()));
} else {
throw new UnsupportedOperationException();
}
@ -152,4 +182,147 @@ public class RangeQueryBuilderTests extends AbstractQueryTestCase<RangeQueryBuil
query.from(1).to(10).timeZone("UTC");
query.toQuery(createShardContext());
}
@Test
public void testToQueryNumericField() throws IOException {
assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
Query parsedQuery = rangeQuery(INT_FIELD_NAME).from(23).to(54).includeLower(true).includeUpper(false).toQuery(createShardContext());
// since age is automatically registered in data, we encode it as numeric
assertThat(parsedQuery, instanceOf(NumericRangeQuery.class));
NumericRangeQuery rangeQuery = (NumericRangeQuery) parsedQuery;
assertThat(rangeQuery.getField(), equalTo(INT_FIELD_NAME));
assertThat(rangeQuery.getMin().intValue(), equalTo(23));
assertThat(rangeQuery.getMax().intValue(), equalTo(54));
assertThat(rangeQuery.includesMin(), equalTo(true));
assertThat(rangeQuery.includesMax(), equalTo(false));
}
@Test
public void testDateRangeQueryFormat() throws IOException {
assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
// We test 01/01/2012 from gte and 2030 for lt
String query = "{\n" +
" \"range\" : {\n" +
" \"" + DATE_FIELD_NAME + "\" : {\n" +
" \"gte\": \"01/01/2012\",\n" +
" \"lt\": \"2030\",\n" +
" \"format\": \"dd/MM/yyyy||yyyy\"\n" +
" }\n" +
" }\n" +
"}";
Query parsedQuery = parseQuery(query).toQuery(createShardContext()).rewrite(null);
assertThat(parsedQuery, instanceOf(NumericRangeQuery.class));
// Min value was 01/01/2012 (dd/MM/yyyy)
DateTime min = DateTime.parse("2012-01-01T00:00:00.000+00");
assertThat(((NumericRangeQuery) parsedQuery).getMin().longValue(), is(min.getMillis()));
// Max value was 2030 (yyyy)
DateTime max = DateTime.parse("2030-01-01T00:00:00.000+00");
assertThat(((NumericRangeQuery) parsedQuery).getMax().longValue(), is(max.getMillis()));
// Test Invalid format
query = "{\n" +
" \"range\" : {\n" +
" \"" + DATE_FIELD_NAME + "\" : {\n" +
" \"gte\": \"01/01/2012\",\n" +
" \"lt\": \"2030\",\n" +
" \"format\": \"yyyy\"\n" +
" }\n" +
" }\n" +
"}";
try {
parseQuery(query).toQuery(createShardContext()).rewrite(null);
fail("A Range Query with a specific format but with an unexpected date should raise a ParsingException");
} catch (ElasticsearchParseException e) {
// We expect it
}
}
@Test
public void testDateRangeBoundaries() throws IOException {
assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
String query = "{\n" +
" \"range\" : {\n" +
" \"" + DATE_FIELD_NAME + "\" : {\n" +
" \"gte\": \"2014-11-05||/M\",\n" +
" \"lte\": \"2014-12-08||/d\"\n" +
" }\n" +
" }\n" +
"}\n";
Query parsedQuery = parseQuery(query).toQuery(createShardContext()).rewrite(null);
assertThat(parsedQuery, instanceOf(NumericRangeQuery.class));
NumericRangeQuery rangeQuery = (NumericRangeQuery) parsedQuery;
DateTime min = DateTime.parse("2014-11-01T00:00:00.000+00");
assertThat(rangeQuery.getMin().longValue(), is(min.getMillis()));
assertTrue(rangeQuery.includesMin());
DateTime max = DateTime.parse("2014-12-08T23:59:59.999+00");
assertThat(rangeQuery.getMax().longValue(), is(max.getMillis()));
assertTrue(rangeQuery.includesMax());
query = "{\n" +
" \"range\" : {\n" +
" \"" + DATE_FIELD_NAME + "\" : {\n" +
" \"gt\": \"2014-11-05||/M\",\n" +
" \"lt\": \"2014-12-08||/d\"\n" +
" }\n" +
" }\n" +
"}";
parsedQuery = parseQuery(query).toQuery(createShardContext()).rewrite(null);
assertThat(parsedQuery, instanceOf(NumericRangeQuery.class));
rangeQuery = (NumericRangeQuery) parsedQuery;
min = DateTime.parse("2014-11-30T23:59:59.999+00");
assertThat(rangeQuery.getMin().longValue(), is(min.getMillis()));
assertFalse(rangeQuery.includesMin());
max = DateTime.parse("2014-12-08T00:00:00.000+00");
assertThat(rangeQuery.getMax().longValue(), is(max.getMillis()));
assertFalse(rangeQuery.includesMax());
}
@Test
public void testDateRangeQueryTimezone() throws IOException {
long startDate = System.currentTimeMillis();
String query = "{\n" +
" \"range\" : {\n" +
" \"" + DATE_FIELD_NAME + "\" : {\n" +
" \"gte\": \"2012-01-01\",\n" +
" \"lte\": \"now\",\n" +
" \"time_zone\": \"+01:00\"\n" +
" }\n" +
" }\n" +
"}";
Query parsedQuery = parseQuery(query).toQuery(createShardContext()).rewrite(null);
assertThat(parsedQuery, instanceOf(NumericRangeQuery.class));
// Min value was 2012-01-01 (UTC) so we need to remove one hour
DateTime min = DateTime.parse("2012-01-01T00:00:00.000+01:00");
// Max value is when we started the test. So it should be some ms from now
DateTime max = new DateTime(startDate, DateTimeZone.UTC);
assertThat(((NumericRangeQuery) parsedQuery).getMin().longValue(), is(min.getMillis()));
// We should not have a big difference here (should be some ms)
assertThat(((NumericRangeQuery) parsedQuery).getMax().longValue() - max.getMillis(), lessThanOrEqualTo(60000L));
query = "{\n" +
" \"range\" : {\n" +
" \"" + INT_FIELD_NAME + "\" : {\n" +
" \"gte\": \"0\",\n" +
" \"lte\": \"100\",\n" +
" \"time_zone\": \"-01:00\"\n" +
" }\n" +
" }\n" +
"}";
try {
parseQuery(query).toQuery(createShardContext());
fail("A Range Query on a numeric field with a TimeZone should raise a ParsingException");
} catch (QueryShardException e) {
// We expect it
}
}
}

View File

@ -27,6 +27,7 @@ import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.instanceOf;
public class RegexpQueryBuilderTests extends AbstractQueryTestCase<RegexpQueryBuilder> {
@ -58,6 +59,8 @@ public class RegexpQueryBuilderTests extends AbstractQueryTestCase<RegexpQueryBu
@Override
protected void doAssertLuceneQuery(RegexpQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException {
assertThat(query, instanceOf(RegexpQuery.class));
RegexpQuery regexpQuery = (RegexpQuery) query;
assertThat(regexpQuery.getField(), equalTo(queryBuilder.fieldName()));
}
@Test

View File

@ -56,8 +56,6 @@ import org.junit.Before;
import org.junit.Test;
import java.io.IOException;
- import java.lang.reflect.InvocationHandler;
- import java.lang.reflect.Method;
import java.lang.reflect.Proxy;
/**
@ -131,7 +129,7 @@ public class TemplateQueryParserTests extends ESTestCase {
templateSourceParser.nextToken();
TemplateQueryParser parser = injector.getInstance(TemplateQueryParser.class);
- Query query = parser.parse(context);
Query query = parser.fromXContent(context.parseContext()).toQuery(context);
assertTrue("Parsing template query failed.", query instanceof MatchAllDocsQuery);
}
@ -143,7 +141,7 @@ public class TemplateQueryParserTests extends ESTestCase {
context.reset(templateSourceParser);
TemplateQueryParser parser = injector.getInstance(TemplateQueryParser.class);
- Query query = parser.parse(context);
Query query = parser.fromXContent(context.parseContext()).toQuery(context);
assertTrue("Parsing template query failed.", query instanceof MatchAllDocsQuery);
}
@ -161,7 +159,7 @@ public class TemplateQueryParserTests extends ESTestCase {
context.reset(templateSourceParser);
TemplateQueryParser parser = injector.getInstance(TemplateQueryParser.class);
- parser.parse(context);
parser.fromXContent(context.parseContext()).toQuery(context);
}
@Test
@ -173,7 +171,7 @@ public class TemplateQueryParserTests extends ESTestCase {
templateSourceParser.nextToken();
TemplateQueryParser parser = injector.getInstance(TemplateQueryParser.class);
- Query query = parser.parse(context);
Query query = parser.fromXContent(context.parseContext()).toQuery(context);
assertTrue("Parsing template query failed.", query instanceof MatchAllDocsQuery);
}
}

View File

@ -22,8 +22,10 @@ package org.elasticsearch.index.query;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.lucene.BytesRefs;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.junit.Test;
import java.io.IOException;
@ -53,4 +55,14 @@ public class TermQueryBuilderTests extends AbstractTermQueryTestCase<TermQueryBu
assertThat(termQuery.getTerm().bytes(), equalTo(BytesRefs.toBytesRef(queryBuilder.value())));
}
}
@Test(expected = ParsingException.class)
public void testTermArray() throws IOException {
String queryAsString = "{\n" +
" \"term\": {\n" +
" \"age\": [34, 35]\n" +
" }\n" +
"}";
parseQuery(queryAsString);
}
}

View File

@ -31,6 +31,7 @@ import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.action.get.GetRequest;
import org.elasticsearch.action.get.GetResponse;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
@ -110,6 +111,7 @@ public class TermsQueryBuilderTests extends AbstractQueryTestCase<TermsQueryBuil
// compare whether we have the expected list of terms returned
final List<Term> booleanTerms = new ArrayList<>();
for (BooleanClause booleanClause : booleanQuery) {
assertThat(booleanClause.getOccur(), equalTo(BooleanClause.Occur.SHOULD));
assertThat(booleanClause.getQuery(), instanceOf(TermQuery.class));
Term term = ((TermQuery) booleanClause.getQuery()).getTerm();
booleanTerms.add(term);
@ -212,15 +214,21 @@ public class TermsQueryBuilderTests extends AbstractQueryTestCase<TermsQueryBuil
TermsQueryBuilder copy = assertSerialization(queryBuilder);
assertTrue(queryBuilder.disableCoord());
assertTrue(copy.disableCoord());
Query luceneQuery = queryBuilder.toQuery(createShardContext());
assertThat(luceneQuery, instanceOf(BooleanQuery.class));
BooleanQuery booleanQuery = (BooleanQuery) luceneQuery;
assertThat(booleanQuery.isCoordDisabled(), equalTo(true));
String randomMinShouldMatch = RandomPicks.randomFrom(random(), Arrays.asList("min_match", "min_should_match", "minimum_should_match"));
query = "{\n" +
" \"terms\": {\n" +
" \"field\": [\n" +
- " \"blue\",\n" +
- " \"pill\"\n" +
" \"value1\",\n" +
" \"value2\",\n" +
" \"value3\",\n" +
" \"value4\"\n" +
" ],\n" +
- " \"" + randomMinShouldMatch +"\": \"42%\"\n" +
" \"" + randomMinShouldMatch +"\": \"25%\"\n" +
" }\n" +
"}";
try {
@ -231,8 +239,12 @@ public class TermsQueryBuilderTests extends AbstractQueryTestCase<TermsQueryBuil
}
queryBuilder = (TermsQueryBuilder) parseQuery(query, ParseFieldMatcher.EMPTY);
copy = assertSerialization(queryBuilder);
- assertEquals("42%", queryBuilder.minimumShouldMatch());
- assertEquals("42%", copy.minimumShouldMatch());
assertEquals("25%", queryBuilder.minimumShouldMatch());
assertEquals("25%", copy.minimumShouldMatch());
luceneQuery = queryBuilder.toQuery(createShardContext());
assertThat(luceneQuery, instanceOf(BooleanQuery.class));
booleanQuery = (BooleanQuery) luceneQuery;
assertThat(booleanQuery.getMinimumNumberShouldMatch(), equalTo(1));
}
@Override
@ -241,7 +253,7 @@ public class TermsQueryBuilderTests extends AbstractQueryTestCase<TermsQueryBuil
try {
XContentBuilder builder = XContentFactory.jsonBuilder().prettyPrint();
builder.startObject();
- builder.array(termsPath, randomTerms.toArray(new Object[0]));
builder.array(termsPath, randomTerms.toArray(new Object[randomTerms.size()]));
builder.endObject();
json = builder.string();
} catch (IOException ex) {
@ -276,5 +288,18 @@ public class TermsQueryBuilderTests extends AbstractQueryTestCase<TermsQueryBuil
assertEquals(Arrays.asList(1l, 3l, 4l), values);
}
}
@Test
public void testTermsQueryWithMultipleFields() throws IOException {
String query = XContentFactory.jsonBuilder().startObject()
.startObject("terms").array("foo", 123).array("bar", 456).endObject()
.endObject().string();
try {
parseQuery(query);
fail("parsing should have failed");
} catch (ParsingException ex) {
assertThat(ex.getMessage(), equalTo("[terms] query does not support multiple fields"));
}
}
}

View File

@ -25,6 +25,7 @@ import org.junit.Test;
import java.io.IOException;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.instanceOf;
public class WildcardQueryBuilderTests extends AbstractQueryTestCase<WildcardQueryBuilder> {
@ -49,6 +50,10 @@ public class WildcardQueryBuilderTests extends AbstractQueryTestCase<WildcardQue
@Override
protected void doAssertLuceneQuery(WildcardQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException {
assertThat(query, instanceOf(WildcardQuery.class));
WildcardQuery wildcardQuery = (WildcardQuery) query;
assertThat(wildcardQuery.getField(), equalTo(queryBuilder.fieldName()));
assertThat(wildcardQuery.getTerm().field(), equalTo(queryBuilder.fieldName()));
assertThat(wildcardQuery.getTerm().text(), equalTo(queryBuilder.value()));
}
@Test

View File

@ -23,7 +23,6 @@ import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.ConstantScoreQuery;
import org.apache.lucene.search.Query;
- import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.query.BoolQueryBuilder;
import org.elasticsearch.index.query.IndexQueryParserService;
import org.elasticsearch.indices.IndicesService;
@ -78,7 +77,7 @@ public class CustomQueryParserIT extends ESIntegTestCase {
@Test //see #11120
public void testConstantScoreParsesFilter() throws Exception {
IndexQueryParserService queryParser = queryParser();
- Query q = queryParser.parse(constantScoreQuery(new DummyQueryParserPlugin.DummyQueryBuilder())).query();
Query q = constantScoreQuery(new DummyQueryParserPlugin.DummyQueryBuilder()).toQuery(queryParser.getShardContext());
Query inner = ((ConstantScoreQuery) q).getQuery();
assertThat(inner, instanceOf(DummyQueryParserPlugin.DummyQuery.class));
assertEquals(true, ((DummyQueryParserPlugin.DummyQuery) inner).isFilter);
@ -88,11 +87,11 @@ public class CustomQueryParserIT extends ESIntegTestCase {
public void testBooleanParsesFilter() throws Exception {
IndexQueryParserService queryParser = queryParser();
// single clause, serialized as inner object
- Query q = queryParser.parse(boolQuery()
Query q = boolQuery()
.should(new DummyQueryParserPlugin.DummyQueryBuilder())
.must(new DummyQueryParserPlugin.DummyQueryBuilder())
.filter(new DummyQueryParserPlugin.DummyQueryBuilder())
- .mustNot(new DummyQueryParserPlugin.DummyQueryBuilder())).query();
.mustNot(new DummyQueryParserPlugin.DummyQueryBuilder()).toQuery(queryParser.getShardContext());
assertThat(q, instanceOf(BooleanQuery.class));
BooleanQuery bq = (BooleanQuery) q;
assertEquals(4, bq.clauses().size());
@ -113,11 +112,11 @@ public class CustomQueryParserIT extends ESIntegTestCase {
}
// multiple clauses, serialized as inner arrays
- q = queryParser.parse(boolQuery()
q = boolQuery()
.should(new DummyQueryParserPlugin.DummyQueryBuilder()).should(new DummyQueryParserPlugin.DummyQueryBuilder())
.must(new DummyQueryParserPlugin.DummyQueryBuilder()).must(new DummyQueryParserPlugin.DummyQueryBuilder())
.filter(new DummyQueryParserPlugin.DummyQueryBuilder()).filter(new DummyQueryParserPlugin.DummyQueryBuilder())
- .mustNot(new DummyQueryParserPlugin.DummyQueryBuilder()).mustNot(new DummyQueryParserPlugin.DummyQueryBuilder())).query();
.mustNot(new DummyQueryParserPlugin.DummyQueryBuilder()).mustNot(new DummyQueryParserPlugin.DummyQueryBuilder()).toQuery(queryParser.getShardContext());
assertThat(q, instanceOf(BooleanQuery.class));
bq = (BooleanQuery) q;
assertEquals(8, bq.clauses().size());

View File

@ -28,9 +28,8 @@ import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.AbstractQueryBuilder;
- import org.elasticsearch.index.query.BaseQueryParser;
- import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.index.query.QueryParser;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.indices.IndicesModule;
import org.elasticsearch.plugins.Plugin;
@ -92,14 +91,14 @@ public class DummyQueryParserPlugin extends Plugin {
}
}
- public static class DummyQueryParser extends BaseQueryParser {
public static class DummyQueryParser implements QueryParser<DummyQueryBuilder> {
@Override
public String[] names() {
return new String[]{DummyQueryBuilder.NAME};
}
@Override
- public QueryBuilder fromXContent(QueryParseContext parseContext) throws IOException {
public DummyQueryBuilder fromXContent(QueryParseContext parseContext) throws IOException {
XContentParser.Token token = parseContext.parser().nextToken();
assert token == XContentParser.Token.END_OBJECT;
return new DummyQueryBuilder();

View File

@ -0,0 +1,85 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.search;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.*;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.engine.Engine;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.query.IndexQueryParserService;
import org.elasticsearch.index.query.MultiMatchQueryBuilder;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.test.ESSingleNodeTestCase;
import org.junit.Before;
import java.io.IOException;
import java.util.Arrays;
import static org.elasticsearch.index.query.QueryBuilders.multiMatchQuery;
public class MultiMatchQueryTests extends ESSingleNodeTestCase {
private IndexQueryParserService queryParser;
private IndexService indexService;
@Before
public void setup() throws IOException {
IndexService indexService = createIndex("test");
MapperService mapperService = indexService.mapperService();
String mapping = "{\n" +
" \"person\":{\n" +
" \"properties\":{\n" +
" \"name\":{\n" +
" \"properties\":{\n" +
" \"first\": {\n" +
" \"type\":\"string\"\n" +
" }," +
" \"last\": {\n" +
" \"type\":\"string\"\n" +
" }" +
" }" +
" }\n" +
" }\n" +
" }\n" +
"}";
mapperService.merge("person", new CompressedXContent(mapping), true, false);
this.indexService = indexService;
queryParser = indexService.queryParserService();
}
public void testCrossFieldMultiMatchQuery() throws IOException {
QueryShardContext queryShardContext = new QueryShardContext(new Index("test"), queryParser);
queryShardContext.setAllowUnmappedFields(true);
Query parsedQuery = multiMatchQuery("banon").field("name.first", 2).field("name.last", 3).field("foobar").type(MultiMatchQueryBuilder.Type.CROSS_FIELDS).toQuery(queryShardContext);
try (Engine.Searcher searcher = indexService.shardSafe(0).acquireSearcher("test")) {
Query rewrittenQuery = searcher.searcher().rewrite(parsedQuery);
BooleanQuery.Builder expected = new BooleanQuery.Builder();
expected.add(new TermQuery(new Term("foobar", "banon")), BooleanClause.Occur.SHOULD);
Query tq1 = new BoostQuery(new TermQuery(new Term("name.first", "banon")), 2);
Query tq2 = new BoostQuery(new TermQuery(new Term("name.last", "banon")), 3);
expected.add(new DisjunctionMaxQuery(Arrays.<Query>asList(tq1, tq2), 0f), BooleanClause.Occur.SHOULD);
assertEquals(expected.build(), rewrittenQuery);
}
}
}

View File

@ -20,7 +20,6 @@
package org.elasticsearch.indices;
import org.apache.lucene.analysis.hunspell.Dictionary;
- import org.apache.lucene.search.Query;
import org.elasticsearch.common.inject.ModuleTestCase;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.query.*;
@ -42,11 +41,6 @@ public class IndicesModuleTests extends ModuleTestCase {
return null;
}
- @Override
- public Query parse(QueryShardContext context) throws IOException {
- return null;
- }
@Override
public QueryBuilder getBuilderPrototype() {
return null;

View File

@ -19,16 +19,16 @@
package org.elasticsearch.search.query;
import com.carrotsearch.randomizedtesting.generators.RandomPicks;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.*;
import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder;
import org.elasticsearch.action.index.IndexRequestBuilder;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.common.util.set.Sets;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
- import org.elasticsearch.index.query.MatchQueryBuilder;
- import org.elasticsearch.index.query.MultiMatchQueryBuilder;
- import org.elasticsearch.index.query.Operator;
- import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.index.engine.Engine;
import org.elasticsearch.index.query.*;
import org.elasticsearch.index.search.MatchQuery;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.SearchHits;
@ -41,6 +41,7 @@ import org.junit.Test;
import java.io.IOException;
import java.lang.reflect.Field;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Set;
import java.util.concurrent.ExecutionException;

View File

@ -1,35 +0,0 @@
{
filtered:{
query:{
term:{
"name.first":"shay"
}
},
filter:{
bool:{
must:[
{
term:{
"name.first":"shay1"
}
},
{
term:{
"name.first":"shay4"
}
}
],
must_not:{
term:{
"name.first":"shay2"
}
},
should:{
term:{
"name.first":"shay3"
}
}
}
}
}
}

View File

@ -1,30 +0,0 @@
{
bool:{
must:[
{
query_string:{
default_field:"content",
query:"test1"
}
},
{
query_string:{
default_field:"content",
query:"test4"
}
}
],
must_not:{
query_string:{
default_field:"content",
query:"test2"
}
},
should:{
query_string:{
default_field:"content",
query:"test3"
}
}
}
}

View File

@ -1,15 +0,0 @@
{
"boosting":{
"positive":{
"term":{
"field1":"value1"
}
},
"negative":{
"term":{
"field2":"value2"
}
},
"negative_boost":0.2
}
}

View File

@ -1,12 +0,0 @@
{
"child":{
"properties":{
"field":{
"type":"string"
}
},
"_parent" : {
"type" : "person"
}
}
}

View File

@ -1,9 +0,0 @@
{
constant_score:{
filter:{
term:{
"name.last":"banon"
}
}
}
}

View File

@ -1,43 +0,0 @@
{
name:{
first:"shay",
last:"banon"
},
address:{
first:{
location:"first location"
},
last:{
location:"last location"
}
},
age:32,
birthDate:"1977-11-15",
nerd:true,
dogs:["buck", "mia"],
complex:[
{
value1:"value1"
},
{
value2:"value2"
}
],
complex2:[
[
{
value1:"value1"
}
],
[
{
value2:"value2"
}
]
],
nullValue:null,
"location":{
"lat":1.1,
"lon":1.2
}
}

View File

@ -1,13 +0,0 @@
{
"constant_score": {
"filter": {
"range" : {
"born" : {
"gte": "01/01/2012",
"lt": "2030",
"format": "dd/MM/yyyy||yyyy"
}
}
}
}
}

View File

@ -1,13 +0,0 @@
{
"constant_score": {
"filter": {
"range" : {
"born" : {
"gte": "01/01/2012",
"lt": "2030",
"format": "yyyy"
}
}
}
}
}

View File

@ -1,13 +0,0 @@
{
"constant_score": {
"filter": {
"range" : {
"born" : {
"gte": "2012-01-01",
"lte": "now",
"time_zone": "+01:00"
}
}
}
}
}

View File

@ -1,13 +0,0 @@
{
"constant_score": {
"filter": {
"range" : {
"age" : {
"gte": "0",
"lte": "100",
"time_zone": "-01:00"
}
}
}
}
}

View File

@ -1,8 +0,0 @@
{
"range" : {
"born" : {
"gt": "2014-11-05||/M",
"lt": "2014-12-08||/d"
}
}
}

Some files were not shown because too many files have changed in this diff.