Move the bottom third of queries to registerQuery
Mostly this is just moving fromXContent into the query builders and making it public, and moving the ParseFields used in parsing into the builders and making them private.
parent c5ce78ed44
commit c68a58b67e
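The pattern repeated across the files below: each query's fromXContent moves out of its *QueryParser class and into the corresponding *QueryBuilder as a public static method, the ParseField constants used while parsing move onto the builder (mostly becoming private, though ExistsQueryBuilder keeps FIELD_FIELD public), and the PROTOTYPE constants become public so code outside the package can reach them. The registerQuery wiring referenced in the title is not part of this excerpt. Below is a minimal, hypothetical sketch of the resulting builder layout; MyQueryBuilder and the "my_query" name are stand-ins, and only parser calls that appear in the diffs below are used.

// Hypothetical sketch, not code from this commit.
public class MyQueryBuilder extends AbstractQueryBuilder<MyQueryBuilder> {

    public static final String NAME = "my_query";
    // The registered name(s) of the query stay public.
    public static final ParseField QUERY_NAME_FIELD = new ParseField(NAME);
    // The prototype becomes public so it can be referenced from outside the package.
    public static final MyQueryBuilder PROTOTYPE = new MyQueryBuilder("field");

    // ParseFields only needed during parsing move in as private constants.
    private static final ParseField FIELD_FIELD = new ParseField("field");

    private final String fieldName;

    public MyQueryBuilder(String fieldName) {
        this.fieldName = fieldName;
    }

    // Formerly MyQueryParser#fromXContent; now public and static on the builder.
    public static MyQueryBuilder fromXContent(QueryParseContext parseContext) throws IOException {
        XContentParser parser = parseContext.parser();
        String field = null;
        String queryName = null;
        float boost = AbstractQueryBuilder.DEFAULT_BOOST;
        String currentFieldName = null;
        XContentParser.Token token;
        while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
            if (token == XContentParser.Token.FIELD_NAME) {
                currentFieldName = parser.currentName();
            } else if (token.isValue()) {
                if (parseContext.parseFieldMatcher().match(currentFieldName, FIELD_FIELD)) {
                    field = parser.text();
                } else if (parseContext.parseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.NAME_FIELD)) {
                    queryName = parser.text();
                } else if (parseContext.parseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.BOOST_FIELD)) {
                    boost = parser.floatValue();
                } else {
                    throw new ParsingException(parser.getTokenLocation(),
                            "[" + NAME + "] query does not support [" + currentFieldName + "]");
                }
            }
        }
        MyQueryBuilder builder = new MyQueryBuilder(field);
        builder.queryName(queryName);
        builder.boost(boost);
        return builder;
    }

    // doToQuery, doXContent, serialization, equals/hashCode omitted from this sketch.
}

Making fromXContent a static member of the builder is what lets the prototype-based parser classes below be deleted outright once registration points at the builders directly.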
ExistsQueryBuilder.java
@@ -23,11 +23,14 @@ import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.ConstantScoreQuery;
import org.apache.lucene.search.Query;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.mapper.internal.FieldNamesFieldMapper;

import java.io.IOException;
@@ -41,11 +44,13 @@ import java.util.Objects;
public class ExistsQueryBuilder extends AbstractQueryBuilder<ExistsQueryBuilder> {

public static final String NAME = "exists";
public static final ParseField QUERY_NAME_FIELD = new ParseField(NAME);
public static final ExistsQueryBuilder PROTOTYPE = new ExistsQueryBuilder("field");

public static final ParseField FIELD_FIELD = new ParseField("field");

private final String fieldName;

static final ExistsQueryBuilder PROTOTYPE = new ExistsQueryBuilder("field");

public ExistsQueryBuilder(String fieldName) {
if (Strings.isEmpty(fieldName)) {
throw new IllegalArgumentException("field name is null or empty");
@@ -63,11 +68,50 @@ public class ExistsQueryBuilder extends AbstractQueryBuilder<ExistsQueryBuilder>
@Override
protected void doXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(NAME);
builder.field(ExistsQueryParser.FIELD_FIELD.getPreferredName(), fieldName);
builder.field(FIELD_FIELD.getPreferredName(), fieldName);
printBoostAndQueryName(builder);
builder.endObject();
}

public static ExistsQueryBuilder fromXContent(QueryParseContext parseContext) throws IOException {
XContentParser parser = parseContext.parser();

String fieldPattern = null;
String queryName = null;
float boost = AbstractQueryBuilder.DEFAULT_BOOST;

XContentParser.Token token;
String currentFieldName = null;
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (token.isValue()) {
if (parseContext.parseFieldMatcher().match(currentFieldName, FIELD_FIELD)) {
fieldPattern = parser.text();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.NAME_FIELD)) {
queryName = parser.text();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.BOOST_FIELD)) {
boost = parser.floatValue();
} else {
throw new ParsingException(parser.getTokenLocation(), "[" + ExistsQueryBuilder.NAME +
"] query does not support [" + currentFieldName + "]");
}
} else {
throw new ParsingException(parser.getTokenLocation(), "[" + ExistsQueryBuilder.NAME +
"] unknown token [" + token + "] after [" + currentFieldName + "]");
}
}

if (fieldPattern == null) {
throw new ParsingException(parser.getTokenLocation(), "[" + ExistsQueryBuilder.NAME + "] must be provided with a [field]");
}

ExistsQueryBuilder builder = new ExistsQueryBuilder(fieldPattern);
builder.queryName(queryName);
builder.boost(boost);
return builder;
}

@Override
protected Query doToQuery(QueryShardContext context) throws IOException {
return newFilter(context, fieldName);
ExistsQueryParser.java (deleted)
@@ -1,80 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

package org.elasticsearch.index.query;

import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.xcontent.XContentParser;

import java.io.IOException;

/**
* Parser for exists query
*/
public class ExistsQueryParser implements QueryParser<ExistsQueryBuilder> {

public static final ParseField QUERY_NAME_FIELD = new ParseField(ExistsQueryBuilder.NAME);
public static final ParseField FIELD_FIELD = new ParseField("field");

@Override
public ExistsQueryBuilder fromXContent(QueryParseContext parseContext) throws IOException {
XContentParser parser = parseContext.parser();

String fieldPattern = null;
String queryName = null;
float boost = AbstractQueryBuilder.DEFAULT_BOOST;

XContentParser.Token token;
String currentFieldName = null;
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (token.isValue()) {
if (parseContext.parseFieldMatcher().match(currentFieldName, FIELD_FIELD)) {
fieldPattern = parser.text();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.NAME_FIELD)) {
queryName = parser.text();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.BOOST_FIELD)) {
boost = parser.floatValue();
} else {
throw new ParsingException(parser.getTokenLocation(), "[" + ExistsQueryBuilder.NAME +
"] query does not support [" + currentFieldName + "]");
}
} else {
throw new ParsingException(parser.getTokenLocation(), "[" + ExistsQueryBuilder.NAME +
"] unknown token [" + token + "] after [" + currentFieldName + "]");
}
}

if (fieldPattern == null) {
throw new ParsingException(parser.getTokenLocation(), "[" + ExistsQueryBuilder.NAME + "] must be provided with a [field]");
}

ExistsQueryBuilder builder = new ExistsQueryBuilder(fieldPattern);
builder.queryName(queryName);
builder.boost(boost);
return builder;
}

@Override
public ExistsQueryBuilder getBuilderPrototype() {
return ExistsQueryBuilder.PROTOTYPE;
}
}
GeoBoundingBoxQueryBuilder.java
@@ -19,16 +19,20 @@

package org.elasticsearch.index.query;

import org.apache.lucene.search.Query;
import org.apache.lucene.spatial.geopoint.document.GeoPointField;
import org.apache.lucene.spatial.geopoint.search.GeoPointInBBoxQuery;
import org.apache.lucene.search.Query;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.Version;
import org.elasticsearch.common.Numbers;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.geo.GeoUtils;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.fielddata.IndexGeoPointFieldData;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.geo.BaseGeoPointFieldMapper;
@@ -49,10 +53,26 @@ import java.util.Objects;
public class GeoBoundingBoxQueryBuilder extends AbstractQueryBuilder<GeoBoundingBoxQueryBuilder> {
/** Name of the query. */
public static final String NAME = "geo_bounding_box";
public static final ParseField QUERY_NAME_FIELD = new ParseField(NAME, "geo_bbox");

/** Default type for executing this query (memory as of this writing). */
public static final GeoExecType DEFAULT_TYPE = GeoExecType.MEMORY;
/** Needed for serialization. */
static final GeoBoundingBoxQueryBuilder PROTOTYPE = new GeoBoundingBoxQueryBuilder("");
public static final GeoBoundingBoxQueryBuilder PROTOTYPE = new GeoBoundingBoxQueryBuilder("");

private static final ParseField IGNORE_MALFORMED_FIELD = new ParseField("ignore_malformed");
private static final ParseField TYPE_FIELD = new ParseField("type");
private static final ParseField VALIDATION_METHOD_FIELD = new ParseField("validation_method");
private static final ParseField COERCE_FIELD = new ParseField("coerce", "normalize");
private static final ParseField FIELD_FIELD = new ParseField("field");
private static final ParseField TOP_FIELD = new ParseField("top");
private static final ParseField BOTTOM_FIELD = new ParseField("bottom");
private static final ParseField LEFT_FIELD = new ParseField("left");
private static final ParseField RIGHT_FIELD = new ParseField("right");
private static final ParseField TOP_LEFT_FIELD = new ParseField("top_left");
private static final ParseField BOTTOM_RIGHT_FIELD = new ParseField("bottom_right");
private static final ParseField TOP_RIGHT_FIELD = new ParseField("top_right");
private static final ParseField BOTTOM_LEFT_FIELD = new ParseField("bottom_left");

/** Name of field holding geo coordinates to compute the bounding box on.*/
private final String fieldName;
@@ -298,17 +318,127 @@ public class GeoBoundingBoxQueryBuilder extends AbstractQueryBuilder<GeoBounding
builder.startObject(NAME);

builder.startObject(fieldName);
builder.array(GeoBoundingBoxQueryParser.TOP_LEFT_FIELD.getPreferredName(), topLeft.getLon(), topLeft.getLat());
builder.array(GeoBoundingBoxQueryParser.BOTTOM_RIGHT_FIELD.getPreferredName(), bottomRight.getLon(), bottomRight.getLat());
builder.array(TOP_LEFT_FIELD.getPreferredName(), topLeft.getLon(), topLeft.getLat());
builder.array(BOTTOM_RIGHT_FIELD.getPreferredName(), bottomRight.getLon(), bottomRight.getLat());
builder.endObject();
builder.field(GeoBoundingBoxQueryParser.VALIDATION_METHOD_FIELD.getPreferredName(), validationMethod);
builder.field(GeoBoundingBoxQueryParser.TYPE_FIELD.getPreferredName(), type);
builder.field(VALIDATION_METHOD_FIELD.getPreferredName(), validationMethod);
builder.field(TYPE_FIELD.getPreferredName(), type);

printBoostAndQueryName(builder);

builder.endObject();
}

public static GeoBoundingBoxQueryBuilder fromXContent(QueryParseContext parseContext) throws IOException {
XContentParser parser = parseContext.parser();

String fieldName = null;

double top = Double.NaN;
double bottom = Double.NaN;
double left = Double.NaN;
double right = Double.NaN;

float boost = AbstractQueryBuilder.DEFAULT_BOOST;
String queryName = null;
String currentFieldName = null;
XContentParser.Token token;
boolean coerce = GeoValidationMethod.DEFAULT_LENIENT_PARSING;
boolean ignoreMalformed = GeoValidationMethod.DEFAULT_LENIENT_PARSING;
GeoValidationMethod validationMethod = null;

GeoPoint sparse = new GeoPoint();

String type = "memory";

while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (token == XContentParser.Token.START_OBJECT) {
fieldName = currentFieldName;

while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
token = parser.nextToken();
if (parseContext.isDeprecatedSetting(currentFieldName)) {
// skip
} else if (parseContext.parseFieldMatcher().match(currentFieldName, FIELD_FIELD)) {
fieldName = parser.text();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, TOP_FIELD)) {
top = parser.doubleValue();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, BOTTOM_FIELD)) {
bottom = parser.doubleValue();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, LEFT_FIELD)) {
left = parser.doubleValue();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, RIGHT_FIELD)) {
right = parser.doubleValue();
} else {
if (parseContext.parseFieldMatcher().match(currentFieldName, TOP_LEFT_FIELD)) {
GeoUtils.parseGeoPoint(parser, sparse);
top = sparse.getLat();
left = sparse.getLon();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, BOTTOM_RIGHT_FIELD)) {
GeoUtils.parseGeoPoint(parser, sparse);
bottom = sparse.getLat();
right = sparse.getLon();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, TOP_RIGHT_FIELD)) {
GeoUtils.parseGeoPoint(parser, sparse);
top = sparse.getLat();
right = sparse.getLon();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, BOTTOM_LEFT_FIELD)) {
GeoUtils.parseGeoPoint(parser, sparse);
bottom = sparse.getLat();
left = sparse.getLon();
} else {
throw new ElasticsearchParseException("failed to parse [{}] query. unexpected field [{}]",
QUERY_NAME_FIELD.getPreferredName(), currentFieldName);
}
}
} else {
throw new ElasticsearchParseException("failed to parse [{}] query. field name expected but [{}] found",
QUERY_NAME_FIELD.getPreferredName(), token);
}
}
} else if (token.isValue()) {
if (parseContext.parseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.NAME_FIELD)) {
queryName = parser.text();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.BOOST_FIELD)) {
boost = parser.floatValue();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, COERCE_FIELD)) {
coerce = parser.booleanValue();
if (coerce) {
ignoreMalformed = true;
}
} else if (parseContext.parseFieldMatcher().match(currentFieldName, VALIDATION_METHOD_FIELD)) {
validationMethod = GeoValidationMethod.fromString(parser.text());
} else if (parseContext.parseFieldMatcher().match(currentFieldName, TYPE_FIELD)) {
type = parser.text();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, IGNORE_MALFORMED_FIELD)) {
ignoreMalformed = parser.booleanValue();
} else {
throw new ParsingException(parser.getTokenLocation(), "failed to parse [{}] query. unexpected field [{}]",
QUERY_NAME_FIELD.getPreferredName(), currentFieldName);
}
}
}

final GeoPoint topLeft = sparse.reset(top, left); //just keep the object
final GeoPoint bottomRight = new GeoPoint(bottom, right);
GeoBoundingBoxQueryBuilder builder = new GeoBoundingBoxQueryBuilder(fieldName);
builder.setCorners(topLeft, bottomRight);
builder.queryName(queryName);
builder.boost(boost);
builder.type(GeoExecType.fromString(type));
if (validationMethod != null) {
// ignore deprecated coerce/ignoreMalformed settings if validationMethod is set
builder.setValidationMethod(validationMethod);
} else {
builder.setValidationMethod(GeoValidationMethod.infer(coerce, ignoreMalformed));
}
return builder;
}

@Override
protected boolean doEquals(GeoBoundingBoxQueryBuilder other) {
return Objects.equals(topLeft, other.topLeft) &&
GeoBoundingBoxQueryParser.java (deleted)
@@ -1,163 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

package org.elasticsearch.index.query;

import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.geo.GeoUtils;
import org.elasticsearch.common.xcontent.XContentParser;

import java.io.IOException;

public class GeoBoundingBoxQueryParser implements QueryParser<GeoBoundingBoxQueryBuilder> {

public static final ParseField QUERY_NAME_FIELD = new ParseField(GeoBoundingBoxQueryBuilder.NAME, "geo_bbox");
public static final ParseField IGNORE_MALFORMED_FIELD = new ParseField("ignore_malformed");
public static final ParseField TYPE_FIELD = new ParseField("type");
public static final ParseField VALIDATION_METHOD_FIELD = new ParseField("validation_method");
public static final ParseField COERCE_FIELD = new ParseField("coerce", "normalize");
public static final ParseField FIELD_FIELD = new ParseField("field");
public static final ParseField TOP_FIELD = new ParseField("top");
public static final ParseField BOTTOM_FIELD = new ParseField("bottom");
public static final ParseField LEFT_FIELD = new ParseField("left");
public static final ParseField RIGHT_FIELD = new ParseField("right");
public static final ParseField TOP_LEFT_FIELD = new ParseField("top_left");
public static final ParseField BOTTOM_RIGHT_FIELD = new ParseField("bottom_right");
public static final ParseField TOP_RIGHT_FIELD = new ParseField("top_right");
public static final ParseField BOTTOM_LEFT_FIELD = new ParseField("bottom_left");

@Override
public GeoBoundingBoxQueryBuilder fromXContent(QueryParseContext parseContext) throws IOException {
XContentParser parser = parseContext.parser();

String fieldName = null;

double top = Double.NaN;
double bottom = Double.NaN;
double left = Double.NaN;
double right = Double.NaN;

float boost = AbstractQueryBuilder.DEFAULT_BOOST;
String queryName = null;
String currentFieldName = null;
XContentParser.Token token;
boolean coerce = GeoValidationMethod.DEFAULT_LENIENT_PARSING;
boolean ignoreMalformed = GeoValidationMethod.DEFAULT_LENIENT_PARSING;
GeoValidationMethod validationMethod = null;

GeoPoint sparse = new GeoPoint();

String type = "memory";

while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (token == XContentParser.Token.START_OBJECT) {
fieldName = currentFieldName;

while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
token = parser.nextToken();
if (parseContext.isDeprecatedSetting(currentFieldName)) {
// skip
} else if (parseContext.parseFieldMatcher().match(currentFieldName, FIELD_FIELD)) {
fieldName = parser.text();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, TOP_FIELD)) {
top = parser.doubleValue();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, BOTTOM_FIELD)) {
bottom = parser.doubleValue();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, LEFT_FIELD)) {
left = parser.doubleValue();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, RIGHT_FIELD)) {
right = parser.doubleValue();
} else {
if (parseContext.parseFieldMatcher().match(currentFieldName, TOP_LEFT_FIELD)) {
GeoUtils.parseGeoPoint(parser, sparse);
top = sparse.getLat();
left = sparse.getLon();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, BOTTOM_RIGHT_FIELD)) {
GeoUtils.parseGeoPoint(parser, sparse);
bottom = sparse.getLat();
right = sparse.getLon();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, TOP_RIGHT_FIELD)) {
GeoUtils.parseGeoPoint(parser, sparse);
top = sparse.getLat();
right = sparse.getLon();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, BOTTOM_LEFT_FIELD)) {
GeoUtils.parseGeoPoint(parser, sparse);
bottom = sparse.getLat();
left = sparse.getLon();
} else {
throw new ElasticsearchParseException("failed to parse [{}] query. unexpected field [{}]",
QUERY_NAME_FIELD.getPreferredName(), currentFieldName);
}
}
} else {
throw new ElasticsearchParseException("failed to parse [{}] query. field name expected but [{}] found",
QUERY_NAME_FIELD.getPreferredName(), token);
}
}
} else if (token.isValue()) {
if (parseContext.parseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.NAME_FIELD)) {
queryName = parser.text();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.BOOST_FIELD)) {
boost = parser.floatValue();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, COERCE_FIELD)) {
coerce = parser.booleanValue();
if (coerce) {
ignoreMalformed = true;
}
} else if (parseContext.parseFieldMatcher().match(currentFieldName, VALIDATION_METHOD_FIELD)) {
validationMethod = GeoValidationMethod.fromString(parser.text());
} else if (parseContext.parseFieldMatcher().match(currentFieldName, TYPE_FIELD)) {
type = parser.text();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, IGNORE_MALFORMED_FIELD)) {
ignoreMalformed = parser.booleanValue();
} else {
throw new ParsingException(parser.getTokenLocation(), "failed to parse [{}] query. unexpected field [{}]",
QUERY_NAME_FIELD.getPreferredName(), currentFieldName);
}
}
}

final GeoPoint topLeft = sparse.reset(top, left); //just keep the object
final GeoPoint bottomRight = new GeoPoint(bottom, right);
GeoBoundingBoxQueryBuilder builder = new GeoBoundingBoxQueryBuilder(fieldName);
builder.setCorners(topLeft, bottomRight);
builder.queryName(queryName);
builder.boost(boost);
builder.type(GeoExecType.fromString(type));
if (validationMethod != null) {
// ignore deprecated coerce/ignoreMalformed settings if validationMethod is set
builder.setValidationMethod(validationMethod);
} else {
builder.setValidationMethod(GeoValidationMethod.infer(coerce, ignoreMalformed));
}
return builder;
}

@Override
public GeoBoundingBoxQueryBuilder getBuilderPrototype() {
return GeoBoundingBoxQueryBuilder.PROTOTYPE;
}
}
GeoDistanceQueryBuilder.java
@@ -19,10 +19,12 @@

package org.elasticsearch.index.query;

import org.apache.lucene.search.Query;
import org.apache.lucene.spatial.geopoint.document.GeoPointField;
import org.apache.lucene.spatial.geopoint.search.GeoPointDistanceQuery;
import org.apache.lucene.search.Query;
import org.elasticsearch.Version;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.geo.GeoDistance;
import org.elasticsearch.common.geo.GeoPoint;
|
@@ -31,9 +33,11 @@ import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.unit.DistanceUnit;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.fielddata.IndexGeoPointFieldData;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.geo.BaseGeoPointFieldMapper;
import org.elasticsearch.index.mapper.geo.GeoPointFieldMapper;
import org.elasticsearch.index.mapper.geo.GeoPointFieldMapperLegacy;
import org.elasticsearch.index.search.geo.GeoDistanceRangeQuery;
@@ -49,6 +53,8 @@ public class GeoDistanceQueryBuilder extends AbstractQueryBuilder<GeoDistanceQue

/** Name of the query in the query dsl. */
public static final String NAME = "geo_distance";
public static final ParseField QUERY_NAME_FIELD = new ParseField(NAME);

/** Default for latitude normalization (as of this writing true).*/
public static final boolean DEFAULT_NORMALIZE_LAT = true;
/** Default for longitude normalization (as of this writing true). */
@@ -60,6 +66,16 @@ public class GeoDistanceQueryBuilder extends AbstractQueryBuilder<GeoDistanceQue
/** Default for optimising query through pre computed bounding box query. */
public static final String DEFAULT_OPTIMIZE_BBOX = "memory";

public static final GeoDistanceQueryBuilder PROTOTYPE = new GeoDistanceQueryBuilder("_na_");

private static final ParseField VALIDATION_METHOD_FIELD = new ParseField("validation_method");
private static final ParseField IGNORE_MALFORMED_FIELD = new ParseField("ignore_malformed");
private static final ParseField COERCE_FIELD = new ParseField("coerce", "normalize");
private static final ParseField OPTIMIZE_BBOX_FIELD = new ParseField("optimize_bbox");
private static final ParseField DISTANCE_TYPE_FIELD = new ParseField("distance_type");
private static final ParseField UNIT_FIELD = new ParseField("unit");
private static final ParseField DISTANCE_FIELD = new ParseField("distance");

private final String fieldName;
/** Distance from center to cover. */
private double distance;
@@ -72,8 +88,6 @@ public class GeoDistanceQueryBuilder extends AbstractQueryBuilder<GeoDistanceQue
/** How strict should geo coordinate validation be? */
private GeoValidationMethod validationMethod = GeoValidationMethod.DEFAULT;

static final GeoDistanceQueryBuilder PROTOTYPE = new GeoDistanceQueryBuilder("_na_");

/**
* Construct new GeoDistanceQueryBuilder.
* @param fieldName name of indexed geo field to operate distance computation on.
@@ -250,14 +264,128 @@ public class GeoDistanceQueryBuilder extends AbstractQueryBuilder<GeoDistanceQue
protected void doXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(NAME);
builder.startArray(fieldName).value(center.lon()).value(center.lat()).endArray();
builder.field(GeoDistanceQueryParser.DISTANCE_FIELD.getPreferredName(), distance);
builder.field(GeoDistanceQueryParser.DISTANCE_TYPE_FIELD.getPreferredName(), geoDistance.name().toLowerCase(Locale.ROOT));
builder.field(GeoDistanceQueryParser.OPTIMIZE_BBOX_FIELD.getPreferredName(), optimizeBbox);
builder.field(GeoDistanceQueryParser.VALIDATION_METHOD_FIELD.getPreferredName(), validationMethod);
builder.field(DISTANCE_FIELD.getPreferredName(), distance);
builder.field(DISTANCE_TYPE_FIELD.getPreferredName(), geoDistance.name().toLowerCase(Locale.ROOT));
builder.field(OPTIMIZE_BBOX_FIELD.getPreferredName(), optimizeBbox);
builder.field(VALIDATION_METHOD_FIELD.getPreferredName(), validationMethod);
printBoostAndQueryName(builder);
builder.endObject();
}

public static GeoDistanceQueryBuilder fromXContent(QueryParseContext parseContext) throws IOException {
XContentParser parser = parseContext.parser();

XContentParser.Token token;

float boost = AbstractQueryBuilder.DEFAULT_BOOST;
String queryName = null;
String currentFieldName = null;
GeoPoint point = new GeoPoint(Double.NaN, Double.NaN);
String fieldName = null;
Object vDistance = null;
DistanceUnit unit = GeoDistanceQueryBuilder.DEFAULT_DISTANCE_UNIT;
GeoDistance geoDistance = GeoDistanceQueryBuilder.DEFAULT_GEO_DISTANCE;
String optimizeBbox = GeoDistanceQueryBuilder.DEFAULT_OPTIMIZE_BBOX;
boolean coerce = GeoValidationMethod.DEFAULT_LENIENT_PARSING;
boolean ignoreMalformed = GeoValidationMethod.DEFAULT_LENIENT_PARSING;
GeoValidationMethod validationMethod = null;

while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (parseContext.isDeprecatedSetting(currentFieldName)) {
// skip
} else if (token == XContentParser.Token.START_ARRAY) {
fieldName = currentFieldName;
GeoUtils.parseGeoPoint(parser, point);
} else if (token == XContentParser.Token.START_OBJECT) {
// the json in the format of -> field : { lat : 30, lon : 12 }
String currentName = parser.currentName();
assert currentFieldName != null;
fieldName = currentFieldName;
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentName = parser.currentName();
} else if (token.isValue()) {
if (currentName.equals(GeoPointFieldMapper.Names.LAT)) {
point.resetLat(parser.doubleValue());
} else if (currentName.equals(GeoPointFieldMapper.Names.LON)) {
point.resetLon(parser.doubleValue());
} else if (currentName.equals(GeoPointFieldMapper.Names.GEOHASH)) {
point.resetFromGeoHash(parser.text());
} else {
throw new ParsingException(parser.getTokenLocation(),
"[geo_distance] query does not support [" + currentFieldName + "]");
}
}
}
} else if (token.isValue()) {
if (parseContext.parseFieldMatcher().match(currentFieldName, DISTANCE_FIELD)) {
if (token == XContentParser.Token.VALUE_STRING) {
vDistance = parser.text(); // a String
} else {
vDistance = parser.numberValue(); // a Number
}
} else if (parseContext.parseFieldMatcher().match(currentFieldName, UNIT_FIELD)) {
unit = DistanceUnit.fromString(parser.text());
} else if (parseContext.parseFieldMatcher().match(currentFieldName, DISTANCE_TYPE_FIELD)) {
geoDistance = GeoDistance.fromString(parser.text());
} else if (currentFieldName.endsWith(GeoPointFieldMapper.Names.LAT_SUFFIX)) {
point.resetLat(parser.doubleValue());
fieldName = currentFieldName.substring(0, currentFieldName.length() - GeoPointFieldMapper.Names.LAT_SUFFIX.length());
} else if (currentFieldName.endsWith(GeoPointFieldMapper.Names.LON_SUFFIX)) {
point.resetLon(parser.doubleValue());
fieldName = currentFieldName.substring(0, currentFieldName.length() - GeoPointFieldMapper.Names.LON_SUFFIX.length());
} else if (parseContext.parseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.NAME_FIELD)) {
queryName = parser.text();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.BOOST_FIELD)) {
boost = parser.floatValue();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, OPTIMIZE_BBOX_FIELD)) {
optimizeBbox = parser.textOrNull();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, COERCE_FIELD)) {
coerce = parser.booleanValue();
if (coerce == true) {
ignoreMalformed = true;
}
} else if (parseContext.parseFieldMatcher().match(currentFieldName, IGNORE_MALFORMED_FIELD)) {
ignoreMalformed = parser.booleanValue();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, VALIDATION_METHOD_FIELD)) {
validationMethod = GeoValidationMethod.fromString(parser.text());
} else {
if (fieldName == null) {
point.resetFromString(parser.text());
fieldName = currentFieldName;
} else {
throw new ParsingException(parser.getTokenLocation(), "[" + GeoDistanceQueryBuilder.NAME +
"] field name already set to [" + fieldName + "] but found [" + currentFieldName + "]");
}
}
}
}

if (vDistance == null) {
throw new ParsingException(parser.getTokenLocation(), "geo_distance requires 'distance' to be specified");
}

GeoDistanceQueryBuilder qb = new GeoDistanceQueryBuilder(fieldName);
if (vDistance instanceof Number) {
qb.distance(((Number) vDistance).doubleValue(), unit);
} else {
qb.distance((String) vDistance, unit);
}
qb.point(point);
if (validationMethod != null) {
qb.setValidationMethod(validationMethod);
} else {
qb.setValidationMethod(GeoValidationMethod.infer(coerce, ignoreMalformed));
}
qb.optimizeBbox(optimizeBbox);
qb.geoDistance(geoDistance);
qb.boost(boost);
qb.queryName(queryName);
return qb;
}

@Override
protected int doHashCode() {
return Objects.hash(center, geoDistance, optimizeBbox, distance, validationMethod);
GeoDistanceQueryParser.java (deleted)
@@ -1,173 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

package org.elasticsearch.index.query;

import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.geo.GeoDistance;
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.geo.GeoUtils;
import org.elasticsearch.common.unit.DistanceUnit;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.mapper.geo.GeoPointFieldMapper;

import java.io.IOException;

/**
* Parses a GeoDistanceQuery. See also
*
* <pre>
* {
* "name.lat" : 1.1,
* "name.lon" : 1.2,
* }
* </pre>
*/
public class GeoDistanceQueryParser implements QueryParser<GeoDistanceQueryBuilder> {

public static final ParseField QUERY_NAME_FIELD = new ParseField(GeoDistanceQueryBuilder.NAME);
public static final ParseField VALIDATION_METHOD_FIELD = new ParseField("validation_method");
public static final ParseField IGNORE_MALFORMED_FIELD = new ParseField("ignore_malformed");
public static final ParseField COERCE_FIELD = new ParseField("coerce", "normalize");
public static final ParseField OPTIMIZE_BBOX_FIELD = new ParseField("optimize_bbox");
public static final ParseField DISTANCE_TYPE_FIELD = new ParseField("distance_type");
public static final ParseField UNIT_FIELD = new ParseField("unit");
public static final ParseField DISTANCE_FIELD = new ParseField("distance");

@Override
public GeoDistanceQueryBuilder fromXContent(QueryParseContext parseContext) throws IOException {
XContentParser parser = parseContext.parser();

XContentParser.Token token;

float boost = AbstractQueryBuilder.DEFAULT_BOOST;
String queryName = null;
String currentFieldName = null;
GeoPoint point = new GeoPoint(Double.NaN, Double.NaN);
String fieldName = null;
Object vDistance = null;
DistanceUnit unit = GeoDistanceQueryBuilder.DEFAULT_DISTANCE_UNIT;
GeoDistance geoDistance = GeoDistanceQueryBuilder.DEFAULT_GEO_DISTANCE;
String optimizeBbox = GeoDistanceQueryBuilder.DEFAULT_OPTIMIZE_BBOX;
boolean coerce = GeoValidationMethod.DEFAULT_LENIENT_PARSING;
boolean ignoreMalformed = GeoValidationMethod.DEFAULT_LENIENT_PARSING;
GeoValidationMethod validationMethod = null;

while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (parseContext.isDeprecatedSetting(currentFieldName)) {
// skip
} else if (token == XContentParser.Token.START_ARRAY) {
fieldName = currentFieldName;
GeoUtils.parseGeoPoint(parser, point);
} else if (token == XContentParser.Token.START_OBJECT) {
// the json in the format of -> field : { lat : 30, lon : 12 }
String currentName = parser.currentName();
assert currentFieldName != null;
fieldName = currentFieldName;
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentName = parser.currentName();
} else if (token.isValue()) {
if (currentName.equals(GeoPointFieldMapper.Names.LAT)) {
point.resetLat(parser.doubleValue());
} else if (currentName.equals(GeoPointFieldMapper.Names.LON)) {
point.resetLon(parser.doubleValue());
} else if (currentName.equals(GeoPointFieldMapper.Names.GEOHASH)) {
point.resetFromGeoHash(parser.text());
} else {
throw new ParsingException(parser.getTokenLocation(),
"[geo_distance] query does not support [" + currentFieldName + "]");
}
}
}
} else if (token.isValue()) {
if (parseContext.parseFieldMatcher().match(currentFieldName, DISTANCE_FIELD)) {
if (token == XContentParser.Token.VALUE_STRING) {
vDistance = parser.text(); // a String
} else {
vDistance = parser.numberValue(); // a Number
}
} else if (parseContext.parseFieldMatcher().match(currentFieldName, UNIT_FIELD)) {
unit = DistanceUnit.fromString(parser.text());
} else if (parseContext.parseFieldMatcher().match(currentFieldName, DISTANCE_TYPE_FIELD)) {
geoDistance = GeoDistance.fromString(parser.text());
} else if (currentFieldName.endsWith(GeoPointFieldMapper.Names.LAT_SUFFIX)) {
point.resetLat(parser.doubleValue());
fieldName = currentFieldName.substring(0, currentFieldName.length() - GeoPointFieldMapper.Names.LAT_SUFFIX.length());
} else if (currentFieldName.endsWith(GeoPointFieldMapper.Names.LON_SUFFIX)) {
point.resetLon(parser.doubleValue());
fieldName = currentFieldName.substring(0, currentFieldName.length() - GeoPointFieldMapper.Names.LON_SUFFIX.length());
} else if (parseContext.parseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.NAME_FIELD)) {
queryName = parser.text();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.BOOST_FIELD)) {
boost = parser.floatValue();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, OPTIMIZE_BBOX_FIELD)) {
optimizeBbox = parser.textOrNull();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, COERCE_FIELD)) {
coerce = parser.booleanValue();
if (coerce == true) {
ignoreMalformed = true;
}
} else if (parseContext.parseFieldMatcher().match(currentFieldName, IGNORE_MALFORMED_FIELD)) {
ignoreMalformed = parser.booleanValue();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, VALIDATION_METHOD_FIELD)) {
validationMethod = GeoValidationMethod.fromString(parser.text());
} else {
if (fieldName == null) {
point.resetFromString(parser.text());
fieldName = currentFieldName;
} else {
throw new ParsingException(parser.getTokenLocation(), "[" + GeoDistanceQueryBuilder.NAME +
"] field name already set to [" + fieldName + "] but found [" + currentFieldName + "]");
}
}
}
}

if (vDistance == null) {
throw new ParsingException(parser.getTokenLocation(), "geo_distance requires 'distance' to be specified");
}

GeoDistanceQueryBuilder qb = new GeoDistanceQueryBuilder(fieldName);
if (vDistance instanceof Number) {
qb.distance(((Number) vDistance).doubleValue(), unit);
} else {
qb.distance((String) vDistance, unit);
}
qb.point(point);
if (validationMethod != null) {
qb.setValidationMethod(validationMethod);
} else {
qb.setValidationMethod(GeoValidationMethod.infer(coerce, ignoreMalformed));
}
qb.optimizeBbox(optimizeBbox);
qb.geoDistance(geoDistance);
qb.boost(boost);
qb.queryName(queryName);
return qb;
}

@Override
public GeoDistanceQueryBuilder getBuilderPrototype() {
return GeoDistanceQueryBuilder.PROTOTYPE;
}
}
GeoDistanceRangeQueryBuilder.java
@@ -24,6 +24,8 @@ import org.apache.lucene.spatial.geopoint.document.GeoPointField;
import org.apache.lucene.spatial.geopoint.search.GeoPointDistanceRangeQuery;
import org.apache.lucene.spatial.util.GeoDistanceUtils;
import org.elasticsearch.Version;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.geo.GeoDistance;
import org.elasticsearch.common.geo.GeoPoint;
|
@@ -32,9 +34,11 @@ import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.unit.DistanceUnit;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.fielddata.IndexGeoPointFieldData;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.geo.BaseGeoPointFieldMapper;
import org.elasticsearch.index.mapper.geo.GeoPointFieldMapper;
import org.elasticsearch.index.mapper.geo.GeoPointFieldMapperLegacy;
import org.elasticsearch.index.search.geo.GeoDistanceRangeQuery;
|
@@ -47,12 +51,33 @@ import static org.apache.lucene.spatial.util.GeoEncodingUtils.TOLERANCE;
public class GeoDistanceRangeQueryBuilder extends AbstractQueryBuilder<GeoDistanceRangeQueryBuilder> {

public static final String NAME = "geo_distance_range";
public static final ParseField QUERY_NAME_FIELD = new ParseField(NAME);

public static final boolean DEFAULT_INCLUDE_LOWER = true;
public static final boolean DEFAULT_INCLUDE_UPPER = true;
public static final GeoDistance DEFAULT_GEO_DISTANCE = GeoDistance.DEFAULT;
public static final DistanceUnit DEFAULT_UNIT = DistanceUnit.DEFAULT;
public static final String DEFAULT_OPTIMIZE_BBOX = "memory";

public static final GeoDistanceRangeQueryBuilder PROTOTYPE = new GeoDistanceRangeQueryBuilder("_na_", new GeoPoint());

private static final ParseField FROM_FIELD = new ParseField("from");
private static final ParseField TO_FIELD = new ParseField("to");
private static final ParseField INCLUDE_LOWER_FIELD = new ParseField("include_lower");
private static final ParseField INCLUDE_UPPER_FIELD = new ParseField("include_upper");
private static final ParseField GT_FIELD = new ParseField("gt");
private static final ParseField GTE_FIELD = new ParseField("gte", "ge");
private static final ParseField LT_FIELD = new ParseField("lt");
private static final ParseField LTE_FIELD = new ParseField("lte", "le");
private static final ParseField UNIT_FIELD = new ParseField("unit");
private static final ParseField DISTANCE_TYPE_FIELD = new ParseField("distance_type");
private static final ParseField NAME_FIELD = new ParseField("_name");
private static final ParseField BOOST_FIELD = new ParseField("boost");
private static final ParseField OPTIMIZE_BBOX_FIELD = new ParseField("optimize_bbox");
private static final ParseField COERCE_FIELD = new ParseField("coerce", "normalize");
private static final ParseField IGNORE_MALFORMED_FIELD = new ParseField("ignore_malformed");
private static final ParseField VALIDATION_METHOD = new ParseField("validation_method");

private final String fieldName;

private Object from;
|
@@ -70,8 +95,6 @@ public class GeoDistanceRangeQueryBuilder extends AbstractQueryBuilder<GeoDistan

private GeoValidationMethod validationMethod = GeoValidationMethod.DEFAULT;

static final GeoDistanceRangeQueryBuilder PROTOTYPE = new GeoDistanceRangeQueryBuilder("_na_", new GeoPoint());

public GeoDistanceRangeQueryBuilder(String fieldName, GeoPoint point) {
if (Strings.isEmpty(fieldName)) {
throw new IllegalArgumentException("fieldName must not be null");
@@ -290,18 +313,229 @@ public class GeoDistanceRangeQueryBuilder extends AbstractQueryBuilder<GeoDistan
protected void doXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(NAME);
builder.startArray(fieldName).value(point.lon()).value(point.lat()).endArray();
builder.field(GeoDistanceRangeQueryParser.FROM_FIELD.getPreferredName(), from);
builder.field(GeoDistanceRangeQueryParser.TO_FIELD.getPreferredName(), to);
builder.field(GeoDistanceRangeQueryParser.INCLUDE_LOWER_FIELD.getPreferredName(), includeLower);
builder.field(GeoDistanceRangeQueryParser.INCLUDE_UPPER_FIELD.getPreferredName(), includeUpper);
builder.field(GeoDistanceRangeQueryParser.UNIT_FIELD.getPreferredName(), unit);
builder.field(GeoDistanceRangeQueryParser.DISTANCE_TYPE_FIELD.getPreferredName(), geoDistance.name().toLowerCase(Locale.ROOT));
builder.field(GeoDistanceRangeQueryParser.OPTIMIZE_BBOX_FIELD.getPreferredName(), optimizeBbox);
builder.field(GeoDistanceRangeQueryParser.VALIDATION_METHOD.getPreferredName(), validationMethod);
builder.field(FROM_FIELD.getPreferredName(), from);
builder.field(TO_FIELD.getPreferredName(), to);
builder.field(INCLUDE_LOWER_FIELD.getPreferredName(), includeLower);
builder.field(INCLUDE_UPPER_FIELD.getPreferredName(), includeUpper);
builder.field(UNIT_FIELD.getPreferredName(), unit);
builder.field(DISTANCE_TYPE_FIELD.getPreferredName(), geoDistance.name().toLowerCase(Locale.ROOT));
builder.field(OPTIMIZE_BBOX_FIELD.getPreferredName(), optimizeBbox);
builder.field(VALIDATION_METHOD.getPreferredName(), validationMethod);
printBoostAndQueryName(builder);
builder.endObject();
}

public static GeoDistanceRangeQueryBuilder fromXContent(QueryParseContext parseContext) throws IOException {
XContentParser parser = parseContext.parser();

XContentParser.Token token;

Float boost = null;
String queryName = null;
String currentFieldName = null;
GeoPoint point = null;
String fieldName = null;
Object vFrom = null;
Object vTo = null;
Boolean includeLower = null;
Boolean includeUpper = null;
DistanceUnit unit = null;
GeoDistance geoDistance = null;
String optimizeBbox = null;
boolean coerce = GeoValidationMethod.DEFAULT_LENIENT_PARSING;
boolean ignoreMalformed = GeoValidationMethod.DEFAULT_LENIENT_PARSING;
GeoValidationMethod validationMethod = null;

while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (parseContext.isDeprecatedSetting(currentFieldName)) {
// skip
} else if (token == XContentParser.Token.START_ARRAY) {
if (fieldName == null) {
if (point == null) {
point = new GeoPoint();
}
GeoUtils.parseGeoPoint(parser, point);
fieldName = currentFieldName;
} else {
throw new ParsingException(parser.getTokenLocation(), "[" + GeoDistanceRangeQueryBuilder.NAME +
"] field name already set to [" + fieldName + "] but found [" + currentFieldName + "]");
}
} else if (token == XContentParser.Token.START_OBJECT) {
// the json in the format of -> field : { lat : 30, lon : 12 }
if (fieldName == null) {
fieldName = currentFieldName;
if (point == null) {
point = new GeoPoint();
}
GeoUtils.parseGeoPoint(parser, point);
} else {
throw new ParsingException(parser.getTokenLocation(), "[" + GeoDistanceRangeQueryBuilder.NAME +
"] field name already set to [" + fieldName + "] but found [" + currentFieldName + "]");
}
} else if (token.isValue()) {
if (parseContext.parseFieldMatcher().match(currentFieldName, FROM_FIELD)) {
if (token == XContentParser.Token.VALUE_NULL) {
} else if (token == XContentParser.Token.VALUE_STRING) {
vFrom = parser.text(); // a String
} else {
vFrom = parser.numberValue(); // a Number
}
} else if (parseContext.parseFieldMatcher().match(currentFieldName, TO_FIELD)) {
if (token == XContentParser.Token.VALUE_NULL) {
} else if (token == XContentParser.Token.VALUE_STRING) {
vTo = parser.text(); // a String
} else {
vTo = parser.numberValue(); // a Number
}
} else if (parseContext.parseFieldMatcher().match(currentFieldName, INCLUDE_LOWER_FIELD)) {
includeLower = parser.booleanValue();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, INCLUDE_UPPER_FIELD)) {
includeUpper = parser.booleanValue();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, GT_FIELD)) {
if (token == XContentParser.Token.VALUE_NULL) {
} else if (token == XContentParser.Token.VALUE_STRING) {
vFrom = parser.text(); // a String
} else {
vFrom = parser.numberValue(); // a Number
}
includeLower = false;
} else if (parseContext.parseFieldMatcher().match(currentFieldName, GTE_FIELD)) {
if (token == XContentParser.Token.VALUE_NULL) {
} else if (token == XContentParser.Token.VALUE_STRING) {
vFrom = parser.text(); // a String
} else {
vFrom = parser.numberValue(); // a Number
}
includeLower = true;
} else if (parseContext.parseFieldMatcher().match(currentFieldName, LT_FIELD)) {
if (token == XContentParser.Token.VALUE_NULL) {
} else if (token == XContentParser.Token.VALUE_STRING) {
vTo = parser.text(); // a String
} else {
vTo = parser.numberValue(); // a Number
}
includeUpper = false;
} else if (parseContext.parseFieldMatcher().match(currentFieldName, LTE_FIELD)) {
if (token == XContentParser.Token.VALUE_NULL) {
} else if (token == XContentParser.Token.VALUE_STRING) {
vTo = parser.text(); // a String
} else {
vTo = parser.numberValue(); // a Number
}
includeUpper = true;
} else if (parseContext.parseFieldMatcher().match(currentFieldName, UNIT_FIELD)) {
unit = DistanceUnit.fromString(parser.text());
} else if (parseContext.parseFieldMatcher().match(currentFieldName, DISTANCE_TYPE_FIELD)) {
geoDistance = GeoDistance.fromString(parser.text());
} else if (currentFieldName.endsWith(GeoPointFieldMapper.Names.LAT_SUFFIX)) {
String maybeFieldName = currentFieldName.substring(0,
currentFieldName.length() - GeoPointFieldMapper.Names.LAT_SUFFIX.length());
if (fieldName == null || fieldName.equals(maybeFieldName)) {
fieldName = maybeFieldName;
} else {
throw new ParsingException(parser.getTokenLocation(), "[" + GeoDistanceRangeQueryBuilder.NAME +
"] field name already set to [" + fieldName + "] but found [" + currentFieldName + "]");
}
if (point == null) {
point = new GeoPoint();
}
point.resetLat(parser.doubleValue());
} else if (currentFieldName.endsWith(GeoPointFieldMapper.Names.LON_SUFFIX)) {
String maybeFieldName = currentFieldName.substring(0,
currentFieldName.length() - GeoPointFieldMapper.Names.LON_SUFFIX.length());
if (fieldName == null || fieldName.equals(maybeFieldName)) {
fieldName = maybeFieldName;
} else {
throw new ParsingException(parser.getTokenLocation(), "[" + GeoDistanceRangeQueryBuilder.NAME +
"] field name already set to [" + fieldName + "] but found [" + currentFieldName + "]");
}
if (point == null) {
point = new GeoPoint();
}
point.resetLon(parser.doubleValue());
} else if (parseContext.parseFieldMatcher().match(currentFieldName, NAME_FIELD)) {
queryName = parser.text();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, BOOST_FIELD)) {
boost = parser.floatValue();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, OPTIMIZE_BBOX_FIELD)) {
optimizeBbox = parser.textOrNull();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, COERCE_FIELD)) {
coerce = parser.booleanValue();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, IGNORE_MALFORMED_FIELD)) {
ignoreMalformed = parser.booleanValue();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, VALIDATION_METHOD)) {
validationMethod = GeoValidationMethod.fromString(parser.text());
} else {
if (fieldName == null) {
if (point == null) {
point = new GeoPoint();
}
point.resetFromString(parser.text());
fieldName = currentFieldName;
} else {
throw new ParsingException(parser.getTokenLocation(), "[" + GeoDistanceRangeQueryBuilder.NAME +
"] field name already set to [" + fieldName + "] but found [" + currentFieldName + "]");
}
}
}
}

GeoDistanceRangeQueryBuilder queryBuilder = new GeoDistanceRangeQueryBuilder(fieldName, point);
if (boost != null) {
queryBuilder.boost(boost);
}

if (queryName != null) {
queryBuilder.queryName(queryName);
}

if (vFrom != null) {
if (vFrom instanceof Number) {
queryBuilder.from((Number) vFrom);
} else {
queryBuilder.from((String) vFrom);
}
}

if (vTo != null) {
if (vTo instanceof Number) {
queryBuilder.to((Number) vTo);
} else {
queryBuilder.to((String) vTo);
}
}

if (includeUpper != null) {
queryBuilder.includeUpper(includeUpper);
}

if (includeLower != null) {
queryBuilder.includeLower(includeLower);
}

if (unit != null) {
queryBuilder.unit(unit);
}

if (geoDistance != null) {
queryBuilder.geoDistance(geoDistance);
}

if (optimizeBbox != null) {
queryBuilder.optimizeBbox(optimizeBbox);
}

if (validationMethod != null) {
// if validation method is set explicitly ignore deprecated coerce/ignore malformed fields if any
queryBuilder.setValidationMethod(validationMethod);
} else {
queryBuilder.setValidationMethod(GeoValidationMethod.infer(coerce, ignoreMalformed));
}
return queryBuilder;
}

@Override
protected GeoDistanceRangeQueryBuilder doReadFrom(StreamInput in) throws IOException {
GeoDistanceRangeQueryBuilder queryBuilder = new GeoDistanceRangeQueryBuilder(in.readString(), in.readGeoPoint());
|
@@ -1,277 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

package org.elasticsearch.index.query;

import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.geo.GeoDistance;
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.geo.GeoUtils;
import org.elasticsearch.common.unit.DistanceUnit;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.mapper.geo.GeoPointFieldMapper;

import java.io.IOException;

/**
* <pre>
* {
* "name.lat" : 1.1,
* "name.lon" : 1.2,
* }
* </pre>
*/
public class GeoDistanceRangeQueryParser implements QueryParser<GeoDistanceRangeQueryBuilder> {

public static final ParseField QUERY_NAME_FIELD = new ParseField(GeoDistanceRangeQueryBuilder.NAME);
public static final ParseField FROM_FIELD = new ParseField("from");
public static final ParseField TO_FIELD = new ParseField("to");
public static final ParseField INCLUDE_LOWER_FIELD = new ParseField("include_lower");
public static final ParseField INCLUDE_UPPER_FIELD = new ParseField("include_upper");
public static final ParseField GT_FIELD = new ParseField("gt");
public static final ParseField GTE_FIELD = new ParseField("gte", "ge");
public static final ParseField LT_FIELD = new ParseField("lt");
public static final ParseField LTE_FIELD = new ParseField("lte", "le");
public static final ParseField UNIT_FIELD = new ParseField("unit");
public static final ParseField DISTANCE_TYPE_FIELD = new ParseField("distance_type");
public static final ParseField NAME_FIELD = new ParseField("_name");
public static final ParseField BOOST_FIELD = new ParseField("boost");
public static final ParseField OPTIMIZE_BBOX_FIELD = new ParseField("optimize_bbox");
public static final ParseField COERCE_FIELD = new ParseField("coerce", "normalize");
public static final ParseField IGNORE_MALFORMED_FIELD = new ParseField("ignore_malformed");
public static final ParseField VALIDATION_METHOD = new ParseField("validation_method");

@Override
public GeoDistanceRangeQueryBuilder getBuilderPrototype() {
return GeoDistanceRangeQueryBuilder.PROTOTYPE;
}

@Override
public GeoDistanceRangeQueryBuilder fromXContent(QueryParseContext parseContext) throws IOException {
XContentParser parser = parseContext.parser();

XContentParser.Token token;

Float boost = null;
String queryName = null;
String currentFieldName = null;
GeoPoint point = null;
String fieldName = null;
Object vFrom = null;
Object vTo = null;
Boolean includeLower = null;
Boolean includeUpper = null;
DistanceUnit unit = null;
GeoDistance geoDistance = null;
String optimizeBbox = null;
boolean coerce = GeoValidationMethod.DEFAULT_LENIENT_PARSING;
boolean ignoreMalformed = GeoValidationMethod.DEFAULT_LENIENT_PARSING;
GeoValidationMethod validationMethod = null;

while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (parseContext.isDeprecatedSetting(currentFieldName)) {
// skip
} else if (token == XContentParser.Token.START_ARRAY) {
if (fieldName == null) {
if (point == null) {
point = new GeoPoint();
}
GeoUtils.parseGeoPoint(parser, point);
fieldName = currentFieldName;
} else {
throw new ParsingException(parser.getTokenLocation(), "[" + GeoDistanceRangeQueryBuilder.NAME +
"] field name already set to [" + fieldName + "] but found [" + currentFieldName + "]");
}
} else if (token == XContentParser.Token.START_OBJECT) {
// the json in the format of -> field : { lat : 30, lon : 12 }
if (fieldName == null) {
fieldName = currentFieldName;
if (point == null) {
point = new GeoPoint();
}
GeoUtils.parseGeoPoint(parser, point);
} else {
throw new ParsingException(parser.getTokenLocation(), "[" + GeoDistanceRangeQueryBuilder.NAME +
"] field name already set to [" + fieldName + "] but found [" + currentFieldName + "]");
}
} else if (token.isValue()) {
if (parseContext.parseFieldMatcher().match(currentFieldName, FROM_FIELD)) {
if (token == XContentParser.Token.VALUE_NULL) {
} else if (token == XContentParser.Token.VALUE_STRING) {
vFrom = parser.text(); // a String
} else {
vFrom = parser.numberValue(); // a Number
}
} else if (parseContext.parseFieldMatcher().match(currentFieldName, TO_FIELD)) {
if (token == XContentParser.Token.VALUE_NULL) {
} else if (token == XContentParser.Token.VALUE_STRING) {
vTo = parser.text(); // a String
} else {
vTo = parser.numberValue(); // a Number
}
} else if (parseContext.parseFieldMatcher().match(currentFieldName, INCLUDE_LOWER_FIELD)) {
includeLower = parser.booleanValue();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, INCLUDE_UPPER_FIELD)) {
includeUpper = parser.booleanValue();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, GT_FIELD)) {
if (token == XContentParser.Token.VALUE_NULL) {
} else if (token == XContentParser.Token.VALUE_STRING) {
vFrom = parser.text(); // a String
} else {
vFrom = parser.numberValue(); // a Number
}
includeLower = false;
} else if (parseContext.parseFieldMatcher().match(currentFieldName, GTE_FIELD)) {
if (token == XContentParser.Token.VALUE_NULL) {
} else if (token == XContentParser.Token.VALUE_STRING) {
vFrom = parser.text(); // a String
} else {
vFrom = parser.numberValue(); // a Number
}
includeLower = true;
} else if (parseContext.parseFieldMatcher().match(currentFieldName, LT_FIELD)) {
if (token == XContentParser.Token.VALUE_NULL) {
} else if (token == XContentParser.Token.VALUE_STRING) {
vTo = parser.text(); // a String
} else {
vTo = parser.numberValue(); // a Number
}
includeUpper = false;
} else if (parseContext.parseFieldMatcher().match(currentFieldName, LTE_FIELD)) {
if (token == XContentParser.Token.VALUE_NULL) {
} else if (token == XContentParser.Token.VALUE_STRING) {
vTo = parser.text(); // a String
} else {
vTo = parser.numberValue(); // a Number
}
includeUpper = true;
} else if (parseContext.parseFieldMatcher().match(currentFieldName, UNIT_FIELD)) {
unit = DistanceUnit.fromString(parser.text());
} else if (parseContext.parseFieldMatcher().match(currentFieldName, DISTANCE_TYPE_FIELD)) {
geoDistance = GeoDistance.fromString(parser.text());
} else if (currentFieldName.endsWith(GeoPointFieldMapper.Names.LAT_SUFFIX)) {
String maybeFieldName = currentFieldName.substring(0,
currentFieldName.length() - GeoPointFieldMapper.Names.LAT_SUFFIX.length());
if (fieldName == null || fieldName.equals(maybeFieldName)) {
fieldName = maybeFieldName;
} else {
throw new ParsingException(parser.getTokenLocation(), "[" + GeoDistanceRangeQueryBuilder.NAME +
"] field name already set to [" + fieldName + "] but found [" + currentFieldName + "]");
}
if (point == null) {
point = new GeoPoint();
}
point.resetLat(parser.doubleValue());
} else if (currentFieldName.endsWith(GeoPointFieldMapper.Names.LON_SUFFIX)) {
String maybeFieldName = currentFieldName.substring(0,
currentFieldName.length() - GeoPointFieldMapper.Names.LON_SUFFIX.length());
if (fieldName == null || fieldName.equals(maybeFieldName)) {
fieldName = maybeFieldName;
} else {
throw new ParsingException(parser.getTokenLocation(), "[" + GeoDistanceRangeQueryBuilder.NAME +
"] field name already set to [" + fieldName + "] but found [" + currentFieldName + "]");
}
if (point == null) {
point = new GeoPoint();
}
point.resetLon(parser.doubleValue());
} else if (parseContext.parseFieldMatcher().match(currentFieldName, NAME_FIELD)) {
queryName = parser.text();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, BOOST_FIELD)) {
boost = parser.floatValue();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, OPTIMIZE_BBOX_FIELD)) {
optimizeBbox = parser.textOrNull();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, COERCE_FIELD)) {
coerce = parser.booleanValue();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, IGNORE_MALFORMED_FIELD)) {
ignoreMalformed = parser.booleanValue();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, VALIDATION_METHOD)) {
validationMethod = GeoValidationMethod.fromString(parser.text());
} else {
if (fieldName == null) {
if (point == null) {
point = new GeoPoint();
}
point.resetFromString(parser.text());
fieldName = currentFieldName;
} else {
throw new ParsingException(parser.getTokenLocation(), "[" + GeoDistanceRangeQueryBuilder.NAME +
"] field name already set to [" + fieldName + "] but found [" + currentFieldName + "]");
}
}
}
}

GeoDistanceRangeQueryBuilder queryBuilder = new GeoDistanceRangeQueryBuilder(fieldName, point);
if (boost != null) {
queryBuilder.boost(boost);
}

if (queryName != null) {
queryBuilder.queryName(queryName);
}

if (vFrom != null) {
if (vFrom instanceof Number) {
queryBuilder.from((Number) vFrom);
} else {
queryBuilder.from((String) vFrom);
}
}

if (vTo != null) {
if (vTo instanceof Number) {
queryBuilder.to((Number) vTo);
} else {
queryBuilder.to((String) vTo);
}
}

if (includeUpper != null) {
queryBuilder.includeUpper(includeUpper);
}

if (includeLower != null) {
queryBuilder.includeLower(includeLower);
}

if (unit != null) {
queryBuilder.unit(unit);
}

if (geoDistance != null) {
queryBuilder.geoDistance(geoDistance);
}

if (optimizeBbox != null) {
queryBuilder.optimizeBbox(optimizeBbox);
}

if (validationMethod != null) {
// if validation method is set explicitly ignore deprecated coerce/ignore malformed fields if any
queryBuilder.setValidationMethod(validationMethod);
} else {
queryBuilder.setValidationMethod(GeoValidationMethod.infer(coerce, ignoreMalformed));
}
return queryBuilder;
}
}
@@ -19,16 +19,20 @@
package org.elasticsearch.index.query;

import org.apache.lucene.search.Query;
import org.apache.lucene.spatial.geopoint.document.GeoPointField;
import org.apache.lucene.spatial.geopoint.search.GeoPointInPolygonQuery;
import org.apache.lucene.search.Query;
import org.elasticsearch.Version;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.geo.GeoUtils;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentParser.Token;
import org.elasticsearch.index.fielddata.IndexGeoPointFieldData;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.geo.BaseGeoPointFieldMapper;

@@ -43,11 +47,17 @@ import java.util.Objects;
public class GeoPolygonQueryBuilder extends AbstractQueryBuilder<GeoPolygonQueryBuilder> {

public static final String NAME = "geo_polygon";
public static final ParseField QUERY_NAME_FIELD = new ParseField(NAME);

private static final List<GeoPoint> PROTO_SHAPE = Arrays.asList(new GeoPoint(1.0, 1.0), new GeoPoint(1.0, 2.0),
new GeoPoint(2.0, 1.0));

static final GeoPolygonQueryBuilder PROTOTYPE = new GeoPolygonQueryBuilder("field", PROTO_SHAPE);
public static final GeoPolygonQueryBuilder PROTOTYPE = new GeoPolygonQueryBuilder("field", PROTO_SHAPE);

private static final ParseField COERCE_FIELD = new ParseField("coerce", "normalize");
private static final ParseField IGNORE_MALFORMED_FIELD = new ParseField("ignore_malformed");
private static final ParseField VALIDATION_METHOD = new ParseField("validation_method");
private static final ParseField POINTS_FIELD = new ParseField("points");

private final String fieldName;

@@ -163,21 +173,101 @@ public class GeoPolygonQueryBuilder extends AbstractQueryBuilder<GeoPolygonQuery
builder.startObject(NAME);

builder.startObject(fieldName);
builder.startArray(GeoPolygonQueryParser.POINTS_FIELD.getPreferredName());
builder.startArray(POINTS_FIELD.getPreferredName());
for (GeoPoint point : shell) {
builder.startArray().value(point.lon()).value(point.lat()).endArray();
}
builder.endArray();
builder.endObject();

builder.field(GeoPolygonQueryParser.COERCE_FIELD.getPreferredName(), GeoValidationMethod.isCoerce(validationMethod));
builder.field(GeoPolygonQueryParser.IGNORE_MALFORMED_FIELD.getPreferredName(),
builder.field(COERCE_FIELD.getPreferredName(), GeoValidationMethod.isCoerce(validationMethod));
builder.field(IGNORE_MALFORMED_FIELD.getPreferredName(),
GeoValidationMethod.isIgnoreMalformed(validationMethod));

printBoostAndQueryName(builder);
builder.endObject();
}

public static GeoPolygonQueryBuilder fromXContent(QueryParseContext parseContext) throws IOException {
XContentParser parser = parseContext.parser();

String fieldName = null;

List<GeoPoint> shell = null;

Float boost = null;
boolean coerce = GeoValidationMethod.DEFAULT_LENIENT_PARSING;
boolean ignoreMalformed = GeoValidationMethod.DEFAULT_LENIENT_PARSING;
GeoValidationMethod validationMethod = null;
String queryName = null;
String currentFieldName = null;
XContentParser.Token token;

while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (parseContext.isDeprecatedSetting(currentFieldName)) {
// skip
} else if (token == XContentParser.Token.START_OBJECT) {
fieldName = currentFieldName;

while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (token == XContentParser.Token.START_ARRAY) {
if (parseContext.parseFieldMatcher().match(currentFieldName, POINTS_FIELD)) {
shell = new ArrayList<GeoPoint>();
while ((token = parser.nextToken()) != Token.END_ARRAY) {
shell.add(GeoUtils.parseGeoPoint(parser));
}
} else {
throw new ParsingException(parser.getTokenLocation(),
"[geo_polygon] query does not support [" + currentFieldName + "]");
}
} else {
throw new ParsingException(parser.getTokenLocation(),
"[geo_polygon] query does not support token type [" + token.name() + "] under [" + currentFieldName + "]");
}
}
} else if (token.isValue()) {
if ("_name".equals(currentFieldName)) {
queryName = parser.text();
} else if ("boost".equals(currentFieldName)) {
boost = parser.floatValue();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, COERCE_FIELD)) {
coerce = parser.booleanValue();
if (coerce) {
ignoreMalformed = true;
}
} else if (parseContext.parseFieldMatcher().match(currentFieldName, IGNORE_MALFORMED_FIELD)) {
ignoreMalformed = parser.booleanValue();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, VALIDATION_METHOD)) {
validationMethod = GeoValidationMethod.fromString(parser.text());
} else {
throw new ParsingException(parser.getTokenLocation(),
"[geo_polygon] query does not support [" + currentFieldName + "]");
}
} else {
throw new ParsingException(parser.getTokenLocation(), "[geo_polygon] unexpected token type [" + token.name() + "]");
}
}
GeoPolygonQueryBuilder builder = new GeoPolygonQueryBuilder(fieldName, shell);
if (validationMethod != null) {
// if GeoValidationMethod was explicitly set ignore deprecated coerce and ignoreMalformed settings
builder.setValidationMethod(validationMethod);
} else {
builder.setValidationMethod(GeoValidationMethod.infer(coerce, ignoreMalformed));
}

if (queryName != null) {
builder.queryName(queryName);
}
if (boost != null) {
builder.boost(boost);
}
return builder;
}

@Override
protected GeoPolygonQueryBuilder doReadFrom(StreamInput in) throws IOException {
String fieldName = in.readString();
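The GeoPolygonQueryBuilder above now carries everything a registration needs in one place: a public PROTOTYPE, a public QUERY_NAME_FIELD, and a public static fromXContent. As a rough sketch of how the commit's registerQuery wiring could consume such a builder, the snippet below defines its own hypothetical registerQuery and QueryXContentParser; those two pieces are assumptions for illustration, only the builder-side members come from the diff above.

import java.io.IOException;

import org.elasticsearch.common.ParseField;
import org.elasticsearch.index.query.AbstractQueryBuilder;
import org.elasticsearch.index.query.GeoPolygonQueryBuilder;
import org.elasticsearch.index.query.QueryParseContext;

// Hypothetical registration sketch: registerQuery and QueryXContentParser are
// assumed here, while PROTOTYPE, QUERY_NAME_FIELD and fromXContent come from
// the builder shown in the diff above.
public class QueryRegistrationSketch {

    @FunctionalInterface
    interface QueryXContentParser<QB extends AbstractQueryBuilder<QB>> {
        QB fromXContent(QueryParseContext context) throws IOException;
    }

    static <QB extends AbstractQueryBuilder<QB>> void registerQuery(QB prototype,
            QueryXContentParser<QB> parser, ParseField name) {
        // a real implementation would record the prototype and parser under the query name
    }

    static void registerGeoPolygon() {
        registerQuery(GeoPolygonQueryBuilder.PROTOTYPE,
                GeoPolygonQueryBuilder::fromXContent,
                GeoPolygonQueryBuilder.QUERY_NAME_FIELD);
    }
}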
@@ -1,138 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

package org.elasticsearch.index.query;

import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.geo.GeoUtils;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentParser.Token;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

/**
* <pre>
* {
* "pin.location" : {
* "points" : [
* { "lat" : 12, "lon" : 40},
* {}
* ]
* }
* }
* </pre>
*/
public class GeoPolygonQueryParser implements QueryParser<GeoPolygonQueryBuilder> {

public static final ParseField QUERY_NAME_FIELD = new ParseField(GeoPolygonQueryBuilder.NAME);
public static final ParseField COERCE_FIELD = new ParseField("coerce", "normalize");
public static final ParseField IGNORE_MALFORMED_FIELD = new ParseField("ignore_malformed");
public static final ParseField VALIDATION_METHOD = new ParseField("validation_method");
public static final ParseField POINTS_FIELD = new ParseField("points");

@Override
public GeoPolygonQueryBuilder fromXContent(QueryParseContext parseContext) throws IOException {
XContentParser parser = parseContext.parser();

String fieldName = null;

List<GeoPoint> shell = null;

Float boost = null;
boolean coerce = GeoValidationMethod.DEFAULT_LENIENT_PARSING;
boolean ignoreMalformed = GeoValidationMethod.DEFAULT_LENIENT_PARSING;
GeoValidationMethod validationMethod = null;
String queryName = null;
String currentFieldName = null;
XContentParser.Token token;

while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (parseContext.isDeprecatedSetting(currentFieldName)) {
// skip
} else if (token == XContentParser.Token.START_OBJECT) {
fieldName = currentFieldName;

while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (token == XContentParser.Token.START_ARRAY) {
if (parseContext.parseFieldMatcher().match(currentFieldName, POINTS_FIELD)) {
shell = new ArrayList<GeoPoint>();
while ((token = parser.nextToken()) != Token.END_ARRAY) {
shell.add(GeoUtils.parseGeoPoint(parser));
}
} else {
throw new ParsingException(parser.getTokenLocation(),
"[geo_polygon] query does not support [" + currentFieldName + "]");
}
} else {
throw new ParsingException(parser.getTokenLocation(),
"[geo_polygon] query does not support token type [" + token.name() + "] under [" + currentFieldName + "]");
}
}
} else if (token.isValue()) {
if ("_name".equals(currentFieldName)) {
queryName = parser.text();
} else if ("boost".equals(currentFieldName)) {
boost = parser.floatValue();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, COERCE_FIELD)) {
coerce = parser.booleanValue();
if (coerce) {
ignoreMalformed = true;
}
} else if (parseContext.parseFieldMatcher().match(currentFieldName, IGNORE_MALFORMED_FIELD)) {
ignoreMalformed = parser.booleanValue();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, VALIDATION_METHOD)) {
validationMethod = GeoValidationMethod.fromString(parser.text());
} else {
throw new ParsingException(parser.getTokenLocation(),
"[geo_polygon] query does not support [" + currentFieldName + "]");
}
} else {
throw new ParsingException(parser.getTokenLocation(), "[geo_polygon] unexpected token type [" + token.name() + "]");
}
}
GeoPolygonQueryBuilder builder = new GeoPolygonQueryBuilder(fieldName, shell);
if (validationMethod != null) {
// if GeoValidationMethod was explicitly set ignore deprecated coerce and ignoreMalformed settings
builder.setValidationMethod(validationMethod);
} else {
builder.setValidationMethod(GeoValidationMethod.infer(coerce, ignoreMalformed));
}

if (queryName != null) {
builder.queryName(queryName);
}
if (boost != null) {
builder.boost(boost);
}
return builder;
}

@Override
public GeoPolygonQueryBuilder getBuilderPrototype() {
return GeoPolygonQueryBuilder.PROTOTYPE;
}
}
@@ -30,6 +30,8 @@ import org.apache.lucene.spatial.query.SpatialOperation;
import org.elasticsearch.action.get.GetRequest;
import org.elasticsearch.action.get.GetResponse;
import org.elasticsearch.client.Client;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.geo.ShapeRelation;
import org.elasticsearch.common.geo.ShapesAvailability;

@@ -53,11 +55,22 @@ import java.util.Objects;
public class GeoShapeQueryBuilder extends AbstractQueryBuilder<GeoShapeQueryBuilder> {

public static final String NAME = "geo_shape";
public static final ParseField QUERY_NAME_FIELD = new ParseField(NAME);

public static final String DEFAULT_SHAPE_INDEX_NAME = "shapes";
public static final String DEFAULT_SHAPE_FIELD_NAME = "shape";
public static final ShapeRelation DEFAULT_SHAPE_RELATION = ShapeRelation.INTERSECTS;

static final GeoShapeQueryBuilder PROTOTYPE = new GeoShapeQueryBuilder("field", new PointBuilder());
public static final GeoShapeQueryBuilder PROTOTYPE = new GeoShapeQueryBuilder("field", new PointBuilder());

private static final ParseField SHAPE_FIELD = new ParseField("shape");
private static final ParseField STRATEGY_FIELD = new ParseField("strategy");
private static final ParseField RELATION_FIELD = new ParseField("relation");
private static final ParseField INDEXED_SHAPE_FIELD = new ParseField("indexed_shape");
private static final ParseField SHAPE_ID_FIELD = new ParseField("id");
private static final ParseField SHAPE_TYPE_FIELD = new ParseField("type");
private static final ParseField SHAPE_INDEX_FIELD = new ParseField("index");
private static final ParseField SHAPE_PATH_FIELD = new ParseField("path");

private final String fieldName;

@@ -347,27 +360,27 @@ public class GeoShapeQueryBuilder extends AbstractQueryBuilder<GeoShapeQueryBuil
builder.startObject(fieldName);

if (strategy != null) {
builder.field(GeoShapeQueryParser.STRATEGY_FIELD.getPreferredName(), strategy.getStrategyName());
builder.field(STRATEGY_FIELD.getPreferredName(), strategy.getStrategyName());
}

if (shape != null) {
builder.field(GeoShapeQueryParser.SHAPE_FIELD.getPreferredName());
builder.field(SHAPE_FIELD.getPreferredName());
shape.toXContent(builder, params);
} else {
builder.startObject(GeoShapeQueryParser.INDEXED_SHAPE_FIELD.getPreferredName())
.field(GeoShapeQueryParser.SHAPE_ID_FIELD.getPreferredName(), indexedShapeId)
.field(GeoShapeQueryParser.SHAPE_TYPE_FIELD.getPreferredName(), indexedShapeType);
builder.startObject(INDEXED_SHAPE_FIELD.getPreferredName())
.field(SHAPE_ID_FIELD.getPreferredName(), indexedShapeId)
.field(SHAPE_TYPE_FIELD.getPreferredName(), indexedShapeType);
if (indexedShapeIndex != null) {
builder.field(GeoShapeQueryParser.SHAPE_INDEX_FIELD.getPreferredName(), indexedShapeIndex);
builder.field(SHAPE_INDEX_FIELD.getPreferredName(), indexedShapeIndex);
}
if (indexedShapePath != null) {
builder.field(GeoShapeQueryParser.SHAPE_PATH_FIELD.getPreferredName(), indexedShapePath);
builder.field(SHAPE_PATH_FIELD.getPreferredName(), indexedShapePath);
}
builder.endObject();
}

if(relation != null) {
builder.field(GeoShapeQueryParser.RELATION_FIELD.getPreferredName(), relation.getRelationName());
builder.field(RELATION_FIELD.getPreferredName(), relation.getRelationName());
}

builder.endObject();

@@ -377,6 +390,111 @@ public class GeoShapeQueryBuilder extends AbstractQueryBuilder<GeoShapeQueryBuil
builder.endObject();
}

public static GeoShapeQueryBuilder fromXContent(QueryParseContext parseContext) throws IOException {
XContentParser parser = parseContext.parser();

String fieldName = null;
ShapeRelation shapeRelation = null;
SpatialStrategy strategy = null;
ShapeBuilder shape = null;

String id = null;
String type = null;
String index = null;
String shapePath = null;

XContentParser.Token token;
String currentFieldName = null;
float boost = AbstractQueryBuilder.DEFAULT_BOOST;
String queryName = null;

while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (token == XContentParser.Token.START_OBJECT) {
if (fieldName != null) {
throw new ParsingException(parser.getTokenLocation(), "[" +
GeoShapeQueryBuilder.NAME + "] point specified twice. [" + currentFieldName + "]");
}
fieldName = currentFieldName;
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
token = parser.nextToken();
if (parseContext.parseFieldMatcher().match(currentFieldName, SHAPE_FIELD)) {
shape = ShapeBuilder.parse(parser);
} else if (parseContext.parseFieldMatcher().match(currentFieldName, STRATEGY_FIELD)) {
String strategyName = parser.text();
strategy = SpatialStrategy.fromString(strategyName);
if (strategy == null) {
throw new ParsingException(parser.getTokenLocation(), "Unknown strategy [" + strategyName + " ]");
}
} else if (parseContext.parseFieldMatcher().match(currentFieldName, RELATION_FIELD)) {
shapeRelation = ShapeRelation.getRelationByName(parser.text());
if (shapeRelation == null) {
throw new ParsingException(parser.getTokenLocation(), "Unknown shape operation [" + parser.text() + " ]");
}
} else if (parseContext.parseFieldMatcher().match(currentFieldName, INDEXED_SHAPE_FIELD)) {
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (token.isValue()) {
if (parseContext.parseFieldMatcher().match(currentFieldName, SHAPE_ID_FIELD)) {
id = parser.text();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, SHAPE_TYPE_FIELD)) {
type = parser.text();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, SHAPE_INDEX_FIELD)) {
index = parser.text();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, SHAPE_PATH_FIELD)) {
shapePath = parser.text();
}
} else {
throw new ParsingException(parser.getTokenLocation(), "[" + GeoShapeQueryBuilder.NAME +
"] unknown token [" + token + "] after [" + currentFieldName + "]");
}
}
} else {
throw new ParsingException(parser.getTokenLocation(), "[" + GeoShapeQueryBuilder.NAME +
"] query does not support [" + currentFieldName + "]");
}
}
}
} else if (token.isValue()) {
if (parseContext.parseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.BOOST_FIELD)) {
boost = parser.floatValue();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.NAME_FIELD)) {
queryName = parser.text();
} else {
throw new ParsingException(parser.getTokenLocation(), "[" + GeoShapeQueryBuilder.NAME +
"] query does not support [" + currentFieldName + "]");
}
}
}
GeoShapeQueryBuilder builder;
if (shape != null) {
builder = new GeoShapeQueryBuilder(fieldName, shape);
} else {
builder = new GeoShapeQueryBuilder(fieldName, id, type);
}
if (index != null) {
builder.indexedShapeIndex(index);
}
if (shapePath != null) {
builder.indexedShapePath(shapePath);
}
if (shapeRelation != null) {
builder.relation(shapeRelation);
}
if (strategy != null) {
builder.strategy(strategy);
}
if (queryName != null) {
builder.queryName(queryName);
}
builder.boost(boost);
return builder;
}

@Override
protected GeoShapeQueryBuilder doReadFrom(StreamInput in) throws IOException {
String fieldName = in.readString();
@@ -1,153 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

package org.elasticsearch.index.query;

import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.geo.ShapeRelation;
import org.elasticsearch.common.geo.SpatialStrategy;
import org.elasticsearch.common.geo.builders.ShapeBuilder;
import org.elasticsearch.common.xcontent.XContentParser;

import java.io.IOException;

public class GeoShapeQueryParser implements QueryParser<GeoShapeQueryBuilder> {

public static final ParseField QUERY_NAME_FIELD = new ParseField(GeoShapeQueryBuilder.NAME);
public static final ParseField SHAPE_FIELD = new ParseField("shape");
public static final ParseField STRATEGY_FIELD = new ParseField("strategy");
public static final ParseField RELATION_FIELD = new ParseField("relation");
public static final ParseField INDEXED_SHAPE_FIELD = new ParseField("indexed_shape");
public static final ParseField SHAPE_ID_FIELD = new ParseField("id");
public static final ParseField SHAPE_TYPE_FIELD = new ParseField("type");
public static final ParseField SHAPE_INDEX_FIELD = new ParseField("index");
public static final ParseField SHAPE_PATH_FIELD = new ParseField("path");

@Override
public GeoShapeQueryBuilder fromXContent(QueryParseContext parseContext) throws IOException {
XContentParser parser = parseContext.parser();

String fieldName = null;
ShapeRelation shapeRelation = null;
SpatialStrategy strategy = null;
ShapeBuilder shape = null;

String id = null;
String type = null;
String index = null;
String shapePath = null;

XContentParser.Token token;
String currentFieldName = null;
float boost = AbstractQueryBuilder.DEFAULT_BOOST;
String queryName = null;

while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (token == XContentParser.Token.START_OBJECT) {
if (fieldName != null) {
throw new ParsingException(parser.getTokenLocation(), "[" +
GeoShapeQueryBuilder.NAME + "] point specified twice. [" + currentFieldName + "]");
}
fieldName = currentFieldName;
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
token = parser.nextToken();
if (parseContext.parseFieldMatcher().match(currentFieldName, SHAPE_FIELD)) {
shape = ShapeBuilder.parse(parser);
} else if (parseContext.parseFieldMatcher().match(currentFieldName, STRATEGY_FIELD)) {
String strategyName = parser.text();
strategy = SpatialStrategy.fromString(strategyName);
if (strategy == null) {
throw new ParsingException(parser.getTokenLocation(), "Unknown strategy [" + strategyName + " ]");
}
} else if (parseContext.parseFieldMatcher().match(currentFieldName, RELATION_FIELD)) {
shapeRelation = ShapeRelation.getRelationByName(parser.text());
if (shapeRelation == null) {
throw new ParsingException(parser.getTokenLocation(), "Unknown shape operation [" + parser.text() + " ]");
}
} else if (parseContext.parseFieldMatcher().match(currentFieldName, INDEXED_SHAPE_FIELD)) {
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (token.isValue()) {
if (parseContext.parseFieldMatcher().match(currentFieldName, SHAPE_ID_FIELD)) {
id = parser.text();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, SHAPE_TYPE_FIELD)) {
type = parser.text();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, SHAPE_INDEX_FIELD)) {
index = parser.text();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, SHAPE_PATH_FIELD)) {
shapePath = parser.text();
}
} else {
throw new ParsingException(parser.getTokenLocation(), "[" + GeoShapeQueryBuilder.NAME +
"] unknown token [" + token + "] after [" + currentFieldName + "]");
}
}
} else {
throw new ParsingException(parser.getTokenLocation(), "[" + GeoShapeQueryBuilder.NAME +
"] query does not support [" + currentFieldName + "]");
}
}
}
} else if (token.isValue()) {
if (parseContext.parseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.BOOST_FIELD)) {
boost = parser.floatValue();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.NAME_FIELD)) {
queryName = parser.text();
} else {
throw new ParsingException(parser.getTokenLocation(), "[" + GeoShapeQueryBuilder.NAME +
"] query does not support [" + currentFieldName + "]");
}
}
}
GeoShapeQueryBuilder builder;
if (shape != null) {
builder = new GeoShapeQueryBuilder(fieldName, shape);
} else {
builder = new GeoShapeQueryBuilder(fieldName, id, type);
}
if (index != null) {
builder.indexedShapeIndex(index);
}
if (shapePath != null) {
builder.indexedShapePath(shapePath);
}
if (shapeRelation != null) {
builder.relation(shapeRelation);
}
if (strategy != null) {
builder.strategy(strategy);
}
if (queryName != null) {
builder.queryName(queryName);
}
builder.boost(boost);
return builder;
}

@Override
public GeoShapeQueryBuilder getBuilderPrototype() {
return GeoShapeQueryBuilder.PROTOTYPE;
}
}
@@ -28,7 +28,6 @@ import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.geo.GeoUtils;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.unit.DistanceUnit;

@@ -61,10 +60,13 @@
public class GeohashCellQuery {

public static final String NAME = "geohash_cell";
public static final ParseField NEIGHBORS_FIELD = new ParseField("neighbors");
public static final ParseField PRECISION_FIELD = new ParseField("precision");
public static final ParseField QUERY_NAME_FIELD = new ParseField(NAME);

public static final boolean DEFAULT_NEIGHBORS = false;

private static final ParseField NEIGHBORS_FIELD = new ParseField("neighbors");
private static final ParseField PRECISION_FIELD = new ParseField("precision");

/**
* Create a new geohash filter for a given set of geohashes. In general this method
* returns a boolean filter combining the geohashes OR-wise.

@@ -96,6 +98,7 @@
* <code>false</code>.
*/
public static class Builder extends AbstractQueryBuilder<Builder> {
public static final Builder PROTOTYPE = new Builder("field", new GeoPoint());
// we need to store the geohash rather than the corresponding point,
// because a transformation from a geohash to a point an back to the
// geohash will extend the accuracy of the hash to max precision

@@ -104,8 +107,6 @@
private String geohash;
private Integer levels = null;
private boolean neighbors = DEFAULT_NEIGHBORS;
private static final Builder PROTOTYPE = new Builder("field", new GeoPoint());

public Builder(String field, GeoPoint point) {
this(field, point == null ? null : point.geohash(), false);

@@ -226,59 +227,7 @@
builder.endObject();
}

@Override
protected Builder doReadFrom(StreamInput in) throws IOException {
String field = in.readString();
String geohash = in.readString();
Builder builder = new Builder(field, geohash);
if (in.readBoolean()) {
builder.precision(in.readVInt());
}
builder.neighbors(in.readBoolean());
return builder;
}

@Override
protected void doWriteTo(StreamOutput out) throws IOException {
out.writeString(fieldName);
out.writeString(geohash);
boolean hasLevels = levels != null;
out.writeBoolean(hasLevels);
if (hasLevels) {
out.writeVInt(levels);
}
out.writeBoolean(neighbors);
}

@Override
protected boolean doEquals(Builder other) {
return Objects.equals(fieldName, other.fieldName)
&& Objects.equals(geohash, other.geohash)
&& Objects.equals(levels, other.levels)
&& Objects.equals(neighbors, other.neighbors);
}

@Override
protected int doHashCode() {
return Objects.hash(fieldName, geohash, levels, neighbors);
}

@Override
public String getWriteableName() {
return NAME;
}
}

public static class Parser implements QueryParser<Builder> {

public static final ParseField QUERY_NAME_FIELD = new ParseField(NAME);

@Inject
public Parser() {
}

@Override
public Builder fromXContent(QueryParseContext parseContext) throws IOException {
public static Builder fromXContent(QueryParseContext parseContext) throws IOException {
XContentParser parser = parseContext.parser();

String fieldName = null;

@@ -359,8 +308,45 @@
}

@Override
public GeohashCellQuery.Builder getBuilderPrototype() {
return Builder.PROTOTYPE;
protected Builder doReadFrom(StreamInput in) throws IOException {
String field = in.readString();
String geohash = in.readString();
Builder builder = new Builder(field, geohash);
if (in.readBoolean()) {
builder.precision(in.readVInt());
}
builder.neighbors(in.readBoolean());
return builder;
}

@Override
protected void doWriteTo(StreamOutput out) throws IOException {
out.writeString(fieldName);
out.writeString(geohash);
boolean hasLevels = levels != null;
out.writeBoolean(hasLevels);
if (hasLevels) {
out.writeVInt(levels);
}
out.writeBoolean(neighbors);
}

@Override
protected boolean doEquals(Builder other) {
return Objects.equals(fieldName, other.fieldName)
&& Objects.equals(geohash, other.geohash)
&& Objects.equals(levels, other.levels)
&& Objects.equals(neighbors, other.neighbors);
}

@Override
protected int doHashCode() {
return Objects.hash(fieldName, geohash, levels, neighbors);
}

@Override
public String getWriteableName() {
return NAME;
}
}
}
@@ -20,10 +20,13 @@
package org.elasticsearch.index.query;

import org.apache.lucene.search.Query;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;

import java.io.IOException;

@@ -33,6 +36,7 @@ import java.io.IOException;
public class MatchNoneQueryBuilder extends AbstractQueryBuilder<MatchNoneQueryBuilder> {

public static final String NAME = "match_none";
public static final ParseField QUERY_NAME_FIELD = new ParseField(NAME);

public static final MatchNoneQueryBuilder PROTOTYPE = new MatchNoneQueryBuilder();

@@ -43,6 +47,37 @@ public class MatchNoneQueryBuilder extends AbstractQueryBuilder<MatchNoneQueryBu
builder.endObject();
}

public static MatchNoneQueryBuilder fromXContent(QueryParseContext parseContext) throws IOException {
XContentParser parser = parseContext.parser();

String currentFieldName = null;
XContentParser.Token token;
String queryName = null;
float boost = AbstractQueryBuilder.DEFAULT_BOOST;
while (((token = parser.nextToken()) != XContentParser.Token.END_OBJECT && token != XContentParser.Token.END_ARRAY)) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (token.isValue()) {
if (parseContext.parseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.NAME_FIELD)) {
queryName = parser.text();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.BOOST_FIELD)) {
boost = parser.floatValue();
} else {
throw new ParsingException(parser.getTokenLocation(), "["+MatchNoneQueryBuilder.NAME +
"] query does not support [" + currentFieldName + "]");
}
} else {
throw new ParsingException(parser.getTokenLocation(), "[" + MatchNoneQueryBuilder.NAME +
"] unknown token [" + token + "] after [" + currentFieldName + "]");
}
}

MatchNoneQueryBuilder matchNoneQueryBuilder = new MatchNoneQueryBuilder();
matchNoneQueryBuilder.boost(boost);
matchNoneQueryBuilder.queryName(queryName);
return matchNoneQueryBuilder;
}

@Override
protected Query doToQuery(QueryShardContext context) throws IOException {
return Queries.newMatchNoDocsQuery();
@@ -1,68 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

package org.elasticsearch.index.query;

import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.xcontent.XContentParser;

import java.io.IOException;

public class MatchNoneQueryParser implements QueryParser<MatchNoneQueryBuilder> {

public static final ParseField QUERY_NAME_FIELD = new ParseField(MatchNoneQueryBuilder.NAME);

@Override
public MatchNoneQueryBuilder fromXContent(QueryParseContext parseContext) throws IOException {
XContentParser parser = parseContext.parser();

String currentFieldName = null;
XContentParser.Token token;
String queryName = null;
float boost = AbstractQueryBuilder.DEFAULT_BOOST;
while (((token = parser.nextToken()) != XContentParser.Token.END_OBJECT && token != XContentParser.Token.END_ARRAY)) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (token.isValue()) {
if (parseContext.parseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.NAME_FIELD)) {
queryName = parser.text();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.BOOST_FIELD)) {
boost = parser.floatValue();
} else {
throw new ParsingException(parser.getTokenLocation(), "["+MatchNoneQueryBuilder.NAME +
"] query does not support [" + currentFieldName + "]");
}
} else {
throw new ParsingException(parser.getTokenLocation(), "[" + MatchNoneQueryBuilder.NAME +
"] unknown token [" + token + "] after [" + currentFieldName + "]");
}
}

MatchNoneQueryBuilder matchNoneQueryBuilder = new MatchNoneQueryBuilder();
matchNoneQueryBuilder.boost(boost);
matchNoneQueryBuilder.queryName(queryName);
return matchNoneQueryBuilder;
}

@Override
public MatchNoneQueryBuilder getBuilderPrototype() {
return MatchNoneQueryBuilder.PROTOTYPE;
}
}
@@ -25,9 +25,12 @@ import org.apache.lucene.search.BooleanQuery
import org.apache.lucene.search.DocValuesTermsQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.internal.ParentFieldMapper;
import org.elasticsearch.index.mapper.internal.TypeFieldMapper;

@@ -38,7 +41,12 @@ import java.util.Objects;
public final class ParentIdQueryBuilder extends AbstractQueryBuilder<ParentIdQueryBuilder> {

public static final String NAME = "parent_id";
static final ParentIdQueryBuilder PROTO = new ParentIdQueryBuilder(null, null);
public static final ParseField QUERY_NAME_FIELD = new ParseField(NAME);

public static final ParentIdQueryBuilder PROTO = new ParentIdQueryBuilder(null, null);

private static final ParseField ID_FIELD = new ParseField("id");
private static final ParseField TYPE_FIELD = new ParseField("type", "child_type");

private final String type;
private final String id;

@@ -59,12 +67,46 @@ public final class ParentIdQueryBuilder extends AbstractQueryBuilder<ParentIdQue
@Override
protected void doXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(NAME);
builder.field(ParentIdQueryParser.TYPE_FIELD.getPreferredName(), type);
builder.field(ParentIdQueryParser.ID_FIELD.getPreferredName(), id);
builder.field(TYPE_FIELD.getPreferredName(), type);
builder.field(ID_FIELD.getPreferredName(), id);
printBoostAndQueryName(builder);
builder.endObject();
}

public static ParentIdQueryBuilder fromXContent(QueryParseContext parseContext) throws IOException {
XContentParser parser = parseContext.parser();
float boost = AbstractQueryBuilder.DEFAULT_BOOST;
String type = null;
String id = null;
String queryName = null;
String currentFieldName = null;
XContentParser.Token token;
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (token.isValue()) {
if (parseContext.parseFieldMatcher().match(currentFieldName, TYPE_FIELD)) {
type = parser.text();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, ID_FIELD)) {
id = parser.text();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.BOOST_FIELD)) {
boost = parser.floatValue();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.NAME_FIELD)) {
queryName = parser.text();
} else {
throw new ParsingException(parser.getTokenLocation(), "[parent_id] query does not support [" + currentFieldName + "]");
}
} else {
throw new ParsingException(parser.getTokenLocation(), "[parent_id] query does not support [" + currentFieldName + "]");
}
}
ParentIdQueryBuilder queryBuilder = new ParentIdQueryBuilder(type, id);
queryBuilder.queryName(queryName);
queryBuilder.boost(boost);
return queryBuilder;
}

@Override
protected Query doToQuery(QueryShardContext context) throws IOException {
DocumentMapper childDocMapper = context.getMapperService().documentMapper(type);
@ -1,72 +0,0 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.index.query;
|
||||
|
||||
import org.elasticsearch.common.ParseField;
|
||||
import org.elasticsearch.common.ParsingException;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
public final class ParentIdQueryParser implements QueryParser<ParentIdQueryBuilder> {
|
||||
|
||||
public static final ParseField QUERY_NAME_FIELD = new ParseField(ParentIdQueryBuilder.NAME);
|
||||
public static final ParseField ID_FIELD = new ParseField("id");
|
||||
public static final ParseField TYPE_FIELD = new ParseField("type", "child_type");
|
||||
|
||||
@Override
|
||||
public ParentIdQueryBuilder fromXContent(QueryParseContext parseContext) throws IOException {
|
||||
XContentParser parser = parseContext.parser();
|
||||
float boost = AbstractQueryBuilder.DEFAULT_BOOST;
|
||||
String type = null;
|
||||
String id = null;
|
||||
String queryName = null;
|
||||
String currentFieldName = null;
|
||||
XContentParser.Token token;
|
||||
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
|
||||
if (token == XContentParser.Token.FIELD_NAME) {
|
||||
currentFieldName = parser.currentName();
|
||||
} else if (token.isValue()) {
|
||||
if (parseContext.parseFieldMatcher().match(currentFieldName, TYPE_FIELD)) {
|
||||
type = parser.text();
|
||||
} else if (parseContext.parseFieldMatcher().match(currentFieldName, ID_FIELD)) {
|
||||
id = parser.text();
|
||||
} else if (parseContext.parseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.BOOST_FIELD)) {
|
||||
boost = parser.floatValue();
|
||||
} else if (parseContext.parseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.NAME_FIELD)) {
|
||||
queryName = parser.text();
|
||||
} else {
|
||||
throw new ParsingException(parser.getTokenLocation(), "[parent_id] query does not support [" + currentFieldName + "]");
|
||||
}
|
||||
} else {
|
||||
throw new ParsingException(parser.getTokenLocation(), "[parent_id] query does not support [" + currentFieldName + "]");
|
||||
}
|
||||
}
|
||||
ParentIdQueryBuilder queryBuilder = new ParentIdQueryBuilder(type, id);
|
||||
queryBuilder.queryName(queryName);
|
||||
queryBuilder.boost(boost);
|
||||
return queryBuilder;
|
||||
}
|
||||
|
||||
@Override
|
||||
public ParentIdQueryBuilder getBuilderPrototype() {
|
||||
return ParentIdQueryBuilder.PROTO;
|
||||
}
|
||||
}
|
|
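With ParentIdQueryParser deleted above and its ParseFields folded into the builder, ParentIdQueryBuilder now owns both directions of the parent_id syntax: doXContent writes type and id, fromXContent reads them back, and "child_type" remains accepted as an alternative spelling through TYPE_FIELD. A small illustrative pairing (the concrete values are made up, not taken from this diff):

    // Request-body form accepted by ParentIdQueryBuilder.fromXContent:
    // { "parent_id" : { "type" : "my_child", "id" : "1", "boost" : 2.0, "_name" : "tagged" } }

    // Programmatic equivalent; chaining works because boost() and queryName() return the builder.
    ParentIdQueryBuilder query = new ParentIdQueryBuilder("my_child", "1")
            .boost(2.0f)
            .queryName("tagged");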
@ -41,6 +41,8 @@ import org.elasticsearch.Version;
|
|||
import org.elasticsearch.action.get.GetRequest;
|
||||
import org.elasticsearch.action.get.GetResponse;
|
||||
import org.elasticsearch.cluster.metadata.IndexMetaData;
|
||||
import org.elasticsearch.common.ParseField;
|
||||
import org.elasticsearch.common.ParsingException;
|
||||
import org.elasticsearch.common.bytes.BytesReference;
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
|
@ -69,7 +71,18 @@ import static org.elasticsearch.index.mapper.SourceToParse.source;
|
|||
public class PercolatorQueryBuilder extends AbstractQueryBuilder<PercolatorQueryBuilder> {
|
||||
|
||||
public static final String NAME = "percolator";
|
||||
static final PercolatorQueryBuilder PROTO = new PercolatorQueryBuilder(null, null, null, null, null, null, null, null);
|
||||
public static final ParseField QUERY_NAME_FIELD = new ParseField(NAME);
|
||||
|
||||
public static final PercolatorQueryBuilder PROTO = new PercolatorQueryBuilder(null, null, null, null, null, null, null, null);
|
||||
|
||||
private static final ParseField DOCUMENT_FIELD = new ParseField("document");
|
||||
private static final ParseField DOCUMENT_TYPE_FIELD = new ParseField("document_type");
|
||||
private static final ParseField INDEXED_DOCUMENT_FIELD_INDEX = new ParseField("index");
|
||||
private static final ParseField INDEXED_DOCUMENT_FIELD_TYPE = new ParseField("type");
|
||||
private static final ParseField INDEXED_DOCUMENT_FIELD_ID = new ParseField("id");
|
||||
private static final ParseField INDEXED_DOCUMENT_FIELD_ROUTING = new ParseField("routing");
|
||||
private static final ParseField INDEXED_DOCUMENT_FIELD_PREFERENCE = new ParseField("preference");
|
||||
private static final ParseField INDEXED_DOCUMENT_FIELD_VERSION = new ParseField("version");
|
||||
|
||||
private final String documentType;
|
||||
private final BytesReference document;
|
||||
|
@ -139,42 +152,123 @@ public class PercolatorQueryBuilder extends AbstractQueryBuilder<PercolatorQuery
|
|||
@Override
|
||||
protected void doXContent(XContentBuilder builder, Params params) throws IOException {
|
||||
builder.startObject(NAME);
|
||||
builder.field(PercolatorQueryParser.DOCUMENT_TYPE_FIELD.getPreferredName(), documentType);
|
||||
builder.field(DOCUMENT_TYPE_FIELD.getPreferredName(), documentType);
|
||||
if (document != null) {
|
||||
XContentType contentType = XContentFactory.xContentType(document);
|
||||
if (contentType == builder.contentType()) {
|
||||
builder.rawField(PercolatorQueryParser.DOCUMENT_FIELD.getPreferredName(), document);
|
||||
builder.rawField(DOCUMENT_FIELD.getPreferredName(), document);
|
||||
} else {
|
||||
XContentParser parser = XContentFactory.xContent(contentType).createParser(document);
|
||||
parser.nextToken();
|
||||
builder.field(PercolatorQueryParser.DOCUMENT_FIELD.getPreferredName());
|
||||
builder.field(DOCUMENT_FIELD.getPreferredName());
|
||||
builder.copyCurrentStructure(parser);
|
||||
}
|
||||
}
|
||||
if (indexedDocumentIndex != null || indexedDocumentType != null || indexedDocumentId != null) {
|
||||
if (indexedDocumentIndex != null) {
|
||||
builder.field(PercolatorQueryParser.INDEXED_DOCUMENT_FIELD_INDEX.getPreferredName(), indexedDocumentIndex);
|
||||
builder.field(INDEXED_DOCUMENT_FIELD_INDEX.getPreferredName(), indexedDocumentIndex);
|
||||
}
|
||||
if (indexedDocumentType != null) {
|
||||
builder.field(PercolatorQueryParser.INDEXED_DOCUMENT_FIELD_TYPE.getPreferredName(), indexedDocumentType);
|
||||
builder.field(INDEXED_DOCUMENT_FIELD_TYPE.getPreferredName(), indexedDocumentType);
|
||||
}
|
||||
if (indexedDocumentId != null) {
|
||||
builder.field(PercolatorQueryParser.INDEXED_DOCUMENT_FIELD_ID.getPreferredName(), indexedDocumentId);
|
||||
builder.field(INDEXED_DOCUMENT_FIELD_ID.getPreferredName(), indexedDocumentId);
|
||||
}
|
||||
if (indexedDocumentRouting != null) {
|
||||
builder.field(PercolatorQueryParser.INDEXED_DOCUMENT_FIELD_ROUTING.getPreferredName(), indexedDocumentRouting);
|
||||
builder.field(INDEXED_DOCUMENT_FIELD_ROUTING.getPreferredName(), indexedDocumentRouting);
|
||||
}
|
||||
if (indexedDocumentPreference != null) {
|
||||
builder.field(PercolatorQueryParser.INDEXED_DOCUMENT_FIELD_PREFERENCE.getPreferredName(), indexedDocumentPreference);
|
||||
builder.field(INDEXED_DOCUMENT_FIELD_PREFERENCE.getPreferredName(), indexedDocumentPreference);
|
||||
}
|
||||
if (indexedDocumentVersion != null) {
|
||||
builder.field(PercolatorQueryParser.INDEXED_DOCUMENT_FIELD_VERSION.getPreferredName(), indexedDocumentVersion);
|
||||
builder.field(INDEXED_DOCUMENT_FIELD_VERSION.getPreferredName(), indexedDocumentVersion);
|
||||
}
|
||||
}
|
||||
printBoostAndQueryName(builder);
|
||||
builder.endObject();
|
||||
}
|
||||
|
||||
public static PercolatorQueryBuilder fromXContent(QueryParseContext parseContext) throws IOException {
|
||||
XContentParser parser = parseContext.parser();
|
||||
float boost = AbstractQueryBuilder.DEFAULT_BOOST;
|
||||
|
||||
String documentType = null;
|
||||
|
||||
String indexedDocumentIndex = null;
|
||||
String indexedDocumentType = null;
|
||||
String indexedDocumentId = null;
|
||||
String indexedDocumentRouting = null;
|
||||
String indexedDocumentPreference = null;
|
||||
Long indexedDocumentVersion = null;
|
||||
|
||||
BytesReference source = null;
|
||||
|
||||
String queryName = null;
|
||||
String currentFieldName = null;
|
||||
|
||||
XContentParser.Token token;
|
||||
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
|
||||
if (token == XContentParser.Token.FIELD_NAME) {
|
||||
currentFieldName = parser.currentName();
|
||||
} else if (token == XContentParser.Token.START_OBJECT) {
|
||||
if (parseContext.parseFieldMatcher().match(currentFieldName, DOCUMENT_FIELD)) {
|
||||
try (XContentBuilder builder = XContentFactory.jsonBuilder()) {
|
||||
builder.copyCurrentStructure(parser);
|
||||
builder.flush();
|
||||
source = builder.bytes();
|
||||
}
|
||||
} else {
|
||||
throw new ParsingException(parser.getTokenLocation(), "[" + PercolatorQueryBuilder.NAME +
|
||||
"] query does not support [" + token + "]");
|
||||
}
|
||||
} else if (token.isValue()) {
|
||||
if (parseContext.parseFieldMatcher().match(currentFieldName, DOCUMENT_TYPE_FIELD)) {
|
||||
documentType = parser.text();
|
||||
} else if (parseContext.parseFieldMatcher().match(currentFieldName, INDEXED_DOCUMENT_FIELD_INDEX)) {
|
||||
indexedDocumentIndex = parser.text();
|
||||
} else if (parseContext.parseFieldMatcher().match(currentFieldName, INDEXED_DOCUMENT_FIELD_TYPE)) {
|
||||
indexedDocumentType = parser.text();
|
||||
} else if (parseContext.parseFieldMatcher().match(currentFieldName, INDEXED_DOCUMENT_FIELD_ID)) {
|
||||
indexedDocumentId = parser.text();
|
||||
} else if (parseContext.parseFieldMatcher().match(currentFieldName, INDEXED_DOCUMENT_FIELD_ROUTING)) {
|
||||
indexedDocumentRouting = parser.text();
|
||||
} else if (parseContext.parseFieldMatcher().match(currentFieldName, INDEXED_DOCUMENT_FIELD_PREFERENCE)) {
|
||||
indexedDocumentPreference = parser.text();
|
||||
} else if (parseContext.parseFieldMatcher().match(currentFieldName, INDEXED_DOCUMENT_FIELD_VERSION)) {
|
||||
indexedDocumentVersion = parser.longValue();
|
||||
} else if (parseContext.parseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.BOOST_FIELD)) {
|
||||
boost = parser.floatValue();
|
||||
} else if (parseContext.parseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.NAME_FIELD)) {
|
||||
queryName = parser.text();
|
||||
} else {
|
||||
throw new ParsingException(parser.getTokenLocation(), "[" + PercolatorQueryBuilder.NAME +
|
||||
"] query does not support [" + currentFieldName + "]");
|
||||
}
|
||||
} else {
|
||||
throw new ParsingException(parser.getTokenLocation(), "[" + PercolatorQueryBuilder.NAME +
|
||||
"] query does not support [" + token + "]");
|
||||
}
|
||||
}
|
||||
|
||||
if (documentType == null) {
|
||||
throw new IllegalArgumentException("[" + PercolatorQueryBuilder.NAME + "] query is missing required [" +
|
||||
DOCUMENT_TYPE_FIELD.getPreferredName() + "] parameter");
|
||||
}
|
||||
|
||||
PercolatorQueryBuilder queryBuilder;
|
||||
if (source != null) {
|
||||
queryBuilder = new PercolatorQueryBuilder(documentType, source);
|
||||
} else if (indexedDocumentId != null) {
|
||||
queryBuilder = new PercolatorQueryBuilder(documentType, indexedDocumentIndex, indexedDocumentType,
|
||||
indexedDocumentId, indexedDocumentRouting, indexedDocumentPreference, indexedDocumentVersion);
|
||||
} else {
|
||||
throw new IllegalArgumentException("[" + PercolatorQueryBuilder.NAME + "] query, nothing to percolate");
|
||||
}
|
||||
queryBuilder.queryName(queryName);
|
||||
queryBuilder.boost(boost);
|
||||
return queryBuilder;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected PercolatorQueryBuilder doReadFrom(StreamInput in) throws IOException {
|
||||
String docType = in.readString();
|
||||
|
|
|
@ -1,130 +0,0 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.index.query;
|
||||
|
||||
import org.elasticsearch.common.ParseField;
|
||||
import org.elasticsearch.common.ParsingException;
|
||||
import org.elasticsearch.common.bytes.BytesReference;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentFactory;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
public class PercolatorQueryParser implements QueryParser<PercolatorQueryBuilder> {
|
||||
|
||||
public static final ParseField QUERY_NAME_FIELD = new ParseField(PercolatorQueryBuilder.NAME);
|
||||
public static final ParseField DOCUMENT_FIELD = new ParseField("document");
|
||||
public static final ParseField DOCUMENT_TYPE_FIELD = new ParseField("document_type");
|
||||
public static final ParseField INDEXED_DOCUMENT_FIELD_INDEX = new ParseField("index");
|
||||
public static final ParseField INDEXED_DOCUMENT_FIELD_TYPE = new ParseField("type");
|
||||
public static final ParseField INDEXED_DOCUMENT_FIELD_ID = new ParseField("id");
|
||||
public static final ParseField INDEXED_DOCUMENT_FIELD_ROUTING = new ParseField("routing");
|
||||
public static final ParseField INDEXED_DOCUMENT_FIELD_PREFERENCE = new ParseField("preference");
|
||||
public static final ParseField INDEXED_DOCUMENT_FIELD_VERSION = new ParseField("version");
|
||||
|
||||
@Override
|
||||
public PercolatorQueryBuilder fromXContent(QueryParseContext parseContext) throws IOException {
|
||||
XContentParser parser = parseContext.parser();
|
||||
float boost = AbstractQueryBuilder.DEFAULT_BOOST;
|
||||
|
||||
String documentType = null;
|
||||
|
||||
String indexedDocumentIndex = null;
|
||||
String indexedDocumentType = null;
|
||||
String indexedDocumentId = null;
|
||||
String indexedDocumentRouting = null;
|
||||
String indexedDocumentPreference = null;
|
||||
Long indexedDocumentVersion = null;
|
||||
|
||||
BytesReference source = null;
|
||||
|
||||
String queryName = null;
|
||||
String currentFieldName = null;
|
||||
|
||||
XContentParser.Token token;
|
||||
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
|
||||
if (token == XContentParser.Token.FIELD_NAME) {
|
||||
currentFieldName = parser.currentName();
|
||||
} else if (token == XContentParser.Token.START_OBJECT) {
|
||||
if (parseContext.parseFieldMatcher().match(currentFieldName, DOCUMENT_FIELD)) {
|
||||
try (XContentBuilder builder = XContentFactory.jsonBuilder()) {
|
||||
builder.copyCurrentStructure(parser);
|
||||
builder.flush();
|
||||
source = builder.bytes();
|
||||
}
|
||||
} else {
|
||||
throw new ParsingException(parser.getTokenLocation(), "[" + PercolatorQueryBuilder.NAME +
|
||||
"] query does not support [" + token + "]");
|
||||
}
|
||||
} else if (token.isValue()) {
|
||||
if (parseContext.parseFieldMatcher().match(currentFieldName, DOCUMENT_TYPE_FIELD)) {
|
||||
documentType = parser.text();
|
||||
} else if (parseContext.parseFieldMatcher().match(currentFieldName, INDEXED_DOCUMENT_FIELD_INDEX)) {
|
||||
indexedDocumentIndex = parser.text();
|
||||
} else if (parseContext.parseFieldMatcher().match(currentFieldName, INDEXED_DOCUMENT_FIELD_TYPE)) {
|
||||
indexedDocumentType = parser.text();
|
||||
} else if (parseContext.parseFieldMatcher().match(currentFieldName, INDEXED_DOCUMENT_FIELD_ID)) {
|
||||
indexedDocumentId = parser.text();
|
||||
} else if (parseContext.parseFieldMatcher().match(currentFieldName, INDEXED_DOCUMENT_FIELD_ROUTING)) {
|
||||
indexedDocumentRouting = parser.text();
|
||||
} else if (parseContext.parseFieldMatcher().match(currentFieldName, INDEXED_DOCUMENT_FIELD_PREFERENCE)) {
|
||||
indexedDocumentPreference = parser.text();
|
||||
} else if (parseContext.parseFieldMatcher().match(currentFieldName, INDEXED_DOCUMENT_FIELD_VERSION)) {
|
||||
indexedDocumentVersion = parser.longValue();
|
||||
} else if (parseContext.parseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.BOOST_FIELD)) {
|
||||
boost = parser.floatValue();
|
||||
} else if (parseContext.parseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.NAME_FIELD)) {
|
||||
queryName = parser.text();
|
||||
} else {
|
||||
throw new ParsingException(parser.getTokenLocation(), "[" + PercolatorQueryBuilder.NAME +
|
||||
"] query does not support [" + currentFieldName + "]");
|
||||
}
|
||||
} else {
|
||||
throw new ParsingException(parser.getTokenLocation(), "[" + PercolatorQueryBuilder.NAME +
|
||||
"] query does not support [" + token + "]");
|
||||
}
|
||||
}
|
||||
|
||||
if (documentType == null) {
|
||||
throw new IllegalArgumentException("[" + PercolatorQueryBuilder.NAME + "] query is missing required [" +
|
||||
DOCUMENT_TYPE_FIELD.getPreferredName() + "] parameter");
|
||||
}
|
||||
|
||||
PercolatorQueryBuilder queryBuilder;
|
||||
if (source != null) {
|
||||
queryBuilder = new PercolatorQueryBuilder(documentType, source);
|
||||
} else if (indexedDocumentId != null) {
|
||||
queryBuilder = new PercolatorQueryBuilder(documentType, indexedDocumentIndex, indexedDocumentType,
|
||||
indexedDocumentId, indexedDocumentRouting, indexedDocumentPreference, indexedDocumentVersion);
|
||||
} else {
|
||||
throw new IllegalArgumentException("[" + PercolatorQueryBuilder.NAME + "] query, nothing to percolate");
|
||||
}
|
||||
queryBuilder.queryName(queryName);
|
||||
queryBuilder.boost(boost);
|
||||
return queryBuilder;
|
||||
}
|
||||
|
||||
@Override
|
||||
public PercolatorQueryBuilder getBuilderPrototype() {
|
||||
return PercolatorQueryBuilder.PROTO;
|
||||
}
|
||||
|
||||
}
|
|
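The percolator move above is the largest of these: fromXContent accepts either an inline document under "document" or a reference to an indexed one, and it insists on document_type before constructing anything. The two construction paths it ends up on, with illustrative values and a placeholder documentBytes variable assumed to hold the JSON document as a BytesReference:

    // Path 1: inline document captured from the "document" object.
    PercolatorQueryBuilder inline = new PercolatorQueryBuilder("doc_type", documentBytes);

    // Path 2: document fetched via "index"/"type"/"id"; routing, preference and version are
    // optional and may stay null, exactly as in fromXContent above.
    PercolatorQueryBuilder stored = new PercolatorQueryBuilder("doc_type", "my_index",
            "my_type", "1", null, null, null);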
@ -25,26 +25,36 @@ import org.apache.lucene.search.Query;
|
|||
import org.apache.lucene.search.RandomAccessWeight;
|
||||
import org.apache.lucene.search.Weight;
|
||||
import org.apache.lucene.util.Bits;
|
||||
import org.elasticsearch.common.ParseField;
|
||||
import org.elasticsearch.common.ParsingException;
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.script.LeafSearchScript;
|
||||
import org.elasticsearch.script.Script;
|
||||
import org.elasticsearch.script.Script.ScriptField;
|
||||
import org.elasticsearch.script.ScriptParameterParser.ScriptParameterValue;
|
||||
import org.elasticsearch.script.ScriptContext;
|
||||
import org.elasticsearch.script.ScriptParameterParser;
|
||||
import org.elasticsearch.script.ScriptService;
|
||||
import org.elasticsearch.script.SearchScript;
|
||||
import org.elasticsearch.search.lookup.SearchLookup;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Collections;
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
import java.util.Objects;
|
||||
|
||||
public class ScriptQueryBuilder extends AbstractQueryBuilder<ScriptQueryBuilder> {
|
||||
|
||||
public static final String NAME = "script";
|
||||
public static final ParseField QUERY_NAME_FIELD = new ParseField(NAME);
|
||||
|
||||
static final ScriptQueryBuilder PROTOTYPE = new ScriptQueryBuilder(new Script(""));
|
||||
public static final ScriptQueryBuilder PROTOTYPE = new ScriptQueryBuilder(new Script(""));
|
||||
|
||||
private static final ParseField PARAMS_FIELD = new ParseField("params");
|
||||
|
||||
private final Script script;
|
||||
|
||||
|
@ -72,6 +82,66 @@ public class ScriptQueryBuilder extends AbstractQueryBuilder<ScriptQueryBuilder>
|
|||
builder.endObject();
|
||||
}
|
||||
|
||||
public static ScriptQueryBuilder fromXContent(QueryParseContext parseContext) throws IOException {
|
||||
XContentParser parser = parseContext.parser();
|
||||
ScriptParameterParser scriptParameterParser = new ScriptParameterParser();
|
||||
|
||||
// also, when caching, since its isCacheable is false, will result in loading all bit set...
|
||||
Script script = null;
|
||||
Map<String, Object> params = null;
|
||||
|
||||
float boost = AbstractQueryBuilder.DEFAULT_BOOST;
|
||||
String queryName = null;
|
||||
|
||||
XContentParser.Token token;
|
||||
String currentFieldName = null;
|
||||
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
|
||||
if (token == XContentParser.Token.FIELD_NAME) {
|
||||
currentFieldName = parser.currentName();
|
||||
} else if (parseContext.isDeprecatedSetting(currentFieldName)) {
|
||||
// skip
|
||||
} else if (token == XContentParser.Token.START_OBJECT) {
|
||||
if (parseContext.parseFieldMatcher().match(currentFieldName, ScriptField.SCRIPT)) {
|
||||
script = Script.parse(parser, parseContext.parseFieldMatcher());
|
||||
} else if (parseContext.parseFieldMatcher().match(currentFieldName, PARAMS_FIELD)) {
|
||||
// TODO remove in 3.0 (here to support old script APIs)
|
||||
params = parser.map();
|
||||
} else {
|
||||
throw new ParsingException(parser.getTokenLocation(), "[script] query does not support [" + currentFieldName + "]");
|
||||
}
|
||||
} else if (token.isValue()) {
|
||||
if (parseContext.parseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.NAME_FIELD)) {
|
||||
queryName = parser.text();
|
||||
} else if (parseContext.parseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.BOOST_FIELD)) {
|
||||
boost = parser.floatValue();
|
||||
} else if (!scriptParameterParser.token(currentFieldName, token, parser, parseContext.parseFieldMatcher())) {
|
||||
throw new ParsingException(parser.getTokenLocation(), "[script] query does not support [" + currentFieldName + "]");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (script == null) { // Didn't find anything using the new API so try using the old one instead
|
||||
ScriptParameterValue scriptValue = scriptParameterParser.getDefaultScriptParameterValue();
|
||||
if (scriptValue != null) {
|
||||
if (params == null) {
|
||||
params = new HashMap<>();
|
||||
}
|
||||
script = new Script(scriptValue.script(), scriptValue.scriptType(), scriptParameterParser.lang(), params);
|
||||
}
|
||||
} else if (params != null) {
|
||||
throw new ParsingException(parser.getTokenLocation(),
|
||||
"script params must be specified inside script object in a [script] filter");
|
||||
}
|
||||
|
||||
if (script == null) {
|
||||
throw new ParsingException(parser.getTokenLocation(), "script must be provided with a [script] filter");
|
||||
}
|
||||
|
||||
return new ScriptQueryBuilder(script)
|
||||
.boost(boost)
|
||||
.queryName(queryName);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Query doToQuery(QueryShardContext context) throws IOException {
|
||||
return new ScriptQuery(script, context.getScriptService(), context.lookup());
|
||||
|
|
|
@ -1,107 +0,0 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.index.query;
|
||||
|
||||
import org.elasticsearch.common.ParseField;
|
||||
import org.elasticsearch.common.ParsingException;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.script.Script;
|
||||
import org.elasticsearch.script.Script.ScriptField;
|
||||
import org.elasticsearch.script.ScriptParameterParser;
|
||||
import org.elasticsearch.script.ScriptParameterParser.ScriptParameterValue;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
|
||||
/**
|
||||
* Parser for script query
|
||||
*/
|
||||
public class ScriptQueryParser implements QueryParser<ScriptQueryBuilder> {
|
||||
|
||||
public static final ParseField QUERY_NAME_FIELD = new ParseField(ScriptQueryBuilder.NAME);
|
||||
public static final ParseField PARAMS_FIELD = new ParseField("params");
|
||||
|
||||
@Override
|
||||
public ScriptQueryBuilder fromXContent(QueryParseContext parseContext) throws IOException {
|
||||
XContentParser parser = parseContext.parser();
|
||||
ScriptParameterParser scriptParameterParser = new ScriptParameterParser();
|
||||
|
||||
// also, when caching, since its isCacheable is false, will result in loading all bit set...
|
||||
Script script = null;
|
||||
Map<String, Object> params = null;
|
||||
|
||||
float boost = AbstractQueryBuilder.DEFAULT_BOOST;
|
||||
String queryName = null;
|
||||
|
||||
XContentParser.Token token;
|
||||
String currentFieldName = null;
|
||||
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
|
||||
if (token == XContentParser.Token.FIELD_NAME) {
|
||||
currentFieldName = parser.currentName();
|
||||
} else if (parseContext.isDeprecatedSetting(currentFieldName)) {
|
||||
// skip
|
||||
} else if (token == XContentParser.Token.START_OBJECT) {
|
||||
if (parseContext.parseFieldMatcher().match(currentFieldName, ScriptField.SCRIPT)) {
|
||||
script = Script.parse(parser, parseContext.parseFieldMatcher());
|
||||
} else if (parseContext.parseFieldMatcher().match(currentFieldName, PARAMS_FIELD)) {
|
||||
// TODO remove in 3.0 (here to support old script APIs)
|
||||
params = parser.map();
|
||||
} else {
|
||||
throw new ParsingException(parser.getTokenLocation(), "[script] query does not support [" + currentFieldName + "]");
|
||||
}
|
||||
} else if (token.isValue()) {
|
||||
if (parseContext.parseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.NAME_FIELD)) {
|
||||
queryName = parser.text();
|
||||
} else if (parseContext.parseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.BOOST_FIELD)) {
|
||||
boost = parser.floatValue();
|
||||
} else if (!scriptParameterParser.token(currentFieldName, token, parser, parseContext.parseFieldMatcher())) {
|
||||
throw new ParsingException(parser.getTokenLocation(), "[script] query does not support [" + currentFieldName + "]");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (script == null) { // Didn't find anything using the new API so try using the old one instead
|
||||
ScriptParameterValue scriptValue = scriptParameterParser.getDefaultScriptParameterValue();
|
||||
if (scriptValue != null) {
|
||||
if (params == null) {
|
||||
params = new HashMap<>();
|
||||
}
|
||||
script = new Script(scriptValue.script(), scriptValue.scriptType(), scriptParameterParser.lang(), params);
|
||||
}
|
||||
} else if (params != null) {
|
||||
throw new ParsingException(parser.getTokenLocation(),
|
||||
"script params must be specified inside script object in a [script] filter");
|
||||
}
|
||||
|
||||
if (script == null) {
|
||||
throw new ParsingException(parser.getTokenLocation(), "script must be provided with a [script] filter");
|
||||
}
|
||||
|
||||
return new ScriptQueryBuilder(script)
|
||||
.boost(boost)
|
||||
.queryName(queryName);
|
||||
}
|
||||
|
||||
@Override
|
||||
public ScriptQueryBuilder getBuilderPrototype() {
|
||||
return ScriptQueryBuilder.PROTOTYPE;
|
||||
}
|
||||
}
|
|
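For the script query, the fromXContent moved above keeps both syntaxes: the structured script object matched by ScriptField.SCRIPT and the legacy flat form routed through ScriptParameterParser, including the top-level "params" that the TODO marks for removal. Roughly, both of these bodies still parse (the exact keys inside the script object come from Script.parse and are only sketched here):

    // Structured form:  { "script" : { "script" : { "inline" : "doc['num'].value > 1" } } }
    // Legacy form:      { "script" : { "script" : "doc['num'].value > 1", "params" : { } } }

    // Programmatic equivalent, mirroring the PROTOTYPE above; the script source is illustrative.
    ScriptQueryBuilder query = new ScriptQueryBuilder(new Script("doc['num'].value > 1"));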
@ -22,12 +22,15 @@ package org.elasticsearch.index.query;
|
|||
import org.apache.lucene.analysis.Analyzer;
|
||||
import org.apache.lucene.search.BooleanQuery;
|
||||
import org.apache.lucene.search.Query;
|
||||
import org.elasticsearch.common.ParseField;
|
||||
import org.elasticsearch.common.ParsingException;
|
||||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.common.lucene.search.Queries;
|
||||
import org.elasticsearch.common.regex.Regex;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.index.mapper.MappedFieldType;
|
||||
import org.elasticsearch.index.query.SimpleQueryParser.Settings;
|
||||
|
||||
|
@ -40,7 +43,33 @@ import java.util.TreeMap;
|
|||
|
||||
/**
|
||||
* SimpleQuery is a query parser that acts similar to a query_string query, but
|
||||
* won't throw exceptions for any weird string syntax.
|
||||
* won't throw exceptions for any weird string syntax. It supports
|
||||
* the following:
|
||||
* <ul>
|
||||
* <li>'{@code +}' specifies {@code AND} operation: <tt>token1+token2</tt>
|
||||
* <li>'{@code |}' specifies {@code OR} operation: <tt>token1|token2</tt>
|
||||
* <li>'{@code -}' negates a single token: <tt>-token0</tt>
|
||||
* <li>'{@code "}' creates phrases of terms: <tt>"term1 term2 ..."</tt>
|
||||
* <li>'{@code *}' at the end of terms specifies prefix query: <tt>term*</tt>
|
||||
* <li>'{@code (}' and '{@code)}' specifies precedence: <tt>token1 + (token2 | token3)</tt>
|
||||
* <li>'{@code ~}N' at the end of terms specifies fuzzy query: <tt>term~1</tt>
|
||||
* <li>'{@code ~}N' at the end of phrases specifies near/slop query: <tt>"term1 term2"~5</tt>
|
||||
* </ul>
|
||||
* <p>
|
||||
* See: {@link SimpleQueryParser} for more information.
|
||||
* <p>
|
||||
* This query supports these options:
|
||||
* <p>
|
||||
* Required:
|
||||
* {@code query} - query text to be converted into other queries
|
||||
* <p>
|
||||
* Optional:
|
||||
* {@code analyzer} - analyzer to be used for analyzing tokens to determine
|
||||
* which kind of query they should be converted into, defaults to "standard"
|
||||
* {@code default_operator} - default operator for boolean queries, defaults
|
||||
* to OR
|
||||
* {@code fields} - fields to search, defaults to _all if not set, allows
|
||||
* boosting a field with ^n
|
||||
*
|
||||
* For more detailed explanation of the query string syntax see also the <a
|
||||
* href=
|
||||
|
@ -60,10 +89,23 @@ public class SimpleQueryStringBuilder extends AbstractQueryBuilder<SimpleQuerySt
|
|||
public static final Operator DEFAULT_OPERATOR = Operator.OR;
|
||||
/** Default for search flags to use. */
|
||||
public static final int DEFAULT_FLAGS = SimpleQueryStringFlag.ALL.value;
|
||||
|
||||
/** Name for (de-)serialization. */
|
||||
public static final String NAME = "simple_query_string";
|
||||
public static final ParseField QUERY_NAME_FIELD = new ParseField(NAME);
|
||||
|
||||
static final SimpleQueryStringBuilder PROTOTYPE = new SimpleQueryStringBuilder("");
|
||||
public static final SimpleQueryStringBuilder PROTOTYPE = new SimpleQueryStringBuilder("");
|
||||
|
||||
private static final ParseField MINIMUM_SHOULD_MATCH_FIELD = new ParseField("minimum_should_match");
|
||||
private static final ParseField ANALYZE_WILDCARD_FIELD = new ParseField("analyze_wildcard");
|
||||
private static final ParseField LENIENT_FIELD = new ParseField("lenient");
|
||||
private static final ParseField LOWERCASE_EXPANDED_TERMS_FIELD = new ParseField("lowercase_expanded_terms");
|
||||
private static final ParseField LOCALE_FIELD = new ParseField("locale");
|
||||
private static final ParseField FLAGS_FIELD = new ParseField("flags");
|
||||
private static final ParseField DEFAULT_OPERATOR_FIELD = new ParseField("default_operator");
|
||||
private static final ParseField ANALYZER_FIELD = new ParseField("analyzer");
|
||||
private static final ParseField QUERY_FIELD = new ParseField("query");
|
||||
private static final ParseField FIELDS_FIELD = new ParseField("fields");
|
||||
|
||||
/** Query text to parse. */
|
||||
private final String queryText;
|
||||
|
@ -306,10 +348,10 @@ public class SimpleQueryStringBuilder extends AbstractQueryBuilder<SimpleQuerySt
|
|||
protected void doXContent(XContentBuilder builder, Params params) throws IOException {
|
||||
builder.startObject(NAME);
|
||||
|
||||
builder.field(SimpleQueryStringParser.QUERY_FIELD.getPreferredName(), queryText);
|
||||
builder.field(QUERY_FIELD.getPreferredName(), queryText);
|
||||
|
||||
if (fieldsAndWeights.size() > 0) {
|
||||
builder.startArray(SimpleQueryStringParser.FIELDS_FIELD.getPreferredName());
|
||||
builder.startArray(FIELDS_FIELD.getPreferredName());
|
||||
for (Map.Entry<String, Float> entry : fieldsAndWeights.entrySet()) {
|
||||
builder.value(entry.getKey() + "^" + entry.getValue());
|
||||
}
|
||||
|
@ -317,24 +359,124 @@ public class SimpleQueryStringBuilder extends AbstractQueryBuilder<SimpleQuerySt
|
|||
}
|
||||
|
||||
if (analyzer != null) {
|
||||
builder.field(SimpleQueryStringParser.ANALYZER_FIELD.getPreferredName(), analyzer);
|
||||
builder.field(ANALYZER_FIELD.getPreferredName(), analyzer);
|
||||
}
|
||||
|
||||
builder.field(SimpleQueryStringParser.FLAGS_FIELD.getPreferredName(), flags);
|
||||
builder.field(SimpleQueryStringParser.DEFAULT_OPERATOR_FIELD.getPreferredName(), defaultOperator.name().toLowerCase(Locale.ROOT));
|
||||
builder.field(SimpleQueryStringParser.LOWERCASE_EXPANDED_TERMS_FIELD.getPreferredName(), settings.lowercaseExpandedTerms());
|
||||
builder.field(SimpleQueryStringParser.LENIENT_FIELD.getPreferredName(), settings.lenient());
|
||||
builder.field(SimpleQueryStringParser.ANALYZE_WILDCARD_FIELD.getPreferredName(), settings.analyzeWildcard());
|
||||
builder.field(SimpleQueryStringParser.LOCALE_FIELD.getPreferredName(), (settings.locale().toLanguageTag()));
|
||||
builder.field(FLAGS_FIELD.getPreferredName(), flags);
|
||||
builder.field(DEFAULT_OPERATOR_FIELD.getPreferredName(), defaultOperator.name().toLowerCase(Locale.ROOT));
|
||||
builder.field(LOWERCASE_EXPANDED_TERMS_FIELD.getPreferredName(), settings.lowercaseExpandedTerms());
|
||||
builder.field(LENIENT_FIELD.getPreferredName(), settings.lenient());
|
||||
builder.field(ANALYZE_WILDCARD_FIELD.getPreferredName(), settings.analyzeWildcard());
|
||||
builder.field(LOCALE_FIELD.getPreferredName(), (settings.locale().toLanguageTag()));
|
||||
|
||||
if (minimumShouldMatch != null) {
|
||||
builder.field(SimpleQueryStringParser.MINIMUM_SHOULD_MATCH_FIELD.getPreferredName(), minimumShouldMatch);
|
||||
builder.field(MINIMUM_SHOULD_MATCH_FIELD.getPreferredName(), minimumShouldMatch);
|
||||
}
|
||||
|
||||
printBoostAndQueryName(builder);
|
||||
builder.endObject();
|
||||
}
|
||||
|
||||
public static SimpleQueryStringBuilder fromXContent(QueryParseContext parseContext) throws IOException {
|
||||
XContentParser parser = parseContext.parser();
|
||||
|
||||
String currentFieldName = null;
|
||||
String queryBody = null;
|
||||
float boost = AbstractQueryBuilder.DEFAULT_BOOST;
|
||||
String queryName = null;
|
||||
String minimumShouldMatch = null;
|
||||
Map<String, Float> fieldsAndWeights = new HashMap<>();
|
||||
Operator defaultOperator = null;
|
||||
String analyzerName = null;
|
||||
int flags = SimpleQueryStringFlag.ALL.value();
|
||||
boolean lenient = SimpleQueryStringBuilder.DEFAULT_LENIENT;
|
||||
boolean lowercaseExpandedTerms = SimpleQueryStringBuilder.DEFAULT_LOWERCASE_EXPANDED_TERMS;
|
||||
boolean analyzeWildcard = SimpleQueryStringBuilder.DEFAULT_ANALYZE_WILDCARD;
|
||||
Locale locale = null;
|
||||
|
||||
XContentParser.Token token;
|
||||
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
|
||||
if (token == XContentParser.Token.FIELD_NAME) {
|
||||
currentFieldName = parser.currentName();
|
||||
} else if (token == XContentParser.Token.START_ARRAY) {
|
||||
if (parseContext.parseFieldMatcher().match(currentFieldName, FIELDS_FIELD)) {
|
||||
while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
|
||||
String fField = null;
|
||||
float fBoost = 1;
|
||||
char[] text = parser.textCharacters();
|
||||
int end = parser.textOffset() + parser.textLength();
|
||||
for (int i = parser.textOffset(); i < end; i++) {
|
||||
if (text[i] == '^') {
|
||||
int relativeLocation = i - parser.textOffset();
|
||||
fField = new String(text, parser.textOffset(), relativeLocation);
|
||||
fBoost = Float.parseFloat(new String(text, i + 1, parser.textLength() - relativeLocation - 1));
|
||||
break;
|
||||
}
|
||||
}
|
||||
if (fField == null) {
|
||||
fField = parser.text();
|
||||
}
|
||||
fieldsAndWeights.put(fField, fBoost);
|
||||
}
|
||||
} else {
|
||||
throw new ParsingException(parser.getTokenLocation(), "[" + SimpleQueryStringBuilder.NAME +
|
||||
"] query does not support [" + currentFieldName + "]");
|
||||
}
|
||||
} else if (token.isValue()) {
|
||||
if (parseContext.parseFieldMatcher().match(currentFieldName, QUERY_FIELD)) {
|
||||
queryBody = parser.text();
|
||||
} else if (parseContext.parseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.BOOST_FIELD)) {
|
||||
boost = parser.floatValue();
|
||||
} else if (parseContext.parseFieldMatcher().match(currentFieldName, ANALYZER_FIELD)) {
|
||||
analyzerName = parser.text();
|
||||
} else if (parseContext.parseFieldMatcher().match(currentFieldName, DEFAULT_OPERATOR_FIELD)) {
|
||||
defaultOperator = Operator.fromString(parser.text());
|
||||
} else if (parseContext.parseFieldMatcher().match(currentFieldName, FLAGS_FIELD)) {
|
||||
if (parser.currentToken() != XContentParser.Token.VALUE_NUMBER) {
|
||||
// Possible options are:
|
||||
// ALL, NONE, AND, OR, PREFIX, PHRASE, PRECEDENCE, ESCAPE, WHITESPACE, FUZZY, NEAR, SLOP
|
||||
flags = SimpleQueryStringFlag.resolveFlags(parser.text());
|
||||
} else {
|
||||
flags = parser.intValue();
|
||||
if (flags < 0) {
|
||||
flags = SimpleQueryStringFlag.ALL.value();
|
||||
}
|
||||
}
|
||||
} else if (parseContext.parseFieldMatcher().match(currentFieldName, LOCALE_FIELD)) {
|
||||
String localeStr = parser.text();
|
||||
locale = Locale.forLanguageTag(localeStr);
|
||||
} else if (parseContext.parseFieldMatcher().match(currentFieldName, LOWERCASE_EXPANDED_TERMS_FIELD)) {
|
||||
lowercaseExpandedTerms = parser.booleanValue();
|
||||
} else if (parseContext.parseFieldMatcher().match(currentFieldName, LENIENT_FIELD)) {
|
||||
lenient = parser.booleanValue();
|
||||
} else if (parseContext.parseFieldMatcher().match(currentFieldName, ANALYZE_WILDCARD_FIELD)) {
|
||||
analyzeWildcard = parser.booleanValue();
|
||||
} else if (parseContext.parseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.NAME_FIELD)) {
|
||||
queryName = parser.text();
|
||||
} else if (parseContext.parseFieldMatcher().match(currentFieldName, MINIMUM_SHOULD_MATCH_FIELD)) {
|
||||
minimumShouldMatch = parser.textOrNull();
|
||||
} else {
|
||||
throw new ParsingException(parser.getTokenLocation(), "[" + SimpleQueryStringBuilder.NAME +
|
||||
"] unsupported field [" + parser.currentName() + "]");
|
||||
}
|
||||
} else {
|
||||
throw new ParsingException(parser.getTokenLocation(), "[" + SimpleQueryStringBuilder.NAME +
|
||||
"] unknown token [" + token + "] after [" + currentFieldName + "]");
|
||||
}
|
||||
}
|
||||
|
||||
// Query text is required
|
||||
if (queryBody == null) {
|
||||
throw new ParsingException(parser.getTokenLocation(), "[" + SimpleQueryStringBuilder.NAME + "] query text missing");
|
||||
}
|
||||
|
||||
SimpleQueryStringBuilder qb = new SimpleQueryStringBuilder(queryBody);
|
||||
qb.boost(boost).fields(fieldsAndWeights).analyzer(analyzerName).queryName(queryName).minimumShouldMatch(minimumShouldMatch);
|
||||
qb.flags(flags).defaultOperator(defaultOperator).locale(locale).lowercaseExpandedTerms(lowercaseExpandedTerms);
|
||||
qb.lenient(lenient).analyzeWildcard(analyzeWildcard).boost(boost);
|
||||
return qb;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getWriteableName() {
|
||||
return NAME;
|
||||
|
|
|
@ -1,180 +0,0 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.index.query;
|
||||
|
||||
import org.elasticsearch.common.ParseField;
|
||||
import org.elasticsearch.common.ParsingException;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.HashMap;
|
||||
import java.util.Locale;
|
||||
import java.util.Map;
|
||||
|
||||
/**
|
||||
* SimpleQueryStringParser is a query parser that acts similar to a query_string
|
||||
* query, but won't throw exceptions for any weird string syntax. It supports
|
||||
* the following:
|
||||
* <ul>
|
||||
* <li>'{@code +}' specifies {@code AND} operation: <tt>token1+token2</tt>
|
||||
* <li>'{@code |}' specifies {@code OR} operation: <tt>token1|token2</tt>
|
||||
* <li>'{@code -}' negates a single token: <tt>-token0</tt>
|
||||
* <li>'{@code "}' creates phrases of terms: <tt>"term1 term2 ..."</tt>
|
||||
* <li>'{@code *}' at the end of terms specifies prefix query: <tt>term*</tt>
|
||||
* <li>'{@code (}' and '{@code)}' specifies precedence: <tt>token1 + (token2 | token3)</tt>
|
||||
* <li>'{@code ~}N' at the end of terms specifies fuzzy query: <tt>term~1</tt>
|
||||
* <li>'{@code ~}N' at the end of phrases specifies near/slop query: <tt>"term1 term2"~5</tt>
|
||||
* </ul>
|
||||
* <p>
|
||||
* See: {@link SimpleQueryParser} for more information.
|
||||
* <p>
|
||||
* This query supports these options:
|
||||
* <p>
|
||||
* Required:
|
||||
* {@code query} - query text to be converted into other queries
|
||||
* <p>
|
||||
* Optional:
|
||||
* {@code analyzer} - analyzer to be used for analyzing tokens to determine
|
||||
* which kind of query they should be converted into, defaults to "standard"
|
||||
* {@code default_operator} - default operator for boolean queries, defaults
|
||||
* to OR
|
||||
* {@code fields} - fields to search, defaults to _all if not set, allows
|
||||
* boosting a field with ^n
|
||||
*/
|
||||
public class SimpleQueryStringParser implements QueryParser<SimpleQueryStringBuilder> {
|
||||
|
||||
public static final ParseField QUERY_NAME_FIELD = new ParseField(SimpleQueryStringBuilder.NAME);
|
||||
public static final ParseField MINIMUM_SHOULD_MATCH_FIELD = new ParseField("minimum_should_match");
|
||||
public static final ParseField ANALYZE_WILDCARD_FIELD = new ParseField("analyze_wildcard");
|
||||
public static final ParseField LENIENT_FIELD = new ParseField("lenient");
|
||||
public static final ParseField LOWERCASE_EXPANDED_TERMS_FIELD = new ParseField("lowercase_expanded_terms");
|
||||
public static final ParseField LOCALE_FIELD = new ParseField("locale");
|
||||
public static final ParseField FLAGS_FIELD = new ParseField("flags");
|
||||
public static final ParseField DEFAULT_OPERATOR_FIELD = new ParseField("default_operator");
|
||||
public static final ParseField ANALYZER_FIELD = new ParseField("analyzer");
|
||||
public static final ParseField QUERY_FIELD = new ParseField("query");
|
||||
public static final ParseField FIELDS_FIELD = new ParseField("fields");
|
||||
|
||||
@Override
|
||||
public SimpleQueryStringBuilder fromXContent(QueryParseContext parseContext) throws IOException {
|
||||
XContentParser parser = parseContext.parser();
|
||||
|
||||
String currentFieldName = null;
|
||||
String queryBody = null;
|
||||
float boost = AbstractQueryBuilder.DEFAULT_BOOST;
|
||||
String queryName = null;
|
||||
String minimumShouldMatch = null;
|
||||
Map<String, Float> fieldsAndWeights = new HashMap<>();
|
||||
Operator defaultOperator = null;
|
||||
String analyzerName = null;
|
||||
int flags = SimpleQueryStringFlag.ALL.value();
|
||||
boolean lenient = SimpleQueryStringBuilder.DEFAULT_LENIENT;
|
||||
boolean lowercaseExpandedTerms = SimpleQueryStringBuilder.DEFAULT_LOWERCASE_EXPANDED_TERMS;
|
||||
boolean analyzeWildcard = SimpleQueryStringBuilder.DEFAULT_ANALYZE_WILDCARD;
|
||||
Locale locale = null;
|
||||
|
||||
XContentParser.Token token;
|
||||
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
|
||||
if (token == XContentParser.Token.FIELD_NAME) {
|
||||
currentFieldName = parser.currentName();
|
||||
} else if (token == XContentParser.Token.START_ARRAY) {
|
||||
if (parseContext.parseFieldMatcher().match(currentFieldName, FIELDS_FIELD)) {
|
||||
while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
|
||||
String fField = null;
|
||||
float fBoost = 1;
|
||||
char[] text = parser.textCharacters();
|
||||
int end = parser.textOffset() + parser.textLength();
|
||||
for (int i = parser.textOffset(); i < end; i++) {
|
||||
if (text[i] == '^') {
|
||||
int relativeLocation = i - parser.textOffset();
|
||||
fField = new String(text, parser.textOffset(), relativeLocation);
|
||||
fBoost = Float.parseFloat(new String(text, i + 1, parser.textLength() - relativeLocation - 1));
|
||||
break;
|
||||
}
|
||||
}
|
||||
if (fField == null) {
|
||||
fField = parser.text();
|
||||
}
|
||||
fieldsAndWeights.put(fField, fBoost);
|
||||
}
|
||||
} else {
|
||||
throw new ParsingException(parser.getTokenLocation(), "[" + SimpleQueryStringBuilder.NAME +
|
||||
"] query does not support [" + currentFieldName + "]");
|
||||
}
|
||||
} else if (token.isValue()) {
|
||||
if (parseContext.parseFieldMatcher().match(currentFieldName, QUERY_FIELD)) {
|
||||
queryBody = parser.text();
|
||||
} else if (parseContext.parseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.BOOST_FIELD)) {
|
||||
boost = parser.floatValue();
|
||||
} else if (parseContext.parseFieldMatcher().match(currentFieldName, ANALYZER_FIELD)) {
|
||||
analyzerName = parser.text();
|
||||
} else if (parseContext.parseFieldMatcher().match(currentFieldName, DEFAULT_OPERATOR_FIELD)) {
|
||||
defaultOperator = Operator.fromString(parser.text());
|
||||
} else if (parseContext.parseFieldMatcher().match(currentFieldName, FLAGS_FIELD)) {
|
||||
if (parser.currentToken() != XContentParser.Token.VALUE_NUMBER) {
|
||||
// Possible options are:
|
||||
// ALL, NONE, AND, OR, PREFIX, PHRASE, PRECEDENCE, ESCAPE, WHITESPACE, FUZZY, NEAR, SLOP
|
||||
flags = SimpleQueryStringFlag.resolveFlags(parser.text());
|
||||
} else {
|
||||
flags = parser.intValue();
|
||||
if (flags < 0) {
|
||||
flags = SimpleQueryStringFlag.ALL.value();
|
||||
}
|
||||
}
|
||||
} else if (parseContext.parseFieldMatcher().match(currentFieldName, LOCALE_FIELD)) {
|
||||
String localeStr = parser.text();
|
||||
locale = Locale.forLanguageTag(localeStr);
|
||||
} else if (parseContext.parseFieldMatcher().match(currentFieldName, LOWERCASE_EXPANDED_TERMS_FIELD)) {
|
||||
lowercaseExpandedTerms = parser.booleanValue();
|
||||
} else if (parseContext.parseFieldMatcher().match(currentFieldName, LENIENT_FIELD)) {
|
||||
lenient = parser.booleanValue();
|
||||
} else if (parseContext.parseFieldMatcher().match(currentFieldName, ANALYZE_WILDCARD_FIELD)) {
|
||||
analyzeWildcard = parser.booleanValue();
|
||||
} else if (parseContext.parseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.NAME_FIELD)) {
|
||||
queryName = parser.text();
|
||||
} else if (parseContext.parseFieldMatcher().match(currentFieldName, MINIMUM_SHOULD_MATCH_FIELD)) {
|
||||
minimumShouldMatch = parser.textOrNull();
|
||||
} else {
|
||||
throw new ParsingException(parser.getTokenLocation(), "[" + SimpleQueryStringBuilder.NAME +
|
||||
"] unsupported field [" + parser.currentName() + "]");
|
||||
}
|
||||
} else {
|
||||
throw new ParsingException(parser.getTokenLocation(), "[" + SimpleQueryStringBuilder.NAME +
|
||||
"] unknown token [" + token + "] after [" + currentFieldName + "]");
|
||||
}
|
||||
}
|
||||
|
||||
// Query text is required
|
||||
if (queryBody == null) {
|
||||
throw new ParsingException(parser.getTokenLocation(), "[" + SimpleQueryStringBuilder.NAME + "] query text missing");
|
||||
}
|
||||
|
||||
SimpleQueryStringBuilder qb = new SimpleQueryStringBuilder(queryBody);
|
||||
qb.boost(boost).fields(fieldsAndWeights).analyzer(analyzerName).queryName(queryName).minimumShouldMatch(minimumShouldMatch);
|
||||
qb.flags(flags).defaultOperator(defaultOperator).locale(locale).lowercaseExpandedTerms(lowercaseExpandedTerms);
|
||||
qb.lenient(lenient).analyzeWildcard(analyzeWildcard).boost(boost);
|
||||
return qb;
|
||||
}
|
||||
|
||||
@Override
|
||||
public SimpleQueryStringBuilder getBuilderPrototype() {
|
||||
return SimpleQueryStringBuilder.PROTOTYPE;
|
||||
}
|
||||
}
|
|
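The simple_query_string move also carries the long syntax javadoc onto the builder, so the documentation now sits next to the code that parses it. A usage sketch built only from setters that fromXContent itself chains above (field names and query text are illustrative; assumes the usual java.util.Map/HashMap imports):

    Map<String, Float> fields = new HashMap<>();
    fields.put("title", 5.0f);   // the JSON array form would spell this "title^5"
    fields.put("body", 1.0f);

    SimpleQueryStringBuilder query =
            new SimpleQueryStringBuilder("\"fried eggs\" +(eggplant | potato) -frittata")
                    .fields(fields)
                    .defaultOperator(Operator.AND)
                    .analyzeWildcard(true)
                    .minimumShouldMatch("2");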
@ -19,6 +19,8 @@
|
|||
package org.elasticsearch.index.query;
|
||||
|
||||
import org.apache.lucene.search.Query;
|
||||
import org.elasticsearch.common.ParseField;
|
||||
import org.elasticsearch.common.ParseFieldMatcher;
|
||||
import org.elasticsearch.common.bytes.BytesReference;
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
|
@ -32,6 +34,7 @@ import org.elasticsearch.script.Template;
|
|||
|
||||
import java.io.IOException;
|
||||
import java.util.Collections;
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
import java.util.Objects;
|
||||
|
||||
|
@ -39,14 +42,21 @@ import java.util.Objects;
|
|||
* Facilitates creating template query requests.
|
||||
* */
|
||||
public class TemplateQueryBuilder extends AbstractQueryBuilder<TemplateQueryBuilder> {
|
||||
|
||||
/** Name to reference this type of query. */
|
||||
public static final String NAME = "template";
|
||||
public static final ParseField QUERY_NAME_FIELD = new ParseField(NAME);
|
||||
|
||||
private final static Map<String, ScriptService.ScriptType> parametersToTypes = new HashMap<>();
|
||||
static {
|
||||
parametersToTypes.put("query", ScriptService.ScriptType.INLINE);
|
||||
parametersToTypes.put("file", ScriptService.ScriptType.FILE);
|
||||
parametersToTypes.put("id", ScriptService.ScriptType.INDEXED);
|
||||
}
|
||||
|
||||
/** Template to fill. */
|
||||
private final Template template;
|
||||
|
||||
static final TemplateQueryBuilder PROTOTYPE = new TemplateQueryBuilder(new Template("proto"));
|
||||
public static final TemplateQueryBuilder PROTOTYPE = new TemplateQueryBuilder(new Template("proto"));
|
||||
|
||||
/**
|
||||
* @param template
|
||||
|
@ -95,6 +105,42 @@ public class TemplateQueryBuilder extends AbstractQueryBuilder<TemplateQueryBuil
|
|||
template.toXContent(builder, builderParams);
|
||||
}
|
||||
|
||||
/**
|
||||
* In the simplest case, parse template string and variables from the request,
|
||||
* compile the template and execute the template against the given variables.
|
||||
*/
|
||||
public static TemplateQueryBuilder fromXContent(QueryParseContext parseContext) throws IOException {
|
||||
XContentParser parser = parseContext.parser();
|
||||
Template template = parse(parser, parseContext.parseFieldMatcher());
|
||||
return new TemplateQueryBuilder(template);
|
||||
}
|
||||
|
||||
public static Template parse(XContentParser parser, ParseFieldMatcher parseFieldMatcher, String... parameters) throws IOException {
|
||||
Map<String, ScriptService.ScriptType> parameterMap = new HashMap<>(parametersToTypes);
|
||||
for (String parameter : parameters) {
|
||||
parameterMap.put(parameter, ScriptService.ScriptType.INLINE);
|
||||
}
|
||||
return parse(parser, parameterMap, parseFieldMatcher);
|
||||
}
|
||||
|
||||
public static Template parse(String defaultLang, XContentParser parser,
|
||||
ParseFieldMatcher parseFieldMatcher, String... parameters) throws IOException {
|
||||
Map<String, ScriptService.ScriptType> parameterMap = new HashMap<>(parametersToTypes);
|
||||
for (String parameter : parameters) {
|
||||
parameterMap.put(parameter, ScriptService.ScriptType.INLINE);
|
||||
}
|
||||
return Template.parse(parser, parameterMap, defaultLang, parseFieldMatcher);
|
||||
}
|
||||
|
||||
public static Template parse(XContentParser parser, ParseFieldMatcher parseFieldMatcher) throws IOException {
|
||||
return parse(parser, parametersToTypes, parseFieldMatcher);
|
||||
}
|
||||
|
||||
public static Template parse(XContentParser parser, Map<String, ScriptService.ScriptType> parameterMap,
|
||||
ParseFieldMatcher parseFieldMatcher) throws IOException {
|
||||
return Template.parse(parser, parameterMap, parseFieldMatcher);
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getWriteableName() {
|
||||
return NAME;
|
||||
|
|
|
@@ -1,95 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.query;

import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.script.Template;

import java.io.IOException;
import java.util.HashMap;
import java.util.Map;

/**
* In the simplest case, parse template string and variables from the request,
* compile the template and execute the template against the given variables.
* */
public class TemplateQueryParser implements QueryParser<TemplateQueryBuilder> {

private final static Map<String, ScriptService.ScriptType> parametersToTypes = new HashMap<>();
static {
parametersToTypes.put("query", ScriptService.ScriptType.INLINE);
parametersToTypes.put("file", ScriptService.ScriptType.FILE);
parametersToTypes.put("id", ScriptService.ScriptType.INDEXED);
}

public static final ParseField QUERY_NAME_FIELD = new ParseField(TemplateQueryBuilder.NAME);

/**
* Parses the template query replacing template parameters with provided
* values. Handles both submitting the template as part of the request as
* well as referencing only the template name.
*
* @param parseContext parse context containing the templated query.
*/
@Override
@Nullable
public TemplateQueryBuilder fromXContent(QueryParseContext parseContext) throws IOException {
XContentParser parser = parseContext.parser();
Template template = parse(parser, parseContext.parseFieldMatcher());
return new TemplateQueryBuilder(template);
}

public static Template parse(XContentParser parser, ParseFieldMatcher parseFieldMatcher, String... parameters) throws IOException {
Map<String, ScriptService.ScriptType> parameterMap = new HashMap<>(parametersToTypes);
for (String parameter : parameters) {
parameterMap.put(parameter, ScriptService.ScriptType.INLINE);
}
return parse(parser, parameterMap, parseFieldMatcher);
}

public static Template parse(String defaultLang, XContentParser parser,
ParseFieldMatcher parseFieldMatcher, String... parameters) throws IOException {

Map<String, ScriptService.ScriptType> parameterMap = new HashMap<>(parametersToTypes);
for (String parameter : parameters) {
parameterMap.put(parameter, ScriptService.ScriptType.INLINE);
}
return Template.parse(parser, parameterMap, defaultLang, parseFieldMatcher);
}

public static Template parse(XContentParser parser, ParseFieldMatcher parseFieldMatcher) throws IOException {
return parse(parser, parametersToTypes, parseFieldMatcher);
}

public static Template parse(XContentParser parser, Map<String, ScriptService.ScriptType> parameterMap,
ParseFieldMatcher parseFieldMatcher) throws IOException {
return Template.parse(parser, parameterMap, parseFieldMatcher);
}

@Override
public TemplateQueryBuilder getBuilderPrototype() {
return TemplateQueryBuilder.PROTOTYPE;
}


}
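With fromXContent now a public static method on TemplateQueryBuilder, the parser class removed above carries no logic of its own: its fromXContent body matches the builder's new method line for line, and its QUERY_NAME_FIELD is referenced as TemplateQueryBuilder.QUERY_NAME_FIELD in the SearchModule hunk further down. Assuming QueryParser can be satisfied by anything of shape (QueryParseContext) -> builder, as the functionScoreParser lambda in that hunk suggests, the whole class collapses to a method reference:

    // Sketch only: the method reference has the same shape as the removed fromXContent override.
    QueryParser<TemplateQueryBuilder> templateParser = TemplateQueryBuilder::fromXContent;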
@@ -19,28 +19,30 @@

package org.elasticsearch.index.query;

import org.apache.lucene.index.Term;
import org.apache.lucene.search.MatchNoDocsQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.lucene.BytesRefs;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.internal.TypeFieldMapper;

import java.io.IOException;
import java.util.Objects;

public class TypeQueryBuilder extends AbstractQueryBuilder<TypeQueryBuilder> {

public static final String NAME = "type";
public static final ParseField QUERY_NAME_FIELD = new ParseField(NAME);

private static final ParseField VALUE_FIELD = new ParseField("value");

private final BytesRef type;

static final TypeQueryBuilder PROTOTYPE = new TypeQueryBuilder("type");
public static final TypeQueryBuilder PROTOTYPE = new TypeQueryBuilder("type");

public TypeQueryBuilder(String type) {
if (type == null) {
@@ -63,11 +65,50 @@ public class TypeQueryBuilder extends AbstractQueryBuilder<TypeQueryBuilder> {
@Override
protected void doXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(NAME);
builder.field(TypeQueryParser.VALUE_FIELD.getPreferredName(), type.utf8ToString());
builder.field(VALUE_FIELD.getPreferredName(), type.utf8ToString());
printBoostAndQueryName(builder);
builder.endObject();
}

public static TypeQueryBuilder fromXContent(QueryParseContext parseContext) throws IOException {
XContentParser parser = parseContext.parser();
BytesRef type = null;

String queryName = null;
float boost = AbstractQueryBuilder.DEFAULT_BOOST;

String currentFieldName = null;
XContentParser.Token token;
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (token.isValue()) {
if (parseContext.parseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.NAME_FIELD)) {
queryName = parser.text();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.BOOST_FIELD)) {
boost = parser.floatValue();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, VALUE_FIELD)) {
type = parser.utf8Bytes();
} else {
throw new ParsingException(parser.getTokenLocation(),
"[" + TypeQueryBuilder.NAME + "] filter doesn't support [" + currentFieldName + "]");
}
} else {
throw new ParsingException(parser.getTokenLocation(),
"[" + TypeQueryBuilder.NAME + "] filter doesn't support [" + currentFieldName + "]");
}
}

if (type == null) {
throw new ParsingException(parser.getTokenLocation(),
"[" + TypeQueryBuilder.NAME + "] filter needs to be provided with a value for the type");
}
return new TypeQueryBuilder(type)
.boost(boost)
.queryName(queryName);
}


@Override
public String getWriteableName() {
return NAME;
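For context, the new TypeQueryBuilder.fromXContent reads the type from the value field plus the shared fields handled by AbstractQueryBuilder (assuming their usual boost and _name keys); the type and query names below are invented for illustration:

    // Hypothetical request body this method would parse:
    // { "type" : { "value" : "my_type", "boost" : 2.0, "_name" : "by_type" } }
    // Equivalent construction through the builder API:
    TypeQueryBuilder query = new TypeQueryBuilder("my_type")
            .boost(2.0f)
            .queryName("by_type");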
@@ -1,80 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

package org.elasticsearch.index.query;

import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.xcontent.XContentParser;

import java.io.IOException;

/**
* Parser for type query
*/
public class TypeQueryParser implements QueryParser<TypeQueryBuilder> {

public static final ParseField QUERY_NAME_FIELD = new ParseField(TypeQueryBuilder.NAME);
public static final ParseField VALUE_FIELD = new ParseField("value");

@Override
public TypeQueryBuilder fromXContent(QueryParseContext parseContext) throws IOException {
XContentParser parser = parseContext.parser();
BytesRef type = null;

String queryName = null;
float boost = AbstractQueryBuilder.DEFAULT_BOOST;

String currentFieldName = null;
XContentParser.Token token;
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (token.isValue()) {
if (parseContext.parseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.NAME_FIELD)) {
queryName = parser.text();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, AbstractQueryBuilder.BOOST_FIELD)) {
boost = parser.floatValue();
} else if (parseContext.parseFieldMatcher().match(currentFieldName, VALUE_FIELD)) {
type = parser.utf8Bytes();
} else {
throw new ParsingException(parser.getTokenLocation(),
"[" + TypeQueryBuilder.NAME + "] filter doesn't support [" + currentFieldName + "]");
}
} else {
throw new ParsingException(parser.getTokenLocation(),
"[" + TypeQueryBuilder.NAME + "] filter doesn't support [" + currentFieldName + "]");
}
}

if (type == null) {
throw new ParsingException(parser.getTokenLocation(),
"[" + TypeQueryBuilder.NAME + "] filter needs to be provided with a value for the type");
}
return new TypeQueryBuilder(type)
.boost(boost)
.queryName(queryName);
}

@Override
public TypeQueryBuilder getBuilderPrototype() {
return TypeQueryBuilder.PROTOTYPE;
}
}
@@ -29,7 +29,6 @@ import org.elasticsearch.common.inject.multibindings.Multibinder;
import org.elasticsearch.common.io.stream.NamedWriteable;
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.io.stream.Writeable.Reader;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.percolator.PercolatorHighlightSubFetchPhase;
import org.elasticsearch.index.query.BoolQueryParser;
@@ -38,33 +37,29 @@ import org.elasticsearch.index.query.CommonTermsQueryParser;
import org.elasticsearch.index.query.ConstantScoreQueryParser;
import org.elasticsearch.index.query.DisMaxQueryParser;
import org.elasticsearch.index.query.EmptyQueryBuilder;
import org.elasticsearch.index.query.ExistsQueryParser;
import org.elasticsearch.index.query.ExistsQueryBuilder;
import org.elasticsearch.index.query.FieldMaskingSpanQueryParser;
import org.elasticsearch.index.query.FuzzyQueryParser;
import org.elasticsearch.index.query.GeoBoundingBoxQueryBuilder;
import org.elasticsearch.index.query.GeoBoundingBoxQueryParser;
import org.elasticsearch.index.query.GeoDistanceQueryParser;
import org.elasticsearch.index.query.GeoDistanceRangeQueryParser;
import org.elasticsearch.index.query.GeoPolygonQueryParser;
import org.elasticsearch.index.query.GeoShapeQueryParser;
import org.elasticsearch.index.query.GeoDistanceQueryBuilder;
import org.elasticsearch.index.query.GeoDistanceRangeQueryBuilder;
import org.elasticsearch.index.query.GeoPolygonQueryBuilder;
import org.elasticsearch.index.query.GeoShapeQueryBuilder;
import org.elasticsearch.index.query.GeohashCellQuery;
import org.elasticsearch.index.query.HasChildQueryParser;
import org.elasticsearch.index.query.HasParentQueryParser;
import org.elasticsearch.index.query.IdsQueryParser;
import org.elasticsearch.index.query.IndicesQueryParser;
import org.elasticsearch.index.query.MatchAllQueryParser;
import org.elasticsearch.index.query.MatchNoneQueryParser;
import org.elasticsearch.index.query.MatchNoneQueryBuilder;
import org.elasticsearch.index.query.MatchPhrasePrefixQueryBuilder;
import org.elasticsearch.index.query.MatchPhraseQueryBuilder;
import org.elasticsearch.index.query.MatchQueryBuilder;
import org.elasticsearch.index.query.MoreLikeThisQueryParser;
import org.elasticsearch.index.query.MultiMatchQueryBuilder;
import org.elasticsearch.index.query.MultiMatchQueryParser;
import org.elasticsearch.index.query.NestedQueryBuilder;
import org.elasticsearch.index.query.NestedQueryParser;
import org.elasticsearch.index.query.ParentIdQueryBuilder;
import org.elasticsearch.index.query.ParentIdQueryParser;
import org.elasticsearch.index.query.PercolatorQueryParser;
import org.elasticsearch.index.query.PercolatorQueryBuilder;
import org.elasticsearch.index.query.PrefixQueryParser;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryParseContext;
@@ -72,8 +67,8 @@ import org.elasticsearch.index.query.QueryParser;
import org.elasticsearch.index.query.QueryStringQueryParser;
import org.elasticsearch.index.query.RangeQueryParser;
import org.elasticsearch.index.query.RegexpQueryParser;
import org.elasticsearch.index.query.ScriptQueryParser;
import org.elasticsearch.index.query.SimpleQueryStringParser;
import org.elasticsearch.index.query.ScriptQueryBuilder;
import org.elasticsearch.index.query.SimpleQueryStringBuilder;
import org.elasticsearch.index.query.SpanContainingQueryParser;
import org.elasticsearch.index.query.SpanFirstQueryParser;
import org.elasticsearch.index.query.SpanMultiTermQueryParser;
@@ -82,11 +77,10 @@ import org.elasticsearch.index.query.SpanNotQueryParser;
import org.elasticsearch.index.query.SpanOrQueryParser;
import org.elasticsearch.index.query.SpanTermQueryParser;
import org.elasticsearch.index.query.SpanWithinQueryParser;
import org.elasticsearch.index.query.TemplateQueryParser;
import org.elasticsearch.index.query.TemplateQueryBuilder;
import org.elasticsearch.index.query.TermQueryParser;
import org.elasticsearch.index.query.TermsQueryParser;
import org.elasticsearch.index.query.TypeQueryBuilder;
import org.elasticsearch.index.query.TypeQueryParser;
import org.elasticsearch.index.query.WildcardQueryParser;
import org.elasticsearch.index.query.WrapperQueryParser;
import org.elasticsearch.index.query.functionscore.FunctionScoreQueryBuilder;
@@ -235,16 +229,11 @@ import org.elasticsearch.search.sort.ScriptSortBuilder;
import org.elasticsearch.search.sort.SortBuilder;
import org.elasticsearch.search.suggest.Suggester;
import org.elasticsearch.search.suggest.Suggesters;
import org.elasticsearch.search.suggest.completion.FuzzyOptions;

import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.function.Supplier;

/**
*
@@ -558,21 +547,30 @@ public class SearchModule extends AbstractModule {
QueryParser<FunctionScoreQueryBuilder> functionScoreParser = (QueryParseContext c) -> FunctionScoreQueryBuilder
.fromXContent((String name) -> functionScoreParsers.get(name), c);
registerQuery(FunctionScoreQueryBuilder.PROTOTYPE::readFrom, functionScoreParser, FunctionScoreQueryBuilder.QUERY_NAME_FIELD);
registerQueryParser(new SimpleQueryStringParser(), SimpleQueryStringParser.QUERY_NAME_FIELD);
registerQueryParser(new TemplateQueryParser(), TemplateQueryParser.QUERY_NAME_FIELD);
registerQueryParser(new TypeQueryParser(), TypeQueryParser.QUERY_NAME_FIELD);
registerQueryParser(new ScriptQueryParser(), ScriptQueryParser.QUERY_NAME_FIELD);
registerQueryParser(new GeoDistanceQueryParser(), GeoDistanceQueryParser.QUERY_NAME_FIELD);
registerQueryParser(new GeoDistanceRangeQueryParser(), GeoDistanceRangeQueryParser.QUERY_NAME_FIELD);
registerQueryParser(new GeoBoundingBoxQueryParser(), GeoBoundingBoxQueryParser.QUERY_NAME_FIELD);
registerQueryParser(new GeohashCellQuery.Parser(), GeohashCellQuery.Parser.QUERY_NAME_FIELD);
registerQueryParser(new GeoPolygonQueryParser(), GeoPolygonQueryParser.QUERY_NAME_FIELD);
registerQueryParser(new ExistsQueryParser(), ExistsQueryParser.QUERY_NAME_FIELD);
registerQueryParser(new MatchNoneQueryParser(), MatchNoneQueryParser.QUERY_NAME_FIELD);
registerQueryParser(new ParentIdQueryParser(), ParentIdQueryParser.QUERY_NAME_FIELD);
registerQueryParser(new PercolatorQueryParser(), PercolatorQueryParser.QUERY_NAME_FIELD);
registerQuery(SimpleQueryStringBuilder.PROTOTYPE::readFrom, SimpleQueryStringBuilder::fromXContent,
SimpleQueryStringBuilder.QUERY_NAME_FIELD);
registerQuery(TemplateQueryBuilder.PROTOTYPE::readFrom, TemplateQueryBuilder::fromXContent, TemplateQueryBuilder.QUERY_NAME_FIELD);
registerQuery(TypeQueryBuilder.PROTOTYPE::readFrom, TypeQueryBuilder::fromXContent, TypeQueryBuilder.QUERY_NAME_FIELD);
registerQuery(ScriptQueryBuilder.PROTOTYPE::readFrom, ScriptQueryBuilder::fromXContent, ScriptQueryBuilder.QUERY_NAME_FIELD);
registerQuery(GeoDistanceQueryBuilder.PROTOTYPE::readFrom, GeoDistanceQueryBuilder::fromXContent,
GeoDistanceQueryBuilder.QUERY_NAME_FIELD);
registerQuery(GeoDistanceRangeQueryBuilder.PROTOTYPE::readFrom, GeoDistanceRangeQueryBuilder::fromXContent,
GeoDistanceRangeQueryBuilder.QUERY_NAME_FIELD);
registerQuery(GeoBoundingBoxQueryBuilder.PROTOTYPE::readFrom, GeoBoundingBoxQueryBuilder::fromXContent,
GeoBoundingBoxQueryBuilder.QUERY_NAME_FIELD);
registerQuery(GeohashCellQuery.Builder.PROTOTYPE::readFrom, GeohashCellQuery.Builder::fromXContent,
GeohashCellQuery.QUERY_NAME_FIELD);
registerQuery(GeoPolygonQueryBuilder.PROTOTYPE::readFrom, GeoPolygonQueryBuilder::fromXContent,
GeoPolygonQueryBuilder.QUERY_NAME_FIELD);
registerQuery(ExistsQueryBuilder.PROTOTYPE::readFrom, ExistsQueryBuilder::fromXContent, ExistsQueryBuilder.QUERY_NAME_FIELD);
registerQuery(MatchNoneQueryBuilder.PROTOTYPE::readFrom, MatchNoneQueryBuilder::fromXContent,
MatchNoneQueryBuilder.QUERY_NAME_FIELD);
registerQuery(ParentIdQueryBuilder.PROTO::readFrom, ParentIdQueryBuilder::fromXContent, ParentIdQueryBuilder.QUERY_NAME_FIELD);
registerQuery(PercolatorQueryBuilder.PROTO::readFrom, PercolatorQueryBuilder::fromXContent,
PercolatorQueryBuilder.QUERY_NAME_FIELD);
if (ShapesAvailability.JTS_AVAILABLE && ShapesAvailability.SPATIAL4J_AVAILABLE) {
registerQueryParser(new GeoShapeQueryParser(), GeoShapeQueryParser.QUERY_NAME_FIELD);
registerQuery(GeoShapeQueryBuilder.PROTOTYPE::readFrom, GeoShapeQueryBuilder::fromXContent,
GeoShapeQueryBuilder.QUERY_NAME_FIELD);
}
// EmptyQueryBuilder is not registered as query parser but used internally.
// We need to register it with the NamedWriteableRegistry in order to serialize it
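Taken together, the SearchModule changes apply one conversion per query: drop the standalone parser instance and hand registerQuery a stream reader, the builder's static fromXContent, and the query's ParseField. The type query is a representative example; both lines appear verbatim in the hunk above:

    // Before: a dedicated parser object owned the name constant and the parsing logic.
    registerQueryParser(new TypeQueryParser(), TypeQueryParser.QUERY_NAME_FIELD);

    // After: the builder supplies everything registerQuery needs.
    registerQuery(TypeQueryBuilder.PROTOTYPE::readFrom, TypeQueryBuilder::fromXContent, TypeQueryBuilder.QUERY_NAME_FIELD);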