Query Refactoring: Refactor of GeohashCellQuery

Moves the query building functionality from the parser to the builder's new
toQuery() method, analogous to other recent query refactorings.

Relates to #10217

PR goes against the query-refactoring branch
Colin Goodheart-Smithe 2015-09-08 13:00:07 +01:00
parent c2ccb2157c
commit 4f0ae05da5
4 changed files with 283 additions and 66 deletions
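
For orientation, a minimal caller-side sketch of what the refactoring aims at: the geohash, precision and
neighbors settings live on GeohashCellQuery.Builder, validation happens on the builder, and the Lucene query
is produced by the builder's new toQuery() method rather than inside the parser, whose fromXContent() now only
returns a configured Builder. The field name, geohash value, error handling and the exact
toQuery(QueryShardContext) signature below are illustrative assumptions, not part of this commit.

// Hypothetical usage sketch - not part of the diff below.
import java.io.IOException;

import org.apache.lucene.search.Query;
import org.elasticsearch.index.query.GeohashCellQuery;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.index.query.QueryValidationException;

public class GeohashCellQueryUsageSketch {

    public static Query buildGeohashCellQuery(QueryShardContext context) throws IOException {
        // "pin.location" is a hypothetical geo_point field with geohash_prefix enabled.
        GeohashCellQuery.Builder builder = new GeohashCellQuery.Builder("pin.location");
        builder.geohash("u4pruydqqvj"); // cell to match
        builder.precision(3);           // truncate the geohash to three characters
        builder.neighbors(true);        // also match the eight neighbouring cells

        // Validation now lives on the builder instead of the parser.
        QueryValidationException errors = builder.validate();
        if (errors != null) {
            throw new IllegalArgumentException("invalid geohash_cell query: " + errors.validationErrors());
        }

        // The Lucene query is built by the builder itself (toQuery() delegating to doToQuery()),
        // assuming the toQuery(QueryShardContext) entry point referred to in the commit message.
        return builder.toQuery(context);
    }
}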

org/elasticsearch/index/query/AbstractQueryBuilder.java

@@ -22,6 +22,7 @@ package org.elasticsearch.index.query;
 import org.apache.lucene.search.Query;
 import org.apache.lucene.util.BytesRef;
 import org.elasticsearch.action.support.ToXContentToBytes;
+import org.elasticsearch.common.ParseField;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.lucene.BytesRefs;
@@ -42,6 +43,8 @@ public abstract class AbstractQueryBuilder<QB extends AbstractQueryBuilder> exte
     /** Default for boost to apply to resulting Lucene query. Defaults to 1.0*/
     public static final float DEFAULT_BOOST = 1.0f;
+    public static final ParseField NAME_FIELD = new ParseField("_name");
+    public static final ParseField BOOST_FIELD = new ParseField("boost");

     protected String queryName;
     protected float boost = DEFAULT_BOOST;

org/elasticsearch/index/query/GeohashCellQuery.java

@@ -22,11 +22,14 @@ package org.elasticsearch.index.query;
 import org.apache.lucene.search.Query;
 import org.elasticsearch.ElasticsearchParseException;
 import org.elasticsearch.common.Nullable;
+import org.elasticsearch.common.ParseField;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.geo.GeoHashUtils;
 import org.elasticsearch.common.geo.GeoPoint;
 import org.elasticsearch.common.geo.GeoUtils;
 import org.elasticsearch.common.inject.Inject;
+import org.elasticsearch.common.io.stream.StreamInput;
+import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.unit.DistanceUnit;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentParser;
@@ -37,6 +40,7 @@ import org.elasticsearch.index.mapper.geo.GeoPointFieldMapper;
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
+import java.util.Objects;

 /**
  * A geohash cell filter that filters {@link GeoPoint}s by their geohashes. Basically the a
@@ -56,8 +60,9 @@ import java.util.List;
 public class GeohashCellQuery {

     public static final String NAME = "geohash_cell";
-    public static final String NEIGHBORS = "neighbors";
-    public static final String PRECISION = "precision";
+    public static final ParseField NEIGHBORS_FIELD = new ParseField("neighbors");
+    public static final ParseField PRECISION_FIELD = new ParseField("precision");
+    public static final boolean DEFAULT_NEIGHBORS = false;

     /**
      * Create a new geohash filter for a given set of geohashes. In general this method
@@ -93,10 +98,10 @@
         // because a transformation from a geohash to a point an back to the
         // geohash will extend the accuracy of the hash to max precision
         // i.e. by filing up with z's.
-        private String field;
+        private String fieldName;
         private String geohash;
-        private int levels = -1;
-        private boolean neighbors;
+        private Integer levels = null;
+        private boolean neighbors = DEFAULT_NEIGHBORS;

         private static final Builder PROTOTYPE = new Builder(null);
@@ -114,7 +119,7 @@
         public Builder(String field, String geohash, boolean neighbors) {
             super();
-            this.field = field;
+            this.fieldName = field;
             this.geohash = geohash;
             this.neighbors = neighbors;
         }
@@ -134,11 +139,19 @@
             return this;
         }

+        public String geohash() {
+            return geohash;
+        }
+
         public Builder precision(int levels) {
             this.levels = levels;
             return this;
         }

+        public Integer precision() {
+            return levels;
+        }
+
         public Builder precision(String precision) {
             double meters = DistanceUnit.parse(precision, DistanceUnit.DEFAULT, DistanceUnit.METERS);
             return precision(GeoUtils.geoHashLevelsForPrecision(meters));
@@ -149,32 +162,123 @@
             return this;
         }

-        public Builder field(String field) {
-            this.field = field;
+        public boolean neighbors() {
+            return neighbors;
+        }
+
+        public Builder fieldName(String fieldName) {
+            this.fieldName = fieldName;
             return this;
         }

+        public String fieldName() {
+            return fieldName;
+        }
+
+        @Override
+        public QueryValidationException validate() {
+            QueryValidationException errors = null;
+            if (fieldName == null) {
+                errors = QueryValidationException.addValidationError(NAME, "fieldName must not be null", errors);
+            }
+            if (geohash == null) {
+                errors = QueryValidationException.addValidationError(NAME, "geohash or point must be defined", errors);
+            }
+            if (levels != null && levels <= 0) {
+                errors = QueryValidationException.addValidationError(NAME, "precision must be greater than 0. Found [" + levels + "]",
+                        errors);
+            }
+            return errors;
+        }
+
+        @Override
+        protected Query doToQuery(QueryShardContext context) throws IOException {
+            MappedFieldType fieldType = context.fieldMapper(fieldName);
+            if (fieldType == null) {
+                throw new QueryShardException(context, "failed to parse [{}] query. missing [{}] field [{}]", NAME,
+                        GeoPointFieldMapper.CONTENT_TYPE, fieldName);
+            }
+            if (!(fieldType instanceof GeoPointFieldMapper.GeoPointFieldType)) {
+                throw new QueryShardException(context, "failed to parse [{}] query. field [{}] is not a geo_point field", NAME, fieldName);
+            }
+            GeoPointFieldMapper.GeoPointFieldType geoFieldType = ((GeoPointFieldMapper.GeoPointFieldType) fieldType);
+            if (!geoFieldType.isGeohashPrefixEnabled()) {
+                throw new QueryShardException(context, "failed to parse [{}] query. [geohash_prefix] is not enabled for field [{}]", NAME,
+                        fieldName);
+            }
+            if (levels != null) {
+                int len = Math.min(levels, geohash.length());
+                geohash = geohash.substring(0, len);
+            }
+            Query query;
+            if (neighbors) {
+                query = create(context, geoFieldType, geohash, GeoHashUtils.addNeighbors(geohash, new ArrayList<CharSequence>(8)));
+            } else {
+                query = create(context, geoFieldType, geohash, null);
+            }
+            return query;
+        }
         @Override
         protected void doXContent(XContentBuilder builder, Params params) throws IOException {
             builder.startObject(NAME);
-            if (neighbors) {
-                builder.field(NEIGHBORS, neighbors);
+            builder.field(NEIGHBORS_FIELD.getPreferredName(), neighbors);
+            if (levels != null) {
+                builder.field(PRECISION_FIELD.getPreferredName(), levels);
             }
-            if(levels > 0) {
-                builder.field(PRECISION, levels);
-            }
-            builder.field(field, geohash);
+            builder.field(fieldName, geohash);
             printBoostAndQueryName(builder);
             builder.endObject();
         }
+        @Override
+        protected Builder doReadFrom(StreamInput in) throws IOException {
+            String field = in.readString();
+            String geohash = in.readString();
+            Builder builder = new Builder(field, geohash);
+            if (in.readBoolean()) {
+                builder.precision(in.readVInt());
+            }
+            builder.neighbors(in.readBoolean());
+            return builder;
+        }
+
+        @Override
+        protected void doWriteTo(StreamOutput out) throws IOException {
+            out.writeString(fieldName);
+            out.writeString(geohash);
+            boolean hasLevels = levels != null;
+            out.writeBoolean(hasLevels);
+            if (hasLevels) {
+                out.writeVInt(levels);
+            }
+            out.writeBoolean(neighbors);
+        }
+
+        @Override
+        protected boolean doEquals(Builder other) {
+            return Objects.equals(fieldName, other.fieldName)
+                    && Objects.equals(geohash, other.geohash)
+                    && Objects.equals(levels, other.levels)
+                    && Objects.equals(neighbors, other.neighbors);
+        }
+
+        @Override
+        protected int doHashCode() {
+            return Objects.hash(fieldName, geohash, levels, neighbors);
+        }
+
         @Override
         public String getWriteableName() {
             return NAME;
         }
     }

-    public static class Parser extends BaseQueryParserTemp {
+    public static class Parser extends BaseQueryParser<Builder> {

         @Inject
         public Parser() {
@@ -186,16 +290,15 @@
         }

         @Override
-        public Query parse(QueryShardContext context) throws IOException, QueryParsingException {
-            QueryParseContext parseContext = context.parseContext();
+        public Builder fromXContent(QueryParseContext parseContext) throws IOException, QueryParsingException {
             XContentParser parser = parseContext.parser();

             String fieldName = null;
             String geohash = null;
-            int levels = -1;
-            boolean neighbors = false;
+            Integer levels = null;
+            Boolean neighbors = null;
             String queryName = null;
-            float boost = AbstractQueryBuilder.DEFAULT_BOOST;
+            Float boost = null;

             XContentParser.Token token;
             if ((token = parser.currentToken()) != Token.START_OBJECT) {
@@ -208,30 +311,31 @@
                     if (parseContext.isDeprecatedSetting(field)) {
                         // skip
-                    } else if (PRECISION.equals(field)) {
+                    } else if (parseContext.parseFieldMatcher().match(field, PRECISION_FIELD)) {
                         token = parser.nextToken();
-                        if(token == Token.VALUE_NUMBER) {
+                        if (token == Token.VALUE_NUMBER) {
                             levels = parser.intValue();
-                        } else if(token == Token.VALUE_STRING) {
+                        } else if (token == Token.VALUE_STRING) {
                             double meters = DistanceUnit.parse(parser.text(), DistanceUnit.DEFAULT, DistanceUnit.METERS);
                             levels = GeoUtils.geoHashLevelsForPrecision(meters);
                         }
-                    } else if (NEIGHBORS.equals(field)) {
+                    } else if (parseContext.parseFieldMatcher().match(field, NEIGHBORS_FIELD)) {
                         parser.nextToken();
                         neighbors = parser.booleanValue();
-                    } else if ("_name".equals(field)) {
+                    } else if (parseContext.parseFieldMatcher().match(field, AbstractQueryBuilder.NAME_FIELD)) {
                         parser.nextToken();
                         queryName = parser.text();
-                    } else if ("boost".equals(field)) {
+                    } else if (parseContext.parseFieldMatcher().match(field, AbstractQueryBuilder.BOOST_FIELD)) {
                         parser.nextToken();
                         boost = parser.floatValue();
                     } else {
                         fieldName = field;
                         token = parser.nextToken();
-                        if(token == Token.VALUE_STRING) {
-                            // A string indicates either a geohash or a lat/lon string
+                        if (token == Token.VALUE_STRING) {
+                            // A string indicates either a geohash or a lat/lon
+                            // string
                             String location = parser.text();
-                            if(location.indexOf(",")>0) {
+                            if (location.indexOf(",") > 0) {
                                 geohash = GeoUtils.parseGeoPoint(parser).geohash();
                             } else {
                                 geohash = location;
@@ -244,43 +348,21 @@
                     throw new ElasticsearchParseException("failed to parse [{}] query. unexpected token [{}]", NAME, token);
                 }
             }
-            if (geohash == null) {
-                throw new QueryParsingException(parseContext, "failed to parse [{}] query. missing geohash value", NAME);
+            Builder builder = new Builder(fieldName);
+            builder.geohash(geohash);
+            if (levels != null) {
+                builder.precision(levels);
             }
-            MappedFieldType fieldType = context.fieldMapper(fieldName);
-            if (fieldType == null) {
-                throw new QueryParsingException(parseContext, "failed to parse [{}] query. missing [{}] field [{}]", NAME, GeoPointFieldMapper.CONTENT_TYPE, fieldName);
-            }
-            if (!(fieldType instanceof GeoPointFieldMapper.GeoPointFieldType)) {
-                throw new QueryParsingException(parseContext, "failed to parse [{}] query. field [{}] is not a geo_point field", NAME, fieldName);
-            }
-            GeoPointFieldMapper.GeoPointFieldType geoFieldType = ((GeoPointFieldMapper.GeoPointFieldType) fieldType);
-            if (!geoFieldType.isGeohashPrefixEnabled()) {
-                throw new QueryParsingException(parseContext, "failed to parse [{}] query. [geohash_prefix] is not enabled for field [{}]", NAME, fieldName);
-            }
-            if(levels > 0) {
-                int len = Math.min(levels, geohash.length());
-                geohash = geohash.substring(0, len);
-            }
-            Query filter;
-            if (neighbors) {
-                filter = create(context, geoFieldType, geohash, GeoHashUtils.addNeighbors(geohash, new ArrayList<CharSequence>(8)));
-            } else {
-                filter = create(context, geoFieldType, geohash, null);
+            if (neighbors != null) {
+                builder.neighbors(neighbors);
             }
             if (queryName != null) {
-                context.addNamedQuery(queryName, filter);
+                builder.queryName(queryName);
             }
-            if (filter != null) {
-                filter.setBoost(boost);
+            if (boost != null) {
+                builder.boost(boost);
             }
-            return filter;
+            return builder;
         }

         @Override

org/elasticsearch/index/query/BaseQueryTestCase.java

@@ -19,6 +19,8 @@
 package org.elasticsearch.index.query;

+import com.carrotsearch.randomizedtesting.generators.CodepointSetGenerator;
+
 import org.apache.lucene.search.Query;
 import org.elasticsearch.Version;
 import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest;
@@ -70,26 +72,35 @@ import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.threadpool.ThreadPoolModule;
 import org.joda.time.DateTime;
 import org.joda.time.DateTimeZone;
-import org.junit.*;
+import org.junit.After;
+import org.junit.AfterClass;
+import org.junit.Before;
+import org.junit.BeforeClass;
+import org.junit.Test;

 import java.io.IOException;
 import java.util.Collections;
 import java.util.Map;

-import static org.hamcrest.Matchers.*;
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.not;
+import static org.hamcrest.Matchers.notNullValue;
+import static org.hamcrest.Matchers.nullValue;

 public abstract class BaseQueryTestCase<QB extends AbstractQueryBuilder<QB>> extends ESTestCase { // TODO rename this AbstractQueryTestCase

+    private static final GeohashGenerator geohashGenerator = new GeohashGenerator();
     protected static final String STRING_FIELD_NAME = "mapped_string";
     protected static final String INT_FIELD_NAME = "mapped_int";
     protected static final String DOUBLE_FIELD_NAME = "mapped_double";
     protected static final String BOOLEAN_FIELD_NAME = "mapped_boolean";
     protected static final String DATE_FIELD_NAME = "mapped_date";
     protected static final String OBJECT_FIELD_NAME = "mapped_object";
+    protected static final String GEO_FIELD_NAME = "mapped_geo";
     protected static final String[] MAPPED_FIELD_NAMES = new String[] { STRING_FIELD_NAME, INT_FIELD_NAME,
-            DOUBLE_FIELD_NAME, BOOLEAN_FIELD_NAME, DATE_FIELD_NAME, OBJECT_FIELD_NAME };
+            DOUBLE_FIELD_NAME, BOOLEAN_FIELD_NAME, DATE_FIELD_NAME, OBJECT_FIELD_NAME, GEO_FIELD_NAME };
     protected static final String[] MAPPED_LEAF_FIELD_NAMES = new String[] { STRING_FIELD_NAME, INT_FIELD_NAME,
-            DOUBLE_FIELD_NAME, BOOLEAN_FIELD_NAME, DATE_FIELD_NAME };
+            DOUBLE_FIELD_NAME, BOOLEAN_FIELD_NAME, DATE_FIELD_NAME, GEO_FIELD_NAME };

     private static Injector injector;
     private static IndexQueryParserService queryParserService;
@@ -169,7 +180,8 @@ public abstract class BaseQueryTestCase<QB extends AbstractQueryBuilder<QB>> ext
                 DOUBLE_FIELD_NAME, "type=double",
                 BOOLEAN_FIELD_NAME, "type=boolean",
                 DATE_FIELD_NAME, "type=date",
-                OBJECT_FIELD_NAME, "type=object"
+                OBJECT_FIELD_NAME, "type=object",
+                GEO_FIELD_NAME, "type=geo_point,lat_lon=true,geohash=true,geohash_prefix=true"
         ).string()), false, false);
         // also add mappings for two inner field in the object field
         mapperService.merge(type, new CompressedXContent("{\"properties\":{\""+OBJECT_FIELD_NAME+"\":{\"type\":\"object\","
@@ -521,6 +533,18 @@ public abstract class BaseQueryTestCase<QB extends AbstractQueryBuilder<QB>> ext
         return (currentTypes.length == 0) ? MetaData.ALL : randomFrom(currentTypes);
     }

+    public static String randomGeohash(int minPrecision, int maxPrecision) {
+        return geohashGenerator.ofStringLength(getRandom(), minPrecision, maxPrecision);
+    }
+
+    public static class GeohashGenerator extends CodepointSetGenerator {
+        private final static char[] ASCII_SET = "0123456789bcdefghjkmnpqrstuvwxyz".toCharArray();
+
+        public GeohashGenerator() {
+            super(ASCII_SET);
+        }
+    }
+
     protected static Fuzziness randomFuzziness(String fieldName) {
         Fuzziness fuzziness = Fuzziness.AUTO;
         switch (fieldName) {

org/elasticsearch/index/query/GeohashCellQueryBuilderTests.java (new file)

@@ -0,0 +1,108 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.query;
import org.apache.lucene.index.Term;
import org.apache.lucene.queries.TermsQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;
import org.elasticsearch.common.unit.DistanceUnit;
import org.elasticsearch.index.mapper.geo.GeoPointFieldMapper;
import org.elasticsearch.index.query.GeohashCellQuery.Builder;
import org.junit.Test;
import java.io.IOException;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.instanceOf;
import static org.hamcrest.Matchers.notNullValue;
public class GeohashCellQueryBuilderTests extends BaseQueryTestCase<GeohashCellQuery.Builder> {

    @Override
    protected Builder doCreateTestQueryBuilder() {
        GeohashCellQuery.Builder builder = new Builder(GEO_FIELD_NAME);
        builder.geohash(randomGeohash(1, 12));
        if (randomBoolean()) {
            builder.neighbors(randomBoolean());
        }
        if (randomBoolean()) {
            if (randomBoolean()) {
                builder.precision(randomIntBetween(1, 12));
            } else {
                builder.precision(randomIntBetween(1, 1000000) + randomFrom(DistanceUnit.values()).toString());
            }
        }
        return builder;
    }

    @Override
    protected void doAssertLuceneQuery(Builder queryBuilder, Query query, QueryShardContext context) throws IOException {
        if (queryBuilder.neighbors()) {
            assertThat(query, instanceOf(TermsQuery.class));
        } else {
            assertThat(query, instanceOf(TermQuery.class));
            TermQuery termQuery = (TermQuery) query;
            Term term = termQuery.getTerm();
            assertThat(term.field(), equalTo(queryBuilder.fieldName() + GeoPointFieldMapper.Names.GEOHASH_SUFFIX));
            String geohash = queryBuilder.geohash();
            if (queryBuilder.precision() != null) {
                int len = Math.min(queryBuilder.precision(), geohash.length());
                geohash = geohash.substring(0, len);
            }
            assertThat(term.text(), equalTo(geohash));
        }
    }

    @Test
    public void testNullField() {
        GeohashCellQuery.Builder builder = new Builder(null);
        builder.geohash(randomGeohash(1, 12));
        QueryValidationException exception = builder.validate();
        assertThat(exception, notNullValue());
        assertThat(exception.validationErrors(), notNullValue());
        assertThat(exception.validationErrors().size(), equalTo(1));
        assertThat(exception.validationErrors().get(0), equalTo("[" + GeohashCellQuery.NAME + "] fieldName must not be null"));
    }

    @Test
    public void testNullGeohash() {
        GeohashCellQuery.Builder builder = new Builder(GEO_FIELD_NAME);
        QueryValidationException exception = builder.validate();
        assertThat(exception, notNullValue());
        assertThat(exception.validationErrors(), notNullValue());
        assertThat(exception.validationErrors().size(), equalTo(1));
        assertThat(exception.validationErrors().get(0), equalTo("[" + GeohashCellQuery.NAME + "] geohash or point must be defined"));
    }

    @Test
    public void testInvalidPrecision() {
        GeohashCellQuery.Builder builder = new Builder(GEO_FIELD_NAME);
        builder.geohash(randomGeohash(1, 12));
        builder.precision(-1);
        QueryValidationException exception = builder.validate();
        assertThat(exception, notNullValue());
        assertThat(exception.validationErrors(), notNullValue());
        assertThat(exception.validationErrors().size(), equalTo(1));
        assertThat(exception.validationErrors().get(0), equalTo("[" + GeohashCellQuery.NAME + "] precision must be greater than 0. Found ["
                + -1 + "]"));
    }
}