Refactor geo_point validate* and normalize* options to ignore_malformed and coerce*

For consistency, the geo_point mapper's validate and normalize options are converted to ignore_malformed and coerce
This commit is contained in:
Nicholas Knize 2015-08-07 15:55:22 -05:00
parent 68307aa9f3
commit b2ba3847f7
21 changed files with 550 additions and 217 deletions

View File

@ -85,6 +85,8 @@ public class GeoPointFieldMapper extends FieldMapper implements ArrayValueMapper
public static final String LON_SUFFIX = "." + LON;
public static final String GEOHASH = "geohash";
public static final String GEOHASH_SUFFIX = "." + GEOHASH;
public static final String IGNORE_MALFORMED = "ignore_malformed";
public static final String COERCE = "coerce";
}
public static class Defaults {
@ -93,10 +95,9 @@ public class GeoPointFieldMapper extends FieldMapper implements ArrayValueMapper
public static final boolean ENABLE_GEOHASH = false;
public static final boolean ENABLE_GEOHASH_PREFIX = false;
public static final int GEO_HASH_PRECISION = GeoHashUtils.PRECISION;
public static final boolean NORMALIZE_LAT = true;
public static final boolean NORMALIZE_LON = true;
public static final boolean VALIDATE_LAT = true;
public static final boolean VALIDATE_LON = true;
public static final boolean IGNORE_MALFORMED = false;
public static final boolean COERCE = false;
public static final MappedFieldType FIELD_TYPE = new GeoPointFieldType();
@ -215,6 +216,7 @@ public class GeoPointFieldMapper extends FieldMapper implements ArrayValueMapper
@Override
public Mapper.Builder<?, ?> parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
Builder builder = geoPointField(name);
final boolean indexCreatedBeforeV2_0 = parserContext.indexVersionCreated().before(Version.V_2_0_0);
parseField(builder, name, node, parserContext);
for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
Map.Entry<String, Object> entry = iterator.next();
@ -245,25 +247,42 @@ public class GeoPointFieldMapper extends FieldMapper implements ArrayValueMapper
builder.geoHashPrecision(GeoUtils.geoHashLevelsForPrecision(fieldNode.toString()));
}
iterator.remove();
} else if (fieldName.equals("validate")) {
builder.fieldType().setValidateLat(XContentMapValues.nodeBooleanValue(fieldNode));
builder.fieldType().setValidateLon(XContentMapValues.nodeBooleanValue(fieldNode));
} else if (fieldName.equals(Names.IGNORE_MALFORMED)) {
if (builder.fieldType().coerce == false) {
builder.fieldType().ignoreMalformed = XContentMapValues.nodeBooleanValue(fieldNode);
}
iterator.remove();
} else if (fieldName.equals("validate_lon")) {
builder.fieldType().setValidateLon(XContentMapValues.nodeBooleanValue(fieldNode));
} else if (indexCreatedBeforeV2_0 && fieldName.equals("validate")) {
if (builder.fieldType().ignoreMalformed == false) {
builder.fieldType().ignoreMalformed = !XContentMapValues.nodeBooleanValue(fieldNode);
}
iterator.remove();
} else if (indexCreatedBeforeV2_0 && fieldName.equals("validate_lon")) {
if (builder.fieldType().ignoreMalformed() == false) {
builder.fieldType().ignoreMalformed = !XContentMapValues.nodeBooleanValue(fieldNode);
}
iterator.remove();
} else if (fieldName.equals("validate_lat")) {
builder.fieldType().setValidateLat(XContentMapValues.nodeBooleanValue(fieldNode));
} else if (indexCreatedBeforeV2_0 && fieldName.equals("validate_lat")) {
if (builder.fieldType().ignoreMalformed == false) {
builder.fieldType().ignoreMalformed = !XContentMapValues.nodeBooleanValue(fieldNode);
}
iterator.remove();
} else if (fieldName.equals("normalize")) {
builder.fieldType().setNormalizeLat(XContentMapValues.nodeBooleanValue(fieldNode));
builder.fieldType().setNormalizeLon(XContentMapValues.nodeBooleanValue(fieldNode));
} else if (fieldName.equals(Names.COERCE)) {
builder.fieldType().coerce = XContentMapValues.nodeBooleanValue(fieldNode);
if (builder.fieldType().coerce == true) {
builder.fieldType().ignoreMalformed = true;
}
iterator.remove();
} else if (fieldName.equals("normalize_lat")) {
builder.fieldType().setNormalizeLat(XContentMapValues.nodeBooleanValue(fieldNode));
} else if (indexCreatedBeforeV2_0 && fieldName.equals("normalize")) {
builder.fieldType().coerce = XContentMapValues.nodeBooleanValue(fieldNode);
iterator.remove();
} else if (fieldName.equals("normalize_lon")) {
builder.fieldType().setNormalizeLon(XContentMapValues.nodeBooleanValue(fieldNode));
} else if (indexCreatedBeforeV2_0 && fieldName.equals("normalize_lat")) {
builder.fieldType().coerce = XContentMapValues.nodeBooleanValue(fieldNode);
iterator.remove();
} else if (indexCreatedBeforeV2_0 && fieldName.equals("normalize_lon")) {
if (builder.fieldType().coerce == false) {
builder.fieldType().coerce = XContentMapValues.nodeBooleanValue(fieldNode);
}
iterator.remove();
} else if (parseMultiField(builder, name, parserContext, fieldName, fieldNode)) {
iterator.remove();
@ -281,10 +300,8 @@ public class GeoPointFieldMapper extends FieldMapper implements ArrayValueMapper
private MappedFieldType latFieldType;
private MappedFieldType lonFieldType;
private boolean validateLon = true;
private boolean validateLat = true;
private boolean normalizeLon = true;
private boolean normalizeLat = true;
private boolean ignoreMalformed = false;
private boolean coerce = false;
public GeoPointFieldType() {}
@ -295,10 +312,8 @@ public class GeoPointFieldMapper extends FieldMapper implements ArrayValueMapper
this.geohashPrefixEnabled = ref.geohashPrefixEnabled;
this.latFieldType = ref.latFieldType; // copying ref is ok, this can never be modified
this.lonFieldType = ref.lonFieldType; // copying ref is ok, this can never be modified
this.validateLon = ref.validateLon;
this.validateLat = ref.validateLat;
this.normalizeLon = ref.normalizeLon;
this.normalizeLat = ref.normalizeLat;
this.coerce = ref.coerce;
this.ignoreMalformed = ref.ignoreMalformed;
}
@Override
@ -312,10 +327,8 @@ public class GeoPointFieldMapper extends FieldMapper implements ArrayValueMapper
GeoPointFieldType that = (GeoPointFieldType) o;
return geohashPrecision == that.geohashPrecision &&
geohashPrefixEnabled == that.geohashPrefixEnabled &&
validateLon == that.validateLon &&
validateLat == that.validateLat &&
normalizeLon == that.normalizeLon &&
normalizeLat == that.normalizeLat &&
coerce == that.coerce &&
ignoreMalformed == that.ignoreMalformed &&
java.util.Objects.equals(geohashFieldType, that.geohashFieldType) &&
java.util.Objects.equals(latFieldType, that.latFieldType) &&
java.util.Objects.equals(lonFieldType, that.lonFieldType);
@ -323,7 +336,8 @@ public class GeoPointFieldMapper extends FieldMapper implements ArrayValueMapper
@Override
public int hashCode() {
return java.util.Objects.hash(super.hashCode(), geohashFieldType, geohashPrecision, geohashPrefixEnabled, latFieldType, lonFieldType, validateLon, validateLat, normalizeLon, normalizeLat);
return java.util.Objects.hash(super.hashCode(), geohashFieldType, geohashPrecision, geohashPrefixEnabled, latFieldType,
lonFieldType, coerce, ignoreMalformed);
}
@Override
@ -347,22 +361,10 @@ public class GeoPointFieldMapper extends FieldMapper implements ArrayValueMapper
if (isGeohashPrefixEnabled() != other.isGeohashPrefixEnabled()) {
conflicts.add("mapper [" + names().fullName() + "] has different geohash_prefix");
}
if (normalizeLat() != other.normalizeLat()) {
conflicts.add("mapper [" + names().fullName() + "] has different normalize_lat");
}
if (normalizeLon() != other.normalizeLon()) {
conflicts.add("mapper [" + names().fullName() + "] has different normalize_lon");
}
if (isLatLonEnabled() &&
if (isLatLonEnabled() && other.isLatLonEnabled() &&
latFieldType().numericPrecisionStep() != other.latFieldType().numericPrecisionStep()) {
conflicts.add("mapper [" + names().fullName() + "] has different precision_step");
}
if (validateLat() != other.validateLat()) {
conflicts.add("mapper [" + names().fullName() + "] has different validate_lat");
}
if (validateLon() != other.validateLon()) {
conflicts.add("mapper [" + names().fullName() + "] has different validate_lon");
}
}
public boolean isGeohashEnabled() {
@ -406,40 +408,22 @@ public class GeoPointFieldMapper extends FieldMapper implements ArrayValueMapper
this.lonFieldType = lonFieldType;
}
public boolean validateLon() {
return validateLon;
public boolean coerce() {
return this.coerce;
}
public void setValidateLon(boolean validateLon) {
public void setCoerce(boolean coerce) {
checkIfFrozen();
this.validateLon = validateLon;
this.coerce = coerce;
}
public boolean validateLat() {
return validateLat;
public boolean ignoreMalformed() {
return this.ignoreMalformed;
}
public void setValidateLat(boolean validateLat) {
public void setIgnoreMalformed(boolean ignoreMalformed) {
checkIfFrozen();
this.validateLat = validateLat;
}
public boolean normalizeLon() {
return normalizeLon;
}
public void setNormalizeLon(boolean normalizeLon) {
checkIfFrozen();
this.normalizeLon = normalizeLon;
}
public boolean normalizeLat() {
return normalizeLat;
}
public void setNormalizeLat(boolean normalizeLat) {
checkIfFrozen();
this.normalizeLat = normalizeLat;
this.ignoreMalformed = ignoreMalformed;
}
@Override
@ -586,7 +570,8 @@ public class GeoPointFieldMapper extends FieldMapper implements ArrayValueMapper
private final StringFieldMapper geohashMapper;
public GeoPointFieldMapper(String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType, Settings indexSettings,
ContentPath.Type pathType, DoubleFieldMapper latMapper, DoubleFieldMapper lonMapper, StringFieldMapper geohashMapper,MultiFields multiFields) {
ContentPath.Type pathType, DoubleFieldMapper latMapper, DoubleFieldMapper lonMapper, StringFieldMapper geohashMapper,
MultiFields multiFields) {
super(simpleName, fieldType, defaultFieldType, indexSettings, multiFields, null);
this.pathType = pathType;
this.latMapper = latMapper;
@ -680,21 +665,22 @@ public class GeoPointFieldMapper extends FieldMapper implements ArrayValueMapper
}
private void parse(ParseContext context, GeoPoint point, String geohash) throws IOException {
if (fieldType().normalizeLat() || fieldType().normalizeLon()) {
GeoUtils.normalizePoint(point, fieldType().normalizeLat(), fieldType().normalizeLon());
}
if (fieldType().validateLat()) {
if (fieldType().ignoreMalformed == false) {
if (point.lat() > 90.0 || point.lat() < -90.0) {
throw new IllegalArgumentException("illegal latitude value [" + point.lat() + "] for " + name());
}
}
if (fieldType().validateLon()) {
if (point.lon() > 180.0 || point.lon() < -180) {
throw new IllegalArgumentException("illegal longitude value [" + point.lon() + "] for " + name());
}
}
if (fieldType().coerce) {
// by setting coerce to false we are assuming all geopoints are already in a valid coordinate system
// thus this extra step can be skipped
// LUCENE WATCH: This will be folded back into Lucene's GeoPointField
GeoUtils.normalizePoint(point, true, true);
}
if (fieldType().indexOptions() != IndexOptions.NONE || fieldType().stored()) {
Field field = new Field(fieldType().names().indexName(), Double.toString(point.lat()) + ',' + Double.toString(point.lon()), fieldType());
context.doc().add(field);
@ -755,33 +741,11 @@ public class GeoPointFieldMapper extends FieldMapper implements ArrayValueMapper
if (fieldType().isLatLonEnabled() && (includeDefaults || fieldType().latFieldType().numericPrecisionStep() != NumericUtils.PRECISION_STEP_DEFAULT)) {
builder.field("precision_step", fieldType().latFieldType().numericPrecisionStep());
}
if (includeDefaults || fieldType().validateLat() != Defaults.VALIDATE_LAT || fieldType().validateLon() != Defaults.VALIDATE_LON) {
if (fieldType().validateLat() && fieldType().validateLon()) {
builder.field("validate", true);
} else if (!fieldType().validateLat() && !fieldType().validateLon()) {
builder.field("validate", false);
} else {
if (includeDefaults || fieldType().validateLat() != Defaults.VALIDATE_LAT) {
builder.field("validate_lat", fieldType().validateLat());
}
if (includeDefaults || fieldType().validateLon() != Defaults.VALIDATE_LON) {
builder.field("validate_lon", fieldType().validateLon());
}
}
if (includeDefaults || fieldType().coerce != Defaults.COERCE) {
builder.field(Names.COERCE, fieldType().coerce);
}
if (includeDefaults || fieldType().normalizeLat() != Defaults.NORMALIZE_LAT || fieldType().normalizeLon() != Defaults.NORMALIZE_LON) {
if (fieldType().normalizeLat() && fieldType().normalizeLon()) {
builder.field("normalize", true);
} else if (!fieldType().normalizeLat() && !fieldType().normalizeLon()) {
builder.field("normalize", false);
} else {
if (includeDefaults || fieldType().normalizeLat() != Defaults.NORMALIZE_LAT) {
builder.field("normalize_lat", fieldType().normalizeLat());
}
if (includeDefaults || fieldType().normalizeLon() != Defaults.NORMALIZE_LON) {
builder.field("normalize_lon", fieldType().normalizeLon());
}
}
if (includeDefaults || fieldType().ignoreMalformed != Defaults.IGNORE_MALFORMED) {
builder.field(Names.IGNORE_MALFORMED, fieldType().ignoreMalformed);
}
}
@ -812,5 +776,4 @@ public class GeoPointFieldMapper extends FieldMapper implements ArrayValueMapper
return new BytesRef(bytes);
}
}
}

View File

@ -41,6 +41,8 @@ public class GeoBoundingBoxQueryBuilder extends QueryBuilder {
private String queryName;
private String type;
private Boolean coerce;
private Boolean ignoreMalformed;
public GeoBoundingBoxQueryBuilder(String name) {
this.name = name;
@ -134,6 +136,16 @@ public class GeoBoundingBoxQueryBuilder extends QueryBuilder {
return this;
}
public GeoBoundingBoxQueryBuilder coerce(boolean coerce) {
this.coerce = coerce;
return this;
}
public GeoBoundingBoxQueryBuilder ignoreMalformed(boolean ignoreMalformed) {
this.ignoreMalformed = ignoreMalformed;
return this;
}
/**
* Sets the type of executing of the geo bounding box. Can be either `memory` or `indexed`. Defaults
* to `memory`.
@ -169,6 +181,12 @@ public class GeoBoundingBoxQueryBuilder extends QueryBuilder {
if (type != null) {
builder.field("type", type);
}
if (coerce != null) {
builder.field("coerce", coerce);
}
if (ignoreMalformed != null) {
builder.field("ignore_malformed", ignoreMalformed);
}
builder.endObject();
}

View File

@ -21,12 +21,12 @@ package org.elasticsearch.index.query;
import org.apache.lucene.search.Query;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.Version;
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.geo.GeoUtils;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.fielddata.IndexGeoPointFieldData;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.geo.GeoPointFieldMapper;
import org.elasticsearch.index.search.geo.InMemoryGeoBoundingBoxQuery;
@ -81,7 +81,9 @@ public class GeoBoundingBoxQueryParser implements QueryParser {
String queryName = null;
String currentFieldName = null;
XContentParser.Token token;
boolean normalize = true;
final boolean indexCreatedBeforeV2_0 = parseContext.indexVersionCreated().before(Version.V_2_0_0);
boolean coerce = false;
boolean ignoreMalformed = false;
GeoPoint sparse = new GeoPoint();
@ -137,10 +139,15 @@ public class GeoBoundingBoxQueryParser implements QueryParser {
} else if (token.isValue()) {
if ("_name".equals(currentFieldName)) {
queryName = parser.text();
} else if ("normalize".equals(currentFieldName)) {
normalize = parser.booleanValue();
} else if ("coerce".equals(currentFieldName) || (indexCreatedBeforeV2_0 && "normalize".equals(currentFieldName))) {
coerce = parser.booleanValue();
if (coerce == true) {
ignoreMalformed = true;
}
} else if ("type".equals(currentFieldName)) {
type = parser.text();
} else if ("ignore_malformed".equals(currentFieldName) && coerce == false) {
ignoreMalformed = parser.booleanValue();
} else {
throw new QueryParsingException(parseContext, "failed to parse [{}] query. unexpected field [{}]", NAME, currentFieldName);
}
@ -150,8 +157,24 @@ public class GeoBoundingBoxQueryParser implements QueryParser {
final GeoPoint topLeft = sparse.reset(top, left); //just keep the object
final GeoPoint bottomRight = new GeoPoint(bottom, right);
if (normalize) {
// Special case: if the difference bettween the left and right is 360 and the right is greater than the left, we are asking for
// validation was not available prior to 2.x, so to support bwc percolation queries we only ignore_malformed on 2.x created indexes
if (!indexCreatedBeforeV2_0 && !ignoreMalformed) {
if (topLeft.lat() > 90.0 || topLeft.lat() < -90.0) {
throw new QueryParsingException(parseContext, "illegal latitude value [{}] for [{}]", topLeft.lat(), NAME);
}
if (topLeft.lon() > 180.0 || topLeft.lon() < -180) {
throw new QueryParsingException(parseContext, "illegal longitude value [{}] for [{}]", topLeft.lon(), NAME);
}
if (bottomRight.lat() > 90.0 || bottomRight.lat() < -90.0) {
throw new QueryParsingException(parseContext, "illegal latitude value [{}] for [{}]", bottomRight.lat(), NAME);
}
if (bottomRight.lon() > 180.0 || bottomRight.lon() < -180) {
throw new QueryParsingException(parseContext, "illegal longitude value [{}] for [{}]", bottomRight.lon(), NAME);
}
}
if (coerce) {
// Special case: if the difference between the left and right is 360 and the right is greater than the left, we are asking for
// the complete longitude range so we need to set longitude to the complete longitude range
boolean completeLonRange = ((right - left) % 360 == 0 && right > left);
GeoUtils.normalizePoint(topLeft, true, !completeLonRange);

View File

@ -44,6 +44,10 @@ public class GeoDistanceQueryBuilder extends QueryBuilder {
private String queryName;
private Boolean coerce;
private Boolean ignoreMalformed;
public GeoDistanceQueryBuilder(String name) {
this.name = name;
}
@ -97,6 +101,16 @@ public class GeoDistanceQueryBuilder extends QueryBuilder {
return this;
}
public GeoDistanceQueryBuilder coerce(boolean coerce) {
this.coerce = coerce;
return this;
}
public GeoDistanceQueryBuilder ignoreMalformed(boolean ignoreMalformed) {
this.ignoreMalformed = ignoreMalformed;
return this;
}
@Override
protected void doXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(GeoDistanceQueryParser.NAME);
@ -115,6 +129,12 @@ public class GeoDistanceQueryBuilder extends QueryBuilder {
if (queryName != null) {
builder.field("_name", queryName);
}
if (coerce != null) {
builder.field("coerce", coerce);
}
if (ignoreMalformed != null) {
builder.field("ignore_malformed", ignoreMalformed);
}
builder.endObject();
}
}

View File

@ -20,6 +20,7 @@
package org.elasticsearch.index.query;
import org.apache.lucene.search.Query;
import org.elasticsearch.Version;
import org.elasticsearch.common.geo.GeoDistance;
import org.elasticsearch.common.geo.GeoHashUtils;
import org.elasticsearch.common.geo.GeoPoint;
@ -28,7 +29,6 @@ import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.unit.DistanceUnit;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.fielddata.IndexGeoPointFieldData;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.geo.GeoPointFieldMapper;
import org.elasticsearch.index.search.geo.GeoDistanceRangeQuery;
@ -71,8 +71,9 @@ public class GeoDistanceQueryParser implements QueryParser {
DistanceUnit unit = DistanceUnit.DEFAULT;
GeoDistance geoDistance = GeoDistance.DEFAULT;
String optimizeBbox = "memory";
boolean normalizeLon = true;
boolean normalizeLat = true;
final boolean indexCreatedBeforeV2_0 = parseContext.indexVersionCreated().before(Version.V_2_0_0);
boolean coerce = false;
boolean ignoreMalformed = false;
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
@ -125,9 +126,13 @@ public class GeoDistanceQueryParser implements QueryParser {
queryName = parser.text();
} else if ("optimize_bbox".equals(currentFieldName) || "optimizeBbox".equals(currentFieldName)) {
optimizeBbox = parser.textOrNull();
} else if ("normalize".equals(currentFieldName)) {
normalizeLat = parser.booleanValue();
normalizeLon = parser.booleanValue();
} else if ("coerce".equals(currentFieldName) || (indexCreatedBeforeV2_0 && "normalize".equals(currentFieldName))) {
coerce = parser.booleanValue();
if (coerce == true) {
ignoreMalformed = true;
}
} else if ("ignore_malformed".equals(currentFieldName) && coerce == false) {
ignoreMalformed = parser.booleanValue();
} else {
point.resetFromString(parser.text());
fieldName = currentFieldName;
@ -135,6 +140,20 @@ public class GeoDistanceQueryParser implements QueryParser {
}
}
// validation was not available prior to 2.x, so to support bwc percolation queries we only ignore_malformed on 2.x created indexes
if (!indexCreatedBeforeV2_0 && !ignoreMalformed) {
if (point.lat() > 90.0 || point.lat() < -90.0) {
throw new QueryParsingException(parseContext, "illegal latitude value [{}] for [{}]", point.lat(), NAME);
}
if (point.lon() > 180.0 || point.lon() < -180) {
throw new QueryParsingException(parseContext, "illegal longitude value [{}] for [{}]", point.lon(), NAME);
}
}
if (coerce) {
GeoUtils.normalizePoint(point, coerce, coerce);
}
if (vDistance == null) {
throw new QueryParsingException(parseContext, "geo_distance requires 'distance' to be specified");
} else if (vDistance instanceof Number) {
@ -144,10 +163,6 @@ public class GeoDistanceQueryParser implements QueryParser {
}
distance = geoDistance.normalize(distance, DistanceUnit.DEFAULT);
if (normalizeLat || normalizeLon) {
GeoUtils.normalizePoint(point, normalizeLat, normalizeLon);
}
MappedFieldType fieldType = parseContext.fieldMapper(fieldName);
if (fieldType == null) {
throw new QueryParsingException(parseContext, "failed to find geo_point field [" + fieldName + "]");

View File

@ -46,6 +46,10 @@ public class GeoDistanceRangeQueryBuilder extends QueryBuilder {
private String optimizeBbox;
private Boolean coerce;
private Boolean ignoreMalformed;
public GeoDistanceRangeQueryBuilder(String name) {
this.name = name;
}
@ -125,6 +129,16 @@ public class GeoDistanceRangeQueryBuilder extends QueryBuilder {
return this;
}
public GeoDistanceRangeQueryBuilder coerce(boolean coerce) {
this.coerce = coerce;
return this;
}
public GeoDistanceRangeQueryBuilder ignoreMalformed(boolean ignoreMalformed) {
this.ignoreMalformed = ignoreMalformed;
return this;
}
/**
* Sets the filter name for the filter that can be used when searching for matched_filters per hit.
*/
@ -154,6 +168,12 @@ public class GeoDistanceRangeQueryBuilder extends QueryBuilder {
if (queryName != null) {
builder.field("_name", queryName);
}
if (coerce != null) {
builder.field("coerce", coerce);
}
if (ignoreMalformed != null) {
builder.field("ignore_malformed", ignoreMalformed);
}
builder.endObject();
}
}

View File

@ -20,6 +20,7 @@
package org.elasticsearch.index.query;
import org.apache.lucene.search.Query;
import org.elasticsearch.Version;
import org.elasticsearch.common.geo.GeoDistance;
import org.elasticsearch.common.geo.GeoHashUtils;
import org.elasticsearch.common.geo.GeoPoint;
@ -28,7 +29,6 @@ import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.unit.DistanceUnit;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.fielddata.IndexGeoPointFieldData;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.geo.GeoPointFieldMapper;
import org.elasticsearch.index.search.geo.GeoDistanceRangeQuery;
@ -73,8 +73,9 @@ public class GeoDistanceRangeQueryParser implements QueryParser {
DistanceUnit unit = DistanceUnit.DEFAULT;
GeoDistance geoDistance = GeoDistance.DEFAULT;
String optimizeBbox = "memory";
boolean normalizeLon = true;
boolean normalizeLat = true;
final boolean indexCreatedBeforeV2_0 = parseContext.indexVersionCreated().before(Version.V_2_0_0);
boolean coerce = false;
boolean ignoreMalformed = false;
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
@ -155,9 +156,13 @@ public class GeoDistanceRangeQueryParser implements QueryParser {
queryName = parser.text();
} else if ("optimize_bbox".equals(currentFieldName) || "optimizeBbox".equals(currentFieldName)) {
optimizeBbox = parser.textOrNull();
} else if ("normalize".equals(currentFieldName)) {
normalizeLat = parser.booleanValue();
normalizeLon = parser.booleanValue();
} else if ("coerce".equals(currentFieldName) || (indexCreatedBeforeV2_0 && "normalize".equals(currentFieldName))) {
coerce = parser.booleanValue();
if (coerce == true) {
ignoreMalformed = true;
}
} else if ("ignore_malformed".equals(currentFieldName) && coerce == false) {
ignoreMalformed = parser.booleanValue();
} else {
point.resetFromString(parser.text());
fieldName = currentFieldName;
@ -165,6 +170,20 @@ public class GeoDistanceRangeQueryParser implements QueryParser {
}
}
// validation was not available prior to 2.x, so to support bwc percolation queries we only ignore_malformed on 2.x created indexes
if (!indexCreatedBeforeV2_0 && !ignoreMalformed) {
if (point.lat() > 90.0 || point.lat() < -90.0) {
throw new QueryParsingException(parseContext, "illegal latitude value [{}] for [{}]", point.lat(), NAME);
}
if (point.lon() > 180.0 || point.lon() < -180) {
throw new QueryParsingException(parseContext, "illegal longitude value [{}] for [{}]", point.lon(), NAME);
}
}
if (coerce) {
GeoUtils.normalizePoint(point, coerce, coerce);
}
Double from = null;
Double to = null;
if (vFrom != null) {
@ -184,10 +203,6 @@ public class GeoDistanceRangeQueryParser implements QueryParser {
to = geoDistance.normalize(to, DistanceUnit.DEFAULT);
}
if (normalizeLat || normalizeLon) {
GeoUtils.normalizePoint(point, normalizeLat, normalizeLon);
}
MappedFieldType fieldType = parseContext.fieldMapper(fieldName);
if (fieldType == null) {
throw new QueryParsingException(parseContext, "failed to find geo_point field [" + fieldName + "]");

View File

@ -38,6 +38,10 @@ public class GeoPolygonQueryBuilder extends QueryBuilder {
private String queryName;
private Boolean coerce;
private Boolean ignoreMalformed;
public GeoPolygonQueryBuilder(String name) {
this.name = name;
}
@ -70,6 +74,16 @@ public class GeoPolygonQueryBuilder extends QueryBuilder {
return this;
}
public GeoPolygonQueryBuilder coerce(boolean coerce) {
this.coerce = coerce;
return this;
}
public GeoPolygonQueryBuilder ignoreMalformed(boolean ignoreMalformed) {
this.ignoreMalformed = ignoreMalformed;
return this;
}
@Override
protected void doXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(GeoPolygonQueryParser.NAME);
@ -85,6 +99,12 @@ public class GeoPolygonQueryBuilder extends QueryBuilder {
if (queryName != null) {
builder.field("_name", queryName);
}
if (coerce != null) {
builder.field("coerce", coerce);
}
if (ignoreMalformed != null) {
builder.field("ignore_malformed", ignoreMalformed);
}
builder.endObject();
}

View File

@ -22,13 +22,13 @@ package org.elasticsearch.index.query;
import com.google.common.collect.Lists;
import org.apache.lucene.search.Query;
import org.elasticsearch.Version;
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.geo.GeoUtils;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentParser.Token;
import org.elasticsearch.index.fielddata.IndexGeoPointFieldData;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.geo.GeoPointFieldMapper;
import org.elasticsearch.index.search.geo.GeoPolygonQuery;
@ -70,9 +70,9 @@ public class GeoPolygonQueryParser implements QueryParser {
List<GeoPoint> shell = Lists.newArrayList();
boolean normalizeLon = true;
boolean normalizeLat = true;
final boolean indexCreatedBeforeV2_0 = parseContext.indexVersionCreated().before(Version.V_2_0_0);
boolean coerce = false;
boolean ignoreMalformed = false;
String queryName = null;
String currentFieldName = null;
XContentParser.Token token;
@ -108,9 +108,13 @@ public class GeoPolygonQueryParser implements QueryParser {
} else if (token.isValue()) {
if ("_name".equals(currentFieldName)) {
queryName = parser.text();
} else if ("normalize".equals(currentFieldName)) {
normalizeLat = parser.booleanValue();
normalizeLon = parser.booleanValue();
} else if ("coerce".equals(currentFieldName) || (indexCreatedBeforeV2_0 && "normalize".equals(currentFieldName))) {
coerce = parser.booleanValue();
if (coerce == true) {
ignoreMalformed = true;
}
} else if ("ignore_malformed".equals(currentFieldName) && coerce == false) {
ignoreMalformed = parser.booleanValue();
} else {
throw new QueryParsingException(parseContext, "[geo_polygon] query does not support [" + currentFieldName + "]");
}
@ -134,9 +138,21 @@ public class GeoPolygonQueryParser implements QueryParser {
}
}
if (normalizeLat || normalizeLon) {
// validation was not available prior to 2.x, so to support bwc percolation queries we only ignore_malformed on 2.x created indexes
if (!indexCreatedBeforeV2_0 && !ignoreMalformed) {
for (GeoPoint point : shell) {
GeoUtils.normalizePoint(point, normalizeLat, normalizeLon);
if (point.lat() > 90.0 || point.lat() < -90.0) {
throw new QueryParsingException(parseContext, "illegal latitude value [{}] for [{}]", point.lat(), NAME);
}
if (point.lon() > 180.0 || point.lon() < -180) {
throw new QueryParsingException(parseContext, "illegal longitude value [{}] for [{}]", point.lon(), NAME);
}
}
}
if (coerce) {
for (GeoPoint point : shell) {
GeoUtils.normalizePoint(point, coerce, coerce);
}
}

View File

@ -47,6 +47,8 @@ public class GeoDistanceSortBuilder extends SortBuilder {
private String sortMode;
private QueryBuilder nestedFilter;
private String nestedPath;
private Boolean coerce;
private Boolean ignoreMalformed;
/**
* Constructs a new distance based sort on a geo point like field.
@ -146,6 +148,16 @@ public class GeoDistanceSortBuilder extends SortBuilder {
return this;
}
public GeoDistanceSortBuilder coerce(boolean coerce) {
this.coerce = coerce;
return this;
}
public GeoDistanceSortBuilder ignoreMalformed(boolean ignoreMalformed) {
this.ignoreMalformed = ignoreMalformed;
return this;
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject("_geo_distance");
@ -181,6 +193,12 @@ public class GeoDistanceSortBuilder extends SortBuilder {
if (nestedFilter != null) {
builder.field("nested_filter", nestedFilter, params);
}
if (coerce != null) {
builder.field("coerce", coerce);
}
if (ignoreMalformed != null) {
builder.field("ignore_malformed", ignoreMalformed);
}
builder.endObject();
return builder;

View File

@ -29,6 +29,7 @@ import org.apache.lucene.search.SortField;
import org.apache.lucene.search.join.BitDocIdSetFilter;
import org.apache.lucene.util.BitSet;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.Version;
import org.elasticsearch.common.geo.GeoDistance;
import org.elasticsearch.common.geo.GeoDistance.FixedSourceDistance;
import org.elasticsearch.common.geo.GeoPoint;
@ -42,7 +43,6 @@ import org.elasticsearch.index.fielddata.IndexGeoPointFieldData;
import org.elasticsearch.index.fielddata.MultiGeoPointValues;
import org.elasticsearch.index.fielddata.NumericDoubleValues;
import org.elasticsearch.index.fielddata.SortedNumericDoubleValues;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.object.ObjectMapper;
import org.elasticsearch.index.query.support.NestedInnerQueryParseSupport;
@ -73,8 +73,9 @@ public class GeoDistanceSortParser implements SortParser {
MultiValueMode sortMode = null;
NestedInnerQueryParseSupport nestedHelper = null;
boolean normalizeLon = true;
boolean normalizeLat = true;
final boolean indexCreatedBeforeV2_0 = context.queryParserService().getIndexCreatedVersion().before(Version.V_2_0_0);
boolean coerce = false;
boolean ignoreMalformed = false;
XContentParser.Token token;
String currentName = parser.currentName();
@ -107,9 +108,13 @@ public class GeoDistanceSortParser implements SortParser {
unit = DistanceUnit.fromString(parser.text());
} else if (currentName.equals("distance_type") || currentName.equals("distanceType")) {
geoDistance = GeoDistance.fromString(parser.text());
} else if ("normalize".equals(currentName)) {
normalizeLat = parser.booleanValue();
normalizeLon = parser.booleanValue();
} else if ("coerce".equals(currentName) || (indexCreatedBeforeV2_0 && "normalize".equals(currentName))) {
coerce = parser.booleanValue();
if (coerce == true) {
ignoreMalformed = true;
}
} else if ("ignore_malformed".equals(currentName) && coerce == false) {
ignoreMalformed = parser.booleanValue();
} else if ("sort_mode".equals(currentName) || "sortMode".equals(currentName) || "mode".equals(currentName)) {
sortMode = MultiValueMode.fromString(parser.text());
} else if ("nested_path".equals(currentName) || "nestedPath".equals(currentName)) {
@ -126,9 +131,21 @@ public class GeoDistanceSortParser implements SortParser {
}
}
if (normalizeLat || normalizeLon) {
// validation was not available prior to 2.x, so to support bwc percolation queries we only ignore_malformed on 2.x created indexes
if (!indexCreatedBeforeV2_0 && !ignoreMalformed) {
for (GeoPoint point : geoPoints) {
GeoUtils.normalizePoint(point, normalizeLat, normalizeLon);
if (point.lat() > 90.0 || point.lat() < -90.0) {
throw new ElasticsearchParseException("illegal latitude value [{}] for [GeoDistanceSort]", point.lat());
}
if (point.lon() > 180.0 || point.lon() < -180) {
throw new ElasticsearchParseException("illegal longitude value [{}] for [GeoDistanceSort]", point.lon());
}
}
}
if (coerce) {
for (GeoPoint point : geoPoints) {
GeoUtils.normalizePoint(point, coerce, coerce);
}
}

View File

@ -18,7 +18,10 @@
*/
package org.elasticsearch.index.mapper.geo;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.geo.GeoHashUtils;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.DocumentMapperParser;
@ -26,6 +29,7 @@ import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MergeResult;
import org.elasticsearch.index.mapper.ParsedDocument;
import org.elasticsearch.test.ESSingleNodeTestCase;
import org.elasticsearch.test.VersionUtils;
import org.junit.Test;
import java.util.ArrayList;
@ -138,7 +142,8 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
public void testNormalizeLatLonValuesDefault() throws Exception {
// default to normalize
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("point").field("type", "geo_point").endObject().endObject()
.startObject("properties").startObject("point").field("type", "geo_point").field("coerce", true)
.field("ignore_malformed", true).endObject().endObject()
.endObject().endObject().string();
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
@ -171,7 +176,8 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
@Test
public void testValidateLatLonValues() throws Exception {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).field("normalize", false).field("validate", true).endObject().endObject()
.startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).field("coerce", false)
.field("ignore_malformed", false).endObject().endObject()
.endObject().endObject().string();
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
@ -231,7 +237,8 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
@Test
public void testNoValidateLatLonValues() throws Exception {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).field("normalize", false).field("validate", false).endObject().endObject()
.startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).field("coerce", false)
.field("ignore_malformed", true).endObject().endObject()
.endObject().endObject().string();
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
@ -472,30 +479,161 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
assertThat(doc.rootDoc().getFields("point")[1].stringValue(), equalTo("1.4,1.5"));
}
/**
 * Test that expected exceptions are thrown when creating a new index with deprecated options.
 * The removed geo_point options (validate, validate_lat, validate_lon, normalize,
 * normalize_lat, normalize_lon) must be rejected as unsupported parameters on
 * newly created (2.x+) indexes; their replacements are ignore_malformed and coerce.
 */
@Test
public void testOptionDeprecation() throws Exception {
    DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
    // test deprecation exceptions on newly created indexes
    // deprecated "validate" must be rejected with an unsupported-parameter error
    try {
        String validateMapping = XContentFactory.jsonBuilder().startObject().startObject("type")
                .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).field("geohash", true)
                .field("validate", true).endObject().endObject()
                .endObject().endObject().string();
        parser.parse(validateMapping);
        fail("process completed successfully when " + MapperParsingException.class.getName() + " expected");
    } catch (MapperParsingException e) {
        assertEquals(e.getMessage(), "Mapping definition for [point] has unsupported parameters:  [validate : true]");
    }

    // deprecated "validate_lat" must be rejected
    try {
        String validateMapping = XContentFactory.jsonBuilder().startObject().startObject("type")
                .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).field("geohash", true)
                .field("validate_lat", true).endObject().endObject()
                .endObject().endObject().string();
        parser.parse(validateMapping);
        fail("process completed successfully when " + MapperParsingException.class.getName() + " expected");
    } catch (MapperParsingException e) {
        assertEquals(e.getMessage(), "Mapping definition for [point] has unsupported parameters:  [validate_lat : true]");
    }

    // deprecated "validate_lon" must be rejected
    try {
        String validateMapping = XContentFactory.jsonBuilder().startObject().startObject("type")
                .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).field("geohash", true)
                .field("validate_lon", true).endObject().endObject()
                .endObject().endObject().string();
        parser.parse(validateMapping);
        fail("process completed successfully when " + MapperParsingException.class.getName() + " expected");
    } catch (MapperParsingException e) {
        assertEquals(e.getMessage(), "Mapping definition for [point] has unsupported parameters:  [validate_lon : true]");
    }

    // test deprecated normalize
    // deprecated "normalize" must be rejected
    try {
        String normalizeMapping = XContentFactory.jsonBuilder().startObject().startObject("type")
                .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).field("geohash", true)
                .field("normalize", true).endObject().endObject()
                .endObject().endObject().string();
        parser.parse(normalizeMapping);
        fail("process completed successfully when " + MapperParsingException.class.getName() + " expected");
    } catch (MapperParsingException e) {
        assertEquals(e.getMessage(), "Mapping definition for [point] has unsupported parameters:  [normalize : true]");
    }

    // deprecated "normalize_lat" must be rejected
    try {
        String normalizeMapping = XContentFactory.jsonBuilder().startObject().startObject("type")
                .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).field("geohash", true)
                .field("normalize_lat", true).endObject().endObject()
                .endObject().endObject().string();
        parser.parse(normalizeMapping);
        fail("process completed successfully when " + MapperParsingException.class.getName() + " expected");
    } catch (MapperParsingException e) {
        assertEquals(e.getMessage(), "Mapping definition for [point] has unsupported parameters:  [normalize_lat : true]");
    }

    // deprecated "normalize_lon" must be rejected
    try {
        String normalizeMapping = XContentFactory.jsonBuilder().startObject().startObject("type")
                .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).field("geohash", true)
                .field("normalize_lon", true).endObject().endObject()
                .endObject().endObject().string();
        parser.parse(normalizeMapping);
        fail("process completed successfully when " + MapperParsingException.class.getName() + " expected");
    } catch (MapperParsingException e) {
        assertEquals(e.getMessage(), "Mapping definition for [point] has unsupported parameters:  [normalize_lon : true]");
    }
}
/**
 * Test backward compatibility: on indexes created before 2.0 the deprecated
 * geo_point options are still accepted and are translated to their replacements —
 * validate/validate_lat/validate_lon=false maps to ignore_malformed=true, and
 * normalize/normalize_lat/normalize_lon=true maps to coerce=true — as verified
 * by inspecting the serialized mapping.
 */
@Test
public void testBackwardCompatibleOptions() throws Exception {
    // backward compatibility testing
    // create the index with a random pre-2.0 version so the legacy options are honored
    Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, VersionUtils.randomVersionBetween(random(), Version.V_1_0_0,
            Version.V_1_7_1)).build();

    // validate
    // validate=false on a 1.x index is translated to ignore_malformed=true
    DocumentMapperParser parser = createIndex("test", settings).mapperService().documentMapperParser();
    String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
            .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).field("geohash", true)
            .field("validate", false).endObject().endObject()
            .endObject().endObject().string();
    parser.parse(mapping);
    assertThat(parser.parse(mapping).mapping().toString(), containsString("\"ignore_malformed\":true"));

    // validate_lat=false is likewise translated to ignore_malformed=true
    mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
            .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).field("geohash", true)
            .field("validate_lat", false).endObject().endObject()
            .endObject().endObject().string();
    parser.parse(mapping);
    assertThat(parser.parse(mapping).mapping().toString(), containsString("\"ignore_malformed\":true"));

    // validate_lon=false is likewise translated to ignore_malformed=true
    mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
            .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).field("geohash", true)
            .field("validate_lon", false).endObject().endObject()
            .endObject().endObject().string();
    parser.parse(mapping);
    assertThat(parser.parse(mapping).mapping().toString(), containsString("\"ignore_malformed\":true"));

    // normalize
    // normalize=true on a 1.x index is translated to coerce=true
    mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
            .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).field("geohash", true)
            .field("normalize", true).endObject().endObject()
            .endObject().endObject().string();
    parser.parse(mapping);
    assertThat(parser.parse(mapping).mapping().toString(), containsString("\"coerce\":true"));

    // normalize_lat=true is likewise translated to coerce=true
    mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
            .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).field("geohash", true)
            .field("normalize_lat", true).endObject().endObject()
            .endObject().endObject().string();
    parser.parse(mapping);
    assertThat(parser.parse(mapping).mapping().toString(), containsString("\"coerce\":true"));

    // normalize_lon=true is likewise translated to coerce=true
    mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
            .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).field("geohash", true)
            .field("normalize_lon", true).endObject().endObject()
            .endObject().endObject().string();
    parser.parse(mapping);
    assertThat(parser.parse(mapping).mapping().toString(), containsString("\"coerce\":true"));
}
@Test
public void testGeoPointMapperMerge() throws Exception {
String stage1Mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).field("geohash", true)
.field("validate", true).endObject().endObject()
.field("ignore_malformed", true).endObject().endObject()
.endObject().endObject().string();
DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
DocumentMapper stage1 = parser.parse(stage1Mapping);
String stage2Mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).field("geohash", true)
.field("validate", false).endObject().endObject()
.startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", false).field("geohash", true)
.field("ignore_malformed", false).endObject().endObject()
.endObject().endObject().string();
DocumentMapper stage2 = parser.parse(stage2Mapping);
MergeResult mergeResult = stage1.merge(stage2.mapping(), false, false);
assertThat(mergeResult.hasConflicts(), equalTo(true));
assertThat(mergeResult.buildConflicts().length, equalTo(2));
assertThat(mergeResult.buildConflicts().length, equalTo(1));
// todo better way of checking conflict?
assertThat("mapper [point] has different validate_lat", isIn(new ArrayList<>(Arrays.asList(mergeResult.buildConflicts()))));
assertThat("mapper [point] has different lat_lon", isIn(new ArrayList<>(Arrays.asList(mergeResult.buildConflicts()))));
// correct mapping and ensure no failures
stage2Mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).field("geohash", true)
.field("validate", true).field("normalize", true).endObject().endObject()
.field("ignore_malformed", true).endObject().endObject()
.endObject().endObject().string();
stage2 = parser.parse(stage2Mapping);
mergeResult = stage1.merge(stage2.mapping(), false, false);

View File

@ -31,7 +31,7 @@ public class GeoPointFieldTypeTests extends FieldTypeTestCase {
@Override
protected int numProperties() {
return 6 + super.numProperties();
return 4 + super.numProperties();
}
@Override
@ -40,11 +40,9 @@ public class GeoPointFieldTypeTests extends FieldTypeTestCase {
switch (propNum) {
case 0: gft.setGeohashEnabled(new StringFieldMapper.StringFieldType(), 1, true); break;
case 1: gft.setLatLonEnabled(new DoubleFieldMapper.DoubleFieldType(), new DoubleFieldMapper.DoubleFieldType()); break;
case 2: gft.setValidateLon(!gft.validateLon()); break;
case 3: gft.setValidateLat(!gft.validateLat()); break;
case 4: gft.setNormalizeLon(!gft.normalizeLon()); break;
case 5: gft.setNormalizeLat(!gft.normalizeLat()); break;
default: super.modifyProperty(ft, propNum - 6);
case 2: gft.setIgnoreMalformed(!gft.ignoreMalformed()); break;
case 3: gft.setCoerce(!gft.coerce()); break;
default: super.modifyProperty(ft, propNum - 4);
}
}
}

View File

@ -339,34 +339,28 @@ public class GeoUtilsTests extends ESTestCase {
@Test
public void testNormalizePoint_outsideNormalRange_withOptions() {
for (int i = 0; i < 100; i++) {
boolean normLat = randomBoolean();
boolean normLon = randomBoolean();
boolean normalize = randomBoolean();
double normalisedLat = (randomDouble() * 180.0) - 90.0;
double normalisedLon = (randomDouble() * 360.0) - 180.0;
int shiftLat = randomIntBetween(1, 10000);
int shiftLon = randomIntBetween(1, 10000);
double testLat = normalisedLat + (180.0 * shiftLat);
double testLon = normalisedLon + (360.0 * shiftLon);
int shift = randomIntBetween(1, 10000);
double testLat = normalisedLat + (180.0 * shift);
double testLon = normalisedLon + (360.0 * shift);
double expectedLat;
double expectedLon;
if (normLat) {
expectedLat = normalisedLat * (shiftLat % 2 == 0 ? 1 : -1);
} else {
expectedLat = testLat;
}
if (normLon) {
expectedLon = normalisedLon + ((normLat && shiftLat % 2 == 1) ? 180 : 0);
if (normalize) {
expectedLat = normalisedLat * (shift % 2 == 0 ? 1 : -1);
expectedLon = normalisedLon + ((shift % 2 == 1) ? 180 : 0);
if (expectedLon > 180.0) {
expectedLon -= 360;
}
} else {
double shiftValue = normalisedLon > 0 ? -180 : 180;
expectedLon = testLon + ((normLat && shiftLat % 2 == 1) ? shiftValue : 0);
expectedLat = testLat;
expectedLon = testLon;
}
GeoPoint testPoint = new GeoPoint(testLat, testLon);
GeoPoint expectedPoint = new GeoPoint(expectedLat, expectedLon);
GeoUtils.normalizePoint(testPoint, normLat, normLon);
GeoUtils.normalizePoint(testPoint, normalize, normalize);
assertThat("Unexpected Latitude", testPoint.lat(), closeTo(expectedPoint.lat(), MAX_ACCEPTABLE_ERROR));
assertThat("Unexpected Longitude", testPoint.lon(), closeTo(expectedPoint.lon(), MAX_ACCEPTABLE_ERROR));
}

View File

@ -574,7 +574,8 @@ public class DecayFunctionScoreIT extends ESIntegTestCase {
"type",
jsonBuilder().startObject().startObject("type").startObject("properties").startObject("test").field("type", "string")
.endObject().startObject("date").field("type", "date").endObject().startObject("num").field("type", "double")
.endObject().startObject("geo").field("type", "geo_point").endObject().endObject().endObject().endObject()));
.endObject().startObject("geo").field("type", "geo_point").field("coerce", true).endObject().endObject()
.endObject().endObject()));
ensureYellow();
int numDocs = 200;
List<IndexRequestBuilder> indexBuilders = new ArrayList<>();

View File

@ -289,50 +289,50 @@ public class GeoBoundingBoxIT extends ESIntegTestCase {
SearchResponse searchResponse = client().prepareSearch()
.setQuery(
filteredQuery(matchAllQuery(),
geoBoundingBoxQuery("location").topLeft(50, -180).bottomRight(-50, 180))
geoBoundingBoxQuery("location").coerce(true).topLeft(50, -180).bottomRight(-50, 180))
).execute().actionGet();
assertThat(searchResponse.getHits().totalHits(), equalTo(1l));
searchResponse = client().prepareSearch()
.setQuery(
filteredQuery(matchAllQuery(),
geoBoundingBoxQuery("location").topLeft(50, -180).bottomRight(-50, 180).type("indexed"))
geoBoundingBoxQuery("location").coerce(true).topLeft(50, -180).bottomRight(-50, 180).type("indexed"))
).execute().actionGet();
assertThat(searchResponse.getHits().totalHits(), equalTo(1l));
searchResponse = client().prepareSearch()
.setQuery(
filteredQuery(matchAllQuery(),
geoBoundingBoxQuery("location").topLeft(90, -180).bottomRight(-90, 180))
geoBoundingBoxQuery("location").coerce(true).topLeft(90, -180).bottomRight(-90, 180))
).execute().actionGet();
assertThat(searchResponse.getHits().totalHits(), equalTo(2l));
searchResponse = client().prepareSearch()
.setQuery(
filteredQuery(matchAllQuery(),
geoBoundingBoxQuery("location").topLeft(90, -180).bottomRight(-90, 180).type("indexed"))
geoBoundingBoxQuery("location").coerce(true).topLeft(90, -180).bottomRight(-90, 180).type("indexed"))
).execute().actionGet();
assertThat(searchResponse.getHits().totalHits(), equalTo(2l));
searchResponse = client().prepareSearch()
.setQuery(
filteredQuery(matchAllQuery(),
geoBoundingBoxQuery("location").topLeft(50, 0).bottomRight(-50, 360))
geoBoundingBoxQuery("location").coerce(true).topLeft(50, 0).bottomRight(-50, 360))
).execute().actionGet();
assertThat(searchResponse.getHits().totalHits(), equalTo(1l));
searchResponse = client().prepareSearch()
.setQuery(
filteredQuery(matchAllQuery(),
geoBoundingBoxQuery("location").topLeft(50, 0).bottomRight(-50, 360).type("indexed"))
geoBoundingBoxQuery("location").coerce(true).topLeft(50, 0).bottomRight(-50, 360).type("indexed"))
).execute().actionGet();
assertThat(searchResponse.getHits().totalHits(), equalTo(1l));
searchResponse = client().prepareSearch()
.setQuery(
filteredQuery(matchAllQuery(),
geoBoundingBoxQuery("location").topLeft(90, 0).bottomRight(-90, 360))
geoBoundingBoxQuery("location").coerce(true).topLeft(90, 0).bottomRight(-90, 360))
).execute().actionGet();
assertThat(searchResponse.getHits().totalHits(), equalTo(2l));
searchResponse = client().prepareSearch()
.setQuery(
filteredQuery(matchAllQuery(),
geoBoundingBoxQuery("location").topLeft(90, 0).bottomRight(-90, 360).type("indexed"))
geoBoundingBoxQuery("location").coerce(true).topLeft(90, 0).bottomRight(-90, 360).type("indexed"))
).execute().actionGet();
assertThat(searchResponse.getHits().totalHits(), equalTo(2l));
}

View File

@ -221,8 +221,8 @@ public class GeoDistanceIT extends ESIntegTestCase {
public void testDistanceSortingMVFields() throws Exception {
XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type1")
.startObject("properties").startObject("locations").field("type", "geo_point").field("lat_lon", true)
.startObject("fielddata").field("format", randomNumericFieldDataFormat()).endObject().endObject().endObject()
.endObject().endObject();
.field("ignore_malformed", true).field("coerce", true).startObject("fielddata")
.field("format", randomNumericFieldDataFormat()).endObject().endObject().endObject().endObject().endObject();
assertAcked(prepareCreate("test")
.addMapping("type1", xContentBuilder));
ensureGreen();
@ -233,6 +233,11 @@ public class GeoDistanceIT extends ESIntegTestCase {
.endObject()).execute().actionGet();
client().prepareIndex("test", "type1", "2").setSource(jsonBuilder().startObject()
.field("names", "New York 2")
.startObject("locations").field("lat", 400.7143528).field("lon", 285.9990269).endObject()
.endObject()).execute().actionGet();
client().prepareIndex("test", "type1", "3").setSource(jsonBuilder().startObject()
.field("names", "Times Square", "Tribeca")
.startArray("locations")
// to NY: 5.286 km
@ -242,7 +247,7 @@ public class GeoDistanceIT extends ESIntegTestCase {
.endArray()
.endObject()).execute().actionGet();
client().prepareIndex("test", "type1", "3").setSource(jsonBuilder().startObject()
client().prepareIndex("test", "type1", "4").setSource(jsonBuilder().startObject()
.field("names", "Wall Street", "Soho")
.startArray("locations")
// to NY: 1.055 km
@ -253,7 +258,7 @@ public class GeoDistanceIT extends ESIntegTestCase {
.endObject()).execute().actionGet();
client().prepareIndex("test", "type1", "4").setSource(jsonBuilder().startObject()
client().prepareIndex("test", "type1", "5").setSource(jsonBuilder().startObject()
.field("names", "Greenwich Village", "Brooklyn")
.startArray("locations")
// to NY: 2.029 km
@ -270,70 +275,76 @@ public class GeoDistanceIT extends ESIntegTestCase {
.addSort(SortBuilders.geoDistanceSort("locations").point(40.7143528, -74.0059731).order(SortOrder.ASC))
.execute().actionGet();
assertHitCount(searchResponse, 4);
assertOrderedSearchHits(searchResponse, "1", "2", "3", "4");
assertHitCount(searchResponse, 5);
assertOrderedSearchHits(searchResponse, "1", "2", "3", "4", "5");
assertThat(((Number) searchResponse.getHits().getAt(0).sortValues()[0]).doubleValue(), closeTo(0d, 10d));
assertThat(((Number) searchResponse.getHits().getAt(1).sortValues()[0]).doubleValue(), closeTo(462.1d, 10d));
assertThat(((Number) searchResponse.getHits().getAt(2).sortValues()[0]).doubleValue(), closeTo(1055.0d, 10d));
assertThat(((Number) searchResponse.getHits().getAt(3).sortValues()[0]).doubleValue(), closeTo(2029.0d, 10d));
assertThat(((Number) searchResponse.getHits().getAt(1).sortValues()[0]).doubleValue(), closeTo(421.2d, 10d));
assertThat(((Number) searchResponse.getHits().getAt(2).sortValues()[0]).doubleValue(), closeTo(462.1d, 10d));
assertThat(((Number) searchResponse.getHits().getAt(3).sortValues()[0]).doubleValue(), closeTo(1055.0d, 10d));
assertThat(((Number) searchResponse.getHits().getAt(4).sortValues()[0]).doubleValue(), closeTo(2029.0d, 10d));
// Order: Asc, Mode: max
searchResponse = client().prepareSearch("test").setQuery(matchAllQuery())
.addSort(SortBuilders.geoDistanceSort("locations").point(40.7143528, -74.0059731).order(SortOrder.ASC).sortMode("max"))
.execute().actionGet();
assertHitCount(searchResponse, 4);
assertOrderedSearchHits(searchResponse, "1", "3", "2", "4");
assertHitCount(searchResponse, 5);
assertOrderedSearchHits(searchResponse, "1", "2", "4", "3", "5");
assertThat(((Number) searchResponse.getHits().getAt(0).sortValues()[0]).doubleValue(), closeTo(0d, 10d));
assertThat(((Number) searchResponse.getHits().getAt(1).sortValues()[0]).doubleValue(), closeTo(1258.0d, 10d));
assertThat(((Number) searchResponse.getHits().getAt(2).sortValues()[0]).doubleValue(), closeTo(5286.0d, 10d));
assertThat(((Number) searchResponse.getHits().getAt(3).sortValues()[0]).doubleValue(), closeTo(8572.0d, 10d));
assertThat(((Number) searchResponse.getHits().getAt(1).sortValues()[0]).doubleValue(), closeTo(421.2d, 10d));
assertThat(((Number) searchResponse.getHits().getAt(2).sortValues()[0]).doubleValue(), closeTo(1258.0d, 10d));
assertThat(((Number) searchResponse.getHits().getAt(3).sortValues()[0]).doubleValue(), closeTo(5286.0d, 10d));
assertThat(((Number) searchResponse.getHits().getAt(4).sortValues()[0]).doubleValue(), closeTo(8572.0d, 10d));
// Order: Desc
searchResponse = client().prepareSearch("test").setQuery(matchAllQuery())
.addSort(SortBuilders.geoDistanceSort("locations").point(40.7143528, -74.0059731).order(SortOrder.DESC))
.execute().actionGet();
assertHitCount(searchResponse, 4);
assertOrderedSearchHits(searchResponse, "4", "2", "3", "1");
assertHitCount(searchResponse, 5);
assertOrderedSearchHits(searchResponse, "5", "3", "4", "2", "1");
assertThat(((Number) searchResponse.getHits().getAt(0).sortValues()[0]).doubleValue(), closeTo(8572.0d, 10d));
assertThat(((Number) searchResponse.getHits().getAt(1).sortValues()[0]).doubleValue(), closeTo(5286.0d, 10d));
assertThat(((Number) searchResponse.getHits().getAt(2).sortValues()[0]).doubleValue(), closeTo(1258.0d, 10d));
assertThat(((Number) searchResponse.getHits().getAt(3).sortValues()[0]).doubleValue(), closeTo(0d, 10d));
assertThat(((Number) searchResponse.getHits().getAt(3).sortValues()[0]).doubleValue(), closeTo(421.2d, 10d));
assertThat(((Number) searchResponse.getHits().getAt(4).sortValues()[0]).doubleValue(), closeTo(0d, 10d));
// Order: Desc, Mode: min
searchResponse = client().prepareSearch("test").setQuery(matchAllQuery())
.addSort(SortBuilders.geoDistanceSort("locations").point(40.7143528, -74.0059731).order(SortOrder.DESC).sortMode("min"))
.execute().actionGet();
assertHitCount(searchResponse, 4);
assertOrderedSearchHits(searchResponse, "4", "3", "2", "1");
assertHitCount(searchResponse, 5);
assertOrderedSearchHits(searchResponse, "5", "4", "3", "2", "1");
assertThat(((Number) searchResponse.getHits().getAt(0).sortValues()[0]).doubleValue(), closeTo(2029.0d, 10d));
assertThat(((Number) searchResponse.getHits().getAt(1).sortValues()[0]).doubleValue(), closeTo(1055.0d, 10d));
assertThat(((Number) searchResponse.getHits().getAt(2).sortValues()[0]).doubleValue(), closeTo(462.1d, 10d));
assertThat(((Number) searchResponse.getHits().getAt(3).sortValues()[0]).doubleValue(), closeTo(0d, 10d));
assertThat(((Number) searchResponse.getHits().getAt(3).sortValues()[0]).doubleValue(), closeTo(421.2d, 10d));
assertThat(((Number) searchResponse.getHits().getAt(4).sortValues()[0]).doubleValue(), closeTo(0d, 10d));
searchResponse = client().prepareSearch("test").setQuery(matchAllQuery())
.addSort(SortBuilders.geoDistanceSort("locations").point(40.7143528, -74.0059731).sortMode("avg").order(SortOrder.ASC))
.execute().actionGet();
assertHitCount(searchResponse, 4);
assertOrderedSearchHits(searchResponse, "1", "3", "2", "4");
assertHitCount(searchResponse, 5);
assertOrderedSearchHits(searchResponse, "1", "2", "4", "3", "5");
assertThat(((Number) searchResponse.getHits().getAt(0).sortValues()[0]).doubleValue(), closeTo(0d, 10d));
assertThat(((Number) searchResponse.getHits().getAt(1).sortValues()[0]).doubleValue(), closeTo(1157d, 10d));
assertThat(((Number) searchResponse.getHits().getAt(2).sortValues()[0]).doubleValue(), closeTo(2874d, 10d));
assertThat(((Number) searchResponse.getHits().getAt(3).sortValues()[0]).doubleValue(), closeTo(5301d, 10d));
assertThat(((Number) searchResponse.getHits().getAt(1).sortValues()[0]).doubleValue(), closeTo(421.2d, 10d));
assertThat(((Number) searchResponse.getHits().getAt(2).sortValues()[0]).doubleValue(), closeTo(1157d, 10d));
assertThat(((Number) searchResponse.getHits().getAt(3).sortValues()[0]).doubleValue(), closeTo(2874d, 10d));
assertThat(((Number) searchResponse.getHits().getAt(4).sortValues()[0]).doubleValue(), closeTo(5301d, 10d));
searchResponse = client().prepareSearch("test").setQuery(matchAllQuery())
.addSort(SortBuilders.geoDistanceSort("locations").point(40.7143528, -74.0059731).sortMode("avg").order(SortOrder.DESC))
.execute().actionGet();
assertHitCount(searchResponse, 4);
assertOrderedSearchHits(searchResponse, "4", "2", "3", "1");
assertHitCount(searchResponse, 5);
assertOrderedSearchHits(searchResponse, "5", "3", "4", "2", "1");
assertThat(((Number) searchResponse.getHits().getAt(0).sortValues()[0]).doubleValue(), closeTo(5301.0d, 10d));
assertThat(((Number) searchResponse.getHits().getAt(1).sortValues()[0]).doubleValue(), closeTo(2874.0d, 10d));
assertThat(((Number) searchResponse.getHits().getAt(2).sortValues()[0]).doubleValue(), closeTo(1157.0d, 10d));
assertThat(((Number) searchResponse.getHits().getAt(3).sortValues()[0]).doubleValue(), closeTo(0d, 10d));
assertThat(((Number) searchResponse.getHits().getAt(3).sortValues()[0]).doubleValue(), closeTo(421.2d, 10d));
assertThat(((Number) searchResponse.getHits().getAt(4).sortValues()[0]).doubleValue(), closeTo(0d, 10d));
assertFailures(client().prepareSearch("test").setQuery(matchAllQuery())
.addSort(SortBuilders.geoDistanceSort("locations").point(40.7143528, -74.0059731).sortMode("sum")),

View File

@ -44,6 +44,25 @@ Then the following simple query can be executed with a
}
--------------------------------------------------
[float]
==== Query Options
[cols="<,<",options="header",]
|=======================================================================
|Option |Description
|`_name` |Optional name field to identify the filter
|`coerce` |Set to `true` to normalize longitude and latitude values to a
standard -180:180 / -90:90 coordinate system. (default is `false`).
|`ignore_malformed` |Set to `true` to
accept geo points with invalid latitude or longitude (default is `false`).
|`type` |Set to one of `indexed` or `memory` to define whether this filter will
be executed in memory or indexed. See <<Type,Type>> below for further details.
Default is `memory`.
|=======================================================================
[float]
==== Accepted Formats

View File

@ -158,6 +158,19 @@ The following are options allowed on the filter:
sure the `geo_point` type index lat lon in this case), or `none` which
disables bounding box optimization.
`_name`::
Optional name field to identify the query
`coerce`::
Set to `true` to normalize longitude and latitude values to a standard -180:180 / -90:90
coordinate system. (default is `false`).
`ignore_malformed`::
Set to `true` to accept geo points with invalid latitude or
longitude (default is `false`).
[float]
==== geo_point Type

View File

@ -24,7 +24,7 @@ Filters documents that exists within a range from a specific point:
}
--------------------------------------------------
Supports the same point location parameter as the
Supports the same point location parameter and query options as the
<<query-dsl-geo-distance-query,geo_distance>>
filter. It also supports the common parameters for range (lt, lte, gt,
gte, from, to, include_upper and include_lower).

View File

@ -26,6 +26,20 @@ points. Here is an example:
}
--------------------------------------------------
[float]
==== Query Options
[cols="<,<",options="header",]
|=======================================================================
|Option |Description
|`_name` |Optional name field to identify the filter
|`coerce` |Set to `true` to normalize longitude and latitude values to a
standard -180:180 / -90:90 coordinate system. (default is `false`).
|`ignore_malformed` |Set to `true` to accept geo points with invalid latitude or
longitude (default is `false`).
[float]
==== Allowed Formats