Refactor geo_point validate* and normalize* options to ignore_malformed and coerce*
For consistency, the geo_point mapper's validate and normalize options are converted to ignore_malformed and coerce.
This commit is contained in: parent 68307aa9f3, commit b2ba3847f7
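As a rough sketch of what the rename means for a mapping (the builder calls below are copied from the tests updated in this commit; the field name "point" is just an example), a geo_point field that used to set validate/normalize is now declared with ignore_malformed/coerce:

    // hypothetical mapping, built the same way the updated tests build theirs
    String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
            .startObject("properties").startObject("point")
                .field("type", "geo_point")
                .field("coerce", true)           // roughly replaces normalize / normalize_lat / normalize_lon
                .field("ignore_malformed", true) // roughly replaces validate / validate_lat / validate_lon, with the sense inverted
            .endObject().endObject()
            .endObject().endObject().string();

On indexes created before 2.0 the old option names are still parsed and mapped onto the new flags; on newly created indexes they are left unconsumed, and the test added at the end of this diff expects a MapperParsingException for them. The geo queries and the geo_distance sort gain matching coerce/ignore_malformed options in the same change.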
@@ -85,6 +85,8 @@ public class GeoPointFieldMapper extends FieldMapper implements ArrayValueMapper
 public static final String LON_SUFFIX = "." + LON;
 public static final String GEOHASH = "geohash";
 public static final String GEOHASH_SUFFIX = "." + GEOHASH;
+public static final String IGNORE_MALFORMED = "ignore_malformed";
+public static final String COERCE = "coerce";
 }

 public static class Defaults {
@@ -93,10 +95,9 @@ public class GeoPointFieldMapper extends FieldMapper implements ArrayValueMapper
 public static final boolean ENABLE_GEOHASH = false;
 public static final boolean ENABLE_GEOHASH_PREFIX = false;
 public static final int GEO_HASH_PRECISION = GeoHashUtils.PRECISION;
-public static final boolean NORMALIZE_LAT = true;
-public static final boolean NORMALIZE_LON = true;
+public static final boolean IGNORE_MALFORMED = false;
-public static final boolean VALIDATE_LAT = true;
+public static final boolean COERCE = false;
-public static final boolean VALIDATE_LON = true;

 public static final MappedFieldType FIELD_TYPE = new GeoPointFieldType();

@@ -215,6 +216,7 @@ public class GeoPointFieldMapper extends FieldMapper implements ArrayValueMapper
 @Override
 public Mapper.Builder<?, ?> parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
 Builder builder = geoPointField(name);
+final boolean indexCreatedBeforeV2_0 = parserContext.indexVersionCreated().before(Version.V_2_0_0);
 parseField(builder, name, node, parserContext);
 for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
 Map.Entry<String, Object> entry = iterator.next();
@@ -245,25 +247,42 @@ public class GeoPointFieldMapper extends FieldMapper implements ArrayValueMapper
 builder.geoHashPrecision(GeoUtils.geoHashLevelsForPrecision(fieldNode.toString()));
 }
 iterator.remove();
-} else if (fieldName.equals("validate")) {
+} else if (fieldName.equals(Names.IGNORE_MALFORMED)) {
-builder.fieldType().setValidateLat(XContentMapValues.nodeBooleanValue(fieldNode));
+if (builder.fieldType().coerce == false) {
-builder.fieldType().setValidateLon(XContentMapValues.nodeBooleanValue(fieldNode));
+builder.fieldType().ignoreMalformed = XContentMapValues.nodeBooleanValue(fieldNode);
+}
 iterator.remove();
-} else if (fieldName.equals("validate_lon")) {
+} else if (indexCreatedBeforeV2_0 && fieldName.equals("validate")) {
-builder.fieldType().setValidateLon(XContentMapValues.nodeBooleanValue(fieldNode));
+if (builder.fieldType().ignoreMalformed == false) {
+builder.fieldType().ignoreMalformed = !XContentMapValues.nodeBooleanValue(fieldNode);
+}
+iterator.remove();
+} else if (indexCreatedBeforeV2_0 && fieldName.equals("validate_lon")) {
+if (builder.fieldType().ignoreMalformed() == false) {
+builder.fieldType().ignoreMalformed = !XContentMapValues.nodeBooleanValue(fieldNode);
+}
 iterator.remove();
-} else if (fieldName.equals("validate_lat")) {
+} else if (indexCreatedBeforeV2_0 && fieldName.equals("validate_lat")) {
-builder.fieldType().setValidateLat(XContentMapValues.nodeBooleanValue(fieldNode));
+if (builder.fieldType().ignoreMalformed == false) {
+builder.fieldType().ignoreMalformed = !XContentMapValues.nodeBooleanValue(fieldNode);
+}
 iterator.remove();
-} else if (fieldName.equals("normalize")) {
+} else if (fieldName.equals(Names.COERCE)) {
-builder.fieldType().setNormalizeLat(XContentMapValues.nodeBooleanValue(fieldNode));
+builder.fieldType().coerce = XContentMapValues.nodeBooleanValue(fieldNode);
-builder.fieldType().setNormalizeLon(XContentMapValues.nodeBooleanValue(fieldNode));
+if (builder.fieldType().coerce == true) {
+builder.fieldType().ignoreMalformed = true;
+}
 iterator.remove();
-} else if (fieldName.equals("normalize_lat")) {
+} else if (indexCreatedBeforeV2_0 && fieldName.equals("normalize")) {
-builder.fieldType().setNormalizeLat(XContentMapValues.nodeBooleanValue(fieldNode));
+builder.fieldType().coerce = XContentMapValues.nodeBooleanValue(fieldNode);
 iterator.remove();
-} else if (fieldName.equals("normalize_lon")) {
+} else if (indexCreatedBeforeV2_0 && fieldName.equals("normalize_lat")) {
-builder.fieldType().setNormalizeLon(XContentMapValues.nodeBooleanValue(fieldNode));
+builder.fieldType().coerce = XContentMapValues.nodeBooleanValue(fieldNode);
+iterator.remove();
+} else if (indexCreatedBeforeV2_0 && fieldName.equals("normalize_lon")) {
+if (builder.fieldType().coerce == false) {
+builder.fieldType().coerce = XContentMapValues.nodeBooleanValue(fieldNode);
+}
 iterator.remove();
 } else if (parseMultiField(builder, name, parserContext, fieldName, fieldNode)) {
 iterator.remove();
@@ -281,10 +300,8 @@ public class GeoPointFieldMapper extends FieldMapper implements ArrayValueMapper

 private MappedFieldType latFieldType;
 private MappedFieldType lonFieldType;
-private boolean validateLon = true;
+private boolean ignoreMalformed = false;
-private boolean validateLat = true;
+private boolean coerce = false;
-private boolean normalizeLon = true;
-private boolean normalizeLat = true;

 public GeoPointFieldType() {}

@@ -295,10 +312,8 @@ public class GeoPointFieldMapper extends FieldMapper implements ArrayValueMapper
 this.geohashPrefixEnabled = ref.geohashPrefixEnabled;
 this.latFieldType = ref.latFieldType; // copying ref is ok, this can never be modified
 this.lonFieldType = ref.lonFieldType; // copying ref is ok, this can never be modified
-this.validateLon = ref.validateLon;
+this.coerce = ref.coerce;
-this.validateLat = ref.validateLat;
+this.ignoreMalformed = ref.ignoreMalformed;
-this.normalizeLon = ref.normalizeLon;
-this.normalizeLat = ref.normalizeLat;
 }

 @Override
@@ -312,10 +327,8 @@ public class GeoPointFieldMapper extends FieldMapper implements ArrayValueMapper
 GeoPointFieldType that = (GeoPointFieldType) o;
 return geohashPrecision == that.geohashPrecision &&
 geohashPrefixEnabled == that.geohashPrefixEnabled &&
-validateLon == that.validateLon &&
+coerce == that.coerce &&
-validateLat == that.validateLat &&
+ignoreMalformed == that.ignoreMalformed &&
-normalizeLon == that.normalizeLon &&
-normalizeLat == that.normalizeLat &&
 java.util.Objects.equals(geohashFieldType, that.geohashFieldType) &&
 java.util.Objects.equals(latFieldType, that.latFieldType) &&
 java.util.Objects.equals(lonFieldType, that.lonFieldType);
@@ -323,7 +336,8 @@ public class GeoPointFieldMapper extends FieldMapper implements ArrayValueMapper

 @Override
 public int hashCode() {
-return java.util.Objects.hash(super.hashCode(), geohashFieldType, geohashPrecision, geohashPrefixEnabled, latFieldType, lonFieldType, validateLon, validateLat, normalizeLon, normalizeLat);
+return java.util.Objects.hash(super.hashCode(), geohashFieldType, geohashPrecision, geohashPrefixEnabled, latFieldType,
+lonFieldType, coerce, ignoreMalformed);
 }

 @Override
@@ -347,22 +361,10 @@ public class GeoPointFieldMapper extends FieldMapper implements ArrayValueMapper
 if (isGeohashPrefixEnabled() != other.isGeohashPrefixEnabled()) {
 conflicts.add("mapper [" + names().fullName() + "] has different geohash_prefix");
 }
-if (normalizeLat() != other.normalizeLat()) {
+if (isLatLonEnabled() && other.isLatLonEnabled() &&
-conflicts.add("mapper [" + names().fullName() + "] has different normalize_lat");
-}
-if (normalizeLon() != other.normalizeLon()) {
-conflicts.add("mapper [" + names().fullName() + "] has different normalize_lon");
-}
-if (isLatLonEnabled() &&
 latFieldType().numericPrecisionStep() != other.latFieldType().numericPrecisionStep()) {
 conflicts.add("mapper [" + names().fullName() + "] has different precision_step");
 }
-if (validateLat() != other.validateLat()) {
-conflicts.add("mapper [" + names().fullName() + "] has different validate_lat");
-}
-if (validateLon() != other.validateLon()) {
-conflicts.add("mapper [" + names().fullName() + "] has different validate_lon");
-}
 }

 public boolean isGeohashEnabled() {
@@ -406,40 +408,22 @@ public class GeoPointFieldMapper extends FieldMapper implements ArrayValueMapper
 this.lonFieldType = lonFieldType;
 }

-public boolean validateLon() {
+public boolean coerce() {
-return validateLon;
+return this.coerce;
 }

-public void setValidateLon(boolean validateLon) {
+public void setCoerce(boolean coerce) {
 checkIfFrozen();
-this.validateLon = validateLon;
+this.coerce = coerce;
 }

-public boolean validateLat() {
+public boolean ignoreMalformed() {
-return validateLat;
+return this.ignoreMalformed;
 }

-public void setValidateLat(boolean validateLat) {
+public void setIgnoreMalformed(boolean ignoreMalformed) {
 checkIfFrozen();
-this.validateLat = validateLat;
+this.ignoreMalformed = ignoreMalformed;
-}

-public boolean normalizeLon() {
-return normalizeLon;
-}

-public void setNormalizeLon(boolean normalizeLon) {
-checkIfFrozen();
-this.normalizeLon = normalizeLon;
-}

-public boolean normalizeLat() {
-return normalizeLat;
-}

-public void setNormalizeLat(boolean normalizeLat) {
-checkIfFrozen();
-this.normalizeLat = normalizeLat;
 }

 @Override
@@ -586,7 +570,8 @@ public class GeoPointFieldMapper extends FieldMapper implements ArrayValueMapper
 private final StringFieldMapper geohashMapper;

 public GeoPointFieldMapper(String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType, Settings indexSettings,
-ContentPath.Type pathType, DoubleFieldMapper latMapper, DoubleFieldMapper lonMapper, StringFieldMapper geohashMapper,MultiFields multiFields) {
+ContentPath.Type pathType, DoubleFieldMapper latMapper, DoubleFieldMapper lonMapper, StringFieldMapper geohashMapper,
+MultiFields multiFields) {
 super(simpleName, fieldType, defaultFieldType, indexSettings, multiFields, null);
 this.pathType = pathType;
 this.latMapper = latMapper;
@@ -680,21 +665,22 @@ public class GeoPointFieldMapper extends FieldMapper implements ArrayValueMapper
 }

 private void parse(ParseContext context, GeoPoint point, String geohash) throws IOException {
-if (fieldType().normalizeLat() || fieldType().normalizeLon()) {
+if (fieldType().ignoreMalformed == false) {
-GeoUtils.normalizePoint(point, fieldType().normalizeLat(), fieldType().normalizeLon());
-}

-if (fieldType().validateLat()) {
 if (point.lat() > 90.0 || point.lat() < -90.0) {
 throw new IllegalArgumentException("illegal latitude value [" + point.lat() + "] for " + name());
 }
-}
-if (fieldType().validateLon()) {
 if (point.lon() > 180.0 || point.lon() < -180) {
 throw new IllegalArgumentException("illegal longitude value [" + point.lon() + "] for " + name());
 }
 }

+if (fieldType().coerce) {
+// by setting coerce to false we are assuming all geopoints are already in a valid coordinate system
+// thus this extra step can be skipped
+// LUCENE WATCH: This will be folded back into Lucene's GeoPointField
+GeoUtils.normalizePoint(point, true, true);
+}

 if (fieldType().indexOptions() != IndexOptions.NONE || fieldType().stored()) {
 Field field = new Field(fieldType().names().indexName(), Double.toString(point.lat()) + ',' + Double.toString(point.lon()), fieldType());
 context.doc().add(field);
@@ -755,33 +741,11 @@ public class GeoPointFieldMapper extends FieldMapper implements ArrayValueMapper
 if (fieldType().isLatLonEnabled() && (includeDefaults || fieldType().latFieldType().numericPrecisionStep() != NumericUtils.PRECISION_STEP_DEFAULT)) {
 builder.field("precision_step", fieldType().latFieldType().numericPrecisionStep());
 }
-if (includeDefaults || fieldType().validateLat() != Defaults.VALIDATE_LAT || fieldType().validateLon() != Defaults.VALIDATE_LON) {
+if (includeDefaults || fieldType().coerce != Defaults.COERCE) {
-if (fieldType().validateLat() && fieldType().validateLon()) {
+builder.field(Names.COERCE, fieldType().coerce);
-builder.field("validate", true);
-} else if (!fieldType().validateLat() && !fieldType().validateLon()) {
-builder.field("validate", false);
-} else {
-if (includeDefaults || fieldType().validateLat() != Defaults.VALIDATE_LAT) {
-builder.field("validate_lat", fieldType().validateLat());
-}
-if (includeDefaults || fieldType().validateLon() != Defaults.VALIDATE_LON) {
-builder.field("validate_lon", fieldType().validateLon());
-}
-}
 }
-if (includeDefaults || fieldType().normalizeLat() != Defaults.NORMALIZE_LAT || fieldType().normalizeLon() != Defaults.NORMALIZE_LON) {
+if (includeDefaults || fieldType().ignoreMalformed != Defaults.IGNORE_MALFORMED) {
-if (fieldType().normalizeLat() && fieldType().normalizeLon()) {
+builder.field(Names.IGNORE_MALFORMED, fieldType().ignoreMalformed);
-builder.field("normalize", true);
-} else if (!fieldType().normalizeLat() && !fieldType().normalizeLon()) {
-builder.field("normalize", false);
-} else {
-if (includeDefaults || fieldType().normalizeLat() != Defaults.NORMALIZE_LAT) {
-builder.field("normalize_lat", fieldType().normalizeLat());
-}
-if (includeDefaults || fieldType().normalizeLon() != Defaults.NORMALIZE_LON) {
-builder.field("normalize_lon", fieldType().normalizeLon());
-}
-}
 }
 }

@@ -812,5 +776,4 @@ public class GeoPointFieldMapper extends FieldMapper implements ArrayValueMapper
 return new BytesRef(bytes);
 }
 }

 }
@@ -41,6 +41,8 @@ public class GeoBoundingBoxQueryBuilder extends QueryBuilder {

 private String queryName;
 private String type;
+private Boolean coerce;
+private Boolean ignoreMalformed;

 public GeoBoundingBoxQueryBuilder(String name) {
 this.name = name;
@@ -134,6 +136,16 @@ public class GeoBoundingBoxQueryBuilder extends QueryBuilder {
 return this;
 }

+public GeoBoundingBoxQueryBuilder coerce(boolean coerce) {
+this.coerce = coerce;
+return this;
+}

+public GeoBoundingBoxQueryBuilder ignoreMalformed(boolean ignoreMalformed) {
+this.ignoreMalformed = ignoreMalformed;
+return this;
+}

 /**
 * Sets the type of executing of the geo bounding box. Can be either `memory` or `indexed`. Defaults
 * to `memory`.
@@ -169,6 +181,12 @@ public class GeoBoundingBoxQueryBuilder extends QueryBuilder {
 if (type != null) {
 builder.field("type", type);
 }
+if (coerce != null) {
+builder.field("coerce", coerce);
+}
+if (ignoreMalformed != null) {
+builder.field("ignore_malformed", ignoreMalformed);
+}

 builder.endObject();
 }
@@ -21,12 +21,12 @@ package org.elasticsearch.index.query;

 import org.apache.lucene.search.Query;
 import org.elasticsearch.ElasticsearchParseException;
+import org.elasticsearch.Version;
 import org.elasticsearch.common.geo.GeoPoint;
 import org.elasticsearch.common.geo.GeoUtils;
 import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.index.fielddata.IndexGeoPointFieldData;
-import org.elasticsearch.index.mapper.FieldMapper;
 import org.elasticsearch.index.mapper.MappedFieldType;
 import org.elasticsearch.index.mapper.geo.GeoPointFieldMapper;
 import org.elasticsearch.index.search.geo.InMemoryGeoBoundingBoxQuery;
@@ -81,7 +81,9 @@ public class GeoBoundingBoxQueryParser implements QueryParser {
 String queryName = null;
 String currentFieldName = null;
 XContentParser.Token token;
-boolean normalize = true;
+final boolean indexCreatedBeforeV2_0 = parseContext.indexVersionCreated().before(Version.V_2_0_0);
+boolean coerce = false;
+boolean ignoreMalformed = false;

 GeoPoint sparse = new GeoPoint();

@@ -137,10 +139,15 @@ public class GeoBoundingBoxQueryParser implements QueryParser {
 } else if (token.isValue()) {
 if ("_name".equals(currentFieldName)) {
 queryName = parser.text();
-} else if ("normalize".equals(currentFieldName)) {
+} else if ("coerce".equals(currentFieldName) || (indexCreatedBeforeV2_0 && "normalize".equals(currentFieldName))) {
-normalize = parser.booleanValue();
+coerce = parser.booleanValue();
+if (coerce == true) {
+ignoreMalformed = true;
+}
 } else if ("type".equals(currentFieldName)) {
 type = parser.text();
+} else if ("ignore_malformed".equals(currentFieldName) && coerce == false) {
+ignoreMalformed = parser.booleanValue();
 } else {
 throw new QueryParsingException(parseContext, "failed to parse [{}] query. unexpected field [{}]", NAME, currentFieldName);
 }
@@ -150,8 +157,24 @@ public class GeoBoundingBoxQueryParser implements QueryParser {
 final GeoPoint topLeft = sparse.reset(top, left); //just keep the object
 final GeoPoint bottomRight = new GeoPoint(bottom, right);

-if (normalize) {
+// validation was not available prior to 2.x, so to support bwc percolation queries we only ignore_malformed on 2.x created indexes
-// Special case: if the difference bettween the left and right is 360 and the right is greater than the left, we are asking for
+if (!indexCreatedBeforeV2_0 && !ignoreMalformed) {
+if (topLeft.lat() > 90.0 || topLeft.lat() < -90.0) {
+throw new QueryParsingException(parseContext, "illegal latitude value [{}] for [{}]", topLeft.lat(), NAME);
+}
+if (topLeft.lon() > 180.0 || topLeft.lon() < -180) {
+throw new QueryParsingException(parseContext, "illegal longitude value [{}] for [{}]", topLeft.lon(), NAME);
+}
+if (bottomRight.lat() > 90.0 || bottomRight.lat() < -90.0) {
+throw new QueryParsingException(parseContext, "illegal latitude value [{}] for [{}]", bottomRight.lat(), NAME);
+}
+if (bottomRight.lon() > 180.0 || bottomRight.lon() < -180) {
+throw new QueryParsingException(parseContext, "illegal longitude value [{}] for [{}]", bottomRight.lon(), NAME);
+}
+}

+if (coerce) {
+// Special case: if the difference between the left and right is 360 and the right is greater than the left, we are asking for
 // the complete longitude range so need to set longitude to the complete longditude range
 boolean completeLonRange = ((right - left) % 360 == 0 && right > left);
 GeoUtils.normalizePoint(topLeft, true, !completeLonRange);
@@ -44,6 +44,10 @@ public class GeoDistanceQueryBuilder extends QueryBuilder {

 private String queryName;

+private Boolean coerce;

+private Boolean ignoreMalformed;

 public GeoDistanceQueryBuilder(String name) {
 this.name = name;
 }
@@ -97,6 +101,16 @@ public class GeoDistanceQueryBuilder extends QueryBuilder {
 return this;
 }

+public GeoDistanceQueryBuilder coerce(boolean coerce) {
+this.coerce = coerce;
+return this;
+}

+public GeoDistanceQueryBuilder ignoreMalformed(boolean ignoreMalformed) {
+this.ignoreMalformed = ignoreMalformed;
+return this;
+}

 @Override
 protected void doXContent(XContentBuilder builder, Params params) throws IOException {
 builder.startObject(GeoDistanceQueryParser.NAME);
@@ -115,6 +129,12 @@ public class GeoDistanceQueryBuilder extends QueryBuilder {
 if (queryName != null) {
 builder.field("_name", queryName);
 }
+if (coerce != null) {
+builder.field("coerce", coerce);
+}
+if (ignoreMalformed != null) {
+builder.field("ignore_malformed", ignoreMalformed);
+}
 builder.endObject();
 }
 }
@@ -20,6 +20,7 @@
 package org.elasticsearch.index.query;

 import org.apache.lucene.search.Query;
+import org.elasticsearch.Version;
 import org.elasticsearch.common.geo.GeoDistance;
 import org.elasticsearch.common.geo.GeoHashUtils;
 import org.elasticsearch.common.geo.GeoPoint;
@@ -28,7 +29,6 @@ import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.unit.DistanceUnit;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.index.fielddata.IndexGeoPointFieldData;
-import org.elasticsearch.index.mapper.FieldMapper;
 import org.elasticsearch.index.mapper.MappedFieldType;
 import org.elasticsearch.index.mapper.geo.GeoPointFieldMapper;
 import org.elasticsearch.index.search.geo.GeoDistanceRangeQuery;
@@ -71,8 +71,9 @@ public class GeoDistanceQueryParser implements QueryParser {
 DistanceUnit unit = DistanceUnit.DEFAULT;
 GeoDistance geoDistance = GeoDistance.DEFAULT;
 String optimizeBbox = "memory";
-boolean normalizeLon = true;
+final boolean indexCreatedBeforeV2_0 = parseContext.indexVersionCreated().before(Version.V_2_0_0);
-boolean normalizeLat = true;
+boolean coerce = false;
+boolean ignoreMalformed = false;
 while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
 if (token == XContentParser.Token.FIELD_NAME) {
 currentFieldName = parser.currentName();
@@ -125,9 +126,13 @@ public class GeoDistanceQueryParser implements QueryParser {
 queryName = parser.text();
 } else if ("optimize_bbox".equals(currentFieldName) || "optimizeBbox".equals(currentFieldName)) {
 optimizeBbox = parser.textOrNull();
-} else if ("normalize".equals(currentFieldName)) {
+} else if ("coerce".equals(currentFieldName) || (indexCreatedBeforeV2_0 && "normalize".equals(currentFieldName))) {
-normalizeLat = parser.booleanValue();
+coerce = parser.booleanValue();
-normalizeLon = parser.booleanValue();
+if (coerce == true) {
+ignoreMalformed = true;
+}
+} else if ("ignore_malformed".equals(currentFieldName) && coerce == false) {
+ignoreMalformed = parser.booleanValue();
 } else {
 point.resetFromString(parser.text());
 fieldName = currentFieldName;
@@ -135,6 +140,20 @@ public class GeoDistanceQueryParser implements QueryParser {
 }
 }

+// validation was not available prior to 2.x, so to support bwc percolation queries we only ignore_malformed on 2.x created indexes
+if (!indexCreatedBeforeV2_0 && !ignoreMalformed) {
+if (point.lat() > 90.0 || point.lat() < -90.0) {
+throw new QueryParsingException(parseContext, "illegal latitude value [{}] for [{}]", point.lat(), NAME);
+}
+if (point.lon() > 180.0 || point.lon() < -180) {
+throw new QueryParsingException(parseContext, "illegal longitude value [{}] for [{}]", point.lon(), NAME);
+}
+}

+if (coerce) {
+GeoUtils.normalizePoint(point, coerce, coerce);
+}

 if (vDistance == null) {
 throw new QueryParsingException(parseContext, "geo_distance requires 'distance' to be specified");
 } else if (vDistance instanceof Number) {
@@ -144,10 +163,6 @@ public class GeoDistanceQueryParser implements QueryParser {
 }
 distance = geoDistance.normalize(distance, DistanceUnit.DEFAULT);

-if (normalizeLat || normalizeLon) {
-GeoUtils.normalizePoint(point, normalizeLat, normalizeLon);
-}

 MappedFieldType fieldType = parseContext.fieldMapper(fieldName);
 if (fieldType == null) {
 throw new QueryParsingException(parseContext, "failed to find geo_point field [" + fieldName + "]");
@@ -46,6 +46,10 @@ public class GeoDistanceRangeQueryBuilder extends QueryBuilder {

 private String optimizeBbox;

+private Boolean coerce;

+private Boolean ignoreMalformed;

 public GeoDistanceRangeQueryBuilder(String name) {
 this.name = name;
 }
@@ -125,6 +129,16 @@ public class GeoDistanceRangeQueryBuilder extends QueryBuilder {
 return this;
 }

+public GeoDistanceRangeQueryBuilder coerce(boolean coerce) {
+this.coerce = coerce;
+return this;
+}

+public GeoDistanceRangeQueryBuilder ignoreMalformed(boolean ignoreMalformed) {
+this.ignoreMalformed = ignoreMalformed;
+return this;
+}

 /**
 * Sets the filter name for the filter that can be used when searching for matched_filters per hit.
 */
@@ -154,6 +168,12 @@ public class GeoDistanceRangeQueryBuilder extends QueryBuilder {
 if (queryName != null) {
 builder.field("_name", queryName);
 }
+if (coerce != null) {
+builder.field("coerce", coerce);
+}
+if (ignoreMalformed != null) {
+builder.field("ignore_malformed", ignoreMalformed);
+}
 builder.endObject();
 }
 }
@@ -20,6 +20,7 @@
 package org.elasticsearch.index.query;

 import org.apache.lucene.search.Query;
+import org.elasticsearch.Version;
 import org.elasticsearch.common.geo.GeoDistance;
 import org.elasticsearch.common.geo.GeoHashUtils;
 import org.elasticsearch.common.geo.GeoPoint;
@@ -28,7 +29,6 @@ import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.unit.DistanceUnit;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.index.fielddata.IndexGeoPointFieldData;
-import org.elasticsearch.index.mapper.FieldMapper;
 import org.elasticsearch.index.mapper.MappedFieldType;
 import org.elasticsearch.index.mapper.geo.GeoPointFieldMapper;
 import org.elasticsearch.index.search.geo.GeoDistanceRangeQuery;
@@ -73,8 +73,9 @@ public class GeoDistanceRangeQueryParser implements QueryParser {
 DistanceUnit unit = DistanceUnit.DEFAULT;
 GeoDistance geoDistance = GeoDistance.DEFAULT;
 String optimizeBbox = "memory";
-boolean normalizeLon = true;
+final boolean indexCreatedBeforeV2_0 = parseContext.indexVersionCreated().before(Version.V_2_0_0);
-boolean normalizeLat = true;
+boolean coerce = false;
+boolean ignoreMalformed = false;
 while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
 if (token == XContentParser.Token.FIELD_NAME) {
 currentFieldName = parser.currentName();
@@ -155,9 +156,13 @@ public class GeoDistanceRangeQueryParser implements QueryParser {
 queryName = parser.text();
 } else if ("optimize_bbox".equals(currentFieldName) || "optimizeBbox".equals(currentFieldName)) {
 optimizeBbox = parser.textOrNull();
-} else if ("normalize".equals(currentFieldName)) {
+} else if ("coerce".equals(currentFieldName) || (indexCreatedBeforeV2_0 && "normalize".equals(currentFieldName))) {
-normalizeLat = parser.booleanValue();
+coerce = parser.booleanValue();
-normalizeLon = parser.booleanValue();
+if (coerce == true) {
+ignoreMalformed = true;
+}
+} else if ("ignore_malformed".equals(currentFieldName) && coerce == false) {
+ignoreMalformed = parser.booleanValue();
 } else {
 point.resetFromString(parser.text());
 fieldName = currentFieldName;
@@ -165,6 +170,20 @@ public class GeoDistanceRangeQueryParser implements QueryParser {
 }
 }

+// validation was not available prior to 2.x, so to support bwc percolation queries we only ignore_malformed on 2.x created indexes
+if (!indexCreatedBeforeV2_0 && !ignoreMalformed) {
+if (point.lat() > 90.0 || point.lat() < -90.0) {
+throw new QueryParsingException(parseContext, "illegal latitude value [{}] for [{}]", point.lat(), NAME);
+}
+if (point.lon() > 180.0 || point.lon() < -180) {
+throw new QueryParsingException(parseContext, "illegal longitude value [{}] for [{}]", point.lon(), NAME);
+}
+}

+if (coerce) {
+GeoUtils.normalizePoint(point, coerce, coerce);
+}

 Double from = null;
 Double to = null;
 if (vFrom != null) {
@@ -184,10 +203,6 @@ public class GeoDistanceRangeQueryParser implements QueryParser {
 to = geoDistance.normalize(to, DistanceUnit.DEFAULT);
 }

-if (normalizeLat || normalizeLon) {
-GeoUtils.normalizePoint(point, normalizeLat, normalizeLon);
-}

 MappedFieldType fieldType = parseContext.fieldMapper(fieldName);
 if (fieldType == null) {
 throw new QueryParsingException(parseContext, "failed to find geo_point field [" + fieldName + "]");
@@ -38,6 +38,10 @@ public class GeoPolygonQueryBuilder extends QueryBuilder {

 private String queryName;

+private Boolean coerce;

+private Boolean ignoreMalformed;

 public GeoPolygonQueryBuilder(String name) {
 this.name = name;
 }
@@ -70,6 +74,16 @@ public class GeoPolygonQueryBuilder extends QueryBuilder {
 return this;
 }

+public GeoPolygonQueryBuilder coerce(boolean coerce) {
+this.coerce = coerce;
+return this;
+}

+public GeoPolygonQueryBuilder ignoreMalformed(boolean ignoreMalformed) {
+this.ignoreMalformed = ignoreMalformed;
+return this;
+}

 @Override
 protected void doXContent(XContentBuilder builder, Params params) throws IOException {
 builder.startObject(GeoPolygonQueryParser.NAME);
@@ -85,6 +99,12 @@ public class GeoPolygonQueryBuilder extends QueryBuilder {
 if (queryName != null) {
 builder.field("_name", queryName);
 }
+if (coerce != null) {
+builder.field("coerce", coerce);
+}
+if (ignoreMalformed != null) {
+builder.field("ignore_malformed", ignoreMalformed);
+}

 builder.endObject();
 }
@@ -22,13 +22,13 @@ package org.elasticsearch.index.query;
 import com.google.common.collect.Lists;

 import org.apache.lucene.search.Query;
+import org.elasticsearch.Version;
 import org.elasticsearch.common.geo.GeoPoint;
 import org.elasticsearch.common.geo.GeoUtils;
 import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.common.xcontent.XContentParser.Token;
 import org.elasticsearch.index.fielddata.IndexGeoPointFieldData;
-import org.elasticsearch.index.mapper.FieldMapper;
 import org.elasticsearch.index.mapper.MappedFieldType;
 import org.elasticsearch.index.mapper.geo.GeoPointFieldMapper;
 import org.elasticsearch.index.search.geo.GeoPolygonQuery;
@@ -70,9 +70,9 @@ public class GeoPolygonQueryParser implements QueryParser {

 List<GeoPoint> shell = Lists.newArrayList();

-boolean normalizeLon = true;
+final boolean indexCreatedBeforeV2_0 = parseContext.indexVersionCreated().before(Version.V_2_0_0);
-boolean normalizeLat = true;
+boolean coerce = false;
+boolean ignoreMalformed = false;
 String queryName = null;
 String currentFieldName = null;
 XContentParser.Token token;
@@ -108,9 +108,13 @@ public class GeoPolygonQueryParser implements QueryParser {
 } else if (token.isValue()) {
 if ("_name".equals(currentFieldName)) {
 queryName = parser.text();
-} else if ("normalize".equals(currentFieldName)) {
+} else if ("coerce".equals(currentFieldName) || (indexCreatedBeforeV2_0 && "normalize".equals(currentFieldName))) {
-normalizeLat = parser.booleanValue();
+coerce = parser.booleanValue();
-normalizeLon = parser.booleanValue();
+if (coerce == true) {
+ignoreMalformed = true;
+}
+} else if ("ignore_malformed".equals(currentFieldName) && coerce == false) {
+ignoreMalformed = parser.booleanValue();
 } else {
 throw new QueryParsingException(parseContext, "[geo_polygon] query does not support [" + currentFieldName + "]");
 }
@@ -134,9 +138,21 @@ public class GeoPolygonQueryParser implements QueryParser {
 }
 }

-if (normalizeLat || normalizeLon) {
+// validation was not available prior to 2.x, so to support bwc percolation queries we only ignore_malformed on 2.x created indexes
+if (!indexCreatedBeforeV2_0 && !ignoreMalformed) {
 for (GeoPoint point : shell) {
-GeoUtils.normalizePoint(point, normalizeLat, normalizeLon);
+if (point.lat() > 90.0 || point.lat() < -90.0) {
+throw new QueryParsingException(parseContext, "illegal latitude value [{}] for [{}]", point.lat(), NAME);
+}
+if (point.lon() > 180.0 || point.lon() < -180) {
+throw new QueryParsingException(parseContext, "illegal longitude value [{}] for [{}]", point.lon(), NAME);
+}
+}
+}

+if (coerce) {
+for (GeoPoint point : shell) {
+GeoUtils.normalizePoint(point, coerce, coerce);
 }
 }

@@ -47,6 +47,8 @@ public class GeoDistanceSortBuilder extends SortBuilder {
 private String sortMode;
 private QueryBuilder nestedFilter;
 private String nestedPath;
+private Boolean coerce;
+private Boolean ignoreMalformed;

 /**
 * Constructs a new distance based sort on a geo point like field.
@@ -146,6 +148,16 @@ public class GeoDistanceSortBuilder extends SortBuilder {
 return this;
 }

+public GeoDistanceSortBuilder coerce(boolean coerce) {
+this.coerce = coerce;
+return this;
+}

+public GeoDistanceSortBuilder ignoreMalformed(boolean ignoreMalformed) {
+this.ignoreMalformed = ignoreMalformed;
+return this;
+}

 @Override
 public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
 builder.startObject("_geo_distance");
@@ -181,6 +193,12 @@ public class GeoDistanceSortBuilder extends SortBuilder {
 if (nestedFilter != null) {
 builder.field("nested_filter", nestedFilter, params);
 }
+if (coerce != null) {
+builder.field("coerce", coerce);
+}
+if (ignoreMalformed != null) {
+builder.field("ignore_malformed", ignoreMalformed);
+}

 builder.endObject();
 return builder;
@@ -29,6 +29,7 @@ import org.apache.lucene.search.SortField;
 import org.apache.lucene.search.join.BitDocIdSetFilter;
 import org.apache.lucene.util.BitSet;
 import org.elasticsearch.ElasticsearchParseException;
+import org.elasticsearch.Version;
 import org.elasticsearch.common.geo.GeoDistance;
 import org.elasticsearch.common.geo.GeoDistance.FixedSourceDistance;
 import org.elasticsearch.common.geo.GeoPoint;
@@ -42,7 +43,6 @@ import org.elasticsearch.index.fielddata.IndexGeoPointFieldData;
 import org.elasticsearch.index.fielddata.MultiGeoPointValues;
 import org.elasticsearch.index.fielddata.NumericDoubleValues;
 import org.elasticsearch.index.fielddata.SortedNumericDoubleValues;
-import org.elasticsearch.index.mapper.FieldMapper;
 import org.elasticsearch.index.mapper.MappedFieldType;
 import org.elasticsearch.index.mapper.object.ObjectMapper;
 import org.elasticsearch.index.query.support.NestedInnerQueryParseSupport;
@@ -73,8 +73,9 @@ public class GeoDistanceSortParser implements SortParser {
 MultiValueMode sortMode = null;
 NestedInnerQueryParseSupport nestedHelper = null;

-boolean normalizeLon = true;
+final boolean indexCreatedBeforeV2_0 = context.queryParserService().getIndexCreatedVersion().before(Version.V_2_0_0);
-boolean normalizeLat = true;
+boolean coerce = false;
+boolean ignoreMalformed = false;

 XContentParser.Token token;
 String currentName = parser.currentName();
@@ -107,9 +108,13 @@ public class GeoDistanceSortParser implements SortParser {
 unit = DistanceUnit.fromString(parser.text());
 } else if (currentName.equals("distance_type") || currentName.equals("distanceType")) {
 geoDistance = GeoDistance.fromString(parser.text());
-} else if ("normalize".equals(currentName)) {
+} else if ("coerce".equals(currentName) || (indexCreatedBeforeV2_0 && "normalize".equals(currentName))) {
-normalizeLat = parser.booleanValue();
+coerce = parser.booleanValue();
-normalizeLon = parser.booleanValue();
+if (coerce == true) {
+ignoreMalformed = true;
+}
+} else if ("ignore_malformed".equals(currentName) && coerce == false) {
+ignoreMalformed = parser.booleanValue();
 } else if ("sort_mode".equals(currentName) || "sortMode".equals(currentName) || "mode".equals(currentName)) {
 sortMode = MultiValueMode.fromString(parser.text());
 } else if ("nested_path".equals(currentName) || "nestedPath".equals(currentName)) {
@@ -126,9 +131,21 @@ public class GeoDistanceSortParser implements SortParser {
 }
 }

-if (normalizeLat || normalizeLon) {
+// validation was not available prior to 2.x, so to support bwc percolation queries we only ignore_malformed on 2.x created indexes
+if (!indexCreatedBeforeV2_0 && !ignoreMalformed) {
 for (GeoPoint point : geoPoints) {
-GeoUtils.normalizePoint(point, normalizeLat, normalizeLon);
+if (point.lat() > 90.0 || point.lat() < -90.0) {
+throw new ElasticsearchParseException("illegal latitude value [{}] for [GeoDistanceSort]", point.lat());
+}
+if (point.lon() > 180.0 || point.lon() < -180) {
+throw new ElasticsearchParseException("illegal longitude value [{}] for [GeoDistanceSort]", point.lon());
+}
+}
+}

+if (coerce) {
+for (GeoPoint point : geoPoints) {
+GeoUtils.normalizePoint(point, coerce, coerce);
 }
 }

@ -18,7 +18,10 @@
|
|||||||
*/
|
*/
|
||||||
package org.elasticsearch.index.mapper.geo;
|
package org.elasticsearch.index.mapper.geo;
|
||||||
|
|
||||||
|
import org.elasticsearch.Version;
|
||||||
|
import org.elasticsearch.cluster.metadata.IndexMetaData;
|
||||||
import org.elasticsearch.common.geo.GeoHashUtils;
|
import org.elasticsearch.common.geo.GeoHashUtils;
|
||||||
|
import org.elasticsearch.common.settings.Settings;
|
||||||
import org.elasticsearch.common.xcontent.XContentFactory;
|
import org.elasticsearch.common.xcontent.XContentFactory;
|
||||||
import org.elasticsearch.index.mapper.DocumentMapper;
|
import org.elasticsearch.index.mapper.DocumentMapper;
|
||||||
import org.elasticsearch.index.mapper.DocumentMapperParser;
|
import org.elasticsearch.index.mapper.DocumentMapperParser;
|
||||||
@ -26,6 +29,7 @@ import org.elasticsearch.index.mapper.MapperParsingException;
|
|||||||
import org.elasticsearch.index.mapper.MergeResult;
|
import org.elasticsearch.index.mapper.MergeResult;
|
||||||
import org.elasticsearch.index.mapper.ParsedDocument;
|
import org.elasticsearch.index.mapper.ParsedDocument;
|
||||||
import org.elasticsearch.test.ESSingleNodeTestCase;
|
import org.elasticsearch.test.ESSingleNodeTestCase;
|
||||||
|
import org.elasticsearch.test.VersionUtils;
|
||||||
import org.junit.Test;
|
import org.junit.Test;
|
||||||
|
|
||||||
import java.util.ArrayList;
|
import java.util.ArrayList;
|
||||||
@@ -138,7 +142,8 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
     public void testNormalizeLatLonValuesDefault() throws Exception {
         // default to normalize
         String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
-                .startObject("properties").startObject("point").field("type", "geo_point").endObject().endObject()
+                .startObject("properties").startObject("point").field("type", "geo_point").field("coerce", true)
+                .field("ignore_malformed", true).endObject().endObject()
                 .endObject().endObject().string();

         DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
@@ -171,7 +176,8 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
     @Test
     public void testValidateLatLonValues() throws Exception {
         String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
-                .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).field("normalize", false).field("validate", true).endObject().endObject()
+                .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).field("coerce", false)
+                .field("ignore_malformed", false).endObject().endObject()
                 .endObject().endObject().string();

         DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
@@ -231,7 +237,8 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
     @Test
     public void testNoValidateLatLonValues() throws Exception {
         String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
-                .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).field("normalize", false).field("validate", false).endObject().endObject()
+                .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).field("coerce", false)
+                .field("ignore_malformed", true).endObject().endObject()
                 .endObject().endObject().string();

         DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
@@ -472,30 +479,161 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
         assertThat(doc.rootDoc().getFields("point")[1].stringValue(), equalTo("1.4,1.5"));
     }

+
+    /**
+     * Test that expected exceptions are thrown when creating a new index with deprecated options
+     */
+    @Test
+    public void testOptionDeprecation() throws Exception {
+        DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
+        // test deprecation exceptions on newly created indexes
+        try {
+            String validateMapping = XContentFactory.jsonBuilder().startObject().startObject("type")
+                    .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).field("geohash", true)
+                    .field("validate", true).endObject().endObject()
+                    .endObject().endObject().string();
+            parser.parse(validateMapping);
+            fail("process completed successfully when " + MapperParsingException.class.getName() + " expected");
+        } catch (MapperParsingException e) {
+            assertEquals(e.getMessage(), "Mapping definition for [point] has unsupported parameters: [validate : true]");
+        }
+
+        try {
+            String validateMapping = XContentFactory.jsonBuilder().startObject().startObject("type")
+                    .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).field("geohash", true)
+                    .field("validate_lat", true).endObject().endObject()
+                    .endObject().endObject().string();
+            parser.parse(validateMapping);
+            fail("process completed successfully when " + MapperParsingException.class.getName() + " expected");
+        } catch (MapperParsingException e) {
+            assertEquals(e.getMessage(), "Mapping definition for [point] has unsupported parameters: [validate_lat : true]");
+        }
+
+        try {
+            String validateMapping = XContentFactory.jsonBuilder().startObject().startObject("type")
+                    .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).field("geohash", true)
+                    .field("validate_lon", true).endObject().endObject()
+                    .endObject().endObject().string();
+            parser.parse(validateMapping);
+            fail("process completed successfully when " + MapperParsingException.class.getName() + " expected");
+        } catch (MapperParsingException e) {
+            assertEquals(e.getMessage(), "Mapping definition for [point] has unsupported parameters: [validate_lon : true]");
+        }
+
+        // test deprecated normalize
+        try {
+            String normalizeMapping = XContentFactory.jsonBuilder().startObject().startObject("type")
+                    .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).field("geohash", true)
+                    .field("normalize", true).endObject().endObject()
+                    .endObject().endObject().string();
+            parser.parse(normalizeMapping);
+            fail("process completed successfully when " + MapperParsingException.class.getName() + " expected");
+        } catch (MapperParsingException e) {
+            assertEquals(e.getMessage(), "Mapping definition for [point] has unsupported parameters: [normalize : true]");
+        }
+
+        try {
+            String normalizeMapping = XContentFactory.jsonBuilder().startObject().startObject("type")
+                    .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).field("geohash", true)
+                    .field("normalize_lat", true).endObject().endObject()
+                    .endObject().endObject().string();
+            parser.parse(normalizeMapping);
+            fail("process completed successfully when " + MapperParsingException.class.getName() + " expected");
+        } catch (MapperParsingException e) {
+            assertEquals(e.getMessage(), "Mapping definition for [point] has unsupported parameters: [normalize_lat : true]");
+        }
+
+        try {
+            String normalizeMapping = XContentFactory.jsonBuilder().startObject().startObject("type")
+                    .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).field("geohash", true)
+                    .field("normalize_lon", true).endObject().endObject()
+                    .endObject().endObject().string();
+            parser.parse(normalizeMapping);
+            fail("process completed successfully when " + MapperParsingException.class.getName() + " expected");
+        } catch (MapperParsingException e) {
+            assertEquals(e.getMessage(), "Mapping definition for [point] has unsupported parameters: [normalize_lon : true]");
+        }
+    }
+
+    /**
+     * Test backward compatibility
+     */
+    @Test
+    public void testBackwardCompatibleOptions() throws Exception {
+        // backward compatibility testing
+        Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, VersionUtils.randomVersionBetween(random(), Version.V_1_0_0,
+                Version.V_1_7_1)).build();
+
+        // validate
+        DocumentMapperParser parser = createIndex("test", settings).mapperService().documentMapperParser();
+        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
+                .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).field("geohash", true)
+                .field("validate", false).endObject().endObject()
+                .endObject().endObject().string();
+        parser.parse(mapping);
+        assertThat(parser.parse(mapping).mapping().toString(), containsString("\"ignore_malformed\":true"));
+
+        mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
+                .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).field("geohash", true)
+                .field("validate_lat", false).endObject().endObject()
+                .endObject().endObject().string();
+        parser.parse(mapping);
+        assertThat(parser.parse(mapping).mapping().toString(), containsString("\"ignore_malformed\":true"));
+
+        mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
+                .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).field("geohash", true)
+                .field("validate_lon", false).endObject().endObject()
+                .endObject().endObject().string();
+        parser.parse(mapping);
+        assertThat(parser.parse(mapping).mapping().toString(), containsString("\"ignore_malformed\":true"));
+
+        // normalize
+        mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
+                .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).field("geohash", true)
+                .field("normalize", true).endObject().endObject()
+                .endObject().endObject().string();
+        parser.parse(mapping);
+        assertThat(parser.parse(mapping).mapping().toString(), containsString("\"coerce\":true"));
+
+        mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
+                .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).field("geohash", true)
+                .field("normalize_lat", true).endObject().endObject()
+                .endObject().endObject().string();
+        parser.parse(mapping);
+        assertThat(parser.parse(mapping).mapping().toString(), containsString("\"coerce\":true"));
+
+        mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
+                .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).field("geohash", true)
+                .field("normalize_lon", true).endObject().endObject()
+                .endObject().endObject().string();
+        parser.parse(mapping);
+        assertThat(parser.parse(mapping).mapping().toString(), containsString("\"coerce\":true"));
+    }
+
     @Test
     public void testGeoPointMapperMerge() throws Exception {
         String stage1Mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
                 .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).field("geohash", true)
-                .field("validate", true).endObject().endObject()
+                .field("ignore_malformed", true).endObject().endObject()
                 .endObject().endObject().string();
         DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
         DocumentMapper stage1 = parser.parse(stage1Mapping);
         String stage2Mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
-                .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).field("geohash", true)
-                .field("validate", false).endObject().endObject()
+                .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", false).field("geohash", true)
+                .field("ignore_malformed", false).endObject().endObject()
                 .endObject().endObject().string();
         DocumentMapper stage2 = parser.parse(stage2Mapping);

         MergeResult mergeResult = stage1.merge(stage2.mapping(), false, false);
         assertThat(mergeResult.hasConflicts(), equalTo(true));
-        assertThat(mergeResult.buildConflicts().length, equalTo(2));
+        assertThat(mergeResult.buildConflicts().length, equalTo(1));
         // todo better way of checking conflict?
-        assertThat("mapper [point] has different validate_lat", isIn(new ArrayList<>(Arrays.asList(mergeResult.buildConflicts()))));
+        assertThat("mapper [point] has different lat_lon", isIn(new ArrayList<>(Arrays.asList(mergeResult.buildConflicts()))));

         // correct mapping and ensure no failures
         stage2Mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
                 .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).field("geohash", true)
-                .field("validate", true).field("normalize", true).endObject().endObject()
+                .field("ignore_malformed", true).endObject().endObject()
                 .endObject().endObject().string();
         stage2 = parser.parse(stage2Mapping);
         mergeResult = stage1.merge(stage2.mapping(), false, false);
@@ -31,7 +31,7 @@ public class GeoPointFieldTypeTests extends FieldTypeTestCase {

     @Override
     protected int numProperties() {
-        return 6 + super.numProperties();
+        return 4 + super.numProperties();
     }

     @Override
@@ -40,11 +40,9 @@ public class GeoPointFieldTypeTests extends FieldTypeTestCase {
         switch (propNum) {
             case 0: gft.setGeohashEnabled(new StringFieldMapper.StringFieldType(), 1, true); break;
             case 1: gft.setLatLonEnabled(new DoubleFieldMapper.DoubleFieldType(), new DoubleFieldMapper.DoubleFieldType()); break;
-            case 2: gft.setValidateLon(!gft.validateLon()); break;
-            case 3: gft.setValidateLat(!gft.validateLat()); break;
-            case 4: gft.setNormalizeLon(!gft.normalizeLon()); break;
-            case 5: gft.setNormalizeLat(!gft.normalizeLat()); break;
-            default: super.modifyProperty(ft, propNum - 6);
+            case 2: gft.setIgnoreMalformed(!gft.ignoreMalformed()); break;
+            case 3: gft.setCoerce(!gft.coerce()); break;
+            default: super.modifyProperty(ft, propNum - 4);
         }
     }
 }
@@ -339,34 +339,28 @@ public class GeoUtilsTests extends ESTestCase {
     @Test
     public void testNormalizePoint_outsideNormalRange_withOptions() {
         for (int i = 0; i < 100; i++) {
-            boolean normLat = randomBoolean();
-            boolean normLon = randomBoolean();
+            boolean normalize = randomBoolean();
             double normalisedLat = (randomDouble() * 180.0) - 90.0;
             double normalisedLon = (randomDouble() * 360.0) - 180.0;
-            int shiftLat = randomIntBetween(1, 10000);
-            int shiftLon = randomIntBetween(1, 10000);
-            double testLat = normalisedLat + (180.0 * shiftLat);
-            double testLon = normalisedLon + (360.0 * shiftLon);
+            int shift = randomIntBetween(1, 10000);
+            double testLat = normalisedLat + (180.0 * shift);
+            double testLon = normalisedLon + (360.0 * shift);

             double expectedLat;
             double expectedLon;
-            if (normLat) {
-                expectedLat = normalisedLat * (shiftLat % 2 == 0 ? 1 : -1);
-            } else {
-                expectedLat = testLat;
-            }
-            if (normLon) {
-                expectedLon = normalisedLon + ((normLat && shiftLat % 2 == 1) ? 180 : 0);
+            if (normalize) {
+                expectedLat = normalisedLat * (shift % 2 == 0 ? 1 : -1);
+                expectedLon = normalisedLon + ((shift % 2 == 1) ? 180 : 0);
                 if (expectedLon > 180.0) {
                     expectedLon -= 360;
                 }
             } else {
-                double shiftValue = normalisedLon > 0 ? -180 : 180;
-                expectedLon = testLon + ((normLat && shiftLat % 2 == 1) ? shiftValue : 0);
+                expectedLat = testLat;
+                expectedLon = testLon;
             }
             GeoPoint testPoint = new GeoPoint(testLat, testLon);
             GeoPoint expectedPoint = new GeoPoint(expectedLat, expectedLon);
-            GeoUtils.normalizePoint(testPoint, normLat, normLon);
+            GeoUtils.normalizePoint(testPoint, normalize, normalize);
             assertThat("Unexpected Latitude", testPoint.lat(), closeTo(expectedPoint.lat(), MAX_ACCEPTABLE_ERROR));
             assertThat("Unexpected Longitude", testPoint.lon(), closeTo(expectedPoint.lon(), MAX_ACCEPTABLE_ERROR));
         }
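One concrete instance of the expectation encoded by the test above, with fixed numbers (illustrative sketch only; same GeoPoint/GeoUtils imports as the earlier sketch):

// normalisedLat = 10, normalisedLon = 20, shift = 3 (odd)
// testLat = 10 + 3 * 180 = 550, testLon = 20 + 3 * 360 = 1100
GeoPoint p = new GeoPoint(550.0, 1100.0);
GeoUtils.normalizePoint(p, true, true);
// expectedLat = 10 * -1 = -10 (sign flips for an odd shift)
// expectedLon = 20 + 180 = 200, which wraps past 180 to -160
// p is now (-10.0, -160.0)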
@@ -574,7 +574,8 @@ public class DecayFunctionScoreIT extends ESIntegTestCase {
                 "type",
                 jsonBuilder().startObject().startObject("type").startObject("properties").startObject("test").field("type", "string")
                         .endObject().startObject("date").field("type", "date").endObject().startObject("num").field("type", "double")
-                        .endObject().startObject("geo").field("type", "geo_point").endObject().endObject().endObject().endObject()));
+                        .endObject().startObject("geo").field("type", "geo_point").field("coerce", true).endObject().endObject()
+                        .endObject().endObject()));
         ensureYellow();
         int numDocs = 200;
         List<IndexRequestBuilder> indexBuilders = new ArrayList<>();
@@ -289,50 +289,50 @@ public class GeoBoundingBoxIT extends ESIntegTestCase {
         SearchResponse searchResponse = client().prepareSearch()
                 .setQuery(
                         filteredQuery(matchAllQuery(),
-                                geoBoundingBoxQuery("location").topLeft(50, -180).bottomRight(-50, 180))
+                                geoBoundingBoxQuery("location").coerce(true).topLeft(50, -180).bottomRight(-50, 180))
                 ).execute().actionGet();
         assertThat(searchResponse.getHits().totalHits(), equalTo(1l));
         searchResponse = client().prepareSearch()
                 .setQuery(
                         filteredQuery(matchAllQuery(),
-                                geoBoundingBoxQuery("location").topLeft(50, -180).bottomRight(-50, 180).type("indexed"))
+                                geoBoundingBoxQuery("location").coerce(true).topLeft(50, -180).bottomRight(-50, 180).type("indexed"))
                 ).execute().actionGet();
         assertThat(searchResponse.getHits().totalHits(), equalTo(1l));
         searchResponse = client().prepareSearch()
                 .setQuery(
                         filteredQuery(matchAllQuery(),
-                                geoBoundingBoxQuery("location").topLeft(90, -180).bottomRight(-90, 180))
+                                geoBoundingBoxQuery("location").coerce(true).topLeft(90, -180).bottomRight(-90, 180))
                 ).execute().actionGet();
         assertThat(searchResponse.getHits().totalHits(), equalTo(2l));
         searchResponse = client().prepareSearch()
                 .setQuery(
                         filteredQuery(matchAllQuery(),
-                                geoBoundingBoxQuery("location").topLeft(90, -180).bottomRight(-90, 180).type("indexed"))
+                                geoBoundingBoxQuery("location").coerce(true).topLeft(90, -180).bottomRight(-90, 180).type("indexed"))
                 ).execute().actionGet();
         assertThat(searchResponse.getHits().totalHits(), equalTo(2l));

         searchResponse = client().prepareSearch()
                 .setQuery(
                         filteredQuery(matchAllQuery(),
-                                geoBoundingBoxQuery("location").topLeft(50, 0).bottomRight(-50, 360))
+                                geoBoundingBoxQuery("location").coerce(true).topLeft(50, 0).bottomRight(-50, 360))
                 ).execute().actionGet();
         assertThat(searchResponse.getHits().totalHits(), equalTo(1l));
         searchResponse = client().prepareSearch()
                 .setQuery(
                         filteredQuery(matchAllQuery(),
-                                geoBoundingBoxQuery("location").topLeft(50, 0).bottomRight(-50, 360).type("indexed"))
+                                geoBoundingBoxQuery("location").coerce(true).topLeft(50, 0).bottomRight(-50, 360).type("indexed"))
                 ).execute().actionGet();
         assertThat(searchResponse.getHits().totalHits(), equalTo(1l));
         searchResponse = client().prepareSearch()
                 .setQuery(
                         filteredQuery(matchAllQuery(),
-                                geoBoundingBoxQuery("location").topLeft(90, 0).bottomRight(-90, 360))
+                                geoBoundingBoxQuery("location").coerce(true).topLeft(90, 0).bottomRight(-90, 360))
                 ).execute().actionGet();
         assertThat(searchResponse.getHits().totalHits(), equalTo(2l));
         searchResponse = client().prepareSearch()
                 .setQuery(
                         filteredQuery(matchAllQuery(),
-                                geoBoundingBoxQuery("location").topLeft(90, 0).bottomRight(-90, 360).type("indexed"))
+                                geoBoundingBoxQuery("location").coerce(true).topLeft(90, 0).bottomRight(-90, 360).type("indexed"))
                 ).execute().actionGet();
         assertThat(searchResponse.getHits().totalHits(), equalTo(2l));
     }
@@ -221,8 +221,8 @@ public class GeoDistanceIT extends ESIntegTestCase {
     public void testDistanceSortingMVFields() throws Exception {
         XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type1")
                 .startObject("properties").startObject("locations").field("type", "geo_point").field("lat_lon", true)
-                .startObject("fielddata").field("format", randomNumericFieldDataFormat()).endObject().endObject().endObject()
-                .endObject().endObject();
+                .field("ignore_malformed", true).field("coerce", true).startObject("fielddata")
+                .field("format", randomNumericFieldDataFormat()).endObject().endObject().endObject().endObject().endObject();
         assertAcked(prepareCreate("test")
                 .addMapping("type1", xContentBuilder));
         ensureGreen();
@@ -233,6 +233,11 @@ public class GeoDistanceIT extends ESIntegTestCase {
                 .endObject()).execute().actionGet();

         client().prepareIndex("test", "type1", "2").setSource(jsonBuilder().startObject()
+                .field("names", "New York 2")
+                .startObject("locations").field("lat", 400.7143528).field("lon", 285.9990269).endObject()
+                .endObject()).execute().actionGet();
+
+        client().prepareIndex("test", "type1", "3").setSource(jsonBuilder().startObject()
                 .field("names", "Times Square", "Tribeca")
                 .startArray("locations")
                 // to NY: 5.286 km
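The new "New York 2" document is indexed with deliberately out-of-range coordinates so that the `coerce` option in the mapping above has something to fix. A rough sketch of the arithmetic (assuming the same GeoUtils.normalizePoint normalization used elsewhere in this change) explains the ~421 m sort distances asserted in the hunks below:

// Illustrative only: how (400.7143528, 285.9990269) is brought back into range.
// lat: 400.7143528 - 360 = 40.7143528
// lon: 285.9990269 - 360 = -74.0009731
GeoPoint newYork2 = new GeoPoint(400.7143528, 285.9990269);
GeoUtils.normalizePoint(newYork2, true, true);   // now (40.7143528, -74.0009731)
// That point lies roughly 421 m from the sort origin (40.7143528, -74.0059731)
// used by the geo distance sorts that follow.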
@@ -242,7 +247,7 @@ public class GeoDistanceIT extends ESIntegTestCase {
                 .endArray()
                 .endObject()).execute().actionGet();

-        client().prepareIndex("test", "type1", "3").setSource(jsonBuilder().startObject()
+        client().prepareIndex("test", "type1", "4").setSource(jsonBuilder().startObject()
                 .field("names", "Wall Street", "Soho")
                 .startArray("locations")
                 // to NY: 1.055 km
@@ -253,7 +258,7 @@ public class GeoDistanceIT extends ESIntegTestCase {
                 .endObject()).execute().actionGet();


-        client().prepareIndex("test", "type1", "4").setSource(jsonBuilder().startObject()
+        client().prepareIndex("test", "type1", "5").setSource(jsonBuilder().startObject()
                 .field("names", "Greenwich Village", "Brooklyn")
                 .startArray("locations")
                 // to NY: 2.029 km
@@ -270,70 +275,76 @@ public class GeoDistanceIT extends ESIntegTestCase {
                 .addSort(SortBuilders.geoDistanceSort("locations").point(40.7143528, -74.0059731).order(SortOrder.ASC))
                 .execute().actionGet();

-        assertHitCount(searchResponse, 4);
-        assertOrderedSearchHits(searchResponse, "1", "2", "3", "4");
+        assertHitCount(searchResponse, 5);
+        assertOrderedSearchHits(searchResponse, "1", "2", "3", "4", "5");
         assertThat(((Number) searchResponse.getHits().getAt(0).sortValues()[0]).doubleValue(), closeTo(0d, 10d));
-        assertThat(((Number) searchResponse.getHits().getAt(1).sortValues()[0]).doubleValue(), closeTo(462.1d, 10d));
-        assertThat(((Number) searchResponse.getHits().getAt(2).sortValues()[0]).doubleValue(), closeTo(1055.0d, 10d));
-        assertThat(((Number) searchResponse.getHits().getAt(3).sortValues()[0]).doubleValue(), closeTo(2029.0d, 10d));
+        assertThat(((Number) searchResponse.getHits().getAt(1).sortValues()[0]).doubleValue(), closeTo(421.2d, 10d));
+        assertThat(((Number) searchResponse.getHits().getAt(2).sortValues()[0]).doubleValue(), closeTo(462.1d, 10d));
+        assertThat(((Number) searchResponse.getHits().getAt(3).sortValues()[0]).doubleValue(), closeTo(1055.0d, 10d));
+        assertThat(((Number) searchResponse.getHits().getAt(4).sortValues()[0]).doubleValue(), closeTo(2029.0d, 10d));

         // Order: Asc, Mode: max
         searchResponse = client().prepareSearch("test").setQuery(matchAllQuery())
                 .addSort(SortBuilders.geoDistanceSort("locations").point(40.7143528, -74.0059731).order(SortOrder.ASC).sortMode("max"))
                 .execute().actionGet();

-        assertHitCount(searchResponse, 4);
-        assertOrderedSearchHits(searchResponse, "1", "3", "2", "4");
+        assertHitCount(searchResponse, 5);
+        assertOrderedSearchHits(searchResponse, "1", "2", "4", "3", "5");
         assertThat(((Number) searchResponse.getHits().getAt(0).sortValues()[0]).doubleValue(), closeTo(0d, 10d));
-        assertThat(((Number) searchResponse.getHits().getAt(1).sortValues()[0]).doubleValue(), closeTo(1258.0d, 10d));
-        assertThat(((Number) searchResponse.getHits().getAt(2).sortValues()[0]).doubleValue(), closeTo(5286.0d, 10d));
-        assertThat(((Number) searchResponse.getHits().getAt(3).sortValues()[0]).doubleValue(), closeTo(8572.0d, 10d));
+        assertThat(((Number) searchResponse.getHits().getAt(1).sortValues()[0]).doubleValue(), closeTo(421.2d, 10d));
+        assertThat(((Number) searchResponse.getHits().getAt(2).sortValues()[0]).doubleValue(), closeTo(1258.0d, 10d));
+        assertThat(((Number) searchResponse.getHits().getAt(3).sortValues()[0]).doubleValue(), closeTo(5286.0d, 10d));
+        assertThat(((Number) searchResponse.getHits().getAt(4).sortValues()[0]).doubleValue(), closeTo(8572.0d, 10d));

         // Order: Desc
         searchResponse = client().prepareSearch("test").setQuery(matchAllQuery())
                 .addSort(SortBuilders.geoDistanceSort("locations").point(40.7143528, -74.0059731).order(SortOrder.DESC))
                 .execute().actionGet();

-        assertHitCount(searchResponse, 4);
-        assertOrderedSearchHits(searchResponse, "4", "2", "3", "1");
+        assertHitCount(searchResponse, 5);
+        assertOrderedSearchHits(searchResponse, "5", "3", "4", "2", "1");
         assertThat(((Number) searchResponse.getHits().getAt(0).sortValues()[0]).doubleValue(), closeTo(8572.0d, 10d));
         assertThat(((Number) searchResponse.getHits().getAt(1).sortValues()[0]).doubleValue(), closeTo(5286.0d, 10d));
         assertThat(((Number) searchResponse.getHits().getAt(2).sortValues()[0]).doubleValue(), closeTo(1258.0d, 10d));
-        assertThat(((Number) searchResponse.getHits().getAt(3).sortValues()[0]).doubleValue(), closeTo(0d, 10d));
+        assertThat(((Number) searchResponse.getHits().getAt(3).sortValues()[0]).doubleValue(), closeTo(421.2d, 10d));
+        assertThat(((Number) searchResponse.getHits().getAt(4).sortValues()[0]).doubleValue(), closeTo(0d, 10d));

         // Order: Desc, Mode: min
         searchResponse = client().prepareSearch("test").setQuery(matchAllQuery())
                 .addSort(SortBuilders.geoDistanceSort("locations").point(40.7143528, -74.0059731).order(SortOrder.DESC).sortMode("min"))
                 .execute().actionGet();

-        assertHitCount(searchResponse, 4);
-        assertOrderedSearchHits(searchResponse, "4", "3", "2", "1");
+        assertHitCount(searchResponse, 5);
+        assertOrderedSearchHits(searchResponse, "5", "4", "3", "2", "1");
         assertThat(((Number) searchResponse.getHits().getAt(0).sortValues()[0]).doubleValue(), closeTo(2029.0d, 10d));
         assertThat(((Number) searchResponse.getHits().getAt(1).sortValues()[0]).doubleValue(), closeTo(1055.0d, 10d));
         assertThat(((Number) searchResponse.getHits().getAt(2).sortValues()[0]).doubleValue(), closeTo(462.1d, 10d));
-        assertThat(((Number) searchResponse.getHits().getAt(3).sortValues()[0]).doubleValue(), closeTo(0d, 10d));
+        assertThat(((Number) searchResponse.getHits().getAt(3).sortValues()[0]).doubleValue(), closeTo(421.2d, 10d));
+        assertThat(((Number) searchResponse.getHits().getAt(4).sortValues()[0]).doubleValue(), closeTo(0d, 10d));

         searchResponse = client().prepareSearch("test").setQuery(matchAllQuery())
                 .addSort(SortBuilders.geoDistanceSort("locations").point(40.7143528, -74.0059731).sortMode("avg").order(SortOrder.ASC))
                 .execute().actionGet();

-        assertHitCount(searchResponse, 4);
-        assertOrderedSearchHits(searchResponse, "1", "3", "2", "4");
+        assertHitCount(searchResponse, 5);
+        assertOrderedSearchHits(searchResponse, "1", "2", "4", "3", "5");
         assertThat(((Number) searchResponse.getHits().getAt(0).sortValues()[0]).doubleValue(), closeTo(0d, 10d));
-        assertThat(((Number) searchResponse.getHits().getAt(1).sortValues()[0]).doubleValue(), closeTo(1157d, 10d));
-        assertThat(((Number) searchResponse.getHits().getAt(2).sortValues()[0]).doubleValue(), closeTo(2874d, 10d));
-        assertThat(((Number) searchResponse.getHits().getAt(3).sortValues()[0]).doubleValue(), closeTo(5301d, 10d));
+        assertThat(((Number) searchResponse.getHits().getAt(1).sortValues()[0]).doubleValue(), closeTo(421.2d, 10d));
+        assertThat(((Number) searchResponse.getHits().getAt(2).sortValues()[0]).doubleValue(), closeTo(1157d, 10d));
+        assertThat(((Number) searchResponse.getHits().getAt(3).sortValues()[0]).doubleValue(), closeTo(2874d, 10d));
+        assertThat(((Number) searchResponse.getHits().getAt(4).sortValues()[0]).doubleValue(), closeTo(5301d, 10d));

         searchResponse = client().prepareSearch("test").setQuery(matchAllQuery())
                 .addSort(SortBuilders.geoDistanceSort("locations").point(40.7143528, -74.0059731).sortMode("avg").order(SortOrder.DESC))
                 .execute().actionGet();

-        assertHitCount(searchResponse, 4);
-        assertOrderedSearchHits(searchResponse, "4", "2", "3", "1");
+        assertHitCount(searchResponse, 5);
+        assertOrderedSearchHits(searchResponse, "5", "3", "4", "2", "1");
         assertThat(((Number) searchResponse.getHits().getAt(0).sortValues()[0]).doubleValue(), closeTo(5301.0d, 10d));
         assertThat(((Number) searchResponse.getHits().getAt(1).sortValues()[0]).doubleValue(), closeTo(2874.0d, 10d));
         assertThat(((Number) searchResponse.getHits().getAt(2).sortValues()[0]).doubleValue(), closeTo(1157.0d, 10d));
-        assertThat(((Number) searchResponse.getHits().getAt(3).sortValues()[0]).doubleValue(), closeTo(0d, 10d));
+        assertThat(((Number) searchResponse.getHits().getAt(3).sortValues()[0]).doubleValue(), closeTo(421.2d, 10d));
+        assertThat(((Number) searchResponse.getHits().getAt(4).sortValues()[0]).doubleValue(), closeTo(0d, 10d));

         assertFailures(client().prepareSearch("test").setQuery(matchAllQuery())
                 .addSort(SortBuilders.geoDistanceSort("locations").point(40.7143528, -74.0059731).sortMode("sum")),
@@ -44,6 +44,25 @@ Then the following simple query can be executed with a
 }
 --------------------------------------------------

+[float]
+==== Query Options
+
+[cols="<,<",options="header",]
+|=======================================================================
+|Option |Description
+|`_name` |Optional name field to identify the filter
+
+|`coerce` |Set to `true` to normalize longitude and latitude values to a
+standard -180:180 / -90:90 coordinate system. (default is `false`).
+
+|`ignore_malformed` |Set to `true` to
+accept geo points with invalid latitude or longitude (default is `false`).
+
+|`type` |Set to one of `indexed` or `memory` to define whether this filter will
+be executed in memory or indexed. See <<Type,Type>> below for further details.
+Default is `memory`.
+|=======================================================================
+
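As a Java-API illustration of the `coerce` option documented above, here is a fragment in the style of the GeoBoundingBoxIT changes earlier in this diff (the static helpers client(), filteredQuery, matchAllQuery and geoBoundingBoxQuery are assumed to be in scope, as in that test; this is a sketch, not part of the change set):

SearchResponse searchResponse = client().prepareSearch()
        .setQuery(
                filteredQuery(matchAllQuery(),
                        geoBoundingBoxQuery("location").coerce(true)
                                .topLeft(50, 0).bottomRight(-50, 360)))   // longitude 360 is folded back into -180..180
        .execute().actionGet();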
 [float]
 ==== Accepted Formats

@@ -158,6 +158,19 @@ The following are options allowed on the filter:
 sure the `geo_point` type index lat lon in this case), or `none` which
 disables bounding box optimization.

+`_name`::
+
+Optional name field to identify the query
+
+`coerce`::
+
+Set to `true` to normalize longitude and latitude values to a standard -180:180 / -90:90
+coordinate system. (default is `false`).
+
+`ignore_malformed`::
+
+Set to `true` to accept geo points with invalid latitude or
+longitude (default is `false`).
+
 [float]
 ==== geo_point Type
@@ -24,7 +24,7 @@ Filters documents that exists within a range from a specific point:
 }
 --------------------------------------------------

-Supports the same point location parameter as the
+Supports the same point location parameter and query options as the
 <<query-dsl-geo-distance-query,geo_distance>>
 filter. And also support the common parameters for range (lt, lte, gt,
 gte, from, to, include_upper and include_lower).
@@ -26,6 +26,20 @@ points. Here is an example:
 }
 --------------------------------------------------

+[float]
+==== Query Options
+
+[cols="<,<",options="header",]
+|=======================================================================
+|Option |Description
+|`_name` |Optional name field to identify the filter
+
+|`coerce` |Set to `true` to normalize longitude and latitude values to a
+standard -180:180 / -90:90 coordinate system. (default is `false`).
+
+|`ignore_malformed` |Set to `true` to accept geo points with invalid latitude or
+longitude (default is `false`).
+
 [float]
 ==== Allowed Formats
