Add GeoPointV2 Field Mapping

This commit adds the abstraction layer to GeoPointFieldMapper needed to cut over to Lucene 5.4's new GeoPointField type while maintaining backward compatibility with 'legacy' geo_point indexes.
Nicholas Knize 2015-11-04 13:36:30 -06:00
parent 3cabff9880
commit afaf96732a
20 changed files with 1412 additions and 915 deletions
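At its core, the cutover keys off the version an index was created with: a shared TypeParser picks the legacy mapper for existing indexes and the new Lucene GeoPointField-backed mapper for newly created ones. A condensed sketch of that selection (simplified from BaseGeoPointFieldMapper.TypeParser in the diff below; the shared option parsing is elided and, per the norelease note, the cutover version is still a placeholder):

    // simplified from BaseGeoPointFieldMapper.TypeParser#parse in this commit
    public Mapper.Builder<?, ?> parse(String name, Map<String, Object> node, ParserContext parserContext) {
        BaseGeoPointFieldMapper.Builder builder;
        if (parserContext.indexVersionCreated().onOrBefore(Version.CURRENT)) { // placeholder until V_2_2_0
            builder = new GeoPointFieldMapperLegacy.Builder(name);  // keeps the old doc-values encoding
        } else {
            builder = new GeoPointFieldMapper.Builder(name);        // Lucene 5.4 GeoPointField
        }
        // ... shared parsing of lat_lon, geohash, geohash_precision, ignore_malformed, multi fields ...
        return builder;
    }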

org/elasticsearch/index/mapper/MapperBuilders.java

@@ -21,9 +21,7 @@ package org.elasticsearch.index.mapper;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.mapper.core.*;
import org.elasticsearch.index.mapper.geo.GeoPointFieldMapper;
import org.elasticsearch.index.mapper.geo.GeoShapeFieldMapper;
import org.elasticsearch.index.mapper.internal.*;
import org.elasticsearch.index.mapper.ip.IpFieldMapper;
import org.elasticsearch.index.mapper.object.ObjectMapper;
import org.elasticsearch.index.mapper.object.RootObjectMapper;
@@ -92,10 +90,6 @@ public final class MapperBuilders {
return new DoubleFieldMapper.Builder(name);
}
public static GeoPointFieldMapper.Builder geoPointField(String name) {
return new GeoPointFieldMapper.Builder(name);
}
public static GeoShapeFieldMapper.Builder geoShapeField(String name) {
return new GeoShapeFieldMapper.Builder(name);
}

org/elasticsearch/index/mapper/geo/BaseGeoPointFieldMapper.java

@@ -0,0 +1,539 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.mapper.geo;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.util.GeoHashUtils;
import org.apache.lucene.util.NumericUtils;
import org.elasticsearch.Version;
import org.elasticsearch.common.Explicit;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.collect.Iterators;
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.geo.GeoUtils;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.support.XContentMapValues;
import org.elasticsearch.index.mapper.ContentPath;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MergeMappingException;
import org.elasticsearch.index.mapper.MergeResult;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.mapper.core.DoubleFieldMapper;
import org.elasticsearch.index.mapper.core.NumberFieldMapper;
import org.elasticsearch.index.mapper.core.StringFieldMapper;
import org.elasticsearch.index.mapper.object.ArrayValueMapperParser;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import static org.elasticsearch.index.mapper.MapperBuilders.doubleField;
import static org.elasticsearch.index.mapper.MapperBuilders.stringField;
import static org.elasticsearch.index.mapper.core.TypeParsers.parseField;
import static org.elasticsearch.index.mapper.core.TypeParsers.parseMultiField;
/**
* GeoPointFieldMapper base class to maintain backward compatibility
*/
public abstract class BaseGeoPointFieldMapper extends FieldMapper implements ArrayValueMapperParser {
public static final String CONTENT_TYPE = "geo_point";
public static class Names {
public static final String LAT = "lat";
public static final String LAT_SUFFIX = "." + LAT;
public static final String LON = "lon";
public static final String LON_SUFFIX = "." + LON;
public static final String GEOHASH = "geohash";
public static final String GEOHASH_SUFFIX = "." + GEOHASH;
public static final String IGNORE_MALFORMED = "ignore_malformed";
}
public static class Defaults {
public static final ContentPath.Type PATH_TYPE = ContentPath.Type.FULL;
public static final boolean ENABLE_LATLON = false;
public static final boolean ENABLE_GEOHASH = false;
public static final boolean ENABLE_GEOHASH_PREFIX = false;
public static final int GEO_HASH_PRECISION = GeoHashUtils.PRECISION;
public static final Explicit<Boolean> IGNORE_MALFORMED = new Explicit(false, false);
}
public abstract static class Builder<T extends Builder, Y extends BaseGeoPointFieldMapper> extends FieldMapper.Builder<T, Y> {
protected ContentPath.Type pathType = Defaults.PATH_TYPE;
protected boolean enableLatLon = Defaults.ENABLE_LATLON;
protected Integer precisionStep;
protected boolean enableGeoHash = Defaults.ENABLE_GEOHASH;
protected boolean enableGeoHashPrefix = Defaults.ENABLE_GEOHASH_PREFIX;
protected int geoHashPrecision = Defaults.GEO_HASH_PRECISION;
protected Boolean ignoreMalformed;
public Builder(String name, GeoPointFieldType fieldType) {
super(name, fieldType);
}
@Override
public GeoPointFieldType fieldType() {
return (GeoPointFieldType)fieldType;
}
@Override
public T multiFieldPathType(ContentPath.Type pathType) {
this.pathType = pathType;
return builder;
}
@Override
public T fieldDataSettings(Settings settings) {
this.fieldDataSettings = settings;
return builder;
}
public T enableLatLon(boolean enableLatLon) {
this.enableLatLon = enableLatLon;
return builder;
}
public T precisionStep(int precisionStep) {
this.precisionStep = precisionStep;
return builder;
}
public T enableGeoHash(boolean enableGeoHash) {
this.enableGeoHash = enableGeoHash;
return builder;
}
public T geoHashPrefix(boolean enableGeoHashPrefix) {
this.enableGeoHashPrefix = enableGeoHashPrefix;
return builder;
}
public T geoHashPrecision(int precision) {
this.geoHashPrecision = precision;
return builder;
}
public T ignoreMalformed(boolean ignoreMalformed) {
this.ignoreMalformed = ignoreMalformed;
return builder;
}
protected Explicit<Boolean> ignoreMalformed(BuilderContext context) {
if (ignoreMalformed != null) {
return new Explicit<>(ignoreMalformed, true);
}
if (context.indexSettings() != null) {
return new Explicit<>(context.indexSettings().getAsBoolean("index.mapping.ignore_malformed", Defaults.IGNORE_MALFORMED.value()), false);
}
return Defaults.IGNORE_MALFORMED;
}
public abstract Y build(BuilderContext context, String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType,
Settings indexSettings, ContentPath.Type pathType, DoubleFieldMapper latMapper, DoubleFieldMapper lonMapper,
StringFieldMapper geoHashMapper, MultiFields multiFields, Explicit<Boolean> ignoreMalformed, CopyTo copyTo);
public Y build(Mapper.BuilderContext context) {
ContentPath.Type origPathType = context.path().pathType();
context.path().pathType(pathType);
GeoPointFieldType geoPointFieldType = (GeoPointFieldType)fieldType;
DoubleFieldMapper latMapper = null;
DoubleFieldMapper lonMapper = null;
context.path().add(name);
if (enableLatLon) {
NumberFieldMapper.Builder<?, ?> latMapperBuilder = doubleField(Names.LAT).includeInAll(false);
NumberFieldMapper.Builder<?, ?> lonMapperBuilder = doubleField(Names.LON).includeInAll(false);
if (precisionStep != null) {
latMapperBuilder.precisionStep(precisionStep);
lonMapperBuilder.precisionStep(precisionStep);
}
latMapper = (DoubleFieldMapper) latMapperBuilder.includeInAll(false).store(fieldType.stored()).docValues(false).build(context);
lonMapper = (DoubleFieldMapper) lonMapperBuilder.includeInAll(false).store(fieldType.stored()).docValues(false).build(context);
geoPointFieldType.setLatLonEnabled(latMapper.fieldType(), lonMapper.fieldType());
}
StringFieldMapper geoHashMapper = null;
if (enableGeoHash || enableGeoHashPrefix) {
// TODO: possible also implicitly enable geohash if geohash precision is set
geoHashMapper = stringField(Names.GEOHASH).index(true).tokenized(false).includeInAll(false).store(fieldType.stored())
.omitNorms(true).indexOptions(IndexOptions.DOCS).build(context);
geoPointFieldType.setGeoHashEnabled(geoHashMapper.fieldType(), geoHashPrecision, enableGeoHashPrefix);
}
context.path().remove();
context.path().pathType(origPathType);
return build(context, name, fieldType, defaultFieldType, context.indexSettings(), origPathType,
latMapper, lonMapper, geoHashMapper, multiFieldsBuilder.build(this, context), ignoreMalformed(context), copyTo);
}
}
public abstract static class TypeParser implements Mapper.TypeParser {
@Override
public Mapper.Builder<?, ?> parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
Builder builder;
// norelease update to .before(Version.V_2_2_0) once GeoPointFieldV2 is fully merged
if (parserContext.indexVersionCreated().onOrBefore(Version.CURRENT)) {
builder = new GeoPointFieldMapperLegacy.Builder(name);
} else {
builder = new GeoPointFieldMapper.Builder(name);
}
parseField(builder, name, node, parserContext);
for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
Map.Entry<String, Object> entry = iterator.next();
String propName = Strings.toUnderscoreCase(entry.getKey());
Object propNode = entry.getValue();
if (propName.equals("lat_lon")) {
builder.enableLatLon(XContentMapValues.nodeBooleanValue(propNode));
iterator.remove();
} else if (propName.equals("precision_step")) {
builder.precisionStep(XContentMapValues.nodeIntegerValue(propNode));
iterator.remove();
} else if (propName.equals("geohash")) {
builder.enableGeoHash(XContentMapValues.nodeBooleanValue(propNode));
iterator.remove();
} else if (propName.equals("geohash_prefix")) {
builder.geoHashPrefix(XContentMapValues.nodeBooleanValue(propNode));
if (XContentMapValues.nodeBooleanValue(propNode)) {
builder.enableGeoHash(true);
}
iterator.remove();
} else if (propName.equals("geohash_precision")) {
if (propNode instanceof Integer) {
builder.geoHashPrecision(XContentMapValues.nodeIntegerValue(propNode));
} else {
builder.geoHashPrecision(GeoUtils.geoHashLevelsForPrecision(propNode.toString()));
}
iterator.remove();
} else if (propName.equals(Names.IGNORE_MALFORMED)) {
builder.ignoreMalformed(XContentMapValues.nodeBooleanValue(propNode));
iterator.remove();
} else if (parseMultiField(builder, name, parserContext, propName, propNode)) {
iterator.remove();
}
}
if (builder instanceof GeoPointFieldMapperLegacy.Builder) {
return GeoPointFieldMapperLegacy.parse((GeoPointFieldMapperLegacy.Builder) builder, node, parserContext);
}
return (GeoPointFieldMapper.Builder) builder;
}
}
public static class GeoPointFieldType extends MappedFieldType {
protected MappedFieldType geoHashFieldType;
protected int geoHashPrecision;
protected boolean geoHashPrefixEnabled;
protected MappedFieldType latFieldType;
protected MappedFieldType lonFieldType;
GeoPointFieldType() {}
GeoPointFieldType(GeoPointFieldType ref) {
super(ref);
this.geoHashFieldType = ref.geoHashFieldType; // copying ref is ok, this can never be modified
this.geoHashPrecision = ref.geoHashPrecision;
this.geoHashPrefixEnabled = ref.geoHashPrefixEnabled;
this.latFieldType = ref.latFieldType; // copying ref is ok, this can never be modified
this.lonFieldType = ref.lonFieldType; // copying ref is ok, this can never be modified
}
@Override
public MappedFieldType clone() {
return new GeoPointFieldType(this);
}
@Override
public boolean equals(Object o) {
if (!super.equals(o)) return false;
GeoPointFieldType that = (GeoPointFieldType) o;
return geoHashPrecision == that.geoHashPrecision &&
geoHashPrefixEnabled == that.geoHashPrefixEnabled &&
java.util.Objects.equals(geoHashFieldType, that.geoHashFieldType) &&
java.util.Objects.equals(latFieldType, that.latFieldType) &&
java.util.Objects.equals(lonFieldType, that.lonFieldType);
}
@Override
public int hashCode() {
return java.util.Objects.hash(super.hashCode(), geoHashFieldType, geoHashPrecision, geoHashPrefixEnabled, latFieldType,
lonFieldType);
}
@Override
public String typeName() {
return CONTENT_TYPE;
}
@Override
public void checkCompatibility(MappedFieldType fieldType, List<String> conflicts, boolean strict) {
super.checkCompatibility(fieldType, conflicts, strict);
GeoPointFieldType other = (GeoPointFieldType)fieldType;
if (isLatLonEnabled() != other.isLatLonEnabled()) {
conflicts.add("mapper [" + names().fullName() + "] has different [lat_lon]");
}
if (isLatLonEnabled() && other.isLatLonEnabled() &&
latFieldType().numericPrecisionStep() != other.latFieldType().numericPrecisionStep()) {
conflicts.add("mapper [" + names().fullName() + "] has different [precision_step]");
}
if (isGeoHashEnabled() != other.isGeoHashEnabled()) {
conflicts.add("mapper [" + names().fullName() + "] has different [geohash]");
}
if (geoHashPrecision() != other.geoHashPrecision()) {
conflicts.add("mapper [" + names().fullName() + "] has different [geohash_precision]");
}
if (isGeoHashPrefixEnabled() != other.isGeoHashPrefixEnabled()) {
conflicts.add("mapper [" + names().fullName() + "] has different [geohash_prefix]");
}
}
public boolean isGeoHashEnabled() {
return geoHashFieldType != null;
}
public MappedFieldType geoHashFieldType() {
return geoHashFieldType;
}
public int geoHashPrecision() {
return geoHashPrecision;
}
public boolean isGeoHashPrefixEnabled() {
return geoHashPrefixEnabled;
}
public void setGeoHashEnabled(MappedFieldType geoHashFieldType, int geoHashPrecision, boolean geoHashPrefixEnabled) {
checkIfFrozen();
this.geoHashFieldType = geoHashFieldType;
this.geoHashPrecision = geoHashPrecision;
this.geoHashPrefixEnabled = geoHashPrefixEnabled;
}
public boolean isLatLonEnabled() {
return latFieldType != null;
}
public MappedFieldType latFieldType() {
return latFieldType;
}
public MappedFieldType lonFieldType() {
return lonFieldType;
}
public void setLatLonEnabled(MappedFieldType latFieldType, MappedFieldType lonFieldType) {
checkIfFrozen();
this.latFieldType = latFieldType;
this.lonFieldType = lonFieldType;
}
}
protected final DoubleFieldMapper latMapper;
protected final DoubleFieldMapper lonMapper;
protected final ContentPath.Type pathType;
protected final StringFieldMapper geoHashMapper;
protected Explicit<Boolean> ignoreMalformed;
protected BaseGeoPointFieldMapper(String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType, Settings indexSettings,
ContentPath.Type pathType, DoubleFieldMapper latMapper, DoubleFieldMapper lonMapper, StringFieldMapper geoHashMapper,
MultiFields multiFields, Explicit<Boolean> ignoreMalformed, CopyTo copyTo) {
super(simpleName, fieldType, defaultFieldType, indexSettings, multiFields, copyTo);
this.pathType = pathType;
this.latMapper = latMapper;
this.lonMapper = lonMapper;
this.geoHashMapper = geoHashMapper;
this.ignoreMalformed = ignoreMalformed;
}
@Override
public GeoPointFieldType fieldType() {
return (GeoPointFieldType) super.fieldType();
}
@Override
public void merge(Mapper mergeWith, MergeResult mergeResult) throws MergeMappingException {
super.merge(mergeWith, mergeResult);
if (!this.getClass().equals(mergeWith.getClass())) {
return;
}
BaseGeoPointFieldMapper gpfmMergeWith = (BaseGeoPointFieldMapper) mergeWith;
if (mergeResult.simulate() == false && mergeResult.hasConflicts() == false) {
if (gpfmMergeWith.ignoreMalformed.explicit()) {
this.ignoreMalformed = gpfmMergeWith.ignoreMalformed;
}
}
}
@Override
public Iterator<Mapper> iterator() {
List<Mapper> extras = new ArrayList<>();
if (fieldType().isGeoHashEnabled()) {
extras.add(geoHashMapper);
}
if (fieldType().isLatLonEnabled()) {
extras.add(latMapper);
extras.add(lonMapper);
}
return Iterators.concat(super.iterator(), extras.iterator());
}
@Override
protected String contentType() {
return CONTENT_TYPE;
}
@Override
protected void parseCreateField(ParseContext context, List<Field> fields) throws IOException {
throw new UnsupportedOperationException("Parsing is implemented in parse(), this method should NEVER be called");
}
protected void parse(ParseContext context, GeoPoint point, String geoHash) throws IOException {
if (fieldType().isGeoHashEnabled()) {
if (geoHash == null) {
geoHash = GeoHashUtils.stringEncode(point.lon(), point.lat());
}
addGeoHashField(context, geoHash);
}
if (fieldType().isLatLonEnabled()) {
latMapper.parse(context.createExternalValueContext(point.lat()));
lonMapper.parse(context.createExternalValueContext(point.lon()));
}
multiFields.parse(this, context);
}
@Override
public Mapper parse(ParseContext context) throws IOException {
ContentPath.Type origPathType = context.path().pathType();
context.path().pathType(pathType);
context.path().add(simpleName());
GeoPoint sparse = context.parseExternalValue(GeoPoint.class);
if (sparse != null) {
parse(context, sparse, null);
} else {
sparse = new GeoPoint();
XContentParser.Token token = context.parser().currentToken();
if (token == XContentParser.Token.START_ARRAY) {
token = context.parser().nextToken();
if (token == XContentParser.Token.START_ARRAY) {
// its an array of array of lon/lat [ [1.2, 1.3], [1.4, 1.5] ]
while (token != XContentParser.Token.END_ARRAY) {
parse(context, GeoUtils.parseGeoPoint(context.parser(), sparse), null);
token = context.parser().nextToken();
}
} else {
// its an array of other possible values
if (token == XContentParser.Token.VALUE_NUMBER) {
double lon = context.parser().doubleValue();
token = context.parser().nextToken();
double lat = context.parser().doubleValue();
while ((token = context.parser().nextToken()) != XContentParser.Token.END_ARRAY);
parse(context, sparse.reset(lat, lon), null);
} else {
while (token != XContentParser.Token.END_ARRAY) {
if (token == XContentParser.Token.VALUE_STRING) {
parsePointFromString(context, sparse, context.parser().text());
} else {
parse(context, GeoUtils.parseGeoPoint(context.parser(), sparse), null);
}
token = context.parser().nextToken();
}
}
}
} else if (token == XContentParser.Token.VALUE_STRING) {
parsePointFromString(context, sparse, context.parser().text());
} else if (token != XContentParser.Token.VALUE_NULL) {
parse(context, GeoUtils.parseGeoPoint(context.parser(), sparse), null);
}
}
context.path().remove();
context.path().pathType(origPathType);
return null;
}
private void addGeoHashField(ParseContext context, String geoHash) throws IOException {
int len = Math.min(fieldType().geoHashPrecision(), geoHash.length());
int min = fieldType().isGeoHashPrefixEnabled() ? 1 : len;
for (int i = len; i >= min; i--) {
// side effect of this call is adding the field
geoHashMapper.parse(context.createExternalValueContext(geoHash.substring(0, i)));
}
}
private void parsePointFromString(ParseContext context, GeoPoint sparse, String point) throws IOException {
if (point.indexOf(',') < 0) {
parse(context, sparse.resetFromGeoHash(point), point);
} else {
parse(context, sparse.resetFromString(point), null);
}
}
@Override
protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, Params params) throws IOException {
super.doXContentBody(builder, includeDefaults, params);
if (includeDefaults || pathType != Defaults.PATH_TYPE) {
builder.field("path", pathType.name().toLowerCase(Locale.ROOT));
}
if (includeDefaults || fieldType().isLatLonEnabled() != GeoPointFieldMapper.Defaults.ENABLE_LATLON) {
builder.field("lat_lon", fieldType().isLatLonEnabled());
}
if (fieldType().isLatLonEnabled() && (includeDefaults || fieldType().latFieldType().numericPrecisionStep() != NumericUtils.PRECISION_STEP_DEFAULT)) {
builder.field("precision_step", fieldType().latFieldType().numericPrecisionStep());
}
if (includeDefaults || fieldType().isGeoHashEnabled() != Defaults.ENABLE_GEOHASH) {
builder.field("geohash", fieldType().isGeoHashEnabled());
}
if (includeDefaults || fieldType().isGeoHashPrefixEnabled() != Defaults.ENABLE_GEOHASH_PREFIX) {
builder.field("geohash_prefix", fieldType().isGeoHashPrefixEnabled());
}
if (fieldType().isGeoHashEnabled() && (includeDefaults || fieldType().geoHashPrecision() != Defaults.GEO_HASH_PRECISION)) {
builder.field("geohash_precision", fieldType().geoHashPrecision());
}
if (includeDefaults || ignoreMalformed.explicit()) {
builder.field(Names.IGNORE_MALFORMED, ignoreMalformed.value());
}
}
}

org/elasticsearch/index/mapper/geo/GeoPointFieldMapper.java

@@ -19,38 +19,24 @@
package org.elasticsearch.index.mapper.geo;
import com.carrotsearch.hppc.ObjectHashSet;
import com.carrotsearch.hppc.cursors.ObjectCursor;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldType;
import org.apache.lucene.document.GeoPointField;
import org.apache.lucene.index.DocValuesType;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.NumericUtils;
import org.apache.lucene.util.GeoHashUtils;
import org.elasticsearch.Version;
import org.elasticsearch.common.Explicit;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.collect.Iterators;
import org.elasticsearch.common.geo.GeoDistance;
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.geo.GeoUtils;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.DistanceUnit;
import org.elasticsearch.common.util.ByteUtils;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.support.XContentMapValues;
import org.elasticsearch.index.mapper.*;
import org.elasticsearch.index.mapper.ContentPath;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.mapper.core.DoubleFieldMapper;
import org.elasticsearch.index.mapper.core.NumberFieldMapper;
import org.elasticsearch.index.mapper.core.NumberFieldMapper.CustomNumericDocValuesField;
import org.elasticsearch.index.mapper.core.StringFieldMapper;
import org.elasticsearch.index.mapper.object.ArrayValueMapperParser;
import java.io.IOException;
import java.util.*;
import static org.elasticsearch.index.mapper.MapperBuilders.*;
import static org.elasticsearch.index.mapper.core.TypeParsers.*;
import java.util.Map;
/**
* Parsing: We handle:
@@ -62,736 +48,84 @@ import static org.elasticsearch.index.mapper.core.TypeParsers.*;
* "lon" : 2.1
* }
*/
public class GeoPointFieldMapper extends FieldMapper implements ArrayValueMapperParser {
public class GeoPointFieldMapper extends BaseGeoPointFieldMapper {
public static final String CONTENT_TYPE = "geo_point";
public static class Names {
public static final String LAT = "lat";
public static final String LAT_SUFFIX = "." + LAT;
public static final String LON = "lon";
public static final String LON_SUFFIX = "." + LON;
public static final String GEOHASH = "geohash";
public static final String GEOHASH_SUFFIX = "." + GEOHASH;
public static final String IGNORE_MALFORMED = "ignore_malformed";
public static final String COERCE = "coerce";
}
public static class Defaults extends BaseGeoPointFieldMapper.Defaults {
public static class Defaults {
public static final ContentPath.Type PATH_TYPE = ContentPath.Type.FULL;
public static final boolean ENABLE_LATLON = false;
public static final boolean ENABLE_GEOHASH = false;
public static final boolean ENABLE_GEOHASH_PREFIX = false;
public static final int GEO_HASH_PRECISION = GeoHashUtils.PRECISION;
public static final Explicit<Boolean> IGNORE_MALFORMED = new Explicit(false, false);
public static final Explicit<Boolean> COERCE = new Explicit(false, false);
public static final MappedFieldType FIELD_TYPE = new GeoPointFieldType();
public static final GeoPointFieldType FIELD_TYPE = new GeoPointFieldType();
static {
FIELD_TYPE.setIndexOptions(IndexOptions.DOCS);
FIELD_TYPE.setTokenized(false);
FIELD_TYPE.setOmitNorms(true);
FIELD_TYPE.setNumericType(FieldType.NumericType.LONG);
FIELD_TYPE.setNumericPrecisionStep(GeoPointField.PRECISION_STEP);
FIELD_TYPE.setDocValuesType(DocValuesType.SORTED_NUMERIC);
FIELD_TYPE.setHasDocValues(true);
FIELD_TYPE.setStored(true);
FIELD_TYPE.freeze();
}
}
public static class Builder extends FieldMapper.Builder<Builder, GeoPointFieldMapper> {
private ContentPath.Type pathType = Defaults.PATH_TYPE;
private boolean enableGeoHash = Defaults.ENABLE_GEOHASH;
private boolean enableGeohashPrefix = Defaults.ENABLE_GEOHASH_PREFIX;
private boolean enableLatLon = Defaults.ENABLE_LATLON;
private Integer precisionStep;
private int geoHashPrecision = Defaults.GEO_HASH_PRECISION;
private Boolean ignoreMalformed;
private Boolean coerce;
/**
* Concrete builder for indexed GeoPointField type
*/
public static class Builder extends BaseGeoPointFieldMapper.Builder<Builder, GeoPointFieldMapper> {
public Builder(String name) {
super(name, Defaults.FIELD_TYPE);
this.builder = this;
}
public Builder ignoreMalformed(boolean ignoreMalformed) {
this.ignoreMalformed = ignoreMalformed;
return builder;
}
protected Explicit<Boolean> ignoreMalformed(BuilderContext context) {
if (ignoreMalformed != null) {
return new Explicit<>(ignoreMalformed, true);
}
if (context.indexSettings() != null) {
return new Explicit<>(context.indexSettings().getAsBoolean("index.mapping.ignore_malformed", Defaults.IGNORE_MALFORMED.value()), false);
}
return Defaults.IGNORE_MALFORMED;
}
public Builder coerce(boolean coerce) {
this.coerce = coerce;
return builder;
}
protected Explicit<Boolean> coerce(BuilderContext context) {
if (coerce != null) {
return new Explicit<>(coerce, true);
}
if (context.indexSettings() != null) {
return new Explicit<>(context.indexSettings().getAsBoolean("index.mapping.coerce", Defaults.COERCE.value()), false);
}
return Defaults.COERCE;
}
@Override
public GeoPointFieldType fieldType() {
return (GeoPointFieldType)fieldType;
}
@Override
public Builder multiFieldPathType(ContentPath.Type pathType) {
this.pathType = pathType;
return this;
}
public Builder enableGeoHash(boolean enableGeoHash) {
this.enableGeoHash = enableGeoHash;
return this;
}
public Builder geohashPrefix(boolean enableGeohashPrefix) {
this.enableGeohashPrefix = enableGeohashPrefix;
return this;
}
public Builder enableLatLon(boolean enableLatLon) {
this.enableLatLon = enableLatLon;
return this;
}
public Builder precisionStep(int precisionStep) {
this.precisionStep = precisionStep;
return this;
}
public Builder geoHashPrecision(int precision) {
this.geoHashPrecision = precision;
return this;
}
@Override
public Builder fieldDataSettings(Settings settings) {
this.fieldDataSettings = settings;
return builder;
public GeoPointFieldMapper build(BuilderContext context, String simpleName, MappedFieldType fieldType,
MappedFieldType defaultFieldType, Settings indexSettings, ContentPath.Type pathType, DoubleFieldMapper latMapper,
DoubleFieldMapper lonMapper, StringFieldMapper geoHashMapper, MultiFields multiFields, Explicit<Boolean> ignoreMalformed,
CopyTo copyTo) {
fieldType.setTokenized(false);
setupFieldType(context);
return new GeoPointFieldMapper(simpleName, fieldType, defaultFieldType, indexSettings, pathType, latMapper, lonMapper,
geoHashMapper, multiFields, ignoreMalformed, copyTo);
}
@Override
public GeoPointFieldMapper build(BuilderContext context) {
ContentPath.Type origPathType = context.path().pathType();
context.path().pathType(pathType);
DoubleFieldMapper latMapper = null;
DoubleFieldMapper lonMapper = null;
GeoPointFieldType geoPointFieldType = (GeoPointFieldType)fieldType;
context.path().add(name);
if (enableLatLon) {
NumberFieldMapper.Builder<?, ?> latMapperBuilder = doubleField(Names.LAT).includeInAll(false);
NumberFieldMapper.Builder<?, ?> lonMapperBuilder = doubleField(Names.LON).includeInAll(false);
if (precisionStep != null) {
latMapperBuilder.precisionStep(precisionStep);
lonMapperBuilder.precisionStep(precisionStep);
}
latMapper = (DoubleFieldMapper) latMapperBuilder.includeInAll(false).store(fieldType.stored()).docValues(false).build(context);
lonMapper = (DoubleFieldMapper) lonMapperBuilder.includeInAll(false).store(fieldType.stored()).docValues(false).build(context);
geoPointFieldType.setLatLonEnabled(latMapper.fieldType(), lonMapper.fieldType());
}
StringFieldMapper geohashMapper = null;
if (enableGeoHash || enableGeohashPrefix) {
// TODO: possible also implicitly enable geohash if geohash precision is set
geohashMapper = stringField(Names.GEOHASH).index(true).tokenized(false).includeInAll(false).store(fieldType.stored())
.omitNorms(true).indexOptions(IndexOptions.DOCS).build(context);
geoPointFieldType.setGeohashEnabled(geohashMapper.fieldType(), geoHashPrecision, enableGeohashPrefix);
}
context.path().remove();
context.path().pathType(origPathType);
// this is important: even if geo points feel like they need to be tokenized to distinguish lat from lon, we actually want to
// store them as a single token.
fieldType.setTokenized(false);
setupFieldType(context);
fieldType.setHasDocValues(false);
defaultFieldType.setHasDocValues(false);
return new GeoPointFieldMapper(name, fieldType, defaultFieldType, context.indexSettings(), origPathType,
latMapper, lonMapper, geohashMapper, multiFieldsBuilder.build(this, context), ignoreMalformed(context), coerce(context));
return super.build(context);
}
}
public static class TypeParser implements Mapper.TypeParser {
public static class TypeParser extends BaseGeoPointFieldMapper.TypeParser {
@Override
public Mapper.Builder<?, ?> parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
Builder builder = geoPointField(name);
final boolean indexCreatedBeforeV2_0 = parserContext.indexVersionCreated().before(Version.V_2_0_0);
parseField(builder, name, node, parserContext);
for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
Map.Entry<String, Object> entry = iterator.next();
String propName = Strings.toUnderscoreCase(entry.getKey());
Object propNode = entry.getValue();
if (propName.equals("path") && parserContext.indexVersionCreated().before(Version.V_2_0_0_beta1)) {
builder.multiFieldPathType(parsePathType(name, propNode.toString()));
iterator.remove();
} else if (propName.equals("lat_lon")) {
builder.enableLatLon(XContentMapValues.nodeBooleanValue(propNode));
iterator.remove();
} else if (propName.equals("geohash")) {
builder.enableGeoHash(XContentMapValues.nodeBooleanValue(propNode));
iterator.remove();
} else if (propName.equals("geohash_prefix")) {
builder.geohashPrefix(XContentMapValues.nodeBooleanValue(propNode));
if (XContentMapValues.nodeBooleanValue(propNode)) {
builder.enableGeoHash(true);
}
iterator.remove();
} else if (propName.equals("precision_step")) {
builder.precisionStep(XContentMapValues.nodeIntegerValue(propNode));
iterator.remove();
} else if (propName.equals("geohash_precision")) {
if (propNode instanceof Integer) {
builder.geoHashPrecision(XContentMapValues.nodeIntegerValue(propNode));
} else {
builder.geoHashPrecision(GeoUtils.geoHashLevelsForPrecision(propNode.toString()));
}
iterator.remove();
} else if (propName.equals(Names.IGNORE_MALFORMED)) {
builder.ignoreMalformed(XContentMapValues.nodeBooleanValue(propNode));
iterator.remove();
} else if (indexCreatedBeforeV2_0 && propName.equals("validate")) {
builder.ignoreMalformed(!XContentMapValues.nodeBooleanValue(propNode));
iterator.remove();
} else if (indexCreatedBeforeV2_0 && propName.equals("validate_lon")) {
builder.ignoreMalformed(!XContentMapValues.nodeBooleanValue(propNode));
iterator.remove();
} else if (indexCreatedBeforeV2_0 && propName.equals("validate_lat")) {
builder.ignoreMalformed(!XContentMapValues.nodeBooleanValue(propNode));
iterator.remove();
} else if (propName.equals(Names.COERCE)) {
builder.coerce(XContentMapValues.nodeBooleanValue(propNode));
iterator.remove();
} else if (indexCreatedBeforeV2_0 && propName.equals("normalize")) {
builder.coerce(XContentMapValues.nodeBooleanValue(propNode));
iterator.remove();
} else if (indexCreatedBeforeV2_0 && propName.equals("normalize_lat")) {
builder.coerce(XContentMapValues.nodeBooleanValue(propNode));
iterator.remove();
} else if (indexCreatedBeforeV2_0 && propName.equals("normalize_lon")) {
builder.coerce(XContentMapValues.nodeBooleanValue(propNode));
iterator.remove();
} else if (parseMultiField(builder, name, parserContext, propName, propNode)) {
iterator.remove();
}
}
return builder;
return super.parse(name, node, parserContext);
}
}
public static final class GeoPointFieldType extends MappedFieldType {
private MappedFieldType geohashFieldType;
private int geohashPrecision;
private boolean geohashPrefixEnabled;
private MappedFieldType latFieldType;
private MappedFieldType lonFieldType;
public GeoPointFieldType() {}
protected GeoPointFieldType(GeoPointFieldType ref) {
super(ref);
this.geohashFieldType = ref.geohashFieldType; // copying ref is ok, this can never be modified
this.geohashPrecision = ref.geohashPrecision;
this.geohashPrefixEnabled = ref.geohashPrefixEnabled;
this.latFieldType = ref.latFieldType; // copying ref is ok, this can never be modified
this.lonFieldType = ref.lonFieldType; // copying ref is ok, this can never be modified
}
@Override
public MappedFieldType clone() {
return new GeoPointFieldType(this);
}
@Override
public boolean equals(Object o) {
if (!super.equals(o)) return false;
GeoPointFieldType that = (GeoPointFieldType) o;
return geohashPrecision == that.geohashPrecision &&
geohashPrefixEnabled == that.geohashPrefixEnabled &&
java.util.Objects.equals(geohashFieldType, that.geohashFieldType) &&
java.util.Objects.equals(latFieldType, that.latFieldType) &&
java.util.Objects.equals(lonFieldType, that.lonFieldType);
}
@Override
public int hashCode() {
return java.util.Objects.hash(super.hashCode(), geohashFieldType, geohashPrecision, geohashPrefixEnabled, latFieldType,
lonFieldType);
}
@Override
public String typeName() {
return CONTENT_TYPE;
}
@Override
public void checkCompatibility(MappedFieldType fieldType, List<String> conflicts, boolean strict) {
super.checkCompatibility(fieldType, conflicts, strict);
GeoPointFieldType other = (GeoPointFieldType)fieldType;
if (isLatLonEnabled() != other.isLatLonEnabled()) {
conflicts.add("mapper [" + names().fullName() + "] has different [lat_lon]");
}
if (isGeohashEnabled() != other.isGeohashEnabled()) {
conflicts.add("mapper [" + names().fullName() + "] has different [geohash]");
}
if (geohashPrecision() != other.geohashPrecision()) {
conflicts.add("mapper [" + names().fullName() + "] has different [geohash_precision]");
}
if (isGeohashPrefixEnabled() != other.isGeohashPrefixEnabled()) {
conflicts.add("mapper [" + names().fullName() + "] has different [geohash_prefix]");
}
if (isLatLonEnabled() && other.isLatLonEnabled() &&
latFieldType().numericPrecisionStep() != other.latFieldType().numericPrecisionStep()) {
conflicts.add("mapper [" + names().fullName() + "] has different [precision_step]");
}
}
public boolean isGeohashEnabled() {
return geohashFieldType != null;
}
public MappedFieldType geohashFieldType() {
return geohashFieldType;
}
public int geohashPrecision() {
return geohashPrecision;
}
public boolean isGeohashPrefixEnabled() {
return geohashPrefixEnabled;
}
public void setGeohashEnabled(MappedFieldType geohashFieldType, int geohashPrecision, boolean geohashPrefixEnabled) {
checkIfFrozen();
this.geohashFieldType = geohashFieldType;
this.geohashPrecision = geohashPrecision;
this.geohashPrefixEnabled = geohashPrefixEnabled;
}
public boolean isLatLonEnabled() {
return latFieldType != null;
}
public MappedFieldType latFieldType() {
return latFieldType;
}
public MappedFieldType lonFieldType() {
return lonFieldType;
}
public void setLatLonEnabled(MappedFieldType latFieldType, MappedFieldType lonFieldType) {
checkIfFrozen();
this.latFieldType = latFieldType;
this.lonFieldType = lonFieldType;
}
@Override
public GeoPoint value(Object value) {
if (value instanceof GeoPoint) {
return (GeoPoint) value;
} else {
return GeoPoint.parseFromLatLon(value.toString());
}
}
}
/**
* A byte-aligned fixed-length encoding for latitudes and longitudes.
*/
public static final class Encoding {
// With 14 bytes we already have better precision than a double since a double has 11 bits of exponent
private static final int MAX_NUM_BYTES = 14;
private static final Encoding[] INSTANCES;
static {
INSTANCES = new Encoding[MAX_NUM_BYTES + 1];
for (int numBytes = 2; numBytes <= MAX_NUM_BYTES; numBytes += 2) {
INSTANCES[numBytes] = new Encoding(numBytes);
}
}
/** Get an instance based on the number of bytes that has been used to encode values. */
public static final Encoding of(int numBytesPerValue) {
final Encoding instance = INSTANCES[numBytesPerValue];
if (instance == null) {
throw new IllegalStateException("No encoding for " + numBytesPerValue + " bytes per value");
}
return instance;
}
/** Get an instance based on the expected precision. Here are examples of the number of required bytes per value depending on the
* expected precision:<ul>
* <li>1km: 4 bytes</li>
* <li>3m: 6 bytes</li>
* <li>1m: 8 bytes</li>
* <li>1cm: 8 bytes</li>
* <li>1mm: 10 bytes</li></ul> */
public static final Encoding of(DistanceUnit.Distance precision) {
for (Encoding encoding : INSTANCES) {
if (encoding != null && encoding.precision().compareTo(precision) <= 0) {
return encoding;
}
}
return INSTANCES[MAX_NUM_BYTES];
}
private final DistanceUnit.Distance precision;
private final int numBytes;
private final int numBytesPerCoordinate;
private final double factor;
private Encoding(int numBytes) {
assert numBytes >= 1 && numBytes <= MAX_NUM_BYTES;
assert (numBytes & 1) == 0; // we don't support odd numBytes for the moment
this.numBytes = numBytes;
this.numBytesPerCoordinate = numBytes / 2;
this.factor = Math.pow(2, - numBytesPerCoordinate * 8 + 9);
assert (1L << (numBytesPerCoordinate * 8 - 1)) * factor > 180 && (1L << (numBytesPerCoordinate * 8 - 2)) * factor < 180 : numBytesPerCoordinate + " " + factor;
if (numBytes == MAX_NUM_BYTES) {
// no precision loss compared to a double
precision = new DistanceUnit.Distance(0, DistanceUnit.DEFAULT);
} else {
precision = new DistanceUnit.Distance(
GeoDistance.PLANE.calculate(0, 0, factor / 2, factor / 2, DistanceUnit.DEFAULT), // factor/2 because we use Math.round instead of a cast to convert the double to a long
DistanceUnit.DEFAULT);
}
}
public DistanceUnit.Distance precision() {
return precision;
}
/** The number of bytes required to encode a single geo point. */
public final int numBytes() {
return numBytes;
}
/** The number of bits required to encode a single coordinate of a geo point. */
public int numBitsPerCoordinate() {
return numBytesPerCoordinate << 3;
}
/** Return the bits that encode a latitude/longitude. */
public long encodeCoordinate(double lat) {
return Math.round((lat + 180) / factor);
}
/** Decode a sequence of bits into the original coordinate. */
public double decodeCoordinate(long bits) {
return bits * factor - 180;
}
private void encodeBits(long bits, byte[] out, int offset) {
for (int i = 0; i < numBytesPerCoordinate; ++i) {
out[offset++] = (byte) bits;
bits >>>= 8;
}
assert bits == 0;
}
private long decodeBits(byte [] in, int offset) {
long r = in[offset++] & 0xFFL;
for (int i = 1; i < numBytesPerCoordinate; ++i) {
r = r | (in[offset++] & 0xFFL) << (i * 8);
}
return r;
}
/** Encode a geo point into a byte-array, over {@link #numBytes()} bytes. */
public void encode(double lat, double lon, byte[] out, int offset) {
encodeBits(encodeCoordinate(lat), out, offset);
encodeBits(encodeCoordinate(lon), out, offset + numBytesPerCoordinate);
}
/** Decode a geo point from a byte-array, reading {@link #numBytes()} bytes. */
public GeoPoint decode(byte[] in, int offset, GeoPoint out) {
final long latBits = decodeBits(in, offset);
final long lonBits = decodeBits(in, offset + numBytesPerCoordinate);
return decode(latBits, lonBits, out);
}
/** Decode a geo point from the bits of the encoded latitude and longitudes. */
public GeoPoint decode(long latBits, long lonBits, GeoPoint out) {
final double lat = decodeCoordinate(latBits);
final double lon = decodeCoordinate(lonBits);
return out.reset(lat, lon);
}
}
private final ContentPath.Type pathType;
private final DoubleFieldMapper latMapper;
private final DoubleFieldMapper lonMapper;
private final StringFieldMapper geohashMapper;
protected Explicit<Boolean> ignoreMalformed;
protected Explicit<Boolean> coerce;
public GeoPointFieldMapper(String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType, Settings indexSettings,
ContentPath.Type pathType, DoubleFieldMapper latMapper, DoubleFieldMapper lonMapper, StringFieldMapper geohashMapper,
MultiFields multiFields, Explicit<Boolean> ignoreMalformed, Explicit<Boolean> coerce) {
super(simpleName, fieldType, defaultFieldType, indexSettings, multiFields, null);
this.pathType = pathType;
this.latMapper = latMapper;
this.lonMapper = lonMapper;
this.geohashMapper = geohashMapper;
this.ignoreMalformed = ignoreMalformed;
this.coerce = coerce;
ContentPath.Type pathType, DoubleFieldMapper latMapper, DoubleFieldMapper lonMapper,
StringFieldMapper geoHashMapper, MultiFields multiFields, Explicit<Boolean> ignoreMalformed, CopyTo copyTo) {
super(simpleName, fieldType, defaultFieldType, indexSettings, pathType, latMapper, lonMapper, geoHashMapper, multiFields,
ignoreMalformed, copyTo);
}
@Override
protected String contentType() {
return CONTENT_TYPE;
}
@Override
public GeoPointFieldType fieldType() {
return (GeoPointFieldType) super.fieldType();
}
@Override
public void merge(Mapper mergeWith, MergeResult mergeResult) throws MergeMappingException {
super.merge(mergeWith, mergeResult);
if (!this.getClass().equals(mergeWith.getClass())) {
return;
}
GeoPointFieldMapper gpfmMergeWith = (GeoPointFieldMapper) mergeWith;
if (gpfmMergeWith.coerce.explicit()) {
if (coerce.explicit() && coerce.value() != gpfmMergeWith.coerce.value()) {
mergeResult.addConflict("mapper [" + fieldType().names().fullName() + "] has different [coerce]");
}
}
if (mergeResult.simulate() == false && mergeResult.hasConflicts() == false) {
if (gpfmMergeWith.ignoreMalformed.explicit()) {
this.ignoreMalformed = gpfmMergeWith.ignoreMalformed;
}
if (gpfmMergeWith.coerce.explicit()) {
this.coerce = gpfmMergeWith.coerce;
}
}
}
@Override
protected void parseCreateField(ParseContext context, List<Field> fields) throws IOException {
throw new UnsupportedOperationException("Parsing is implemented in parse(), this method should NEVER be called");
}
@Override
public Mapper parse(ParseContext context) throws IOException {
ContentPath.Type origPathType = context.path().pathType();
context.path().pathType(pathType);
context.path().add(simpleName());
GeoPoint sparse = context.parseExternalValue(GeoPoint.class);
if (sparse != null) {
parse(context, sparse, null);
} else {
sparse = new GeoPoint();
XContentParser.Token token = context.parser().currentToken();
if (token == XContentParser.Token.START_ARRAY) {
token = context.parser().nextToken();
if (token == XContentParser.Token.START_ARRAY) {
// its an array of array of lon/lat [ [1.2, 1.3], [1.4, 1.5] ]
while (token != XContentParser.Token.END_ARRAY) {
parse(context, GeoUtils.parseGeoPoint(context.parser(), sparse), null);
token = context.parser().nextToken();
}
} else {
// its an array of other possible values
if (token == XContentParser.Token.VALUE_NUMBER) {
double lon = context.parser().doubleValue();
token = context.parser().nextToken();
double lat = context.parser().doubleValue();
while ((token = context.parser().nextToken()) != XContentParser.Token.END_ARRAY);
parse(context, sparse.reset(lat, lon), null);
} else {
while (token != XContentParser.Token.END_ARRAY) {
if (token == XContentParser.Token.VALUE_STRING) {
parsePointFromString(context, sparse, context.parser().text());
} else {
parse(context, GeoUtils.parseGeoPoint(context.parser(), sparse), null);
}
token = context.parser().nextToken();
}
}
}
} else if (token == XContentParser.Token.VALUE_STRING) {
parsePointFromString(context, sparse, context.parser().text());
} else if (token != XContentParser.Token.VALUE_NULL) {
parse(context, GeoUtils.parseGeoPoint(context.parser(), sparse), null);
}
}
context.path().remove();
context.path().pathType(origPathType);
return null;
}
private void addGeohashField(ParseContext context, String geohash) throws IOException {
int len = Math.min(fieldType().geohashPrecision(), geohash.length());
int min = fieldType().isGeohashPrefixEnabled() ? 1 : len;
for (int i = len; i >= min; i--) {
// side effect of this call is adding the field
geohashMapper.parse(context.createExternalValueContext(geohash.substring(0, i)));
}
}
private void parsePointFromString(ParseContext context, GeoPoint sparse, String point) throws IOException {
if (point.indexOf(',') < 0) {
parse(context, sparse.resetFromGeoHash(point), point);
} else {
parse(context, sparse.resetFromString(point), null);
}
}
private void parse(ParseContext context, GeoPoint point, String geohash) throws IOException {
boolean validPoint = false;
if (coerce.value() == false && ignoreMalformed.value() == false) {
protected void parse(ParseContext context, GeoPoint point, String geoHash) throws IOException {
if (ignoreMalformed.value() == false) {
if (point.lat() > 90.0 || point.lat() < -90.0) {
throw new IllegalArgumentException("illegal latitude value [" + point.lat() + "] for " + name());
}
if (point.lon() > 180.0 || point.lon() < -180) {
throw new IllegalArgumentException("illegal longitude value [" + point.lon() + "] for " + name());
}
validPoint = true;
}
if (coerce.value() == true && validPoint == false) {
// by setting coerce to false we are assuming all geopoints are already in a valid coordinate system
// thus this extra step can be skipped
} else {
// LUCENE WATCH: This will be folded back into Lucene's GeoPointField
GeoUtils.normalizePoint(point, true, true);
GeoUtils.normalizePoint(point);
}
if (fieldType().indexOptions() != IndexOptions.NONE || fieldType().stored()) {
Field field = new Field(fieldType().names().indexName(), Double.toString(point.lat()) + ',' + Double.toString(point.lon()), fieldType());
context.doc().add(field);
}
if (fieldType().isGeohashEnabled()) {
if (geohash == null) {
geohash = GeoHashUtils.stringEncode(point.lon(), point.lat());
}
addGeohashField(context, geohash);
}
if (fieldType().isLatLonEnabled()) {
latMapper.parse(context.createExternalValueContext(point.lat()));
lonMapper.parse(context.createExternalValueContext(point.lon()));
}
if (fieldType().hasDocValues()) {
CustomGeoPointDocValuesField field = (CustomGeoPointDocValuesField) context.doc().getByKey(fieldType().names().indexName());
if (field == null) {
field = new CustomGeoPointDocValuesField(fieldType().names().indexName(), point.lat(), point.lon());
context.doc().addWithKey(fieldType().names().indexName(), field);
} else {
field.add(point.lat(), point.lon());
}
}
multiFields.parse(this, context);
}
@Override
public Iterator<Mapper> iterator() {
List<Mapper> extras = new ArrayList<>();
if (fieldType().isGeohashEnabled()) {
extras.add(geohashMapper);
}
if (fieldType().isLatLonEnabled()) {
extras.add(latMapper);
extras.add(lonMapper);
}
return Iterators.concat(super.iterator(), extras.iterator());
}
@Override
protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, Params params) throws IOException {
super.doXContentBody(builder, includeDefaults, params);
if (includeDefaults || pathType != Defaults.PATH_TYPE) {
builder.field("path", pathType.name().toLowerCase(Locale.ROOT));
}
if (includeDefaults || fieldType().isLatLonEnabled() != Defaults.ENABLE_LATLON) {
builder.field("lat_lon", fieldType().isLatLonEnabled());
}
if (includeDefaults || fieldType().isGeohashEnabled() != Defaults.ENABLE_GEOHASH) {
builder.field("geohash", fieldType().isGeohashEnabled());
}
if (includeDefaults || fieldType().isGeohashPrefixEnabled() != Defaults.ENABLE_GEOHASH_PREFIX) {
builder.field("geohash_prefix", fieldType().isGeohashPrefixEnabled());
}
if (fieldType().isGeohashEnabled() && (includeDefaults || fieldType().geohashPrecision() != Defaults.GEO_HASH_PRECISION)) {
builder.field("geohash_precision", fieldType().geohashPrecision());
}
if (fieldType().isLatLonEnabled() && (includeDefaults || fieldType().latFieldType().numericPrecisionStep() != NumericUtils.PRECISION_STEP_DEFAULT)) {
builder.field("precision_step", fieldType().latFieldType().numericPrecisionStep());
}
if (includeDefaults || coerce.explicit()) {
builder.field(Names.COERCE, coerce.value());
}
if (includeDefaults || ignoreMalformed.explicit()) {
builder.field(Names.IGNORE_MALFORMED, ignoreMalformed.value());
}
}
public static class CustomGeoPointDocValuesField extends CustomNumericDocValuesField {
private final ObjectHashSet<GeoPoint> points;
public CustomGeoPointDocValuesField(String name, double lat, double lon) {
super(name);
points = new ObjectHashSet<>(2);
points.add(new GeoPoint(lat, lon));
}
public void add(double lat, double lon) {
points.add(new GeoPoint(lat, lon));
}
@Override
public BytesRef binaryValue() {
final byte[] bytes = new byte[points.size() * 16];
int off = 0;
for (Iterator<ObjectCursor<GeoPoint>> it = points.iterator(); it.hasNext(); ) {
final GeoPoint point = it.next().value;
ByteUtils.writeDoubleLE(point.getLat(), bytes, off);
ByteUtils.writeDoubleLE(point.getLon(), bytes, off + 8);
off += 16;
}
return new BytesRef(bytes);
context.doc().add(new GeoPointField(fieldType().names().indexName(), point.lon(), point.lat(), fieldType()));
}
super.parse(context, point, geoHash);
}
}

org/elasticsearch/index/mapper/geo/GeoPointFieldMapperLegacy.java

@@ -0,0 +1,394 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.mapper.geo;
import com.carrotsearch.hppc.ObjectHashSet;
import com.carrotsearch.hppc.cursors.ObjectCursor;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.Version;
import org.elasticsearch.common.Explicit;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.geo.GeoDistance;
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.geo.GeoUtils;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.DistanceUnit;
import org.elasticsearch.common.util.ByteUtils;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.support.XContentMapValues;
import org.elasticsearch.index.mapper.ContentPath;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MergeMappingException;
import org.elasticsearch.index.mapper.MergeResult;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.mapper.core.DoubleFieldMapper;
import org.elasticsearch.index.mapper.core.NumberFieldMapper.CustomNumericDocValuesField;
import org.elasticsearch.index.mapper.core.StringFieldMapper;
import org.elasticsearch.index.mapper.object.ArrayValueMapperParser;
import java.io.IOException;
import java.util.Iterator;
import java.util.Map;
/**
* Parsing: We handle:
* <p>
* - "field" : "geo_hash"
* - "field" : "lat,lon"
* - "field" : {
* "lat" : 1.1,
* "lon" : 2.1
* }
*/
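// For illustration only (not part of the original javadoc): documents accepted by this
// parsing logic for a hypothetical "location" geo_point field:
//   { "location": "drm3btev3e86" }                    geohash string
//   { "location": "41.12,-71.34" }                    "lat,lon" string
//   { "location": { "lat": 41.12, "lon": -71.34 } }   object form
//   { "location": [ -71.34, 41.12 ] }                 [lon, lat] array (see parse in BaseGeoPointFieldMapper)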
public class GeoPointFieldMapperLegacy extends BaseGeoPointFieldMapper implements ArrayValueMapperParser {
public static final String CONTENT_TYPE = "geo_point";
public static class Names extends BaseGeoPointFieldMapper.Names {
public static final String COERCE = "coerce";
}
public static class Defaults extends BaseGeoPointFieldMapper.Defaults{
public static final Explicit<Boolean> COERCE = new Explicit(false, false);
public static final GeoPointFieldType FIELD_TYPE = new GeoPointFieldType();
static {
FIELD_TYPE.setIndexOptions(IndexOptions.DOCS);
FIELD_TYPE.setTokenized(false);
FIELD_TYPE.setOmitNorms(true);
FIELD_TYPE.freeze();
}
}
/**
* Concrete builder for legacy GeoPointField
*/
public static class Builder extends BaseGeoPointFieldMapper.Builder<Builder, GeoPointFieldMapperLegacy> {
private Boolean coerce;
public Builder(String name) {
super(name, Defaults.FIELD_TYPE);
this.builder = this;
}
public Builder coerce(boolean coerce) {
this.coerce = coerce;
return builder;
}
protected Explicit<Boolean> coerce(BuilderContext context) {
if (coerce != null) {
return new Explicit<>(coerce, true);
}
if (context.indexSettings() != null) {
return new Explicit<>(context.indexSettings().getAsBoolean("index.mapping.coerce", Defaults.COERCE.value()), false);
}
return Defaults.COERCE;
}
@Override
public GeoPointFieldMapperLegacy build(BuilderContext context, String simpleName, MappedFieldType fieldType,
MappedFieldType defaultFieldType, Settings indexSettings, ContentPath.Type pathType, DoubleFieldMapper latMapper,
DoubleFieldMapper lonMapper, StringFieldMapper geoHashMapper, MultiFields multiFields, Explicit<Boolean> ignoreMalformed,
CopyTo copyTo) {
fieldType.setTokenized(false);
setupFieldType(context);
fieldType.setHasDocValues(false);
defaultFieldType.setHasDocValues(false);
return new GeoPointFieldMapperLegacy(simpleName, fieldType, defaultFieldType, indexSettings, pathType, latMapper, lonMapper,
geoHashMapper, multiFields, ignoreMalformed, coerce(context), copyTo);
}
@Override
public GeoPointFieldMapperLegacy build(BuilderContext context) {
return super.build(context);
}
}
public static Builder parse(Builder builder, Map<String, Object> node, Mapper.TypeParser.ParserContext parserContext) throws MapperParsingException {
final boolean indexCreatedBeforeV2_0 = parserContext.indexVersionCreated().before(Version.V_2_0_0);
for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
Map.Entry<String, Object> entry = iterator.next();
String propName = Strings.toUnderscoreCase(entry.getKey());
Object propNode = entry.getValue();
if (indexCreatedBeforeV2_0 && propName.equals("validate")) {
builder.ignoreMalformed = !XContentMapValues.nodeBooleanValue(propNode);
iterator.remove();
} else if (indexCreatedBeforeV2_0 && propName.equals("validate_lon")) {
builder.ignoreMalformed = !XContentMapValues.nodeBooleanValue(propNode);
iterator.remove();
} else if (indexCreatedBeforeV2_0 && propName.equals("validate_lat")) {
builder.ignoreMalformed = !XContentMapValues.nodeBooleanValue(propNode);
iterator.remove();
} else if (propName.equals(Names.COERCE)) {
builder.coerce = XContentMapValues.nodeBooleanValue(propNode);
iterator.remove();
} else if (indexCreatedBeforeV2_0 && propName.equals("normalize")) {
builder.coerce = XContentMapValues.nodeBooleanValue(propNode);
iterator.remove();
} else if (indexCreatedBeforeV2_0 && propName.equals("normalize_lat")) {
builder.coerce = XContentMapValues.nodeBooleanValue(propNode);
iterator.remove();
} else if (indexCreatedBeforeV2_0 && propName.equals("normalize_lon")) {
builder.coerce = XContentMapValues.nodeBooleanValue(propNode);
iterator.remove();
}
}
return builder;
}
/**
* A byte-aligned fixed-length encoding for latitudes and longitudes.
*/
public static final class Encoding {
// With 14 bytes we already have better precision than a double since a double has 11 bits of exponent
private static final int MAX_NUM_BYTES = 14;
private static final Encoding[] INSTANCES;
static {
INSTANCES = new Encoding[MAX_NUM_BYTES + 1];
for (int numBytes = 2; numBytes <= MAX_NUM_BYTES; numBytes += 2) {
INSTANCES[numBytes] = new Encoding(numBytes);
}
}
/** Get an instance based on the number of bytes that has been used to encode values. */
public static final Encoding of(int numBytesPerValue) {
final Encoding instance = INSTANCES[numBytesPerValue];
if (instance == null) {
throw new IllegalStateException("No encoding for " + numBytesPerValue + " bytes per value");
}
return instance;
}
/** Get an instance based on the expected precision. Here are examples of the number of required bytes per value depending on the
* expected precision:<ul>
* <li>1km: 4 bytes</li>
* <li>3m: 6 bytes</li>
* <li>1m: 8 bytes</li>
* <li>1cm: 8 bytes</li>
* <li>1mm: 10 bytes</li></ul> */
public static final Encoding of(DistanceUnit.Distance precision) {
for (Encoding encoding : INSTANCES) {
if (encoding != null && encoding.precision().compareTo(precision) <= 0) {
return encoding;
}
}
return INSTANCES[MAX_NUM_BYTES];
}
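// Illustrative selection example (an assumption based on the Javadoc above, not code from the commit):
// Encoding.of(new DistanceUnit.Distance(3, DistanceUnit.METERS)) walks INSTANCES from the smallest
// encoding upwards and returns the first instance whose precision is at least as fine as the request,
// i.e. the 6-byte encoding (3 bytes per coordinate).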
private final DistanceUnit.Distance precision;
private final int numBytes;
private final int numBytesPerCoordinate;
private final double factor;
private Encoding(int numBytes) {
assert numBytes >= 1 && numBytes <= MAX_NUM_BYTES;
assert (numBytes & 1) == 0; // we don't support odd numBytes for the moment
this.numBytes = numBytes;
this.numBytesPerCoordinate = numBytes / 2;
this.factor = Math.pow(2, - numBytesPerCoordinate * 8 + 9);
assert (1L << (numBytesPerCoordinate * 8 - 1)) * factor > 180 && (1L << (numBytesPerCoordinate * 8 - 2)) * factor < 180 : numBytesPerCoordinate + " " + factor;
if (numBytes == MAX_NUM_BYTES) {
// no precision loss compared to a double
precision = new DistanceUnit.Distance(0, DistanceUnit.DEFAULT);
} else {
precision = new DistanceUnit.Distance(
GeoDistance.PLANE.calculate(0, 0, factor / 2, factor / 2, DistanceUnit.DEFAULT), // factor/2 because we use Math.round instead of a cast to convert the double to a long
DistanceUnit.DEFAULT);
}
}
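// Worked numbers for the assertion above (comment added for clarity): factor == 2^(9 - 8 * numBytesPerCoordinate),
// so (1L << (numBytesPerCoordinate * 8 - 1)) * factor == 2^8 == 256 > 180 and
// (1L << (numBytesPerCoordinate * 8 - 2)) * factor == 2^7 == 128 < 180 for every supported size,
// i.e. the scale is the finest power of two that still lets (lat + 180) / factor cover the full
// [-180, 180] range within numBytesPerCoordinate * 8 bits.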
public DistanceUnit.Distance precision() {
return precision;
}
/** The number of bytes required to encode a single geo point. */
public final int numBytes() {
return numBytes;
}
/** The number of bits required to encode a single coordinate of a geo point. */
public int numBitsPerCoordinate() {
return numBytesPerCoordinate << 3;
}
/** Return the bits that encode a latitude/longitude. */
public long encodeCoordinate(double lat) {
return Math.round((lat + 180) / factor);
}
/** Decode a sequence of bits into the original coordinate. */
public double decodeCoordinate(long bits) {
return bits * factor - 180;
}
private void encodeBits(long bits, byte[] out, int offset) {
for (int i = 0; i < numBytesPerCoordinate; ++i) {
out[offset++] = (byte) bits;
bits >>>= 8;
}
assert bits == 0;
}
private long decodeBits(byte [] in, int offset) {
long r = in[offset++] & 0xFFL;
for (int i = 1; i < numBytesPerCoordinate; ++i) {
r |= (in[offset++] & 0xFFL) << (i * 8);
}
return r;
}
/** Encode a geo point into a byte-array, over {@link #numBytes()} bytes. */
public void encode(double lat, double lon, byte[] out, int offset) {
encodeBits(encodeCoordinate(lat), out, offset);
encodeBits(encodeCoordinate(lon), out, offset + numBytesPerCoordinate);
}
/** Decode a geo point from a byte-array, reading {@link #numBytes()} bytes. */
public GeoPoint decode(byte[] in, int offset, GeoPoint out) {
final long latBits = decodeBits(in, offset);
final long lonBits = decodeBits(in, offset + numBytesPerCoordinate);
return decode(latBits, lonBits, out);
}
/** Decode a geo point from the bits of the encoded latitude and longitudes. */
public GeoPoint decode(long latBits, long lonBits, GeoPoint out) {
final double lat = decodeCoordinate(latBits);
final double lon = decodeCoordinate(lonBits);
return out.reset(lat, lon);
}
}
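// Hedged usage sketch (illustrative only, not part of the original commit; the helper name is invented
// here and the flow mirrors GeoEncodingTests): round-trip a point through the byte-aligned encoding.
// Per the Javadoc above, ~1m precision selects the 8-byte encoding, and the plane-distance error of the
// decoded point stays within that precision.
private static GeoPoint exampleEncodingRoundTrip(double lat, double lon) {
    final Encoding encoding = Encoding.of(new DistanceUnit.Distance(1, DistanceUnit.METERS));
    final byte[] bytes = new byte[encoding.numBytes()];
    encoding.encode(lat, lon, bytes, 0);
    return encoding.decode(bytes, 0, new GeoPoint());
}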
protected Explicit<Boolean> coerce;
public GeoPointFieldMapperLegacy(String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType, Settings indexSettings,
ContentPath.Type pathType, DoubleFieldMapper latMapper, DoubleFieldMapper lonMapper,
StringFieldMapper geoHashMapper, MultiFields multiFields, Explicit<Boolean> ignoreMalformed,
Explicit<Boolean> coerce, CopyTo copyTo) {
super(simpleName, fieldType, defaultFieldType, indexSettings, pathType, latMapper, lonMapper, geoHashMapper, multiFields,
ignoreMalformed, copyTo);
this.coerce = coerce;
}
@Override
public void merge(Mapper mergeWith, MergeResult mergeResult) throws MergeMappingException {
super.merge(mergeWith, mergeResult);
if (!this.getClass().equals(mergeWith.getClass())) {
return;
}
GeoPointFieldMapperLegacy gpfmMergeWith = (GeoPointFieldMapperLegacy) mergeWith;
if (gpfmMergeWith.coerce.explicit()) {
if (coerce.explicit() && coerce.value() != gpfmMergeWith.coerce.value()) {
mergeResult.addConflict("mapper [" + fieldType().names().fullName() + "] has different [coerce]");
}
}
if (mergeResult.simulate() == false && mergeResult.hasConflicts() == false) {
if (gpfmMergeWith.coerce.explicit()) {
this.coerce = gpfmMergeWith.coerce;
}
}
}
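// Note (comment added for clarity, not in the original source): the parse(...) override below validates
// out-of-range latitudes/longitudes only when both coerce and ignore_malformed are disabled, normalizes
// the point via GeoUtils.normalizePoint(...) whenever coercion is requested, indexes/stores the value as
// a "lat,lon" string when the field is indexed or stored, and accumulates all points of a document into
// a single CustomGeoPointDocValuesField when doc values are enabled.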
@Override
protected void parse(ParseContext context, GeoPoint point, String geoHash) throws IOException {
boolean validPoint = false;
if (coerce.value() == false && ignoreMalformed.value() == false) {
if (point.lat() > 90.0 || point.lat() < -90.0) {
throw new IllegalArgumentException("illegal latitude value [" + point.lat() + "] for " + name());
}
if (point.lon() > 180.0 || point.lon() < -180) {
throw new IllegalArgumentException("illegal longitude value [" + point.lon() + "] for " + name());
}
validPoint = true;
}
if (coerce.value() == true && validPoint == false) {
// normalization only runs when coercion was requested; when coerce is false we assume the points are
// already in a valid coordinate system (or were rejected by the validation above), so this step is skipped
GeoUtils.normalizePoint(point, true, true);
}
if (fieldType().indexOptions() != IndexOptions.NONE || fieldType().stored()) {
Field field = new Field(fieldType().names().indexName(), Double.toString(point.lat()) + ',' + Double.toString(point.lon()), fieldType());
context.doc().add(field);
}
super.parse(context, point, geoHash);
if (fieldType().hasDocValues()) {
CustomGeoPointDocValuesField field = (CustomGeoPointDocValuesField) context.doc().getByKey(fieldType().names().indexName());
if (field == null) {
field = new CustomGeoPointDocValuesField(fieldType().names().indexName(), point.lat(), point.lon());
context.doc().addWithKey(fieldType().names().indexName(), field);
} else {
field.add(point.lat(), point.lon());
}
}
}
@Override
protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, Params params) throws IOException {
super.doXContentBody(builder, includeDefaults, params);
if (includeDefaults || coerce.explicit()) {
builder.field(Names.COERCE, coerce.value());
}
}
public static class CustomGeoPointDocValuesField extends CustomNumericDocValuesField {
private final ObjectHashSet<GeoPoint> points;
public CustomGeoPointDocValuesField(String name, double lat, double lon) {
super(name);
points = new ObjectHashSet<>(2);
points.add(new GeoPoint(lat, lon));
}
public void add(double lat, double lon) {
points.add(new GeoPoint(lat, lon));
}
@Override
public BytesRef binaryValue() {
final byte[] bytes = new byte[points.size() * 16];
int off = 0;
for (Iterator<ObjectCursor<GeoPoint>> it = points.iterator(); it.hasNext(); ) {
final GeoPoint point = it.next().value;
ByteUtils.writeDoubleLE(point.getLat(), bytes, off);
ByteUtils.writeDoubleLE(point.getLon(), bytes, off + 8);
off += 16;
}
return new BytesRef(bytes);
}
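// Hedged decoding sketch (not part of the original commit): the inverse of binaryValue() above, reading
// back the 16-byte little-endian (lat, lon) pairs. The helper name is illustrative and it assumes
// ByteUtils.readDoubleLE mirrors the writeDoubleLE call used during encoding.
static GeoPoint[] exampleDecodePoints(BytesRef ref) {
    final GeoPoint[] decoded = new GeoPoint[ref.length / 16];
    for (int i = 0; i < decoded.length; ++i) {
        final int off = ref.offset + i * 16;
        decoded[i] = new GeoPoint(ByteUtils.readDoubleLE(ref.bytes, off), ByteUtils.readDoubleLE(ref.bytes, off + 8));
    }
    return decoded;
}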
}
}

View File

@ -30,7 +30,8 @@ import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.fielddata.IndexGeoPointFieldData;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.geo.GeoPointFieldMapper;
import org.elasticsearch.index.mapper.geo.BaseGeoPointFieldMapper;
import org.elasticsearch.index.mapper.geo.GeoPointFieldMapperLegacy;
import org.elasticsearch.index.search.geo.InMemoryGeoBoundingBoxQuery;
import org.elasticsearch.index.search.geo.IndexedGeoBoundingBoxQuery;
@ -233,6 +234,14 @@ public class GeoBoundingBoxQueryBuilder extends AbstractQueryBuilder<GeoBounding
@Override
public Query doToQuery(QueryShardContext context) {
MappedFieldType fieldType = context.fieldMapper(fieldName);
if (fieldType == null) {
throw new QueryShardException(context, "failed to find geo_point field [" + fieldName + "]");
}
if (!(fieldType instanceof BaseGeoPointFieldMapper.GeoPointFieldType)) {
throw new QueryShardException(context, "field [" + fieldName + "] is not a geo_point field");
}
QueryValidationException exception = checkLatLon(context.indexVersionCreated().before(Version.V_2_0_0));
if (exception != null) {
throw new QueryShardException(context, "couldn't validate latitude/ longitude values", exception);
@ -255,15 +264,6 @@ public class GeoBoundingBoxQueryBuilder extends AbstractQueryBuilder<GeoBounding
}
}
MappedFieldType fieldType = context.fieldMapper(fieldName);
if (fieldType == null) {
throw new QueryShardException(context, "failed to find geo_point field [" + fieldName + "]");
}
if (!(fieldType instanceof GeoPointFieldMapper.GeoPointFieldType)) {
throw new QueryShardException(context, "field [" + fieldName + "] is not a geo_point field");
}
GeoPointFieldMapper.GeoPointFieldType geoFieldType = ((GeoPointFieldMapper.GeoPointFieldType) fieldType);
// norelease cut over to .before(Version.V_2_2_0) once GeoPointFieldV2 is fully merged
if (context.indexVersionCreated().after(Version.CURRENT)) {
return new GeoPointInBBoxQuery(fieldType.names().fullName(), topLeft.lon(), bottomRight.lat(), bottomRight.lon(), topLeft.lat());
@ -272,6 +272,7 @@ public class GeoBoundingBoxQueryBuilder extends AbstractQueryBuilder<GeoBounding
Query query;
switch(type) {
case INDEXED:
GeoPointFieldMapperLegacy.GeoPointFieldType geoFieldType = ((GeoPointFieldMapperLegacy.GeoPointFieldType) fieldType);
query = IndexedGeoBoundingBoxQuery.create(luceneTopLeft, luceneBottomRight, geoFieldType);
break;
case MEMORY:

View File

@ -32,7 +32,8 @@ import org.elasticsearch.common.unit.DistanceUnit;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.fielddata.IndexGeoPointFieldData;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.geo.GeoPointFieldMapper;
import org.elasticsearch.index.mapper.geo.BaseGeoPointFieldMapper;
import org.elasticsearch.index.mapper.geo.GeoPointFieldMapperLegacy;
import org.elasticsearch.index.search.geo.GeoDistanceRangeQuery;
import java.io.IOException;
@ -204,6 +205,15 @@ public class GeoDistanceQueryBuilder extends AbstractQueryBuilder<GeoDistanceQue
@Override
protected Query doToQuery(QueryShardContext shardContext) throws IOException {
MappedFieldType fieldType = shardContext.fieldMapper(fieldName);
if (fieldType == null) {
throw new QueryShardException(shardContext, "failed to find geo_point field [" + fieldName + "]");
}
if (!(fieldType instanceof BaseGeoPointFieldMapper.GeoPointFieldType)) {
throw new QueryShardException(shardContext, "field [" + fieldName + "] is not a geo_point field");
}
QueryValidationException exception = checkLatLon(shardContext.indexVersionCreated().before(Version.V_2_0_0));
if (exception != null) {
throw new QueryShardException(shardContext, "couldn't validate latitude/ longitude values", exception);
@ -215,17 +225,9 @@ public class GeoDistanceQueryBuilder extends AbstractQueryBuilder<GeoDistanceQue
double normDistance = geoDistance.normalize(this.distance, DistanceUnit.DEFAULT);
MappedFieldType fieldType = shardContext.fieldMapper(fieldName);
if (fieldType == null) {
throw new QueryShardException(shardContext, "failed to find geo_point field [" + fieldName + "]");
}
if (!(fieldType instanceof GeoPointFieldMapper.GeoPointFieldType)) {
throw new QueryShardException(shardContext, "field [" + fieldName + "] is not a geo_point field");
}
// norelease cut over to .before(Version.V_2_2_0) once GeoPointFieldV2 is fully merged
if (shardContext.indexVersionCreated().onOrBefore(Version.CURRENT)) {
GeoPointFieldMapper.GeoPointFieldType geoFieldType = ((GeoPointFieldMapper.GeoPointFieldType) fieldType);
GeoPointFieldMapperLegacy.GeoPointFieldType geoFieldType = ((GeoPointFieldMapperLegacy.GeoPointFieldType) fieldType);
IndexGeoPointFieldData indexFieldData = shardContext.getForField(fieldType);
return new GeoDistanceRangeQuery(center, null, normDistance, true, false, geoDistance, geoFieldType, indexFieldData, optimizeBbox);
}

View File

@ -32,7 +32,8 @@ import org.elasticsearch.common.unit.DistanceUnit;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.fielddata.IndexGeoPointFieldData;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.geo.GeoPointFieldMapper;
import org.elasticsearch.index.mapper.geo.BaseGeoPointFieldMapper;
import org.elasticsearch.index.mapper.geo.GeoPointFieldMapperLegacy;
import org.elasticsearch.index.search.geo.GeoDistanceRangeQuery;
import java.io.IOException;
@ -211,6 +212,13 @@ public class GeoDistanceRangeQueryBuilder extends AbstractQueryBuilder<GeoDistan
@Override
protected Query doToQuery(QueryShardContext context) throws IOException {
MappedFieldType fieldType = context.fieldMapper(fieldName);
if (fieldType == null) {
throw new QueryShardException(context, "failed to find geo_point field [" + fieldName + "]");
}
if (!(fieldType instanceof BaseGeoPointFieldMapper.GeoPointFieldType)) {
throw new QueryShardException(context, "field [" + fieldName + "] is not a geo_point field");
}
final boolean indexCreatedBeforeV2_0 = context.indexVersionCreated().before(Version.V_2_0_0);
// validation was not available prior to 2.x, so to support bwc
@ -253,17 +261,9 @@ public class GeoDistanceRangeQueryBuilder extends AbstractQueryBuilder<GeoDistan
toValue = GeoUtils.maxRadialDistance(point);
}
MappedFieldType fieldType = context.fieldMapper(fieldName);
if (fieldType == null) {
throw new QueryShardException(context, "failed to find geo_point field [" + fieldName + "]");
}
if (!(fieldType instanceof GeoPointFieldMapper.GeoPointFieldType)) {
throw new QueryShardException(context, "field [" + fieldName + "] is not a geo_point field");
}
// norelease cut over to .before(Version.V_2_2_0) once GeoPointFieldV2 is fully merged
if (context.indexVersionCreated().onOrBefore(Version.CURRENT)) {
GeoPointFieldMapper.GeoPointFieldType geoFieldType = ((GeoPointFieldMapper.GeoPointFieldType) fieldType);
GeoPointFieldMapperLegacy.GeoPointFieldType geoFieldType = ((GeoPointFieldMapperLegacy.GeoPointFieldType) fieldType);
IndexGeoPointFieldData indexFieldData = context.getForField(fieldType);
return new GeoDistanceRangeQuery(point, fromValue, toValue, includeLower, includeUpper, geoDistance, geoFieldType,
indexFieldData, optimizeBbox);

View File

@ -30,7 +30,7 @@ import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.fielddata.IndexGeoPointFieldData;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.geo.GeoPointFieldMapper;
import org.elasticsearch.index.mapper.geo.BaseGeoPointFieldMapper;
import org.elasticsearch.index.search.geo.GeoPolygonQuery;
import java.io.IOException;
@ -100,6 +100,13 @@ public class GeoPolygonQueryBuilder extends AbstractQueryBuilder<GeoPolygonQuery
@Override
protected Query doToQuery(QueryShardContext context) throws IOException {
MappedFieldType fieldType = context.fieldMapper(fieldName);
if (fieldType == null) {
throw new QueryShardException(context, "failed to find geo_point field [" + fieldName + "]");
}
if (!(fieldType instanceof BaseGeoPointFieldMapper.GeoPointFieldType)) {
throw new QueryShardException(context, "field [" + fieldName + "] is not a geo_point field");
}
List<GeoPoint> shell = new ArrayList<GeoPoint>();
for (GeoPoint geoPoint : this.shell) {
@ -129,14 +136,6 @@ public class GeoPolygonQueryBuilder extends AbstractQueryBuilder<GeoPolygonQuery
}
}
MappedFieldType fieldType = context.fieldMapper(fieldName);
if (fieldType == null) {
throw new QueryShardException(context, "failed to find geo_point field [" + fieldName + "]");
}
if (!(fieldType instanceof GeoPointFieldMapper.GeoPointFieldType)) {
throw new QueryShardException(context, "field [" + fieldName + "] is not a geo_point field");
}
// norelease cut over to .before(Version.V_2_2_0) once GeoPointFieldV2 is fully merged
if (context.indexVersionCreated().onOrBefore(Version.CURRENT)) {
IndexGeoPointFieldData indexFieldData = context.getForField(fieldType);

View File

@ -36,7 +36,7 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentParser.Token;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.geo.GeoPointFieldMapper;
import org.elasticsearch.index.mapper.geo.BaseGeoPointFieldMapper;
import java.io.IOException;
import java.util.ArrayList;
@ -75,8 +75,8 @@ public class GeohashCellQuery {
* @param geohashes optional array of additional geohashes
* @return a new geo bounding box filter
*/
public static Query create(QueryShardContext context, GeoPointFieldMapper.GeoPointFieldType fieldType, String geohash, @Nullable List<CharSequence> geohashes) {
MappedFieldType geoHashMapper = fieldType.geohashFieldType();
public static Query create(QueryShardContext context, BaseGeoPointFieldMapper.GeoPointFieldType fieldType, String geohash, @Nullable List<CharSequence> geohashes) {
MappedFieldType geoHashMapper = fieldType.geoHashFieldType();
if (geoHashMapper == null) {
throw new IllegalArgumentException("geohash filter needs geohash_prefix to be enabled");
}
@ -185,15 +185,15 @@ public class GeohashCellQuery {
MappedFieldType fieldType = context.fieldMapper(fieldName);
if (fieldType == null) {
throw new QueryShardException(context, "failed to parse [{}] query. missing [{}] field [{}]", NAME,
GeoPointFieldMapper.CONTENT_TYPE, fieldName);
BaseGeoPointFieldMapper.CONTENT_TYPE, fieldName);
}
if (!(fieldType instanceof GeoPointFieldMapper.GeoPointFieldType)) {
if (!(fieldType instanceof BaseGeoPointFieldMapper.GeoPointFieldType)) {
throw new QueryShardException(context, "failed to parse [{}] query. field [{}] is not a geo_point field", NAME, fieldName);
}
GeoPointFieldMapper.GeoPointFieldType geoFieldType = ((GeoPointFieldMapper.GeoPointFieldType) fieldType);
if (!geoFieldType.isGeohashPrefixEnabled()) {
BaseGeoPointFieldMapper.GeoPointFieldType geoFieldType = ((BaseGeoPointFieldMapper.GeoPointFieldType) fieldType);
if (!geoFieldType.isGeoHashPrefixEnabled()) {
throw new QueryShardException(context, "failed to parse [{}] query. [geohash_prefix] is not enabled for field [{}]", NAME,
fieldName);
}

View File

@ -41,7 +41,7 @@ import org.elasticsearch.index.fielddata.*;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.core.DateFieldMapper;
import org.elasticsearch.index.mapper.core.NumberFieldMapper;
import org.elasticsearch.index.mapper.geo.GeoPointFieldMapper;
import org.elasticsearch.index.mapper.geo.BaseGeoPointFieldMapper;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.search.MultiValueMode;
@ -187,8 +187,8 @@ public abstract class DecayFunctionBuilder<DFB extends DecayFunctionBuilder> ext
parser.nextToken();
if (fieldType instanceof DateFieldMapper.DateFieldType) {
return parseDateVariable(parser, context, (DateFieldMapper.DateFieldType) fieldType, mode);
} else if (fieldType instanceof GeoPointFieldMapper.GeoPointFieldType) {
return parseGeoVariable(parser, context, (GeoPointFieldMapper.GeoPointFieldType) fieldType, mode);
} else if (fieldType instanceof BaseGeoPointFieldMapper.GeoPointFieldType) {
return parseGeoVariable(parser, context, (BaseGeoPointFieldMapper.GeoPointFieldType) fieldType, mode);
} else if (fieldType instanceof NumberFieldMapper.NumberFieldType) {
return parseNumberVariable(parser, context, (NumberFieldMapper.NumberFieldType) fieldType, mode);
} else {
@ -231,7 +231,7 @@ public abstract class DecayFunctionBuilder<DFB extends DecayFunctionBuilder> ext
}
private AbstractDistanceScoreFunction parseGeoVariable(XContentParser parser, QueryShardContext context,
GeoPointFieldMapper.GeoPointFieldType fieldType, MultiValueMode mode) throws IOException {
BaseGeoPointFieldMapper.GeoPointFieldType fieldType, MultiValueMode mode) throws IOException {
XContentParser.Token token;
String parameterName = null;
GeoPoint origin = new GeoPoint();

View File

@ -37,6 +37,7 @@ import org.elasticsearch.common.unit.DistanceUnit;
import org.elasticsearch.index.fielddata.IndexGeoPointFieldData;
import org.elasticsearch.index.fielddata.MultiGeoPointValues;
import org.elasticsearch.index.mapper.geo.GeoPointFieldMapper;
import org.elasticsearch.index.mapper.geo.GeoPointFieldMapperLegacy;
import java.io.IOException;
@ -58,8 +59,9 @@ public class GeoDistanceRangeQuery extends Query {
private final IndexGeoPointFieldData indexFieldData;
public GeoDistanceRangeQuery(GeoPoint point, Double lowerVal, Double upperVal, boolean includeLower, boolean includeUpper, GeoDistance geoDistance, GeoPointFieldMapper.GeoPointFieldType fieldType, IndexGeoPointFieldData indexFieldData,
String optimizeBbox) {
public GeoDistanceRangeQuery(GeoPoint point, Double lowerVal, Double upperVal, boolean includeLower,
boolean includeUpper, GeoDistance geoDistance, GeoPointFieldMapperLegacy.GeoPointFieldType fieldType,
IndexGeoPointFieldData indexFieldData, String optimizeBbox) {
this.lat = point.lat();
this.lon = point.lon();
this.geoDistance = geoDistance;

View File

@ -25,12 +25,13 @@ import org.apache.lucene.search.ConstantScoreQuery;
import org.apache.lucene.search.Query;
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.index.mapper.geo.GeoPointFieldMapper;
import org.elasticsearch.index.mapper.geo.GeoPointFieldMapperLegacy;
/**
*/
public class IndexedGeoBoundingBoxQuery {
public static Query create(GeoPoint topLeft, GeoPoint bottomRight, GeoPointFieldMapper.GeoPointFieldType fieldType) {
public static Query create(GeoPoint topLeft, GeoPoint bottomRight, GeoPointFieldMapperLegacy.GeoPointFieldType fieldType) {
if (!fieldType.isLatLonEnabled()) {
throw new IllegalArgumentException("lat/lon is not enabled (indexed) for field [" + fieldType.names().fullName() + "], can't use indexed filter on it");
}
@ -42,7 +43,7 @@ public class IndexedGeoBoundingBoxQuery {
}
}
private static Query westGeoBoundingBoxFilter(GeoPoint topLeft, GeoPoint bottomRight, GeoPointFieldMapper.GeoPointFieldType fieldType) {
private static Query westGeoBoundingBoxFilter(GeoPoint topLeft, GeoPoint bottomRight, GeoPointFieldMapperLegacy.GeoPointFieldType fieldType) {
BooleanQuery.Builder filter = new BooleanQuery.Builder();
filter.setMinimumNumberShouldMatch(1);
filter.add(fieldType.lonFieldType().rangeQuery(null, bottomRight.lon(), true, true), Occur.SHOULD);
@ -51,7 +52,7 @@ public class IndexedGeoBoundingBoxQuery {
return new ConstantScoreQuery(filter.build());
}
private static Query eastGeoBoundingBoxFilter(GeoPoint topLeft, GeoPoint bottomRight, GeoPointFieldMapper.GeoPointFieldType fieldType) {
private static Query eastGeoBoundingBoxFilter(GeoPoint topLeft, GeoPoint bottomRight, GeoPointFieldMapperLegacy.GeoPointFieldType fieldType) {
BooleanQuery.Builder filter = new BooleanQuery.Builder();
filter.add(fieldType.lonFieldType().rangeQuery(topLeft.lon(), bottomRight.lon(), true, true), Occur.MUST);
filter.add(fieldType.latFieldType().rangeQuery(bottomRight.lat(), topLeft.lat(), true, true), Occur.MUST);

View File

@ -24,10 +24,10 @@ import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.StringField;
import org.apache.lucene.index.*;
import org.apache.lucene.search.Filter;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.store.RAMDirectory;
import org.elasticsearch.Version;
import org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.IndexService;
@ -37,6 +37,9 @@ import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.Mapper.BuilderContext;
import org.elasticsearch.index.mapper.MapperBuilders;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.geo.BaseGeoPointFieldMapper;
import org.elasticsearch.index.mapper.geo.GeoPointFieldMapper;
import org.elasticsearch.index.mapper.geo.GeoPointFieldMapperLegacy;
import org.elasticsearch.index.mapper.internal.ParentFieldMapper;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.indices.fielddata.cache.IndicesFieldDataCache;
@ -92,7 +95,13 @@ public abstract class AbstractFieldDataTestCase extends ESSingleNodeTestCase {
} else if (type.getType().equals("byte")) {
fieldType = MapperBuilders.byteField(fieldName).docValues(docValues).fieldDataSettings(type.getSettings()).build(context).fieldType();
} else if (type.getType().equals("geo_point")) {
fieldType = MapperBuilders.geoPointField(fieldName).docValues(docValues).fieldDataSettings(type.getSettings()).build(context).fieldType();
BaseGeoPointFieldMapper.Builder builder;
// norelease update to .before(Version.V_2_2_0) once GeoPointFieldV2 is fully merged
if (indexService.getIndexSettings().getIndexVersionCreated().onOrBefore(Version.CURRENT)) {
fieldType = new GeoPointFieldMapperLegacy.Builder(fieldName).docValues(docValues).fieldDataSettings(type.getSettings()).build(context).fieldType();
} else {
fieldType = new GeoPointFieldMapper.Builder(fieldName).docValues(docValues).fieldDataSettings(type.getSettings()).build(context).fieldType();
}
} else if (type.getType().equals("_parent")) {
fieldType = new ParentFieldMapper.Builder("_type").type(fieldName).build(context).fieldType();
} else if (type.getType().equals("binary")) {

View File

@ -21,6 +21,7 @@ package org.elasticsearch.index.mapper.externalvalues;
import com.spatial4j.core.shape.Point;
import org.apache.lucene.document.Field;
import org.elasticsearch.Version;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.collect.Iterators;
import org.elasticsearch.common.geo.GeoPoint;
@ -37,7 +38,9 @@ import org.elasticsearch.index.mapper.MergeResult;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.mapper.core.BinaryFieldMapper;
import org.elasticsearch.index.mapper.core.BooleanFieldMapper;
import org.elasticsearch.index.mapper.geo.BaseGeoPointFieldMapper;
import org.elasticsearch.index.mapper.geo.GeoPointFieldMapper;
import org.elasticsearch.index.mapper.geo.GeoPointFieldMapperLegacy;
import org.elasticsearch.index.mapper.geo.GeoShapeFieldMapper;
import java.io.IOException;
@ -72,6 +75,7 @@ public class ExternalMapper extends FieldMapper {
private BinaryFieldMapper.Builder binBuilder = new BinaryFieldMapper.Builder(Names.FIELD_BIN);
private BooleanFieldMapper.Builder boolBuilder = new BooleanFieldMapper.Builder(Names.FIELD_BOOL);
private GeoPointFieldMapper.Builder pointBuilder = new GeoPointFieldMapper.Builder(Names.FIELD_POINT);
private GeoPointFieldMapperLegacy.Builder legacyPointBuilder = new GeoPointFieldMapperLegacy.Builder(Names.FIELD_POINT);
private GeoShapeFieldMapper.Builder shapeBuilder = new GeoShapeFieldMapper.Builder(Names.FIELD_SHAPE);
private Mapper.Builder stringBuilder;
private String generatedValue;
@ -98,7 +102,9 @@ public class ExternalMapper extends FieldMapper {
context.path().add(name);
BinaryFieldMapper binMapper = binBuilder.build(context);
BooleanFieldMapper boolMapper = boolBuilder.build(context);
GeoPointFieldMapper pointMapper = pointBuilder.build(context);
// norelease cut over to .before(Version.V_2_2_0) once GeoPointFieldV2 is fully merged
BaseGeoPointFieldMapper pointMapper = (context.indexCreatedVersion().onOrBefore(Version.CURRENT)) ?
legacyPointBuilder.build(context) : pointBuilder.build(context);
GeoShapeFieldMapper shapeMapper = shapeBuilder.build(context);
FieldMapper stringMapper = (FieldMapper)stringBuilder.build(context);
context.path().remove();
@ -164,13 +170,13 @@ public class ExternalMapper extends FieldMapper {
private final BinaryFieldMapper binMapper;
private final BooleanFieldMapper boolMapper;
private final GeoPointFieldMapper pointMapper;
private final BaseGeoPointFieldMapper pointMapper;
private final GeoShapeFieldMapper shapeMapper;
private final FieldMapper stringMapper;
public ExternalMapper(String simpleName, MappedFieldType fieldType,
String generatedValue, String mapperName,
BinaryFieldMapper binMapper, BooleanFieldMapper boolMapper, GeoPointFieldMapper pointMapper,
BinaryFieldMapper binMapper, BooleanFieldMapper boolMapper, BaseGeoPointFieldMapper pointMapper,
GeoShapeFieldMapper shapeMapper, FieldMapper stringMapper, Settings indexSettings, MultiFields multiFields, CopyTo copyTo) {
super(simpleName, fieldType, new ExternalFieldType(), indexSettings, multiFields, copyTo);
this.generatedValue = generatedValue;

View File

@ -19,11 +19,16 @@
package org.elasticsearch.index.mapper.externalvalues;
import org.apache.lucene.util.GeoUtils;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.ParsedDocument;
import org.elasticsearch.test.ESSingleNodeTestCase;
import org.elasticsearch.test.VersionUtils;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.notNullValue;
@ -32,7 +37,9 @@ import static org.hamcrest.Matchers.notNullValue;
*/
public class SimpleExternalMappingTests extends ESSingleNodeTestCase {
public void testExternalValues() throws Exception {
MapperService mapperService = createIndex("test").mapperService();
Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT);
Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
MapperService mapperService = createIndex("test", settings).mapperService();
mapperService.documentMapperParser().putRootTypeParser(ExternalMetadataMapper.CONTENT_TYPE,
new ExternalMetadataMapper.TypeParser());
mapperService.documentMapperParser().putTypeParser(ExternalMapperPlugin.EXTERNAL,
@ -58,7 +65,12 @@ public class SimpleExternalMappingTests extends ESSingleNodeTestCase {
assertThat(doc.rootDoc().getField("field.bool").stringValue(), is("T"));
assertThat(doc.rootDoc().getField("field.point"), notNullValue());
assertThat(doc.rootDoc().getField("field.point").stringValue(), is("42.0,51.0"));
// norelease update to .before(Version.V_2_2_0) once GeoPointFieldV2 is fully merged
if (version.onOrBefore(Version.CURRENT)) {
assertThat(doc.rootDoc().getField("field.point").stringValue(), is("42.0,51.0"));
} else {
assertThat(Long.parseLong(doc.rootDoc().getField("field.point").stringValue()), is(GeoUtils.mortonHash(51.0, 42.0)));
}
assertThat(doc.rootDoc().getField("field.shape"), notNullValue());
@ -70,7 +82,9 @@ public class SimpleExternalMappingTests extends ESSingleNodeTestCase {
}
public void testExternalValuesWithMultifield() throws Exception {
MapperService mapperService = createIndex("test").mapperService();
Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT);
Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
MapperService mapperService = createIndex("test", settings).mapperService();
mapperService.documentMapperParser().putTypeParser(ExternalMapperPlugin.EXTERNAL,
new ExternalMapper.TypeParser(ExternalMapperPlugin.EXTERNAL, "foo"));
@ -105,7 +119,12 @@ public class SimpleExternalMappingTests extends ESSingleNodeTestCase {
assertThat(doc.rootDoc().getField("field.bool").stringValue(), is("T"));
assertThat(doc.rootDoc().getField("field.point"), notNullValue());
assertThat(doc.rootDoc().getField("field.point").stringValue(), is("42.0,51.0"));
// norelease update to .before(Version.V_2_2_0) once GeoPointFieldV2 is fully merged
if (version.onOrBefore(Version.CURRENT)) {
assertThat(doc.rootDoc().getField("field.point").stringValue(), is("42.0,51.0"));
} else {
assertThat(Long.parseLong(doc.rootDoc().getField("field.point").stringValue()), is(GeoUtils.mortonHash(51.0, 42.0)));
}
assertThat(doc.rootDoc().getField("field.shape"), notNullValue());
@ -117,7 +136,9 @@ public class SimpleExternalMappingTests extends ESSingleNodeTestCase {
}
public void testExternalValuesWithMultifieldTwoLevels() throws Exception {
MapperService mapperService = createIndex("test").mapperService();
Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT);
Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
MapperService mapperService = createIndex("test", settings).mapperService();
mapperService.documentMapperParser().putTypeParser(ExternalMapperPlugin.EXTERNAL,
new ExternalMapper.TypeParser(ExternalMapperPlugin.EXTERNAL, "foo"));
@ -158,7 +179,12 @@ public class SimpleExternalMappingTests extends ESSingleNodeTestCase {
assertThat(doc.rootDoc().getField("field.bool").stringValue(), is("T"));
assertThat(doc.rootDoc().getField("field.point"), notNullValue());
assertThat(doc.rootDoc().getField("field.point").stringValue(), is("42.0,51.0"));
// norelease update to .before(Version.V_2_2_0) once GeoPointFieldV2 is fully merged
if (version.onOrBefore(Version.CURRENT)) {
assertThat(doc.rootDoc().getField("field.point").stringValue(), is("42.0,51.0"));
} else {
assertThat(Long.parseLong(doc.rootDoc().getField("field.point").stringValue()), is(GeoUtils.mortonHash(51.0, 42.0)));
}
assertThat(doc.rootDoc().getField("field.shape"), notNullValue());

View File

@ -37,7 +37,7 @@ public class GeoEncodingTests extends ESTestCase {
final double lat = randomDouble() * 180 - 90;
final double lon = randomDouble() * 360 - 180;
final Distance precision = new Distance(1+(randomDouble() * 9), randomFrom(Arrays.asList(DistanceUnit.MILLIMETERS, DistanceUnit.METERS, DistanceUnit.KILOMETERS)));
final GeoPointFieldMapper.Encoding encoding = GeoPointFieldMapper.Encoding.of(precision);
final GeoPointFieldMapperLegacy.Encoding encoding = GeoPointFieldMapperLegacy.Encoding.of(precision);
assertThat(encoding.precision().convert(DistanceUnit.METERS).value, lessThanOrEqualTo(precision.convert(DistanceUnit.METERS).value));
final GeoPoint geoPoint = encoding.decode(encoding.encodeCoordinate(lat), encoding.encodeCoordinate(lon), new GeoPoint());
final double error = GeoDistance.PLANE.calculate(lat, lon, geoPoint.lat(), geoPoint.lon(), DistanceUnit.METERS);

View File

@ -19,12 +19,14 @@
package org.elasticsearch.index.mapper.geo;
import org.apache.lucene.util.GeoHashUtils;
import org.apache.lucene.util.GeoUtils;
import org.elasticsearch.Version;
import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.Priority;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.DocumentMapperParser;
@ -55,7 +57,9 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
.startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).endObject().endObject()
.endObject().endObject().string();
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT);
Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse(mapping);
ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
.startObject()
@ -63,20 +67,30 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
.endObject()
.bytes());
// norelease update to .before(Version.V_2_2_0) once GeoPointFieldV2 is fully merged
boolean indexCreatedBefore22 = version.onOrBefore(Version.CURRENT);
assertThat(doc.rootDoc().getField("point.lat"), notNullValue());
assertThat(doc.rootDoc().getField("point.lat").fieldType().stored(), is(false));
final boolean stored = indexCreatedBefore22 == false;
assertThat(doc.rootDoc().getField("point.lat").fieldType().stored(), is(stored));
assertThat(doc.rootDoc().getField("point.lon"), notNullValue());
assertThat(doc.rootDoc().getField("point.lon").fieldType().stored(), is(false));
assertThat(doc.rootDoc().getField("point.lon").fieldType().stored(), is(stored));
assertThat(doc.rootDoc().getField("point.geohash"), nullValue());
assertThat(doc.rootDoc().get("point"), equalTo("1.2,1.3"));
if (indexCreatedBefore22 == true) {
assertThat(doc.rootDoc().get("point"), equalTo("1.2,1.3"));
} else {
assertThat(Long.parseLong(doc.rootDoc().get("point")), equalTo(GeoUtils.mortonHash(1.3, 1.2)));
}
}
public void testLatLonValuesWithGeohash() throws Exception {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).field("geohash", true).endObject().endObject()
.startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true)
.field("geohash", true).endObject().endObject()
.endObject().endObject().string();
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT);
Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse(mapping);
ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
.startObject()
@ -91,10 +105,12 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
public void testLatLonInOneValueWithGeohash() throws Exception {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).field("geohash", true).endObject().endObject()
.endObject().endObject().string();
.startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true)
.field("geohash", true).endObject().endObject().endObject().endObject().string();
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT);
Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse(mapping);
ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
.startObject()
@ -109,10 +125,12 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
public void testGeoHashIndexValue() throws Exception {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).field("geohash", true).endObject().endObject()
.endObject().endObject().string();
.startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true)
.field("geohash", true).endObject().endObject().endObject().endObject().string();
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT);
Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse(mapping);
ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
.startObject()
@ -127,10 +145,12 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
public void testGeoHashValue() throws Exception {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).endObject().endObject()
.endObject().endObject().string();
.startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true)
.endObject().endObject().endObject().endObject().string();
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT);
Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse(mapping);
ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
.startObject()
@ -144,13 +164,18 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
}
public void testNormalizeLatLonValuesDefault() throws Exception {
Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT);
// default to normalize
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("point").field("type", "geo_point").field("coerce", true)
.field("ignore_malformed", true).endObject().endObject()
.endObject().endObject().string();
XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("point").field("type", "geo_point");
// norelease update to .before(Version.V_2_2_0) once GeoPointFieldV2 is fully merged
if (version.onOrBefore(Version.CURRENT)) {
mapping.field("coerce", true);
}
mapping.field("ignore_malformed", true).endObject().endObject().endObject().endObject();
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse(mapping.string());
ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
.startObject()
@ -158,7 +183,12 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
.endObject()
.bytes());
assertThat(doc.rootDoc().get("point"), equalTo("89.0,1.0"));
// norelease update to .before(Version.V_2_2_0) once GeoPointFieldV2 is fully merged
if (version.onOrBefore(Version.CURRENT)) {
assertThat(doc.rootDoc().get("point"), equalTo("89.0,1.0"));
} else {
assertThat(Long.parseLong(doc.rootDoc().get("point")), equalTo(GeoUtils.mortonHash(1.0, 89.0)));
}
doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
.startObject()
@ -166,7 +196,12 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
.endObject()
.bytes());
assertThat(doc.rootDoc().get("point"), equalTo("-89.0,-1.0"));
// norelease update to .before(Version.V_2_2_0) once GeoPointFieldV2 is fully merged
if (version.onOrBefore(Version.CURRENT)) {
assertThat(doc.rootDoc().get("point"), equalTo("-89.0,-1.0"));
} else {
assertThat(Long.parseLong(doc.rootDoc().get("point")), equalTo(GeoUtils.mortonHash(-1.0, -89.0)));
}
doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
.startObject()
@ -174,19 +209,28 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
.endObject()
.bytes());
assertThat(doc.rootDoc().get("point"), equalTo("-1.0,-179.0"));
// norelease update to .before(Version.V_2_2_0) once GeoPointFieldV2 is fully merged
if (version.onOrBefore(Version.CURRENT)) {
assertThat(doc.rootDoc().get("point"), equalTo("-1.0,-179.0"));
} else {
assertThat(Long.parseLong(doc.rootDoc().get("point")), equalTo(GeoUtils.mortonHash(-179.0, -1.0)));
}
}
public void testValidateLatLonValues() throws Exception {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).field("coerce", false)
.field("ignore_malformed", false).endObject().endObject()
.endObject().endObject().string();
Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT);
XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true);
// norelease update to .before(Version.V_2_2_0) once GeoPointFieldV2 is fully merged
if (version.onOrBefore(Version.CURRENT)) {
mapping.field("coerce", false);
}
mapping.field("ignore_malformed", false).endObject().endObject().endObject().endObject().string();
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse(mapping.string());
ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
.startObject()
.startObject("point").field("lat", 90).field("lon", 1.3).endObject()
.endObject()
@ -238,15 +282,19 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
}
public void testNoValidateLatLonValues() throws Exception {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).field("coerce", false)
.field("ignore_malformed", true).endObject().endObject()
.endObject().endObject().string();
Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT);
XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true);
// norelease update to .before(Version.V_2_2_0) once GeoPointFieldV2 is fully merged
if (version.onOrBefore(Version.CURRENT)) {
mapping.field("coerce", false);
}
mapping.field("ignore_malformed", true).endObject().endObject().endObject().endObject().string();
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse(mapping.string());
ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
.startObject()
.startObject("point").field("lat", 90).field("lon", 1.3).endObject()
.endObject()
@ -279,10 +327,12 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
public void testLatLonValuesStored() throws Exception {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).field("store", "yes").endObject().endObject()
.endObject().endObject().string();
.startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true)
.field("store", "yes").endObject().endObject().endObject().endObject().string();
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT);
Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse(mapping);
ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
.startObject()
@ -295,15 +345,22 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
assertThat(doc.rootDoc().getField("point.lon"), notNullValue());
assertThat(doc.rootDoc().getField("point.lon").numericValue().doubleValue(), equalTo(1.3));
assertThat(doc.rootDoc().getField("point.geohash"), nullValue());
assertThat(doc.rootDoc().get("point"), equalTo("1.2,1.3"));
// norelease update to .before(Version.V_2_2_0) once GeoPointFieldV2 is fully merged
if (version.onOrBefore(Version.CURRENT)) {
assertThat(doc.rootDoc().get("point"), equalTo("1.2,1.3"));
} else {
assertThat(Long.parseLong(doc.rootDoc().get("point")), equalTo(GeoUtils.mortonHash(1.3, 1.2)));
}
}
public void testArrayLatLonValues() throws Exception {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).field("store", "yes").endObject().endObject()
.endObject().endObject().string();
.startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true)
.field("store", "yes").endObject().endObject().endObject().endObject().string();
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT);
Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse(mapping);
ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
.startObject()
@ -318,18 +375,30 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
assertThat(doc.rootDoc().getFields("point.lon").length, equalTo(2));
assertThat(doc.rootDoc().getFields("point.lat")[0].numericValue().doubleValue(), equalTo(1.2));
assertThat(doc.rootDoc().getFields("point.lon")[0].numericValue().doubleValue(), equalTo(1.3));
assertThat(doc.rootDoc().getFields("point")[0].stringValue(), equalTo("1.2,1.3"));
// norelease update to .before(Version.V_2_2_0) once GeoPointFieldV2 is fully merged
if (version.onOrBefore(Version.CURRENT)) {
assertThat(doc.rootDoc().getFields("point")[0].stringValue(), equalTo("1.2,1.3"));
} else {
assertThat(Long.parseLong(doc.rootDoc().getFields("point")[0].stringValue()), equalTo(GeoUtils.mortonHash(1.3, 1.2)));
}
assertThat(doc.rootDoc().getFields("point.lat")[1].numericValue().doubleValue(), equalTo(1.4));
assertThat(doc.rootDoc().getFields("point.lon")[1].numericValue().doubleValue(), equalTo(1.5));
assertThat(doc.rootDoc().getFields("point")[1].stringValue(), equalTo("1.4,1.5"));
// norelease update to .before(Version.V_2_2_0) once GeoPointFieldV2 is fully merged
if (version.onOrBefore(Version.CURRENT)) {
assertThat(doc.rootDoc().getFields("point")[1].stringValue(), equalTo("1.4,1.5"));
} else {
assertThat(Long.parseLong(doc.rootDoc().getFields("point")[1].stringValue()), equalTo(GeoUtils.mortonHash(1.5, 1.4)));
}
}
public void testLatLonInOneValue() throws Exception {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).endObject().endObject()
.endObject().endObject().string();
.startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true)
.endObject().endObject().endObject().endObject().string();
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT);
Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse(mapping);
ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
.startObject()
@ -339,7 +408,12 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
assertThat(doc.rootDoc().getField("point.lat"), notNullValue());
assertThat(doc.rootDoc().getField("point.lon"), notNullValue());
assertThat(doc.rootDoc().get("point"), equalTo("1.2,1.3"));
// norelease update to .before(Version.V_2_2_0) once GeoPointFieldV2 is fully merged
if (version.onOrBefore(Version.CURRENT)) {
assertThat(doc.rootDoc().get("point"), equalTo("1.2,1.3"));
} else {
assertThat(Long.parseLong(doc.rootDoc().get("point")), equalTo(GeoUtils.mortonHash(1.3, 1.2)));
}
}
public void testLatLonInOneValueStored() throws Exception {
@ -347,7 +421,9 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
.startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).field("store", "yes").endObject().endObject()
.endObject().endObject().string();
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT);
Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse(mapping);
ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
.startObject()
@ -359,15 +435,22 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
assertThat(doc.rootDoc().getField("point.lat").numericValue().doubleValue(), equalTo(1.2));
assertThat(doc.rootDoc().getField("point.lon"), notNullValue());
assertThat(doc.rootDoc().getField("point.lon").numericValue().doubleValue(), equalTo(1.3));
assertThat(doc.rootDoc().get("point"), equalTo("1.2,1.3"));
// norelease update to .before(Version.V_2_2_0) once GeoPointFieldV2 is fully merged
if (version.onOrBefore(Version.CURRENT)) {
assertThat(doc.rootDoc().get("point"), equalTo("1.2,1.3"));
} else {
assertThat(Long.parseLong(doc.rootDoc().get("point")), equalTo(GeoUtils.mortonHash(1.3, 1.2)));
}
}
public void testLatLonInOneValueArray() throws Exception {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).field("store", "yes").endObject().endObject()
.endObject().endObject().string();
.startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true)
.field("store", "yes").endObject().endObject().endObject().endObject().string();
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT);
Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse(mapping);
ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
.startObject()
@ -382,10 +465,20 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
assertThat(doc.rootDoc().getFields("point.lon").length, equalTo(2));
assertThat(doc.rootDoc().getFields("point.lat")[0].numericValue().doubleValue(), equalTo(1.2));
assertThat(doc.rootDoc().getFields("point.lon")[0].numericValue().doubleValue(), equalTo(1.3));
assertThat(doc.rootDoc().getFields("point")[0].stringValue(), equalTo("1.2,1.3"));
// norelease update to .before(Version.V_2_2_0) once GeoPointFieldV2 is fully merged
if (version.onOrBefore(Version.CURRENT)) {
assertThat(doc.rootDoc().getFields("point")[0].stringValue(), equalTo("1.2,1.3"));
} else {
assertThat(Long.parseLong(doc.rootDoc().getFields("point")[0].stringValue()), equalTo(GeoUtils.mortonHash(1.3, 1.2)));
}
assertThat(doc.rootDoc().getFields("point.lat")[1].numericValue().doubleValue(), equalTo(1.4));
assertThat(doc.rootDoc().getFields("point.lon")[1].numericValue().doubleValue(), equalTo(1.5));
assertThat(doc.rootDoc().getFields("point")[1].stringValue(), equalTo("1.4,1.5"));
// norelease update to .before(Version.V_2_2_0) once GeoPointFieldV2 is fully merged
if (version.onOrBefore(Version.CURRENT)) {
assertThat(doc.rootDoc().getFields("point")[1].stringValue(), equalTo("1.4,1.5"));
} else {
assertThat(Long.parseLong(doc.rootDoc().getFields("point")[1].stringValue()), equalTo(GeoUtils.mortonHash(1.5, 1.4)));
}
}
public void testLonLatArray() throws Exception {
@ -393,7 +486,9 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
.startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).endObject().endObject()
.endObject().endObject().string();
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT);
Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse(mapping);
ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
.startObject()
@ -403,17 +498,23 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
assertThat(doc.rootDoc().getField("point.lat"), notNullValue());
assertThat(doc.rootDoc().getField("point.lon"), notNullValue());
assertThat(doc.rootDoc().get("point"), equalTo("1.2,1.3"));
// norelease update to .before(Version.V_2_2_0) once GeoPointFieldV2 is fully merged
if (version.onOrBefore(Version.CURRENT)) {
assertThat(doc.rootDoc().get("point"), equalTo("1.2,1.3"));
} else {
assertThat(Long.parseLong(doc.rootDoc().getFields("point")[1].stringValue()), equalTo(GeoUtils.mortonHash(1.3, 1.2)));
}
}
public void testLonLatArrayDynamic() throws Exception {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startArray("dynamic_templates").startObject()
.startObject("point").field("match", "point*").startObject("mapping").field("type", "geo_point").field("lat_lon", true).endObject().endObject()
.endObject().endArray()
.endObject().endObject().string();
.startObject("point").field("match", "point*").startObject("mapping").field("type", "geo_point")
.field("lat_lon", true).endObject().endObject().endObject().endArray().endObject().endObject().string();
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT);
Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse(mapping);
ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
.startObject()
@ -423,15 +524,22 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
assertThat(doc.rootDoc().getField("point.lat"), notNullValue());
assertThat(doc.rootDoc().getField("point.lon"), notNullValue());
assertThat(doc.rootDoc().get("point"), equalTo("1.2,1.3"));
// norelease update to .before(Version.V_2_2_0) once GeoPointFieldV2 is fully merged
if (version.onOrBefore(Version.CURRENT)) {
assertThat(doc.rootDoc().get("point"), equalTo("1.2,1.3"));
} else {
assertThat(Long.parseLong(doc.rootDoc().getFields("point")[1].stringValue()), equalTo(GeoUtils.mortonHash(1.3, 1.2)));
}
}
public void testLonLatArrayStored() throws Exception {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).field("store", "yes").endObject().endObject()
.endObject().endObject().string();
.startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true)
.field("store", "yes").endObject().endObject().endObject().endObject().string();
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT);
Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse(mapping);
ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
.startObject()
@ -443,15 +551,22 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
assertThat(doc.rootDoc().getField("point.lat").numericValue().doubleValue(), equalTo(1.2));
assertThat(doc.rootDoc().getField("point.lon"), notNullValue());
assertThat(doc.rootDoc().getField("point.lon").numericValue().doubleValue(), equalTo(1.3));
assertThat(doc.rootDoc().get("point"), equalTo("1.2,1.3"));
// norelease update to .before(Version.V_2_2_0) once GeoPointFieldV2 is fully merged
if (version.onOrBefore(Version.CURRENT)) {
assertThat(doc.rootDoc().get("point"), equalTo("1.2,1.3"));
} else {
assertThat(Long.parseLong(doc.rootDoc().getFields("point")[1].stringValue()), equalTo(GeoUtils.mortonHash(1.3, 1.2)));
}
}
public void testLonLatArrayArrayStored() throws Exception {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).field("store", "yes").endObject().endObject()
.endObject().endObject().string();
.startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true)
.field("store", "yes").endObject().endObject().endObject().endObject().string();
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT);
Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse(mapping);
ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
.startObject()
@ -466,10 +581,20 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
assertThat(doc.rootDoc().getFields("point.lon").length, equalTo(2));
assertThat(doc.rootDoc().getFields("point.lat")[0].numericValue().doubleValue(), equalTo(1.2));
assertThat(doc.rootDoc().getFields("point.lon")[0].numericValue().doubleValue(), equalTo(1.3));
assertThat(doc.rootDoc().getFields("point")[0].stringValue(), equalTo("1.2,1.3"));
// norelease update to .before(Version.V_2_2_0) once GeoPointFieldV2 is fully merged
if (version.onOrBefore(Version.CURRENT)) {
assertThat(doc.rootDoc().get("point"), equalTo("1.2,1.3"));
} else {
assertThat(Long.parseLong(doc.rootDoc().getFields("point")[1].stringValue()), equalTo(GeoUtils.mortonHash(1.3, 1.2)));
}
assertThat(doc.rootDoc().getFields("point.lat")[1].numericValue().doubleValue(), equalTo(1.4));
assertThat(doc.rootDoc().getFields("point.lon")[1].numericValue().doubleValue(), equalTo(1.5));
assertThat(doc.rootDoc().getFields("point")[1].stringValue(), equalTo("1.4,1.5"));
// norelease update to .before(Version.V_2_2_0) once GeoPointFieldV2 is fully merged
if (version.onOrBefore(Version.CURRENT)) {
assertThat(doc.rootDoc().get("point"), equalTo("1.2,1.3"));
} else {
assertThat(Long.parseLong(doc.rootDoc().getFields("point")[1].stringValue()), equalTo(GeoUtils.mortonHash(1.5, 1.4)));
}
}
@ -477,7 +602,9 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
* Test that expected exceptions are thrown when creating a new index with deprecated options
*/
public void testOptionDeprecation() throws Exception {
DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT);
Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
DocumentMapperParser parser = createIndex("test", settings).mapperService().documentMapperParser();
// test deprecation exceptions on newly created indexes
try {
String validateMapping = XContentFactory.jsonBuilder().startObject().startObject("type")
@ -602,30 +729,30 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
}
public void testGeoPointMapperMerge() throws Exception {
Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT);
Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
String stage1Mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).field("geohash", true)
.field("coerce", true).endObject().endObject()
.endObject().endObject().string();
DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
.startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true)
.field("geohash", true).endObject().endObject().endObject().endObject().string();
DocumentMapperParser parser = createIndex("test", settings).mapperService().documentMapperParser();
DocumentMapper stage1 = parser.parse(stage1Mapping);
String stage2Mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", false).field("geohash", true)
.field("coerce", false).endObject().endObject()
.endObject().endObject().string();
.startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", false)
.field("geohash", false).endObject().endObject().endObject().endObject().string();
DocumentMapper stage2 = parser.parse(stage2Mapping);
MergeResult mergeResult = stage1.merge(stage2.mapping(), false, false);
assertThat(mergeResult.hasConflicts(), equalTo(true));
assertThat(mergeResult.buildConflicts().length, equalTo(2));
assertThat(mergeResult.buildConflicts().length, equalTo(3));
// todo better way of checking conflict?
assertThat("mapper [point] has different [lat_lon]", isIn(new ArrayList<>(Arrays.asList(mergeResult.buildConflicts()))));
assertThat("mapper [point] has different [coerce]", isIn(new ArrayList<>(Arrays.asList(mergeResult.buildConflicts()))));
assertThat("mapper [point] has different [geohash]", isIn(new ArrayList<>(Arrays.asList(mergeResult.buildConflicts()))));
assertThat("mapper [point] has different [geohash_precision]", isIn(new ArrayList<>(Arrays.asList(mergeResult.buildConflicts()))));
// correct mapping and ensure no failures
stage2Mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).field("geohash", true)
.field("coerce", true).endObject().endObject()
.endObject().endObject().string();
.startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true)
.field("geohash", true).endObject().endObject().endObject().endObject().string();
stage2 = parser.parse(stage2Mapping);
mergeResult = stage1.merge(stage2.mapping(), false, false);
assertThat(Arrays.toString(mergeResult.buildConflicts()), mergeResult.hasConflicts(), equalTo(false));
@ -639,7 +766,10 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
.endObject().endObject().endObject().string();
// create index and add a test point (dr5regy6rc6z)
CreateIndexRequestBuilder mappingRequest = client().admin().indices().prepareCreate("test").addMapping("pin", mapping);
Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT);
Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
CreateIndexRequestBuilder mappingRequest = client().admin().indices().prepareCreate("test").setSettings(settings)
.addMapping("pin", mapping);
mappingRequest.execute().actionGet();
client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForGreenStatus().execute().actionGet();
client().prepareIndex("test", "pin", "1").setSource(jsonBuilder().startObject().startObject("location").field("lat", 40.7143528)
@ -661,7 +791,10 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
.endObject().endObject().endObject().endObject().string();
// create index and add a test point (dr5regy6rc6z)
CreateIndexRequestBuilder mappingRequest = client().admin().indices().prepareCreate("test").addMapping("pin", mapping);
Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT);
Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
CreateIndexRequestBuilder mappingRequest = client().admin().indices().prepareCreate("test").setSettings(settings)
.addMapping("pin", mapping);
mappingRequest.execute().actionGet();
client().admin().cluster().prepareHealth().setWaitForEvents(Priority.LANGUID).setWaitForGreenStatus().execute().actionGet();
client().prepareIndex("test", "pin", "1").setSource(jsonBuilder().startObject().startObject("location").field("lat", 40.7143528)
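The version-gated assertions repeated throughout the tests above follow a single pattern: indexes created before the GeoPointFieldV2 cutover index the point as a "lat,lon" string, while newer indexes index the morton-encoded long returned by GeoUtils.mortonHash(lon, lat). The helper below is a minimal sketch of that pattern and is not part of this commit; the class and method names are hypothetical, and the eventual cutover version (V_2_2_0) is assumed from the norelease comments.

import org.apache.lucene.index.IndexableField;
import org.apache.lucene.util.GeoUtils;
import org.elasticsearch.Version;

public class GeoPointEncodingAssertions {
    // Checks the indexed "point" field according to the version the index was created with.
    static void assertPointEncoding(Version indexCreated, IndexableField pointField, double lat, double lon) {
        // norelease: becomes indexCreated.before(Version.V_2_2_0) once GeoPointFieldV2 is fully merged
        if (indexCreated.onOrBefore(Version.CURRENT)) {
            // legacy geo_point: the field value is the "lat,lon" string
            assert pointField.stringValue().equals(lat + "," + lon);
        } else {
            // GeoPointV2: the field value is the morton-interleaved long of (lon, lat)
            assert Long.parseLong(pointField.stringValue()) == GeoUtils.mortonHash(lon, lat);
        }
    }
}

Each test above inlines this check directly against doc.rootDoc() rather than sharing a helper.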

View File

@ -27,7 +27,7 @@ import org.junit.Before;
public class GeoPointFieldTypeTests extends FieldTypeTestCase {
@Override
protected MappedFieldType createDefaultFieldType() {
return new GeoPointFieldMapper.GeoPointFieldType();
return new BaseGeoPointFieldMapper.GeoPointFieldType();
}
@Before
@ -35,13 +35,13 @@ public class GeoPointFieldTypeTests extends FieldTypeTestCase {
addModifier(new Modifier("geohash", false, true) {
@Override
public void modify(MappedFieldType ft) {
((GeoPointFieldMapper.GeoPointFieldType)ft).setGeohashEnabled(new StringFieldMapper.StringFieldType(), 1, true);
((BaseGeoPointFieldMapper.GeoPointFieldType)ft).setGeoHashEnabled(new StringFieldMapper.StringFieldType(), 1, true);
}
});
addModifier(new Modifier("lat_lon", false, true) {
@Override
public void modify(MappedFieldType ft) {
((GeoPointFieldMapper.GeoPointFieldType)ft).setLatLonEnabled(new DoubleFieldMapper.DoubleFieldType(), new DoubleFieldMapper.DoubleFieldType());
((BaseGeoPointFieldMapper.GeoPointFieldType)ft).setLatLonEnabled(new DoubleFieldMapper.DoubleFieldType(), new DoubleFieldMapper.DoubleFieldType());
}
});
}

View File

@ -20,11 +20,16 @@
package org.elasticsearch.index.mapper.geo;
import org.apache.lucene.util.GeoHashUtils;
import org.apache.lucene.util.GeoUtils;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.ParsedDocument;
import org.elasticsearch.test.ESSingleNodeTestCase;
import org.elasticsearch.test.VersionUtils;
import org.hamcrest.MatcherAssert;
import static org.hamcrest.Matchers.equalTo;
@ -39,10 +44,12 @@ import static org.hamcrest.Matchers.nullValue;
public class GeohashMappingGeoPointTests extends ESSingleNodeTestCase {
public void testLatLonValues() throws Exception {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", false).endObject().endObject()
.endObject().endObject().string();
.startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", false)
.endObject().endObject().endObject().endObject().string();
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT);
Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse(mapping);
ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
.startObject()
@ -50,9 +57,14 @@ public class GeohashMappingGeoPointTests extends ESSingleNodeTestCase {
.endObject()
.bytes());
MatcherAssert.assertThat(doc.rootDoc().getField("point.lat"), nullValue());
MatcherAssert.assertThat(doc.rootDoc().getField("point.lon"), nullValue());
MatcherAssert.assertThat(doc.rootDoc().get("point"), equalTo("1.2,1.3"));
assertThat(doc.rootDoc().getField("point.lat"), nullValue());
assertThat(doc.rootDoc().getField("point.lon"), nullValue());
// norelease update to .before(Version.V_2_2_0) once GeoPointFieldV2 is fully merged
if (version.onOrBefore(Version.CURRENT)) {
assertThat(doc.rootDoc().get("point"), equalTo("1.2,1.3"));
} else {
assertThat(Long.parseLong(doc.rootDoc().get("point")), equalTo(GeoUtils.mortonHash(1.3, 1.2)));
}
}
public void testLatLonInOneValue() throws Exception {
@ -60,7 +72,9 @@ public class GeohashMappingGeoPointTests extends ESSingleNodeTestCase {
.startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", false).endObject().endObject()
.endObject().endObject().string();
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT);
Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse(mapping);
ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
.startObject()
@ -68,17 +82,24 @@ public class GeohashMappingGeoPointTests extends ESSingleNodeTestCase {
.endObject()
.bytes());
MatcherAssert.assertThat(doc.rootDoc().getField("point.lat"), nullValue());
MatcherAssert.assertThat(doc.rootDoc().getField("point.lon"), nullValue());
MatcherAssert.assertThat(doc.rootDoc().get("point"), equalTo("1.2,1.3"));
assertThat(doc.rootDoc().getField("point.lat"), nullValue());
assertThat(doc.rootDoc().getField("point.lon"), nullValue());
// norelease update to .before(Version.V_2_2_0) once GeoPointFieldV2 is fully merged
if (version.onOrBefore(Version.CURRENT)) {
assertThat(doc.rootDoc().get("point"), equalTo("1.2,1.3"));
} else {
assertThat(Long.parseLong(doc.rootDoc().get("point")), equalTo(GeoUtils.mortonHash(1.3, 1.2)));
}
}
public void testGeoHashValue() throws Exception {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("point").field("type", "geo_point").field("geohash", true).endObject().endObject()
.endObject().endObject().string();
.startObject("properties").startObject("point").field("type", "geo_point").field("geohash", true)
.endObject().endObject().endObject().endObject().string();
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT);
Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse(mapping);
ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
.startObject()
@ -86,32 +107,38 @@ public class GeohashMappingGeoPointTests extends ESSingleNodeTestCase {
.endObject()
.bytes());
MatcherAssert.assertThat(doc.rootDoc().getField("point.lat"), nullValue());
MatcherAssert.assertThat(doc.rootDoc().getField("point.lon"), nullValue());
MatcherAssert.assertThat(doc.rootDoc().get("point.geohash"), equalTo(GeoHashUtils.stringEncode(1.3, 1.2)));
MatcherAssert.assertThat(doc.rootDoc().get("point"), notNullValue());
assertThat(doc.rootDoc().getField("point.lat"), nullValue());
assertThat(doc.rootDoc().getField("point.lon"), nullValue());
assertThat(doc.rootDoc().get("point.geohash"), equalTo(GeoHashUtils.stringEncode(1.3, 1.2)));
assertThat(doc.rootDoc().get("point"), notNullValue());
}
public void testGeoHashPrecisionAsInteger() throws Exception {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("point").field("type", "geo_point").field("geohash", true).field("geohash_precision", 10).endObject().endObject()
.endObject().endObject().string();
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
.startObject("properties").startObject("point").field("type", "geo_point").field("geohash", true)
.field("geohash_precision", 10).endObject().endObject().endObject().endObject().string();
Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT);
Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse(mapping);
FieldMapper mapper = defaultMapper.mappers().smartNameFieldMapper("point");
assertThat(mapper, instanceOf(GeoPointFieldMapper.class));
GeoPointFieldMapper geoPointFieldMapper = (GeoPointFieldMapper) mapper;
assertThat(geoPointFieldMapper.fieldType().geohashPrecision(), is(10));
assertThat(mapper, instanceOf(BaseGeoPointFieldMapper.class));
BaseGeoPointFieldMapper geoPointFieldMapper = (BaseGeoPointFieldMapper) mapper;
assertThat(geoPointFieldMapper.fieldType().geoHashPrecision(), is(10));
}
public void testGeoHashPrecisionAsLength() throws Exception {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("point").field("type", "geo_point").field("geohash", true).field("geohash_precision", "5m").endObject().endObject()
.endObject().endObject().string();
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT);
Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse(mapping);
FieldMapper mapper = defaultMapper.mappers().smartNameFieldMapper("point");
assertThat(mapper, instanceOf(GeoPointFieldMapper.class));
GeoPointFieldMapper geoPointFieldMapper = (GeoPointFieldMapper) mapper;
assertThat(geoPointFieldMapper.fieldType().geohashPrecision(), is(10));
assertThat(mapper, instanceOf(BaseGeoPointFieldMapper.class));
BaseGeoPointFieldMapper geoPointFieldMapper = (BaseGeoPointFieldMapper) mapper;
assertThat(geoPointFieldMapper.fieldType().geoHashPrecision(), is(10));
}
public void testNullValue() throws Exception {
@ -119,7 +146,9 @@ public class GeohashMappingGeoPointTests extends ESSingleNodeTestCase {
.startObject("properties").startObject("point").field("type", "geo_point").endObject().endObject()
.endObject().endObject().string();
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT);
Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
DocumentMapper defaultMapper = createIndex("test", settings).mapperService().documentMapperParser().parse(mapping);
ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
.startObject()
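For readers unfamiliar with the morton (z-order) encoding these assertions compare against, the sketch below illustrates the idea only: scale longitude and latitude to 32-bit integers and interleave their bits into one long, so points that are close on the globe tend to be close in the encoded value. It is not Lucene's actual GeoUtils.mortonHash implementation, whose scaling and bit manipulation differ.

public class MortonSketch {
    // Conceptual z-order interleave of (lon, lat); illustration only.
    static long interleave(double lon, double lat) {
        long x = (long) (((lon + 180.0) / 360.0) * 0xFFFFFFFFL); // lon mapped onto unsigned 32 bits
        long y = (long) (((lat + 90.0) / 180.0) * 0xFFFFFFFFL);  // lat mapped onto unsigned 32 bits
        long morton = 0L;
        for (int i = 0; i < 32; i++) {
            morton |= ((x >>> i) & 1L) << (2 * i);     // even bit positions carry lon bits
            morton |= ((y >>> i) & 1L) << (2 * i + 1); // odd bit positions carry lat bits
        }
        return morton;
    }
}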

View File

@ -21,6 +21,9 @@ package org.elasticsearch.index.mapper.multifield;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.util.GeoUtils;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.settings.Settings;
@ -38,8 +41,9 @@ import org.elasticsearch.index.mapper.core.DateFieldMapper;
import org.elasticsearch.index.mapper.core.LongFieldMapper;
import org.elasticsearch.index.mapper.core.StringFieldMapper;
import org.elasticsearch.index.mapper.core.TokenCountFieldMapper;
import org.elasticsearch.index.mapper.geo.GeoPointFieldMapper;
import org.elasticsearch.index.mapper.geo.BaseGeoPointFieldMapper;
import org.elasticsearch.test.ESSingleNodeTestCase;
import org.elasticsearch.test.VersionUtils;
import java.util.Arrays;
import java.util.Collections;
@ -251,8 +255,12 @@ public class MultiFieldTests extends ESSingleNodeTestCase {
}
public void testConvertMultiFieldGeoPoint() throws Exception {
Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT);
Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
// norelease update to .before(Version.V_2_2_0) once GeoPointFieldV2 is fully merged
boolean indexCreatedBefore22 = version.onOrBefore(Version.CURRENT);
String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/multifield/test-multi-field-type-geo_point.json");
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
DocumentMapper docMapper = createIndex("test", settings).mapperService().documentMapperParser().parse(mapping);
assertThat(docMapper.mappers().getMapper("a"), notNullValue());
assertThat(docMapper.mappers().getMapper("a"), instanceOf(StringFieldMapper.class));
@ -261,10 +269,13 @@ public class MultiFieldTests extends ESSingleNodeTestCase {
assertThat(docMapper.mappers().getMapper("a").fieldType().tokenized(), equalTo(false));
assertThat(docMapper.mappers().getMapper("a.b"), notNullValue());
assertThat(docMapper.mappers().getMapper("a.b"), instanceOf(GeoPointFieldMapper.class));
assertThat(docMapper.mappers().getMapper("a.b"), instanceOf(BaseGeoPointFieldMapper.class));
assertNotSame(IndexOptions.NONE, docMapper.mappers().getMapper("a.b").fieldType().indexOptions());
assertThat(docMapper.mappers().getMapper("a.b").fieldType().stored(), equalTo(false));
final boolean stored = indexCreatedBefore22 == false;
assertThat(docMapper.mappers().getMapper("a.b").fieldType().stored(), equalTo(stored));
assertThat(docMapper.mappers().getMapper("a.b").fieldType().tokenized(), equalTo(false));
final boolean hasDocValues = indexCreatedBefore22 == false;
assertThat(docMapper.mappers().getMapper("a.b").fieldType().hasDocValues(), equalTo(hasDocValues));
BytesReference json = jsonBuilder().startObject()
.field("a", "-1,-1")
@ -281,15 +292,20 @@ public class MultiFieldTests extends ESSingleNodeTestCase {
f = doc.getField("a.b");
assertThat(f, notNullValue());
assertThat(f.name(), equalTo("a.b"));
assertThat(f.stringValue(), equalTo("-1.0,-1.0"));
assertThat(f.fieldType().stored(), equalTo(false));
if (indexCreatedBefore22 == true) {
assertThat(f.stringValue(), equalTo("-1.0,-1.0"));
} else {
assertThat(Long.parseLong(f.stringValue()), equalTo(GeoUtils.mortonHash(-1.0, -1.0)));
}
assertThat(f.fieldType().stored(), equalTo(stored));
assertNotSame(IndexOptions.NONE, f.fieldType().indexOptions());
assertThat(docMapper.mappers().getMapper("b"), notNullValue());
assertThat(docMapper.mappers().getMapper("b"), instanceOf(GeoPointFieldMapper.class));
assertThat(docMapper.mappers().getMapper("b"), instanceOf(BaseGeoPointFieldMapper.class));
assertNotSame(IndexOptions.NONE, docMapper.mappers().getMapper("b").fieldType().indexOptions());
assertThat(docMapper.mappers().getMapper("b").fieldType().stored(), equalTo(false));
assertThat(docMapper.mappers().getMapper("b").fieldType().stored(), equalTo(stored));
assertThat(docMapper.mappers().getMapper("b").fieldType().tokenized(), equalTo(false));
assertThat(docMapper.mappers().getMapper("b").fieldType().hasDocValues(), equalTo(hasDocValues));
assertThat(docMapper.mappers().getMapper("b.a"), notNullValue());
assertThat(docMapper.mappers().getMapper("b.a"), instanceOf(StringFieldMapper.class));
@ -305,8 +321,12 @@ public class MultiFieldTests extends ESSingleNodeTestCase {
f = doc.getField("b");
assertThat(f, notNullValue());
assertThat(f.name(), equalTo("b"));
assertThat(f.stringValue(), equalTo("-1.0,-1.0"));
assertThat(f.fieldType().stored(), equalTo(false));
if (indexCreatedBefore22 == true) {
assertThat(f.stringValue(), equalTo("-1.0,-1.0"));
} else {
assertThat(Long.parseLong(f.stringValue()), equalTo(GeoUtils.mortonHash(-1.0, -1.0)));
}
assertThat(f.fieldType().stored(), equalTo(stored));
assertNotSame(IndexOptions.NONE, f.fieldType().indexOptions());
f = doc.getField("b.a");
@ -324,15 +344,23 @@ public class MultiFieldTests extends ESSingleNodeTestCase {
f = doc.getFields("b")[0];
assertThat(f, notNullValue());
assertThat(f.name(), equalTo("b"));
assertThat(f.stringValue(), equalTo("-1.0,-1.0"));
assertThat(f.fieldType().stored(), equalTo(false));
if (indexCreatedBefore22 == true) {
assertThat(f.stringValue(), equalTo("-1.0,-1.0"));
} else {
assertThat(Long.parseLong(f.stringValue()), equalTo(GeoUtils.mortonHash(-1.0, -1.0)));
}
assertThat(f.fieldType().stored(), equalTo(stored));
assertNotSame(IndexOptions.NONE, f.fieldType().indexOptions());
f = doc.getFields("b")[1];
assertThat(f, notNullValue());
assertThat(f.name(), equalTo("b"));
assertThat(f.stringValue(), equalTo("-2.0,-2.0"));
assertThat(f.fieldType().stored(), equalTo(false));
if (indexCreatedBefore22 == true) {
assertThat(f.stringValue(), equalTo("-2.0,-2.0"));
} else {
assertThat(Long.parseLong(f.stringValue()), equalTo(GeoUtils.mortonHash(-2.0, -2.0)));
}
assertThat(f.fieldType().stored(), equalTo(stored));
assertNotSame(IndexOptions.NONE, f.fieldType().indexOptions());
f = doc.getField("b.a");