Geo Overhaul (work with multiple locations), closes #414.

kimchy 2010-10-08 22:31:21 +02:00
parent 523a8b4c3e
commit 6314c2460c
38 changed files with 745 additions and 511 deletions

View File

@ -24,8 +24,9 @@ import org.apache.lucene.search.DocIdSet;
import org.apache.lucene.search.Filter;
import org.elasticsearch.common.lucene.docset.GetDocSet;
import org.elasticsearch.index.cache.field.data.FieldDataCache;
import org.elasticsearch.index.field.data.FieldDataType;
import org.elasticsearch.index.field.data.NumericFieldData;
import org.elasticsearch.index.mapper.xcontent.geo.GeoPoint;
import org.elasticsearch.index.mapper.xcontent.geo.GeoPointFieldData;
import org.elasticsearch.index.mapper.xcontent.geo.GeoPointFieldDataType;
import java.io.IOException;
@ -38,20 +39,14 @@ public class GeoBoundingBoxFilter extends Filter {
private final Point bottomRight;
private final String latFieldName;
private final String lonFieldName;
private final FieldDataType fieldDataType;
private final String fieldName;
private final FieldDataCache fieldDataCache;
public GeoBoundingBoxFilter(Point topLeft, Point bottomRight, String latFieldName, String lonFieldName, FieldDataType fieldDataType, FieldDataCache fieldDataCache) {
public GeoBoundingBoxFilter(Point topLeft, Point bottomRight, String fieldName, FieldDataCache fieldDataCache) {
this.topLeft = topLeft;
this.bottomRight = bottomRight;
this.latFieldName = latFieldName;
this.lonFieldName = lonFieldName;
this.fieldDataType = fieldDataType;
this.fieldName = fieldName;
this.fieldDataCache = fieldDataCache;
}
@ -63,55 +58,46 @@ public class GeoBoundingBoxFilter extends Filter {
return bottomRight;
}
public String latFieldName() {
return latFieldName;
}
public String lonFieldName() {
return lonFieldName;
public String fieldName() {
return fieldName;
}
@Override public DocIdSet getDocIdSet(IndexReader reader) throws IOException {
final NumericFieldData latFieldData = (NumericFieldData) fieldDataCache.cache(fieldDataType, reader, latFieldName);
final NumericFieldData lonFieldData = (NumericFieldData) fieldDataCache.cache(fieldDataType, reader, lonFieldName);
final GeoPointFieldData fieldData = (GeoPointFieldData) fieldDataCache.cache(GeoPointFieldDataType.TYPE, reader, fieldName);
//checks to see if bounding box crosses 180 degrees
if (topLeft.lon > bottomRight.lon) {
return new GetDocSet(reader.maxDoc()) {
@Override public boolean get(int doc) throws IOException {
if (!latFieldData.hasValue(doc) || !lonFieldData.hasValue(doc)) {
if (!fieldData.hasValue(doc)) {
return false;
}
if (latFieldData.multiValued()) {
double[] lats = latFieldData.doubleValues(doc);
double[] lons = latFieldData.doubleValues(doc);
for (int i = 0; i < lats.length; i++) {
double lat = lats[i];
double lon = lons[i];
if (lon < 0) {
if (-180.0 <= lon && bottomRight.lon >= lon
&& topLeft.lat >= lat && bottomRight.lat <= lat) {
if (fieldData.multiValued()) {
GeoPoint[] points = fieldData.values(doc);
for (GeoPoint point : points) {
if (point.lon() < 0) {
if (-180.0 <= point.lon() && bottomRight.lon >= point.lon()
&& topLeft.lat >= point.lat() && bottomRight.lat <= point.lat()) {
return true;
}
} else {
if (topLeft.lon <= lon && 180 >= lon
&& topLeft.lat >= lat && bottomRight.lat <= lat) {
if (topLeft.lon <= point.lon() && 180 >= point.lon()
&& topLeft.lat >= point.lat() && bottomRight.lat <= point.lat()) {
return true;
}
}
}
} else {
double lat = latFieldData.doubleValue(doc);
double lon = lonFieldData.doubleValue(doc);
if (lon < 0) {
if (-180.0 <= lon && bottomRight.lon >= lon
&& topLeft.lat >= lat && bottomRight.lat <= lat) {
GeoPoint point = fieldData.value(doc);
if (point.lon() < 0) {
if (-180.0 <= point.lon() && bottomRight.lon >= point.lon()
&& topLeft.lat >= point.lat() && bottomRight.lat <= point.lat()) {
return true;
}
} else {
if (topLeft.lon <= lon && 180 >= lon
&& topLeft.lat >= lat && bottomRight.lat <= lat) {
if (topLeft.lon <= point.lon() && 180 >= point.lon()
&& topLeft.lat >= point.lat() && bottomRight.lat <= point.lat()) {
return true;
}
}
@ -122,25 +108,23 @@ public class GeoBoundingBoxFilter extends Filter {
} else {
return new GetDocSet(reader.maxDoc()) {
@Override public boolean get(int doc) throws IOException {
if (!latFieldData.hasValue(doc) || !lonFieldData.hasValue(doc)) {
if (!fieldData.hasValue(doc)) {
return false;
}
if (latFieldData.multiValued()) {
double[] lats = latFieldData.doubleValues(doc);
double[] lons = latFieldData.doubleValues(doc);
for (int i = 0; i < lats.length; i++) {
if (topLeft.lon <= lons[i] && bottomRight.lon >= lons[i]
&& topLeft.lat >= lats[i] && bottomRight.lat <= lats[i]) {
if (fieldData.multiValued()) {
GeoPoint[] points = fieldData.values(doc);
for (GeoPoint point : points) {
if (topLeft.lon <= point.lon() && bottomRight.lon >= point.lon()
&& topLeft.lat >= point.lat() && bottomRight.lat <= point.lat()) {
return true;
}
}
} else {
double lat = latFieldData.doubleValue(doc);
double lon = lonFieldData.doubleValue(doc);
GeoPoint point = fieldData.value(doc);
if (topLeft.lon <= lon && bottomRight.lon >= lon
&& topLeft.lat >= lat && bottomRight.lat <= lat) {
if (topLeft.lon <= point.lon() && bottomRight.lon >= point.lon()
&& topLeft.lat >= point.lat() && bottomRight.lat <= point.lat()) {
return true;
}
}
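
Two things stand out in this hunk: a box that crosses the 180° meridian gets its own code path, and the removed two-field variant read both lats and lons out of latFieldData, a copy-paste hazard the single geo_point field removes. A minimal sketch of the containment test used above, factored into one helper; the method name is illustrative and longitudes are assumed normalized to [-180, 180]:

// Hedged sketch, not part of the commit: the same containment test as above.
static boolean inBoundingBox(Point topLeft, Point bottomRight, GeoPoint point) {
    if (topLeft.lon > bottomRight.lon) {
        // box crosses the 180 meridian: accept either the western or the eastern slice
        boolean lonMatches = point.lon() < 0
                ? bottomRight.lon >= point.lon()
                : topLeft.lon <= point.lon();
        return lonMatches && topLeft.lat >= point.lat() && bottomRight.lat <= point.lat();
    }
    return topLeft.lon <= point.lon() && bottomRight.lon >= point.lon()
            && topLeft.lat >= point.lat() && bottomRight.lat <= point.lat();
}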

View File

@ -25,11 +25,11 @@ import org.apache.lucene.search.FieldComparatorSource;
import org.elasticsearch.ElasticSearchIllegalArgumentException;
import org.elasticsearch.common.unit.DistanceUnit;
import org.elasticsearch.index.cache.field.data.FieldDataCache;
import org.elasticsearch.index.field.data.FieldDataType;
import org.elasticsearch.index.field.data.NumericFieldData;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.xcontent.GeoPointFieldMapper;
import org.elasticsearch.index.mapper.xcontent.geo.GeoPoint;
import org.elasticsearch.index.mapper.xcontent.geo.GeoPointFieldData;
import org.elasticsearch.index.mapper.xcontent.geo.GeoPointFieldDataType;
import java.io.IOException;
@ -78,9 +78,7 @@ public class GeoDistanceDataComparator extends FieldComparator {
protected final String fieldName;
protected final String indexLatFieldName;
protected final String indexLonFieldName;
protected final String indexFieldName;
protected final double lat;
@ -92,11 +90,7 @@ public class GeoDistanceDataComparator extends FieldComparator {
protected final FieldDataCache fieldDataCache;
protected final FieldDataType fieldDataType;
protected NumericFieldData latFieldData;
protected NumericFieldData lonFieldData;
protected GeoPointFieldData fieldData;
private final double[] values;
@ -113,23 +107,18 @@ public class GeoDistanceDataComparator extends FieldComparator {
this.geoDistance = geoDistance;
this.fieldDataCache = fieldDataCache;
FieldMapper mapper = mapperService.smartNameFieldMapper(fieldName + GeoPointFieldMapper.Names.LAT_SUFFIX);
FieldMapper mapper = mapperService.smartNameFieldMapper(fieldName);
if (mapper == null) {
throw new ElasticSearchIllegalArgumentException("No mapping found for field [" + fieldName + "] for geo distance sort");
}
this.indexLatFieldName = mapper.names().indexName();
mapper = mapperService.smartNameFieldMapper(fieldName + GeoPointFieldMapper.Names.LON_SUFFIX);
if (mapper == null) {
throw new ElasticSearchIllegalArgumentException("No mapping found for field [" + fieldName + "] for geo distance sort");
if (mapper.fieldDataType() != GeoPointFieldDataType.TYPE) {
throw new ElasticSearchIllegalArgumentException("field [" + fieldName + "] is not a geo_point field");
}
this.indexLonFieldName = mapper.names().indexName();
this.fieldDataType = mapper.fieldDataType();
this.indexFieldName = mapper.names().indexName();
}
@Override public void setNextReader(IndexReader reader, int docBase) throws IOException {
latFieldData = (NumericFieldData) fieldDataCache.cache(fieldDataType, reader, indexLatFieldName);
lonFieldData = (NumericFieldData) fieldDataCache.cache(fieldDataType, reader, indexLonFieldName);
fieldData = (GeoPointFieldData) fieldDataCache.cache(GeoPointFieldDataType.TYPE, reader, indexFieldName);
}
@Override public int compare(int slot1, int slot2) {
@ -146,11 +135,12 @@ public class GeoDistanceDataComparator extends FieldComparator {
@Override public int compareBottom(int doc) {
double distance;
if (!latFieldData.hasValue(doc) || !lonFieldData.hasValue(doc)) {
if (!fieldData.hasValue(doc)) {
// is this true? push this to the "end"
distance = Double.MAX_VALUE;
} else {
distance = geoDistance.calculate(lat, lon, latFieldData.doubleValue(doc), lonFieldData.doubleValue(doc), unit);
GeoPoint point = fieldData.value(doc);
distance = geoDistance.calculate(lat, lon, point.lat(), point.lon(), unit);
}
final double v2 = distance;
if (bottom > v2) {
@ -164,11 +154,12 @@ public class GeoDistanceDataComparator extends FieldComparator {
@Override public void copy(int slot, int doc) {
double distance;
if (!latFieldData.hasValue(doc) || !lonFieldData.hasValue(doc)) {
if (!fieldData.hasValue(doc)) {
// is this true? push this to the "end"
distance = Double.MAX_VALUE;
} else {
distance = geoDistance.calculate(lat, lon, latFieldData.doubleValue(doc), lonFieldData.doubleValue(doc), unit);
GeoPoint point = fieldData.value(doc);
distance = geoDistance.calculate(lat, lon, point.lat(), point.lon(), unit);
}
values[slot] = distance;
}
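
The comparator now resolves one mapped geo_point field and reads a GeoPoint per document; documents without a value are pushed to the end of the sort by giving them Double.MAX_VALUE as their distance. A hedged sketch of the per-document computation shared by copy() and compareBottom() above, using only members shown in this diff:

// Hedged sketch, not part of the commit: the distance used by copy()/compareBottom().
private double distanceOrMax(int doc) {
    if (!fieldData.hasValue(doc)) {
        return Double.MAX_VALUE; // missing value sorts last
    }
    GeoPoint point = fieldData.value(doc);
    return geoDistance.calculate(lat, lon, point.lat(), point.lon(), unit);
}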

View File

@ -25,8 +25,9 @@ import org.apache.lucene.search.Filter;
import org.elasticsearch.common.lucene.docset.GetDocSet;
import org.elasticsearch.common.unit.DistanceUnit;
import org.elasticsearch.index.cache.field.data.FieldDataCache;
import org.elasticsearch.index.field.data.FieldDataType;
import org.elasticsearch.index.field.data.NumericFieldData;
import org.elasticsearch.index.mapper.xcontent.geo.GeoPoint;
import org.elasticsearch.index.mapper.xcontent.geo.GeoPointFieldData;
import org.elasticsearch.index.mapper.xcontent.geo.GeoPointFieldDataType;
import java.io.IOException;
@ -43,23 +44,16 @@ public class GeoDistanceFilter extends Filter {
private final GeoDistance geoDistance;
private final String latFieldName;
private final String lonFieldName;
private final FieldDataType fieldDataType;
private final String fieldName;
private final FieldDataCache fieldDataCache;
public GeoDistanceFilter(double lat, double lon, double distance, GeoDistance geoDistance, String latFieldName, String lonFieldName,
FieldDataType fieldDataType, FieldDataCache fieldDataCache) {
public GeoDistanceFilter(double lat, double lon, double distance, GeoDistance geoDistance, String fieldName, FieldDataCache fieldDataCache) {
this.lat = lat;
this.lon = lon;
this.distance = distance;
this.geoDistance = geoDistance;
this.latFieldName = latFieldName;
this.lonFieldName = lonFieldName;
this.fieldDataType = fieldDataType;
this.fieldName = fieldName;
this.fieldDataCache = fieldDataCache;
}
@ -79,39 +73,34 @@ public class GeoDistanceFilter extends Filter {
return geoDistance;
}
public String latFieldName() {
return latFieldName;
}
public String lonFieldName() {
return lonFieldName;
public String fieldName() {
return fieldName;
}
@Override public DocIdSet getDocIdSet(IndexReader reader) throws IOException {
final NumericFieldData latFieldData = (NumericFieldData) fieldDataCache.cache(fieldDataType, reader, latFieldName);
final NumericFieldData lonFieldData = (NumericFieldData) fieldDataCache.cache(fieldDataType, reader, lonFieldName);
final GeoPointFieldData fieldData = (GeoPointFieldData) fieldDataCache.cache(GeoPointFieldDataType.TYPE, reader, fieldName);
return new GetDocSet(reader.maxDoc()) {
@Override public boolean isCacheable() {
return false;
}
@Override public boolean get(int doc) throws IOException {
if (!latFieldData.hasValue(doc) || !lonFieldData.hasValue(doc)) {
if (!fieldData.hasValue(doc)) {
return false;
}
if (latFieldData.multiValued()) {
double[] lats = latFieldData.doubleValues(doc);
double[] lons = latFieldData.doubleValues(doc);
for (int i = 0; i < lats.length; i++) {
double d = geoDistance.calculate(lat, lon, lats[i], lons[i], DistanceUnit.MILES);
if (fieldData.multiValued()) {
GeoPoint[] points = fieldData.values(doc);
for (GeoPoint point : points) {
double d = geoDistance.calculate(lat, lon, point.lat(), point.lon(), DistanceUnit.MILES);
if (d < distance) {
return true;
}
}
return false;
} else {
double d = geoDistance.calculate(lat, lon, latFieldData.doubleValue(doc), lonFieldData.doubleValue(doc), DistanceUnit.MILES);
GeoPoint point = fieldData.value(doc);
double d = geoDistance.calculate(lat, lon, point.lat(), point.lon(), DistanceUnit.MILES);
return d < distance;
}
}
@ -123,16 +112,13 @@ public class GeoDistanceFilter extends Filter {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
GeoDistanceFilter that = (GeoDistanceFilter) o;
GeoDistanceFilter filter = (GeoDistanceFilter) o;
if (Double.compare(that.distance, distance) != 0) return false;
if (Double.compare(that.lat, lat) != 0) return false;
if (Double.compare(that.lon, lon) != 0) return false;
if (geoDistance != that.geoDistance) return false;
if (latFieldName != null ? !latFieldName.equals(that.latFieldName) : that.latFieldName != null)
return false;
if (lonFieldName != null ? !lonFieldName.equals(that.lonFieldName) : that.lonFieldName != null)
return false;
if (Double.compare(filter.distance, distance) != 0) return false;
if (Double.compare(filter.lat, lat) != 0) return false;
if (Double.compare(filter.lon, lon) != 0) return false;
if (fieldName != null ? !fieldName.equals(filter.fieldName) : filter.fieldName != null) return false;
if (geoDistance != filter.geoDistance) return false;
return true;
}
@ -148,8 +134,7 @@ public class GeoDistanceFilter extends Filter {
temp = distance != +0.0d ? Double.doubleToLongBits(distance) : 0L;
result = 31 * result + (int) (temp ^ (temp >>> 32));
result = 31 * result + (geoDistance != null ? geoDistance.hashCode() : 0);
result = 31 * result + (latFieldName != null ? latFieldName.hashCode() : 0);
result = 31 * result + (lonFieldName != null ? lonFieldName.hashCode() : 0);
result = 31 * result + (fieldName != null ? fieldName.hashCode() : 0);
return result;
}
}
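
The filter's constructor collapses to a single field name, and equals()/hashCode() now key on that name instead of the lat/lon field pair. A hedged construction sketch; "location" and fieldDataCache are placeholders, and note that getDocIdSet() above computes distances in DistanceUnit.MILES, so the radius passed here is compared in miles:

// Hedged usage sketch, not from the commit.
Filter withinTenMiles = new GeoDistanceFilter(
        40.7143528, -74.0059731,  // query origin (lat, lon)
        10,                       // radius, compared against miles per the filter above
        GeoDistance.ARC,
        "location",               // indexed geo_point field (illustrative)
        fieldDataCache);          // obtained from the index cache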

View File

@ -113,7 +113,9 @@ public class GeoHashUtils {
}
public static double[] decode(String geohash) {
return decode(geohash, new double[2]);
double[] ret = new double[2];
decode(geohash, ret);
return ret;
}
/**
@ -122,7 +124,7 @@ public class GeoHashUtils {
* @param geohash Geohash to decode
* @return Array with the latitude at index 0, and longitude at index 1
*/
public static double[] decode(String geohash, double[] ret) {
public static void decode(String geohash, double[] ret) {
// double[] latInterval = {-90.0, 90.0};
// double[] lngInterval = {-180.0, 180.0};
double latInterval0 = -90.0;
@ -162,6 +164,6 @@ public class GeoHashUtils {
// longitude = (lngInterval[0] + lngInterval[1]) / 2D;
ret[1] = (lngInterval0 + lngInterval1) / 2D;
return ret;
// return ret;
}
}
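
decode(String, double[]) now fills a caller-supplied array and returns void, which lets the geo field data loader reuse one scratch array while collecting terms instead of allocating per term; the one-argument overload keeps the allocating behaviour. A small sketch of both call styles (the geohash literal is just an example):

// Hedged sketch of the two decode styles after this change.
double[] latLon = GeoHashUtils.decode("u4pruydqqvj");  // allocating convenience form
double lat = latLon[0];
double lon = latLon[1];

double[] scratch = new double[2];
GeoHashUtils.decode("u4pruydqqvj", scratch);           // fills the reusable array, returns void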

View File

@ -24,8 +24,9 @@ import org.apache.lucene.search.DocIdSet;
import org.apache.lucene.search.Filter;
import org.elasticsearch.common.lucene.docset.GetDocSet;
import org.elasticsearch.index.cache.field.data.FieldDataCache;
import org.elasticsearch.index.field.data.FieldDataType;
import org.elasticsearch.index.field.data.NumericFieldData;
import org.elasticsearch.index.mapper.xcontent.geo.GeoPoint;
import org.elasticsearch.index.mapper.xcontent.geo.GeoPointFieldData;
import org.elasticsearch.index.mapper.xcontent.geo.GeoPointFieldDataType;
import java.io.IOException;
@ -36,19 +37,13 @@ public class GeoPolygonFilter extends Filter {
private final Point[] points;
private final String latFieldName;
private final String lonFieldName;
private final FieldDataType fieldDataType;
private final String fieldName;
private final FieldDataCache fieldDataCache;
public GeoPolygonFilter(Point[] points, String latFieldName, String lonFieldName, FieldDataType fieldDataType, FieldDataCache fieldDataCache) {
public GeoPolygonFilter(Point[] points, String fieldName, FieldDataCache fieldDataCache) {
this.points = points;
this.latFieldName = latFieldName;
this.lonFieldName = lonFieldName;
this.fieldDataType = fieldDataType;
this.fieldName = fieldName;
this.fieldDataCache = fieldDataCache;
}
@ -56,36 +51,29 @@ public class GeoPolygonFilter extends Filter {
return points;
}
public String latFieldName() {
return latFieldName;
}
public String lonFieldName() {
return lonFieldName;
public String fieldName() {
return this.fieldName;
}
@Override public DocIdSet getDocIdSet(IndexReader reader) throws IOException {
final NumericFieldData latFieldData = (NumericFieldData) fieldDataCache.cache(fieldDataType, reader, latFieldName);
final NumericFieldData lonFieldData = (NumericFieldData) fieldDataCache.cache(fieldDataType, reader, lonFieldName);
final GeoPointFieldData fieldData = (GeoPointFieldData) fieldDataCache.cache(GeoPointFieldDataType.TYPE, reader, fieldName);
return new GetDocSet(reader.maxDoc()) {
@Override public boolean get(int doc) throws IOException {
if (!latFieldData.hasValue(doc) || !lonFieldData.hasValue(doc)) {
if (!fieldData.hasValue(doc)) {
return false;
}
if (latFieldData.multiValued()) {
double[] lats = latFieldData.doubleValues(doc);
double[] lons = latFieldData.doubleValues(doc);
for (int i = 0; i < lats.length; i++) {
if (pointInPolygon(points, lats[i], lons[i])) {
if (fieldData.multiValued()) {
GeoPoint[] docPoints = fieldData.values(doc);
for (GeoPoint docPoint : docPoints) {
if (pointInPolygon(points, docPoint.lat(), docPoint.lon())) {
return true;
}
}
} else {
double lat = latFieldData.doubleValue(doc);
double lon = lonFieldData.doubleValue(doc);
return pointInPolygon(points, lat, lon);
GeoPoint point = fieldData.value(doc);
return pointInPolygon(points, point.lat(), point.lon());
}
return false;
}
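
The polygon filter now pulls GeoPoints from the single field and delegates the actual test to pointInPolygon(points, lat, lon), whose body sits outside this hunk. For reference, a hedged sketch of the standard ray-casting test such a method typically performs; this is not the commit's implementation, and it assumes Point exposes public lat/lon fields as the code above does:

// Hedged sketch, not from this commit: ray casting over the polygon's vertices.
private static boolean pointInPolygon(Point[] points, double lat, double lon) {
    boolean inside = false;
    for (int i = 0, j = points.length - 1; i < points.length; j = i++) {
        if ((points[i].lat > lat) != (points[j].lat > lat)
                && lon < (points[j].lon - points[i].lon) * (lat - points[i].lat)
                        / (points[j].lat - points[i].lat) + points[i].lon) {
            inside = !inside;
        }
    }
    return inside;
}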

View File

@ -20,9 +20,7 @@
package org.elasticsearch.index.field.data;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.search.FieldComparator;
import org.elasticsearch.common.thread.ThreadLocals;
import org.elasticsearch.index.cache.field.data.FieldDataCache;
import java.io.IOException;
@ -89,8 +87,6 @@ public abstract class FieldData<Doc extends DocFieldData> {
*/
public abstract FieldDataType type();
public abstract FieldComparator newComparator(FieldDataCache fieldDataCache, int numHits, String field, int sortPos, boolean reversed);
public static FieldData load(FieldDataType type, IndexReader reader, String fieldName) throws IOException {
return type.load(reader, fieldName);
}

View File

@ -21,9 +21,7 @@ package org.elasticsearch.index.field.data.doubles;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.search.FieldCache;
import org.apache.lucene.search.FieldComparator;
import org.elasticsearch.common.trove.TDoubleArrayList;
import org.elasticsearch.index.cache.field.data.FieldDataCache;
import org.elasticsearch.index.field.data.FieldDataType;
import org.elasticsearch.index.field.data.NumericFieldData;
import org.elasticsearch.index.field.data.support.FieldDataLoader;
@ -44,10 +42,6 @@ public abstract class DoubleFieldData extends NumericFieldData<DoubleDocFieldDat
this.values = values;
}
@Override public FieldComparator newComparator(FieldDataCache fieldDataCache, int numHits, String field, int sortPos, boolean reversed) {
return new DoubleFieldDataComparator(numHits, field, fieldDataCache);
}
abstract public double value(int docId);
abstract public double[] values(int docId);

View File

@ -21,9 +21,7 @@ package org.elasticsearch.index.field.data.floats;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.search.FieldCache;
import org.apache.lucene.search.FieldComparator;
import org.elasticsearch.common.trove.TFloatArrayList;
import org.elasticsearch.index.cache.field.data.FieldDataCache;
import org.elasticsearch.index.field.data.FieldDataType;
import org.elasticsearch.index.field.data.NumericFieldData;
import org.elasticsearch.index.field.data.support.FieldDataLoader;
@ -44,10 +42,6 @@ public abstract class FloatFieldData extends NumericFieldData<FloatDocFieldData>
this.values = values;
}
@Override public FieldComparator newComparator(FieldDataCache fieldDataCache, int numHits, String field, int sortPos, boolean reversed) {
return new FloatFieldDataComparator(numHits, field, fieldDataCache);
}
abstract public float value(int docId);
abstract public float[] values(int docId);

View File

@ -21,9 +21,7 @@ package org.elasticsearch.index.field.data.ints;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.search.FieldCache;
import org.apache.lucene.search.FieldComparator;
import org.elasticsearch.common.trove.TIntArrayList;
import org.elasticsearch.index.cache.field.data.FieldDataCache;
import org.elasticsearch.index.field.data.FieldDataType;
import org.elasticsearch.index.field.data.NumericFieldData;
import org.elasticsearch.index.field.data.support.FieldDataLoader;
@ -44,10 +42,6 @@ public abstract class IntFieldData extends NumericFieldData<IntDocFieldData> {
this.values = values;
}
@Override public FieldComparator newComparator(FieldDataCache fieldDataCache, int numHits, String field, int sortPos, boolean reversed) {
return new IntFieldDataComparator(numHits, field, fieldDataCache);
}
abstract public int value(int docId);
abstract public int[] values(int docId);

View File

@ -21,11 +21,9 @@ package org.elasticsearch.index.field.data.longs;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.search.FieldCache;
import org.apache.lucene.search.FieldComparator;
import org.elasticsearch.common.joda.time.MutableDateTime;
import org.elasticsearch.common.thread.ThreadLocals;
import org.elasticsearch.common.trove.TLongArrayList;
import org.elasticsearch.index.cache.field.data.FieldDataCache;
import org.elasticsearch.index.field.data.FieldDataType;
import org.elasticsearch.index.field.data.NumericFieldData;
import org.elasticsearch.index.field.data.support.FieldDataLoader;
@ -53,10 +51,6 @@ public abstract class LongFieldData extends NumericFieldData<LongDocFieldData> {
this.values = values;
}
@Override public FieldComparator newComparator(FieldDataCache fieldDataCache, int numHits, String field, int sortPos, boolean reversed) {
return new LongFieldDataComparator(numHits, field, fieldDataCache);
}
abstract public long value(int docId);
abstract public long[] values(int docId);

View File

@ -21,9 +21,7 @@ package org.elasticsearch.index.field.data.shorts;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.search.FieldCache;
import org.apache.lucene.search.FieldComparator;
import org.elasticsearch.common.trove.TShortArrayList;
import org.elasticsearch.index.cache.field.data.FieldDataCache;
import org.elasticsearch.index.field.data.FieldDataType;
import org.elasticsearch.index.field.data.NumericFieldData;
import org.elasticsearch.index.field.data.support.FieldDataLoader;
@ -44,10 +42,6 @@ public abstract class ShortFieldData extends NumericFieldData<ShortDocFieldData>
this.values = values;
}
@Override public FieldComparator newComparator(FieldDataCache fieldDataCache, int numHits, String field, int sortPos, boolean reversed) {
return new ShortFieldDataComparator(numHits, field, fieldDataCache);
}
abstract public short value(int docId);
abstract public short[] values(int docId);

View File

@ -19,10 +19,8 @@
package org.elasticsearch.index.field.data.strings;
import org.apache.lucene.search.FieldComparator;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.thread.ThreadLocals;
import org.elasticsearch.index.cache.field.data.FieldDataCache;
/**
* @author kimchy (shay.banon)
@ -49,10 +47,6 @@ public class MultiValueStringFieldData extends StringFieldData {
this.order = order;
}
@Override public FieldComparator newComparator(FieldDataCache fieldDataCache, int numHits, String field, int sortPos, boolean reversed) {
return new StringValFieldDataComparator(numHits, field, fieldDataCache);
}
@Override public boolean multiValued() {
return true;
}

View File

@ -19,10 +19,8 @@
package org.elasticsearch.index.field.data.strings;
import org.apache.lucene.search.FieldComparator;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.thread.ThreadLocals;
import org.elasticsearch.index.cache.field.data.FieldDataCache;
/**
* @author kimchy (shay.banon)
@ -43,10 +41,6 @@ public class SingleValueStringFieldData extends StringFieldData {
this.order = order;
}
@Override public FieldComparator newComparator(FieldDataCache fieldDataCache, int numHits, String field, int sortPos, boolean reversed) {
return new StringOrdValFieldDataComparator(numHits, field, sortPos, reversed, fieldDataCache);
}
int[] order() {
return order;
}

View File

@ -35,6 +35,7 @@ import org.elasticsearch.index.analysis.AnalysisService;
import org.elasticsearch.index.mapper.DocumentMapperParser;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.xcontent.geo.GeoPointFieldMapper;
import org.elasticsearch.index.settings.IndexSettings;
import javax.annotation.Nullable;

View File

@ -0,0 +1,61 @@
/*
* Licensed to Elastic Search and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Elastic Search licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.mapper.xcontent.geo;
import org.elasticsearch.common.lucene.geo.GeoHashUtils;
/**
* @author kimchy (shay.banon)
*/
public class GeoPoint {
private final double lat;
private final double lon;
public GeoPoint(double lat, double lon) {
this.lat = lat;
this.lon = lon;
}
public final double lat() {
return this.lat;
}
public final double getLat() {
return this.lat;
}
public final double lon() {
return this.lon;
}
public final double getLon() {
return this.lon;
}
public final String geohash() {
return GeoHashUtils.encode(lat, lon);
}
public final String getGeohash() {
return GeoHashUtils.encode(lat, lon);
}
}
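
GeoPoint is a small immutable value holder with both short (lat()/lon()) and bean-style (getLat()/getLon()) accessors, plus a geohash() that re-encodes through GeoHashUtils. A trivial hedged sketch (the coordinates are arbitrary):

// Hedged sketch: a point and its geohash accessor.
GeoPoint point = new GeoPoint(57.64911, 10.40744);
String hash = point.geohash();   // same as GeoHashUtils.encode(57.64911, 10.40744)
double lat = point.lat();        // identical to point.getLat()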

View File

@ -0,0 +1,40 @@
/*
* Licensed to Elastic Search and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Elastic Search licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.mapper.xcontent.geo;
import org.elasticsearch.index.field.data.DocFieldData;
/**
* @author kimchy (shay.banon)
*/
public class GeoPointDocFieldData extends DocFieldData<GeoPointFieldData> {
public GeoPointDocFieldData(GeoPointFieldData fieldData) {
super(fieldData);
}
public GeoPoint getValue() {
return fieldData.value(docId);
}
public GeoPoint[] getValues() {
return fieldData.values(docId);
}
}

View File

@ -0,0 +1,110 @@
/*
* Licensed to Elastic Search and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Elastic Search licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.mapper.xcontent.geo;
import org.apache.lucene.index.IndexReader;
import org.elasticsearch.common.lucene.geo.GeoHashUtils;
import org.elasticsearch.index.field.data.FieldData;
import org.elasticsearch.index.field.data.FieldDataType;
import org.elasticsearch.index.field.data.support.FieldDataLoader;
import java.io.IOException;
import java.util.ArrayList;
/**
* @author kimchy (shay.banon)
*/
public abstract class GeoPointFieldData extends FieldData<GeoPointDocFieldData> {
public static final GeoPoint[] EMPTY_ARRAY = new GeoPoint[0];
protected final GeoPoint[] values;
protected GeoPointFieldData(String fieldName, GeoPoint[] values) {
super(fieldName);
this.values = values;
}
abstract public GeoPoint value(int docId);
abstract public GeoPoint[] values(int docId);
@Override public GeoPointDocFieldData docFieldData(int docId) {
return super.docFieldData(docId);
}
@Override public String stringValue(int docId) {
return value(docId).geohash();
}
@Override protected GeoPointDocFieldData createFieldData() {
return new GeoPointDocFieldData(this);
}
@Override public FieldDataType type() {
return GeoPointFieldDataType.TYPE;
}
@Override public void forEachValue(StringValueProc proc) {
for (int i = 1; i < values.length; i++) {
proc.onValue(values[i].geohash());
}
}
public void forEachValue(ValueProc proc) {
for (int i = 1; i < values.length; i++) {
proc.onValue(values[i]);
}
}
public static interface ValueProc {
void onValue(GeoPoint value);
}
public static GeoPointFieldData load(IndexReader reader, String field) throws IOException {
return FieldDataLoader.load(reader, field, new StringTypeLoader());
}
static class StringTypeLoader extends FieldDataLoader.FreqsTypeLoader<GeoPointFieldData> {
private final ArrayList<GeoPoint> terms = new ArrayList<GeoPoint>();
private final double[] latlon = new double[2];
StringTypeLoader() {
super();
// the first one indicates null value
terms.add(null);
}
@Override public void collectTerm(String term) {
GeoHashUtils.decode(term, latlon);
terms.add(new GeoPoint(latlon[0], latlon[1]));
}
@Override public GeoPointFieldData buildSingleValue(String field, int[] order) {
return new SingleValueGeoPointFieldData(field, order, terms.toArray(new GeoPoint[terms.size()]));
}
@Override public GeoPointFieldData buildMultiValue(String field, int[][] order) {
return new MultiValueGeoPointFieldData(field, order, terms.toArray(new GeoPoint[terms.size()]));
}
}
}
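
Loading works off the field's geohash terms: each term is decoded into a GeoPoint, the deduplicated points go into values with a null sentinel at index 0, and per-document ordinals index into that array, which is why both forEachValue loops above start at 1. A hedged sketch of walking a segment's distinct points through the new ValueProc callback; reader and the "location" field name are assumed, and IOException handling is omitted:

// Hedged sketch, not part of the commit.
GeoPointFieldData fieldData = GeoPointFieldData.load(reader, "location");
fieldData.forEachValue(new GeoPointFieldData.ValueProc() {
    public void onValue(GeoPoint value) {
        System.out.println(value.lat() + "," + value.lon());  // one line per distinct point
    }
});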

View File

@ -0,0 +1,53 @@
/*
* Licensed to Elastic Search and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Elastic Search licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.mapper.xcontent.geo;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.search.FieldComparator;
import org.apache.lucene.search.FieldComparatorSource;
import org.elasticsearch.index.cache.field.data.FieldDataCache;
import org.elasticsearch.index.field.data.FieldDataType;
import org.elasticsearch.index.field.data.strings.StringOrdValFieldDataComparator;
import java.io.IOException;
/**
* @author kimchy (shay.banon)
*/
public class GeoPointFieldDataType implements FieldDataType<GeoPointFieldData> {
public static final GeoPointFieldDataType TYPE = new GeoPointFieldDataType();
@Override public Class<GeoPointFieldData> fieldDataClass() {
return GeoPointFieldData.class;
}
@Override public FieldComparatorSource newFieldComparatorSource(final FieldDataCache cache) {
return new FieldComparatorSource() {
@Override public FieldComparator newComparator(String fieldname, int numHits, int sortPos, boolean reversed) throws IOException {
return new StringOrdValFieldDataComparator(numHits, fieldname, sortPos, reversed, cache);
}
};
}
@Override public GeoPointFieldData load(IndexReader reader, String fieldName) throws IOException {
return GeoPointFieldData.load(reader, fieldName);
}
}

View File

@ -17,18 +17,20 @@
* under the License.
*/
package org.elasticsearch.index.mapper.xcontent;
package org.elasticsearch.index.mapper.xcontent.geo;
import org.apache.lucene.document.Field;
import org.elasticsearch.ElasticSearchIllegalArgumentException;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.lucene.geo.GeoHashUtils;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.support.XContentMapValues;
import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.field.data.FieldDataType;
import org.elasticsearch.index.mapper.FieldMapperListener;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MergeMappingException;
import org.elasticsearch.index.mapper.xcontent.*;
import java.io.IOException;
import java.util.Map;
@ -70,15 +72,11 @@ public class GeoPointFieldMapper implements XContentMapper, ArrayValueMapperPars
private ContentPath.Type pathType = Defaults.PATH_TYPE;
private boolean enableLatLon = true;
private boolean enableGeohash = false;
private String resolution = "64";
private boolean enableLatLon = false;
private Integer precisionStep;
private int geohashPrecision = GeoHashUtils.PRECISION;
private int precision = GeoHashUtils.PRECISION;
private Field.Store store = Defaults.STORE;
@ -97,23 +95,13 @@ public class GeoPointFieldMapper implements XContentMapper, ArrayValueMapperPars
return this;
}
public Builder enableGeohash(boolean enableGeohash) {
this.enableGeohash = enableGeohash;
return this;
}
public Builder resolution(String resolution) {
this.resolution = resolution;
return this;
}
public Builder precisionStep(int precisionStep) {
this.precisionStep = precisionStep;
return this;
}
public Builder geohashPrecision(int geohashPrecision) {
this.geohashPrecision = geohashPrecision;
public Builder precision(int precision) {
this.precision = precision;
return this;
}
@ -126,23 +114,16 @@ public class GeoPointFieldMapper implements XContentMapper, ArrayValueMapperPars
ContentPath.Type origPathType = context.path().pathType();
context.path().pathType(pathType);
GeoHashFieldMapper geohashMapper = new GeoHashFieldMapper.Builder(name).includeInAll(false).build(context);
NumberFieldMapper latMapper = null;
NumberFieldMapper lonMapper = null;
StringFieldMapper geohashMapper = null;
context.path().add(name);
if (enableLatLon) {
NumberFieldMapper.Builder latMapperBuilder;
NumberFieldMapper.Builder lonMapperBuilder;
if ("32".equals(resolution)) {
latMapperBuilder = floatField(Names.LAT).includeInAll(false);
lonMapperBuilder = floatField(Names.LON).includeInAll(false);
} else if ("64".equals(resolution)) {
latMapperBuilder = doubleField(Names.LAT).includeInAll(false);
lonMapperBuilder = doubleField(Names.LON).includeInAll(false);
} else {
throw new ElasticSearchIllegalArgumentException("Can't handle geo_point resolution [" + resolution + "]");
}
NumberFieldMapper.Builder latMapperBuilder = doubleField(Names.LAT).includeInAll(false);
NumberFieldMapper.Builder lonMapperBuilder = doubleField(Names.LON).includeInAll(false);
if (precisionStep != null) {
latMapperBuilder.precisionStep(precisionStep);
lonMapperBuilder.precisionStep(precisionStep);
@ -150,14 +131,11 @@ public class GeoPointFieldMapper implements XContentMapper, ArrayValueMapperPars
latMapper = (NumberFieldMapper) latMapperBuilder.includeInAll(false).store(store).build(context);
lonMapper = (NumberFieldMapper) lonMapperBuilder.includeInAll(false).store(store).build(context);
}
if (enableGeohash) {
geohashMapper = stringField(Names.GEOHASH).includeInAll(false).build(context);
}
context.path().remove();
context.path().pathType(origPathType);
return new GeoPointFieldMapper(name, pathType, enableLatLon, enableGeohash, resolution, precisionStep, geohashPrecision, latMapper, lonMapper, geohashMapper);
return new GeoPointFieldMapper(name, pathType, enableLatLon, precisionStep, precision, latMapper, lonMapper, geohashMapper);
}
}
@ -174,14 +152,10 @@ public class GeoPointFieldMapper implements XContentMapper, ArrayValueMapperPars
builder.store(parseStore(name, fieldNode.toString()));
} else if (fieldName.equals("lat_lon")) {
builder.enableLatLon(XContentMapValues.nodeBooleanValue(fieldNode));
} else if (fieldName.equals("geohash")) {
builder.enableGeohash(XContentMapValues.nodeBooleanValue(fieldNode));
} else if (fieldName.equals("resolution")) {
builder.resolution(Integer.toString(XContentMapValues.nodeIntegerValue(fieldNode)));
} else if (fieldName.equals("precisionStep")) {
} else if (fieldName.equals("precision_step")) {
builder.precisionStep(XContentMapValues.nodeIntegerValue(fieldNode));
} else if (fieldName.equals("geohash_precision") || fieldName.equals("geohashPrecision")) {
builder.geohashPrecision(XContentMapValues.nodeIntegerValue(fieldNode));
} else if (fieldName.equals("precision")) {
builder.precision(XContentMapValues.nodeIntegerValue(fieldNode));
}
}
return builder;
@ -194,13 +168,9 @@ public class GeoPointFieldMapper implements XContentMapper, ArrayValueMapperPars
private final boolean enableLatLon;
private final boolean enableGeohash;
private final String resolution;
private final Integer precisionStep;
private final int geohashPrecision;
private final int precision;
private final NumberFieldMapper latMapper;
@ -208,15 +178,13 @@ public class GeoPointFieldMapper implements XContentMapper, ArrayValueMapperPars
private final StringFieldMapper geohashMapper;
public GeoPointFieldMapper(String name, ContentPath.Type pathType, boolean enableLatLon, boolean enableGeohash, String resolution, Integer precisionStep, int geohashPrecision,
public GeoPointFieldMapper(String name, ContentPath.Type pathType, boolean enableLatLon, Integer precisionStep, int precision,
NumberFieldMapper latMapper, NumberFieldMapper lonMapper, StringFieldMapper geohashMapper) {
this.name = name;
this.pathType = pathType;
this.enableLatLon = enableLatLon;
this.enableGeohash = enableGeohash;
this.resolution = resolution;
this.precisionStep = precisionStep;
this.geohashPrecision = geohashPrecision;
this.precision = precision;
this.latMapper = latMapper;
this.lonMapper = lonMapper;
@ -318,19 +286,19 @@ public class GeoPointFieldMapper implements XContentMapper, ArrayValueMapperPars
}
private void parseLatLon(ParseContext context, Double lat, Double lon) throws IOException {
context.externalValue(GeoHashUtils.encode(lat, lon, precision));
geohashMapper.parse(context);
if (enableLatLon) {
context.externalValue(lat);
latMapper.parse(context);
context.externalValue(lon);
lonMapper.parse(context);
}
if (enableGeohash) {
context.externalValue(GeoHashUtils.encode(lat, lon, geohashPrecision));
geohashMapper.parse(context);
}
}
private void parseGeohash(ParseContext context, String geohash) throws IOException {
context.externalValue(geohash);
geohashMapper.parse(context);
if (enableLatLon) {
double[] values = GeoHashUtils.decode(geohash);
context.externalValue(values[0]);
@ -338,10 +306,6 @@ public class GeoPointFieldMapper implements XContentMapper, ArrayValueMapperPars
context.externalValue(values[1]);
lonMapper.parse(context);
}
if (enableGeohash) {
context.externalValue(geohash);
geohashMapper.parse(context);
}
}
@Override public void merge(XContentMapper mergeWith, MergeContext mergeContext) throws MergeMappingException {
@ -349,13 +313,11 @@ public class GeoPointFieldMapper implements XContentMapper, ArrayValueMapperPars
}
@Override public void traverse(FieldMapperListener fieldMapperListener) {
geohashMapper.traverse(fieldMapperListener);
if (enableLatLon) {
latMapper.traverse(fieldMapperListener);
lonMapper.traverse(fieldMapperListener);
}
if (enableGeohash) {
geohashMapper.traverse(fieldMapperListener);
}
}
@Override public void toXContent(XContentBuilder builder, Params params) throws IOException {
@ -363,18 +325,55 @@ public class GeoPointFieldMapper implements XContentMapper, ArrayValueMapperPars
builder.field("type", CONTENT_TYPE);
builder.field("path", pathType.name().toLowerCase());
builder.field("lat_lon", enableLatLon);
builder.field("geohash", enableGeohash);
builder.field("resolution", resolution);
if (latMapper != null) {
builder.field("store", latMapper.store().name().toLowerCase());
} else if (geohashMapper != null) {
builder.field("store", geohashMapper.store().name().toLowerCase());
}
builder.field("geohash_precision", geohashPrecision);
builder.field("precision", precision);
if (precisionStep != null) {
builder.field("precision_step", precisionStep);
}
builder.endObject();
}
public static class GeoHashFieldMapper extends StringFieldMapper {
public static class Builder extends AbstractFieldMapper.OpenBuilder<Builder, StringFieldMapper> {
protected String nullValue = Defaults.NULL_VALUE;
public Builder(String name) {
super(name);
builder = this;
}
public Builder nullValue(String nullValue) {
this.nullValue = nullValue;
return this;
}
@Override public Builder includeInAll(Boolean includeInAll) {
this.includeInAll = includeInAll;
return this;
}
@Override public GeoHashFieldMapper build(BuilderContext context) {
GeoHashFieldMapper fieldMapper = new GeoHashFieldMapper(buildNames(context),
index, store, termVector, boost, omitNorms, omitTermFreqAndPositions, nullValue,
indexAnalyzer, searchAnalyzer);
fieldMapper.includeInAll(includeInAll);
return fieldMapper;
}
}
public GeoHashFieldMapper(Names names, Field.Index index, Field.Store store, Field.TermVector termVector, float boost, boolean omitNorms, boolean omitTermFreqAndPositions, String nullValue, NamedAnalyzer indexAnalyzer, NamedAnalyzer searchAnalyzer) {
super(names, index, store, termVector, boost, omitNorms, omitTermFreqAndPositions, nullValue, indexAnalyzer, searchAnalyzer);
}
@Override public FieldDataType fieldDataType() {
return GeoPointFieldDataType.TYPE;
}
}
}
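
After this change the geohash sub-field is always indexed (it now backs the geo_point field data), the numeric lat/lon sub-fields become opt-in via lat_lon, and the old geohash/resolution/geohash_precision options collapse into precision and precision_step. A hedged mapping sketch that exercises the options parsed above; the type and field names are illustrative:

// Hedged sketch, not part of the commit: building a mapping for the rewritten mapper.
XContentBuilder mapping = XContentFactory.jsonBuilder()
        .startObject().startObject("type1").startObject("properties")
            .startObject("location")
                .field("type", "geo_point")
                .field("lat_lon", true)        // also index numeric .lat / .lon sub-fields
                .field("precision_step", 4)    // forwarded to the numeric sub-fields
                .field("precision", 12)        // geohash precision, defaults to GeoHashUtils.PRECISION
            .endObject()
        .endObject().endObject().endObject();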

View File

@ -0,0 +1,91 @@
/*
* Licensed to Elastic Search and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Elastic Search licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.mapper.xcontent.geo;
import org.elasticsearch.common.thread.ThreadLocals;
/**
* @author kimchy (shay.banon)
*/
public class MultiValueGeoPointFieldData extends GeoPointFieldData {
private static final int VALUE_CACHE_SIZE = 100;
private static ThreadLocal<ThreadLocals.CleanableValue<GeoPoint[][]>> valuesCache = new ThreadLocal<ThreadLocals.CleanableValue<GeoPoint[][]>>() {
@Override protected ThreadLocals.CleanableValue<GeoPoint[][]> initialValue() {
GeoPoint[][] value = new GeoPoint[VALUE_CACHE_SIZE][];
for (int i = 0; i < value.length; i++) {
value[i] = new GeoPoint[i];
}
return new ThreadLocals.CleanableValue<GeoPoint[][]>(value);
}
};
// order with value 0 indicates no value
private final int[][] order;
public MultiValueGeoPointFieldData(String fieldName, int[][] order, GeoPoint[] values) {
super(fieldName, values);
this.order = order;
}
@Override public boolean multiValued() {
return true;
}
@Override public boolean hasValue(int docId) {
return order[docId] != null;
}
@Override public void forEachValueInDoc(int docId, StringValueInDocProc proc) {
int[] docOrders = order[docId];
if (docOrders == null) {
return;
}
for (int docOrder : docOrders) {
proc.onValue(docId, values[docOrder].geohash());
}
}
@Override public GeoPoint value(int docId) {
int[] docOrders = order[docId];
if (docOrders == null) {
return null;
}
return values[docOrders[0]];
}
@Override public GeoPoint[] values(int docId) {
int[] docOrders = order[docId];
if (docOrders == null) {
return EMPTY_ARRAY;
}
GeoPoint[] points;
if (docOrders.length < VALUE_CACHE_SIZE) {
points = valuesCache.get().get()[docOrders.length];
} else {
points = new GeoPoint[docOrders.length];
}
for (int i = 0; i < docOrders.length; i++) {
points[i] = values[docOrders[i]];
}
return points;
}
}
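
values(doc) avoids allocating per call: for documents with fewer than 100 values it hands out a thread-local GeoPoint[] sized exactly to the value count, so the returned array is only valid until the next call on the same thread. A hedged sketch of consuming it safely when the points need to outlive the current document:

// Hedged sketch: copy the (possibly reused) array before keeping it around.
GeoPoint[] docValues = fieldData.values(doc);
GeoPoint[] kept = new GeoPoint[docValues.length];
System.arraycopy(docValues, 0, kept, 0, docValues.length);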

View File

@ -0,0 +1,80 @@
/*
* Licensed to Elastic Search and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Elastic Search licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.mapper.xcontent.geo;
import org.elasticsearch.common.thread.ThreadLocals;
/**
* @author kimchy (shay.banon)
*/
public class SingleValueGeoPointFieldData extends GeoPointFieldData {
private static ThreadLocal<ThreadLocals.CleanableValue<GeoPoint[]>> valuesCache = new ThreadLocal<ThreadLocals.CleanableValue<GeoPoint[]>>() {
@Override protected ThreadLocals.CleanableValue<GeoPoint[]> initialValue() {
return new ThreadLocals.CleanableValue<GeoPoint[]>(new GeoPoint[1]);
}
};
// order with value 0 indicates no value
private final int[] order;
public SingleValueGeoPointFieldData(String fieldName, int[] order, GeoPoint[] values) {
super(fieldName, values);
this.order = order;
}
int[] order() {
return order;
}
GeoPoint[] values() {
return this.values;
}
@Override public boolean multiValued() {
return false;
}
@Override public boolean hasValue(int docId) {
return order[docId] != 0;
}
@Override public void forEachValueInDoc(int docId, StringValueInDocProc proc) {
int loc = order[docId];
if (loc == 0) {
return;
}
proc.onValue(docId, values[loc].geohash());
}
@Override public GeoPoint value(int docId) {
return values[order[docId]];
}
@Override public GeoPoint[] values(int docId) {
int loc = order[docId];
if (loc == 0) {
return EMPTY_ARRAY;
}
GeoPoint[] ret = valuesCache.get().get();
ret[0] = values[loc];
return ret;
}
}
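
In the single-valued variant, ordinal 0 is the "no value" sentinel (the loader seeds index 0 with null), so hasValue(doc) should be checked before value(doc), exactly as the rewritten filters and comparator do. A minimal hedged sketch:

// Hedged sketch: guard against the null sentinel at ordinal 0.
if (fieldData.hasValue(doc)) {
    GeoPoint point = fieldData.value(doc);  // values[order[doc]], never null here
    // ... use point.lat() / point.lon() ...
}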

View File

@ -29,7 +29,8 @@ import org.elasticsearch.index.AbstractIndexComponent;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.xcontent.GeoPointFieldMapper;
import org.elasticsearch.index.mapper.xcontent.geo.GeoPointFieldDataType;
import org.elasticsearch.index.mapper.xcontent.geo.GeoPointFieldMapper;
import org.elasticsearch.index.query.QueryParsingException;
import org.elasticsearch.index.settings.IndexSettings;
@ -55,8 +56,7 @@ public class GeoBoundingBoxFilterParser extends AbstractIndexComponent implement
@Override public Filter parse(QueryParseContext parseContext) throws IOException, QueryParsingException {
XContentParser parser = parseContext.parser();
String latFieldName = null;
String lonFieldName = null;
String fieldName = null;
GeoBoundingBoxFilter.Point topLeft = new GeoBoundingBoxFilter.Point();
GeoBoundingBoxFilter.Point bottomRight = new GeoBoundingBoxFilter.Point();
@ -67,8 +67,7 @@ public class GeoBoundingBoxFilterParser extends AbstractIndexComponent implement
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (token == XContentParser.Token.START_OBJECT) {
latFieldName = currentFieldName + GeoPointFieldMapper.Names.LAT_SUFFIX;
lonFieldName = currentFieldName + GeoPointFieldMapper.Names.LON_SUFFIX;
fieldName = currentFieldName;
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
@ -117,8 +116,7 @@ public class GeoBoundingBoxFilterParser extends AbstractIndexComponent implement
}
} else if (token.isValue()) {
if ("field".equals(currentFieldName)) {
latFieldName = parser.text() + GeoPointFieldMapper.Names.LAT_SUFFIX;
lonFieldName = parser.text() + GeoPointFieldMapper.Names.LON_SUFFIX;
fieldName = parser.text();
} else {
GeoBoundingBoxFilter.Point point = null;
if ("top_left".equals(currentFieldName) || "topLeft".equals(currentFieldName)) {
@ -151,21 +149,18 @@ public class GeoBoundingBoxFilterParser extends AbstractIndexComponent implement
MapperService mapperService = parseContext.mapperService();
FieldMapper mapper = mapperService.smartNameFieldMapper(latFieldName);
FieldMapper mapper = mapperService.smartNameFieldMapper(fieldName);
if (mapper == null) {
throw new QueryParsingException(index, "failed to find lat field [" + latFieldName + "]");
throw new QueryParsingException(index, "failed to find geo_point field [" + fieldName + "]");
}
latFieldName = mapper.names().indexName();
mapper = mapperService.smartNameFieldMapper(lonFieldName);
if (mapper == null) {
throw new QueryParsingException(index, "failed to find lon field [" + lonFieldName + "]");
if (mapper.fieldDataType() != GeoPointFieldDataType.TYPE) {
throw new QueryParsingException(index, "field [" + fieldName + "] is not a geo_point field");
}
lonFieldName = mapper.names().indexName();
fieldName = mapper.names().indexName();
Filter filter = new GeoBoundingBoxFilter(topLeft, bottomRight, latFieldName, lonFieldName, mapper.fieldDataType(), parseContext.indexCache().fieldData());
filter = wrapSmartNameFilter(filter, parseContext.smartFieldMappers(latFieldName), parseContext);
Filter filter = new GeoBoundingBoxFilter(topLeft, bottomRight, fieldName, parseContext.indexCache().fieldData());
filter = wrapSmartNameFilter(filter, parseContext.smartFieldMappers(fieldName), parseContext);
if (filterName != null) {
parseContext.addNamedFilter(filterName, filter);
}
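
The parser now resolves one mapped geo_point field, taken either from the enclosing object key or from an explicit "field" entry, and verifies its field data type before building the filter. A hedged sketch of the element body this parse() method accepts; the field name, coordinate values, and exact point syntax are illustrative, not taken from the commit:

// Hedged sketch, not from the commit: one accepted shape of the filter body.
String body = "{"
        + " \"pin.location\" : {"
        + "   \"top_left\"     : { \"lat\" : 40.73,  \"lon\" : -74.10 },"
        + "   \"bottom_right\" : { \"lat\" : 40.717, \"lon\" : -73.99 }"
        + " }"
        + "}";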

View File

@ -31,7 +31,8 @@ import org.elasticsearch.index.AbstractIndexComponent;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.xcontent.GeoPointFieldMapper;
import org.elasticsearch.index.mapper.xcontent.geo.GeoPointFieldDataType;
import org.elasticsearch.index.mapper.xcontent.geo.GeoPointFieldMapper;
import org.elasticsearch.index.query.QueryParsingException;
import org.elasticsearch.index.settings.IndexSettings;
@ -70,8 +71,7 @@ public class GeoDistanceFilterParser extends AbstractIndexComponent implements X
String currentFieldName = null;
double lat = 0;
double lon = 0;
String latFieldName = null;
String lonFieldName = null;
String fieldName = null;
double distance = 0;
DistanceUnit unit = null;
GeoDistance geoDistance = GeoDistance.ARC;
@ -86,13 +86,11 @@ public class GeoDistanceFilterParser extends AbstractIndexComponent implements X
while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
}
latFieldName = currentFieldName + "." + GeoPointFieldMapper.Names.LAT;
lonFieldName = currentFieldName + "." + GeoPointFieldMapper.Names.LON;
fieldName = currentFieldName;
} else if (token == XContentParser.Token.START_OBJECT) {
// the json in the format of -> field : { lat : 30, lon : 12 }
String currentName = parser.currentName();
latFieldName = currentFieldName + GeoPointFieldMapper.Names.LAT_SUFFIX;
lonFieldName = currentFieldName + GeoPointFieldMapper.Names.LON_SUFFIX;
fieldName = currentFieldName;
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
currentName = parser.currentName();
@ -121,16 +119,15 @@ public class GeoDistanceFilterParser extends AbstractIndexComponent implements X
geoDistance = GeoDistance.fromString(parser.text());
} else if (currentFieldName.endsWith(GeoPointFieldMapper.Names.LAT_SUFFIX)) {
lat = parser.doubleValue();
latFieldName = currentFieldName;
fieldName = currentFieldName.substring(0, currentFieldName.length() - GeoPointFieldMapper.Names.LAT_SUFFIX.length());
} else if (currentFieldName.endsWith(GeoPointFieldMapper.Names.LON_SUFFIX)) {
lon = parser.doubleValue();
lonFieldName = currentFieldName;
fieldName = currentFieldName.substring(0, currentFieldName.length() - GeoPointFieldMapper.Names.LON_SUFFIX.length());
} else if (currentFieldName.endsWith(GeoPointFieldMapper.Names.GEOHASH_SUFFIX)) {
double[] values = GeoHashUtils.decode(parser.text());
lat = values[0];
lon = values[1];
latFieldName = currentFieldName.substring(0, currentFieldName.length() - GeoPointFieldMapper.Names.GEOHASH_SUFFIX.length()) + GeoPointFieldMapper.Names.LAT_SUFFIX;
lonFieldName = currentFieldName.substring(0, currentFieldName.length() - GeoPointFieldMapper.Names.GEOHASH_SUFFIX.length()) + GeoPointFieldMapper.Names.LON_SUFFIX;
fieldName = currentFieldName.substring(0, currentFieldName.length() - GeoPointFieldMapper.Names.GEOHASH_SUFFIX.length());
} else if ("_name".equals(currentFieldName)) {
filterName = parser.text();
} else {
@ -145,9 +142,7 @@ public class GeoDistanceFilterParser extends AbstractIndexComponent implements X
lat = values[0];
lon = values[1];
}
latFieldName = currentFieldName + GeoPointFieldMapper.Names.LAT_SUFFIX;
lonFieldName = currentFieldName + GeoPointFieldMapper.Names.LON_SUFFIX;
fieldName = currentFieldName;
}
}
}
@ -157,20 +152,17 @@ public class GeoDistanceFilterParser extends AbstractIndexComponent implements X
}
MapperService mapperService = parseContext.mapperService();
FieldMapper mapper = mapperService.smartNameFieldMapper(latFieldName);
FieldMapper mapper = mapperService.smartNameFieldMapper(fieldName);
if (mapper == null) {
throw new QueryParsingException(index, "failed to find lat field [" + latFieldName + "]");
throw new QueryParsingException(index, "failed to find geo_point field [" + fieldName + "]");
}
latFieldName = mapper.names().indexName();
mapper = mapperService.smartNameFieldMapper(lonFieldName);
if (mapper == null) {
throw new QueryParsingException(index, "failed to find lon field [" + lonFieldName + "]");
if (mapper.fieldDataType() != GeoPointFieldDataType.TYPE) {
throw new QueryParsingException(index, "field [" + fieldName + "] is not a geo_point field");
}
lonFieldName = mapper.names().indexName();
fieldName = mapper.names().indexName();
Filter filter = new GeoDistanceFilter(lat, lon, distance, geoDistance, latFieldName, lonFieldName, mapper.fieldDataType(), parseContext.indexCache().fieldData());
filter = wrapSmartNameFilter(filter, parseContext.smartFieldMappers(latFieldName), parseContext);
Filter filter = new GeoDistanceFilter(lat, lon, distance, geoDistance, fieldName, parseContext.indexCache().fieldData());
filter = wrapSmartNameFilter(filter, parseContext.smartFieldMappers(fieldName), parseContext);
if (filterName != null) {
parseContext.addNamedFilter(filterName, filter);
}

View File

@ -30,7 +30,8 @@ import org.elasticsearch.index.AbstractIndexComponent;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.xcontent.GeoPointFieldMapper;
import org.elasticsearch.index.mapper.xcontent.geo.GeoPointFieldDataType;
import org.elasticsearch.index.mapper.xcontent.geo.GeoPointFieldMapper;
import org.elasticsearch.index.query.QueryParsingException;
import org.elasticsearch.index.settings.IndexSettings;
@ -68,8 +69,7 @@ public class GeoPolygonFilterParser extends AbstractIndexComponent implements XC
@Override public Filter parse(QueryParseContext parseContext) throws IOException, QueryParsingException {
XContentParser parser = parseContext.parser();
String latFieldName = null;
String lonFieldName = null;
String fieldName = null;
List<GeoPolygonFilter.Point> points = Lists.newArrayList();
@ -81,8 +81,7 @@ public class GeoPolygonFilterParser extends AbstractIndexComponent implements XC
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (token == XContentParser.Token.START_OBJECT) {
latFieldName = currentFieldName + GeoPointFieldMapper.Names.LAT_SUFFIX;
lonFieldName = currentFieldName + GeoPointFieldMapper.Names.LON_SUFFIX;
fieldName = currentFieldName;
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
@ -150,20 +149,17 @@ public class GeoPolygonFilterParser extends AbstractIndexComponent implements XC
}
MapperService mapperService = parseContext.mapperService();
FieldMapper mapper = mapperService.smartNameFieldMapper(latFieldName);
FieldMapper mapper = mapperService.smartNameFieldMapper(fieldName);
if (mapper == null) {
throw new QueryParsingException(index, "failed to find lat field [" + latFieldName + "]");
throw new QueryParsingException(index, "failed to find geo_point field [" + fieldName + "]");
}
latFieldName = mapper.names().indexName();
mapper = mapperService.smartNameFieldMapper(lonFieldName);
if (mapper == null) {
throw new QueryParsingException(index, "failed to find lon field [" + lonFieldName + "]");
if (mapper.fieldDataType() != GeoPointFieldDataType.TYPE) {
throw new QueryParsingException(index, "field [" + fieldName + "] is not a geo_point field");
}
lonFieldName = mapper.names().indexName();
fieldName = mapper.names().indexName();
Filter filter = new GeoPolygonFilter(points.toArray(new GeoPolygonFilter.Point[points.size()]), latFieldName, lonFieldName, mapper.fieldDataType(), parseContext.indexCache().fieldData());
filter = wrapSmartNameFilter(filter, parseContext.smartFieldMappers(latFieldName), parseContext);
Filter filter = new GeoPolygonFilter(points.toArray(new GeoPolygonFilter.Point[points.size()]), fieldName, parseContext.indexCache().fieldData());
filter = wrapSmartNameFilter(filter, parseContext.smartFieldMappers(fieldName), parseContext);
if (filterName != null) {
parseContext.addNamedFilter(filterName, filter);
}
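The geo_polygon filter gets the same treatment: the polygon's points now live under the single geo_point field. A rough sketch, again assuming an illustrative "location" field and illustrative coordinates; "points" as the array key is how the polygon parser of this era reads it, to the best of my knowledge (imports as in the sketch above):

    XContentBuilder filterSource = XContentFactory.jsonBuilder().startObject()
            .startObject("geo_polygon")
                .startObject("location")
                    .startArray("points")
                        .startObject().field("lat", 40.0).field("lon", -70.0).endObject()
                        .startObject().field("lat", 30.0).field("lon", -80.0).endObject()
                        .startObject().field("lat", 20.0).field("lon", -90.0).endObject()
                    .endArray()
                .endObject()
            .endObject()
            .endObject();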

View File

@ -23,11 +23,10 @@ import org.apache.lucene.index.IndexReader;
import org.elasticsearch.common.lucene.geo.GeoDistance;
import org.elasticsearch.common.unit.DistanceUnit;
import org.elasticsearch.index.cache.field.data.FieldDataCache;
import org.elasticsearch.index.field.data.FieldDataType;
import org.elasticsearch.index.field.data.NumericFieldData;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.xcontent.GeoPointFieldMapper;
import org.elasticsearch.index.mapper.xcontent.geo.GeoPoint;
import org.elasticsearch.index.mapper.xcontent.geo.GeoPointFieldData;
import org.elasticsearch.index.mapper.xcontent.geo.GeoPointFieldDataType;
import org.elasticsearch.search.facets.Facet;
import org.elasticsearch.search.facets.FacetPhaseExecutionException;
import org.elasticsearch.search.facets.support.AbstractFacetCollector;
@ -42,9 +41,7 @@ public class GeoDistanceFacetCollector extends AbstractFacetCollector {
protected final String fieldName;
protected final String indexLatFieldName;
protected final String indexLonFieldName;
protected final String indexFieldName;
protected final double lat;
@ -56,11 +53,7 @@ public class GeoDistanceFacetCollector extends AbstractFacetCollector {
protected final FieldDataCache fieldDataCache;
protected final FieldDataType fieldDataType;
protected NumericFieldData latFieldData;
protected NumericFieldData lonFieldData;
protected GeoPointFieldData fieldData;
protected final GeoDistanceFacet.Entry[] entries;
@ -75,41 +68,35 @@ public class GeoDistanceFacetCollector extends AbstractFacetCollector {
this.geoDistance = geoDistance;
this.fieldDataCache = context.fieldDataCache();
MapperService.SmartNameFieldMappers smartMappers = context.mapperService().smartName(fieldName + GeoPointFieldMapper.Names.LAT_SUFFIX);
MapperService.SmartNameFieldMappers smartMappers = context.mapperService().smartName(fieldName);
if (smartMappers == null || !smartMappers.hasMapper()) {
throw new FacetPhaseExecutionException(facetName, "No mapping found for field [" + fieldName + "]");
}
if (smartMappers.mapper().fieldDataType() != GeoPointFieldDataType.TYPE) {
throw new FacetPhaseExecutionException(facetName, "field [" + fieldName + "] is not a geo_point field");
}
// add type filter if there is an exact doc mapper associated with it
if (smartMappers.hasDocMapper()) {
setFilter(context.filterCache().cache(smartMappers.docMapper().typeFilter()));
}
this.indexLatFieldName = smartMappers.mapper().names().indexName();
FieldMapper mapper = context.mapperService().smartNameFieldMapper(fieldName + GeoPointFieldMapper.Names.LON_SUFFIX);
if (mapper == null) {
throw new FacetPhaseExecutionException(facetName, "No mapping found for field [" + fieldName + "]");
}
this.indexLonFieldName = mapper.names().indexName();
this.fieldDataType = mapper.fieldDataType();
this.indexFieldName = smartMappers.mapper().names().indexName();
}
@Override protected void doSetNextReader(IndexReader reader, int docBase) throws IOException {
latFieldData = (NumericFieldData) fieldDataCache.cache(fieldDataType, reader, indexLatFieldName);
lonFieldData = (NumericFieldData) fieldDataCache.cache(fieldDataType, reader, indexLonFieldName);
fieldData = (GeoPointFieldData) fieldDataCache.cache(GeoPointFieldDataType.TYPE, reader, indexFieldName);
}
@Override protected void doCollect(int doc) throws IOException {
if (!latFieldData.hasValue(doc) || !lonFieldData.hasValue(doc)) {
if (!fieldData.hasValue(doc)) {
return;
}
if (latFieldData.multiValued()) {
double[] lats = latFieldData.doubleValues(doc);
double[] lons = latFieldData.doubleValues(doc);
for (int i = 0; i < lats.length; i++) {
double distance = geoDistance.calculate(lat, lon, lats[i], lons[i], unit);
if (fieldData.multiValued()) {
GeoPoint[] points = fieldData.values(doc);
for (GeoPoint point : points) {
double distance = geoDistance.calculate(lat, lon, point.lat(), point.lon(), unit);
for (GeoDistanceFacet.Entry entry : entries) {
if (distance >= entry.getFrom() && distance < entry.getTo()) {
entry.count++;
@ -118,7 +105,8 @@ public class GeoDistanceFacetCollector extends AbstractFacetCollector {
}
}
} else {
double distance = geoDistance.calculate(lat, lon, latFieldData.doubleValue(doc), lonFieldData.doubleValue(doc), unit);
GeoPoint point = fieldData.value(doc);
double distance = geoDistance.calculate(lat, lon, point.lat(), point.lon(), unit);
for (GeoDistanceFacet.Entry entry : entries) {
if (distance >= entry.getFrom() && distance < entry.getTo()) {
entry.count++;
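For context on what drives this collector: the geo_distance facet request now names the geo_point field directly, and the collector reads packed GeoPoint values from the field data cache. An illustrative facet source, with field name, origin point and ranges all assumptions rather than taken from this diff (imports as in the earlier sketches):

    XContentBuilder facetSource = XContentFactory.jsonBuilder().startObject()
            .startObject("geo1")
                .startObject("geo_distance")
                    .startObject("location").field("lat", 40.0).field("lon", -70.0).endObject()
                    .startArray("ranges")
                        .startObject().field("to", 10).endObject()
                        .startObject().field("from", 10).field("to", 20).endObject()
                        .startObject().field("from", 20).endObject()
                    .endArray()
                .endObject()
            .endObject()
            .endObject();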

View File

@ -25,7 +25,7 @@ import org.elasticsearch.common.lucene.geo.GeoHashUtils;
import org.elasticsearch.common.thread.ThreadLocals;
import org.elasticsearch.common.unit.DistanceUnit;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.mapper.xcontent.GeoPointFieldMapper;
import org.elasticsearch.index.mapper.xcontent.geo.GeoPointFieldMapper;
import org.elasticsearch.search.facets.FacetPhaseExecutionException;
import org.elasticsearch.search.facets.collector.FacetCollector;
import org.elasticsearch.search.facets.collector.FacetCollectorParser;

View File

@ -22,6 +22,7 @@ package org.elasticsearch.search.facets.geodistance;
import org.apache.lucene.index.IndexReader;
import org.elasticsearch.common.lucene.geo.GeoDistance;
import org.elasticsearch.common.unit.DistanceUnit;
import org.elasticsearch.index.mapper.xcontent.geo.GeoPoint;
import org.elasticsearch.script.search.SearchScript;
import org.elasticsearch.search.internal.SearchContext;
@ -49,17 +50,16 @@ public class ScriptGeoDistanceFacetCollector extends GeoDistanceFacetCollector {
}
@Override protected void doCollect(int doc) throws IOException {
if (!latFieldData.hasValue(doc) || !lonFieldData.hasValue(doc)) {
if (!fieldData.hasValue(doc)) {
return;
}
double value = ((Number) script.execute(doc)).doubleValue();
if (latFieldData.multiValued()) {
double[] lats = latFieldData.doubleValues(doc);
double[] lons = latFieldData.doubleValues(doc);
for (int i = 0; i < lats.length; i++) {
double distance = geoDistance.calculate(lat, lon, lats[i], lons[i], unit);
if (fieldData.multiValued()) {
GeoPoint[] points = fieldData.values(doc);
for (GeoPoint point : points) {
double distance = geoDistance.calculate(lat, lon, point.lat(), point.lon(), unit);
for (GeoDistanceFacet.Entry entry : entries) {
if (distance >= entry.getFrom() && distance < entry.getTo()) {
entry.count++;
@ -68,7 +68,8 @@ public class ScriptGeoDistanceFacetCollector extends GeoDistanceFacetCollector {
}
}
} else {
double distance = geoDistance.calculate(lat, lon, latFieldData.doubleValue(doc), lonFieldData.doubleValue(doc), unit);
GeoPoint point = fieldData.value(doc);
double distance = geoDistance.calculate(lat, lon, point.lat(), point.lon(), unit);
for (GeoDistanceFacet.Entry entry : entries) {
if (distance >= entry.getFrom() && distance < entry.getTo()) {
entry.count++;

View File

@ -25,6 +25,7 @@ import org.elasticsearch.common.unit.DistanceUnit;
import org.elasticsearch.index.field.data.FieldDataType;
import org.elasticsearch.index.field.data.NumericFieldData;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.xcontent.geo.GeoPoint;
import org.elasticsearch.search.facets.Facet;
import org.elasticsearch.search.facets.FacetPhaseExecutionException;
import org.elasticsearch.search.internal.SearchContext;
@ -63,16 +64,15 @@ public class ValueGeoDistanceFacetCollector extends GeoDistanceFacetCollector {
}
@Override protected void doCollect(int doc) throws IOException {
if (!latFieldData.hasValue(doc) || !lonFieldData.hasValue(doc)) {
if (!fieldData.hasValue(doc)) {
return;
}
if (latFieldData.multiValued()) {
double[] lats = latFieldData.doubleValues(doc);
double[] lons = latFieldData.doubleValues(doc);
if (fieldData.multiValued()) {
GeoPoint[] points = fieldData.values(doc);
double[] values = valueFieldData.multiValued() ? valueFieldData.doubleValues(doc) : null;
for (int i = 0; i < lats.length; i++) {
double distance = geoDistance.calculate(lat, lon, lats[i], lons[i], unit);
for (int i = 0; i < points.length; i++) {
double distance = geoDistance.calculate(lat, lon, points[i].lat(), points[i].lon(), unit);
for (GeoDistanceFacet.Entry entry : entries) {
if (distance >= entry.getFrom() && distance < entry.getTo()) {
entry.count++;
@ -87,7 +87,8 @@ public class ValueGeoDistanceFacetCollector extends GeoDistanceFacetCollector {
}
}
} else {
double distance = geoDistance.calculate(lat, lon, latFieldData.doubleValue(doc), lonFieldData.doubleValue(doc), unit);
GeoPoint point = fieldData.value(doc);
double distance = geoDistance.calculate(lat, lon, point.lat(), point.lon(), unit);
for (GeoDistanceFacet.Entry entry : entries) {
if (distance >= entry.getFrom() && distance < entry.getTo()) {
entry.count++;

View File

@ -25,7 +25,7 @@ import org.elasticsearch.common.lucene.geo.GeoDistanceDataComparator;
import org.elasticsearch.common.lucene.geo.GeoHashUtils;
import org.elasticsearch.common.unit.DistanceUnit;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.mapper.xcontent.GeoPointFieldMapper;
import org.elasticsearch.index.mapper.xcontent.geo.GeoPointFieldMapper;
import org.elasticsearch.search.internal.SearchContext;
/**

View File

@ -36,7 +36,7 @@ public class GeohashMappingGeoPointTests {
@Test public void testLatLonValues() throws Exception {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("point").field("type", "geo_point").field("geohash", true).field("lat_lon", false).endObject().endObject()
.startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", false).endObject().endObject()
.endObject().endObject().string();
XContentDocumentMapper defaultMapper = MapperTests.newParser().parse(mapping);
@ -49,12 +49,12 @@ public class GeohashMappingGeoPointTests {
MatcherAssert.assertThat(doc.doc().getField("point.lat"), nullValue());
MatcherAssert.assertThat(doc.doc().getField("point.lon"), nullValue());
MatcherAssert.assertThat(doc.doc().get("point.geohash"), equalTo(GeoHashUtils.encode(1.2, 1.3)));
MatcherAssert.assertThat(doc.doc().get("point"), equalTo(GeoHashUtils.encode(1.2, 1.3)));
}
@Test public void testLatLonInOneValue() throws Exception {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("point").field("type", "geo_point").field("geohash", true).field("lat_lon", false).endObject().endObject()
.startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", false).endObject().endObject()
.endObject().endObject().string();
XContentDocumentMapper defaultMapper = MapperTests.newParser().parse(mapping);
@ -67,12 +67,12 @@ public class GeohashMappingGeoPointTests {
MatcherAssert.assertThat(doc.doc().getField("point.lat"), nullValue());
MatcherAssert.assertThat(doc.doc().getField("point.lon"), nullValue());
MatcherAssert.assertThat(doc.doc().get("point.geohash"), equalTo(GeoHashUtils.encode(1.2, 1.3)));
MatcherAssert.assertThat(doc.doc().get("point"), equalTo(GeoHashUtils.encode(1.2, 1.3)));
}
@Test public void testGeoHashValue() throws Exception {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("point").field("type", "geo_point").field("geohash", true).field("lat_lon", false).endObject().endObject()
.startObject("properties").startObject("point").field("type", "geo_point").field("geohash", true).endObject().endObject()
.endObject().endObject().string();
XContentDocumentMapper defaultMapper = MapperTests.newParser().parse(mapping);
@ -85,6 +85,6 @@ public class GeohashMappingGeoPointTests {
MatcherAssert.assertThat(doc.doc().getField("point.lat"), nullValue());
MatcherAssert.assertThat(doc.doc().getField("point.lon"), nullValue());
MatcherAssert.assertThat(doc.doc().get("point.geohash"), equalTo(GeoHashUtils.encode(1.2, 1.3)));
MatcherAssert.assertThat(doc.doc().get("point"), equalTo(GeoHashUtils.encode(1.2, 1.3)));
}
}
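For readers unfamiliar with the geohash helpers these assertions lean on, a minimal round trip using the same coordinates as the tests:

    import org.elasticsearch.common.lucene.geo.GeoHashUtils;

    String hash = GeoHashUtils.encode(1.2, 1.3);   // the string the mapper now indexes under "point"
    double[] latLon = GeoHashUtils.decode(hash);   // [0] = lat, [1] = lon, the order the filter parsers above rely on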

View File

@ -1,90 +0,0 @@
/*
* Licensed to Elastic Search and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Elastic Search licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.mapper.xcontent.geopoint;
import org.elasticsearch.common.lucene.geo.GeoHashUtils;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.mapper.ParsedDocument;
import org.elasticsearch.index.mapper.xcontent.MapperTests;
import org.elasticsearch.index.mapper.xcontent.XContentDocumentMapper;
import org.hamcrest.MatcherAssert;
import org.testng.annotations.Test;
import static org.hamcrest.Matchers.*;
/**
* @author kimchy (shay.banon)
*/
public class LatLonAndGeohashMappingGeoPointTests {
@Test public void testLatLonValues() throws Exception {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("point").field("type", "geo_point").field("geohash", true).endObject().endObject()
.endObject().endObject().string();
XContentDocumentMapper defaultMapper = MapperTests.newParser().parse(mapping);
ParsedDocument doc = defaultMapper.parse("type", "1", XContentFactory.jsonBuilder()
.startObject()
.startObject("point").field("lat", 1.2).field("lon", 1.3).endObject()
.endObject()
.copiedBytes());
MatcherAssert.assertThat(doc.doc().getField("point.lat"), notNullValue());
MatcherAssert.assertThat(doc.doc().getField("point.lon"), notNullValue());
MatcherAssert.assertThat(doc.doc().get("point.geohash"), equalTo(GeoHashUtils.encode(1.2, 1.3)));
}
@Test public void testLatLonInOneValue() throws Exception {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("point").field("type", "geo_point").field("geohash", true).endObject().endObject()
.endObject().endObject().string();
XContentDocumentMapper defaultMapper = MapperTests.newParser().parse(mapping);
ParsedDocument doc = defaultMapper.parse("type", "1", XContentFactory.jsonBuilder()
.startObject()
.field("point", "1.2,1.3")
.endObject()
.copiedBytes());
MatcherAssert.assertThat(doc.doc().getField("point.lat"), notNullValue());
MatcherAssert.assertThat(doc.doc().getField("point.lon"), notNullValue());
MatcherAssert.assertThat(doc.doc().get("point.geohash"), equalTo(GeoHashUtils.encode(1.2, 1.3)));
}
@Test public void testGeoHashValue() throws Exception {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("point").field("type", "geo_point").field("geohash", true).endObject().endObject()
.endObject().endObject().string();
XContentDocumentMapper defaultMapper = MapperTests.newParser().parse(mapping);
ParsedDocument doc = defaultMapper.parse("type", "1", XContentFactory.jsonBuilder()
.startObject()
.field("point", GeoHashUtils.encode(1.2, 1.3))
.endObject()
.copiedBytes());
MatcherAssert.assertThat(doc.doc().getField("point.lat"), notNullValue());
MatcherAssert.assertThat(doc.doc().getField("point.lon"), notNullValue());
MatcherAssert.assertThat(doc.doc().get("point.geohash"), equalTo(GeoHashUtils.encode(1.2, 1.3)));
}
}

View File

@ -37,7 +37,7 @@ public class LatLonMappingGeoPointTests {
@Test public void testLatLonValues() throws Exception {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("point").field("type", "geo_point").endObject().endObject()
.startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).endObject().endObject()
.endObject().endObject().string();
XContentDocumentMapper defaultMapper = MapperTests.newParser().parse(mapping);
@ -52,12 +52,12 @@ public class LatLonMappingGeoPointTests {
assertThat(doc.doc().getField("point.lat").getBinaryValue(), nullValue());
assertThat(doc.doc().getField("point.lon"), notNullValue());
assertThat(doc.doc().getField("point.lon").getBinaryValue(), nullValue());
assertThat(doc.doc().getField("point.geohash"), nullValue());
assertThat(doc.doc().get("point"), equalTo(GeoHashUtils.encode(1.2, 1.3)));
}
@Test public void testLatLonValuesStored() throws Exception {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("point").field("type", "geo_point").field("store", "yes").endObject().endObject()
.startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).field("store", "yes").endObject().endObject()
.endObject().endObject().string();
XContentDocumentMapper defaultMapper = MapperTests.newParser().parse(mapping);
@ -72,12 +72,12 @@ public class LatLonMappingGeoPointTests {
assertThat(doc.doc().getField("point.lat").getBinaryValue(), equalTo(Numbers.doubleToBytes(1.2)));
assertThat(doc.doc().getField("point.lon"), notNullValue());
assertThat(doc.doc().getField("point.lon").getBinaryValue(), equalTo(Numbers.doubleToBytes(1.3)));
assertThat(doc.doc().getField("point.geohash"), nullValue());
assertThat(doc.doc().get("point"), equalTo(GeoHashUtils.encode(1.2, 1.3)));
}
@Test public void testArrayLatLonValues() throws Exception {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("point").field("type", "geo_point").field("store", "yes").endObject().endObject()
.startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).field("store", "yes").endObject().endObject()
.endObject().endObject().string();
XContentDocumentMapper defaultMapper = MapperTests.newParser().parse(mapping);
@ -95,15 +95,15 @@ public class LatLonMappingGeoPointTests {
assertThat(doc.doc().getFields("point.lon").length, equalTo(2));
assertThat(doc.doc().getFields("point.lat")[0].getBinaryValue(), equalTo(Numbers.doubleToBytes(1.2)));
assertThat(doc.doc().getFields("point.lon")[0].getBinaryValue(), equalTo(Numbers.doubleToBytes(1.3)));
assertThat(doc.doc().getFields("point")[0].stringValue(), equalTo(GeoHashUtils.encode(1.2, 1.3)));
assertThat(doc.doc().getFields("point.lat")[1].getBinaryValue(), equalTo(Numbers.doubleToBytes(1.4)));
assertThat(doc.doc().getFields("point.lon")[1].getBinaryValue(), equalTo(Numbers.doubleToBytes(1.5)));
assertThat(doc.doc().getField("point.geohash"), nullValue());
assertThat(doc.doc().getFields("point")[1].stringValue(), equalTo(GeoHashUtils.encode(1.4, 1.5)));
}
@Test public void testLatLonInOneValue() throws Exception {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("point").field("type", "geo_point").endObject().endObject()
.startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).endObject().endObject()
.endObject().endObject().string();
XContentDocumentMapper defaultMapper = MapperTests.newParser().parse(mapping);
@ -116,12 +116,12 @@ public class LatLonMappingGeoPointTests {
assertThat(doc.doc().getField("point.lat"), notNullValue());
assertThat(doc.doc().getField("point.lon"), notNullValue());
assertThat(doc.doc().getField("point.geohash"), nullValue());
assertThat(doc.doc().get("point"), equalTo(GeoHashUtils.encode(1.2, 1.3)));
}
@Test public void testLatLonInOneValueStored() throws Exception {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("point").field("type", "geo_point").field("store", "yes").endObject().endObject()
.startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).field("store", "yes").endObject().endObject()
.endObject().endObject().string();
XContentDocumentMapper defaultMapper = MapperTests.newParser().parse(mapping);
@ -136,12 +136,12 @@ public class LatLonMappingGeoPointTests {
assertThat(doc.doc().getField("point.lat").getBinaryValue(), equalTo(Numbers.doubleToBytes(1.2)));
assertThat(doc.doc().getField("point.lon"), notNullValue());
assertThat(doc.doc().getField("point.lon").getBinaryValue(), equalTo(Numbers.doubleToBytes(1.3)));
assertThat(doc.doc().getField("point.geohash"), nullValue());
assertThat(doc.doc().get("point"), equalTo(GeoHashUtils.encode(1.2, 1.3)));
}
@Test public void testLatLonInOneValueArray() throws Exception {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("point").field("type", "geo_point").field("store", "yes").endObject().endObject()
.startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).field("store", "yes").endObject().endObject()
.endObject().endObject().string();
XContentDocumentMapper defaultMapper = MapperTests.newParser().parse(mapping);
@ -159,15 +159,15 @@ public class LatLonMappingGeoPointTests {
assertThat(doc.doc().getFields("point.lon").length, equalTo(2));
assertThat(doc.doc().getFields("point.lat")[0].getBinaryValue(), equalTo(Numbers.doubleToBytes(1.2)));
assertThat(doc.doc().getFields("point.lon")[0].getBinaryValue(), equalTo(Numbers.doubleToBytes(1.3)));
assertThat(doc.doc().getFields("point")[0].stringValue(), equalTo(GeoHashUtils.encode(1.2, 1.3)));
assertThat(doc.doc().getFields("point.lat")[1].getBinaryValue(), equalTo(Numbers.doubleToBytes(1.4)));
assertThat(doc.doc().getFields("point.lon")[1].getBinaryValue(), equalTo(Numbers.doubleToBytes(1.5)));
assertThat(doc.doc().getField("point.geohash"), nullValue());
assertThat(doc.doc().getFields("point")[1].stringValue(), equalTo(GeoHashUtils.encode(1.4, 1.5)));
}
@Test public void testGeoHashValue() throws Exception {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("point").field("type", "geo_point").endObject().endObject()
.startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).endObject().endObject()
.endObject().endObject().string();
XContentDocumentMapper defaultMapper = MapperTests.newParser().parse(mapping);
@ -180,12 +180,12 @@ public class LatLonMappingGeoPointTests {
assertThat(doc.doc().getField("point.lat"), notNullValue());
assertThat(doc.doc().getField("point.lon"), notNullValue());
assertThat(doc.doc().getField("point.geohash"), nullValue());
assertThat(doc.doc().get("point"), equalTo(GeoHashUtils.encode(1.2, 1.3)));
}
@Test public void testLatLonArray() throws Exception {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("point").field("type", "geo_point").endObject().endObject()
.startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).endObject().endObject()
.endObject().endObject().string();
XContentDocumentMapper defaultMapper = MapperTests.newParser().parse(mapping);
@ -200,12 +200,12 @@ public class LatLonMappingGeoPointTests {
assertThat(doc.doc().getField("point.lat").getBinaryValue(), nullValue());
assertThat(doc.doc().getField("point.lon"), notNullValue());
assertThat(doc.doc().getField("point.lon").getBinaryValue(), nullValue());
assertThat(doc.doc().getField("point.geohash"), nullValue());
assertThat(doc.doc().get("point"), equalTo(GeoHashUtils.encode(1.2, 1.3)));
}
@Test public void testLatLonArrayStored() throws Exception {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("point").field("type", "geo_point").field("store", "yes").endObject().endObject()
.startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).field("store", "yes").endObject().endObject()
.endObject().endObject().string();
XContentDocumentMapper defaultMapper = MapperTests.newParser().parse(mapping);
@ -220,12 +220,12 @@ public class LatLonMappingGeoPointTests {
assertThat(doc.doc().getField("point.lat").getBinaryValue(), equalTo(Numbers.doubleToBytes(1.2)));
assertThat(doc.doc().getField("point.lon"), notNullValue());
assertThat(doc.doc().getField("point.lon").getBinaryValue(), equalTo(Numbers.doubleToBytes(1.3)));
assertThat(doc.doc().getField("point.geohash"), nullValue());
assertThat(doc.doc().get("point"), equalTo(GeoHashUtils.encode(1.2, 1.3)));
}
@Test public void testLatLonArrayArrayStored() throws Exception {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("point").field("type", "geo_point").field("store", "yes").endObject().endObject()
.startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).field("store", "yes").endObject().endObject()
.endObject().endObject().string();
XContentDocumentMapper defaultMapper = MapperTests.newParser().parse(mapping);
@ -243,9 +243,9 @@ public class LatLonMappingGeoPointTests {
assertThat(doc.doc().getFields("point.lon").length, equalTo(2));
assertThat(doc.doc().getFields("point.lat")[0].getBinaryValue(), equalTo(Numbers.doubleToBytes(1.2)));
assertThat(doc.doc().getFields("point.lon")[0].getBinaryValue(), equalTo(Numbers.doubleToBytes(1.3)));
assertThat(doc.doc().getFields("point")[0].stringValue(), equalTo(GeoHashUtils.encode(1.2, 1.3)));
assertThat(doc.doc().getFields("point.lat")[1].getBinaryValue(), equalTo(Numbers.doubleToBytes(1.4)));
assertThat(doc.doc().getFields("point.lon")[1].getBinaryValue(), equalTo(Numbers.doubleToBytes(1.5)));
assertThat(doc.doc().getField("point.geohash"), nullValue());
assertThat(doc.doc().getFields("point")[1].stringValue(), equalTo(GeoHashUtils.encode(1.4, 1.5)));
}
}

View File

@ -90,6 +90,8 @@ public class SimpleIndexQueryParserTests {
new IndexNameModule(index)
).createInjector();
String mapping = copyToStringFromClasspath("/org/elasticsearch/index/query/xcontent/mapping.json");
injector.getInstance(MapperService.class).add("person", mapping);
injector.getInstance(MapperService.class).type("person").parse(copyToBytesFromClasspath("/org/elasticsearch/index/query/xcontent/data.json"));
this.queryParser = injector.getInstance(IndexQueryParserService.class);
}
@ -1130,8 +1132,7 @@ public class SimpleIndexQueryParserTests {
assertThat(parsedQuery.query(), instanceOf(FilteredQuery.class));
FilteredQuery filteredQuery = (FilteredQuery) parsedQuery.query();
GeoDistanceFilter filter = (GeoDistanceFilter) filteredQuery.getFilter();
assertThat(filter.latFieldName(), equalTo("location.lat"));
assertThat(filter.lonFieldName(), equalTo("location.lon"));
assertThat(filter.fieldName(), equalTo("location"));
assertThat(filter.lat(), closeTo(40, 0.00001));
assertThat(filter.lon(), closeTo(-70, 0.00001));
assertThat(filter.distance(), closeTo(12, 0.00001));
@ -1144,8 +1145,7 @@ public class SimpleIndexQueryParserTests {
assertThat(parsedQuery, instanceOf(FilteredQuery.class));
FilteredQuery filteredQuery = (FilteredQuery) parsedQuery;
GeoDistanceFilter filter = (GeoDistanceFilter) filteredQuery.getFilter();
assertThat(filter.latFieldName(), equalTo("location.lat"));
assertThat(filter.lonFieldName(), equalTo("location.lon"));
assertThat(filter.fieldName(), equalTo("location"));
assertThat(filter.lat(), closeTo(40, 0.00001));
assertThat(filter.lon(), closeTo(-70, 0.00001));
assertThat(filter.distance(), closeTo(12, 0.00001));
@ -1158,8 +1158,7 @@ public class SimpleIndexQueryParserTests {
assertThat(parsedQuery, instanceOf(FilteredQuery.class));
FilteredQuery filteredQuery = (FilteredQuery) parsedQuery;
GeoDistanceFilter filter = (GeoDistanceFilter) filteredQuery.getFilter();
assertThat(filter.latFieldName(), equalTo("location.lat"));
assertThat(filter.lonFieldName(), equalTo("location.lon"));
assertThat(filter.fieldName(), equalTo("location"));
assertThat(filter.lat(), closeTo(40, 0.00001));
assertThat(filter.lon(), closeTo(-70, 0.00001));
assertThat(filter.distance(), closeTo(12, 0.00001));
@ -1172,8 +1171,7 @@ public class SimpleIndexQueryParserTests {
assertThat(parsedQuery, instanceOf(FilteredQuery.class));
FilteredQuery filteredQuery = (FilteredQuery) parsedQuery;
GeoDistanceFilter filter = (GeoDistanceFilter) filteredQuery.getFilter();
assertThat(filter.latFieldName(), equalTo("location.lat"));
assertThat(filter.lonFieldName(), equalTo("location.lon"));
assertThat(filter.fieldName(), equalTo("location"));
assertThat(filter.lat(), closeTo(40, 0.00001));
assertThat(filter.lon(), closeTo(-70, 0.00001));
assertThat(filter.distance(), closeTo(12, 0.00001));
@ -1186,8 +1184,7 @@ public class SimpleIndexQueryParserTests {
assertThat(parsedQuery, instanceOf(FilteredQuery.class));
FilteredQuery filteredQuery = (FilteredQuery) parsedQuery;
GeoDistanceFilter filter = (GeoDistanceFilter) filteredQuery.getFilter();
assertThat(filter.latFieldName(), equalTo("location.lat"));
assertThat(filter.lonFieldName(), equalTo("location.lon"));
assertThat(filter.fieldName(), equalTo("location"));
assertThat(filter.lat(), closeTo(40, 0.00001));
assertThat(filter.lon(), closeTo(-70, 0.00001));
assertThat(filter.distance(), closeTo(12, 0.00001));
@ -1201,8 +1198,7 @@ public class SimpleIndexQueryParserTests {
assertThat(parsedQuery.namedFilters().containsKey("test"), equalTo(true));
FilteredQuery filteredQuery = (FilteredQuery) parsedQuery.query();
GeoBoundingBoxFilter filter = (GeoBoundingBoxFilter) filteredQuery.getFilter();
assertThat(filter.latFieldName(), equalTo("location.lat"));
assertThat(filter.lonFieldName(), equalTo("location.lon"));
assertThat(filter.fieldName(), equalTo("location"));
assertThat(filter.topLeft().lat, closeTo(40, 0.00001));
assertThat(filter.topLeft().lon, closeTo(-70, 0.00001));
assertThat(filter.bottomRight().lat, closeTo(30, 0.00001));
@ -1217,8 +1213,7 @@ public class SimpleIndexQueryParserTests {
assertThat(parsedQuery, instanceOf(FilteredQuery.class));
FilteredQuery filteredQuery = (FilteredQuery) parsedQuery;
GeoBoundingBoxFilter filter = (GeoBoundingBoxFilter) filteredQuery.getFilter();
assertThat(filter.latFieldName(), equalTo("location.lat"));
assertThat(filter.lonFieldName(), equalTo("location.lon"));
assertThat(filter.fieldName(), equalTo("location"));
assertThat(filter.topLeft().lat, closeTo(40, 0.00001));
assertThat(filter.topLeft().lon, closeTo(-70, 0.00001));
assertThat(filter.bottomRight().lat, closeTo(30, 0.00001));
@ -1232,8 +1227,7 @@ public class SimpleIndexQueryParserTests {
assertThat(parsedQuery, instanceOf(FilteredQuery.class));
FilteredQuery filteredQuery = (FilteredQuery) parsedQuery;
GeoBoundingBoxFilter filter = (GeoBoundingBoxFilter) filteredQuery.getFilter();
assertThat(filter.latFieldName(), equalTo("location.lat"));
assertThat(filter.lonFieldName(), equalTo("location.lon"));
assertThat(filter.fieldName(), equalTo("location"));
assertThat(filter.topLeft().lat, closeTo(40, 0.00001));
assertThat(filter.topLeft().lon, closeTo(-70, 0.00001));
assertThat(filter.bottomRight().lat, closeTo(30, 0.00001));
@ -1247,8 +1241,7 @@ public class SimpleIndexQueryParserTests {
assertThat(parsedQuery, instanceOf(FilteredQuery.class));
FilteredQuery filteredQuery = (FilteredQuery) parsedQuery;
GeoBoundingBoxFilter filter = (GeoBoundingBoxFilter) filteredQuery.getFilter();
assertThat(filter.latFieldName(), equalTo("location.lat"));
assertThat(filter.lonFieldName(), equalTo("location.lon"));
assertThat(filter.fieldName(), equalTo("location"));
assertThat(filter.topLeft().lat, closeTo(40, 0.00001));
assertThat(filter.topLeft().lon, closeTo(-70, 0.00001));
assertThat(filter.bottomRight().lat, closeTo(30, 0.00001));
@ -1262,8 +1255,7 @@ public class SimpleIndexQueryParserTests {
assertThat(parsedQuery, instanceOf(FilteredQuery.class));
FilteredQuery filteredQuery = (FilteredQuery) parsedQuery;
GeoBoundingBoxFilter filter = (GeoBoundingBoxFilter) filteredQuery.getFilter();
assertThat(filter.latFieldName(), equalTo("location.lat"));
assertThat(filter.lonFieldName(), equalTo("location.lon"));
assertThat(filter.fieldName(), equalTo("location"));
assertThat(filter.topLeft().lat, closeTo(40, 0.00001));
assertThat(filter.topLeft().lon, closeTo(-70, 0.00001));
assertThat(filter.bottomRight().lat, closeTo(30, 0.00001));
@ -1278,8 +1270,7 @@ public class SimpleIndexQueryParserTests {
assertThat(parsedQuery.query(), instanceOf(FilteredQuery.class));
FilteredQuery filteredQuery = (FilteredQuery) parsedQuery.query();
GeoPolygonFilter filter = (GeoPolygonFilter) filteredQuery.getFilter();
assertThat(filter.latFieldName(), equalTo("location.lat"));
assertThat(filter.lonFieldName(), equalTo("location.lon"));
assertThat(filter.fieldName(), equalTo("location"));
assertThat(filter.points().length, equalTo(3));
assertThat(filter.points()[0].lat, closeTo(40, 0.00001));
assertThat(filter.points()[0].lon, closeTo(-70, 0.00001));
@ -1296,8 +1287,7 @@ public class SimpleIndexQueryParserTests {
assertThat(parsedQuery, instanceOf(FilteredQuery.class));
FilteredQuery filteredQuery = (FilteredQuery) parsedQuery;
GeoPolygonFilter filter = (GeoPolygonFilter) filteredQuery.getFilter();
assertThat(filter.latFieldName(), equalTo("location.lat"));
assertThat(filter.lonFieldName(), equalTo("location.lon"));
assertThat(filter.fieldName(), equalTo("location"));
assertThat(filter.points().length, equalTo(3));
assertThat(filter.points()[0].lat, closeTo(40, 0.00001));
assertThat(filter.points()[0].lon, closeTo(-70, 0.00001));
@ -1314,8 +1304,7 @@ public class SimpleIndexQueryParserTests {
assertThat(parsedQuery, instanceOf(FilteredQuery.class));
FilteredQuery filteredQuery = (FilteredQuery) parsedQuery;
GeoPolygonFilter filter = (GeoPolygonFilter) filteredQuery.getFilter();
assertThat(filter.latFieldName(), equalTo("location.lat"));
assertThat(filter.lonFieldName(), equalTo("location.lon"));
assertThat(filter.fieldName(), equalTo("location"));
assertThat(filter.points().length, equalTo(3));
assertThat(filter.points()[0].lat, closeTo(40, 0.00001));
assertThat(filter.points()[0].lon, closeTo(-70, 0.00001));
@ -1332,8 +1321,7 @@ public class SimpleIndexQueryParserTests {
assertThat(parsedQuery, instanceOf(FilteredQuery.class));
FilteredQuery filteredQuery = (FilteredQuery) parsedQuery;
GeoPolygonFilter filter = (GeoPolygonFilter) filteredQuery.getFilter();
assertThat(filter.latFieldName(), equalTo("location.lat"));
assertThat(filter.lonFieldName(), equalTo("location.lon"));
assertThat(filter.fieldName(), equalTo("location"));
assertThat(filter.points().length, equalTo(3));
assertThat(filter.points()[0].lat, closeTo(40, 0.00001));
assertThat(filter.points()[0].lon, closeTo(-70, 0.00001));
@ -1350,8 +1338,7 @@ public class SimpleIndexQueryParserTests {
assertThat(parsedQuery, instanceOf(FilteredQuery.class));
FilteredQuery filteredQuery = (FilteredQuery) parsedQuery;
GeoPolygonFilter filter = (GeoPolygonFilter) filteredQuery.getFilter();
assertThat(filter.latFieldName(), equalTo("location.lat"));
assertThat(filter.lonFieldName(), equalTo("location.lon"));
assertThat(filter.fieldName(), equalTo("location"));
assertThat(filter.points().length, equalTo(3));
assertThat(filter.points()[0].lat, closeTo(40, 0.00001));
assertThat(filter.points()[0].lon, closeTo(-70, 0.00001));

View File

@ -0,0 +1,7 @@
{
"person" : {
"properties" : {
"location" : { "type" : "geo_point" }
}
}
}
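With "location" mapped as a geo_point, a matching document can carry the point in any of the single-field forms the mapper accepts. A small sketch of building one such source, with illustrative coordinates (imports as in the earlier sketches):

    XContentBuilder docSource = XContentFactory.jsonBuilder().startObject()
            .startObject("location").field("lat", 40.0).field("lon", -70.0).endObject()
            .endObject();
    // equally valid per the mapper tests: .field("location", "40,-70")
    // or .field("location", GeoHashUtils.encode(40, -70))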

View File

@ -21,6 +21,7 @@ package org.elasticsearch.test.integration.search.geo;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.client.Client;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.test.integration.AbstractNodesTests;
import org.testng.annotations.AfterClass;
@ -62,7 +63,10 @@ public class GeoBoundingBoxTests extends AbstractNodesTests {
} catch (Exception e) {
// ignore
}
client.admin().indices().prepareCreate("test").execute().actionGet();
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type1")
.startObject("properties").startObject("location").field("type", "geo_point").field("lat_lon", true).endObject().endObject()
.endObject().endObject().string();
client.admin().indices().prepareCreate("test").addMapping("type1", mapping).execute().actionGet();
client.admin().cluster().prepareHealth().setWaitForGreenStatus().execute().actionGet();
client.prepareIndex("test", "type1", "1").setSource(jsonBuilder().startObject()
@ -125,7 +129,10 @@ public class GeoBoundingBoxTests extends AbstractNodesTests {
} catch (Exception e) {
// ignore
}
client.admin().indices().prepareCreate("test").setSettings(settingsBuilder().put("number_of_shards", "1")).execute().actionGet();
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type1")
.startObject("properties").startObject("location").field("type", "geo_point").field("lat_lon", true).endObject().endObject()
.endObject().endObject().string();
client.admin().indices().prepareCreate("test").addMapping("type1", mapping).setSettings(settingsBuilder().put("number_of_shards", "1")).execute().actionGet();
client.admin().cluster().prepareHealth().setWaitForGreenStatus().execute().actionGet();
client.prepareIndex("test", "type1", "1").setSource(jsonBuilder().startObject()
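The "multiple locations" part of this commit means a single document can now hold several points under one geo_point field. An illustrative multi-valued source, with the coordinates and the array-of-objects shape as assumptions (the array case is what testArrayLatLonValues above exercises):

    client.prepareIndex("test", "type1", "2").setSource(jsonBuilder().startObject()
            .startArray("location")
                .startObject().field("lat", 40.7143528).field("lon", -74.0059731).endObject()
                .startObject().field("lat", 40.718266).field("lon", -74.007819).endObject()
            .endArray()
            .endObject()).execute().actionGet();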

View File

@ -22,6 +22,7 @@ package org.elasticsearch.test.integration.search.geo;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.client.Client;
import org.elasticsearch.common.unit.DistanceUnit;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.search.facets.geodistance.GeoDistanceFacet;
import org.elasticsearch.test.integration.AbstractNodesTests;
import org.testng.annotations.AfterClass;
@ -62,7 +63,10 @@ public class GeoDistanceFacetTests extends AbstractNodesTests {
} catch (Exception e) {
// ignore
}
client.admin().indices().prepareCreate("test").execute().actionGet();
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type1")
.startObject("properties").startObject("location").field("type", "geo_point").field("lat_lon", true).endObject().endObject()
.endObject().endObject().string();
client.admin().indices().prepareCreate("test").addMapping("type1", mapping).execute().actionGet();
client.admin().cluster().prepareHealth().setWaitForGreenStatus().execute().actionGet();
// to NY: 0

View File

@ -21,6 +21,7 @@ package org.elasticsearch.test.integration.search.geo;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.client.Client;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.sort.SortBuilders;
import org.elasticsearch.search.sort.SortOrder;
@ -63,7 +64,10 @@ public class GeoDistanceTests extends AbstractNodesTests {
} catch (Exception e) {
// ignore
}
client.admin().indices().prepareCreate("test").execute().actionGet();
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type1")
.startObject("properties").startObject("location").field("type", "geo_point").field("lat_lon", true).endObject().endObject()
.endObject().endObject().string();
client.admin().indices().prepareCreate("test").addMapping("type1", mapping).execute().actionGet();
client.admin().cluster().prepareHealth().setWaitForGreenStatus().execute().actionGet();
client.prepareIndex("test", "type1", "1").setSource(jsonBuilder().startObject()
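Given the SortBuilders and SortOrder imports added to this test, sorting by distance from a point against the single "location" field would look roughly like the call below; the builder method names are an assumption from memory, not taken from this diff:

    // assumed shape of the geo distance sort builder in this era
    client.prepareSearch("test")
            .addSort(SortBuilders.geoDistanceSort("location").point(40.7143528, -74.0059731).order(SortOrder.ASC))
            .execute().actionGet();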

View File

@ -106,7 +106,10 @@ public class TwitterRiver extends AbstractRiverComponent implements River {
@Override public void start() {
logger.info("starting twitter stream");
try {
client.admin().indices().prepareCreate(indexName).execute().actionGet();
String mapping = XContentFactory.jsonBuilder().startObject().startObject(typeName)
.startObject("properties").startObject("location").field("type", "geo_point").endObject().endObject()
.endObject().endObject().string();
client.admin().indices().prepareCreate(indexName).addMapping(typeName, mapping).execute().actionGet();
} catch (Exception e) {
if (ExceptionsHelper.unwrapCause(e) instanceof IndexAlreadyExistsException) {
// that's fine