improve geohash_filter to use terms filter
and various other cleanups

parent 6fd74fa39e · commit 58e68db148
GeoHashUtils.java
@@ -107,30 +107,30 @@ public class GeoHashUtils {
     }

     private static final char encode(int x, int y) {
-        return BASE_32[((x&1) + ((y&1)*2) + ((x&2)*2) + ((y&2)*4) + ((x&4)*4)) % 32];
+        return BASE_32[((x & 1) + ((y & 1) * 2) + ((x & 2) * 2) + ((y & 2) * 4) + ((x & 4) * 4)) % 32];
     }

     /**
      * Calculate all neighbors of a given geohash cell.
      *
      * @param geohash Geohash of the defined cell
      * @return geohashes of all neighbor cells
      */
-    public static String[] neighbors(String geohash) {
-        List<String> neighbors = addNeighbors(geohash, geohash.length(), new ArrayList<String>(8));
-        return neighbors.toArray(new String[neighbors.size()]);
+    public static List<String> neighbors(String geohash) {
+        return addNeighbors(geohash, geohash.length(), new ArrayList<String>(8));
     }

     /**
      * Calculate the geohash of a neighbor of a geohash
      *
      * @param geohash the geohash of a cell
      * @param level   level of the geohash
      * @param dx      delta of the first grid coordinate (must be -1, 0 or +1)
      * @param dy      delta of the second grid coordinate (must be -1, 0 or +1)
      * @return geohash of the defined cell
      */
     private final static String neighbor(String geohash, int level, int dx, int dy) {
-        int cell = decode(geohash.charAt(level-1));
+        int cell = decode(geohash.charAt(level - 1));

         // Decoding the Geohash bit pattern to determine grid coordinates
         int x0 = cell & 1; // first bit of x
@@ -142,32 +142,32 @@ public class GeoHashUtils {
         // combine the bitpattern to grid coordinates.
         // note that the semantics of x and y are swapping
         // on each level
-        int x = x0 + (x1/2) + (x2 / 4);
-        int y = (y0/2) + (y1/4);
+        int x = x0 + (x1 / 2) + (x2 / 4);
+        int y = (y0 / 2) + (y1 / 4);

-        if(level == 1) {
+        if (level == 1) {
             // Root cells at north (namely "bcfguvyz") or at
             // south (namely "0145hjnp") do not have neighbors
             // in north/south direction
-            if((dy < 0 && y == 0) || (dy > 0 && y == 3)) {
+            if ((dy < 0 && y == 0) || (dy > 0 && y == 3)) {
                 return null;
             } else {
                 return Character.toString(encode(x + dx, y + dy));
             }
         } else {
             // define grid coordinates for next level
-            final int nx = ((level % 2) == 1) ?(x + dx) :(x + dy);
-            final int ny = ((level % 2) == 1) ?(y + dy) :(y + dx);
+            final int nx = ((level % 2) == 1) ? (x + dx) : (x + dy);
+            final int ny = ((level % 2) == 1) ? (y + dy) : (y + dx);

             // define grid limits for current level
-            final int xLimit = ((level % 2) == 0) ?7 :3;
-            final int yLimit = ((level % 2) == 0) ?3 :7;
+            final int xLimit = ((level % 2) == 0) ? 7 : 3;
+            final int yLimit = ((level % 2) == 0) ? 3 : 7;

             // if the defined neighbor has the same parent as the current cell
             // encode the cell directly. Otherwise find the cell next to this
             // cell recursively. Since encoding wraps around within a cell
             // it can be encoded here.
-            if(nx >= 0 && nx<=xLimit && ny>=0 && ny<yLimit) {
+            if (nx >= 0 && nx <= xLimit && ny >= 0 && ny < yLimit) {
                 return geohash.substring(0, level - 1) + encode(nx, ny);
             } else {
                 return neighbor(geohash, level - 1, dx, dy) + encode(nx, ny);
@@ -177,9 +177,9 @@ public class GeoHashUtils {

     /**
      * Add all geohashes of the cells next to a given geohash to a list.
      *
      * @param geohash   Geohash of a specified cell
      * @param length    level of the given geohash
      * @param neighbors list to add the neighbors to
      * @return the given list
      */
@@ -187,7 +187,7 @@ public class GeoHashUtils {
         String south = neighbor(geohash, length, 0, -1);
         String north = neighbor(geohash, length, 0, +1);

-        if(north != null) {
+        if (north != null) {
             neighbors.add(neighbor(north, length, -1, 0));
             neighbors.add(north);
             neighbors.add(neighbor(north, length, +1, 0));
@@ -196,7 +196,7 @@ public class GeoHashUtils {
         neighbors.add(neighbor(geohash, length, -1, 0));
         neighbors.add(neighbor(geohash, length, +1, 0));

-        if(south != null) {
+        if (south != null) {
             neighbors.add(neighbor(south, length, -1, 0));
             neighbors.add(south);
             neighbors.add(neighbor(south, length, +1, 0));
@@ -207,40 +207,72 @@ public class GeoHashUtils {

     private static final int decode(char geo) {
         switch (geo) {
-            case '0': return 0;
-            case '1': return 1;
-            case '2': return 2;
-            case '3': return 3;
-            case '4': return 4;
-            case '5': return 5;
-            case '6': return 6;
-            case '7': return 7;
-            case '8': return 8;
-            case '9': return 9;
-            case 'b': return 10;
-            case 'c': return 11;
-            case 'd': return 12;
-            case 'e': return 13;
-            case 'f': return 14;
-            case 'g': return 15;
-            case 'h': return 16;
-            case 'j': return 17;
-            case 'k': return 18;
-            case 'm': return 19;
-            case 'n': return 20;
-            case 'p': return 21;
-            case 'q': return 22;
-            case 'r': return 23;
-            case 's': return 24;
-            case 't': return 25;
-            case 'u': return 26;
-            case 'v': return 27;
-            case 'w': return 28;
-            case 'x': return 29;
-            case 'y': return 30;
-            case 'z': return 31;
-            default:
-                throw new ElasticSearchIllegalArgumentException("the character '"+geo+"' is not a valid geohash character");
+            case '0':
+                return 0;
+            case '1':
+                return 1;
+            case '2':
+                return 2;
+            case '3':
+                return 3;
+            case '4':
+                return 4;
+            case '5':
+                return 5;
+            case '6':
+                return 6;
+            case '7':
+                return 7;
+            case '8':
+                return 8;
+            case '9':
+                return 9;
+            case 'b':
+                return 10;
+            case 'c':
+                return 11;
+            case 'd':
+                return 12;
+            case 'e':
+                return 13;
+            case 'f':
+                return 14;
+            case 'g':
+                return 15;
+            case 'h':
+                return 16;
+            case 'j':
+                return 17;
+            case 'k':
+                return 18;
+            case 'm':
+                return 19;
+            case 'n':
+                return 20;
+            case 'p':
+                return 21;
+            case 'q':
+                return 22;
+            case 'r':
+                return 23;
+            case 's':
+                return 24;
+            case 't':
+                return 25;
+            case 'u':
+                return 26;
+            case 'v':
+                return 27;
+            case 'w':
+                return 28;
+            case 'x':
+                return 29;
+            case 'y':
+                return 30;
+            case 'z':
+                return 31;
+            default:
+                throw new ElasticSearchIllegalArgumentException("the character '" + geo + "' is not a valid geohash character");
         }
     }
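For reference, the switch above is just an index lookup into the base-32 geohash alphabet. A standalone sketch that computes the same mapping (not the project's code; the class name is illustrative):

    public class DecodeDemo {
        // the 32 geohash characters in order; i, l and o are intentionally absent
        private static final String BASE_32 = "0123456789bcdefghjkmnpqrstuvwxyz";

        static int decode(char geo) {
            int index = BASE_32.indexOf(geo);
            if (index < 0) {
                throw new IllegalArgumentException("the character '" + geo + "' is not a valid geohash character");
            }
            return index;
        }

        public static void main(String[] args) {
            System.out.println(decode('7')); // 7
            System.out.println(decode('b')); // 10
            System.out.println(decode('z')); // 31
        }
    }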
@@ -265,7 +297,7 @@ public class GeoHashUtils {
     /**
      * Decodes the given geohash into a geohash cell defined by the points northWest and southEast
      *
      * @param geohash   Geohash to decode
      * @param northWest the point north/west of the cell
      * @param southEast the point south/east of the cell
      */
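Taken together, the GeoHashUtils changes swap the array-returning neighbors(...) for a List-returning one. A hypothetical caller (a sketch, assuming the class from this commit is on the classpath; the geohash value is an example only) now reads:

    import org.elasticsearch.common.geo.GeoHashUtils;

    import java.util.List;

    public class NeighborsDemo {
        public static void main(String[] args) {
            // After this commit the result is a List<String>, so callers such as
            // the tests below no longer need Arrays.asList(...) around it.
            List<String> cells = GeoHashUtils.neighbors("u33d");
            System.out.println(cells); // the up to 8 cells surrounding "u33d"
        }
    }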

FieldMapper.java
@@ -169,7 +169,7 @@ public interface FieldMapper<T> {

     Filter termFilter(Object value, @Nullable QueryParseContext context);

-    Filter termsFilter(List<Object> values, @Nullable QueryParseContext context);
+    Filter termsFilter(List values, @Nullable QueryParseContext context);

     Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, @Nullable QueryParseContext context);
@@ -200,6 +200,6 @@ public interface FieldMapper<T> {
     FieldDataType fieldDataType();

     PostingsFormatProvider postingsFormatProvider();

-
+    boolean isNumeric();
 }

AbstractFieldMapper.java
@@ -447,7 +447,7 @@ public abstract class AbstractFieldMapper<T> implements FieldMapper<T>, Mapper {
     }

     @Override
-    public Filter termsFilter(List<Object> values, @Nullable QueryParseContext context) {
+    public Filter termsFilter(List values, @Nullable QueryParseContext context) {
         BytesRef[] bytesRefs = new BytesRef[values.size()];
         for (int i = 0; i < bytesRefs.length; i++) {
             bytesRefs[i] = indexedValueForSearch(values.get(i));
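The List-typed override above collects the indexed BytesRef form of each value; the truncated hunk presumably feeds that array into a single Lucene terms filter. A rough standalone equivalent of what such a filter does (a sketch, not the project's code; assumes the TermsFilter class from the Lucene 4.x lucene-queries module):

    import org.apache.lucene.index.Term;
    import org.apache.lucene.queries.TermsFilter;
    import org.apache.lucene.search.Filter;

    import java.util.List;

    public class TermsFilterSketch {
        // one filter matching documents that contain any of the given exact terms,
        // instead of a BooleanFilter with one SHOULD term clause per value
        static Filter anyOf(String field, List<String> values) {
            Term[] terms = new Term[values.size()];
            for (int i = 0; i < terms.length; i++) {
                terms[i] = new Term(field, values.get(i));
            }
            return new TermsFilter(terms);
        }
    }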
@@ -647,7 +647,7 @@ public abstract class AbstractFieldMapper<T> implements FieldMapper<T>, Mapper {
         }

         if (customFieldDataSettings != null) {
-            builder.field("fielddata", (Map)customFieldDataSettings.getAsMap());
+            builder.field("fielddata", (Map) customFieldDataSettings.getAsMap());
         }
     }
@@ -669,7 +669,7 @@ public abstract class AbstractFieldMapper<T> implements FieldMapper<T>, Mapper {
     public static String termVectorOptionsToString(FieldType fieldType) {
         if (!fieldType.storeTermVectors()) {
             return "no";
-        } else if(!fieldType.storeTermVectorOffsets() && !fieldType.storeTermVectorPositions()) {
+        } else if (!fieldType.storeTermVectorOffsets() && !fieldType.storeTermVectorPositions()) {
             return "yes";
         } else if (fieldType.storeTermVectorOffsets() && !fieldType.storeTermVectorPositions()) {
             return "with_offsets";

GeoPointFieldMapper.java
@@ -19,11 +19,8 @@

 package org.elasticsearch.index.mapper.geo;

-import static org.elasticsearch.index.mapper.MapperBuilders.doubleField;
-import static org.elasticsearch.index.mapper.MapperBuilders.stringField;
-import static org.elasticsearch.index.mapper.core.TypeParsers.parsePathType;
-import static org.elasticsearch.index.mapper.core.TypeParsers.parseStore;
-
+import org.apache.lucene.document.FieldType;
+import org.apache.lucene.index.FieldInfo.IndexOptions;
 import org.elasticsearch.ElasticSearchIllegalArgumentException;
 import org.elasticsearch.common.Nullable;
 import org.elasticsearch.common.Strings;
@@ -37,14 +34,7 @@ import org.elasticsearch.common.xcontent.support.XContentMapValues;
 import org.elasticsearch.index.analysis.NamedAnalyzer;
 import org.elasticsearch.index.codec.postingsformat.PostingsFormatProvider;
 import org.elasticsearch.index.fielddata.FieldDataType;
-import org.elasticsearch.index.mapper.ContentPath;
-import org.elasticsearch.index.mapper.FieldMapperListener;
-import org.elasticsearch.index.mapper.Mapper;
-import org.elasticsearch.index.mapper.MapperParsingException;
-import org.elasticsearch.index.mapper.MergeContext;
-import org.elasticsearch.index.mapper.MergeMappingException;
-import org.elasticsearch.index.mapper.ObjectMapperListener;
-import org.elasticsearch.index.mapper.ParseContext;
+import org.elasticsearch.index.mapper.*;
 import org.elasticsearch.index.mapper.core.AbstractFieldMapper;
 import org.elasticsearch.index.mapper.core.DoubleFieldMapper;
 import org.elasticsearch.index.mapper.core.NumberFieldMapper;
@@ -55,8 +45,10 @@ import java.io.IOException;
 import java.util.Locale;
 import java.util.Map;

-import org.apache.lucene.document.FieldType;
-import org.apache.lucene.index.FieldInfo.IndexOptions;
+import static org.elasticsearch.index.mapper.MapperBuilders.doubleField;
+import static org.elasticsearch.index.mapper.MapperBuilders.stringField;
+import static org.elasticsearch.index.mapper.core.TypeParsers.parsePathType;
+import static org.elasticsearch.index.mapper.core.TypeParsers.parseStore;

 /**
  * Parsing: We handle:
@@ -109,7 +101,7 @@ public class GeoPointFieldMapper implements Mapper, ArrayValueMapperParser {
         private ContentPath.Type pathType = Defaults.PATH_TYPE;

         private boolean enableGeoHash = Defaults.ENABLE_GEOHASH;

-
+        private boolean enableGeohashPrefix = Defaults.ENABLE_GEOHASH_PREFIX;

         private boolean enableLatLon = Defaults.ENABLE_LATLON;
@@ -219,6 +211,10 @@ public class GeoPointFieldMapper implements Mapper, ArrayValueMapperParser {
                 builder.enableGeoHash(XContentMapValues.nodeBooleanValue(fieldNode));
             } else if (fieldName.equals("geohash_prefix")) {
                 builder.geohashPrefix(XContentMapValues.nodeBooleanValue(fieldNode));
+                if (XContentMapValues.nodeBooleanValue(fieldNode)) {
+                    // automatically set geohash to true as well...
+                    builder.enableGeoHash(true);
+                }
             } else if (fieldName.equals("precision_step")) {
                 builder.precisionStep(XContentMapValues.nodeIntegerValue(fieldNode));
             } else if (fieldName.equals("geohash_precision")) {
@@ -250,7 +246,7 @@ public class GeoPointFieldMapper implements Mapper, ArrayValueMapperParser {
     private final boolean enableLatLon;

     private final boolean enableGeoHash;

-
+    private final boolean enableGeohashPrefix;

     private final Integer precisionStep;
@@ -282,7 +278,7 @@ public class GeoPointFieldMapper implements Mapper, ArrayValueMapperParser {
         this.enableGeohashPrefix = enableGeohashPrefix;
         this.precisionStep = precisionStep;
         this.precision = precision;

         this.latMapper = latMapper;
         this.lonMapper = lonMapper;
         this.geoStringMapper = geoStringMapper;
@@ -313,7 +309,7 @@ public class GeoPointFieldMapper implements Mapper, ArrayValueMapperParser {
     public GeoStringFieldMapper stringMapper() {
         return this.geoStringMapper;
     }

     public StringFieldMapper geoHashStringMapper() {
         return this.geohashMapper;
     }
@@ -414,15 +410,16 @@ public class GeoPointFieldMapper implements Mapper, ArrayValueMapperParser {
     }

     private void parseGeohashField(ParseContext context, String geohash) throws IOException {
-        int len = enableGeohashPrefix ?Math.min(precision, geohash.length()) :1;
-
-        for (int i = 0; i < len; i++) {
-            context.externalValue(geohash.substring(0, geohash.length() - i));
+        int len = Math.min(precision, geohash.length());
+        int min = enableGeohashPrefix ? 1 : geohash.length();
+
+        for (int i = len; i >= min; i--) {
+            context.externalValue(geohash.substring(0, i));
             // side effect of this call is adding the field
             geohashMapper.parse(context);
         }
     }

     private void parseLatLon(ParseContext context, double lat, double lon) throws IOException {
         if (normalizeLat || normalizeLon) {
             GeoPoint point = new GeoPoint(lat, lon);
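The reworked parseGeohashField(...) is the piece that makes a prefix-based cell filter possible: with geohash_prefix enabled it indexes every leading prefix of the pin's geohash, not just the full cell. A small illustration of the loop's output (a sketch; the geohash and precision values are examples only):

    public class PrefixDemo {
        public static void main(String[] args) {
            String geohash = "u33d8u";
            boolean enableGeohashPrefix = true;
            int precision = 12; // geohash_precision from the mapping

            int len = Math.min(precision, geohash.length());
            int min = enableGeohashPrefix ? 1 : geohash.length();
            for (int i = len; i >= min; i--) {
                // emits u33d8u, u33d8, u33d, u33, u3, u as separate terms
                System.out.println(geohash.substring(0, i));
            }
        }
    }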

IdFieldMapper.java
@@ -19,12 +19,7 @@

 package org.elasticsearch.index.mapper.internal;

-import static org.elasticsearch.index.mapper.MapperBuilders.id;
-import static org.elasticsearch.index.mapper.core.TypeParsers.parseField;
-import java.io.IOException;
-import java.util.Collection;
-import java.util.List;
-import java.util.Map;
-import com.google.common.collect.Iterables;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.document.FieldType;
 import org.apache.lucene.index.FieldInfo.IndexOptions;
@@ -46,7 +41,13 @@ import org.elasticsearch.index.mapper.*;
 import org.elasticsearch.index.mapper.core.AbstractFieldMapper;
 import org.elasticsearch.index.query.QueryParseContext;

+import com.google.common.collect.Iterables;
+
+import java.io.IOException;
+import java.util.Collection;
+import java.util.List;
+import java.util.Map;
+
+import static org.elasticsearch.index.mapper.MapperBuilders.id;
+import static org.elasticsearch.index.mapper.core.TypeParsers.parseField;

 /**
  *
@@ -176,7 +177,7 @@ public class IdFieldMapper extends AbstractFieldMapper<String> implements Intern
     }

     @Override
-    public Filter termsFilter(List<Object> values, @Nullable QueryParseContext context) {
+    public Filter termsFilter(List values, @Nullable QueryParseContext context) {
         if (fieldType.indexed() || context == null) {
             return super.termsFilter(values, context);
         }

ParentFieldMapper.java
@@ -249,7 +249,7 @@ public class ParentFieldMapper extends AbstractFieldMapper<Uid> implements Inter
     }

     @Override
-    public Filter termsFilter(List<Object> values, @Nullable QueryParseContext context) {
+    public Filter termsFilter(List values, @Nullable QueryParseContext context) {
         if (context == null) {
             return super.termFilter(values, context);
         }

GeohashFilter.java
@@ -19,8 +19,11 @@

 package org.elasticsearch.index.query;

+import org.apache.lucene.search.Filter;
 import org.elasticsearch.ElasticSearchIllegalArgumentException;
 import org.elasticsearch.ElasticSearchParseException;
+import org.elasticsearch.common.Nullable;
+import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.geo.GeoHashUtils;
 import org.elasticsearch.common.geo.GeoPoint;
 import org.elasticsearch.common.inject.Inject;
@@ -32,11 +35,8 @@ import org.elasticsearch.index.mapper.MapperService;
 import org.elasticsearch.index.mapper.core.StringFieldMapper;
 import org.elasticsearch.index.mapper.geo.GeoPointFieldMapper;

-import org.apache.lucene.queries.BooleanFilter;
-import org.apache.lucene.search.Filter;
-import org.apache.lucene.search.BooleanClause.Occur;
-
 import java.io.IOException;
 import java.util.List;

 /**
  * A geohash filter filters {@link GeoPoint}s by their geohashes. Basically the a
@@ -51,7 +51,7 @@ import java.io.IOException;
  *     "geohash":"u33d8u5dkx8k",
  *     "neighbors":false
  * }
  * </pre>
  */
 public class GeohashFilter {
@@ -62,38 +62,32 @@ public class GeohashFilter {

     /**
      * Create a new geohash filter for a given set of geohashes. In general this method
      * returns a boolean filter combining the geohashes OR-wise.
      *
      * @param context     Context of the filter
      * @param fieldMapper field mapper for geopoints
      * @param geohash     mandatory geohash
      * @param geohashes   optional array of additional geohashes
      * @return a new geohash filter
      */
-    public static Filter create(QueryParseContext context, GeoPointFieldMapper fieldMapper, String geohash, String...geohashes) {
-        if(fieldMapper.geoHashStringMapper() == null) {
-            throw new ElasticSearchIllegalArgumentException("geohash filter needs geohashes to be enabled");
+    public static Filter create(QueryParseContext context, GeoPointFieldMapper fieldMapper, String geohash, @Nullable List<String> geohashes) {
+        if (fieldMapper.geoHashStringMapper() == null) {
+            throw new ElasticSearchIllegalArgumentException("geohash filter needs geohash_prefix to be enabled");
         }

-        StringFieldMapper stringMapper = fieldMapper.geoHashStringMapper();
-        if(geohashes == null || geohashes.length == 0) {
-            return stringMapper.termFilter(geohash, context);
+        StringFieldMapper geoHashMapper = fieldMapper.geoHashStringMapper();
+        if (geohashes == null || geohashes.size() == 0) {
+            return geoHashMapper.termFilter(geohash, context);
         } else {
-            BooleanFilter booleanFilter = new BooleanFilter();
-            booleanFilter.add(stringMapper.termFilter(geohash, context), Occur.SHOULD);
-
-            for (int i = 0; i < geohashes.length; i++) {
-                booleanFilter.add(stringMapper.termFilter(geohashes[i], context), Occur.SHOULD);
-            }
-            return booleanFilter;
+            geohashes.add(geohash);
+            return geoHashMapper.termsFilter(geohashes, context);
         }
     }

     /**
      * Builder for a geohash filter. It needs the fields <code>fieldname</code> and
      * <code>geohash</code> to be set. The default for neighbor filtering is
      * <code>false</code>.
      */
     public static class Builder extends BaseFilterBuilder {
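The rewritten create(...) is where the commit's title lands: the cell plus its neighbors now go into a single terms filter instead of a BooleanFilter of SHOULD clauses. A condensed sketch of that path (identifiers follow the diff; the class around them is assumed, and it presumes the elasticsearch classes from this commit are on the classpath):

    import org.apache.lucene.search.Filter;
    import org.elasticsearch.common.geo.GeoHashUtils;
    import org.elasticsearch.index.mapper.core.StringFieldMapper;
    import org.elasticsearch.index.query.QueryParseContext;

    import java.util.List;

    public class GeohashCellSketch {
        static Filter cellAndNeighbors(StringFieldMapper geoHashMapper, QueryParseContext context, String geohash) {
            List<String> cells = GeoHashUtils.neighbors(geohash); // the up to 8 surrounding cells
            cells.add(geohash);                                   // plus the cell itself
            return geoHashMapper.termsFilter(cells, context);     // one filter for all cells
        }
    }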
@@ -136,7 +130,7 @@ public class GeohashFilter {
             builder.startObject(NAME);
             builder.field(FIELDNAME, fieldname);
             builder.field(GEOHASH, geohash);
-            if(neighbors) {
+            if (neighbors) {
                 builder.field(NEIGHBORS, neighbors);
             }
             builder.endObject();
@@ -151,7 +145,7 @@ public class GeohashFilter {

         @Override
         public String[] names() {
-            return new String[]{NAME};
+            return new String[]{NAME, Strings.toCamelCase(NAME)};
         }

         @Override
@@ -163,15 +157,15 @@ public class GeohashFilter {
             boolean neighbors = false;

             XContentParser.Token token;
-            if((token = parser.currentToken()) != Token.START_OBJECT) {
+            if ((token = parser.currentToken()) != Token.START_OBJECT) {
                 throw new ElasticSearchParseException(NAME + " must be an object");
             }

             while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
-                if(token == Token.FIELD_NAME) {
+                if (token == Token.FIELD_NAME) {
                     String field = parser.text();

-                    if(FIELDNAME.equals(field)) {
+                    if (FIELDNAME.equals(field)) {
                         parser.nextToken();
                         fieldName = parser.text();
                     } else if (GEOHASH.equals(field)) {
@@ -181,10 +175,10 @@ public class GeohashFilter {
                         parser.nextToken();
                         neighbors = parser.booleanValue();
                     } else {
-                        throw new ElasticSearchParseException("unexpected field ["+field+"]");
+                        throw new ElasticSearchParseException("unexpected field [" + field + "]");
                     }
                 } else {
-                    throw new ElasticSearchParseException("unexpected token ["+token+"]");
+                    throw new ElasticSearchParseException("unexpected token [" + token + "]");
                 }
             }
@@ -200,10 +194,10 @@ public class GeohashFilter {

             GeoPointFieldMapper geoMapper = ((GeoPointFieldMapper.GeoStringFieldMapper) mapper).geoMapper();

-            if(neighbors) {
+            if (neighbors) {
                 return create(parseContext, geoMapper, geohash, GeoHashUtils.neighbors(geohash));
             } else {
-                return create(parseContext, geoMapper, geohash);
+                return create(parseContext, geoMapper, geohash, null);
             }
         }
     }

GeoFilterTests.java
@@ -19,27 +19,15 @@

 package org.elasticsearch.test.integration.search.geo;

-import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
-import static org.elasticsearch.index.query.FilterBuilders.geoBoundingBoxFilter;
-import static org.elasticsearch.index.query.FilterBuilders.geoDistanceFilter;
-import static org.elasticsearch.index.query.QueryBuilders.fieldQuery;
-import static org.elasticsearch.index.query.QueryBuilders.filteredQuery;
-import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;
-import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.*;
-import static org.hamcrest.MatcherAssert.assertThat;
-import static org.hamcrest.Matchers.anyOf;
-import static org.hamcrest.Matchers.equalTo;
-import static org.hamcrest.Matchers.lessThanOrEqualTo;
-import static org.hamcrest.Matchers.containsInAnyOrder;
-
-import java.io.ByteArrayOutputStream;
-import java.io.FileNotFoundException;
-import java.io.IOException;
-import java.io.InputStream;
-import java.util.Arrays;
-import java.util.Random;
-import java.util.zip.GZIPInputStream;
-
+import com.spatial4j.core.context.SpatialContext;
+import com.spatial4j.core.distance.DistanceUtils;
+import com.spatial4j.core.exception.InvalidShapeException;
+import com.spatial4j.core.shape.Shape;
+import org.apache.lucene.spatial.prefix.RecursivePrefixTreeStrategy;
+import org.apache.lucene.spatial.prefix.tree.GeohashPrefixTree;
+import org.apache.lucene.spatial.query.SpatialArgs;
+import org.apache.lucene.spatial.query.SpatialOperation;
+import org.apache.lucene.spatial.query.UnsupportedSpatialOperation;
 import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder;
 import org.elasticsearch.action.bulk.BulkItemResponse;
 import org.elasticsearch.action.bulk.BulkResponse;
@@ -58,20 +46,24 @@ import org.elasticsearch.index.query.FilterBuilders;
 import org.elasticsearch.index.query.QueryBuilders;
 import org.elasticsearch.search.SearchHit;
 import org.elasticsearch.test.integration.AbstractSharedClusterTest;
-
-import org.apache.lucene.spatial.prefix.RecursivePrefixTreeStrategy;
-import org.apache.lucene.spatial.prefix.tree.GeohashPrefixTree;
-import org.apache.lucene.spatial.query.SpatialArgs;
-import org.apache.lucene.spatial.query.SpatialOperation;
-import org.apache.lucene.spatial.query.UnsupportedSpatialOperation;
-
 import org.testng.annotations.BeforeTest;
 import org.testng.annotations.Test;

-import com.spatial4j.core.context.SpatialContext;
-import com.spatial4j.core.distance.DistanceUtils;
-import com.spatial4j.core.exception.InvalidShapeException;
-import com.spatial4j.core.shape.Shape;
+import java.io.ByteArrayOutputStream;
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.List;
+import java.util.Random;
+import java.util.zip.GZIPInputStream;
+
+import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
+import static org.elasticsearch.index.query.FilterBuilders.geoBoundingBoxFilter;
+import static org.elasticsearch.index.query.FilterBuilders.geoDistanceFilter;
+import static org.elasticsearch.index.query.QueryBuilders.*;
+import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.*;
+import static org.hamcrest.MatcherAssert.assertThat;
+import static org.hamcrest.Matchers.*;

 /**
  *
@@ -113,59 +105,63 @@ public class GeoFilterTests extends AbstractSharedClusterTest {
         try {
             // self intersection polygon
             ShapeBuilder.newPolygon()
                     .point(-10, -10)
                     .point(10, 10)
                     .point(-10, 10)
                     .point(10, -10)
                     .close().build();
             assert false : "Self intersection not detected";
-        } catch (InvalidShapeException e) {}
+        } catch (InvalidShapeException e) {
+        }

         // polygon with hole
         ShapeBuilder.newPolygon()
                 .point(-10, -10).point(-10, 10).point(10, 10).point(10, -10)
                 .hole()
                 .point(-5, -5).point(-5, 5).point(5, 5).point(5, -5)
                 .close().close().build();

         try {
             // polygon with overlapping hole
             ShapeBuilder.newPolygon()
                     .point(-10, -10).point(-10, 10).point(10, 10).point(10, -10)
                     .hole()
                     .point(-5, -5).point(-5, 11).point(5, 11).point(5, -5)
                     .close().close().build();

             assert false : "Self intersection not detected";
-        } catch (InvalidShapeException e) {}
+        } catch (InvalidShapeException e) {
+        }

         try {
             // polygon with intersection holes
             ShapeBuilder.newPolygon()
                     .point(-10, -10).point(-10, 10).point(10, 10).point(10, -10)
                     .hole()
                     .point(-5, -5).point(-5, 5).point(5, 5).point(5, -5)
                     .close()
                     .hole()
                     .point(-5, -6).point(5, -6).point(5, -4).point(-5, -4)
                     .close()
                     .close().build();
             assert false : "Intersection of holes not detected";
-        } catch (InvalidShapeException e) {}
+        } catch (InvalidShapeException e) {
+        }

         try {
             // Common line in polygon
             ShapeBuilder.newPolygon()
                     .point(-10, -10)
                     .point(-10, 10)
                     .point(-5, 10)
                     .point(-5, -5)
                     .point(-5, 20)
                     .point(10, 20)
                     .point(10, -10)
                     .close().build();
             assert false : "Self intersection not detected";
-        } catch (InvalidShapeException e) {}
+        } catch (InvalidShapeException e) {
+        }

         // Not specified
         // try {
@@ -185,16 +181,16 @@ public class GeoFilterTests extends AbstractSharedClusterTest {

         // Multipolygon: polygon with hole and polygon within the hole
         ShapeBuilder.newMultiPolygon()
                 .polygon()
                 .point(-10, -10).point(-10, 10).point(10, 10).point(10, -10)
                 .hole()
                 .point(-5, -5).point(-5, 5).point(5, 5).point(5, -5)
                 .close()
                 .close()
                 .polygon()
                 .point(-4, -4).point(-4, 4).point(4, 4).point(4, -4)
                 .close()
                 .build();

         // Not supported
         // try {
@@ -218,22 +214,22 @@ public class GeoFilterTests extends AbstractSharedClusterTest {
     @Test
     public void testShapeRelations() throws Exception {

-        assert intersectSupport: "Intersect relation is not supported";
-        assert disjointSupport: "Disjoint relation is not supported";
-        assert withinSupport: "within relation is not supported";
+        assert intersectSupport : "Intersect relation is not supported";
+        assert disjointSupport : "Disjoint relation is not supported";
+        assert withinSupport : "within relation is not supported";

         String mapping = XContentFactory.jsonBuilder()
                 .startObject()
                 .startObject("polygon")
                 .startObject("properties")
                 .startObject("area")
                 .field("type", "geo_shape")
                 .field("tree", "geohash")
                 .field("store", true)
                 .endObject()
                 .endObject()
                 .endObject()
                 .endObject().string();

         CreateIndexRequestBuilder mappingRequest = client().admin().indices().prepareCreate("shapes").addMapping("polygon", mapping);
@@ -244,15 +240,15 @@ public class GeoFilterTests extends AbstractSharedClusterTest {
         // with a hole of size 5x5 equidistant from all sides. This hole in turn contains
         // the second polygon of size 4x4 equidistant from all sides
         MultiPolygonBuilder polygon = ShapeBuilder.newMultiPolygon()
                 .polygon()
                 .point(-10, -10).point(-10, 10).point(10, 10).point(10, -10)
                 .hole()
                 .point(-5, -5).point(-5, 5).point(5, 5).point(5, -5)
                 .close()
                 .close()
                 .polygon()
                 .point(-4, -4).point(-4, 4).point(4, 4).point(4, -4)
                 .close();

         BytesReference data = polygon.toXContent("area", jsonBuilder().startObject()).endObject().bytes();
         client().prepareIndex("shapes", "polygon", "1").setSource(data).execute().actionGet();
@@ -293,7 +289,7 @@ public class GeoFilterTests extends AbstractSharedClusterTest {
         assertHitCount(result, 1);
         assertFirstHit(result, hasId("1"));

-        if(disjointSupport) {
+        if (disjointSupport) {
             // Point not in polygon
             result = client().prepareSearch()
                     .setQuery(matchAllQuery())
@@ -312,11 +308,11 @@ public class GeoFilterTests extends AbstractSharedClusterTest {

         // Create a polygon that fills the empty area of the polygon defined above
         PolygonBuilder inverse = ShapeBuilder.newPolygon()
                 .point(-5, -5).point(-5, 5).point(5, 5).point(5, -5)
                 .hole()
                 .point(-4, -4).point(-4, 4).point(4, 4).point(4, -4)
                 .close()
                 .close();

         data = inverse.toXContent("area", jsonBuilder().startObject()).endObject().bytes();
         client().prepareIndex("shapes", "polygon", "2").setSource(data).execute().actionGet();
@@ -334,11 +330,11 @@ public class GeoFilterTests extends AbstractSharedClusterTest {
         PolygonBuilder builder = ShapeBuilder.newPolygon()
                 .point(-10, -10).point(-10, 10).point(10, 10).point(10, -10)
                 .hole()
                 .point(-5, -5).point(-5, 5).point(10, 5).point(10, -5)
                 .close()
                 .close();

-        if(withinSupport) {
+        if (withinSupport) {
             // Polygon WithIn Polygon
             builder = ShapeBuilder.newPolygon()
                     .point(-30, -30).point(-30, 30).point(30, 30).point(30, -30).close();
@@ -358,7 +354,7 @@ public class GeoFilterTests extends AbstractSharedClusterTest {
          * first polygon. This approach can also be applied to the holes because the
          * commonline of hole and polygon will not be recognized as intersection.
          */

         // // Create a polygon crossing longitude 180.
         // builder = ShapeBuilder.newPolygon()
         // .point(170, -10).point(180, 10).point(170, -10).point(10, -10)
@@ -374,32 +370,32 @@ public class GeoFilterTests extends AbstractSharedClusterTest {
         byte[] bulkAction = unZipData("/org/elasticsearch/test/integration/search/geo/gzippedmap.json");

         String mapping = XContentFactory.jsonBuilder()
                 .startObject()
                 .startObject("country")
                 .startObject("properties")
                 .startObject("pin")
                 .field("type", "geo_point")
                 .field("lat_lon", true)
                 .field("store", true)
                 .endObject()
                 .startObject("location")
                 .field("type", "geo_shape")
                 .field("lat_lon", true)
                 .field("store", true)
                 .endObject()
                 .endObject()
                 .endObject()
                 .endObject()
                 .string();

         client().admin().indices().prepareCreate("countries").addMapping("country", mapping).execute().actionGet();
         BulkResponse bulk = client().prepareBulk().add(bulkAction, 0, bulkAction.length, false, null, null).execute().actionGet();

-        for(BulkItemResponse item : bulk.getItems()) {
-            assert !item.isFailed(): "unable to index data";
+        for (BulkItemResponse item : bulk.getItems()) {
+            assert !item.isFailed() : "unable to index data";
         }

         client().admin().indices().prepareRefresh().execute().actionGet();
         String key = "DE";

         SearchResponse searchResponse = client().prepareSearch()
@@ -416,8 +412,8 @@ public class GeoFilterTests extends AbstractSharedClusterTest {
                 filteredQuery(
                         matchAllQuery(),
                         geoBoundingBoxFilter("pin")
                                 .topLeft(90, -179.99999)
                                 .bottomRight(-90, 179.99999))
         ).execute().actionGet();

         assertHitCount(world, 246);
@@ -437,7 +433,7 @@ public class GeoFilterTests extends AbstractSharedClusterTest {

         assertThat("distance to '" + name + "'", dist, lessThanOrEqualTo(425000d));
         assertThat(name, anyOf(equalTo("CZ"), equalTo("DE"), equalTo("BE"), equalTo("NL"), equalTo("LU")));
-        if(key.equals(name)) {
+        if (key.equals(name)) {
             assertThat(dist, equalTo(0d));
         }
     }
@@ -446,80 +442,78 @@ public class GeoFilterTests extends AbstractSharedClusterTest {
     @Test
     public void testGeoHashFilter() throws IOException {
         String geohash = randomhash(12);
-        String[] neighbors = GeoHashUtils.neighbors(geohash);
+        List<String> neighbors = GeoHashUtils.neighbors(geohash);

         logger.info("Testing geohash boundingbox filter for [{}]", geohash);
-        logger.info("Neighbors {}", Arrays.toString(neighbors));
+        logger.info("Neighbors {}", neighbors);

         String mapping = XContentFactory.jsonBuilder()
                 .startObject()
                 .startObject("location")
                 .startObject("properties")
                 .startObject("pin")
                 .field("type", "geo_point")
-                .field("geohash", true)
                 .field("geohash_prefix", true)
                 .field("latlon", false)
-                .field("store", true)
                 .endObject()
                 .endObject()
                 .endObject()
                 .endObject()
                 .string();

         ensureYellow();

         client().admin().indices().prepareCreate("locations").addMapping("location", mapping).execute().actionGet();

         // Index a pin
-        client().prepareIndex("locations", "location", "1").setCreate(true).setSource("{\"pin\":\""+geohash+"\"}").execute().actionGet();
+        client().prepareIndex("locations", "location", "1").setCreate(true).setSource("{\"pin\":\"" + geohash + "\"}").execute().actionGet();

         // index neighbors
-        for (int i = 0; i < neighbors.length; i++) {
-            client().prepareIndex("locations", "location", "N"+i).setCreate(true).setSource("{\"pin\":\""+neighbors[i]+"\"}").execute().actionGet();
+        for (int i = 0; i < neighbors.size(); i++) {
+            client().prepareIndex("locations", "location", "N" + i).setCreate(true).setSource("{\"pin\":\"" + neighbors.get(i) + "\"}").execute().actionGet();
         }

         // Index parent cell
-        client().prepareIndex("locations", "location", "p").setCreate(true).setSource("{\"pin\":\""+geohash.substring(0, geohash.length()-1)+"\"}").execute().actionGet();
+        client().prepareIndex("locations", "location", "p").setCreate(true).setSource("{\"pin\":\"" + geohash.substring(0, geohash.length() - 1) + "\"}").execute().actionGet();

         // index neighbors
-        String[] parentNeighbors = GeoHashUtils.neighbors(geohash.substring(0, geohash.length()-1));
-        for (int i = 0; i < parentNeighbors.length; i++) {
-            client().prepareIndex("locations", "location", "p"+i).setCreate(true).setSource("{\"pin\":\""+parentNeighbors[i]+"\"}").execute().actionGet();
+        List<String> parentNeighbors = GeoHashUtils.neighbors(geohash.substring(0, geohash.length() - 1));
+        for (int i = 0; i < parentNeighbors.size(); i++) {
+            client().prepareIndex("locations", "location", "p" + i).setCreate(true).setSource("{\"pin\":\"" + parentNeighbors.get(i) + "\"}").execute().actionGet();
         }

         client().admin().indices().prepareRefresh("locations").execute().actionGet();

         // Result of this geohash search should contain the geohash only
-        SearchResponse results1 = client().prepareSearch("locations").setQuery(QueryBuilders.matchAllQuery()).setFilter("{\"geohash_cell\": {\"field\": \"pin\", \"geohash\": \""+geohash+"\", \"neighbors\": false}}").execute().actionGet();
+        SearchResponse results1 = client().prepareSearch("locations").setQuery(QueryBuilders.matchAllQuery()).setFilter("{\"geohash_cell\": {\"field\": \"pin\", \"geohash\": \"" + geohash + "\", \"neighbors\": false}}").execute().actionGet();
         assertHitCount(results1, 1);

-        SearchResponse results2 = client().prepareSearch("locations").setQuery(QueryBuilders.matchAllQuery()).setFilter("{\"geohash_cell\": {\"field\": \"pin\", \"geohash\": \""+geohash.substring(0, geohash.length()-1)+"\", \"neighbors\": true}}").execute().actionGet();
+        SearchResponse results2 = client().prepareSearch("locations").setQuery(QueryBuilders.matchAllQuery()).setFilter("{\"geohash_cell\": {\"field\": \"pin\", \"geohash\": \"" + geohash.substring(0, geohash.length() - 1) + "\", \"neighbors\": true}}").execute().actionGet();
         // Result of the parent query should contain the parent itself, its neighbors, the child and all its neighbors
-        assertHitCount(results2, 2 + neighbors.length + parentNeighbors.length);
+        assertHitCount(results2, 2 + neighbors.size() + parentNeighbors.size());
     }

     @Test
     public void testNeighbors() {
         // Simple root case
-        assertThat(Arrays.asList(GeoHashUtils.neighbors("7")), containsInAnyOrder("4", "5", "6", "d", "e", "h", "k", "s"));
+        assertThat(GeoHashUtils.neighbors("7"), containsInAnyOrder("4", "5", "6", "d", "e", "h", "k", "s"));

         // Root cases (Outer cells)
-        assertThat(Arrays.asList(GeoHashUtils.neighbors("0")), containsInAnyOrder("1", "2", "3", "p", "r"));
-        assertThat(Arrays.asList(GeoHashUtils.neighbors("b")), containsInAnyOrder("8", "9", "c", "x", "z"));
-        assertThat(Arrays.asList(GeoHashUtils.neighbors("p")), containsInAnyOrder("n", "q", "r", "0", "2"));
-        assertThat(Arrays.asList(GeoHashUtils.neighbors("z")), containsInAnyOrder("8", "b", "w", "x", "y"));
+        assertThat(GeoHashUtils.neighbors("0"), containsInAnyOrder("1", "2", "3", "p", "r"));
+        assertThat(GeoHashUtils.neighbors("b"), containsInAnyOrder("8", "9", "c", "x", "z"));
+        assertThat(GeoHashUtils.neighbors("p"), containsInAnyOrder("n", "q", "r", "0", "2"));
+        assertThat(GeoHashUtils.neighbors("z"), containsInAnyOrder("8", "b", "w", "x", "y"));

         // Root crossing dateline
-        assertThat(Arrays.asList(GeoHashUtils.neighbors("2")), containsInAnyOrder("0", "1", "3", "8", "9", "p", "r", "x"));
-        assertThat(Arrays.asList(GeoHashUtils.neighbors("r")), containsInAnyOrder("0", "2", "8", "n", "p", "q", "w", "x"));
+        assertThat(GeoHashUtils.neighbors("2"), containsInAnyOrder("0", "1", "3", "8", "9", "p", "r", "x"));
+        assertThat(GeoHashUtils.neighbors("r"), containsInAnyOrder("0", "2", "8", "n", "p", "q", "w", "x"));

         // level1: simple case
-        assertThat(Arrays.asList(GeoHashUtils.neighbors("dk")), containsInAnyOrder("d5", "d7", "de", "dh", "dj", "dm", "ds", "dt"));
+        assertThat(GeoHashUtils.neighbors("dk"), containsInAnyOrder("d5", "d7", "de", "dh", "dj", "dm", "ds", "dt"));

         // Level1: crossing cells
-        assertThat(Arrays.asList(GeoHashUtils.neighbors("d5")), containsInAnyOrder("d4", "d6", "d7", "dh", "dk", "9f", "9g", "9u"));
-        assertThat(Arrays.asList(GeoHashUtils.neighbors("d0")), containsInAnyOrder("d1", "d2", "d3", "9b", "9c", "6p", "6r", "3z"));
+        assertThat(GeoHashUtils.neighbors("d5"), containsInAnyOrder("d4", "d6", "d7", "dh", "dk", "9f", "9g", "9u"));
+        assertThat(GeoHashUtils.neighbors("d0"), containsInAnyOrder("d1", "d2", "d3", "9b", "9c", "6p", "6r", "3z"));
     }

     public static double distance(double lat1, double lon1, double lat2, double lon2) {
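For the record, the expected hit count in the parent-cell query above follows directly from the prefix indexing: the parent doc "p", the original pin "1" (whose prefix terms include the parent cell), every pin neighbor "N0".."N7" (each lies in the parent cell or one of its neighbors), and every parent neighbor "p0".."p7". A hedged restatement of that arithmetic (illustration only, not part of the commit):

    public class ExpectedHits {
        static int expected(int pinNeighbors, int parentNeighbors) {
            // 1 for the parent doc "p" + 1 for the pin "1" + the two neighbor groups
            return 2 + pinNeighbors + parentNeighbors; // typically 2 + 8 + 8 = 18
        }
    }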
@@ -528,7 +522,7 @@ public class GeoFilterTests extends AbstractSharedClusterTest {
                 DistanceUtils.toRadians(lon1),
                 DistanceUtils.toRadians(lat2),
                 DistanceUtils.toRadians(lon2)
         );
     }

     protected static boolean testRelationSupport(SpatialOperation relation) {
@@ -543,7 +537,7 @@ public class GeoFilterTests extends AbstractSharedClusterTest {
             return false;
         }
     }

     protected static String randomhash(int length) {
         return randomhash(new Random(), length);
     }
@@ -551,23 +545,23 @@ public class GeoFilterTests extends AbstractSharedClusterTest {
     protected static String randomhash(Random random) {
         return randomhash(random, 2 + random.nextInt(10));
     }

     protected static String randomhash() {
         return randomhash(new Random());
     }

     protected static String randomhash(Random random, int length) {
         final char[] BASE_32 = {
                 '0', '1', '2', '3', '4', '5', '6', '7',
                 '8', '9', 'b', 'c', 'd', 'e', 'f', 'g',
                 'h', 'j', 'k', 'm', 'n', 'p', 'q', 'r',
                 's', 't', 'u', 'v', 'w', 'x', 'y', 'z'};

         StringBuilder sb = new StringBuilder();
         for (int i = 0; i < length; i++) {
             sb.append(BASE_32[random.nextInt(BASE_32.length)]);
         }

         return sb.toString();
     }
 }