Refactor GeoHashUtils (#40869)
This commit refactors the GeoHashUtils class into a new Geohash utility class located in the ES geo library. The intent is not only to better control which geo methods are whitelisted for painless scripting, but also to clean up the geo utility API in general.
parent 6a7459ff11
commit 113b24be4b
@@ -163,6 +163,14 @@ public class Rectangle implements Geometry {
        return maxLon < minLon;
    }

    /** returns true if rectangle (defined by minLat, maxLat, minLon, maxLon) contains the lat lon point */
    public boolean containsPoint(final double lat, final double lon) {
        if (lat >= minLat && lat <= maxLat) {
            return crossesDateline() ? lon >= minLon || lon <= maxLon : lon >= minLon && lon <= maxLon;
        }
        return false;
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
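For context, a minimal usage sketch of the new containsPoint method (not part of the commit); it assumes the Rectangle constructor takes (minLat, maxLat, minLon, maxLon), the argument order used elsewhere in this change.

import org.elasticsearch.geo.geometry.Rectangle;

public class ContainsPointSketch {
    public static void main(String[] args) {
        // a rectangle that crosses the dateline: minLon (170) is greater than maxLon (-170)
        Rectangle box = new Rectangle(-10, 10, 170, -170);
        System.out.println(box.containsPoint(0, 175));    // true: west side of the dateline
        System.out.println(box.containsPoint(0, -175));   // true: east side of the dateline
        System.out.println(box.containsPoint(0, 0));      // false: outside the rectangle
    }
}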
@@ -0,0 +1,75 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.geo.utils;

/**
 * Utilities for common Bit twiddling methods. Borrowed heavily from Lucene (org.apache.lucene.util.BitUtil).
 */
public class BitUtil { // magic numbers for bit interleaving
    private static final long MAGIC[] = {
        0x5555555555555555L, 0x3333333333333333L,
        0x0F0F0F0F0F0F0F0FL, 0x00FF00FF00FF00FFL,
        0x0000FFFF0000FFFFL, 0x00000000FFFFFFFFL,
        0xAAAAAAAAAAAAAAAAL
    };
    // shift values for bit interleaving
    private static final short SHIFT[] = {1, 2, 4, 8, 16};

    /**
     * Interleaves the first 32 bits of each long value
     *
     * Adapted from: http://graphics.stanford.edu/~seander/bithacks.html#InterleaveBMN
     */
    public static long interleave(int even, int odd) {
        long v1 = 0x00000000FFFFFFFFL & even;
        long v2 = 0x00000000FFFFFFFFL & odd;
        v1 = (v1 | (v1 << SHIFT[4])) & MAGIC[4];
        v1 = (v1 | (v1 << SHIFT[3])) & MAGIC[3];
        v1 = (v1 | (v1 << SHIFT[2])) & MAGIC[2];
        v1 = (v1 | (v1 << SHIFT[1])) & MAGIC[1];
        v1 = (v1 | (v1 << SHIFT[0])) & MAGIC[0];
        v2 = (v2 | (v2 << SHIFT[4])) & MAGIC[4];
        v2 = (v2 | (v2 << SHIFT[3])) & MAGIC[3];
        v2 = (v2 | (v2 << SHIFT[2])) & MAGIC[2];
        v2 = (v2 | (v2 << SHIFT[1])) & MAGIC[1];
        v2 = (v2 | (v2 << SHIFT[0])) & MAGIC[0];

        return (v2<<1) | v1;
    }

    /**
     * Extract just the even-bits value as a long from the bit-interleaved value
     */
    public static long deinterleave(long b) {
        b &= MAGIC[0];
        b = (b ^ (b >>> SHIFT[0])) & MAGIC[1];
        b = (b ^ (b >>> SHIFT[1])) & MAGIC[2];
        b = (b ^ (b >>> SHIFT[2])) & MAGIC[3];
        b = (b ^ (b >>> SHIFT[3])) & MAGIC[4];
        b = (b ^ (b >>> SHIFT[4])) & MAGIC[5];
        return b;
    }

    /**
     * flip flops odd with even bits
     */
    public static final long flipFlop(final long b) {
        return ((b & MAGIC[6]) >>> 1) | ((b & MAGIC[0]) << 1 );
    }
}
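A quick sanity-check sketch (not part of the commit) of how the helpers above compose: interleave spreads its first argument over the even bit positions and its second over the odd positions, deinterleave recovers the even-position value, and flipFlop swaps the two streams.

import org.elasticsearch.geo.utils.BitUtil;

public class BitUtilSketch {
    public static void main(String[] args) {
        int even = 0x12345678;
        int odd = 0x0ABCDEF0;
        long interleaved = BitUtil.interleave(even, odd);

        // the even-position bits come straight back out of deinterleave
        System.out.println(BitUtil.deinterleave(interleaved) == (even & 0xFFFFFFFFL));                  // true
        // shifting right by one moves the odd-position bits into the even positions
        System.out.println(BitUtil.deinterleave(interleaved >>> 1) == (odd & 0xFFFFFFFFL));             // true
        // flipFlop swaps the streams, so the odd value now sits in the even positions
        System.out.println(BitUtil.deinterleave(BitUtil.flipFlop(interleaved)) == (odd & 0xFFFFFFFFL)); // true
    }
}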
@ -16,197 +16,52 @@
|
|||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.common.geo;
|
||||
package org.elasticsearch.geo.utils;
|
||||
|
||||
import org.apache.lucene.geo.Rectangle;
|
||||
import org.apache.lucene.spatial.util.MortonEncoder;
|
||||
import org.apache.lucene.util.BitUtil;
|
||||
import org.elasticsearch.geo.geometry.Point;
|
||||
import org.elasticsearch.geo.geometry.Rectangle;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collection;
|
||||
|
||||
import static org.apache.lucene.geo.GeoUtils.MAX_LAT_INCL;
|
||||
|
||||
/**
|
||||
* Utilities for converting to/from the GeoHash standard
|
||||
*
|
||||
* The geohash long format is represented as lon/lat (x/y) interleaved with the 4 least significant bits
|
||||
* representing the level (1-12) [xyxy...xyxyllll]
|
||||
*
|
||||
* This differs from a morton encoded value which interleaves lat/lon (y/x).*
|
||||
* This differs from a morton encoded value which interleaves lat/lon (y/x).
|
||||
*
|
||||
* NOTE: this will replace {@code org.elasticsearch.common.geo.GeoHashUtils}
|
||||
*/
|
||||
public class GeoHashUtils {
|
||||
public class Geohash {
|
||||
private static final char[] BASE_32 = {'0', '1', '2', '3', '4', '5', '6',
|
||||
'7', '8', '9', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'j', 'k', 'm', 'n',
|
||||
'p', 'q', 'r', 's', 't', 'u', 'v', 'w', 'x', 'y', 'z'};
|
||||
|
||||
private static final String BASE_32_STRING = new String(BASE_32);
|
||||
|
||||
/** maximum precision for geohash strings */
|
||||
public static final int PRECISION = 12;
|
||||
/** number of bits used for quantizing latitude and longitude values */
|
||||
public static final short BITS = 31;
|
||||
/** scaling factors to convert lat/lon into unsigned space */
|
||||
private static final double LAT_SCALE = (0x1L<<BITS)/180.0D;
|
||||
private static final double LON_SCALE = (0x1L<<BITS)/360.0D;
|
||||
private static final short BITS = 32;
|
||||
private static final double LAT_SCALE = (0x1L<<(BITS-1))/180.0D;
|
||||
private static final double LAT_DECODE = 180.0D/(0x1L<<BITS);
|
||||
private static final double LON_SCALE = (0x1L<<(BITS-1))/360.0D;
|
||||
private static final double LON_DECODE = 360.0D/(0x1L<<BITS);
|
||||
|
||||
private static final short MORTON_OFFSET = (BITS<<1) - (PRECISION*5);
|
||||
/** Bit encoded representation of the latitude of north pole */
|
||||
private static final long MAX_LAT_BITS = (0x1L << (PRECISION * 5 / 2)) - 1;
|
||||
|
||||
// No instance:
|
||||
private GeoHashUtils() {
|
||||
|
||||
// no instance:
|
||||
private Geohash() {
|
||||
}
|
||||
|
||||
/*************************
|
||||
* 31 bit encoding utils *
|
||||
*************************/
|
||||
public static long encodeLatLon(final double lat, final double lon) {
|
||||
return MortonEncoder.encode(lat, lon) >>> 2;
|
||||
}
|
||||
|
||||
/**
|
||||
* Encode lon/lat to the geohash based long format (lon/lat interleaved, 4 least significant bits = level)
|
||||
*/
|
||||
public static final long longEncode(final double lon, final double lat, final int level) {
|
||||
// shift to appropriate level
|
||||
final short msf = (short)(((12 - level) * 5) + MORTON_OFFSET);
|
||||
return ((BitUtil.flipFlop(encodeLatLon(lat, lon)) >>> msf) << 4) | level;
|
||||
}
|
||||
|
||||
/**
|
||||
* Encode from geohash string to the geohash based long format (lon/lat interleaved, 4 least significant bits = level)
|
||||
*/
|
||||
private static long longEncode(final String hash, int length) {
|
||||
int level = length - 1;
|
||||
long b;
|
||||
long l = 0L;
|
||||
for(char c : hash.toCharArray()) {
|
||||
b = (long)(BASE_32_STRING.indexOf(c));
|
||||
l |= (b<<(level--*5));
|
||||
if (level < 0) {
|
||||
// We cannot handle more than 12 levels
|
||||
break;
|
||||
}
|
||||
}
|
||||
return (l << 4) | length;
|
||||
}
|
||||
|
||||
/**
|
||||
* Encode an existing geohash long to the provided precision
|
||||
*/
|
||||
public static long longEncode(long geohash, int level) {
|
||||
final short precision = (short)(geohash & 15);
|
||||
if (precision == level) {
|
||||
return geohash;
|
||||
} else if (precision > level) {
|
||||
return ((geohash >>> (((precision - level) * 5) + 4)) << 4) | level;
|
||||
}
|
||||
return ((geohash >>> 4) << (((level - precision) * 5) + 4) | level);
|
||||
}
|
||||
|
||||
/**
|
||||
* Convert from a morton encoded long from a geohash encoded long
|
||||
*/
|
||||
public static long fromMorton(long morton, int level) {
|
||||
long mFlipped = BitUtil.flipFlop(morton);
|
||||
mFlipped >>>= (((GeoHashUtils.PRECISION - level) * 5) + MORTON_OFFSET);
|
||||
return (mFlipped << 4) | level;
|
||||
}
|
||||
|
||||
/**
|
||||
* Encode to a geohash string from the geohash based long format
|
||||
*/
|
||||
public static final String stringEncode(long geoHashLong) {
|
||||
int level = (int)geoHashLong&15;
|
||||
geoHashLong >>>= 4;
|
||||
char[] chars = new char[level];
|
||||
do {
|
||||
chars[--level] = BASE_32[(int) (geoHashLong&31L)];
|
||||
geoHashLong>>>=5;
|
||||
} while(level > 0);
|
||||
|
||||
return new String(chars);
|
||||
}
|
||||
|
||||
/**
|
||||
* Encode to a geohash string from full resolution longitude, latitude)
|
||||
*/
|
||||
public static final String stringEncode(final double lon, final double lat) {
|
||||
return stringEncode(lon, lat, 12);
|
||||
}
|
||||
|
||||
/**
|
||||
* Encode to a level specific geohash string from full resolution longitude, latitude
|
||||
*/
|
||||
public static final String stringEncode(final double lon, final double lat, final int level) {
|
||||
// convert to geohashlong
|
||||
final long ghLong = fromMorton(encodeLatLon(lat, lon), level);
|
||||
return stringEncode(ghLong);
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* Encode to a full precision geohash string from a given morton encoded long value
|
||||
*/
|
||||
public static final String stringEncodeFromMortonLong(final long hashedVal) throws Exception {
|
||||
return stringEncode(hashedVal, PRECISION);
|
||||
}
|
||||
|
||||
/**
|
||||
* Encode to a geohash string at a given level from a morton long
|
||||
*/
|
||||
public static final String stringEncodeFromMortonLong(long hashedVal, final int level) {
|
||||
// bit twiddle to geohash (since geohash is a swapped (lon/lat) encoding)
|
||||
hashedVal = BitUtil.flipFlop(hashedVal);
|
||||
|
||||
StringBuilder geoHash = new StringBuilder();
|
||||
short precision = 0;
|
||||
final short msf = (BITS<<1)-5;
|
||||
long mask = 31L<<msf;
|
||||
do {
|
||||
geoHash.append(BASE_32[(int)((mask & hashedVal)>>>(msf-(precision*5)))]);
|
||||
// next 5 bits
|
||||
mask >>>= 5;
|
||||
} while (++precision < level);
|
||||
return geoHash.toString();
|
||||
}
|
||||
|
||||
/**
|
||||
* Encode to a morton long value from a given geohash string
|
||||
*/
|
||||
public static final long mortonEncode(final String hash) {
|
||||
if (hash.isEmpty()) {
|
||||
throw new IllegalArgumentException("empty geohash");
|
||||
}
|
||||
int level = 11;
|
||||
long b;
|
||||
long l = 0L;
|
||||
for(char c : hash.toCharArray()) {
|
||||
b = (long)(BASE_32_STRING.indexOf(c));
|
||||
if (b < 0) {
|
||||
throw new IllegalArgumentException("unsupported symbol [" + c + "] in geohash [" + hash + "]");
|
||||
}
|
||||
l |= (b<<((level--*5) + MORTON_OFFSET));
|
||||
if (level < 0) {
|
||||
// We cannot handle more than 12 levels
|
||||
break;
|
||||
}
|
||||
}
|
||||
return BitUtil.flipFlop(l);
|
||||
}
|
||||
|
||||
/**
|
||||
* Encode to a morton long value from a given geohash long value
|
||||
*/
|
||||
public static final long mortonEncode(final long geoHashLong) {
|
||||
final int level = (int)(geoHashLong&15);
|
||||
final short odd = (short)(level & 1);
|
||||
|
||||
return BitUtil.flipFlop(((geoHashLong >>> 4) << odd) << (((12 - level) * 5) + (MORTON_OFFSET - odd)));
|
||||
}
|
||||
|
||||
private static char encode(int x, int y) {
|
||||
return BASE_32[((x & 1) + ((y & 1) * 2) + ((x & 2) * 2) + ((y & 2) * 4) + ((x & 4) * 4)) % 32];
|
||||
/** Returns a {@link Point} instance from a geohash string */
|
||||
public static Point toPoint(final String geohash) throws IllegalArgumentException {
|
||||
final long hash = mortonEncode(geohash);
|
||||
return new Point(decodeLatitude(hash), decodeLongitude(hash));
|
||||
}
|
||||
|
||||
/**
|
||||
|
@@ -215,9 +70,9 @@ public class GeoHashUtils {
     * @param geohash Geohash of the defined cell
     * @return GeoRect rectangle defining the bounding box
     */
    public static Rectangle bbox(final String geohash) {
    public static Rectangle toBoundingBox(final String geohash) {
        // bottom left is the coordinate
        GeoPoint bottomLeft = GeoPoint.fromGeohash(geohash);
        Point bottomLeft = toPoint(geohash);
        int len = Math.min(12, geohash.length());
        long ghLong = longEncode(geohash, len);
        // shift away the level

@@ -225,15 +80,20 @@ public class GeoHashUtils {
        // deinterleave
        long lon = BitUtil.deinterleave(ghLong >>> 1);
        long lat = BitUtil.deinterleave(ghLong);
        final int shift = (12 - len) * 5 + 2;
        if (lat < MAX_LAT_BITS) {
            // add 1 to lat and lon to get topRight
            GeoPoint topRight = GeoPoint.fromGeohash(BitUtil.interleave((int)(lat + 1), (int)(lon + 1)) << 4 | len);
            return new Rectangle(bottomLeft.lat(), topRight.lat(), bottomLeft.lon(), topRight.lon());
            ghLong = BitUtil.interleave((int)(lat + 1), (int)(lon + 1)) << 4 | len;
            final long mortonHash = BitUtil.flipFlop((ghLong >>> 4) << shift);
            Point topRight = new Point(decodeLatitude(mortonHash), decodeLongitude(mortonHash));
            return new Rectangle(bottomLeft.getLat(), topRight.getLat(), bottomLeft.getLon(), topRight.getLon());
        } else {
            // We cannot go north of north pole, so just using 90 degrees instead of calculating it using
            // add 1 to lon to get lon of topRight, we are going to use 90 for lat
            GeoPoint topRight = GeoPoint.fromGeohash(BitUtil.interleave((int)lat, (int)(lon + 1)) << 4 | len);
            return new Rectangle(bottomLeft.lat(), MAX_LAT_INCL, bottomLeft.lon(), topRight.lon());
            ghLong = BitUtil.interleave((int)lat, (int)(lon + 1)) << 4 | len;
            final long mortonHash = BitUtil.flipFlop((ghLong >>> 4) << shift);
            Point topRight = new Point(decodeLatitude(mortonHash), decodeLongitude(mortonHash));
            return new Rectangle(bottomLeft.getLat(), 90D, bottomLeft.getLon(), topRight.getLon());
        }
    }
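A short usage sketch (not part of the commit) of the renamed helper, mirroring the round trip that GeoHashTests verifies: the cell's bottom-left corner re-encodes to the original hash.

import org.elasticsearch.geo.geometry.Rectangle;
import org.elasticsearch.geo.utils.Geohash;

public class BoundingBoxSketch {
    public static void main(String[] args) {
        String hash = "u4pruydqqvj";   // an 11 character geohash
        Rectangle cell = Geohash.toBoundingBox(hash);

        // re-encoding the bottom-left corner at the same precision gives the hash back
        String roundTripped = Geohash.stringEncode(cell.getMinLon(), cell.getMinLat(), hash.length());
        System.out.println(hash.equals(roundTripped));   // true
        System.out.println("lat " + cell.getMinLat() + ".." + cell.getMaxLat()
            + ", lon " + cell.getMinLon() + ".." + cell.getMaxLon());
    }
}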
@@ -243,8 +103,48 @@ public class GeoHashUtils {
     * @param geohash Geohash of the defined cell
     * @return geohashes of all neighbor cells
     */
    public static Collection<? extends CharSequence> neighbors(String geohash) {
        return addNeighbors(geohash, geohash.length(), new ArrayList<CharSequence>(8));
    public static Collection<? extends CharSequence> getNeighbors(String geohash) {
        return addNeighborsAtLevel(geohash, geohash.length(), new ArrayList<CharSequence>(8));
    }
    /**
     * Add all geohashes of the cells next to a given geohash to a list.
     *
     * @param geohash Geohash of a specified cell
     * @param neighbors list to add the neighbors to
     * @return the given list
     */
    public static final <E extends Collection<? super String>> E addNeighbors(String geohash, E neighbors) {
        return addNeighborsAtLevel(geohash, geohash.length(), neighbors);
    }

    /**
     * Add all geohashes of the cells next to a given geohash to a list.
     *
     * @param geohash Geohash of a specified cell
     * @param level level of the given geohash
     * @param neighbors list to add the neighbors to
     * @return the given list
     */
    public static final <E extends Collection<? super String>> E addNeighborsAtLevel(String geohash,
                                                                                      int level, E neighbors) {
        String south = getNeighbor(geohash, level, 0, -1);
        String north = getNeighbor(geohash, level, 0, +1);
        if (north != null) {
            neighbors.add(getNeighbor(north, level, -1, 0));
            neighbors.add(north);
            neighbors.add(getNeighbor(north, level, +1, 0));
        }

        neighbors.add(getNeighbor(geohash, level, -1, 0));
        neighbors.add(getNeighbor(geohash, level, +1, 0));

        if (south != null) {
            neighbors.add(getNeighbor(south, level, -1, 0));
            neighbors.add(south);
            neighbors.add(getNeighbor(south, level, +1, 0));
        }

        return neighbors;
    }

    /**
@ -256,7 +156,7 @@ public class GeoHashUtils {
|
|||
* @param dy delta of the second grid coordinate (must be -1, 0 or +1)
|
||||
* @return geohash of the defined cell
|
||||
*/
|
||||
public static final String neighbor(String geohash, int level, int dx, int dy) {
|
||||
public static final String getNeighbor(String geohash, int level, int dx, int dy) {
|
||||
int cell = BASE_32_STRING.indexOf(geohash.charAt(level -1));
|
||||
|
||||
// Decoding the Geohash bit pattern to determine grid coordinates
|
||||
|
@ -279,7 +179,7 @@ public class GeoHashUtils {
|
|||
if ((dy < 0 && y == 0) || (dy > 0 && y == 3)) {
|
||||
return null;
|
||||
} else {
|
||||
return Character.toString(encode(x + dx, y + dy));
|
||||
return Character.toString(encodeBase32(x + dx, y + dy));
|
||||
}
|
||||
} else {
|
||||
// define grid coordinates for next level
|
||||
|
@ -293,79 +193,154 @@ public class GeoHashUtils {
|
|||
// xLimit and YLimit must always be respectively 7 and 3
|
||||
// since x and y semantics are swapping on each level.
|
||||
if (nx >= 0 && nx <= 7 && ny >= 0 && ny <= 3) {
|
||||
return geohash.substring(0, level - 1) + encode(nx, ny);
|
||||
return geohash.substring(0, level - 1) + encodeBase32(nx, ny);
|
||||
} else {
|
||||
String neighbor = neighbor(geohash, level - 1, dx, dy);
|
||||
return (neighbor != null) ? neighbor + encode(nx, ny) : neighbor;
|
||||
String neighbor = getNeighbor(geohash, level - 1, dx, dy);
|
||||
return (neighbor != null) ? neighbor + encodeBase32(nx, ny) : neighbor;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Add all geohashes of the cells next to a given geohash to a list.
|
||||
*
|
||||
* @param geohash Geohash of a specified cell
|
||||
* @param neighbors list to add the neighbors to
|
||||
* @return the given list
|
||||
* Encode lon/lat to the geohash based long format (lon/lat interleaved, 4 least significant bits = level)
|
||||
*/
|
||||
public static final <E extends Collection<? super String>> E addNeighbors(String geohash, E neighbors) {
|
||||
return addNeighbors(geohash, geohash.length(), neighbors);
|
||||
public static final long longEncode(final double lon, final double lat, final int level) {
|
||||
// shift to appropriate level
|
||||
final short msf = (short)(((12 - level) * 5) + (MORTON_OFFSET - 2));
|
||||
return ((encodeLatLon(lat, lon) >>> msf) << 4) | level;
|
||||
}
|
||||
|
||||
/**
|
||||
* Add all geohashes of the cells next to a given geohash to a list.
|
||||
*
|
||||
* @param geohash Geohash of a specified cell
|
||||
* @param length level of the given geohash
|
||||
* @param neighbors list to add the neighbors to
|
||||
* @return the given list
|
||||
* Encode to a geohash string from full resolution longitude, latitude)
|
||||
*/
|
||||
public static final <E extends Collection<? super String>> E addNeighbors(String geohash, int length, E neighbors) {
|
||||
String south = neighbor(geohash, length, 0, -1);
|
||||
String north = neighbor(geohash, length, 0, +1);
|
||||
if (north != null) {
|
||||
neighbors.add(neighbor(north, length, -1, 0));
|
||||
neighbors.add(north);
|
||||
neighbors.add(neighbor(north, length, +1, 0));
|
||||
public static final String stringEncode(final double lon, final double lat) {
|
||||
return stringEncode(lon, lat, 12);
|
||||
}
|
||||
|
||||
/**
|
||||
* Encode to a level specific geohash string from full resolution longitude, latitude
|
||||
*/
|
||||
public static final String stringEncode(final double lon, final double lat, final int level) {
|
||||
// convert to geohashlong
|
||||
long interleaved = encodeLatLon(lat, lon);
|
||||
interleaved >>>= (((PRECISION - level) * 5) + (MORTON_OFFSET - 2));
|
||||
final long geohash = (interleaved << 4) | level;
|
||||
return stringEncode(geohash);
|
||||
}
|
||||
|
||||
/**
|
||||
* Encode to a geohash string from the geohash based long format
|
||||
*/
|
||||
public static final String stringEncode(long geoHashLong) {
|
||||
int level = (int)geoHashLong&15;
|
||||
geoHashLong >>>= 4;
|
||||
char[] chars = new char[level];
|
||||
do {
|
||||
chars[--level] = BASE_32[(int) (geoHashLong&31L)];
|
||||
geoHashLong>>>=5;
|
||||
} while(level > 0);
|
||||
|
||||
return new String(chars);
|
||||
}
|
||||
|
||||
/** base32 encode at the given grid coordinate */
|
||||
private static char encodeBase32(int x, int y) {
|
||||
return BASE_32[((x & 1) + ((y & 1) * 2) + ((x & 2) * 2) + ((y & 2) * 4) + ((x & 4) * 4)) % 32];
|
||||
}
|
||||
|
||||
/**
|
||||
* Encode from geohash string to the geohash based long format (lon/lat interleaved, 4 least significant bits = level)
|
||||
*/
|
||||
private static long longEncode(final String hash, int length) {
|
||||
int level = length - 1;
|
||||
long b;
|
||||
long l = 0L;
|
||||
for(char c : hash.toCharArray()) {
|
||||
b = (long)(BASE_32_STRING.indexOf(c));
|
||||
l |= (b<<(level--*5));
|
||||
if (level < 0) {
|
||||
// We cannot handle more than 12 levels
|
||||
break;
|
||||
}
|
||||
}
|
||||
return (l << 4) | length;
|
||||
}
|
||||
|
||||
neighbors.add(neighbor(geohash, length, -1, 0));
|
||||
neighbors.add(neighbor(geohash, length, +1, 0));
|
||||
|
||||
if (south != null) {
|
||||
neighbors.add(neighbor(south, length, -1, 0));
|
||||
neighbors.add(south);
|
||||
neighbors.add(neighbor(south, length, +1, 0));
|
||||
/**
|
||||
* Encode to a morton long value from a given geohash string
|
||||
*/
|
||||
public static long mortonEncode(final String hash) {
|
||||
if (hash.isEmpty()) {
|
||||
throw new IllegalArgumentException("empty geohash");
|
||||
}
|
||||
|
||||
return neighbors;
|
||||
int level = 11;
|
||||
long b;
|
||||
long l = 0L;
|
||||
for(char c : hash.toCharArray()) {
|
||||
b = (long)(BASE_32_STRING.indexOf(c));
|
||||
if (b < 0) {
|
||||
throw new IllegalArgumentException("unsupported symbol [" + c + "] in geohash [" + hash + "]");
|
||||
}
|
||||
l |= (b<<((level--*5) + (MORTON_OFFSET - 2)));
|
||||
if (level < 0) {
|
||||
// We cannot handle more than 12 levels
|
||||
break;
|
||||
}
|
||||
}
|
||||
return BitUtil.flipFlop(l);
|
||||
}
|
||||
|
||||
/** decode longitude value from morton encoded geo point */
|
||||
public static final double decodeLongitude(final long hash) {
|
||||
return unscaleLon(BitUtil.deinterleave(hash));
|
||||
private static long encodeLatLon(final double lat, final double lon) {
|
||||
// encode lat/lon flipping the sign bit so negative ints sort before positive ints
|
||||
final int latEnc = encodeLatitude(lat) ^ 0x80000000;
|
||||
final int lonEnc = encodeLongitude(lon) ^ 0x80000000;
|
||||
return BitUtil.interleave(latEnc, lonEnc) >>> 2;
|
||||
}
|
||||
|
||||
/** decode latitude value from morton encoded geo point */
|
||||
public static final double decodeLatitude(final long hash) {
|
||||
return unscaleLat(BitUtil.deinterleave(hash >>> 1));
|
||||
|
||||
/** encode latitude to integer */
|
||||
public static int encodeLatitude(double latitude) {
|
||||
// the maximum possible value cannot be encoded without overflow
|
||||
if (latitude == 90.0D) {
|
||||
latitude = Math.nextDown(latitude);
|
||||
}
|
||||
return (int) Math.floor(latitude / LAT_DECODE);
|
||||
}
|
||||
|
||||
private static double unscaleLon(final long val) {
|
||||
return (val / LON_SCALE) - 180;
|
||||
}
|
||||
|
||||
private static double unscaleLat(final long val) {
|
||||
return (val / LAT_SCALE) - 90;
|
||||
/** encode longitude to integer */
|
||||
public static int encodeLongitude(double longitude) {
|
||||
// the maximum possible value cannot be encoded without overflow
|
||||
if (longitude == 180.0D) {
|
||||
longitude = Math.nextDown(longitude);
|
||||
}
|
||||
return (int) Math.floor(longitude / LON_DECODE);
|
||||
}
|
||||
|
||||
/** returns the latitude value from the string based geohash */
|
||||
public static final double decodeLatitude(final String geohash) {
|
||||
return decodeLatitude(mortonEncode(geohash));
|
||||
return decodeLatitude(Geohash.mortonEncode(geohash));
|
||||
}
|
||||
|
||||
/** returns the latitude value from the string based geohash */
|
||||
public static final double decodeLongitude(final String geohash) {
|
||||
return decodeLongitude(mortonEncode(geohash));
|
||||
return decodeLongitude(Geohash.mortonEncode(geohash));
|
||||
}
|
||||
|
||||
/** decode longitude value from morton encoded geo point */
|
||||
public static double decodeLongitude(final long hash) {
|
||||
return unscaleLon(BitUtil.deinterleave(hash));
|
||||
}
|
||||
|
||||
/** decode latitude value from morton encoded geo point */
|
||||
public static double decodeLatitude(final long hash) {
|
||||
return unscaleLat(BitUtil.deinterleave(hash >>> 1));
|
||||
}
|
||||
|
||||
private static double unscaleLon(final long val) {
|
||||
return (val / LON_SCALE) - 180;
|
||||
}
|
||||
|
||||
private static double unscaleLat(final long val) {
|
||||
return (val / LAT_SCALE) - 90;
|
||||
}
|
||||
}
|
|
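Taken together, the new Geohash class exposes string and long encodings plus decoding back to coordinates. A small sketch (not part of the commit) of the typical round trips; the expected string comes from the extreme-value assertions in GeoHashTests below.

import org.elasticsearch.geo.geometry.Point;
import org.elasticsearch.geo.utils.Geohash;

public class GeohashRoundTripSketch {
    public static void main(String[] args) {
        double lon = 0.0, lat = 0.0;

        // full precision (12 character) string encoding
        String hash = Geohash.stringEncode(lon, lat);
        System.out.println(hash);                           // s00000000000

        // the long format interleaves lon/lat and keeps the level in the 4 low bits
        long asLong = Geohash.longEncode(lon, lat, 5);
        System.out.println(Geohash.stringEncode(asLong));   // s0000, the level 5 prefix

        // decode back to coordinates, either via a Point or directly from the string
        Point p = Geohash.toPoint(hash);
        System.out.println(p.getLat() + ", " + p.getLon());
        System.out.println(Geohash.decodeLatitude(hash) + ", " + Geohash.decodeLongitude(hash));
    }
}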
@ -16,13 +16,14 @@
|
|||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
package org.elasticsearch.common.geo;
|
||||
package org.elasticsearch.geo.utils;
|
||||
|
||||
import org.apache.lucene.geo.Rectangle;
|
||||
import org.elasticsearch.common.geo.GeoPoint;
|
||||
import org.elasticsearch.geo.geometry.Rectangle;
|
||||
import org.elasticsearch.test.ESTestCase;
|
||||
|
||||
/**
|
||||
* Tests for {@link org.elasticsearch.common.geo.GeoHashUtils}
|
||||
* Tests for {@link org.elasticsearch.geo.utils.Geohash}
|
||||
*/
|
||||
public class GeoHashTests extends ESTestCase {
|
||||
public void testGeohashAsLongRoutines() {
|
||||
|
@ -37,13 +38,13 @@ public class GeoHashTests extends ESTestCase {
|
|||
{
|
||||
for(int p=1;p<=12;p++)
|
||||
{
|
||||
long geoAsLong = GeoHashUtils.longEncode(lng, lat, p);
|
||||
long geoAsLong = Geohash.longEncode(lng, lat, p);
|
||||
|
||||
// string encode from geohashlong encoded location
|
||||
String geohashFromLong = GeoHashUtils.stringEncode(geoAsLong);
|
||||
String geohashFromLong = Geohash.stringEncode(geoAsLong);
|
||||
|
||||
// string encode from full res lat lon
|
||||
String geohash = GeoHashUtils.stringEncode(lng, lat, p);
|
||||
String geohash = Geohash.stringEncode(lng, lat, p);
|
||||
|
||||
// ensure both strings are the same
|
||||
assertEquals(geohash, geohashFromLong);
|
||||
|
@ -62,25 +63,25 @@ public class GeoHashTests extends ESTestCase {
|
|||
public void testBboxFromHash() {
|
||||
String hash = randomGeohash(1, 12);
|
||||
int level = hash.length();
|
||||
Rectangle bbox = GeoHashUtils.bbox(hash);
|
||||
Rectangle bbox = Geohash.toBoundingBox(hash);
|
||||
// check that the length is as expected
|
||||
double expectedLonDiff = 360.0 / (Math.pow(8.0, (level + 1) / 2) * Math.pow(4.0, level / 2));
|
||||
double expectedLatDiff = 180.0 / (Math.pow(4.0, (level + 1) / 2) * Math.pow(8.0, level / 2));
|
||||
assertEquals(expectedLonDiff, bbox.maxLon - bbox.minLon, 0.00001);
|
||||
assertEquals(expectedLatDiff, bbox.maxLat - bbox.minLat, 0.00001);
|
||||
assertEquals(hash, GeoHashUtils.stringEncode(bbox.minLon, bbox.minLat, level));
|
||||
assertEquals(expectedLonDiff, bbox.getMaxLon() - bbox.getMinLon(), 0.00001);
|
||||
assertEquals(expectedLatDiff, bbox.getMaxLat() - bbox.getMinLat(), 0.00001);
|
||||
assertEquals(hash, Geohash.stringEncode(bbox.getMinLon(), bbox.getMinLat(), level));
|
||||
}
|
||||
|
||||
public void testGeohashExtremes() {
|
||||
assertEquals("000000000000", GeoHashUtils.stringEncode(-180, -90));
|
||||
assertEquals("800000000000", GeoHashUtils.stringEncode(-180, 0));
|
||||
assertEquals("bpbpbpbpbpbp", GeoHashUtils.stringEncode(-180, 90));
|
||||
assertEquals("h00000000000", GeoHashUtils.stringEncode(0, -90));
|
||||
assertEquals("s00000000000", GeoHashUtils.stringEncode(0, 0));
|
||||
assertEquals("upbpbpbpbpbp", GeoHashUtils.stringEncode(0, 90));
|
||||
assertEquals("pbpbpbpbpbpb", GeoHashUtils.stringEncode(180, -90));
|
||||
assertEquals("xbpbpbpbpbpb", GeoHashUtils.stringEncode(180, 0));
|
||||
assertEquals("zzzzzzzzzzzz", GeoHashUtils.stringEncode(180, 90));
|
||||
assertEquals("000000000000", Geohash.stringEncode(-180, -90));
|
||||
assertEquals("800000000000", Geohash.stringEncode(-180, 0));
|
||||
assertEquals("bpbpbpbpbpbp", Geohash.stringEncode(-180, 90));
|
||||
assertEquals("h00000000000", Geohash.stringEncode(0, -90));
|
||||
assertEquals("s00000000000", Geohash.stringEncode(0, 0));
|
||||
assertEquals("upbpbpbpbpbp", Geohash.stringEncode(0, 90));
|
||||
assertEquals("pbpbpbpbpbpb", Geohash.stringEncode(180, -90));
|
||||
assertEquals("xbpbpbpbpbpb", Geohash.stringEncode(180, 0));
|
||||
assertEquals("zzzzzzzzzzzz", Geohash.stringEncode(180, 90));
|
||||
}
|
||||
|
||||
public void testLongGeohashes() {
|
||||
|
@ -92,24 +93,24 @@ public class GeoHashTests extends ESTestCase {
|
|||
GeoPoint actual = GeoPoint.fromGeohash(extendedGeohash);
|
||||
assertEquals("Additional data points above 12 should be ignored [" + extendedGeohash + "]" , expected, actual);
|
||||
|
||||
Rectangle expectedBbox = GeoHashUtils.bbox(geohash);
|
||||
Rectangle actualBbox = GeoHashUtils.bbox(extendedGeohash);
|
||||
Rectangle expectedBbox = Geohash.toBoundingBox(geohash);
|
||||
Rectangle actualBbox = Geohash.toBoundingBox(extendedGeohash);
|
||||
assertEquals("Additional data points above 12 should be ignored [" + extendedGeohash + "]" , expectedBbox, actualBbox);
|
||||
}
|
||||
}
|
||||
|
||||
public void testNorthPoleBoundingBox() {
|
||||
Rectangle bbox = GeoHashUtils.bbox("zzbxfpgzupbx"); // Bounding box with maximum precision touching north pole
|
||||
assertEquals(90.0, bbox.maxLat, 0.0000001); // Should be 90 degrees
|
||||
Rectangle bbox = Geohash.toBoundingBox("zzbxfpgzupbx"); // Bounding box with maximum precision touching north pole
|
||||
assertEquals(90.0, bbox.getMaxLat(), 0.0000001); // Should be 90 degrees
|
||||
}
|
||||
|
||||
public void testInvalidGeohashes() {
|
||||
IllegalArgumentException ex;
|
||||
|
||||
ex = expectThrows(IllegalArgumentException.class, () -> GeoHashUtils.mortonEncode("55.5"));
|
||||
ex = expectThrows(IllegalArgumentException.class, () -> Geohash.mortonEncode("55.5"));
|
||||
assertEquals("unsupported symbol [.] in geohash [55.5]", ex.getMessage());
|
||||
|
||||
ex = expectThrows(IllegalArgumentException.class, () -> GeoHashUtils.mortonEncode(""));
|
||||
ex = expectThrows(IllegalArgumentException.class, () -> Geohash.mortonEncode(""));
|
||||
assertEquals("empty geohash", ex.getMessage());
|
||||
}
|
||||
|
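The last test above pins down the failure modes of mortonEncode; a sketch of how calling code can use them to reject malformed geohashes (the validation helper here is illustrative, not part of the commit).

import org.elasticsearch.geo.utils.Geohash;

public class GeohashValidationSketch {
    /** illustrative helper: true if the candidate parses as a geohash */
    static boolean isValidGeohash(String candidate) {
        try {
            Geohash.mortonEncode(candidate);   // throws on empty input or unsupported symbols
            return true;
        } catch (IllegalArgumentException e) {
            return false;
        }
    }

    public static void main(String[] args) {
        System.out.println(isValidGeohash("ezs42"));   // true
        System.out.println(isValidGeohash("55.5"));    // false: '.' is not a base-32 symbol
        System.out.println(isValidGeohash(""));        // false: empty geohash
    }
}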
|
@ -28,12 +28,11 @@ import org.apache.lucene.util.BytesRef;
|
|||
import org.elasticsearch.common.xcontent.ToXContentFragment;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.ElasticsearchParseException;
|
||||
import org.elasticsearch.geo.utils.Geohash;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Arrays;
|
||||
|
||||
import static org.elasticsearch.common.geo.GeoHashUtils.mortonEncode;
|
||||
import static org.elasticsearch.common.geo.GeoHashUtils.stringEncode;
|
||||
import static org.elasticsearch.index.mapper.GeoPointFieldMapper.Names.IGNORE_Z_VALUE;
|
||||
|
||||
public final class GeoPoint implements ToXContentFragment {
|
||||
|
@ -117,8 +116,8 @@ public final class GeoPoint implements ToXContentFragment {
|
|||
|
||||
|
||||
public GeoPoint resetFromIndexHash(long hash) {
|
||||
lon = GeoHashUtils.decodeLongitude(hash);
|
||||
lat = GeoHashUtils.decodeLatitude(hash);
|
||||
lon = Geohash.decodeLongitude(hash);
|
||||
lat = Geohash.decodeLatitude(hash);
|
||||
return this;
|
||||
}
|
||||
|
||||
|
@ -143,11 +142,11 @@ public final class GeoPoint implements ToXContentFragment {
|
|||
public GeoPoint resetFromGeoHash(String geohash) {
|
||||
final long hash;
|
||||
try {
|
||||
hash = mortonEncode(geohash);
|
||||
hash = Geohash.mortonEncode(geohash);
|
||||
} catch (IllegalArgumentException ex) {
|
||||
throw new ElasticsearchParseException(ex.getMessage(), ex);
|
||||
}
|
||||
return this.reset(GeoHashUtils.decodeLatitude(hash), GeoHashUtils.decodeLongitude(hash));
|
||||
return this.reset(Geohash.decodeLatitude(hash), Geohash.decodeLongitude(hash));
|
||||
}
|
||||
|
||||
public GeoPoint resetFromGeoHash(long geohashLong) {
|
||||
|
@ -172,11 +171,11 @@ public final class GeoPoint implements ToXContentFragment {
|
|||
}
|
||||
|
||||
public String geohash() {
|
||||
return stringEncode(lon, lat);
|
||||
return Geohash.stringEncode(lon, lat);
|
||||
}
|
||||
|
||||
public String getGeohash() {
|
||||
return stringEncode(lon, lat);
|
||||
return Geohash.stringEncode(lon, lat);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -208,11 +207,6 @@ public final class GeoPoint implements ToXContentFragment {
|
|||
return lat + ", " + lon;
|
||||
}
|
||||
|
||||
public static GeoPoint parseFromLatLon(String latLon) {
|
||||
GeoPoint point = new GeoPoint(latLon);
|
||||
return point;
|
||||
}
|
||||
|
||||
public static GeoPoint fromGeohash(String geohash) {
|
||||
return new GeoPoint().resetFromGeoHash(geohash);
|
||||
}
|
||||
|
|
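On the GeoPoint side the geohash plumbing now delegates to the new utility; a brief sketch (not part of the commit) of the round trip through resetFromGeoHash and geohash().

import org.elasticsearch.common.geo.GeoPoint;

public class GeoPointGeohashSketch {
    public static void main(String[] args) {
        // decode the geohash to the cell's bottom-left corner
        GeoPoint point = new GeoPoint().resetFromGeoHash("u4pruydqqvj");
        System.out.println(point.lat() + ", " + point.lon());

        // re-encoding the point produces a full 12 character geohash for that corner
        System.out.println(point.geohash());
    }
}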
|
@ -19,7 +19,6 @@
|
|||
|
||||
package org.elasticsearch.common.geo;
|
||||
|
||||
import org.apache.lucene.geo.Rectangle;
|
||||
import org.apache.lucene.spatial.prefix.tree.GeohashPrefixTree;
|
||||
import org.apache.lucene.spatial.prefix.tree.QuadPrefixTree;
|
||||
import org.apache.lucene.util.SloppyMath;
|
||||
|
@ -34,6 +33,8 @@ import org.elasticsearch.common.xcontent.XContentParser.Token;
|
|||
import org.elasticsearch.common.xcontent.XContentSubParser;
|
||||
import org.elasticsearch.common.xcontent.json.JsonXContent;
|
||||
import org.elasticsearch.common.xcontent.support.XContentMapValues;
|
||||
import org.elasticsearch.geo.geometry.Rectangle;
|
||||
import org.elasticsearch.geo.utils.Geohash;
|
||||
import org.elasticsearch.index.fielddata.FieldData;
|
||||
import org.elasticsearch.index.fielddata.GeoPointValues;
|
||||
import org.elasticsearch.index.fielddata.MultiGeoPointValues;
|
||||
|
@ -535,14 +536,14 @@ public class GeoUtils {
|
|||
if (effectivePoint == EffectivePoint.BOTTOM_LEFT) {
|
||||
return point.resetFromGeoHash(geohash);
|
||||
} else {
|
||||
Rectangle rectangle = GeoHashUtils.bbox(geohash);
|
||||
Rectangle rectangle = Geohash.toBoundingBox(geohash);
|
||||
switch (effectivePoint) {
|
||||
case TOP_LEFT:
|
||||
return point.reset(rectangle.maxLat, rectangle.minLon);
|
||||
return point.reset(rectangle.getMaxLat(), rectangle.getMinLon());
|
||||
case TOP_RIGHT:
|
||||
return point.reset(rectangle.maxLat, rectangle.maxLon);
|
||||
return point.reset(rectangle.getMaxLat(), rectangle.getMaxLon());
|
||||
case BOTTOM_RIGHT:
|
||||
return point.reset(rectangle.minLat, rectangle.maxLon);
|
||||
return point.reset(rectangle.getMinLat(), rectangle.getMaxLon());
|
||||
default:
|
||||
throw new IllegalArgumentException("Unsupported effective point " + effectivePoint);
|
||||
}
|
||||
|
@ -639,17 +640,6 @@ public class GeoUtils {
|
|||
return Math.sqrt(x * x + y * y) * EARTH_MEAN_RADIUS;
|
||||
}
|
||||
|
||||
/** check if point is within a rectangle
|
||||
* todo: move this to lucene Rectangle class
|
||||
*/
|
||||
public static boolean rectangleContainsPoint(Rectangle r, double lat, double lon) {
|
||||
if (lat >= r.minLat && lat <= r.maxLat) {
|
||||
// if rectangle crosses the dateline we only check if the lon is >= min or max
|
||||
return r.crossesDateline() ? lon >= r.minLon || lon <= r.maxLon : lon >= r.minLon && lon <= r.maxLon;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
/**
|
||||
* Return a {@link SortedNumericDoubleValues} instance that returns the distances to a list of geo-points
|
||||
* for each document.
|
||||
|
|
|
@ -23,10 +23,10 @@ import org.apache.lucene.index.SortedNumericDocValues;
|
|||
import org.apache.lucene.util.ArrayUtil;
|
||||
import org.apache.lucene.util.BytesRef;
|
||||
import org.apache.lucene.util.BytesRefBuilder;
|
||||
import org.elasticsearch.common.geo.GeoHashUtils;
|
||||
import org.elasticsearch.common.geo.GeoPoint;
|
||||
import org.elasticsearch.common.geo.GeoUtils;
|
||||
import org.elasticsearch.common.time.DateUtils;
|
||||
import org.elasticsearch.geo.utils.Geohash;
|
||||
import org.elasticsearch.script.JodaCompatibleZonedDateTime;
|
||||
|
||||
import java.io.IOException;
|
||||
|
@ -365,8 +365,8 @@ public abstract class ScriptDocValues<T> extends AbstractList<T> {
|
|||
|
||||
public double geohashDistance(String geohash) {
|
||||
GeoPoint point = getValue();
|
||||
return GeoUtils.arcDistance(point.lat(), point.lon(), GeoHashUtils.decodeLatitude(geohash),
|
||||
GeoHashUtils.decodeLongitude(geohash));
|
||||
return GeoUtils.arcDistance(point.lat(), point.lon(), Geohash.decodeLatitude(geohash),
|
||||
Geohash.decodeLongitude(geohash));
|
||||
}
|
||||
|
||||
public double geohashDistanceWithDefault(String geohash, double defaultValue) {
|
||||
|
|
|
@ -21,7 +21,7 @@ package org.elasticsearch.index.query;
|
|||
|
||||
import org.apache.lucene.document.LatLonDocValuesField;
|
||||
import org.apache.lucene.document.LatLonPoint;
|
||||
import org.apache.lucene.geo.Rectangle;
|
||||
//import org.apache.lucene.geo.Rectangle;
|
||||
import org.apache.lucene.search.IndexOrDocValuesQuery;
|
||||
import org.apache.lucene.search.MatchNoDocsQuery;
|
||||
import org.apache.lucene.search.Query;
|
||||
|
@ -29,7 +29,6 @@ import org.elasticsearch.ElasticsearchParseException;
|
|||
import org.elasticsearch.common.Numbers;
|
||||
import org.elasticsearch.common.ParseField;
|
||||
import org.elasticsearch.common.ParsingException;
|
||||
import org.elasticsearch.common.geo.GeoHashUtils;
|
||||
import org.elasticsearch.common.geo.GeoPoint;
|
||||
import org.elasticsearch.common.geo.GeoShapeType;
|
||||
import org.elasticsearch.common.geo.GeoUtils;
|
||||
|
@ -39,6 +38,8 @@ import org.elasticsearch.common.io.stream.StreamInput;
|
|||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.geo.geometry.Rectangle;
|
||||
import org.elasticsearch.geo.utils.Geohash;
|
||||
import org.elasticsearch.index.mapper.GeoPointFieldMapper.GeoPointFieldType;
|
||||
import org.elasticsearch.index.mapper.MappedFieldType;
|
||||
|
||||
|
@ -181,8 +182,8 @@ public class GeoBoundingBoxQueryBuilder extends AbstractQueryBuilder<GeoBounding
|
|||
*/
|
||||
public GeoBoundingBoxQueryBuilder setCorners(final String geohash) {
|
||||
// get the bounding box of the geohash and set topLeft and bottomRight
|
||||
Rectangle ghBBox = GeoHashUtils.bbox(geohash);
|
||||
return setCorners(new GeoPoint(ghBBox.maxLat, ghBBox.minLon), new GeoPoint(ghBBox.minLat, ghBBox.maxLon));
|
||||
Rectangle ghBBox = Geohash.toBoundingBox(geohash);
|
||||
return setCorners(new GeoPoint(ghBBox.getMaxLat(), ghBBox.getMinLon()), new GeoPoint(ghBBox.getMinLat(), ghBBox.getMaxLon()));
|
||||
}
|
||||
|
||||
/**
|
||||
|
|
|
@ -22,7 +22,6 @@ package org.elasticsearch.search;
|
|||
import org.apache.lucene.document.InetAddressPoint;
|
||||
import org.apache.lucene.util.BytesRef;
|
||||
import org.elasticsearch.Version;
|
||||
import org.elasticsearch.common.geo.GeoHashUtils;
|
||||
import org.elasticsearch.common.io.stream.NamedWriteable;
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
|
@ -31,6 +30,7 @@ import org.elasticsearch.common.network.NetworkAddress;
|
|||
import org.elasticsearch.common.time.DateFormatter;
|
||||
import org.elasticsearch.common.time.DateMathParser;
|
||||
import org.elasticsearch.common.time.DateUtils;
|
||||
import org.elasticsearch.geo.utils.Geohash;
|
||||
import org.elasticsearch.index.mapper.DateFieldMapper;
|
||||
|
||||
import java.io.IOException;
|
||||
|
@ -248,7 +248,7 @@ public interface DocValueFormat extends NamedWriteable {
|
|||
|
||||
@Override
|
||||
public String format(long value) {
|
||||
return GeoHashUtils.stringEncode(value);
|
||||
return Geohash.stringEncode(value);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
|
|
@ -19,7 +19,7 @@
|
|||
|
||||
package org.elasticsearch.search.aggregations.bucket.geogrid;
|
||||
|
||||
import org.elasticsearch.common.geo.GeoHashUtils;
|
||||
import org.elasticsearch.geo.utils.Geohash;
|
||||
import org.elasticsearch.search.aggregations.Aggregator;
|
||||
import org.elasticsearch.search.aggregations.AggregatorFactories;
|
||||
import org.elasticsearch.search.aggregations.AggregatorFactory;
|
||||
|
@ -71,7 +71,7 @@ public class GeoHashGridAggregatorFactory extends ValuesSourceAggregatorFactory<
|
|||
if (collectsFromSingleBucket == false) {
|
||||
return asMultiBucketAggregator(this, context, parent);
|
||||
}
|
||||
CellIdSource cellIdSource = new CellIdSource(valuesSource, precision, GeoHashUtils::longEncode);
|
||||
CellIdSource cellIdSource = new CellIdSource(valuesSource, precision, Geohash::longEncode);
|
||||
return new GeoHashGridAggregator(name, factories, cellIdSource, requiredSize, shardSize, context, parent,
|
||||
pipelineAggregators, metaData);
|
||||
}
|
||||
|
|
|
@ -18,9 +18,9 @@
|
|||
*/
|
||||
package org.elasticsearch.search.aggregations.bucket.geogrid;
|
||||
|
||||
import org.elasticsearch.common.geo.GeoHashUtils;
|
||||
import org.elasticsearch.common.geo.GeoPoint;
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.geo.utils.Geohash;
|
||||
import org.elasticsearch.search.aggregations.InternalAggregations;
|
||||
|
||||
import java.io.IOException;
|
||||
|
@ -45,7 +45,7 @@ public class InternalGeoHashGridBucket extends InternalGeoGridBucket<InternalGeo
|
|||
|
||||
@Override
|
||||
public String getKeyAsString() {
|
||||
return GeoHashUtils.stringEncode(hashAsLong);
|
||||
return Geohash.stringEncode(hashAsLong);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
|
|
@ -50,8 +50,8 @@ import java.util.Set;
|
|||
import java.util.function.Function;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import static org.elasticsearch.common.geo.GeoHashUtils.addNeighbors;
|
||||
import static org.elasticsearch.common.geo.GeoHashUtils.stringEncode;
|
||||
import static org.elasticsearch.geo.utils.Geohash.addNeighbors;
|
||||
import static org.elasticsearch.geo.utils.Geohash.stringEncode;
|
||||
|
||||
/**
|
||||
* A {@link ContextMapping} that uses a geo location/area as a
|
||||
|
|
|
@ -18,10 +18,10 @@
|
|||
*/
|
||||
package org.elasticsearch.common.geo;
|
||||
|
||||
import org.apache.lucene.geo.Rectangle;
|
||||
import org.elasticsearch.common.io.stream.BytesStreamOutput;
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.unit.DistanceUnit;
|
||||
import org.elasticsearch.geo.geometry.Rectangle;
|
||||
import org.elasticsearch.test.ESTestCase;
|
||||
|
||||
import java.io.IOException;
|
||||
|
@ -70,15 +70,17 @@ public class GeoDistanceTests extends ESTestCase {
|
|||
public void testDistanceCheck() {
|
||||
// Note, is within is an approximation, so, even though 0.52 is outside 50mi, we still get "true"
|
||||
double radius = DistanceUnit.convert(50, DistanceUnit.MILES, DistanceUnit.METERS);
|
||||
Rectangle box = Rectangle.fromPointDistance(0, 0, radius);
|
||||
assertThat(GeoUtils.rectangleContainsPoint(box, 0.5, 0.5), equalTo(true));
|
||||
assertThat(GeoUtils.rectangleContainsPoint(box, 0.52, 0.52), equalTo(true));
|
||||
assertThat(GeoUtils.rectangleContainsPoint(box, 1, 1), equalTo(false));
|
||||
org.apache.lucene.geo.Rectangle r = org.apache.lucene.geo.Rectangle.fromPointDistance(0, 0, radius);
|
||||
Rectangle box = new Rectangle(r.minLat, r.maxLat, r.minLon, r.maxLon);
|
||||
assertThat(box.containsPoint(0.5, 0.5), equalTo(true));
|
||||
assertThat(box.containsPoint(0.52, 0.52), equalTo(true));
|
||||
assertThat(box.containsPoint(1, 1), equalTo(false));
|
||||
|
||||
radius = DistanceUnit.convert(200, DistanceUnit.MILES, DistanceUnit.METERS);
|
||||
box = Rectangle.fromPointDistance(0, 179, radius);
|
||||
assertThat(GeoUtils.rectangleContainsPoint(box, 0, -179), equalTo(true));
|
||||
assertThat(GeoUtils.rectangleContainsPoint(box, 0, -178), equalTo(false));
|
||||
r = org.apache.lucene.geo.Rectangle.fromPointDistance(0, 179, radius);
|
||||
box = new Rectangle(r.minLat, r.maxLat, r.minLon, r.maxLon);
|
||||
assertThat(box.containsPoint(0, -179), equalTo(true));
|
||||
assertThat(box.containsPoint(0, -178), equalTo(false));
|
||||
}
|
||||
|
||||
private static double arcDistance(GeoPoint p1, GeoPoint p2) {
|
||||
|
|
|
@ -39,7 +39,7 @@ import java.io.IOException;
|
|||
import java.util.Collection;
|
||||
|
||||
import static org.elasticsearch.action.support.WriteRequest.RefreshPolicy.IMMEDIATE;
|
||||
import static org.elasticsearch.common.geo.GeoHashUtils.stringEncode;
|
||||
import static org.elasticsearch.geo.utils.Geohash.stringEncode;
|
||||
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
|
||||
import static org.elasticsearch.index.mapper.GeoPointFieldMapper.Names.IGNORE_Z_VALUE;
|
||||
import static org.elasticsearch.index.mapper.GeoPointFieldMapper.Names.NULL_VALUE;
|
||||
|
|
|
@ -32,7 +32,7 @@ import org.elasticsearch.test.geo.RandomGeoGenerator;
|
|||
import java.io.IOException;
|
||||
import java.util.function.DoubleSupplier;
|
||||
|
||||
import static org.elasticsearch.common.geo.GeoHashUtils.stringEncode;
|
||||
import static org.elasticsearch.geo.utils.Geohash.stringEncode;
|
||||
import static org.elasticsearch.test.EqualsHashCodeTestUtils.checkEqualsAndHashCode;
|
||||
import static org.hamcrest.Matchers.is;
|
||||
|
||||
|
|
|
@ -23,12 +23,12 @@ import org.apache.lucene.spatial.prefix.tree.Cell;
|
|||
import org.apache.lucene.spatial.prefix.tree.GeohashPrefixTree;
|
||||
import org.apache.lucene.spatial.prefix.tree.QuadPrefixTree;
|
||||
import org.elasticsearch.ElasticsearchParseException;
|
||||
import org.elasticsearch.common.geo.GeoHashUtils;
|
||||
import org.elasticsearch.common.geo.GeoPoint;
|
||||
import org.elasticsearch.common.geo.GeoUtils;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.common.xcontent.XContentParser.Token;
|
||||
import org.elasticsearch.geo.utils.Geohash;
|
||||
import org.elasticsearch.test.ESTestCase;
|
||||
import org.locationtech.spatial4j.context.SpatialContext;
|
||||
import org.locationtech.spatial4j.distance.DistanceUtils;
|
||||
|
@ -457,7 +457,7 @@ public class GeoUtilsTests extends ESTestCase {
|
|||
|
||||
public void testParseGeoPointGeohash() throws IOException {
|
||||
for (int i = 0; i < 100; i++) {
|
||||
int geoHashLength = randomIntBetween(1, GeoHashUtils.PRECISION);
|
||||
int geoHashLength = randomIntBetween(1, Geohash.PRECISION);
|
||||
StringBuilder geohashBuilder = new StringBuilder(geoHashLength);
|
||||
for (int j = 0; j < geoHashLength; j++) {
|
||||
geohashBuilder.append(BASE_32[randomInt(BASE_32.length - 1)]);
|
||||
|
|
|
@ -44,8 +44,8 @@ import java.util.List;
|
|||
import java.util.Random;
|
||||
import java.util.Set;
|
||||
|
||||
import static org.elasticsearch.common.geo.GeoHashUtils.PRECISION;
|
||||
import static org.elasticsearch.common.geo.GeoHashUtils.stringEncode;
|
||||
import static org.elasticsearch.geo.utils.Geohash.PRECISION;
|
||||
import static org.elasticsearch.geo.utils.Geohash.stringEncode;
|
||||
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
|
||||
import static org.elasticsearch.search.aggregations.AggregationBuilders.geohashGrid;
|
||||
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
|
||||
|
|
|
@ -20,7 +20,7 @@ package org.elasticsearch.search.aggregations.bucket;
|
|||
|
||||
import org.elasticsearch.action.index.IndexRequestBuilder;
|
||||
import org.elasticsearch.action.search.SearchResponse;
|
||||
import org.elasticsearch.common.geo.GeoHashUtils;
|
||||
import org.elasticsearch.geo.utils.Geohash;
|
||||
import org.elasticsearch.index.query.QueryBuilders;
|
||||
import org.elasticsearch.search.aggregations.Aggregator.SubAggCollectionMode;
|
||||
import org.elasticsearch.search.aggregations.bucket.filter.Filter;
|
||||
|
@ -65,7 +65,7 @@ public class ShardReduceIT extends ESIntegTestCase {
|
|||
.startObject()
|
||||
.field("value", value)
|
||||
.field("ip", "10.0.0." + value)
|
||||
.field("location", GeoHashUtils.stringEncode(5, 52, GeoHashUtils.PRECISION))
|
||||
.field("location", Geohash.stringEncode(5, 52, Geohash.PRECISION))
|
||||
.field("date", date)
|
||||
.field("term-l", 1)
|
||||
.field("term-d", 1.5)
|
||||
|
|
|
@ -19,7 +19,7 @@
|
|||
|
||||
package org.elasticsearch.search.aggregations.bucket.geogrid;
|
||||
|
||||
import static org.elasticsearch.common.geo.GeoHashUtils.stringEncode;
|
||||
import static org.elasticsearch.geo.utils.Geohash.stringEncode;
|
||||
|
||||
public class GeoHashGridAggregatorTests extends GeoGridAggregatorTestCase<InternalGeoHashGridBucket> {
|
||||
|
||||
|
|
|
@ -18,8 +18,8 @@
|
|||
*/
|
||||
package org.elasticsearch.search.aggregations.bucket.geogrid;
|
||||
|
||||
import org.elasticsearch.common.geo.GeoHashUtils;
|
||||
import org.elasticsearch.common.io.stream.Writeable;
|
||||
import org.elasticsearch.geo.utils.Geohash;
|
||||
import org.elasticsearch.search.aggregations.InternalAggregations;
|
||||
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
|
||||
|
||||
|
@ -46,7 +46,7 @@ public class GeoHashGridTests extends GeoGridTestCase<InternalGeoHashGridBucket,
|
|||
|
||||
@Override
|
||||
protected long longEncode(double lng, double lat, int precision) {
|
||||
return GeoHashUtils.longEncode(lng, lat, precision);
|
||||
return Geohash.longEncode(lng, lat, precision);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
|
|
@ -28,12 +28,12 @@ import org.elasticsearch.action.index.IndexRequestBuilder;
|
|||
import org.elasticsearch.action.search.SearchResponse;
|
||||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.common.document.DocumentField;
|
||||
import org.elasticsearch.common.geo.GeoHashUtils;
|
||||
import org.elasticsearch.common.geo.GeoPoint;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.xcontent.ToXContent;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentFactory;
|
||||
import org.elasticsearch.geo.utils.Geohash;
|
||||
import org.elasticsearch.search.SearchHit;
|
||||
import org.elasticsearch.search.sort.SortBuilders;
|
||||
import org.elasticsearch.search.sort.SortOrder;
|
||||
|
@ -216,8 +216,8 @@ public abstract class AbstractGeoTestCase extends ESIntegTestCase {
|
|||
}
|
||||
|
||||
private void updateGeohashBucketsCentroid(final GeoPoint location) {
|
||||
String hash = GeoHashUtils.stringEncode(location.lon(), location.lat(), GeoHashUtils.PRECISION);
|
||||
for (int precision = GeoHashUtils.PRECISION; precision > 0; --precision) {
|
||||
String hash = Geohash.stringEncode(location.lon(), location.lat(), Geohash.PRECISION);
|
||||
for (int precision = Geohash.PRECISION; precision > 0; --precision) {
|
||||
final String h = hash.substring(0, precision);
|
||||
expectedDocCountsForGeoHash.put(h, expectedDocCountsForGeoHash.getOrDefault(h, 0) + 1);
|
||||
expectedCentroidsForGeoHash.put(h, updateHashCentroid(h, location));
|
||||
|
|
|
@ -23,13 +23,13 @@ import org.elasticsearch.Version;
|
|||
import org.elasticsearch.action.search.SearchRequestBuilder;
|
||||
import org.elasticsearch.action.search.SearchResponse;
|
||||
import org.elasticsearch.cluster.metadata.IndexMetaData;
|
||||
import org.elasticsearch.common.geo.GeoHashUtils;
|
||||
import org.elasticsearch.common.geo.GeoPoint;
|
||||
import org.elasticsearch.common.geo.GeoUtils;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.unit.DistanceUnit;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentFactory;
|
||||
import org.elasticsearch.geo.utils.Geohash;
|
||||
import org.elasticsearch.index.fielddata.ScriptDocValues;
|
||||
import org.elasticsearch.index.query.QueryBuilders;
|
||||
import org.elasticsearch.plugins.Plugin;
|
||||
|
@ -62,7 +62,7 @@ public class GeoDistanceIT extends ESIntegTestCase {
|
|||
private static final double src_lon = -117.151;
|
||||
private static final double tgt_lat = 32.81;
|
||||
private static final double tgt_lon = -117.21;
|
||||
private static final String tgt_geohash = GeoHashUtils.stringEncode(tgt_lon, tgt_lat);
|
||||
private static final String tgt_geohash = Geohash.stringEncode(tgt_lon, tgt_lat);
|
||||
|
||||
@Override
|
||||
protected Collection<Class<? extends Plugin>> nodePlugins() {
|
||||
|
@ -149,8 +149,8 @@ public class GeoDistanceIT extends ESIntegTestCase {
|
|||
Collections.emptyMap())).get();
|
||||
Double resultDistance4 = searchResponse4.getHits().getHits()[0].getFields().get("distance").getValue();
|
||||
assertThat(resultDistance4,
|
||||
closeTo(GeoUtils.arcDistance(src_lat, src_lon, GeoHashUtils.decodeLatitude(tgt_geohash),
|
||||
GeoHashUtils.decodeLongitude(tgt_geohash)), 0.01d));
|
||||
closeTo(GeoUtils.arcDistance(src_lat, src_lon, Geohash.decodeLatitude(tgt_geohash),
|
||||
Geohash.decodeLongitude(tgt_geohash)), 0.01d));
|
||||
|
||||
// Test doc['location'].arcDistance(lat, lon + 360)/1000d
|
||||
SearchResponse searchResponse5 = client().prepareSearch().addStoredField("_source")
|
||||
|
|
|
@ -35,7 +35,6 @@ import org.elasticsearch.cluster.metadata.IndexMetaData;
|
|||
import org.elasticsearch.common.Priority;
|
||||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.common.bytes.BytesReference;
|
||||
import org.elasticsearch.common.geo.GeoHashUtils;
|
||||
import org.elasticsearch.common.geo.GeoPoint;
|
||||
import org.elasticsearch.common.geo.GeoUtils;
|
||||
import org.elasticsearch.common.geo.builders.CoordinatesBuilder;
|
||||
|
@ -67,6 +66,7 @@ import java.util.Random;
|
|||
import java.util.zip.GZIPInputStream;
|
||||
|
||||
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
|
||||
import static org.elasticsearch.geo.utils.Geohash.addNeighbors;
|
||||
import static org.elasticsearch.index.query.QueryBuilders.geoBoundingBoxQuery;
|
||||
import static org.elasticsearch.index.query.QueryBuilders.geoDistanceQuery;
|
||||
import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;
|
||||
|
@ -434,26 +434,26 @@ public class GeoFilterIT extends ESIntegTestCase {
|
|||
|
||||
public void testNeighbors() {
|
||||
// Simple root case
|
||||
assertThat(GeoHashUtils.addNeighbors("7", new ArrayList<String>()), containsInAnyOrder("4", "5", "6", "d", "e", "h", "k", "s"));
|
||||
assertThat(addNeighbors("7", new ArrayList<>()), containsInAnyOrder("4", "5", "6", "d", "e", "h", "k", "s"));
|
||||
|
||||
// Root cases (Outer cells)
|
||||
assertThat(GeoHashUtils.addNeighbors("0", new ArrayList<String>()), containsInAnyOrder("1", "2", "3", "p", "r"));
|
||||
assertThat(GeoHashUtils.addNeighbors("b", new ArrayList<String>()), containsInAnyOrder("8", "9", "c", "x", "z"));
|
||||
assertThat(GeoHashUtils.addNeighbors("p", new ArrayList<String>()), containsInAnyOrder("n", "q", "r", "0", "2"));
|
||||
assertThat(GeoHashUtils.addNeighbors("z", new ArrayList<String>()), containsInAnyOrder("8", "b", "w", "x", "y"));
|
||||
assertThat(addNeighbors("0", new ArrayList<>()), containsInAnyOrder("1", "2", "3", "p", "r"));
|
||||
assertThat(addNeighbors("b", new ArrayList<>()), containsInAnyOrder("8", "9", "c", "x", "z"));
|
||||
assertThat(addNeighbors("p", new ArrayList<>()), containsInAnyOrder("n", "q", "r", "0", "2"));
|
||||
assertThat(addNeighbors("z", new ArrayList<>()), containsInAnyOrder("8", "b", "w", "x", "y"));
|
||||
|
||||
// Root crossing dateline
|
||||
assertThat(GeoHashUtils.addNeighbors("2", new ArrayList<String>()), containsInAnyOrder("0", "1", "3", "8", "9", "p", "r", "x"));
|
||||
assertThat(GeoHashUtils.addNeighbors("r", new ArrayList<String>()), containsInAnyOrder("0", "2", "8", "n", "p", "q", "w", "x"));
|
||||
assertThat(addNeighbors("2", new ArrayList<>()), containsInAnyOrder("0", "1", "3", "8", "9", "p", "r", "x"));
|
||||
assertThat(addNeighbors("r", new ArrayList<>()), containsInAnyOrder("0", "2", "8", "n", "p", "q", "w", "x"));
|
||||
|
||||
// level1: simple case
|
||||
assertThat(GeoHashUtils.addNeighbors("dk", new ArrayList<String>()),
|
||||
assertThat(addNeighbors("dk", new ArrayList<>()),
|
||||
containsInAnyOrder("d5", "d7", "de", "dh", "dj", "dm", "ds", "dt"));
|
||||
|
||||
// Level1: crossing cells
|
||||
assertThat(GeoHashUtils.addNeighbors("d5", new ArrayList<String>()),
|
||||
assertThat(addNeighbors("d5", new ArrayList<>()),
|
||||
containsInAnyOrder("d4", "d6", "d7", "dh", "dk", "9f", "9g", "9u"));
|
||||
assertThat(GeoHashUtils.addNeighbors("d0", new ArrayList<String>()),
|
||||
assertThat(addNeighbors("d0", new ArrayList<>()),
|
||||
containsInAnyOrder("d1", "d2", "d3", "9b", "9c", "6p", "6r", "3z"));
|
||||
}
|
||||
|
||||
|
|
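The renamed neighbor helpers are exercised above; a compact sketch (not part of the commit) of computing a cell's neighbors, both with the single-argument form and with an explicit level.

import java.util.ArrayList;
import java.util.List;
import org.elasticsearch.geo.utils.Geohash;

public class GeohashNeighborsSketch {
    public static void main(String[] args) {
        // the 8 cells surrounding "dk", as asserted in testNeighbors above
        List<String> around = new ArrayList<>();
        Geohash.addNeighbors("dk", around);
        System.out.println(around);   // d5, d7, de, dh, dj, dm, ds, dt in some order

        // the same machinery at an explicit level, here for the level-1 prefix "d"
        List<String> coarse = new ArrayList<>();
        Geohash.addNeighborsAtLevel("d", 1, coarse);
        System.out.println(coarse);
    }
}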
|
@ -23,11 +23,11 @@ import org.elasticsearch.Version;
|
|||
import org.elasticsearch.action.search.SearchResponse;
|
||||
import org.elasticsearch.cluster.metadata.IndexMetaData;
|
||||
import org.elasticsearch.common.geo.GeoDistance;
|
||||
import org.elasticsearch.common.geo.GeoHashUtils;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.common.xcontent.XContentFactory;
|
||||
import org.elasticsearch.common.xcontent.json.JsonXContent;
|
||||
import org.elasticsearch.geo.utils.Geohash;
|
||||
import org.elasticsearch.index.query.QueryBuilders;
|
||||
import org.elasticsearch.test.ESIntegTestCase;
|
||||
import org.elasticsearch.test.VersionUtils;
|
||||
|
@ -384,7 +384,7 @@ public class GeoDistanceIT extends ESIntegTestCase {
|
|||
.startObject("pin").field("type", "geo_point");
|
||||
mapping.endObject().endObject().endObject().endObject();
|
||||
|
||||
XContentBuilder source = JsonXContent.contentBuilder().startObject().field("pin", GeoHashUtils.stringEncode(lon, lat)).endObject();
|
||||
XContentBuilder source = JsonXContent.contentBuilder().startObject().field("pin", Geohash.stringEncode(lon, lat)).endObject();
|
||||
|
||||
assertAcked(prepareCreate("locations").setSettings(settings).addMapping("location", mapping));
|
||||
client().prepareIndex("locations", "location", "1").setCreate(true).setSource(source).get();
|
||||
|
|
|
@ -40,7 +40,7 @@ import java.util.ArrayList;
|
|||
import java.util.Collection;
|
||||
import java.util.List;
|
||||
|
||||
import static org.elasticsearch.common.geo.GeoHashUtils.addNeighbors;
|
||||
import static org.elasticsearch.geo.utils.Geohash.addNeighborsAtLevel;
|
||||
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
|
||||
import static org.elasticsearch.search.suggest.completion.CategoryContextMappingTests.assertContextSuggestFields;
|
||||
import static org.hamcrest.Matchers.equalTo;
|
||||
|
@ -273,7 +273,7 @@ public class GeoContextMappingTests extends ESSingleNodeTestCase {
|
|||
assertThat(internalQueryContexts.size(), equalTo(1 + 8));
|
||||
Collection<String> locations = new ArrayList<>();
|
||||
locations.add("ezs42e");
|
||||
addNeighbors("ezs42e", GeoContextMapping.DEFAULT_PRECISION, locations);
|
||||
addNeighborsAtLevel("ezs42e", GeoContextMapping.DEFAULT_PRECISION, locations);
|
||||
for (ContextMapping.InternalQueryContext internalQueryContext : internalQueryContexts) {
|
||||
assertThat(internalQueryContext.context, isIn(locations));
|
||||
assertThat(internalQueryContext.boost, equalTo(1));
|
||||
|
@ -292,7 +292,7 @@ public class GeoContextMappingTests extends ESSingleNodeTestCase {
|
|||
assertThat(internalQueryContexts.size(), equalTo(1 + 8));
|
||||
Collection<String> locations = new ArrayList<>();
|
||||
locations.add("wh0n94");
|
||||
addNeighbors("wh0n94", GeoContextMapping.DEFAULT_PRECISION, locations);
|
||||
addNeighborsAtLevel("wh0n94", GeoContextMapping.DEFAULT_PRECISION, locations);
|
||||
for (ContextMapping.InternalQueryContext internalQueryContext : internalQueryContexts) {
|
||||
assertThat(internalQueryContext.context, isIn(locations));
|
||||
assertThat(internalQueryContext.boost, equalTo(1));
|
||||
|
@ -316,11 +316,11 @@ public class GeoContextMappingTests extends ESSingleNodeTestCase {
|
|||
Collection<String> locations = new ArrayList<>();
|
||||
locations.add("wh0n94");
|
||||
locations.add("w");
|
||||
addNeighbors("w", 1, locations);
|
||||
addNeighborsAtLevel("w", 1, locations);
|
||||
locations.add("wh");
|
||||
addNeighbors("wh", 2, locations);
|
||||
addNeighborsAtLevel("wh", 2, locations);
|
||||
locations.add("wh0");
|
||||
addNeighbors("wh0", 3, locations);
|
||||
addNeighborsAtLevel("wh0", 3, locations);
|
||||
for (ContextMapping.InternalQueryContext internalQueryContext : internalQueryContexts) {
|
||||
assertThat(internalQueryContext.context, isIn(locations));
|
||||
assertThat(internalQueryContext.boost, equalTo(10));
|
||||
|
@ -354,15 +354,15 @@ public class GeoContextMappingTests extends ESSingleNodeTestCase {
|
|||
Collection<String> firstLocations = new ArrayList<>();
|
||||
firstLocations.add("wh0n94");
|
||||
firstLocations.add("w");
|
||||
addNeighbors("w", 1, firstLocations);
|
||||
addNeighborsAtLevel("w", 1, firstLocations);
|
||||
firstLocations.add("wh");
|
||||
addNeighbors("wh", 2, firstLocations);
|
||||
addNeighborsAtLevel("wh", 2, firstLocations);
|
||||
firstLocations.add("wh0");
|
||||
addNeighbors("wh0", 3, firstLocations);
|
||||
addNeighborsAtLevel("wh0", 3, firstLocations);
|
||||
Collection<String> secondLocations = new ArrayList<>();
|
||||
secondLocations.add("w5cx04");
|
||||
secondLocations.add("w5cx0");
|
||||
addNeighbors("w5cx0", 5, secondLocations);
|
||||
addNeighborsAtLevel("w5cx0", 5, secondLocations);
|
||||
for (ContextMapping.InternalQueryContext internalQueryContext : internalQueryContexts) {
|
||||
if (firstLocations.contains(internalQueryContext.context)) {
|
||||
assertThat(internalQueryContext.boost, equalTo(10));
|
||||
|
@ -397,12 +397,12 @@ public class GeoContextMappingTests extends ESSingleNodeTestCase {
|
|||
Collection<String> firstLocations = new ArrayList<>();
|
||||
firstLocations.add("wh0n94");
|
||||
firstLocations.add("w");
|
||||
addNeighbors("w", 1, firstLocations);
|
||||
addNeighborsAtLevel("w", 1, firstLocations);
|
||||
firstLocations.add("wh");
|
||||
addNeighbors("wh", 2, firstLocations);
|
||||
addNeighborsAtLevel("wh", 2, firstLocations);
|
||||
Collection<String> secondLocations = new ArrayList<>();
|
||||
secondLocations.add("w5cx04");
|
||||
addNeighbors("w5cx04", 6, secondLocations);
|
||||
addNeighborsAtLevel("w5cx04", 6, secondLocations);
|
||||
for (ContextMapping.InternalQueryContext internalQueryContext : internalQueryContexts) {
|
||||
if (firstLocations.contains(internalQueryContext.context)) {
|
||||
assertThat(internalQueryContext.boost, equalTo(10));