Refactor Geo utilities to Lucene 5.4
Remove the local `lucene/XGeo*` classes and refactor to the Lucene 5.4 geo utility classes.
parent 1390e68465
commit 8c535e0f6e
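At call sites the change is mechanical: every reference to the bundled `XGeoHashUtils`/`XGeoUtils` helpers moves to the Lucene 5.4 classes of the same name without the `X` prefix, as the hunks below show. A minimal hedged sketch of a migrated call site (the wrapper class and sample coordinates are illustrative only):

```java
import org.apache.lucene.util.GeoHashUtils; // previously org.apache.lucene.util.XGeoHashUtils
import org.apache.lucene.util.GeoUtils;     // previously org.apache.lucene.util.XGeoUtils

public class GeoRefactorExample {
  public static void main(String[] args) {
    double lon = -73.9857, lat = 40.7484;

    // geohash string encoding, previously XGeoHashUtils.stringEncode(lon, lat)
    String geohash = GeoHashUtils.stringEncode(lon, lat);

    // morton (z-order) hash of the point, previously XGeoUtils.mortonHash(lon, lat)
    long morton = GeoUtils.mortonHash(lon, lat);

    System.out.println(geohash + " -> " + morton);
  }
}
```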
@@ -1,279 +0,0 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.apache.lucene.util;

import java.util.ArrayList;
import java.util.Collection;

/**
 * Utilities for converting to/from the GeoHash standard
 *
 * The geohash long format is represented as lon/lat (x/y) interleaved with the 4 least significant bits
 * representing the level (1-12) [xyxy...xyxyllll]
 *
 * This differs from a morton encoded value which interleaves lat/lon (y/x).
 *
 * @lucene.experimental
 */
public class XGeoHashUtils {
  public static final char[] BASE_32 = {'0', '1', '2', '3', '4', '5', '6',
      '7', '8', '9', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'j', 'k', 'm', 'n',
      'p', 'q', 'r', 's', 't', 'u', 'v', 'w', 'x', 'y', 'z'};

  public static final String BASE_32_STRING = new String(BASE_32);

  public static final int PRECISION = 12;
  private static final short MORTON_OFFSET = (XGeoUtils.BITS<<1) - (PRECISION*5);

  /**
   * Encode lon/lat to the geohash based long format (lon/lat interleaved, 4 least significant bits = level)
   */
  public static final long longEncode(final double lon, final double lat, final int level) {
    // shift to appropriate level
    final short msf = (short)(((12 - level) * 5) + MORTON_OFFSET);
    return ((BitUtil.flipFlop(XGeoUtils.mortonHash(lon, lat)) >>> msf) << 4) | level;
  }

  /**
   * Encode from geohash string to the geohash based long format (lon/lat interleaved, 4 least significant bits = level)
   */
  public static final long longEncode(final String hash) {
    int level = hash.length()-1;
    long b;
    long l = 0L;
    for(char c : hash.toCharArray()) {
      b = (long)(BASE_32_STRING.indexOf(c));
      l |= (b<<(level--*5));
    }
    return (l<<4)|hash.length();
  }

  /**
   * Encode an existing geohash long to the provided precision
   */
  public static long longEncode(long geohash, int level) {
    final short precision = (short)(geohash & 15);
    if (precision == level) {
      return geohash;
    } else if (precision > level) {
      return ((geohash >>> (((precision - level) * 5) + 4)) << 4) | level;
    }
    return ((geohash >>> 4) << (((level - precision) * 5) + 4) | level);
  }

  /**
   * Encode to a geohash string from the geohash based long format
   */
  public static final String stringEncode(long geoHashLong) {
    int level = (int)geoHashLong&15;
    geoHashLong >>>= 4;
    char[] chars = new char[level];
    do {
      chars[--level] = BASE_32[(int)(geoHashLong&31L)];
      geoHashLong>>>=5;
    } while(level > 0);

    return new String(chars);
  }

  /**
   * Encode to a geohash string from full resolution longitude, latitude
   */
  public static final String stringEncode(final double lon, final double lat) {
    return stringEncode(lon, lat, 12);
  }

  /**
   * Encode to a level specific geohash string from full resolution longitude, latitude
   */
  public static final String stringEncode(final double lon, final double lat, final int level) {
    // bit twiddle to geohash (since geohash is a swapped (lon/lat) encoding)
    final long hashedVal = BitUtil.flipFlop(XGeoUtils.mortonHash(lon, lat));

    StringBuilder geoHash = new StringBuilder();
    short precision = 0;
    final short msf = (XGeoUtils.BITS<<1)-5;
    long mask = 31L<<msf;
    do {
      geoHash.append(BASE_32[(int)((mask & hashedVal)>>>(msf-(precision*5)))]);
      // next 5 bits
      mask >>>= 5;
    } while (++precision < level);
    return geoHash.toString();
  }

  /**
   * Encode to a full precision geohash string from a given morton encoded long value
   */
  public static final String stringEncodeFromMortonLong(final long hashedVal) throws Exception {
    return stringEncode(hashedVal, PRECISION);
  }

  /**
   * Encode to a geohash string at a given level from a morton long
   */
  public static final String stringEncodeFromMortonLong(long hashedVal, final int level) {
    // bit twiddle to geohash (since geohash is a swapped (lon/lat) encoding)
    hashedVal = BitUtil.flipFlop(hashedVal);

    StringBuilder geoHash = new StringBuilder();
    short precision = 0;
    final short msf = (XGeoUtils.BITS<<1)-5;
    long mask = 31L<<msf;
    do {
      geoHash.append(BASE_32[(int)((mask & hashedVal)>>>(msf-(precision*5)))]);
      // next 5 bits
      mask >>>= 5;
    } while (++precision < level);
    return geoHash.toString();
  }

  /**
   * Encode to a morton long value from a given geohash string
   */
  public static final long mortonEncode(final String hash) {
    int level = 11;
    long b;
    long l = 0L;
    for(char c : hash.toCharArray()) {
      b = (long)(BASE_32_STRING.indexOf(c));
      l |= (b<<((level--*5) + MORTON_OFFSET));
    }
    return BitUtil.flipFlop(l);
  }

  /**
   * Encode to a morton long value from a given geohash long value
   */
  public static final long mortonEncode(final long geoHashLong) {
    final int level = (int)(geoHashLong&15);
    final short odd = (short)(level & 1);

    return BitUtil.flipFlop((geoHashLong >>> 4) << odd) << (((12 - level) * 5) + (MORTON_OFFSET - odd));
  }

  private static final char encode(int x, int y) {
    return BASE_32[((x & 1) + ((y & 1) * 2) + ((x & 2) * 2) + ((y & 2) * 4) + ((x & 4) * 4)) % 32];
  }

  /**
   * Calculate all neighbors of a given geohash cell.
   *
   * @param geohash Geohash of the defined cell
   * @return geohashes of all neighbor cells
   */
  public static Collection<? extends CharSequence> neighbors(String geohash) {
    return addNeighbors(geohash, geohash.length(), new ArrayList<CharSequence>(8));
  }

  /**
   * Calculate the geohash of a neighbor of a geohash
   *
   * @param geohash the geohash of a cell
   * @param level   level of the geohash
   * @param dx      delta of the first grid coordinate (must be -1, 0 or +1)
   * @param dy      delta of the second grid coordinate (must be -1, 0 or +1)
   * @return geohash of the defined cell
   */
  private final static String neighbor(String geohash, int level, int dx, int dy) {
    int cell = BASE_32_STRING.indexOf(geohash.charAt(level -1));

    // Decoding the Geohash bit pattern to determine grid coordinates
    int x0 = cell & 1;  // first bit of x
    int y0 = cell & 2;  // first bit of y
    int x1 = cell & 4;  // second bit of x
    int y1 = cell & 8;  // second bit of y
    int x2 = cell & 16; // third bit of x

    // combine the bitpattern to grid coordinates.
    // note that the semantics of x and y are swapping
    // on each level
    int x = x0 + (x1 / 2) + (x2 / 4);
    int y = (y0 / 2) + (y1 / 4);

    if (level == 1) {
      // Root cells at north (namely "bcfguvyz") or at
      // south (namely "0145hjnp") do not have neighbors
      // in north/south direction
      if ((dy < 0 && y == 0) || (dy > 0 && y == 3)) {
        return null;
      } else {
        return Character.toString(encode(x + dx, y + dy));
      }
    } else {
      // define grid coordinates for next level
      final int nx = ((level % 2) == 1) ? (x + dx) : (x + dy);
      final int ny = ((level % 2) == 1) ? (y + dy) : (y + dx);

      // if the defined neighbor has the same parent a the current cell
      // encode the cell directly. Otherwise find the cell next to this
      // cell recursively. Since encoding wraps around within a cell
      // it can be encoded here.
      // xLimit and YLimit must always be respectively 7 and 3
      // since x and y semantics are swapping on each level.
      if (nx >= 0 && nx <= 7 && ny >= 0 && ny <= 3) {
        return geohash.substring(0, level - 1) + encode(nx, ny);
      } else {
        String neighbor = neighbor(geohash, level - 1, dx, dy);
        return (neighbor != null) ? neighbor + encode(nx, ny) : neighbor;
      }
    }
  }

  /**
   * Add all geohashes of the cells next to a given geohash to a list.
   *
   * @param geohash   Geohash of a specified cell
   * @param neighbors list to add the neighbors to
   * @return the given list
   */
  public static final <E extends Collection<? super String>> E addNeighbors(String geohash, E neighbors) {
    return addNeighbors(geohash, geohash.length(), neighbors);
  }

  /**
   * Add all geohashes of the cells next to a given geohash to a list.
   *
   * @param geohash   Geohash of a specified cell
   * @param length    level of the given geohash
   * @param neighbors list to add the neighbors to
   * @return the given list
   */
  public static final <E extends Collection<? super String>> E addNeighbors(String geohash, int length, E neighbors) {
    String south = neighbor(geohash, length, 0, -1);
    String north = neighbor(geohash, length, 0, +1);
    if (north != null) {
      neighbors.add(neighbor(north, length, -1, 0));
      neighbors.add(north);
      neighbors.add(neighbor(north, length, +1, 0));
    }

    neighbors.add(neighbor(geohash, length, -1, 0));
    neighbors.add(neighbor(geohash, length, +1, 0));

    if (south != null) {
      neighbors.add(neighbor(south, length, -1, 0));
      neighbors.add(south);
      neighbors.add(neighbor(south, length, +1, 0));
    }

    return neighbors;
  }
}
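As the class comment above describes, the geohash long format keeps the interleaved lon/lat bits above a 4-bit level, so encoding and decoding round-trip at any precision from 1 to 12. A small sketch of that round trip, assuming the Lucene 5.4 `GeoHashUtils` keeps the method names used in the hunks further down:

```java
import org.apache.lucene.util.GeoHashUtils;

public class GeoHashLongFormatDemo {
  public static void main(String[] args) {
    double lon = 5.6, lat = 52.0;

    for (int level = 1; level <= GeoHashUtils.PRECISION; level++) {
      long asLong = GeoHashUtils.longEncode(lon, lat, level); // [xyxy...xyxyllll]
      int storedLevel = (int) (asLong & 15);                  // level sits in the 4 least significant bits
      String asString = GeoHashUtils.stringEncode(asLong);    // base-32 string of length `level`

      assert storedLevel == level && asString.length() == level;
      System.out.println(level + " -> " + asString);
    }
  }
}
```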
@@ -1,383 +0,0 @@
package org.apache.lucene.util;

/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

/**
 * Reusable geo-spatial projection utility methods.
 *
 * @lucene.experimental
 */
public class XGeoProjectionUtils {
  // WGS84 earth-ellipsoid major (a) minor (b) radius, (f) flattening and eccentricity (e)
  static final double SEMIMAJOR_AXIS = 6_378_137; // [m]
  static final double FLATTENING = 1.0/298.257223563;
  static final double SEMIMINOR_AXIS = SEMIMAJOR_AXIS * (1.0 - FLATTENING); //6_356_752.31420; // [m]
  static final double ECCENTRICITY = StrictMath.sqrt((2.0 - FLATTENING) * FLATTENING);
  static final double PI_OVER_2 = StrictMath.PI / 2.0D;
  static final double SEMIMAJOR_AXIS2 = SEMIMAJOR_AXIS * SEMIMAJOR_AXIS;
  static final double SEMIMINOR_AXIS2 = SEMIMINOR_AXIS * SEMIMINOR_AXIS;

  /**
   * Converts from geocentric earth-centered earth-fixed to geodesic lat/lon/alt
   * @param x Cartesian x coordinate
   * @param y Cartesian y coordinate
   * @param z Cartesian z coordinate
   * @param lla 0: longitude 1: latitude: 2: altitude
   * @return double array as 0: longitude 1: latitude 2: altitude
   */
  public static final double[] ecfToLLA(final double x, final double y, final double z, double[] lla) {
    boolean atPole = false;
    final double ad_c = 1.0026000D;
    final double e2 = (SEMIMAJOR_AXIS2 - SEMIMINOR_AXIS2)/(SEMIMAJOR_AXIS2);
    final double ep2 = (SEMIMAJOR_AXIS2 - SEMIMINOR_AXIS2)/(SEMIMINOR_AXIS2);
    final double cos67P5 = 0.38268343236508977D;

    if (lla == null) {
      lla = new double[3];
    }

    if (x != 0.0) {
      lla[0] = StrictMath.atan2(y,x);
    } else {
      if (y > 0) {
        lla[0] = PI_OVER_2;
      } else if (y < 0) {
        lla[0] = -PI_OVER_2;
      } else {
        atPole = true;
        lla[0] = 0.0D;
        if (z > 0.0) {
          lla[1] = PI_OVER_2;
        } else if (z < 0.0) {
          lla[1] = -PI_OVER_2;
        } else {
          lla[1] = PI_OVER_2;
          lla[2] = -SEMIMINOR_AXIS;
          return lla;
        }
      }
    }

    final double w2 = x*x + y*y;
    final double w = StrictMath.sqrt(w2);
    final double t0 = z * ad_c;
    final double s0 = StrictMath.sqrt(t0 * t0 + w2);
    final double sinB0 = t0 / s0;
    final double cosB0 = w / s0;
    final double sin3B0 = sinB0 * sinB0 * sinB0;
    final double t1 = z + SEMIMINOR_AXIS * ep2 * sin3B0;
    final double sum = w - SEMIMAJOR_AXIS * e2 * cosB0 * cosB0 * cosB0;
    final double s1 = StrictMath.sqrt(t1 * t1 + sum * sum);
    final double sinP1 = t1 / s1;
    final double cosP1 = sum / s1;
    final double rn = SEMIMAJOR_AXIS / StrictMath.sqrt(1.0D - e2 * sinP1 * sinP1);

    if (cosP1 >= cos67P5) {
      lla[2] = w / cosP1 - rn;
    } else if (cosP1 <= -cos67P5) {
      lla[2] = w / -cosP1 - rn;
    } else {
      lla[2] = z / sinP1 + rn * (e2 - 1.0);
    }
    if (!atPole) {
      lla[1] = StrictMath.atan(sinP1/cosP1);
    }
    lla[0] = StrictMath.toDegrees(lla[0]);
    lla[1] = StrictMath.toDegrees(lla[1]);

    return lla;
  }

  /**
   * Converts from geodesic lon lat alt to geocentric earth-centered earth-fixed
   * @param lon geodesic longitude
   * @param lat geodesic latitude
   * @param alt geodesic altitude
   * @param ecf reusable earth-centered earth-fixed result
   * @return either a new ecef array or the reusable ecf parameter
   */
  public static final double[] llaToECF(double lon, double lat, double alt, double[] ecf) {
    lon = StrictMath.toRadians(lon);
    lat = StrictMath.toRadians(lat);

    final double sl = StrictMath.sin(lat);
    final double s2 = sl*sl;
    final double cl = StrictMath.cos(lat);
    final double ge2 = (SEMIMAJOR_AXIS2 - SEMIMINOR_AXIS2)/(SEMIMAJOR_AXIS2);

    if (ecf == null) {
      ecf = new double[3];
    }

    if (lat < -PI_OVER_2 && lat > -1.001D * PI_OVER_2) {
      lat = -PI_OVER_2;
    } else if (lat > PI_OVER_2 && lat < 1.001D * PI_OVER_2) {
      lat = PI_OVER_2;
    }
    assert (lat >= -PI_OVER_2) || (lat <= PI_OVER_2);

    if (lon > StrictMath.PI) {
      lon -= (2*StrictMath.PI);
    }

    final double rn = SEMIMAJOR_AXIS / StrictMath.sqrt(1.0D - ge2 * s2);
    ecf[0] = (rn+alt) * cl * StrictMath.cos(lon);
    ecf[1] = (rn+alt) * cl * StrictMath.sin(lon);
    ecf[2] = ((rn*(1.0-ge2))+alt)*sl;

    return ecf;
  }

  /**
   * Converts from lat lon alt (in degrees) to East North Up right-hand coordinate system
   * @param lon longitude in degrees
   * @param lat latitude in degrees
   * @param alt altitude in meters
   * @param centerLon reference point longitude in degrees
   * @param centerLat reference point latitude in degrees
   * @param centerAlt reference point altitude in meters
   * @param enu result east, north, up coordinate
   * @return east, north, up coordinate
   */
  public static double[] llaToENU(final double lon, final double lat, final double alt, double centerLon,
                                  double centerLat, final double centerAlt, double[] enu) {
    if (enu == null) {
      enu = new double[3];
    }

    // convert point to ecf coordinates
    final double[] ecf = llaToECF(lon, lat, alt, null);

    // convert from ecf to enu
    return ecfToENU(ecf[0], ecf[1], ecf[2], centerLon, centerLat, centerAlt, enu);
  }

  /**
   * Converts from East North Up right-hand rule to lat lon alt in degrees
   * @param x easting (in meters)
   * @param y northing (in meters)
   * @param z up (in meters)
   * @param centerLon reference point longitude (in degrees)
   * @param centerLat reference point latitude (in degrees)
   * @param centerAlt reference point altitude (in meters)
   * @param lla resulting lat, lon, alt point (in degrees)
   * @return lat, lon, alt point (in degrees)
   */
  public static double[] enuToLLA(final double x, final double y, final double z, final double centerLon,
                                  final double centerLat, final double centerAlt, double[] lla) {
    // convert enuToECF
    if (lla == null) {
      lla = new double[3];
    }

    // convert enuToECF, storing intermediate result in lla
    lla = enuToECF(x, y, z, centerLon, centerLat, centerAlt, lla);

    // convert ecf to LLA
    return ecfToLLA(lla[0], lla[1], lla[2], lla);
  }

  /**
   * Convert from Earth-Centered-Fixed to Easting, Northing, Up Right Hand System
   * @param x ECF X coordinate (in meters)
   * @param y ECF Y coordinate (in meters)
   * @param z ECF Z coordinate (in meters)
   * @param centerLon ENU origin longitude (in degrees)
   * @param centerLat ENU origin latitude (in degrees)
   * @param centerAlt ENU altitude (in meters)
   * @param enu reusable enu result
   * @return Easting, Northing, Up coordinate
   */
  public static double[] ecfToENU(double x, double y, double z, final double centerLon,
                                  final double centerLat, final double centerAlt, double[] enu) {
    if (enu == null) {
      enu = new double[3];
    }

    // create rotation matrix and rotate to enu orientation
    final double[][] phi = createPhiTransform(centerLon, centerLat, null);

    // convert origin to ENU
    final double[] originECF = llaToECF(centerLon, centerLat, centerAlt, null);
    final double[] originENU = new double[3];
    originENU[0] = ((phi[0][0] * originECF[0]) + (phi[0][1] * originECF[1]) + (phi[0][2] * originECF[2]));
    originENU[1] = ((phi[1][0] * originECF[0]) + (phi[1][1] * originECF[1]) + (phi[1][2] * originECF[2]));
    originENU[2] = ((phi[2][0] * originECF[0]) + (phi[2][1] * originECF[1]) + (phi[2][2] * originECF[2]));

    // rotate then translate
    enu[0] = ((phi[0][0] * x) + (phi[0][1] * y) + (phi[0][2] * z)) - originENU[0];
    enu[1] = ((phi[1][0] * x) + (phi[1][1] * y) + (phi[1][2] * z)) - originENU[1];
    enu[2] = ((phi[2][0] * x) + (phi[2][1] * y) + (phi[2][2] * z)) - originENU[2];

    return enu;
  }

  /**
   * Convert from Easting, Northing, Up Right-Handed system to Earth Centered Fixed system
   * @param x ENU x coordinate (in meters)
   * @param y ENU y coordinate (in meters)
   * @param z ENU z coordinate (in meters)
   * @param centerLon ENU origin longitude (in degrees)
   * @param centerLat ENU origin latitude (in degrees)
   * @param centerAlt ENU origin altitude (in meters)
   * @param ecf reusable ecf result
   * @return ecf result coordinate
   */
  public static double[] enuToECF(final double x, final double y, final double z, double centerLon,
                                  double centerLat, final double centerAlt, double[] ecf) {
    if (ecf == null) {
      ecf = new double[3];
    }

    double[][] phi = createTransposedPhiTransform(centerLon, centerLat, null);
    double[] ecfOrigin = llaToECF(centerLon, centerLat, centerAlt, null);

    // rotate and translate
    ecf[0] = (phi[0][0]*x + phi[0][1]*y + phi[0][2]*z) + ecfOrigin[0];
    ecf[1] = (phi[1][0]*x + phi[1][1]*y + phi[1][2]*z) + ecfOrigin[1];
    ecf[2] = (phi[2][0]*x + phi[2][1]*y + phi[2][2]*z) + ecfOrigin[2];

    return ecf;
  }

  /**
   * Create the rotation matrix for converting Earth Centered Fixed to Easting Northing Up
   * @param originLon ENU origin longitude (in degrees)
   * @param originLat ENU origin latitude (in degrees)
   * @param phiMatrix reusable phi matrix result
   * @return phi rotation matrix
   */
  private static double[][] createPhiTransform(double originLon, double originLat, double[][] phiMatrix) {

    if (phiMatrix == null) {
      phiMatrix = new double[3][3];
    }

    originLon = StrictMath.toRadians(originLon);
    originLat = StrictMath.toRadians(originLat);

    final double sLon = StrictMath.sin(originLon);
    final double cLon = StrictMath.cos(originLon);
    final double sLat = StrictMath.sin(originLat);
    final double cLat = StrictMath.cos(originLat);

    phiMatrix[0][0] = -sLon;
    phiMatrix[0][1] = cLon;
    phiMatrix[0][2] = 0.0D;
    phiMatrix[1][0] = -sLat * cLon;
    phiMatrix[1][1] = -sLat * sLon;
    phiMatrix[1][2] = cLat;
    phiMatrix[2][0] = cLat * cLon;
    phiMatrix[2][1] = cLat * sLon;
    phiMatrix[2][2] = sLat;

    return phiMatrix;
  }

  /**
   * Create the transposed rotation matrix for converting Easting Northing Up coordinates to Earth Centered Fixed
   * @param originLon ENU origin longitude (in degrees)
   * @param originLat ENU origin latitude (in degrees)
   * @param phiMatrix reusable phi rotation matrix result
   * @return transposed phi rotation matrix
   */
  private static double[][] createTransposedPhiTransform(double originLon, double originLat, double[][] phiMatrix) {

    if (phiMatrix == null) {
      phiMatrix = new double[3][3];
    }

    originLon = StrictMath.toRadians(originLon);
    originLat = StrictMath.toRadians(originLat);

    final double sLat = StrictMath.sin(originLat);
    final double cLat = StrictMath.cos(originLat);
    final double sLon = StrictMath.sin(originLon);
    final double cLon = StrictMath.cos(originLon);

    phiMatrix[0][0] = -sLon;
    phiMatrix[1][0] = cLon;
    phiMatrix[2][0] = 0.0D;
    phiMatrix[0][1] = -sLat * cLon;
    phiMatrix[1][1] = -sLat * sLon;
    phiMatrix[2][1] = cLat;
    phiMatrix[0][2] = cLat * cLon;
    phiMatrix[1][2] = cLat * sLon;
    phiMatrix[2][2] = sLat;

    return phiMatrix;
  }

  /**
   * Finds a point along a bearing from a given lon,lat geolocation using vincenty's distance formula
   *
   * @param lon origin longitude in degrees
   * @param lat origin latitude in degrees
   * @param bearing azimuthal bearing in degrees
   * @param dist distance in meters
   * @param pt resulting point
   * @return the point along a bearing at a given distance in meters
   */
  public static final double[] pointFromLonLatBearing(double lon, double lat, double bearing, double dist, double[] pt) {

    if (pt == null) {
      pt = new double[2];
    }

    final double alpha1 = StrictMath.toRadians(bearing);
    final double cosA1 = StrictMath.cos(alpha1);
    final double sinA1 = StrictMath.sin(alpha1);
    final double tanU1 = (1-FLATTENING) * StrictMath.tan(StrictMath.toRadians(lat));
    final double cosU1 = 1 / StrictMath.sqrt((1+tanU1*tanU1));
    final double sinU1 = tanU1*cosU1;
    final double sig1 = StrictMath.atan2(tanU1, cosA1);
    final double sinAlpha = cosU1 * sinA1;
    final double cosSqAlpha = 1 - sinAlpha*sinAlpha;
    final double uSq = cosSqAlpha * (SEMIMAJOR_AXIS2 - SEMIMINOR_AXIS2) / SEMIMINOR_AXIS2;
    final double A = 1 + uSq/16384D*(4096D + uSq * (-768D + uSq * (320D - 175D*uSq)));
    final double B = uSq/1024D * (256D + uSq * (-128D + uSq * (74D - 47D * uSq)));

    double sigma = dist / (SEMIMINOR_AXIS*A);
    double sigmaP;
    double sinSigma, cosSigma, cos2SigmaM, deltaSigma;

    do {
      cos2SigmaM = StrictMath.cos(2*sig1 + sigma);
      sinSigma = StrictMath.sin(sigma);
      cosSigma = StrictMath.cos(sigma);

      deltaSigma = B * sinSigma * (cos2SigmaM + (B/4D) * (cosSigma*(-1+2*cos2SigmaM*cos2SigmaM)-
          (B/6) * cos2SigmaM*(-3+4*sinSigma*sinSigma)*(-3+4*cos2SigmaM*cos2SigmaM)));
      sigmaP = sigma;
      sigma = dist / (SEMIMINOR_AXIS*A) + deltaSigma;
    } while (StrictMath.abs(sigma-sigmaP) > 1E-12);

    final double tmp = sinU1*sinSigma - cosU1*cosSigma*cosA1;
    final double lat2 = StrictMath.atan2(sinU1*cosSigma + cosU1*sinSigma*cosA1,
        (1-FLATTENING) * StrictMath.sqrt(sinAlpha*sinAlpha + tmp*tmp));
    final double lambda = StrictMath.atan2(sinSigma*sinA1, cosU1*cosSigma - sinU1*sinSigma*cosA1);
    final double c = FLATTENING/16 * cosSqAlpha * (4 + FLATTENING * (4 - 3 * cosSqAlpha));

    final double lam = lambda - (1-c) * FLATTENING * sinAlpha *
        (sigma + c * sinSigma * (cos2SigmaM + c * cosSigma * (-1 + 2* cos2SigmaM*cos2SigmaM)));
    pt[0] = lon + StrictMath.toDegrees(lam);
    pt[1] = StrictMath.toDegrees(lat2);

    return pt;
  }
}
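The projection helpers above are designed to round-trip: `llaToECF` and `ecfToLLA` invert each other up to floating-point error, and both accept a reusable output array to avoid allocation. A short sketch against the removed class exactly as defined above (its Lucene 5.4 counterpart is assumed to expose the same conversions):

```java
import org.apache.lucene.util.XGeoProjectionUtils;

public class ProjectionRoundTripDemo {
  public static void main(String[] args) {
    double lon = -73.9857, lat = 40.7484, alt = 10.0; // degrees, degrees, meters

    // geodesic lon/lat/alt -> earth-centered earth-fixed cartesian coordinates (meters)
    double[] ecf = XGeoProjectionUtils.llaToECF(lon, lat, alt, null);

    // and back again, reusing a caller-supplied result array
    double[] lla = XGeoProjectionUtils.ecfToLLA(ecf[0], ecf[1], ecf[2], new double[3]);

    System.out.printf("lon=%.6f lat=%.6f alt=%.3f%n", lla[0], lla[1], lla[2]);
  }
}
```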
@@ -1,429 +0,0 @@
package org.apache.lucene.util;

/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import java.util.ArrayList;

/**
 * Basic reusable geo-spatial utility methods
 *
 * @lucene.experimental
 */
public final class XGeoUtils {
  private static final short MIN_LON = -180;
  private static final short MIN_LAT = -90;
  public static final short BITS = 31;
  private static final double LON_SCALE = (0x1L<<BITS)/360.0D;
  private static final double LAT_SCALE = (0x1L<<BITS)/180.0D;
  public static final double TOLERANCE = 1E-5;

  /** Minimum longitude value. */
  public static final double MIN_LON_INCL = -180.0D;

  /** Maximum longitude value. */
  public static final double MAX_LON_INCL = 180.0D;

  /** Minimum latitude value. */
  public static final double MIN_LAT_INCL = -90.0D;

  /** Maximum latitude value. */
  public static final double MAX_LAT_INCL = 90.0D;

  // magic numbers for bit interleaving
  private static final long MAGIC[] = {
      0x5555555555555555L, 0x3333333333333333L,
      0x0F0F0F0F0F0F0F0FL, 0x00FF00FF00FF00FFL,
      0x0000FFFF0000FFFFL, 0x00000000FFFFFFFFL,
      0xAAAAAAAAAAAAAAAAL
  };
  // shift values for bit interleaving
  private static final short SHIFT[] = {1, 2, 4, 8, 16};

  public static double LOG2 = StrictMath.log(2);

  // No instance:
  private XGeoUtils() {
  }

  public static Long mortonHash(final double lon, final double lat) {
    return interleave(scaleLon(lon), scaleLat(lat));
  }

  public static double mortonUnhashLon(final long hash) {
    return unscaleLon(deinterleave(hash));
  }

  public static double mortonUnhashLat(final long hash) {
    return unscaleLat(deinterleave(hash >>> 1));
  }

  private static long scaleLon(final double val) {
    return (long) ((val-MIN_LON) * LON_SCALE);
  }

  private static long scaleLat(final double val) {
    return (long) ((val-MIN_LAT) * LAT_SCALE);
  }

  private static double unscaleLon(final long val) {
    return (val / LON_SCALE) + MIN_LON;
  }

  private static double unscaleLat(final long val) {
    return (val / LAT_SCALE) + MIN_LAT;
  }

  /**
   * Interleaves the first 32 bits of each long value
   *
   * Adapted from: http://graphics.stanford.edu/~seander/bithacks.html#InterleaveBMN
   */
  public static long interleave(long v1, long v2) {
    v1 = (v1 | (v1 << SHIFT[4])) & MAGIC[4];
    v1 = (v1 | (v1 << SHIFT[3])) & MAGIC[3];
    v1 = (v1 | (v1 << SHIFT[2])) & MAGIC[2];
    v1 = (v1 | (v1 << SHIFT[1])) & MAGIC[1];
    v1 = (v1 | (v1 << SHIFT[0])) & MAGIC[0];
    v2 = (v2 | (v2 << SHIFT[4])) & MAGIC[4];
    v2 = (v2 | (v2 << SHIFT[3])) & MAGIC[3];
    v2 = (v2 | (v2 << SHIFT[2])) & MAGIC[2];
    v2 = (v2 | (v2 << SHIFT[1])) & MAGIC[1];
    v2 = (v2 | (v2 << SHIFT[0])) & MAGIC[0];

    return (v2<<1) | v1;
  }

  /**
   * Deinterleaves long value back to two concatenated 32bit values
   */
  public static long deinterleave(long b) {
    b &= MAGIC[0];
    b = (b ^ (b >>> SHIFT[0])) & MAGIC[1];
    b = (b ^ (b >>> SHIFT[1])) & MAGIC[2];
    b = (b ^ (b >>> SHIFT[2])) & MAGIC[3];
    b = (b ^ (b >>> SHIFT[3])) & MAGIC[4];
    b = (b ^ (b >>> SHIFT[4])) & MAGIC[5];
    return b;
  }

  public static double compare(final double v1, final double v2) {
    final double compare = v1-v2;
    return Math.abs(compare) <= TOLERANCE ? 0 : compare;
  }

  /**
   * Puts longitude in range of -180 to +180.
   */
  public static double normalizeLon(double lon_deg) {
    if (lon_deg >= -180 && lon_deg <= 180) {
      return lon_deg; //common case, and avoids slight double precision shifting
    }
    double off = (lon_deg + 180) % 360;
    if (off < 0) {
      return 180 + off;
    } else if (off == 0 && lon_deg > 0) {
      return 180;
    } else {
      return -180 + off;
    }
  }

  /**
   * Puts latitude in range of -90 to 90.
   */
  public static double normalizeLat(double lat_deg) {
    if (lat_deg >= -90 && lat_deg <= 90) {
      return lat_deg; //common case, and avoids slight double precision shifting
    }
    double off = Math.abs((lat_deg + 90) % 360);
    return (off <= 180 ? off : 360-off) - 90;
  }

  public static final boolean bboxContains(final double lon, final double lat, final double minLon,
                                           final double minLat, final double maxLon, final double maxLat) {
    return (compare(lon, minLon) >= 0 && compare(lon, maxLon) <= 0
        && compare(lat, minLat) >= 0 && compare(lat, maxLat) <= 0);
  }

  /**
   * simple even-odd point in polygon computation
   *   1. Determine if point is contained in the longitudinal range
   *   2. Determine whether point crosses the edge by computing the latitudinal delta
   *      between the end-point of a parallel vector (originating at the point) and the
   *      y-component of the edge sink
   *
   * NOTE: Requires polygon point (x,y) order either clockwise or counter-clockwise
   */
  public static boolean pointInPolygon(double[] x, double[] y, double lat, double lon) {
    assert x.length == y.length;
    boolean inPoly = false;
    /**
     * Note: This is using a euclidean coordinate system which could result in
     * upwards of 110KM error at the equator.
     * TODO convert coordinates to cylindrical projection (e.g. mercator)
     */
    for (int i = 1; i < x.length; i++) {
      if (x[i] < lon && x[i-1] >= lon || x[i-1] < lon && x[i] >= lon) {
        if (y[i] + (lon - x[i]) / (x[i-1] - x[i]) * (y[i-1] - y[i]) < lat) {
          inPoly = !inPoly;
        }
      }
    }
    return inPoly;
  }

  public static String geoTermToString(long term) {
    StringBuilder s = new StringBuilder(64);
    final int numberOfLeadingZeros = Long.numberOfLeadingZeros(term);
    for (int i = 0; i < numberOfLeadingZeros; i++) {
      s.append('0');
    }
    if (term != 0) {
      s.append(Long.toBinaryString(term));
    }
    return s.toString();
  }

  public static boolean rectDisjoint(final double aMinX, final double aMinY, final double aMaxX, final double aMaxY,
                                     final double bMinX, final double bMinY, final double bMaxX, final double bMaxY) {
    return (aMaxX < bMinX || aMinX > bMaxX || aMaxY < bMinY || aMinY > bMaxY);
  }

  /**
   * Computes whether a rectangle is wholly within another rectangle (shared boundaries allowed)
   */
  public static boolean rectWithin(final double aMinX, final double aMinY, final double aMaxX, final double aMaxY,
                                   final double bMinX, final double bMinY, final double bMaxX, final double bMaxY) {
    return !(aMinX < bMinX || aMinY < bMinY || aMaxX > bMaxX || aMaxY > bMaxY);
  }

  public static boolean rectCrosses(final double aMinX, final double aMinY, final double aMaxX, final double aMaxY,
                                    final double bMinX, final double bMinY, final double bMaxX, final double bMaxY) {
    return !(rectDisjoint(aMinX, aMinY, aMaxX, aMaxY, bMinX, bMinY, bMaxX, bMaxY) ||
        rectWithin(aMinX, aMinY, aMaxX, aMaxY, bMinX, bMinY, bMaxX, bMaxY));
  }

  /**
   * Computes whether rectangle a contains rectangle b (touching allowed)
   */
  public static boolean rectContains(final double aMinX, final double aMinY, final double aMaxX, final double aMaxY,
                                     final double bMinX, final double bMinY, final double bMaxX, final double bMaxY) {
    return !(bMinX < aMinX || bMinY < aMinY || bMaxX > aMaxX || bMaxY > aMaxY);
  }

  /**
   * Computes whether a rectangle intersects another rectangle (crosses, within, touching, etc)
   */
  public static boolean rectIntersects(final double aMinX, final double aMinY, final double aMaxX, final double aMaxY,
                                       final double bMinX, final double bMinY, final double bMaxX, final double bMaxY) {
    return !((aMaxX < bMinX || aMinX > bMaxX || aMaxY < bMinY || aMinY > bMaxY) );
  }

  /**
   * Computes whether a rectangle crosses a shape. (touching not allowed)
   */
  public static boolean rectCrossesPoly(final double rMinX, final double rMinY, final double rMaxX,
                                        final double rMaxY, final double[] shapeX, final double[] shapeY,
                                        final double sMinX, final double sMinY, final double sMaxX,
                                        final double sMaxY) {
    // short-circuit: if the bounding boxes are disjoint then the shape does not cross
    if (rectDisjoint(rMinX, rMinY, rMaxX, rMaxY, sMinX, sMinY, sMaxX, sMaxY)) {
      return false;
    }

    final double[][] bbox = new double[][] { {rMinX, rMinY}, {rMaxX, rMinY}, {rMaxX, rMaxY}, {rMinX, rMaxY}, {rMinX, rMinY} };
    final int polyLength = shapeX.length-1;
    double d, s, t, a1, b1, c1, a2, b2, c2;
    double x00, y00, x01, y01, x10, y10, x11, y11;

    // computes the intersection point between each bbox edge and the polygon edge
    for (short b=0; b<4; ++b) {
      a1 = bbox[b+1][1]-bbox[b][1];
      b1 = bbox[b][0]-bbox[b+1][0];
      c1 = a1*bbox[b+1][0] + b1*bbox[b+1][1];
      for (int p=0; p<polyLength; ++p) {
        a2 = shapeY[p+1]-shapeY[p];
        b2 = shapeX[p]-shapeX[p+1];
        // compute determinant
        d = a1*b2 - a2*b1;
        if (d != 0) {
          // lines are not parallel, check intersecting points
          c2 = a2*shapeX[p+1] + b2*shapeY[p+1];
          s = (1/d)*(b2*c1 - b1*c2);
          t = (1/d)*(a1*c2 - a2*c1);
          x00 = StrictMath.min(bbox[b][0], bbox[b+1][0]) - TOLERANCE;
          x01 = StrictMath.max(bbox[b][0], bbox[b+1][0]) + TOLERANCE;
          y00 = StrictMath.min(bbox[b][1], bbox[b+1][1]) - TOLERANCE;
          y01 = StrictMath.max(bbox[b][1], bbox[b+1][1]) + TOLERANCE;
          x10 = StrictMath.min(shapeX[p], shapeX[p+1]) - TOLERANCE;
          x11 = StrictMath.max(shapeX[p], shapeX[p+1]) + TOLERANCE;
          y10 = StrictMath.min(shapeY[p], shapeY[p+1]) - TOLERANCE;
          y11 = StrictMath.max(shapeY[p], shapeY[p+1]) + TOLERANCE;
          // check whether the intersection point is touching one of the line segments
          boolean touching = ((x00 == s && y00 == t) || (x01 == s && y01 == t))
              || ((x10 == s && y10 == t) || (x11 == s && y11 == t));
          // if line segments are not touching and the intersection point is within the range of either segment
          if (!(touching || x00 > s || x01 < s || y00 > t || y01 < t || x10 > s || x11 < s || y10 > t || y11 < t)) {
            return true;
          }
        }
      } // for each poly edge
    } // for each bbox edge
    return false;
  }

  /**
   * Converts a given circle (defined as a point/radius) to an approximated line-segment polygon
   *
   * @param lon longitudinal center of circle (in degrees)
   * @param lat latitudinal center of circle (in degrees)
   * @param radius distance radius of circle (in meters)
   * @return a list of lon/lat points representing the circle
   */
  @SuppressWarnings({"unchecked","rawtypes"})
  public static ArrayList<double[]> circleToPoly(final double lon, final double lat, final double radius) {
    double angle;
    // a little under-sampling (to limit the number of polygonal points): using archimedes estimation of pi
    final int sides = 25;
    ArrayList<double[]> geometry = new ArrayList();
    double[] lons = new double[sides];
    double[] lats = new double[sides];

    double[] pt = new double[2];
    final int sidesLen = sides-1;
    for (int i=0; i<sidesLen; ++i) {
      angle = (i*360/sides);
      pt = XGeoProjectionUtils.pointFromLonLatBearing(lon, lat, angle, radius, pt);
      lons[i] = pt[0];
      lats[i] = pt[1];
    }
    // close the poly
    lons[sidesLen] = lons[0];
    lats[sidesLen] = lats[0];
    geometry.add(lons);
    geometry.add(lats);

    return geometry;
  }

  /**
   * Computes whether a rectangle is within a given polygon (shared boundaries allowed)
   */
  public static boolean rectWithinPoly(final double rMinX, final double rMinY, final double rMaxX, final double rMaxY,
                                       final double[] shapeX, final double[] shapeY, final double sMinX,
                                       final double sMinY, final double sMaxX, final double sMaxY) {
    // check if rectangle crosses poly (to handle concave/pacman polys), then check that all 4 corners
    // are contained
    return !(rectCrossesPoly(rMinX, rMinY, rMaxX, rMaxY, shapeX, shapeY, sMinX, sMinY, sMaxX, sMaxY) ||
        !pointInPolygon(shapeX, shapeY, rMinY, rMinX) || !pointInPolygon(shapeX, shapeY, rMinY, rMaxX) ||
        !pointInPolygon(shapeX, shapeY, rMaxY, rMaxX) || !pointInPolygon(shapeX, shapeY, rMaxY, rMinX));
  }

  private static boolean rectAnyCornersOutsideCircle(final double rMinX, final double rMinY, final double rMaxX, final double rMaxY,
                                                     final double centerLon, final double centerLat, final double radius) {
    return (SloppyMath.haversin(centerLat, centerLon, rMinY, rMinX)*1000.0 > radius
        || SloppyMath.haversin(centerLat, centerLon, rMaxY, rMinX)*1000.0 > radius
        || SloppyMath.haversin(centerLat, centerLon, rMaxY, rMaxX)*1000.0 > radius
        || SloppyMath.haversin(centerLat, centerLon, rMinY, rMaxX)*1000.0 > radius);
  }

  private static boolean rectAnyCornersInCircle(final double rMinX, final double rMinY, final double rMaxX, final double rMaxY,
                                                final double centerLon, final double centerLat, final double radius) {
    return (SloppyMath.haversin(centerLat, centerLon, rMinY, rMinX)*1000.0 <= radius
        || SloppyMath.haversin(centerLat, centerLon, rMaxY, rMinX)*1000.0 <= radius
        || SloppyMath.haversin(centerLat, centerLon, rMaxY, rMaxX)*1000.0 <= radius
        || SloppyMath.haversin(centerLat, centerLon, rMinY, rMaxX)*1000.0 <= radius);
  }

  public static boolean rectWithinCircle(final double rMinX, final double rMinY, final double rMaxX, final double rMaxY,
                                         final double centerLon, final double centerLat, final double radius) {
    return !(rectAnyCornersOutsideCircle(rMinX, rMinY, rMaxX, rMaxY, centerLon, centerLat, radius));
  }

  /**
   * Computes whether a rectangle crosses a circle
   */
  public static boolean rectCrossesCircle(final double rMinX, final double rMinY, final double rMaxX, final double rMaxY,
                                          final double centerLon, final double centerLat, final double radius) {
    return rectAnyCornersInCircle(rMinX, rMinY, rMaxX, rMaxY, centerLon, centerLat, radius)
        || lineCrossesSphere(rMinX, rMinY, 0, rMaxX, rMinY, 0, centerLon, centerLat, 0, radius)
        || lineCrossesSphere(rMaxX, rMinY, 0, rMaxX, rMaxY, 0, centerLon, centerLat, 0, radius)
        || lineCrossesSphere(rMaxX, rMaxY, 0, rMinX, rMaxY, 0, centerLon, centerLat, 0, radius)
        || lineCrossesSphere(rMinX, rMaxY, 0, rMinX, rMinY, 0, centerLon, centerLat, 0, radius);
  }

  /**
   * Computes whether or a 3dimensional line segment intersects or crosses a sphere
   *
   * @param lon1 longitudinal location of the line segment start point (in degrees)
   * @param lat1 latitudinal location of the line segment start point (in degrees)
   * @param alt1 altitude of the line segment start point (in degrees)
   * @param lon2 longitudinal location of the line segment end point (in degrees)
   * @param lat2 latitudinal location of the line segment end point (in degrees)
   * @param alt2 altitude of the line segment end point (in degrees)
   * @param centerLon longitudinal location of center search point (in degrees)
   * @param centerLat latitudinal location of center search point (in degrees)
   * @param centerAlt altitude of the center point (in meters)
   * @param radius search sphere radius (in meters)
   * @return whether the provided line segment is a secant of the
   */
  private static boolean lineCrossesSphere(double lon1, double lat1, double alt1, double lon2,
                                           double lat2, double alt2, double centerLon, double centerLat,
                                           double centerAlt, double radius) {
    // convert to cartesian 3d (in meters)
    double[] ecf1 = XGeoProjectionUtils.llaToECF(lon1, lat1, alt1, null);
    double[] ecf2 = XGeoProjectionUtils.llaToECF(lon2, lat2, alt2, null);
    double[] cntr = XGeoProjectionUtils.llaToECF(centerLon, centerLat, centerAlt, null);

    final double dX = ecf2[0] - ecf1[0];
    final double dY = ecf2[1] - ecf1[1];
    final double dZ = ecf2[2] - ecf1[2];
    final double fX = ecf1[0] - cntr[0];
    final double fY = ecf1[1] - cntr[1];
    final double fZ = ecf1[2] - cntr[2];

    final double a = dX*dX + dY*dY + dZ*dZ;
    final double b = 2 * (fX*dX + fY*dY + fZ*dZ);
    final double c = (fX*fX + fY*fY + fZ*fZ) - (radius*radius);

    double discrim = (b*b)-(4*a*c);
    if (discrim < 0) {
      return false;
    }

    discrim = StrictMath.sqrt(discrim);
    final double a2 = 2*a;
    final double t1 = (-b - discrim)/a2;
    final double t2 = (-b + discrim)/a2;

    if ( (t1 < 0 || t1 > 1) ) {
      return !(t2 < 0 || t2 > 1);
    }

    return true;
  }

  public static boolean isValidLat(double lat) {
    return Double.isNaN(lat) == false && lat >= MIN_LAT_INCL && lat <= MAX_LAT_INCL;
  }

  public static boolean isValidLon(double lon) {
    return Double.isNaN(lon) == false && lon >= MIN_LON_INCL && lon <= MAX_LON_INCL;
  }
}
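The morton hash above interleaves the scaled lon/lat bits into a single sortable long, which is how `GeoPoint.resetFromIndexHash` in the hunks below unpacks an indexed value. A minimal sketch, assuming the Lucene 5.4 `GeoUtils` keeps the method names shown in those hunks:

```java
import org.apache.lucene.util.GeoUtils;

public class MortonHashDemo {
  public static void main(String[] args) {
    double lon = 2.3522, lat = 48.8566;

    // interleave scaled lon/lat into one long (31 bits per axis)
    long hash = GeoUtils.mortonHash(lon, lat);

    // unpack; values come back quantized by the scaling, not bit-exact
    double lonBack = GeoUtils.mortonUnhashLon(hash);
    double latBack = GeoUtils.mortonUnhashLat(hash);

    System.out.printf("%.7f %.7f%n", lonBack, latBack);
  }
}
```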
@@ -20,8 +20,8 @@
 package org.elasticsearch.common.geo;
 
 import org.apache.lucene.util.BitUtil;
-import org.apache.lucene.util.XGeoHashUtils;
-import org.apache.lucene.util.XGeoUtils;
+import org.apache.lucene.util.GeoHashUtils;
+import org.apache.lucene.util.GeoUtils;
 
 /**
  *
@@ -81,14 +81,14 @@ public final class GeoPoint {
     }
 
     public GeoPoint resetFromIndexHash(long hash) {
-        lon = XGeoUtils.mortonUnhashLon(hash);
-        lat = XGeoUtils.mortonUnhashLat(hash);
+        lon = GeoUtils.mortonUnhashLon(hash);
+        lat = GeoUtils.mortonUnhashLat(hash);
         return this;
     }
 
     public GeoPoint resetFromGeoHash(String geohash) {
-        final long hash = XGeoHashUtils.mortonEncode(geohash);
-        return this.reset(XGeoUtils.mortonUnhashLat(hash), XGeoUtils.mortonUnhashLon(hash));
+        final long hash = GeoHashUtils.mortonEncode(geohash);
+        return this.reset(GeoUtils.mortonUnhashLat(hash), GeoUtils.mortonUnhashLon(hash));
     }
 
     public GeoPoint resetFromGeoHash(long geohashLong) {
@@ -113,11 +113,11 @@ public final class GeoPoint {
     }
 
     public final String geohash() {
-        return XGeoHashUtils.stringEncode(lon, lat);
+        return GeoHashUtils.stringEncode(lon, lat);
     }
 
     public final String getGeohash() {
-        return XGeoHashUtils.stringEncode(lon, lat);
+        return GeoHashUtils.stringEncode(lon, lat);
     }
 
     @Override
@@ -25,7 +25,7 @@ import org.apache.lucene.document.Field;
 import org.apache.lucene.index.IndexOptions;
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.NumericUtils;
-import org.apache.lucene.util.XGeoHashUtils;
+import org.apache.lucene.util.GeoHashUtils;
 import org.elasticsearch.Version;
 import org.elasticsearch.common.Explicit;
 import org.elasticsearch.common.Strings;
@@ -82,7 +82,7 @@ public class GeoPointFieldMapper extends FieldMapper implements ArrayValueMapper
     public static final boolean ENABLE_LATLON = false;
     public static final boolean ENABLE_GEOHASH = false;
     public static final boolean ENABLE_GEOHASH_PREFIX = false;
-    public static final int GEO_HASH_PRECISION = XGeoHashUtils.PRECISION;
+    public static final int GEO_HASH_PRECISION = GeoHashUtils.PRECISION;
 
     public static final Explicit<Boolean> IGNORE_MALFORMED = new Explicit(false, false);
     public static final Explicit<Boolean> COERCE = new Explicit(false, false);
@@ -705,7 +705,7 @@ public class GeoPointFieldMapper extends FieldMapper implements ArrayValueMapper
         }
         if (fieldType().isGeohashEnabled()) {
             if (geohash == null) {
-                geohash = XGeoHashUtils.stringEncode(point.lon(), point.lat());
+                geohash = GeoHashUtils.stringEncode(point.lon(), point.lat());
             }
             addGeohashField(context, geohash);
         }
@@ -20,7 +20,7 @@
 package org.elasticsearch.index.query;
 
 import org.apache.lucene.search.Query;
-import org.apache.lucene.util.XGeoHashUtils;
+import org.apache.lucene.util.GeoHashUtils;
 import org.elasticsearch.ElasticsearchParseException;
 import org.elasticsearch.common.Nullable;
 import org.elasticsearch.common.ParseField;
@@ -131,7 +131,7 @@ public class GeohashCellQuery {
         }
 
         public Builder point(double lat, double lon) {
-            this.geohash = XGeoHashUtils.stringEncode(lon, lat);
+            this.geohash = GeoHashUtils.stringEncode(lon, lat);
             return this;
         }
 
@@ -205,7 +205,7 @@ public class GeohashCellQuery {
 
             Query query;
             if (neighbors) {
-                query = create(context, geoFieldType, geohash, XGeoHashUtils.addNeighbors(geohash, new ArrayList<CharSequence>(8)));
+                query = create(context, geoFieldType, geohash, GeoHashUtils.addNeighbors(geohash, new ArrayList<CharSequence>(8)));
             } else {
                 query = create(context, geoFieldType, geohash, null);
             }
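When the `neighbors` option is set, the cell query above resolves the surrounding cells up front via `addNeighbors` and passes them alongside the original geohash. A small hedged sketch of that helper call (the sample geohash is illustrative only):

```java
import java.util.ArrayList;
import org.apache.lucene.util.GeoHashUtils;

public class NeighborsDemo {
  public static void main(String[] args) {
    String geohash = "u0vu7"; // any valid base-32 geohash

    // fills the supplied collection with the eight neighboring cells at the same precision
    ArrayList<CharSequence> cells =
        GeoHashUtils.addNeighbors(geohash, new ArrayList<CharSequence>(8));

    System.out.println(geohash + " neighbors: " + cells);
  }
}
```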
@ -20,7 +20,7 @@ package org.elasticsearch.search.aggregations.bucket.geogrid;
|
|||
|
||||
import org.apache.lucene.index.LeafReaderContext;
|
||||
import org.apache.lucene.index.SortedNumericDocValues;
|
||||
import org.apache.lucene.util.XGeoHashUtils;
|
||||
import org.apache.lucene.util.GeoHashUtils;
|
||||
import org.elasticsearch.common.geo.GeoPoint;
|
||||
import org.elasticsearch.common.lease.Releasables;
|
||||
import org.elasticsearch.common.util.LongArray;
|
||||
|
|
|
@ -20,7 +20,7 @@ package org.elasticsearch.search.aggregations.bucket.geogrid;
|
|||
|
||||
import org.apache.lucene.index.LeafReaderContext;
|
||||
import org.apache.lucene.index.SortedNumericDocValues;
|
||||
import org.apache.lucene.util.XGeoHashUtils;
|
||||
import org.apache.lucene.util.GeoHashUtils;
|
||||
import org.elasticsearch.common.geo.GeoPoint;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.index.fielddata.MultiGeoPointValues;
|
||||
|
@ -163,7 +163,7 @@ public class GeoHashGridParser implements Aggregator.Parser {
|
|||
resize(geoValues.count());
|
||||
for (int i = 0; i < count(); ++i) {
|
||||
GeoPoint target = geoValues.valueAt(i);
|
||||
values[i] = XGeoHashUtils.longEncode(target.getLon(), target.getLat(), precision);
|
||||
values[i] = GeoHashUtils.longEncode(target.getLon(), target.getLat(), precision);
|
||||
}
|
||||
sort();
|
||||
}
|
||||
|
|
|
@ -19,7 +19,7 @@
|
|||
package org.elasticsearch.search.aggregations.bucket.geogrid;
|
||||
|
||||
import org.apache.lucene.util.PriorityQueue;
|
||||
import org.apache.lucene.util.XGeoHashUtils;
|
||||
import org.apache.lucene.util.GeoHashUtils;
|
||||
import org.elasticsearch.common.geo.GeoPoint;
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
|
@ -101,7 +101,7 @@ public class InternalGeoHashGrid extends InternalMultiBucketAggregation<Internal
|
|||
|
||||
@Override
|
||||
public String getKeyAsString() {
|
||||
return XGeoHashUtils.stringEncode(geohashAsLong);
|
||||
return GeoHashUtils.stringEncode(geohashAsLong);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
|
|
@ -20,8 +20,8 @@
|
|||
package org.elasticsearch.search.aggregations.metrics.geocentroid;
|
||||
|
||||
import org.apache.lucene.index.LeafReaderContext;
|
||||
import org.apache.lucene.util.XGeoHashUtils;
|
||||
import org.apache.lucene.util.XGeoUtils;
|
||||
import org.apache.lucene.util.GeoHashUtils;
|
||||
import org.apache.lucene.util.GeoUtils;
|
||||
import org.elasticsearch.common.geo.GeoPoint;
|
||||
import org.elasticsearch.common.lease.Releasables;
|
||||
import org.elasticsearch.common.util.BigArrays;
|
||||
|
@ -96,7 +96,7 @@ public final class GeoCentroidAggregator extends MetricsAggregator {
|
|||
pt[0] = pt[0] + (value.getLon() - pt[0]) / ++prevCounts;
|
||||
pt[1] = pt[1] + (value.getLat() - pt[1]) / prevCounts;
|
||||
}
|
||||
centroids.set(bucket, XGeoUtils.mortonHash(pt[0], pt[1]));
|
||||
centroids.set(bucket, GeoUtils.mortonHash(pt[0], pt[1]));
|
||||
}
|
||||
}
|
||||
};
|
||||
|
|
|
@ -19,7 +19,7 @@
|
|||
|
||||
package org.elasticsearch.search.aggregations.metrics.geocentroid;
|
||||
|
||||
import org.apache.lucene.util.XGeoUtils;
|
||||
import org.apache.lucene.util.GeoUtils;
|
||||
import org.elasticsearch.common.geo.GeoPoint;
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
|
@ -140,7 +140,7 @@ public class InternalGeoCentroid extends InternalMetricsAggregation implements G
|
|||
out.writeVLong(count);
|
||||
if (centroid != null) {
|
||||
out.writeBoolean(true);
|
||||
out.writeLong(XGeoUtils.mortonHash(centroid.lon(), centroid.lat()));
|
||||
out.writeLong(GeoUtils.mortonHash(centroid.lon(), centroid.lat()));
|
||||
} else {
|
||||
out.writeBoolean(false);
|
||||
}
|
||||
|
|
|
@ -18,7 +18,7 @@
|
|||
*/
|
||||
package org.elasticsearch.search.aggregations.support.format;
|
||||
|
||||
import org.apache.lucene.util.XGeoHashUtils;
|
||||
import org.apache.lucene.util.GeoHashUtils;
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.common.io.stream.Streamable;
|
||||
|
@ -250,7 +250,7 @@ public interface ValueFormatter extends Streamable {
|
|||
|
||||
@Override
|
||||
public String format(long value) {
|
||||
return XGeoHashUtils.stringEncode(value);
|
||||
return GeoHashUtils.stringEncode(value);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
|
|
@@ -24,7 +24,7 @@ import org.apache.lucene.analysis.PrefixAnalyzer.PrefixTokenFilter;
 import org.apache.lucene.analysis.TokenStream;
 import org.apache.lucene.index.DocValuesType;
 import org.apache.lucene.index.IndexableField;
-import org.apache.lucene.util.XGeoHashUtils;
+import org.apache.lucene.util.GeoHashUtils;
 import org.apache.lucene.util.automaton.Automata;
 import org.apache.lucene.util.automaton.Automaton;
 import org.apache.lucene.util.automaton.Operations;
@@ -227,7 +227,7 @@ public class GeolocationContextMapping extends ContextMapping {
             if(parser.nextToken() == Token.VALUE_NUMBER) {
                 double lat = parser.doubleValue();
                 if(parser.nextToken() == Token.END_ARRAY) {
-                    return Collections.singleton(XGeoHashUtils.stringEncode(lon, lat));
+                    return Collections.singleton(GeoHashUtils.stringEncode(lon, lat));
                 } else {
                     throw new ElasticsearchParseException("only two values expected");
                 }
@@ -294,7 +294,7 @@ public class GeolocationContextMapping extends ContextMapping {
      * @return new geolocation query
      */
     public static GeoQuery query(String name, double lat, double lon, int ... precisions) {
-        return query(name, XGeoHashUtils.stringEncode(lon, lat), precisions);
+        return query(name, GeoHashUtils.stringEncode(lon, lat), precisions);
     }

     public static GeoQuery query(String name, double lat, double lon, String ... precisions) {
@@ -302,7 +302,7 @@ public class GeolocationContextMapping extends ContextMapping {
         for (int i = 0 ; i < precisions.length; i++) {
             precisionInts[i] = GeoUtils.geoHashLevelsForPrecision(precisions[i]);
         }
-        return query(name, XGeoHashUtils.stringEncode(lon, lat), precisionInts);
+        return query(name, GeoHashUtils.stringEncode(lon, lat), precisionInts);
     }

     /**
@@ -574,7 +574,7 @@ public class GeolocationContextMapping extends ContextMapping {
          * @return this
          */
         public Builder addDefaultLocation(double lat, double lon) {
-            this.defaultLocations.add(XGeoHashUtils.stringEncode(lon, lat));
+            this.defaultLocations.add(GeoHashUtils.stringEncode(lon, lat));
             return this;
         }

@@ -604,7 +604,7 @@ public class GeolocationContextMapping extends ContextMapping {
         @Override
         public GeolocationContextMapping build() {
             if(precisions.isEmpty()) {
-                precisions.add(XGeoHashUtils.PRECISION);
+                precisions.add(GeoHashUtils.PRECISION);
             }
             int[] precisionArray = precisions.toArray();
             Arrays.sort(precisionArray);
@@ -670,7 +670,7 @@ public class GeolocationContextMapping extends ContextMapping {
             int precision = Math.min(p, geohash.length());
             String truncatedGeohash = geohash.substring(0, precision);
             if(mapping.neighbors) {
-                XGeoHashUtils.addNeighbors(truncatedGeohash, precision, locations);
+                GeoHashUtils.addNeighbors(truncatedGeohash, precision, locations);
             }
             locations.add(truncatedGeohash);
         }

@@ -18,13 +18,13 @@
  */
 package org.elasticsearch.common.geo;

-import org.apache.lucene.util.XGeoHashUtils;
+import org.apache.lucene.util.GeoHashUtils;
 import org.elasticsearch.test.ESTestCase;



 /**
- * Tests for {@link org.apache.lucene.util.XGeoHashUtils}
+ * Tests for {@link org.apache.lucene.util.GeoHashUtils}
  */
 public class GeoHashTests extends ESTestCase {
     public void testGeohashAsLongRoutines() {
@@ -39,13 +39,13 @@ public class GeoHashTests extends ESTestCase {
         {
             for(int p=1;p<=12;p++)
             {
-                long geoAsLong = XGeoHashUtils.longEncode(lng, lat, p);
+                long geoAsLong = GeoHashUtils.longEncode(lng, lat, p);

                 // string encode from geohashlong encoded location
-                String geohashFromLong = XGeoHashUtils.stringEncode(geoAsLong);
+                String geohashFromLong = GeoHashUtils.stringEncode(geoAsLong);

                 // string encode from full res lat lon
-                String geohash = XGeoHashUtils.stringEncode(lng, lat, p);
+                String geohash = GeoHashUtils.stringEncode(lng, lat, p);

                 // ensure both strings are the same
                 assertEquals(geohash, geohashFromLong);

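For reference, a minimal sketch of the same long/string round trip against the Lucene 5.4 GeoHashUtils API that this commit switches to (method names are taken from the hunk above; the wrapper class, coordinates, and level used here are illustrative assumptions, not part of the change):

    import org.apache.lucene.util.GeoHashUtils;

    public class GeoHashRoundTripSketch {
        public static void main(String[] args) {
            double lon = 5.0;   // example longitude
            double lat = 52.0;  // example latitude
            int level = 12;     // GeoHashUtils.PRECISION is the maximum level
            // encode to the geohash-based long format, then back to a string
            long geoAsLong = GeoHashUtils.longEncode(lon, lat, level);
            String fromLong = GeoHashUtils.stringEncode(geoAsLong);
            // encode the same point directly from lon/lat at the same level
            String fromLonLat = GeoHashUtils.stringEncode(lon, lat, level);
            // both paths are expected to yield the same geohash string
            System.out.println(fromLong + " == " + fromLonLat);
        }
    }
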
@@ -18,7 +18,7 @@
  */
 package org.elasticsearch.index.mapper.geo;

-import org.apache.lucene.util.XGeoHashUtils;
+import org.apache.lucene.util.GeoHashUtils;
 import org.elasticsearch.Version;
 import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder;
 import org.elasticsearch.action.search.SearchResponse;
@@ -86,7 +86,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {

         assertThat(doc.rootDoc().getField("point.lat"), notNullValue());
         assertThat(doc.rootDoc().getField("point.lon"), notNullValue());
-        assertThat(doc.rootDoc().get("point.geohash"), equalTo(XGeoHashUtils.stringEncode(1.3, 1.2)));
+        assertThat(doc.rootDoc().get("point.geohash"), equalTo(GeoHashUtils.stringEncode(1.3, 1.2)));
     }

     public void testLatLonInOneValueWithGeohash() throws Exception {
@@ -104,7 +104,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {

         assertThat(doc.rootDoc().getField("point.lat"), notNullValue());
         assertThat(doc.rootDoc().getField("point.lon"), notNullValue());
-        assertThat(doc.rootDoc().get("point.geohash"), equalTo(XGeoHashUtils.stringEncode(1.3, 1.2)));
+        assertThat(doc.rootDoc().get("point.geohash"), equalTo(GeoHashUtils.stringEncode(1.3, 1.2)));
     }

     public void testGeoHashIndexValue() throws Exception {
@@ -116,13 +116,13 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {

         ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
                 .startObject()
-                .field("point", XGeoHashUtils.stringEncode(1.3, 1.2))
+                .field("point", GeoHashUtils.stringEncode(1.3, 1.2))
                 .endObject()
                 .bytes());

         assertThat(doc.rootDoc().getField("point.lat"), notNullValue());
         assertThat(doc.rootDoc().getField("point.lon"), notNullValue());
-        assertThat(doc.rootDoc().get("point.geohash"), equalTo(XGeoHashUtils.stringEncode(1.3, 1.2)));
+        assertThat(doc.rootDoc().get("point.geohash"), equalTo(GeoHashUtils.stringEncode(1.3, 1.2)));
     }

     public void testGeoHashValue() throws Exception {
@@ -134,7 +134,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {

         ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
                 .startObject()
-                .field("point", XGeoHashUtils.stringEncode(1.3, 1.2))
+                .field("point", GeoHashUtils.stringEncode(1.3, 1.2))
                 .endObject()
                 .bytes());

@@ -19,7 +19,7 @@

 package org.elasticsearch.index.mapper.geo;

-import org.apache.lucene.util.XGeoHashUtils;
+import org.apache.lucene.util.GeoHashUtils;
 import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.index.mapper.DocumentMapper;
 import org.elasticsearch.index.mapper.FieldMapper;
@@ -82,13 +82,13 @@ public class GeohashMappingGeoPointTests extends ESSingleNodeTestCase {

         ParsedDocument doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
                 .startObject()
-                .field("point", XGeoHashUtils.stringEncode(1.3, 1.2))
+                .field("point", GeoHashUtils.stringEncode(1.3, 1.2))
                 .endObject()
                 .bytes());

         MatcherAssert.assertThat(doc.rootDoc().getField("point.lat"), nullValue());
         MatcherAssert.assertThat(doc.rootDoc().getField("point.lon"), nullValue());
-        MatcherAssert.assertThat(doc.rootDoc().get("point.geohash"), equalTo(XGeoHashUtils.stringEncode(1.3, 1.2)));
+        MatcherAssert.assertThat(doc.rootDoc().get("point.geohash"), equalTo(GeoHashUtils.stringEncode(1.3, 1.2)));
         MatcherAssert.assertThat(doc.rootDoc().get("point"), notNullValue());
     }

@@ -19,7 +19,7 @@

 package org.elasticsearch.index.search.geo;

-import org.apache.lucene.util.XGeoHashUtils;
+import org.apache.lucene.util.GeoHashUtils;
 import org.elasticsearch.ElasticsearchParseException;
 import org.elasticsearch.common.geo.GeoPoint;
 import org.elasticsearch.common.geo.GeoUtils;
@@ -51,7 +51,7 @@ public class GeoPointParsingTests extends ESTestCase {
         assertPointsEqual(point.resetLat(0), point2.reset(0, 0));
         assertPointsEqual(point.resetLon(lon), point2.reset(0, lon));
         assertPointsEqual(point.resetLon(0), point2.reset(0, 0));
-        assertCloseTo(point.resetFromGeoHash(XGeoHashUtils.stringEncode(lon, lat)), lat, lon);
+        assertCloseTo(point.resetFromGeoHash(GeoHashUtils.stringEncode(lon, lat)), lat, lon);
         assertPointsEqual(point.reset(0, 0), point2.reset(0, 0));
         assertPointsEqual(point.resetFromString(Double.toString(lat) + ", " + Double.toHexString(lon)), point2.reset(lat, lon));
         assertPointsEqual(point.reset(0, 0), point2.reset(0, 0));
@@ -125,7 +125,7 @@ public class GeoPointParsingTests extends ESTestCase {
     public void testInvalidPointLatHashMix() throws IOException {
         XContentBuilder content = JsonXContent.contentBuilder();
         content.startObject();
-        content.field("lat", 0).field("geohash", XGeoHashUtils.stringEncode(0d, 0d));
+        content.field("lat", 0).field("geohash", GeoHashUtils.stringEncode(0d, 0d));
         content.endObject();

         XContentParser parser = JsonXContent.jsonXContent.createParser(content.bytes());
@@ -142,7 +142,7 @@ public class GeoPointParsingTests extends ESTestCase {
     public void testInvalidPointLonHashMix() throws IOException {
         XContentBuilder content = JsonXContent.contentBuilder();
         content.startObject();
-        content.field("lon", 0).field("geohash", XGeoHashUtils.stringEncode(0d, 0d));
+        content.field("lon", 0).field("geohash", GeoHashUtils.stringEncode(0d, 0d));
         content.endObject();

         XContentParser parser = JsonXContent.jsonXContent.createParser(content.bytes());
@@ -201,7 +201,7 @@ public class GeoPointParsingTests extends ESTestCase {

     private static XContentParser geohash(double lat, double lon) throws IOException {
         XContentBuilder content = JsonXContent.contentBuilder();
-        content.value(XGeoHashUtils.stringEncode(lon, lat));
+        content.value(GeoHashUtils.stringEncode(lon, lat));
         XContentParser parser = JsonXContent.jsonXContent.createParser(content.bytes());
         parser.nextToken();
         return parser;

@@ -25,7 +25,7 @@ import com.spatial4j.core.distance.DistanceUtils;
 import org.apache.lucene.spatial.prefix.tree.Cell;
 import org.apache.lucene.spatial.prefix.tree.GeohashPrefixTree;
 import org.apache.lucene.spatial.prefix.tree.QuadPrefixTree;
-import org.apache.lucene.util.XGeoHashUtils;
+import org.apache.lucene.util.GeoHashUtils;
 import org.elasticsearch.ElasticsearchParseException;
 import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.geo.GeoPoint;
@@ -414,7 +414,7 @@ public class GeoUtilsTests extends ESTestCase {

     public void testParseGeoPointGeohash() throws IOException {
         for (int i = 0; i < 100; i++) {
-            int geoHashLength = randomIntBetween(1, XGeoHashUtils.PRECISION);
+            int geoHashLength = randomIntBetween(1, GeoHashUtils.PRECISION);
             StringBuilder geohashBuilder = new StringBuilder(geoHashLength);
             for (int j = 0; j < geoHashLength; j++) {
                 geohashBuilder.append(BASE_32[randomInt(BASE_32.length - 1)]);

@@ -20,11 +20,9 @@ package org.elasticsearch.search.aggregations.bucket;

 import com.carrotsearch.hppc.ObjectIntHashMap;
 import com.carrotsearch.hppc.ObjectIntMap;
 import com.carrotsearch.hppc.ObjectObjectHashMap;
 import com.carrotsearch.hppc.ObjectObjectMap;
 import com.carrotsearch.hppc.cursors.ObjectIntCursor;
-
-import org.apache.lucene.util.XGeoHashUtils;
+import org.apache.lucene.util.GeoHashUtils;
 import org.elasticsearch.action.index.IndexRequestBuilder;
 import org.elasticsearch.action.search.SearchResponse;
 import org.elasticsearch.common.geo.GeoPoint;
@@ -86,13 +84,13 @@ public class GeoHashGridIT extends ESIntegTestCase {
             //generate random point
             double lat = (180d * random.nextDouble()) - 90d;
             double lng = (360d * random.nextDouble()) - 180d;
-            String randomGeoHash = XGeoHashUtils.stringEncode(lng, lat, XGeoHashUtils.PRECISION);
+            String randomGeoHash = GeoHashUtils.stringEncode(lng, lat, GeoHashUtils.PRECISION);
             //Index at the highest resolution
             cities.add(indexCity("idx", randomGeoHash, lat + ", " + lng));
             expectedDocCountsForGeoHash.put(randomGeoHash, expectedDocCountsForGeoHash.getOrDefault(randomGeoHash, 0) + 1);
             //Update expected doc counts for all resolutions..
-            for (int precision = XGeoHashUtils.PRECISION - 1; precision > 0; precision--) {
-                String hash = XGeoHashUtils.stringEncode(lng, lat, precision);
+            for (int precision = GeoHashUtils.PRECISION - 1; precision > 0; precision--) {
+                String hash = GeoHashUtils.stringEncode(lng, lat, precision);
                 if ((smallestGeoHash == null) || (hash.length() < smallestGeoHash.length())) {
                     smallestGeoHash = hash;
                 }
@@ -115,8 +113,8 @@ public class GeoHashGridIT extends ESIntegTestCase {
             double lng = (360d * random.nextDouble()) - 180d;
             points.add(lat + "," + lng);
             // Update expected doc counts for all resolutions..
-            for (int precision = XGeoHashUtils.PRECISION; precision > 0; precision--) {
-                final String geoHash = XGeoHashUtils.stringEncode(lng, lat, precision);
+            for (int precision = GeoHashUtils.PRECISION; precision > 0; precision--) {
+                final String geoHash = GeoHashUtils.stringEncode(lng, lat, precision);
                 geoHashes.add(geoHash);
             }
         }
@@ -131,7 +129,7 @@ public class GeoHashGridIT extends ESIntegTestCase {
     }

     public void testSimple() throws Exception {
-        for (int precision = 1; precision <= XGeoHashUtils.PRECISION; precision++) {
+        for (int precision = 1; precision <= GeoHashUtils.PRECISION; precision++) {
             SearchResponse response = client().prepareSearch("idx")
                     .addAggregation(geohashGrid("geohashgrid")
                             .field("location")
@@ -155,14 +153,14 @@ public class GeoHashGridIT extends ESIntegTestCase {
                 assertEquals("Geohash " + geohash + " has wrong doc count ",
                         expectedBucketCount, bucketCount);
                 GeoPoint geoPoint = (GeoPoint) propertiesKeys[i];
-                assertThat(XGeoHashUtils.stringEncode(geoPoint.lon(), geoPoint.lat(), precision), equalTo(geohash));
+                assertThat(GeoHashUtils.stringEncode(geoPoint.lon(), geoPoint.lat(), precision), equalTo(geohash));
                 assertThat((long) propertiesDocCounts[i], equalTo(bucketCount));
             }
         }
     }

     public void testMultivalued() throws Exception {
-        for (int precision = 1; precision <= XGeoHashUtils.PRECISION; precision++) {
+        for (int precision = 1; precision <= GeoHashUtils.PRECISION; precision++) {
             SearchResponse response = client().prepareSearch("multi_valued_idx")
                     .addAggregation(geohashGrid("geohashgrid")
                             .field("location")
@@ -188,7 +186,7 @@ public class GeoHashGridIT extends ESIntegTestCase {
     public void testFiltered() throws Exception {
         GeoBoundingBoxQueryBuilder bbox = new GeoBoundingBoxQueryBuilder("location");
         bbox.setCorners(smallestGeoHash, smallestGeoHash).queryName("bbox");
-        for (int precision = 1; precision <= XGeoHashUtils.PRECISION; precision++) {
+        for (int precision = 1; precision <= GeoHashUtils.PRECISION; precision++) {
             SearchResponse response = client().prepareSearch("idx")
                     .addAggregation(
                             AggregationBuilders.filter("filtered").filter(bbox)
@@ -219,7 +217,7 @@ public class GeoHashGridIT extends ESIntegTestCase {
     }

     public void testUnmapped() throws Exception {
-        for (int precision = 1; precision <= XGeoHashUtils.PRECISION; precision++) {
+        for (int precision = 1; precision <= GeoHashUtils.PRECISION; precision++) {
             SearchResponse response = client().prepareSearch("idx_unmapped")
                     .addAggregation(geohashGrid("geohashgrid")
                             .field("location")
@@ -236,7 +234,7 @@ public class GeoHashGridIT extends ESIntegTestCase {
     }

     public void testPartiallyUnmapped() throws Exception {
-        for (int precision = 1; precision <= XGeoHashUtils.PRECISION; precision++) {
+        for (int precision = 1; precision <= GeoHashUtils.PRECISION; precision++) {
             SearchResponse response = client().prepareSearch("idx", "idx_unmapped")
                     .addAggregation(geohashGrid("geohashgrid")
                             .field("location")
@@ -260,7 +258,7 @@ public class GeoHashGridIT extends ESIntegTestCase {
     }

     public void testTopMatch() throws Exception {
-        for (int precision = 1; precision <= XGeoHashUtils.PRECISION; precision++) {
+        for (int precision = 1; precision <= GeoHashUtils.PRECISION; precision++) {
             SearchResponse response = client().prepareSearch("idx")
                     .addAggregation(geohashGrid("geohashgrid")
                             .field("location")
@@ -293,7 +291,7 @@ public class GeoHashGridIT extends ESIntegTestCase {

     // making sure this doesn't runs into an OOME
     public void testSizeIsZero() {
-        for (int precision = 1; precision <= XGeoHashUtils.PRECISION; precision++) {
+        for (int precision = 1; precision <= GeoHashUtils.PRECISION; precision++) {
             final int size = randomBoolean() ? 0 : randomIntBetween(1, Integer.MAX_VALUE);
             final int shardSize = randomBoolean() ? -1 : 0;
             SearchResponse response = client().prepareSearch("idx")

@@ -18,7 +18,7 @@
  */
 package org.elasticsearch.search.aggregations.bucket;

-import org.apache.lucene.util.XGeoHashUtils;
+import org.apache.lucene.util.GeoHashUtils;
 import org.elasticsearch.action.index.IndexRequestBuilder;
 import org.elasticsearch.action.search.SearchResponse;
 import org.elasticsearch.index.query.QueryBuilders;
@@ -64,7 +64,7 @@ public class ShardReduceIT extends ESIntegTestCase {
                 .startObject()
                 .field("value", value)
                 .field("ip", "10.0.0." + value)
-                .field("location", XGeoHashUtils.stringEncode(5, 52, XGeoHashUtils.PRECISION))
+                .field("location", GeoHashUtils.stringEncode(5, 52, GeoHashUtils.PRECISION))
                 .field("date", date)
                 .field("term-l", 1)
                 .field("term-d", 1.5)

@@ -23,7 +23,7 @@ import com.carrotsearch.hppc.ObjectIntHashMap;
 import com.carrotsearch.hppc.ObjectIntMap;
 import com.carrotsearch.hppc.ObjectObjectHashMap;
 import com.carrotsearch.hppc.ObjectObjectMap;
-import org.apache.lucene.util.XGeoHashUtils;
+import org.apache.lucene.util.GeoHashUtils;
 import org.elasticsearch.action.index.IndexRequestBuilder;
 import org.elasticsearch.action.search.SearchResponse;
 import org.elasticsearch.common.geo.GeoPoint;
@@ -203,8 +203,8 @@ public abstract class AbstractGeoTestCase extends ESIntegTestCase {
     }

     private void updateGeohashBucketsCentroid(final GeoPoint location) {
-        String hash = XGeoHashUtils.stringEncode(location.lon(), location.lat(), XGeoHashUtils.PRECISION);
-        for (int precision = XGeoHashUtils.PRECISION; precision > 0; --precision) {
+        String hash = GeoHashUtils.stringEncode(location.lon(), location.lat(), GeoHashUtils.PRECISION);
+        for (int precision = GeoHashUtils.PRECISION; precision > 0; --precision) {
             final String h = hash.substring(0, precision);
             expectedDocCountsForGeoHash.put(h, expectedDocCountsForGeoHash.getOrDefault(h, 0) + 1);
             expectedCentroidsForGeoHash.put(h, updateHashCentroid(h, location));

@@ -29,7 +29,7 @@ import org.apache.lucene.spatial.prefix.tree.GeohashPrefixTree;
 import org.apache.lucene.spatial.query.SpatialArgs;
 import org.apache.lucene.spatial.query.SpatialOperation;
 import org.apache.lucene.spatial.query.UnsupportedSpatialOperation;
-import org.apache.lucene.util.XGeoHashUtils;
+import org.apache.lucene.util.GeoHashUtils;
 import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder;
 import org.elasticsearch.action.bulk.BulkItemResponse;
 import org.elasticsearch.action.bulk.BulkResponse;
@@ -465,8 +465,8 @@ public class GeoFilterIT extends ESIntegTestCase {
         String geohash = randomhash(10);
         logger.info("Testing geohash_cell filter for [{}]", geohash);

-        Collection<? extends CharSequence> neighbors = XGeoHashUtils.neighbors(geohash);
-        Collection<? extends CharSequence> parentNeighbors = XGeoHashUtils.neighbors(geohash.substring(0, geohash.length() - 1));
+        Collection<? extends CharSequence> neighbors = GeoHashUtils.neighbors(geohash);
+        Collection<? extends CharSequence> parentNeighbors = GeoHashUtils.neighbors(geohash.substring(0, geohash.length() - 1));

         logger.info("Neighbors {}", neighbors);
         logger.info("Parent Neighbors {}", parentNeighbors);
@@ -536,24 +536,24 @@ public class GeoFilterIT extends ESIntegTestCase {

     public void testNeighbors() {
         // Simple root case
-        assertThat(XGeoHashUtils.addNeighbors("7", new ArrayList<String>()), containsInAnyOrder("4", "5", "6", "d", "e", "h", "k", "s"));
+        assertThat(GeoHashUtils.addNeighbors("7", new ArrayList<String>()), containsInAnyOrder("4", "5", "6", "d", "e", "h", "k", "s"));

         // Root cases (Outer cells)
-        assertThat(XGeoHashUtils.addNeighbors("0", new ArrayList<String>()), containsInAnyOrder("1", "2", "3", "p", "r"));
-        assertThat(XGeoHashUtils.addNeighbors("b", new ArrayList<String>()), containsInAnyOrder("8", "9", "c", "x", "z"));
-        assertThat(XGeoHashUtils.addNeighbors("p", new ArrayList<String>()), containsInAnyOrder("n", "q", "r", "0", "2"));
-        assertThat(XGeoHashUtils.addNeighbors("z", new ArrayList<String>()), containsInAnyOrder("8", "b", "w", "x", "y"));
+        assertThat(GeoHashUtils.addNeighbors("0", new ArrayList<String>()), containsInAnyOrder("1", "2", "3", "p", "r"));
+        assertThat(GeoHashUtils.addNeighbors("b", new ArrayList<String>()), containsInAnyOrder("8", "9", "c", "x", "z"));
+        assertThat(GeoHashUtils.addNeighbors("p", new ArrayList<String>()), containsInAnyOrder("n", "q", "r", "0", "2"));
+        assertThat(GeoHashUtils.addNeighbors("z", new ArrayList<String>()), containsInAnyOrder("8", "b", "w", "x", "y"));

         // Root crossing dateline
-        assertThat(XGeoHashUtils.addNeighbors("2", new ArrayList<String>()), containsInAnyOrder("0", "1", "3", "8", "9", "p", "r", "x"));
-        assertThat(XGeoHashUtils.addNeighbors("r", new ArrayList<String>()), containsInAnyOrder("0", "2", "8", "n", "p", "q", "w", "x"));
+        assertThat(GeoHashUtils.addNeighbors("2", new ArrayList<String>()), containsInAnyOrder("0", "1", "3", "8", "9", "p", "r", "x"));
+        assertThat(GeoHashUtils.addNeighbors("r", new ArrayList<String>()), containsInAnyOrder("0", "2", "8", "n", "p", "q", "w", "x"));

         // level1: simple case
-        assertThat(XGeoHashUtils.addNeighbors("dk", new ArrayList<String>()), containsInAnyOrder("d5", "d7", "de", "dh", "dj", "dm", "ds", "dt"));
+        assertThat(GeoHashUtils.addNeighbors("dk", new ArrayList<String>()), containsInAnyOrder("d5", "d7", "de", "dh", "dj", "dm", "ds", "dt"));

         // Level1: crossing cells
-        assertThat(XGeoHashUtils.addNeighbors("d5", new ArrayList<String>()), containsInAnyOrder("d4", "d6", "d7", "dh", "dk", "9f", "9g", "9u"));
-        assertThat(XGeoHashUtils.addNeighbors("d0", new ArrayList<String>()), containsInAnyOrder("d1", "d2", "d3", "9b", "9c", "6p", "6r", "3z"));
+        assertThat(GeoHashUtils.addNeighbors("d5", new ArrayList<String>()), containsInAnyOrder("d4", "d6", "d7", "dh", "dk", "9f", "9g", "9u"));
+        assertThat(GeoHashUtils.addNeighbors("d0", new ArrayList<String>()), containsInAnyOrder("d1", "d2", "d3", "9b", "9c", "6p", "6r", "3z"));
     }

     public static double distance(double lat1, double lon1, double lat2, double lon2) {

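As a sketch of the neighbor helpers exercised by testNeighbors above (the call shapes are taken from this diff; the wrapper class and the geohash literal are illustrative assumptions only):

    import java.util.ArrayList;
    import java.util.Collection;

    import org.apache.lucene.util.GeoHashUtils;

    public class GeoHashNeighborsSketch {
        public static void main(String[] args) {
            // neighbors(...) returns the cells adjacent to the given geohash
            Collection<? extends CharSequence> neighbors = GeoHashUtils.neighbors("u33d");
            // addNeighbors(...) appends the neighbors to a caller-supplied collection and returns it
            Collection<String> collected = GeoHashUtils.addNeighbors("u33d", new ArrayList<String>());
            System.out.println(neighbors);
            System.out.println(collected);
        }
    }
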
@@ -19,7 +19,7 @@
 package org.elasticsearch.search.suggest;

 import org.apache.lucene.util.LuceneTestCase.SuppressCodecs;
-import org.apache.lucene.util.XGeoHashUtils;
+import org.apache.lucene.util.GeoHashUtils;
 import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder;
 import org.elasticsearch.action.suggest.SuggestRequest;
 import org.elasticsearch.action.suggest.SuggestRequestBuilder;
@@ -818,7 +818,7 @@ public class ContextSuggestSearchIT extends ESIntegTestCase {

         double latitude = 52.22;
         double longitude = 4.53;
-        String geohash = XGeoHashUtils.stringEncode(longitude, latitude);
+        String geohash = GeoHashUtils.stringEncode(longitude, latitude);

         XContentBuilder doc1 = jsonBuilder().startObject().startObject("suggest_geo").field("input", "Hotel Marriot in Amsterdam").startObject("context").startObject("location").field("lat", latitude).field("lon", longitude).endObject().endObject().endObject().endObject();
         index("test", "test", "1", doc1);

@@ -18,7 +18,7 @@
  */
 package org.elasticsearch.search.suggest.context;

-import org.apache.lucene.util.XGeoHashUtils;
+import org.apache.lucene.util.GeoHashUtils;
 import org.elasticsearch.ElasticsearchParseException;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentHelper;
@@ -52,7 +52,7 @@ public class GeoLocationContextMappingTests extends ESTestCase {
         XContentParser parser = XContentHelper.createParser(builder.bytes());
         parser.nextToken();

-        String geohash = XGeoHashUtils.stringEncode(randomIntBetween(-180, +180), randomIntBetween(-90, +90));
+        String geohash = GeoHashUtils.stringEncode(randomIntBetween(-180, +180), randomIntBetween(-90, +90));
         HashMap<String, Object> config = new HashMap<>();
         config.put("precision", 12);
         config.put("default", geohash);
@@ -171,8 +171,8 @@ public class GeoLocationContextMappingTests extends ESTestCase {
     }

     public void testUseWithMultiGeoHashGeoContext() throws Exception {
-        String geohash1 = XGeoHashUtils.stringEncode(randomIntBetween(-180, +180), randomIntBetween(-90, +90));
-        String geohash2 = XGeoHashUtils.stringEncode(randomIntBetween(-180, +180), randomIntBetween(-90, +90));
+        String geohash1 = GeoHashUtils.stringEncode(randomIntBetween(-180, +180), randomIntBetween(-90, +90));
+        String geohash2 = GeoHashUtils.stringEncode(randomIntBetween(-180, +180), randomIntBetween(-90, +90));
         XContentBuilder builder = jsonBuilder().startObject().startArray("location").value(geohash1).value(geohash2).endArray().endObject();
         XContentParser parser = XContentHelper.createParser(builder.bytes());
         parser.nextToken(); // start of object

@@ -19,7 +19,7 @@

 package org.elasticsearch.test.geo;

-import org.apache.lucene.util.XGeoUtils;
+import org.apache.lucene.util.GeoUtils;
 import org.elasticsearch.common.geo.GeoPoint;

 import java.util.Random;
@@ -42,8 +42,8 @@ public class RandomGeoGenerator {
         assert pt != null && pt.length == 2;

         // normalize min and max
-        double[] min = {XGeoUtils.normalizeLon(minLon), XGeoUtils.normalizeLat(minLat)};
-        double[] max = {XGeoUtils.normalizeLon(maxLon), XGeoUtils.normalizeLat(maxLat)};
+        double[] min = {GeoUtils.normalizeLon(minLon), GeoUtils.normalizeLat(minLat)};
+        double[] max = {GeoUtils.normalizeLon(maxLon), GeoUtils.normalizeLat(maxLat)};
         final double[] tMin = new double[2];
         final double[] tMax = new double[2];
         tMin[0] = Math.min(min[0], max[0]);

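A small sketch of the normalization helpers used above, now taken from Lucene's GeoUtils (the wrapper class and input values are illustrative; the exact wrapped results depend on GeoUtils' normalization rules):

    import org.apache.lucene.util.GeoUtils;

    public class NormalizeSketch {
        public static void main(String[] args) {
            // wrap an out-of-range longitude back into [-180, 180]
            double lon = GeoUtils.normalizeLon(190.0);
            // fold an out-of-range latitude back into [-90, 90]
            double lat = GeoUtils.normalizeLat(95.0);
            System.out.println(lon + ", " + lat);
        }
    }
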
@@ -19,7 +19,7 @@

 package org.elasticsearch.messy.tests;

-import org.apache.lucene.util.XGeoHashUtils;
+import org.apache.lucene.util.GeoHashUtils;
 import org.elasticsearch.action.index.IndexRequestBuilder;
 import org.elasticsearch.action.search.SearchResponse;
 import org.elasticsearch.common.geo.GeoDistance;
@@ -664,7 +664,7 @@ public class GeoDistanceTests extends ESIntegTestCase {

         XContentBuilder source = JsonXContent.contentBuilder()
                 .startObject()
-                .field("pin", XGeoHashUtils.stringEncode(lon, lat))
+                .field("pin", GeoHashUtils.stringEncode(lon, lat))
                 .endObject();

         assertAcked(prepareCreate("locations").addMapping("location", mapping));