Upgrade to the Lucene 6.0.0 release

* upgrades numerics to new Point format
* updates geo api changes
  * adds GeoPointDistanceRangeQuery as XGeoPointDistanceRangeQuery
  * cuts over to ES GeoHashUtils
This commit is contained in:
Adrien Grand 2016-04-01 11:30:10 +02:00 committed by Nicholas Knize
parent 56f061b0b4
commit 496c7fbd84
80 changed files with 524 additions and 107 deletions

View File

@ -1,5 +1,5 @@
elasticsearch = 5.0.0-alpha1
lucene = 6.0.0-snapshot-f0aa4fc
lucene = 6.0.0
# optional dependencies
spatial4j = 0.6

View File

@ -0,0 +1,126 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.lucene.spatial.geopoint.search;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.spatial.geopoint.document.GeoPointField.TermEncoding;
/** Implements a point distance range query on a GeoPoint field. This is based on
* {@code org.apache.lucene.spatial.geopoint.search.GeoPointDistanceQuery} and is implemented using a
* {@code org.apache.lucene.search.BooleanClause.MUST_NOT} clause to exclude any points that fall within
* minRadiusMeters from the provided point.
* <p>
* NOTE: this query does not correctly support multi-value docs (see: https://issues.apache.org/jira/browse/LUCENE-7126)
* <br>
* TODO: remove this per ISSUE #17658
*
* @lucene.experimental
*/
public final class XGeoPointDistanceRangeQuery extends GeoPointDistanceQuery {
  /** lower bound of the distance range in meters; the upper bound is inherited from the superclass */
  protected final double minRadiusMeters;

  /**
   * Creates a query matching all {@link org.apache.lucene.spatial.geopoint.document.GeoPointField}
   * values within the given minimum / maximum distance range (in meters) from the center point,
   * using the default {@link TermEncoding#PREFIX} term encoding.
   */
  public XGeoPointDistanceRangeQuery(final String field, final double centerLat, final double centerLon,
                                     final double minRadiusMeters, final double maxRadiusMeters) {
    this(field, TermEncoding.PREFIX, centerLat, centerLon, minRadiusMeters, maxRadiusMeters);
  }

  /**
   * Creates a query matching all {@link org.apache.lucene.spatial.geopoint.document.GeoPointField}
   * values within the given minimum / maximum distance range (in meters) from the center point,
   * with an explicit {@link org.apache.lucene.spatial.geopoint.document.GeoPointField.TermEncoding}.
   */
  public XGeoPointDistanceRangeQuery(final String field, final TermEncoding termEncoding, final double centerLat, final double centerLon,
                                     final double minRadiusMeters, final double maxRadius) {
    super(field, termEncoding, centerLat, centerLon, maxRadius);
    this.minRadiusMeters = minRadiusMeters;
  }

  @Override
  public Query rewrite(IndexReader reader) {
    final Query rewritten = super.rewrite(reader);
    if (minRadiusMeters == 0.0) {
      // no inner radius: the superclass distance query already matches exactly the right points
      return rewritten;
    }
    // carve the inner circle out of the outer distance query with a MUST_NOT clause
    final GeoPointDistanceQuery innerCircle =
        new GeoPointDistanceQuery(field, termEncoding, centerLat, centerLon, minRadiusMeters);
    return new BooleanQuery.Builder()
        .add(new BooleanClause(rewritten, BooleanClause.Occur.MUST))
        .add(new BooleanClause(innerCircle, BooleanClause.Occur.MUST_NOT))
        .build();
  }

  @Override
  public String toString(String field) {
    final StringBuilder sb = new StringBuilder(getClass().getSimpleName()).append(':');
    if (!this.field.equals(field)) {
      sb.append(" field=").append(this.field).append(':');
    }
    sb.append(" Center: [").append(centerLat).append(',').append(centerLon).append(']');
    sb.append(" From Distance: ").append(minRadiusMeters).append(" m");
    sb.append(" To Distance: ").append(radiusMeters).append(" m");
    sb.append(" Lower Left: [").append(minLat).append(',').append(minLon).append(']');
    sb.append(" Upper Right: [").append(maxLat).append(',').append(maxLon).append("]");
    return sb.toString();
  }

  /** Returns the minimum distance (in meters) of this range query. */
  public double getMinRadiusMeters() {
    return this.minRadiusMeters;
  }

  /** Returns the maximum distance (in meters) of this range query. */
  public double getMaxRadiusMeters() {
    return this.radiusMeters;
  }
}

View File

@ -89,14 +89,8 @@ public enum GeoDistance implements Writeable<GeoDistance> {
ARC {
@Override
public double calculate(double sourceLatitude, double sourceLongitude, double targetLatitude, double targetLongitude, DistanceUnit unit) {
double x1 = sourceLatitude * Math.PI / 180D;
double x2 = targetLatitude * Math.PI / 180D;
double h1 = 1D - Math.cos(x1 - x2);
double h2 = 1D - Math.cos((sourceLongitude - targetLongitude) * Math.PI / 180D);
double h = (h1 + Math.cos(x1) * Math.cos(x2) * h2) / 2;
double averageLatitude = (x1 + x2) / 2;
double diameter = GeoUtils.earthDiameter(averageLatitude);
return unit.fromMeters(diameter * Math.asin(Math.min(1, Math.sqrt(h))));
double result = SloppyMath.haversinMeters(sourceLatitude, sourceLongitude, targetLatitude, targetLongitude);
return unit.fromMeters(result);
}
@Override
@ -113,6 +107,7 @@ public enum GeoDistance implements Writeable<GeoDistance> {
* Calculates distance as points on a globe in a sloppy way. Close to the pole areas the accuracy
* of this function decreases.
*/
@Deprecated
SLOPPY_ARC {
@Override
@ -122,7 +117,7 @@ public enum GeoDistance implements Writeable<GeoDistance> {
@Override
public double calculate(double sourceLatitude, double sourceLongitude, double targetLatitude, double targetLongitude, DistanceUnit unit) {
return unit.fromMeters(SloppyMath.haversin(sourceLatitude, sourceLongitude, targetLatitude, targetLongitude) * 1000.0);
return unit.fromMeters(SloppyMath.haversinMeters(sourceLatitude, sourceLongitude, targetLatitude, targetLongitude));
}
@Override

View File

@ -0,0 +1,284 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.elasticsearch.common.geo;
import java.util.ArrayList;
import java.util.Collection;
import org.apache.lucene.spatial.util.GeoEncodingUtils;
import org.apache.lucene.util.BitUtil;
/**
* Utilities for converting to/from the GeoHash standard
*
* The geohash long format is represented as lon/lat (x/y) interleaved with the 4 least significant bits
* representing the level (1-12) [xyxy...xyxyllll]
*
* This differs from a morton encoded value which interleaves lat/lon (y/x).
*
* @lucene.experimental
*/
public class GeoHashUtils {
    /** base-32 alphabet used by the geohash standard (omits a, i, l, o) */
    private static final char[] BASE_32 = {'0', '1', '2', '3', '4', '5', '6',
        '7', '8', '9', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'j', 'k', 'm', 'n',
        'p', 'q', 'r', 's', 't', 'u', 'v', 'w', 'x', 'y', 'z'};
    private static final String BASE_32_STRING = new String(BASE_32);

    /** maximum precision for geohash strings */
    public static final int PRECISION = 12;
    /** bits left over in the morton encoding beyond full geohash precision (5 bits per char) */
    private static final short MORTON_OFFSET = (GeoEncodingUtils.BITS<<1) - (PRECISION*5);

    // No instance:
    private GeoHashUtils() {
    }

    /**
     * Encode lon/lat to the geohash based long format (lon/lat interleaved, 4 least significant bits = level)
     */
    public static final long longEncode(final double lon, final double lat, final int level) {
        // shift to appropriate level
        final short msf = (short)(((PRECISION - level) * 5) + MORTON_OFFSET);
        return ((BitUtil.flipFlop(GeoEncodingUtils.mortonHash(lat, lon)) >>> msf) << 4) | level;
    }

    /**
     * Encode from geohash string to the geohash based long format (lon/lat interleaved, 4 least significant bits = level)
     *
     * @throws IllegalArgumentException if the hash contains a character outside the geohash base-32 alphabet
     */
    public static final long longEncode(final String hash) {
        int level = hash.length() - 1;
        long b;
        long l = 0L;
        for (char c : hash.toCharArray()) {
            b = (long)(BASE_32_STRING.indexOf(c));
            if (b < 0) {
                // previously an invalid character silently corrupted the hash (indexOf == -1)
                throw new IllegalArgumentException("unsupported symbol [" + c + "] in geohash [" + hash + "]");
            }
            l |= (b << (level-- * 5));
        }
        return (l << 4) | hash.length();
    }

    /**
     * Re-encode an existing geohash long to the provided precision (level), truncating or
     * zero-extending the cell bits as needed.
     */
    public static long longEncode(long geohash, int level) {
        final short precision = (short)(geohash & 15);
        if (precision == level) {
            return geohash;
        } else if (precision > level) {
            // drop the extra 5-bit groups, then re-append the new level
            return ((geohash >>> (((precision - level) * 5) + 4)) << 4) | level;
        }
        // coarser input than requested: pad with zero bits up to the requested level
        return (((geohash >>> 4) << (((level - precision) * 5) + 4)) | level);
    }

    /**
     * Convert from a morton encoded long to a geohash encoded long at the given level
     */
    public static long fromMorton(long morton, int level) {
        // geohash interleaves lon/lat while morton interleaves lat/lon, hence the flipFlop
        long mFlipped = BitUtil.flipFlop(morton);
        mFlipped >>>= (((GeoHashUtils.PRECISION - level) * 5) + MORTON_OFFSET);
        return (mFlipped << 4) | level;
    }

    /**
     * Encode to a geohash string from the geohash based long format
     */
    public static final String stringEncode(long geoHashLong) {
        int level = (int)geoHashLong&15;
        geoHashLong >>>= 4;
        char[] chars = new char[level];
        do {
            // emit 5 bits per character, least significant group last
            chars[--level] = BASE_32[(int) (geoHashLong&31L)];
            geoHashLong >>>= 5;
        } while (level > 0);
        return new String(chars);
    }

    /**
     * Encode to a full precision geohash string from full resolution longitude, latitude
     */
    public static final String stringEncode(final double lon, final double lat) {
        return stringEncode(lon, lat, PRECISION);
    }

    /**
     * Encode to a level specific geohash string from full resolution longitude, latitude
     */
    public static final String stringEncode(final double lon, final double lat, final int level) {
        // convert to geohash long, then to its string form
        final long ghLong = fromMorton(GeoEncodingUtils.mortonHash(lat, lon), level);
        return stringEncode(ghLong);
    }

    /**
     * Encode to a full precision geohash string from a given morton encoded long value
     */
    public static final String stringEncodeFromMortonLong(final long hashedVal) throws Exception {
        // BUGFIX: must delegate to the (long, level) overload. The previous call
        // stringEncode(hashedVal, PRECISION) silently widened both arguments to double and
        // bound to stringEncode(double lon, double lat), producing a garbage geohash.
        return stringEncodeFromMortonLong(hashedVal, PRECISION);
    }

    /**
     * Encode to a geohash string at a given level from a morton long
     */
    public static final String stringEncodeFromMortonLong(long hashedVal, final int level) {
        // bit twiddle to geohash (since geohash is a swapped (lon/lat) encoding)
        hashedVal = BitUtil.flipFlop(hashedVal);
        StringBuilder geoHash = new StringBuilder();
        short precision = 0;
        final short msf = (GeoEncodingUtils.BITS<<1)-5;
        long mask = 31L<<msf;
        do {
            geoHash.append(BASE_32[(int)((mask & hashedVal)>>>(msf-(precision*5)))]);
            // next 5 bits
            mask >>>= 5;
        } while (++precision < level);
        return geoHash.toString();
    }

    /**
     * Encode to a morton long value from a given geohash string
     *
     * @throws IllegalArgumentException if the hash contains a character outside the geohash base-32 alphabet
     */
    public static final long mortonEncode(final String hash) {
        int level = PRECISION - 1;
        long b;
        long l = 0L;
        for (char c : hash.toCharArray()) {
            b = (long)(BASE_32_STRING.indexOf(c));
            if (b < 0) {
                // previously an invalid character silently corrupted the hash (indexOf == -1)
                throw new IllegalArgumentException("unsupported symbol [" + c + "] in geohash [" + hash + "]");
            }
            l |= (b << ((level-- * 5) + MORTON_OFFSET));
        }
        return BitUtil.flipFlop(l);
    }

    /**
     * Encode to a morton long value from a given geohash long value
     */
    public static final long mortonEncode(final long geoHashLong) {
        final int level = (int)(geoHashLong&15);
        final short odd = (short)(level & 1);
        return BitUtil.flipFlop(((geoHashLong >>> 4) << odd) << (((PRECISION - level) * 5) + (MORTON_OFFSET - odd)));
    }

    /** Encode the 5-bit geohash character for the given grid coordinates. */
    private static final char encode(int x, int y) {
        return BASE_32[((x & 1) + ((y & 1) * 2) + ((x & 2) * 2) + ((y & 2) * 4) + ((x & 4) * 4)) % 32];
    }

    /**
     * Calculate all neighbors of a given geohash cell.
     *
     * @param geohash Geohash of the defined cell
     * @return geohashes of all neighbor cells
     */
    public static Collection<? extends CharSequence> neighbors(String geohash) {
        return addNeighbors(geohash, geohash.length(), new ArrayList<CharSequence>(8));
    }

    /**
     * Calculate the geohash of a neighbor of a geohash
     *
     * @param geohash the geohash of a cell
     * @param level level of the geohash
     * @param dx delta of the first grid coordinate (must be -1, 0 or +1)
     * @param dy delta of the second grid coordinate (must be -1, 0 or +1)
     * @return geohash of the defined cell, or {@code null} if no neighbor exists in that direction
     */
    public final static String neighbor(String geohash, int level, int dx, int dy) {
        int cell = BASE_32_STRING.indexOf(geohash.charAt(level -1));

        // Decoding the Geohash bit pattern to determine grid coordinates
        int x0 = cell & 1;  // first bit of x
        int y0 = cell & 2;  // first bit of y
        int x1 = cell & 4;  // second bit of x
        int y1 = cell & 8;  // second bit of y
        int x2 = cell & 16; // third bit of x

        // combine the bitpattern to grid coordinates.
        // note that the semantics of x and y are swapping
        // on each level
        int x = x0 + (x1 / 2) + (x2 / 4);
        int y = (y0 / 2) + (y1 / 4);

        if (level == 1) {
            // Root cells at north (namely "bcfguvyz") or at
            // south (namely "0145hjnp") do not have neighbors
            // in north/south direction
            if ((dy < 0 && y == 0) || (dy > 0 && y == 3)) {
                return null;
            } else {
                return Character.toString(encode(x + dx, y + dy));
            }
        } else {
            // define grid coordinates for next level
            final int nx = ((level % 2) == 1) ? (x + dx) : (x + dy);
            final int ny = ((level % 2) == 1) ? (y + dy) : (y + dx);

            // if the defined neighbor has the same parent as the current cell
            // encode the cell directly. Otherwise find the cell next to this
            // cell recursively. Since encoding wraps around within a cell
            // it can be encoded here.
            // xLimit and yLimit must always be respectively 7 and 3
            // since x and y semantics are swapping on each level.
            if (nx >= 0 && nx <= 7 && ny >= 0 && ny <= 3) {
                return geohash.substring(0, level - 1) + encode(nx, ny);
            } else {
                String neighbor = neighbor(geohash, level - 1, dx, dy);
                return (neighbor != null) ? neighbor + encode(nx, ny) : neighbor;
            }
        }
    }

    /**
     * Add all geohashes of the cells next to a given geohash to a list.
     *
     * @param geohash Geohash of a specified cell
     * @param neighbors list to add the neighbors to
     * @return the given list
     */
    public static final <E extends Collection<? super String>> E addNeighbors(String geohash, E neighbors) {
        return addNeighbors(geohash, geohash.length(), neighbors);
    }

    /**
     * Add all geohashes of the cells next to a given geohash to a list.
     *
     * @param geohash Geohash of a specified cell
     * @param length level of the given geohash
     * @param neighbors list to add the neighbors to
     * @return the given list
     */
    public static final <E extends Collection<? super String>> E addNeighbors(String geohash, int length, E neighbors) {
        String south = neighbor(geohash, length, 0, -1);
        String north = neighbor(geohash, length, 0, +1);
        if (north != null) {
            neighbors.add(neighbor(north, length, -1, 0));
            neighbors.add(north);
            neighbors.add(neighbor(north, length, +1, 0));
        }
        neighbors.add(neighbor(geohash, length, -1, 0));
        neighbors.add(neighbor(geohash, length, +1, 0));
        if (south != null) {
            neighbors.add(neighbor(south, length, -1, 0));
            neighbors.add(south);
            neighbors.add(neighbor(south, length, +1, 0));
        }
        return neighbors;
    }
}

View File

@ -21,8 +21,8 @@ package org.elasticsearch.common.geo;
import org.apache.lucene.util.BitUtil;
import static org.apache.lucene.spatial.util.GeoHashUtils.mortonEncode;
import static org.apache.lucene.spatial.util.GeoHashUtils.stringEncode;
import static org.elasticsearch.common.geo.GeoHashUtils.mortonEncode;
import static org.elasticsearch.common.geo.GeoHashUtils.stringEncode;
import static org.apache.lucene.spatial.util.GeoEncodingUtils.mortonUnhashLat;
import static org.apache.lucene.spatial.util.GeoEncodingUtils.mortonUnhashLon;

View File

@ -71,7 +71,7 @@ public class GeoUtils {
* maximum distance/radius from the point 'center' before overlapping
**/
public static double maxRadialDistance(GeoPoint center, double initialRadius) {
final double maxRadius = maxRadialDistanceMeters(center.lon(), center.lat());
final double maxRadius = maxRadialDistanceMeters(center.lat(), center.lon());
return Math.min(initialRadius, maxRadius);
}
@ -91,14 +91,6 @@ public class GeoUtils {
return true;
}
/**
* Return an approximate value of the diameter of the earth (in meters) at the given latitude (in radians).
*/
public static double earthDiameter(double latitude) {
// SloppyMath impl returns a result in kilometers
return SloppyMath.earthDiameter(latitude) * 1000;
}
/**
* Calculate the width (in meters) of geohash cells at a specific level
* @param level geohash level must be greater or equal to zero

View File

@ -25,7 +25,6 @@ import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.BytesRefIterator;
import org.apache.lucene.util.CharsRefBuilder;
import org.apache.lucene.util.LegacyNumericUtils;
import org.elasticsearch.Version;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.index.IndexSettings;

View File

@ -20,7 +20,7 @@
package org.elasticsearch.index.mapper.geo;
import org.apache.lucene.document.Field;
import org.apache.lucene.spatial.util.GeoHashUtils;
import org.elasticsearch.common.geo.GeoHashUtils;
import org.apache.lucene.util.LegacyNumericUtils;
import org.elasticsearch.Version;
import org.elasticsearch.ElasticsearchParseException;
@ -57,7 +57,6 @@ import java.util.List;
import java.util.Map;
import static org.elasticsearch.index.mapper.core.TypeParsers.parseField;
import static org.elasticsearch.index.mapper.core.TypeParsers.parseMultiField;
/**
* GeoPointFieldMapper base class to maintain backward compatibility

View File

@ -129,7 +129,7 @@ public class GeoPointFieldMapper extends BaseGeoPointFieldMapper {
GeoUtils.normalizePoint(point);
}
if (fieldType().indexOptions() != IndexOptions.NONE || fieldType().stored()) {
context.doc().add(new GeoPointField(fieldType().name(), point.lon(), point.lat(), fieldType() ));
context.doc().add(new GeoPointField(fieldType().name(), point.lat(), point.lon(), fieldType()));
}
super.parse(context, point, geoHash);
}

View File

@ -291,8 +291,8 @@ public class GeoBoundingBoxQueryBuilder extends AbstractQueryBuilder<GeoBounding
// if index created V_2_3 > use prefix encoded postings format
final GeoPointField.TermEncoding encoding = (indexVersionCreated.before(Version.V_2_3_0)) ?
GeoPointField.TermEncoding.NUMERIC : GeoPointField.TermEncoding.PREFIX;
return new GeoPointInBBoxQuery(fieldType.name(), encoding, luceneTopLeft.lon(), luceneBottomRight.lat(),
luceneBottomRight.lon(), luceneTopLeft.lat());
return new GeoPointInBBoxQuery(fieldType.name(), encoding, luceneBottomRight.lat(), luceneTopLeft.lat(),
luceneTopLeft.lon(), luceneBottomRight.lon());
}
Query query;

View File

@ -257,7 +257,7 @@ public class GeoDistanceQueryBuilder extends AbstractQueryBuilder<GeoDistanceQue
final GeoPointField.TermEncoding encoding = (indexVersionCreated.before(Version.V_2_3_0)) ?
GeoPointField.TermEncoding.NUMERIC : GeoPointField.TermEncoding.PREFIX;
normDistance = GeoUtils.maxRadialDistance(center, normDistance);
return new GeoPointDistanceQuery(fieldType.name(), encoding, center.lon(), center.lat(), normDistance);
return new GeoPointDistanceQuery(fieldType.name(), encoding, center.lat(), center.lon(), normDistance);
}
@Override

View File

@ -21,7 +21,7 @@ package org.elasticsearch.index.query;
import org.apache.lucene.search.Query;
import org.apache.lucene.spatial.geopoint.document.GeoPointField;
import org.apache.lucene.spatial.geopoint.search.GeoPointDistanceRangeQuery;
import org.apache.lucene.spatial.geopoint.search.XGeoPointDistanceRangeQuery;
import org.apache.lucene.spatial.util.GeoDistanceUtils;
import org.elasticsearch.Version;
import org.elasticsearch.common.ParseField;
@ -288,7 +288,7 @@ public class GeoDistanceRangeQueryBuilder extends AbstractQueryBuilder<GeoDistan
toValue = geoDistance.normalize(toValue, DistanceUnit.DEFAULT);
}
} else {
toValue = GeoDistanceUtils.maxRadialDistanceMeters(point.lon(), point.lat());
toValue = GeoDistanceUtils.maxRadialDistanceMeters(point.lat(), point.lon());
}
final Version indexVersionCreated = context.indexVersionCreated();
@ -304,7 +304,7 @@ public class GeoDistanceRangeQueryBuilder extends AbstractQueryBuilder<GeoDistan
final GeoPointField.TermEncoding encoding = (indexVersionCreated.before(Version.V_2_3_0)) ?
GeoPointField.TermEncoding.NUMERIC : GeoPointField.TermEncoding.PREFIX;
return new GeoPointDistanceRangeQuery(fieldType.name(), encoding, point.lon(), point.lat(),
return new XGeoPointDistanceRangeQuery(fieldType.name(), encoding, point.lat(), point.lon(),
(includeLower) ? fromValue : fromValue + TOLERANCE,
(includeUpper) ? toValue : toValue - TOLERANCE);
}

View File

@ -165,7 +165,7 @@ public class GeoPolygonQueryBuilder extends AbstractQueryBuilder<GeoPolygonQuery
// if index created V_2_3 > use prefix encoded postings format
final GeoPointField.TermEncoding encoding = (indexVersionCreated.before(Version.V_2_3_0)) ?
GeoPointField.TermEncoding.NUMERIC : GeoPointField.TermEncoding.PREFIX;
return new GeoPointInPolygonQuery(fieldType.name(), encoding, lons, lats);
return new GeoPointInPolygonQuery(fieldType.name(), encoding, lats, lons);
}
@Override

View File

@ -20,7 +20,7 @@
package org.elasticsearch.index.query;
import org.apache.lucene.search.Query;
import org.apache.lucene.spatial.util.GeoHashUtils;
import org.elasticsearch.common.geo.GeoHashUtils;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParseField;

View File

@ -20,8 +20,8 @@
package org.elasticsearch.search;
import org.apache.lucene.index.Term;
import org.apache.lucene.spatial.util.GeoHashUtils;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.geo.GeoHashUtils;
import org.elasticsearch.common.io.stream.NamedWriteable;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;

View File

@ -21,7 +21,7 @@ package org.elasticsearch.search.aggregations.bucket.geogrid;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.SortedNumericDocValues;
import org.apache.lucene.spatial.util.GeoHashUtils;
import org.elasticsearch.common.geo.GeoHashUtils;
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
@ -218,4 +218,4 @@ public class GeoGridAggregatorBuilder extends ValuesSourceAggregatorBuilder<Valu
}
}
}
}

View File

@ -18,7 +18,7 @@
*/
package org.elasticsearch.search.aggregations.bucket.geogrid;
import org.apache.lucene.spatial.util.GeoHashUtils;
import org.elasticsearch.common.geo.GeoHashUtils;
import org.apache.lucene.util.PriorityQueue;
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.io.stream.StreamInput;

View File

@ -92,7 +92,7 @@ public final class GeoCentroidAggregator extends MetricsAggregator {
pt[0] = pt[0] + (value.getLon() - pt[0]) / ++prevCounts;
pt[1] = pt[1] + (value.getLat() - pt[1]) / prevCounts;
}
centroids.set(bucket, GeoEncodingUtils.mortonHash(pt[0], pt[1]));
centroids.set(bucket, GeoEncodingUtils.mortonHash(pt[1], pt[0]));
}
}
};

View File

@ -140,7 +140,7 @@ public class InternalGeoCentroid extends InternalMetricsAggregation implements G
out.writeVLong(count);
if (centroid != null) {
out.writeBoolean(true);
out.writeLong(GeoEncodingUtils.mortonHash(centroid.lon(), centroid.lat()));
out.writeLong(GeoEncodingUtils.mortonHash(centroid.lat(), centroid.lon()));
} else {
out.writeBoolean(false);
}

View File

@ -44,8 +44,8 @@ import java.util.Objects;
import java.util.Set;
import java.util.stream.Collectors;
import static org.apache.lucene.spatial.util.GeoHashUtils.addNeighbors;
import static org.apache.lucene.spatial.util.GeoHashUtils.stringEncode;
import static org.elasticsearch.common.geo.GeoHashUtils.addNeighbors;
import static org.elasticsearch.common.geo.GeoHashUtils.stringEncode;
/**
* A {@link ContextMapping} that uses a geo location/area as a

View File

@ -31,7 +31,7 @@ grant codeBase "${codebase.securesm-1.0.jar}" {
//// Very special jar permissions:
//// These are dangerous permissions that we don't want to grant to everything.
grant codeBase "${codebase.lucene-core-6.0.0-snapshot-f0aa4fc.jar}" {
grant codeBase "${codebase.lucene-core-6.0.0.jar}" {
// needed to allow MMapDirectory's "unmap hack" (die unmap hack, die)
// java 8 package
permission java.lang.RuntimePermission "accessClassInPackage.sun.misc";

View File

@ -31,7 +31,7 @@ grant codeBase "${codebase.securemock-1.2.jar}" {
permission java.lang.reflect.ReflectPermission "suppressAccessChecks";
};
grant codeBase "${codebase.lucene-test-framework-6.0.0-snapshot-f0aa4fc.jar}" {
grant codeBase "${codebase.lucene-test-framework-6.0.0.jar}" {
// needed by RamUsageTester
permission java.lang.reflect.ReflectPermission "suppressAccessChecks";
};

View File

@ -18,11 +18,10 @@
*/
package org.elasticsearch.common.geo;
import org.apache.lucene.spatial.util.GeoHashUtils;
import org.elasticsearch.test.ESTestCase;
/**
* Tests for {@link org.apache.lucene.spatial.util.GeoHashUtils}
* Tests for {@link org.elasticsearch.common.geo.GeoHashUtils}
*/
public class GeoHashTests extends ESTestCase {
public void testGeohashAsLongRoutines() {

View File

@ -48,7 +48,7 @@ public abstract class AbstractGeoFieldDataTestCase extends AbstractFieldDataImpl
final GeoPointField.TermEncoding termEncoding;
termEncoding = indexService.getIndexSettings().getIndexVersionCreated().onOrAfter(Version.V_2_3_0) ?
GeoPointField.TermEncoding.PREFIX : GeoPointField.TermEncoding.NUMERIC;
return new GeoPointField(fieldName, point.lon(), point.lat(), termEncoding, store);
return new GeoPointField(fieldName, point.lat(), point.lon(), termEncoding, store);
}
@Override

View File

@ -88,7 +88,7 @@ public class SimpleExternalMappingTests extends ESSingleNodeTestCase {
if (version.before(Version.V_2_2_0)) {
assertThat(doc.rootDoc().getField("field.point").stringValue(), is("42.0,51.0"));
} else {
assertThat(Long.parseLong(doc.rootDoc().getField("field.point").stringValue()), is(GeoEncodingUtils.mortonHash(51.0, 42.0)));
assertThat(Long.parseLong(doc.rootDoc().getField("field.point").stringValue()), is(GeoEncodingUtils.mortonHash(42.0, 51.0)));
}
assertThat(doc.rootDoc().getField("field.shape"), notNullValue());
@ -146,7 +146,7 @@ public class SimpleExternalMappingTests extends ESSingleNodeTestCase {
if (version.before(Version.V_2_2_0)) {
assertThat(doc.rootDoc().getField("field.point").stringValue(), is("42.0,51.0"));
} else {
assertThat(Long.parseLong(doc.rootDoc().getField("field.point").stringValue()), is(GeoEncodingUtils.mortonHash(51.0, 42.0)));
assertThat(Long.parseLong(doc.rootDoc().getField("field.point").stringValue()), is(GeoEncodingUtils.mortonHash(42.0, 51.0)));
}
assertThat(doc.rootDoc().getField("field.shape"), notNullValue());
@ -208,7 +208,7 @@ public class SimpleExternalMappingTests extends ESSingleNodeTestCase {
if (version.before(Version.V_2_2_0)) {
assertThat(doc.rootDoc().getField("field.point").stringValue(), is("42.0,51.0"));
} else {
assertThat(Long.parseLong(doc.rootDoc().getField("field.point").stringValue()), is(GeoEncodingUtils.mortonHash(51.0, 42.0)));
assertThat(Long.parseLong(doc.rootDoc().getField("field.point").stringValue()), is(GeoEncodingUtils.mortonHash(42.0, 51.0)));
}
assertThat(doc.rootDoc().getField("field.shape"), notNullValue());

View File

@ -46,7 +46,7 @@ import java.util.Map;
import java.lang.NumberFormatException;
import static org.apache.lucene.spatial.util.GeoEncodingUtils.mortonHash;
import static org.apache.lucene.spatial.util.GeoHashUtils.stringEncode;
import static org.elasticsearch.common.geo.GeoHashUtils.stringEncode;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;
import static org.hamcrest.CoreMatchers.instanceOf;
@ -88,7 +88,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
if (indexCreatedBefore22 == true) {
assertThat(doc.rootDoc().get("point"), equalTo("1.2,1.3"));
} else {
assertThat(Long.parseLong(doc.rootDoc().get("point")), equalTo(mortonHash(1.3, 1.2)));
assertThat(Long.parseLong(doc.rootDoc().get("point")), equalTo(mortonHash(1.2, 1.3)));
}
}
@ -195,7 +195,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
if (version.before(Version.V_2_2_0)) {
assertThat(doc.rootDoc().get("point"), equalTo("89.0,1.0"));
} else {
assertThat(Long.parseLong(doc.rootDoc().get("point")), equalTo(mortonHash(1.0, 89.0)));
assertThat(Long.parseLong(doc.rootDoc().get("point")), equalTo(mortonHash(89.0, 1.0)));
}
doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
@ -207,7 +207,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
if (version.before(Version.V_2_2_0)) {
assertThat(doc.rootDoc().get("point"), equalTo("-89.0,-1.0"));
} else {
assertThat(Long.parseLong(doc.rootDoc().get("point")), equalTo(mortonHash(-1.0, -89.0)));
assertThat(Long.parseLong(doc.rootDoc().get("point")), equalTo(mortonHash(-89.0, -1.0)));
}
doc = defaultMapper.parse("test", "type", "1", XContentFactory.jsonBuilder()
@ -219,7 +219,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
if (version.before(Version.V_2_2_0)) {
assertThat(doc.rootDoc().get("point"), equalTo("-1.0,-179.0"));
} else {
assertThat(Long.parseLong(doc.rootDoc().get("point")), equalTo(mortonHash(-179.0, -1.0)));
assertThat(Long.parseLong(doc.rootDoc().get("point")), equalTo(mortonHash(-1.0, -179.0)));
}
}
@ -406,7 +406,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
if (version.before(Version.V_2_2_0)) {
assertThat(doc.rootDoc().get("point"), equalTo("1.2,1.3"));
} else {
assertThat(Long.parseLong(doc.rootDoc().get("point")), equalTo(mortonHash(1.3, 1.2)));
assertThat(Long.parseLong(doc.rootDoc().get("point")), equalTo(mortonHash(1.2, 1.3)));
}
}
@ -435,14 +435,14 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
if (version.before(Version.V_2_2_0)) {
assertThat(doc.rootDoc().getFields("point")[0].stringValue(), equalTo("1.2,1.3"));
} else {
assertThat(Long.parseLong(doc.rootDoc().getFields("point")[0].stringValue()), equalTo(mortonHash(1.3, 1.2)));
assertThat(Long.parseLong(doc.rootDoc().getFields("point")[0].stringValue()), equalTo(mortonHash(1.2, 1.3)));
}
assertThat(doc.rootDoc().getFields("point.lat")[1].numericValue().doubleValue(), equalTo(1.4));
assertThat(doc.rootDoc().getFields("point.lon")[1].numericValue().doubleValue(), equalTo(1.5));
if (version.before(Version.V_2_2_0)) {
assertThat(doc.rootDoc().getFields("point")[1].stringValue(), equalTo("1.4,1.5"));
} else {
assertThat(Long.parseLong(doc.rootDoc().getFields("point")[1].stringValue()), equalTo(mortonHash(1.5, 1.4)));
assertThat(Long.parseLong(doc.rootDoc().getFields("point")[1].stringValue()), equalTo(mortonHash(1.4, 1.5)));
}
}
@ -466,7 +466,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
if (version.before(Version.V_2_2_0)) {
assertThat(doc.rootDoc().get("point"), equalTo("1.2,1.3"));
} else {
assertThat(Long.parseLong(doc.rootDoc().getFields("point")[0].stringValue()), equalTo(mortonHash(1.3, 1.2)));
assertThat(Long.parseLong(doc.rootDoc().getFields("point")[0].stringValue()), equalTo(mortonHash(1.2, 1.3)));
}
}
@ -492,7 +492,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
if (version.before(Version.V_2_2_0)) {
assertThat(doc.rootDoc().get("point"), equalTo("1.2,1.3"));
} else {
assertThat(Long.parseLong(doc.rootDoc().getFields("point")[0].stringValue()), equalTo(mortonHash(1.3, 1.2)));
assertThat(Long.parseLong(doc.rootDoc().getFields("point")[0].stringValue()), equalTo(mortonHash(1.2, 1.3)));
}
}
@ -521,14 +521,14 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
if (version.before(Version.V_2_2_0)) {
assertThat(doc.rootDoc().getFields("point")[0].stringValue(), equalTo("1.2,1.3"));
} else {
assertThat(Long.parseLong(doc.rootDoc().getFields("point")[0].stringValue()), equalTo(mortonHash(1.3, 1.2)));
assertThat(Long.parseLong(doc.rootDoc().getFields("point")[0].stringValue()), equalTo(mortonHash(1.2, 1.3)));
}
assertThat(doc.rootDoc().getFields("point.lat")[1].numericValue().doubleValue(), equalTo(1.4));
assertThat(doc.rootDoc().getFields("point.lon")[1].numericValue().doubleValue(), equalTo(1.5));
if (version.before(Version.V_2_2_0)) {
assertThat(doc.rootDoc().getFields("point")[1].stringValue(), equalTo("1.4,1.5"));
} else {
assertThat(Long.parseLong(doc.rootDoc().getFields("point")[1].stringValue()), equalTo(mortonHash(1.5, 1.4)));
assertThat(Long.parseLong(doc.rootDoc().getFields("point")[1].stringValue()), equalTo(mortonHash(1.4, 1.5)));
}
}
@ -552,7 +552,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
if (version.before(Version.V_2_2_0)) {
assertThat(doc.rootDoc().get("point"), equalTo("1.2,1.3"));
} else {
assertThat(Long.parseLong(doc.rootDoc().getFields("point")[0].stringValue()), equalTo(mortonHash(1.3, 1.2)));
assertThat(Long.parseLong(doc.rootDoc().getFields("point")[0].stringValue()), equalTo(mortonHash(1.2, 1.3)));
}
}
@ -577,7 +577,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
if (version.before(Version.V_2_2_0)) {
assertThat(doc.rootDoc().get("point"), equalTo("1.2,1.3"));
} else {
assertThat(Long.parseLong(doc.rootDoc().getFields("point")[0].stringValue()), equalTo(mortonHash(1.3, 1.2)));
assertThat(Long.parseLong(doc.rootDoc().getFields("point")[0].stringValue()), equalTo(mortonHash(1.2, 1.3)));
}
}
@ -603,7 +603,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
if (version.before(Version.V_2_2_0)) {
assertThat(doc.rootDoc().get("point"), equalTo("1.2,1.3"));
} else {
assertThat(Long.parseLong(doc.rootDoc().getFields("point")[0].stringValue()), equalTo(mortonHash(1.3, 1.2)));
assertThat(Long.parseLong(doc.rootDoc().getFields("point")[0].stringValue()), equalTo(mortonHash(1.2, 1.3)));
}
}
@ -632,14 +632,14 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
if (version.before(Version.V_2_2_0)) {
assertThat(doc.rootDoc().get("point"), equalTo("1.2,1.3"));
} else {
assertThat(Long.parseLong(doc.rootDoc().getFields("point")[0].stringValue()), equalTo(mortonHash(1.3, 1.2)));
assertThat(Long.parseLong(doc.rootDoc().getFields("point")[0].stringValue()), equalTo(mortonHash(1.2, 1.3)));
}
assertThat(doc.rootDoc().getFields("point.lat")[1].numericValue().doubleValue(), equalTo(1.4));
assertThat(doc.rootDoc().getFields("point.lon")[1].numericValue().doubleValue(), equalTo(1.5));
if (version.before(Version.V_2_2_0)) {
assertThat(doc.rootDoc().get("point"), equalTo("1.2,1.3"));
} else {
assertThat(Long.parseLong(doc.rootDoc().getFields("point")[1].stringValue()), equalTo(mortonHash(1.5, 1.4)));
assertThat(Long.parseLong(doc.rootDoc().getFields("point")[1].stringValue()), equalTo(mortonHash(1.4, 1.5)));
}
}

View File

@ -34,7 +34,7 @@ import org.elasticsearch.test.VersionUtils;
import java.util.Collection;
import static org.apache.lucene.spatial.util.GeoHashUtils.stringEncode;
import static org.elasticsearch.common.geo.GeoHashUtils.stringEncode;
import static org.apache.lucene.spatial.util.GeoEncodingUtils.mortonHash;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.instanceOf;
@ -72,7 +72,7 @@ public class GeohashMappingGeoPointTests extends ESSingleNodeTestCase {
if (version.before(Version.V_2_2_0)) {
assertThat(doc.rootDoc().get("point"), equalTo("1.2,1.3"));
} else {
assertThat(Long.parseLong(doc.rootDoc().get("point")), equalTo(mortonHash(1.3, 1.2)));
assertThat(Long.parseLong(doc.rootDoc().get("point")), equalTo(mortonHash(1.2, 1.3)));
}
}
@ -96,7 +96,7 @@ public class GeohashMappingGeoPointTests extends ESSingleNodeTestCase {
if (version.before(Version.V_2_2_0)) {
assertThat(doc.rootDoc().get("point"), equalTo("1.2,1.3"));
} else {
assertThat(Long.parseLong(doc.rootDoc().get("point")), equalTo(mortonHash(1.3, 1.2)));
assertThat(Long.parseLong(doc.rootDoc().get("point")), equalTo(mortonHash(1.2, 1.3)));
}
}

View File

@ -20,7 +20,7 @@
package org.elasticsearch.index.query;
import org.apache.lucene.search.Query;
import org.apache.lucene.spatial.geopoint.search.GeoPointDistanceRangeQuery;
import org.apache.lucene.spatial.geopoint.search.XGeoPointDistanceRangeQuery;
import org.apache.lucene.spatial.util.GeoDistanceUtils;
import org.apache.lucene.util.NumericUtils;
import org.elasticsearch.Version;
@ -57,7 +57,7 @@ public class GeoDistanceRangeQueryTests extends AbstractQueryTestCase<GeoDistanc
}
}
GeoPoint point = builder.point();
final double maxRadius = GeoDistanceUtils.maxRadialDistanceMeters(point.lon(), point.lat());
final double maxRadius = GeoDistanceUtils.maxRadialDistanceMeters(point.lat(), point.lon());
final int fromValueMeters = randomInt((int)(maxRadius*0.5));
final int toValueMeters = randomIntBetween(fromValueMeters + 1, (int)maxRadius);
DistanceUnit fromToUnits = randomFrom(DistanceUnit.values());
@ -169,8 +169,8 @@ public class GeoDistanceRangeQueryTests extends AbstractQueryTestCase<GeoDistanc
}
private void assertGeoPointQuery(GeoDistanceRangeQueryBuilder queryBuilder, Query query) throws IOException {
assertThat(query, instanceOf(GeoPointDistanceRangeQuery.class));
GeoPointDistanceRangeQuery geoQuery = (GeoPointDistanceRangeQuery) query;
assertThat(query, instanceOf(XGeoPointDistanceRangeQuery.class));
XGeoPointDistanceRangeQuery geoQuery = (XGeoPointDistanceRangeQuery) query;
assertThat(geoQuery.getField(), equalTo(queryBuilder.fieldName()));
if (queryBuilder.point() != null) {
GeoPoint expectedPoint = new GeoPoint(queryBuilder.point());

View File

@ -31,7 +31,7 @@ import org.elasticsearch.test.geo.RandomGeoGenerator;
import java.io.IOException;
import static org.hamcrest.Matchers.is;
import static org.apache.lucene.spatial.util.GeoHashUtils.stringEncode;
import static org.elasticsearch.common.geo.GeoHashUtils.stringEncode;
public class GeoPointParsingTests extends ESTestCase {
static double TOLERANCE = 1E-5;

View File

@ -24,9 +24,9 @@ import org.locationtech.spatial4j.distance.DistanceUtils;
import org.apache.lucene.spatial.prefix.tree.Cell;
import org.apache.lucene.spatial.prefix.tree.GeohashPrefixTree;
import org.apache.lucene.spatial.prefix.tree.QuadPrefixTree;
import org.apache.lucene.spatial.util.GeoHashUtils;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.geo.GeoHashUtils;
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.geo.GeoUtils;
import org.elasticsearch.common.xcontent.XContentHelper;

View File

@ -46,8 +46,8 @@ import java.util.List;
import java.util.Random;
import java.util.Set;
import static org.apache.lucene.spatial.util.GeoHashUtils.PRECISION;
import static org.apache.lucene.spatial.util.GeoHashUtils.stringEncode;
import static org.elasticsearch.common.geo.GeoHashUtils.PRECISION;
import static org.elasticsearch.common.geo.GeoHashUtils.stringEncode;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.elasticsearch.search.aggregations.AggregationBuilders.geohashGrid;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;

View File

@ -18,9 +18,9 @@
*/
package org.elasticsearch.search.aggregations.bucket;
import org.apache.lucene.spatial.util.GeoHashUtils;
import org.elasticsearch.action.index.IndexRequestBuilder;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.common.geo.GeoHashUtils;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.search.aggregations.Aggregator.SubAggCollectionMode;
import org.elasticsearch.search.aggregations.bucket.filter.Filter;

View File

@ -23,9 +23,9 @@ import com.carrotsearch.hppc.ObjectIntHashMap;
import com.carrotsearch.hppc.ObjectIntMap;
import com.carrotsearch.hppc.ObjectObjectHashMap;
import com.carrotsearch.hppc.ObjectObjectMap;
import org.apache.lucene.spatial.util.GeoHashUtils;
import org.elasticsearch.action.index.IndexRequestBuilder;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.common.geo.GeoHashUtils;
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.ToXContent;

View File

@ -30,7 +30,6 @@ import org.apache.lucene.spatial.prefix.tree.GeohashPrefixTree;
import org.apache.lucene.spatial.query.SpatialArgs;
import org.apache.lucene.spatial.query.SpatialOperation;
import org.apache.lucene.spatial.query.UnsupportedSpatialOperation;
import org.apache.lucene.spatial.util.GeoHashUtils;
import org.apache.lucene.spatial.util.GeoProjectionUtils;
import org.elasticsearch.Version;
import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder;
@ -41,6 +40,7 @@ import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.Priority;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.geo.GeoHashUtils;
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.geo.builders.CoordinatesBuilder;
import org.elasticsearch.common.geo.builders.LineStringBuilder;

View File

@ -19,12 +19,12 @@
package org.elasticsearch.search.sort;
import org.apache.lucene.spatial.util.GeoHashUtils;
import org.elasticsearch.Version;
import org.elasticsearch.action.index.IndexRequestBuilder;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.geo.GeoDistance;
import org.elasticsearch.common.geo.GeoHashUtils;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.DistanceUnit;
import org.elasticsearch.common.xcontent.XContentBuilder;

View File

@ -20,10 +20,10 @@ package org.elasticsearch.search.suggest;
import com.carrotsearch.randomizedtesting.generators.RandomStrings;
import org.apache.lucene.spatial.util.GeoHashUtils;
import org.apache.lucene.util.LuceneTestCase.SuppressCodecs;
import org.elasticsearch.action.index.IndexRequestBuilder;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.common.geo.GeoHashUtils;
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.Fuzziness;

View File

@ -38,7 +38,7 @@ import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import static org.apache.lucene.spatial.util.GeoHashUtils.addNeighbors;
import static org.elasticsearch.common.geo.GeoHashUtils.addNeighbors;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.elasticsearch.search.suggest.completion.CategoryContextMappingTests.assertContextSuggestFields;
import static org.hamcrest.Matchers.equalTo;

View File

@ -19,7 +19,6 @@
package org.elasticsearch.test.geo;
import org.apache.lucene.spatial.util.GeoUtils;
import org.elasticsearch.common.geo.GeoPoint;
import java.util.Random;
@ -42,8 +41,8 @@ public class RandomGeoGenerator {
assert pt != null && pt.length == 2;
// normalize min and max
double[] min = {GeoUtils.normalizeLon(minLon), GeoUtils.normalizeLat(minLat)};
double[] max = {GeoUtils.normalizeLon(maxLon), GeoUtils.normalizeLat(maxLat)};
double[] min = {normalizeLongitude(minLon), normalizeLatitude(minLat)};
double[] max = {normalizeLongitude(maxLon), normalizeLatitude(maxLat)};
final double[] tMin = new double[2];
final double[] tMax = new double[2];
tMin[0] = Math.min(min[0], max[0]);
@ -65,4 +64,28 @@ public class RandomGeoGenerator {
randomPointIn(r, minLon, minLat, maxLon, maxLat, pt);
return new GeoPoint(pt[1], pt[0]);
}
/** Puts latitude in range of -90 to 90. */
private static double normalizeLatitude(double latitude) {
if (latitude >= -90 && latitude <= 90) {
return latitude; //common case, and avoids slight double precision shifting
}
double off = Math.abs((latitude + 90) % 360);
return (off <= 180 ? off : 360-off) - 90;
}
/** Puts longitude in range of -180 to +180. */
private static double normalizeLongitude(double longitude) {
if (longitude >= -180 && longitude <= 180) {
return longitude; //common case, and avoids slight double precision shifting
}
double off = (longitude + 180) % 360;
if (off < 0) {
return 180 + off;
} else if (off == 0 && longitude > 0) {
return 180;
} else {
return -180 + off;
}
}
}

View File

@ -1 +0,0 @@
cd2388adc4b33c7530bbb8cd386e5c8c5c8e6aca

View File

@ -0,0 +1 @@
abb404e5a429f92f6b43ce11bef81e95e9bac6ab

View File

@ -1 +0,0 @@
f5bbdd01b98fab7c18b46e762de3e39221b0c8fc

View File

@ -0,0 +1 @@
daaa8f7a00915bf2c5dd3b74489f49736b50d2bd

View File

@ -1 +0,0 @@
18ad74518b34af7cfbd6c1e3a408920ff7665501

View File

@ -0,0 +1 @@
3aa09d24a630d2249fdd7056ce569a72ff54ebcf

View File

@ -1 +0,0 @@
dc0b211e31b8f1e0ee3a9e8f9c71b13fa088dabf

View File

@ -0,0 +1 @@
a7664be36b0c765fc1394a1d65aef6a224ba4fa4

View File

@ -1 +0,0 @@
bbd503396c08546f1b9e023e77dbf25bbb052d1c

View File

@ -0,0 +1 @@
b81357ee48293500dd0876d33b59cbdc5e03c5f5

View File

@ -1 +0,0 @@
96fd93d4a4192c42b0d56198b73a25440d4db2f7

View File

@ -0,0 +1 @@
f8cef5f84d7c37f04d1bd0e8aac9ffcb685624b2

View File

@ -1 +0,0 @@
ddd44a319d201ff73cd25be139bd3175226ab5a5

View File

@ -0,0 +1 @@
ba02cc33668273258f66c61ffd18461e2a96c9d7

View File

@ -1 +0,0 @@
07d943ecdc552632bdca8f2772fd081a02cbf589

View File

@ -0,0 +1 @@
92bcb9af0b849addd6c4bf5a740ded646563e0f5

View File

@ -1 +0,0 @@
66c72fd979f54480af75d01719ef25da62c0a8b6

View File

@ -0,0 +1 @@
6d88cb7b8666ea809f8c894996b682ceb9581d07

View File

@ -1 +0,0 @@
8992204f922fe52af557e691cbfb4c54f92b76bd

View File

@ -0,0 +1 @@
89ac493f4bc038632abc1b4df9737bc6d433b052

View File

@ -1 +0,0 @@
8565264e00bc43e1226ff0d2e986dbb26d353ce2

View File

@ -0,0 +1 @@
5ab7f57a1ab03aad36e15b75dbc391c73866d347

View File

@ -1 +0,0 @@
98fc1bb7e005f33c388be66486341ad8168b72eb

View File

@ -0,0 +1 @@
4e44de59bd0afb244bf7acd788e00c438e072ba1

View File

@ -1 +0,0 @@
b5b651b0adbc2f404e091817282dabd7b432c677

View File

@ -0,0 +1 @@
886185979fc9e548fb125db80707f96b288fb8de

View File

@ -1 +0,0 @@
334e194bf83c75f0ae165e3e72b6fa35c5d636c5

View File

@ -0,0 +1 @@
898ce3070d5f833430e02dfef99bfbd67c38845d

View File

@ -1 +0,0 @@
89c46e9601cf8fb9acf77398838f8710c9e44053

View File

@ -0,0 +1 @@
46a915473878ab5379f41293fbec6da49ba706e1

View File

@ -1 +0,0 @@
f36f8010c9fec7342d34bece819c13de5f241135

View File

@ -0,0 +1 @@
06cf4d232592ce7dcb8d8ae10dcde13904f6bfb2

View File

@ -1 +0,0 @@
1378905632ff45a9887b267c4b30f7adef415ca4

View File

@ -0,0 +1 @@
7835cfa15f52676eec0dcd2ca8cf4f53f8aefd5a

View File

@ -1 +0,0 @@
49acd38e206d9c2fe28269fcba9b752d3b605e0e

View File

@ -0,0 +1 @@
d56f94ae9e67297db77c59d105d9ac0ba0501593

View File

@ -1 +0,0 @@
7c11723d7d4dc3b1c9bf80089cfc2de7bc8a2b6e

View File

@ -0,0 +1 @@
848fbed30403e67d47d27b50eb28c31f70e969a2

View File

@ -1 +0,0 @@
654d961bd4975a3cb13388d86d72fefb6994f659

View File

@ -0,0 +1 @@
c0acd2a995e1c9adb5165554b3506828cf544986

View File

@ -1 +0,0 @@
0f408ac498782617a0f80d6a295d82f6d3609499

View File

@ -0,0 +1 @@
71aa64f1a206da34a1e72607b8d6457ae2e127e8