Enable GeoPointV2 with backward compatibility testing
This commit removes all norelease markers and cuts over to the Lucene 5.4 GeoPointField type. Included are randomized updates to the unit and integration test suites to ensure full backward compatibility with existing geo_point indexes.
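The cutover itself is mechanical: every temporary onOrBefore(Version.CURRENT) placeholder becomes a real check against the version the index was created with. A minimal sketch of that gate, assuming a hypothetical helper name (in the diff below the check is inlined at each call site rather than factored out like this):

    import org.elasticsearch.Version;

    final class GeoPointBwc {
        // Indexes created before 2.2.0 keep the legacy geo_point encoding
        // ("lat,lon" string plus optional lat/lon sub-fields); newer indexes
        // use the Lucene 5.4 GeoPointField (a morton-hashed long).
        static boolean useLegacyGeoPoint(Version indexCreatedVersion) {
            return indexCreatedVersion.before(Version.V_2_2_0);
        }
    }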
parent 720ebe347d
commit dc77815744
@@ -310,8 +310,8 @@ public class GeoUtils {
public static void normalizePoint(double[] lonLat, boolean normLon, boolean normLat) {
assert lonLat != null && lonLat.length == 2;

-normLat = normLat && (lonLat[1] > 90 || lonLat[1] <= -90);
-normLon = normLon && (lonLat[0] > 180 || lonLat[0] <= -180);
+normLat = normLat && (lonLat[1] > 90 || lonLat[1] < -90);
+normLon = normLon && (lonLat[0] > 180 || lonLat[0] < -180);

if (normLat) {
lonLat[1] = centeredModulus(lonLat[1], 360);
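With the comparison loosened from <= to <, a latitude of exactly -90 or a longitude of exactly -180 now counts as already normalized instead of being wrapped. A small worked example, assuming the lonLat array is ordered {lon, lat} as in the hunk above (the GeoUtilsTests change further down encodes the same expectation):

    double[] lonLat = new double[] {-180.0, -90.0};
    GeoUtils.normalizePoint(lonLat, true, true);
    // stays {-180.0, -90.0}; the old <= comparison remapped the longitude to +180.0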
@@ -37,7 +37,7 @@ final class SingletonMultiGeoPointValues extends MultiGeoPointValues {
@Override
public void setDocument(int docID) {
value = in.get(docID);
-if (value.lat() == 0 && value.lon() == 0 && docsWithField != null && !docsWithField.get(docID)) {
+if (value.lat() == Double.NaN && value.lon() == Double.NaN || (docsWithField != null && !docsWithField.get(docID))) {
count = 0;
} else {
count = 1;
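One caveat worth noting when reading the new check: in Java a comparison with Double.NaN via == is always false under IEEE-754 rules, so a NaN sentinel is normally detected with Double.isNaN. A two-line illustration:

    double lat = Double.NaN;
    boolean viaEquality = (lat == Double.NaN);   // always false
    boolean viaIsNaN = Double.isNaN(lat);        // true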
@@ -85,8 +85,7 @@ public abstract class AbstractGeoPointDVIndexFieldData extends DocValuesIndexFie
CircuitBreakerService breakerService, MapperService mapperService) {
// Ignore breaker
return new GeoPointDVIndexFieldData(indexSettings.getIndex(), fieldType.names(), fieldType.fieldDataType(),
-// norelease cut over to .before(Version.V_2_2_0) once GeoPointFieldV2 is completely merged
-indexSettings.getIndexVersionCreated().onOrBefore(Version.CURRENT));
+indexSettings.getIndexVersionCreated().before(Version.V_2_2_0));
}
}
}

@@ -74,7 +74,7 @@ public abstract class GeoPointArrayAtomicFieldData extends AbstractAtomicGeoPoin
public MultiGeoPointValues getGeoPointValues() {
final RandomAccessOrds ords = ordinals.ordinals();
final SortedDocValues singleOrds = DocValues.unwrapSingleton(ords);
-final GeoPoint point = new GeoPoint();
+final GeoPoint point = new GeoPoint(Double.NaN, Double.NaN);
if (singleOrds != null) {
final GeoPointValues values = new GeoPointValues() {
@Override

@@ -83,8 +83,7 @@ public abstract class GeoPointArrayAtomicFieldData extends AbstractAtomicGeoPoin
if (ord >= 0) {
return point.resetFromIndexHash(indexedPoints.get(ord));
}
-// todo: same issue as in ParentChildIndexFieldData, handle issue upstream?
-return null;
+return point.reset(Double.NaN, Double.NaN);
}
};
return FieldData.singleton(values, DocValues.docsWithValue(singleOrds, maxDoc));

@@ -139,8 +138,11 @@ public abstract class GeoPointArrayAtomicFieldData extends AbstractAtomicGeoPoin
final GeoPointValues values = new GeoPointValues() {
@Override
public GeoPoint get(int docID) {
+if (set == null || set.get(docID)) {
return point.resetFromIndexHash(indexedPoint.get(docID));
+}
+return point.reset(Double.NaN, Double.NaN);
}
};
return FieldData.singleton(values, set);
}
@@ -25,6 +25,7 @@ import org.apache.lucene.index.RandomAccessOrds;
import org.apache.lucene.index.Terms;
import org.apache.lucene.util.BitSet;
import org.elasticsearch.Version;
+import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.breaker.CircuitBreaker;
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.util.BigArrays;

@@ -54,8 +55,9 @@ public class GeoPointArrayIndexFieldData extends AbstractIndexGeoPointFieldData
public IndexFieldData<?> build(IndexSettings indexSettings, MappedFieldType fieldType, IndexFieldDataCache cache,
CircuitBreakerService breakerService, MapperService mapperService) {
return new GeoPointArrayIndexFieldData(indexSettings, fieldType.names(), fieldType.fieldDataType(), cache,
-// norelease change to .before(Version.V_2_2_0) once GeoPointFieldV2 is completely merged
-breakerService, indexSettings.getIndexVersionCreated().onOrBefore(Version.CURRENT));
+breakerService, fieldType.fieldDataType().getSettings()
+.getAsVersion(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).before(Version.V_2_2_0) ||
+indexSettings.getIndexVersionCreated().before(Version.V_2_2_0));
}
}

@@ -83,9 +83,9 @@ public abstract class GeoPointArrayLegacyAtomicFieldData extends AbstractAtomicG
public GeoPoint get(int docID) {
final int ord = singleOrds.getOrd(docID);
if (ord >= 0) {
-point.reset(lat.get(ord), lon.get(ord));
+return point.reset(lat.get(ord), lon.get(ord));
}
-return point;
+return point.reset(Double.NaN, Double.NaN);
}
};
return FieldData.singleton(values, DocValues.docsWithValue(singleOrds, maxDoc));

@@ -96,8 +96,10 @@ public abstract class GeoPointArrayLegacyAtomicFieldData extends AbstractAtomicG
@Override
public GeoPoint valueAt(int index) {
final long ord = ords.ordAt(index);
-point.reset(lat.get(ord), lon.get(ord));
-return point;
+if (ord >= 0) {
+return point.reset(lat.get(ord), lon.get(ord));
+}
+return point.reset(Double.NaN, Double.NaN);
}

@Override

@@ -150,8 +152,10 @@ public abstract class GeoPointArrayLegacyAtomicFieldData extends AbstractAtomicG
final GeoPointValues values = new GeoPointValues() {
@Override
public GeoPoint get(int docID) {
-point.reset(lat.get(docID), lon.get(docID));
-return point;
+if (set == null || set.get(docID)) {
+return point.reset(lat.get(docID), lon.get(docID));
+}
+return point.reset(Double.NaN, Double.NaN);
}
};
return FieldData.singleton(values, set);
@@ -68,7 +68,7 @@ final class GeoPointDVAtomicFieldData extends AbstractAtomicGeoPointFieldData {
final int previousLength = points.length;
points = Arrays.copyOf(points, ArrayUtil.oversize(count, RamUsageEstimator.NUM_BYTES_OBJECT_REF));
for (int i = previousLength; i < points.length; ++i) {
-points[i] = new GeoPoint();
+points[i] = new GeoPoint(Double.NaN, Double.NaN);
}
}
for (int i=0; i<count; ++i) {

@@ -75,7 +75,7 @@ final class GeoPointLegacyDVAtomicFieldData extends AbstractAtomicGeoPointFieldD
final int previousLength = points.length;
points = Arrays.copyOf(points, ArrayUtil.oversize(count, RamUsageEstimator.NUM_BYTES_OBJECT_REF));
for (int i = previousLength; i < points.length; ++i) {
-points[i] = new GeoPoint();
+points[i] = new GeoPoint(Double.NaN, Double.NaN);
}
}
for (int i = 0; i < count; ++i) {
@@ -203,8 +203,7 @@ public abstract class BaseGeoPointFieldMapper extends FieldMapper implements Arr
@Override
public Mapper.Builder<?, ?> parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
Builder builder;
-// norelease update to .before(Version.V_2_2_0 once GeoPointFieldV2 is fully merged
-if (parserContext.indexVersionCreated().onOrBefore(Version.CURRENT)) {
+if (parserContext.indexVersionCreated().before(Version.V_2_2_0)) {
builder = new GeoPointFieldMapperLegacy.Builder(name);
} else {
builder = new GeoPointFieldMapper.Builder(name);
@@ -264,9 +264,9 @@ public class GeoBoundingBoxQueryBuilder extends AbstractQueryBuilder<GeoBounding
}
}

-// norelease cut over to .before(Version.2_2_0) once GeoPointFieldV2 is fully merged
-if (context.indexVersionCreated().after(Version.CURRENT)) {
-return new GeoPointInBBoxQuery(fieldType.names().fullName(), topLeft.lon(), bottomRight.lat(), bottomRight.lon(), topLeft.lat());
+if (context.indexVersionCreated().onOrAfter(Version.V_2_2_0)) {
+return new GeoPointInBBoxQuery(fieldType.names().fullName(), luceneTopLeft.lon(), luceneBottomRight.lat(),
+luceneBottomRight.lon(), luceneTopLeft.lat());
}

Query query;
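Judging from the call in the hunk above, the new Lucene query takes the field name followed by minLon, minLat, maxLon, maxLat. A minimal sketch under that assumption, with an illustrative field name and coordinates:

    import org.apache.lucene.search.GeoPointInBBoxQuery;
    import org.apache.lucene.search.Query;

    class BBoxSketch {
        static Query smallBox() {
            // (minLon, minLat) comes from topLeft.lon() / bottomRight.lat(),
            // (maxLon, maxLat) from bottomRight.lon() / topLeft.lat()
            return new GeoPointInBBoxQuery("location", -74.1, 40.5, -73.7, 40.9);
        }
    }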
@@ -225,8 +225,7 @@ public class GeoDistanceQueryBuilder extends AbstractQueryBuilder<GeoDistanceQue

double normDistance = geoDistance.normalize(this.distance, DistanceUnit.DEFAULT);

-// norelease cut over to .before(Version.2_2_0) once GeoPointFieldV2 is fully merged
-if (shardContext.indexVersionCreated().onOrBefore(Version.CURRENT)) {
+if (shardContext.indexVersionCreated().before(Version.V_2_2_0)) {
GeoPointFieldMapperLegacy.GeoPointFieldType geoFieldType = ((GeoPointFieldMapperLegacy.GeoPointFieldType) fieldType);
IndexGeoPointFieldData indexFieldData = shardContext.getForField(fieldType);
return new GeoDistanceRangeQuery(center, null, normDistance, true, false, geoDistance, geoFieldType, indexFieldData, optimizeBbox);
@@ -221,6 +221,7 @@ public class GeoDistanceRangeQueryBuilder extends AbstractQueryBuilder<GeoDistan
}

final boolean indexCreatedBeforeV2_0 = context.indexVersionCreated().before(Version.V_2_0_0);
+final boolean indexCreatedBeforeV2_2 = context.indexVersionCreated().before(Version.V_2_2_0);
// validation was not available prior to 2.x, so to support bwc
// percolation queries we only ignore_malformed on 2.x created indexes
if (!indexCreatedBeforeV2_0 && !GeoValidationMethod.isIgnoreMalformed(validationMethod)) {

@@ -237,15 +238,17 @@ public class GeoDistanceRangeQueryBuilder extends AbstractQueryBuilder<GeoDistan
GeoUtils.normalizePoint(point, true, true);
}

-Double fromValue = null;
-Double toValue = null;
+Double fromValue;
+Double toValue;
if (from != null) {
if (from instanceof Number) {
fromValue = unit.toMeters(((Number) from).doubleValue());
} else {
fromValue = DistanceUnit.parse((String) from, unit, DistanceUnit.DEFAULT);
}
+if (indexCreatedBeforeV2_2 == true) {
fromValue = geoDistance.normalize(fromValue, DistanceUnit.DEFAULT);
+}
} else {
fromValue = new Double(0);
}

@@ -256,13 +259,14 @@ public class GeoDistanceRangeQueryBuilder extends AbstractQueryBuilder<GeoDistan
} else {
toValue = DistanceUnit.parse((String) to, unit, DistanceUnit.DEFAULT);
}
+if (indexCreatedBeforeV2_2 == true) {
toValue = geoDistance.normalize(toValue, DistanceUnit.DEFAULT);
+}
} else {
toValue = GeoUtils.maxRadialDistance(point);
}

-// norelease cut over to .before(Version.2_2_0) once GeoPointFieldV2 is fully merged
-if (context.indexVersionCreated().onOrBefore(Version.CURRENT)) {
+if (indexCreatedBeforeV2_2 == true) {
GeoPointFieldMapperLegacy.GeoPointFieldType geoFieldType = ((GeoPointFieldMapperLegacy.GeoPointFieldType) fieldType);
IndexGeoPointFieldData indexFieldData = context.getForField(fieldType);
return new GeoDistanceRangeQuery(point, fromValue, toValue, includeLower, includeUpper, geoDistance, geoFieldType,
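The from/to handling above accepts either a number, interpreted in the query's unit, or a string carrying its own unit suffix, and only pre-2.2 indexes still run the result through geoDistance.normalize. A sketch of just the unit conversion, using the same DistanceUnit calls as the hunk (the wrapper class and method name are illustrative):

    import org.elasticsearch.common.unit.DistanceUnit;

    class DistanceParsingSketch {
        // value may be a Number (interpreted in "unit") or a String such as "25km" or "1500m"
        static double toMeters(Object value, DistanceUnit unit) {
            if (value instanceof Number) {
                return unit.toMeters(((Number) value).doubleValue());
            }
            return DistanceUnit.parse((String) value, unit, DistanceUnit.DEFAULT);
        }
    }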
@@ -136,8 +136,7 @@ public class GeoPolygonQueryBuilder extends AbstractQueryBuilder<GeoPolygonQuery
}
}

-// norelease cut over to .before(Version.2_2_0) once GeoPointFieldV2 is fully merged
-if (context.indexVersionCreated().onOrBefore(Version.CURRENT)) {
+if (context.indexVersionCreated().before(Version.V_2_2_0)) {
IndexGeoPointFieldData indexFieldData = context.getForField(fieldType);
return new GeoPolygonQuery(indexFieldData, shell.toArray(new GeoPoint[shellSize]));
}
@@ -19,6 +19,7 @@

package org.elasticsearch.search.suggest.completion.context;

+import org.apache.lucene.document.StringField;
import org.apache.lucene.index.DocValuesType;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.util.GeoHashUtils;

@@ -193,7 +194,11 @@ public class GeoContextMapping extends ContextMapping {
}
} else {
for (IndexableField field : fields) {
+if (field instanceof StringField) {
spare.resetFromString(field.stringValue());
+} else {
+spare.resetFromIndexHash(Long.parseLong(field.stringValue()));
+}
geohashes.add(spare.geohash());
}
}
@@ -28,6 +28,7 @@ import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.store.RAMDirectory;
import org.elasticsearch.Version;
+import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.IndexService;

@@ -43,6 +44,7 @@ import org.elasticsearch.index.mapper.internal.ParentFieldMapper;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.indices.fielddata.cache.IndicesFieldDataCache;
import org.elasticsearch.test.ESSingleNodeTestCase;
+import org.elasticsearch.test.VersionUtils;
import org.junit.After;
import org.junit.Before;

@@ -94,8 +96,7 @@ public abstract class AbstractFieldDataTestCase extends ESSingleNodeTestCase {
} else if (type.getType().equals("byte")) {
fieldType = MapperBuilders.byteField(fieldName).docValues(docValues).fieldDataSettings(type.getSettings()).build(context).fieldType();
} else if (type.getType().equals("geo_point")) {
-// norelease update to .before(Version.V_2_2_0 once GeoPointFieldV2 is fully merged
-if (indexService.getIndexSettings().getIndexVersionCreated().onOrBefore(Version.CURRENT)) {
+if (indexService.getIndexSettings().getIndexVersionCreated().before(Version.V_2_2_0)) {
fieldType = new GeoPointFieldMapperLegacy.Builder(fieldName).docValues(docValues).fieldDataSettings(type.getSettings()).build(context).fieldType();
} else {
fieldType = new GeoPointFieldMapper.Builder(fieldName).docValues(docValues).fieldDataSettings(type.getSettings()).build(context).fieldType();

@@ -112,7 +113,9 @@ public abstract class AbstractFieldDataTestCase extends ESSingleNodeTestCase {

@Before
public void setup() throws Exception {
-Settings settings = Settings.builder().put("index.fielddata.cache", "none").build();
+Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT);
+Settings settings = Settings.builder().put("index.fielddata.cache", "none")
+.put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
indexService = createIndex("test", settings);
mapperService = indexService.mapperService();
indicesFieldDataCache = getInstanceFromNode(IndicesFieldDataCache.class);
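The test changes follow one pattern: pick a random index-created version, pin it into the index settings, and let the assertions branch on it, so both the legacy and the GeoPointV2 paths are exercised across runs. A sketch of that setup, assembled from the calls used above (the wrapper class is illustrative):

    import org.elasticsearch.Version;
    import org.elasticsearch.cluster.metadata.IndexMetaData;
    import org.elasticsearch.common.settings.Settings;
    import org.elasticsearch.test.VersionUtils;

    import java.util.Random;

    class RandomBwcSettingsSketch {
        static Settings randomCreatedVersionSettings(Random random) {
            Version version = VersionUtils.randomVersionBetween(random, Version.V_2_0_0, Version.CURRENT);
            return Settings.builder()
                    .put(IndexMetaData.SETTING_VERSION_CREATED, version)
                    .build();
        }
    }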
@@ -36,19 +36,12 @@ import static org.hamcrest.Matchers.*;
*
*/
public abstract class AbstractGeoFieldDataTestCase extends AbstractFieldDataImplTestCase {
-protected Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT);

@Override
protected abstract FieldDataType getFieldDataType();

-protected Settings.Builder getFieldDataSettings() {
-return Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version);
-}

protected Field randomGeoPointField(String fieldName, Field.Store store) {
GeoPoint point = randomPoint(random());
-// norelease move to .before(Version.2_2_0) once GeoPointV2 is fully merged
-if (version.onOrBefore(Version.CURRENT)) {
+if (indexService.getIndexSettings().getIndexVersionCreated().before(Version.V_2_2_0)) {
return new StringField(fieldName, point.lat()+","+point.lon(), store);
}
return new GeoPointField(fieldName, point.lon(), point.lat(), store);

@@ -90,8 +83,9 @@ public abstract class AbstractGeoFieldDataTestCase extends AbstractFieldDataImpl
} else {
assertThat(docCount, greaterThan(0));
for (int i = 0; i < docCount; ++i) {
-assertThat(values.valueAt(i).lat(), allOf(greaterThanOrEqualTo(GeoUtils.MIN_LAT_INCL), lessThanOrEqualTo(GeoUtils.MAX_LAT_INCL)));
-assertThat(values.valueAt(i).lat(), allOf(greaterThanOrEqualTo(GeoUtils.MIN_LON_INCL), lessThanOrEqualTo(GeoUtils.MAX_LON_INCL)));
+final GeoPoint point = values.valueAt(i);
+assertThat(point.lat(), allOf(greaterThanOrEqualTo(GeoUtils.MIN_LAT_INCL), lessThanOrEqualTo(GeoUtils.MAX_LAT_INCL)));
+assertThat(point.lon(), allOf(greaterThanOrEqualTo(GeoUtils.MIN_LON_INCL), lessThanOrEqualTo(GeoUtils.MAX_LON_INCL)));
}
}
}
@@ -570,7 +570,7 @@ public class DuelFieldDataTests extends AbstractFieldDataTestCase {
for (int i = 0; i < numDocs; ++i) {
leftValues.setDocument(i);
final int numValues = leftValues.count();
-rightValues.setDocument(i);;
+rightValues.setDocument(i);
assertEquals(numValues, rightValues.count());
List<GeoPoint> leftPoints = new ArrayList<>();
List<GeoPoint> rightPoints = new ArrayList<>();

@@ -580,6 +580,8 @@ public class DuelFieldDataTests extends AbstractFieldDataTestCase {
GeoPoint r = rightValues.valueAt(j);
rightPoints.add(new GeoPoint(r.getLat(), r.getLon()));
}
+// missing values were treated as 0,0 which are valid geopoints, this now correctly tests for missing values
+if (leftPoints.isEmpty() == false) {
for (GeoPoint l : leftPoints) {
assertTrue("Couldn't find " + l + " among " + rightPoints, contains(l, rightPoints, precision));
}

@@ -588,6 +590,7 @@ public class DuelFieldDataTests extends AbstractFieldDataTestCase {
}
+}
}
}

private static boolean contains(GeoPoint point, List<GeoPoint> set, Distance precision) {
for (GeoPoint r : set) {
@@ -34,7 +34,7 @@ public class GeoFieldDataTests extends AbstractGeoFieldDataTestCase {

@Override
protected FieldDataType getFieldDataType() {
-return new FieldDataType("geo_point", getFieldDataSettings());
+return new FieldDataType("geo_point");
}

@Override
@@ -102,8 +102,7 @@ public class ExternalMapper extends FieldMapper {
context.path().add(name);
BinaryFieldMapper binMapper = binBuilder.build(context);
BooleanFieldMapper boolMapper = boolBuilder.build(context);
-// norelease cut over to .before(Version.2_2_0) once GeoPointFieldV2 is fully merged
-BaseGeoPointFieldMapper pointMapper = (context.indexCreatedVersion().onOrBefore(Version.CURRENT)) ?
+BaseGeoPointFieldMapper pointMapper = (context.indexCreatedVersion().before(Version.V_2_2_0)) ?
legacyPointBuilder.build(context) : pointBuilder.build(context);
GeoShapeFieldMapper shapeMapper = shapeBuilder.build(context);
FieldMapper stringMapper = (FieldMapper)stringBuilder.build(context);
@@ -65,8 +65,7 @@ public class SimpleExternalMappingTests extends ESSingleNodeTestCase {
assertThat(doc.rootDoc().getField("field.bool").stringValue(), is("T"));

assertThat(doc.rootDoc().getField("field.point"), notNullValue());
-// norelease update to .before(Version.V_2_2_0 once GeoPointFieldV2 is fully merged
-if (version.onOrBefore(Version.CURRENT)) {
+if (version.before(Version.V_2_2_0)) {
assertThat(doc.rootDoc().getField("field.point").stringValue(), is("42.0,51.0"));
} else {
assertThat(Long.parseLong(doc.rootDoc().getField("field.point").stringValue()), is(GeoUtils.mortonHash(51.0, 42.0)));

@@ -119,8 +118,7 @@ public class SimpleExternalMappingTests extends ESSingleNodeTestCase {
assertThat(doc.rootDoc().getField("field.bool").stringValue(), is("T"));

assertThat(doc.rootDoc().getField("field.point"), notNullValue());
-// norelease update to .before(Version.V_2_2_0 once GeoPointFieldV2 is fully merged
-if (version.onOrBefore(Version.CURRENT)) {
+if (version.before(Version.V_2_2_0)) {
assertThat(doc.rootDoc().getField("field.point").stringValue(), is("42.0,51.0"));
} else {
assertThat(Long.parseLong(doc.rootDoc().getField("field.point").stringValue()), is(GeoUtils.mortonHash(51.0, 42.0)));

@@ -179,8 +177,7 @@ public class SimpleExternalMappingTests extends ESSingleNodeTestCase {
assertThat(doc.rootDoc().getField("field.bool").stringValue(), is("T"));

assertThat(doc.rootDoc().getField("field.point"), notNullValue());
-// norelease update to .before(Version.V_2_2_0 once GeoPointFieldV2 is fully merged
-if (version.onOrBefore(Version.CURRENT)) {
+if (version.before(Version.V_2_2_0)) {
assertThat(doc.rootDoc().getField("field.point").stringValue(), is("42.0,51.0"));
} else {
assertThat(Long.parseLong(doc.rootDoc().getField("field.point").stringValue()), is(GeoUtils.mortonHash(51.0, 42.0)));
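The assertions in the mapper tests above and below hinge on how the point is stored: legacy indexes keep a "lat,lon" string, while 2.2+ indexes store GeoUtils.mortonHash(lon, lat) as a long (note the lon-first argument order, taken from the assertions themselves). A small illustration of the two encodings for the same point:

    import org.elasticsearch.common.geo.GeoUtils;

    class PointEncodingSketch {
        static void show() {
            String legacy = "42.0,51.0";                   // lat,lon as a string
            long hashed = GeoUtils.mortonHash(51.0, 42.0); // lon, lat interleaved into a long
            System.out.println(legacy + " -> " + hashed);
        }
    }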
@@ -67,8 +67,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
.endObject()
.bytes());

-// norelease update to .before(Version.V_2_2_0 once GeoPointFieldV2 is fully merged
-boolean indexCreatedBefore22 = version.onOrBefore(Version.CURRENT);
+boolean indexCreatedBefore22 = version.before(Version.V_2_2_0);
assertThat(doc.rootDoc().getField("point.lat"), notNullValue());
final boolean stored = indexCreatedBefore22 == false;
assertThat(doc.rootDoc().getField("point.lat").fieldType().stored(), is(stored));

@@ -168,8 +167,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
// default to normalize
XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("point").field("type", "geo_point");
-// norelease update to .before(Version.V_2_2_0 once GeoPointFieldV2 is fully merged
-if (version.onOrBefore(Version.CURRENT)) {
+if (version.before(Version.V_2_2_0)) {
mapping.field("coerce", true);
}
mapping.field("ignore_malformed", true).endObject().endObject().endObject().endObject();

@@ -183,8 +181,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
.endObject()
.bytes());

-// norelease update to .before(Version.V_2_2_0 once GeoPointFieldV2 is fully merged
-if (version.onOrBefore(Version.CURRENT)) {
+if (version.before(Version.V_2_2_0)) {
assertThat(doc.rootDoc().get("point"), equalTo("89.0,1.0"));
} else {
assertThat(Long.parseLong(doc.rootDoc().get("point")), equalTo(GeoUtils.mortonHash(1.0, 89.0)));

@@ -196,8 +193,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
.endObject()
.bytes());

-// norelease update to .before(Version.V_2_2_0 once GeoPointFieldV2 is fully merged
-if (version.onOrBefore(Version.CURRENT)) {
+if (version.before(Version.V_2_2_0)) {
assertThat(doc.rootDoc().get("point"), equalTo("-89.0,-1.0"));
} else {
assertThat(Long.parseLong(doc.rootDoc().get("point")), equalTo(GeoUtils.mortonHash(-1.0, -89.0)));

@@ -209,8 +205,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
.endObject()
.bytes());

-// norelease update to .before(Version.V_2_2_0 once GeoPointFieldV2 is fully merged
-if (version.onOrBefore(Version.CURRENT)) {
+if (version.before(Version.V_2_2_0)) {
assertThat(doc.rootDoc().get("point"), equalTo("-1.0,-179.0"));
} else {
assertThat(Long.parseLong(doc.rootDoc().get("point")), equalTo(GeoUtils.mortonHash(-179.0, -1.0)));

@@ -221,8 +216,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT);
XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true);
-// norelease update to .before(Version.V_2_2_0 once GeoPointFieldV2 is fully merged
-if (version.onOrBefore(Version.CURRENT)) {
+if (version.before(Version.V_2_2_0)) {
mapping.field("coerce", false);
}
mapping.field("ignore_malformed", false).endObject().endObject().endObject().endObject().string();

@@ -285,8 +279,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT);
XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true);
-// norelease update to .before(Version.V_2_2_0 once GeoPointFieldV2 is fully merged
-if (version.onOrBefore(Version.CURRENT)) {
+if (version.before(Version.V_2_2_0)) {
mapping.field("coerce", false);
}
mapping.field("ignore_malformed", true).endObject().endObject().endObject().endObject().string();

@@ -345,8 +338,7 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
assertThat(doc.rootDoc().getField("point.lon"), notNullValue());
assertThat(doc.rootDoc().getField("point.lon").numericValue().doubleValue(), equalTo(1.3));
assertThat(doc.rootDoc().getField("point.geohash"), nullValue());
-// norelease update to .before(Version.V_2_2_0 once GeoPointFieldV2 is fully merged
-if (version.onOrBefore(Version.CURRENT)) {
+if (version.before(Version.V_2_2_0)) {
assertThat(doc.rootDoc().get("point"), equalTo("1.2,1.3"));
} else {
assertThat(Long.parseLong(doc.rootDoc().get("point")), equalTo(GeoUtils.mortonHash(1.3, 1.2)));
@@ -375,16 +367,14 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
assertThat(doc.rootDoc().getFields("point.lon").length, equalTo(2));
assertThat(doc.rootDoc().getFields("point.lat")[0].numericValue().doubleValue(), equalTo(1.2));
assertThat(doc.rootDoc().getFields("point.lon")[0].numericValue().doubleValue(), equalTo(1.3));
-// norelease update to .before(Version.V_2_2_0 once GeoPointFieldV2 is fully merged
-if (version.onOrBefore(Version.CURRENT)) {
+if (version.before(Version.V_2_2_0)) {
assertThat(doc.rootDoc().getFields("point")[0].stringValue(), equalTo("1.2,1.3"));
} else {
assertThat(Long.parseLong(doc.rootDoc().getFields("point")[0].stringValue()), equalTo(GeoUtils.mortonHash(1.3, 1.2)));
}
assertThat(doc.rootDoc().getFields("point.lat")[1].numericValue().doubleValue(), equalTo(1.4));
assertThat(doc.rootDoc().getFields("point.lon")[1].numericValue().doubleValue(), equalTo(1.5));
-// norelease update to .before(Version.V_2_2_0 once GeoPointFieldV2 is fully merged
-if (version.onOrBefore(Version.CURRENT)) {
+if (version.before(Version.V_2_2_0)) {
assertThat(doc.rootDoc().getFields("point")[1].stringValue(), equalTo("1.4,1.5"));
} else {
assertThat(Long.parseLong(doc.rootDoc().getFields("point")[1].stringValue()), equalTo(GeoUtils.mortonHash(1.5, 1.4)));

@@ -408,11 +398,10 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {

assertThat(doc.rootDoc().getField("point.lat"), notNullValue());
assertThat(doc.rootDoc().getField("point.lon"), notNullValue());
-// norelease update to .before(Version.V_2_2_0 once GeoPointFieldV2 is fully merged
-if (version.onOrBefore(Version.CURRENT)) {
+if (version.before(Version.V_2_2_0)) {
assertThat(doc.rootDoc().get("point"), equalTo("1.2,1.3"));
} else {
-assertThat(Long.parseLong(doc.rootDoc().getFields("point")[1].stringValue()), equalTo(GeoUtils.mortonHash(1.3, 1.2)));
+assertThat(Long.parseLong(doc.rootDoc().getFields("point")[0].stringValue()), equalTo(GeoUtils.mortonHash(1.3, 1.2)));
}
}


@@ -435,11 +424,10 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
assertThat(doc.rootDoc().getField("point.lat").numericValue().doubleValue(), equalTo(1.2));
assertThat(doc.rootDoc().getField("point.lon"), notNullValue());
assertThat(doc.rootDoc().getField("point.lon").numericValue().doubleValue(), equalTo(1.3));
-// norelease update to .before(Version.V_2_2_0 once GeoPointFieldV2 is fully merged
-if (version.onOrBefore(Version.CURRENT)) {
+if (version.before(Version.V_2_2_0)) {
assertThat(doc.rootDoc().get("point"), equalTo("1.2,1.3"));
} else {
-assertThat(Long.parseLong(doc.rootDoc().getFields("point")[1].stringValue()), equalTo(GeoUtils.mortonHash(1.3, 1.2)));
+assertThat(Long.parseLong(doc.rootDoc().getFields("point")[0].stringValue()), equalTo(GeoUtils.mortonHash(1.3, 1.2)));
}
}


@@ -465,16 +453,14 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
assertThat(doc.rootDoc().getFields("point.lon").length, equalTo(2));
assertThat(doc.rootDoc().getFields("point.lat")[0].numericValue().doubleValue(), equalTo(1.2));
assertThat(doc.rootDoc().getFields("point.lon")[0].numericValue().doubleValue(), equalTo(1.3));
-// norelease update to .before(Version.V_2_2_0 once GeoPointFieldV2 is fully merged
-if (version.onOrBefore(Version.CURRENT)) {
+if (version.before(Version.V_2_2_0)) {
assertThat(doc.rootDoc().getFields("point")[0].stringValue(), equalTo("1.2,1.3"));
} else {
-assertThat(Long.parseLong(doc.rootDoc().getFields("point")[1].stringValue()), equalTo(GeoUtils.mortonHash(1.3, 1.2)));
+assertThat(Long.parseLong(doc.rootDoc().getFields("point")[0].stringValue()), equalTo(GeoUtils.mortonHash(1.3, 1.2)));
}
assertThat(doc.rootDoc().getFields("point.lat")[1].numericValue().doubleValue(), equalTo(1.4));
assertThat(doc.rootDoc().getFields("point.lon")[1].numericValue().doubleValue(), equalTo(1.5));
-// norelease update to .before(Version.V_2_2_0 once GeoPointFieldV2 is fully merged
-if (version.onOrBefore(Version.CURRENT)) {
+if (version.before(Version.V_2_2_0)) {
assertThat(doc.rootDoc().getFields("point")[1].stringValue(), equalTo("1.4,1.5"));
} else {
assertThat(Long.parseLong(doc.rootDoc().getFields("point")[1].stringValue()), equalTo(GeoUtils.mortonHash(1.5, 1.4)));

@@ -498,11 +484,10 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {

assertThat(doc.rootDoc().getField("point.lat"), notNullValue());
assertThat(doc.rootDoc().getField("point.lon"), notNullValue());
-// norelease update to .before(Version.V_2_2_0 once GeoPointFieldV2 is fully merged
-if (version.onOrBefore(Version.CURRENT)) {
+if (version.before(Version.V_2_2_0)) {
assertThat(doc.rootDoc().get("point"), equalTo("1.2,1.3"));
} else {
-assertThat(Long.parseLong(doc.rootDoc().getFields("point")[1].stringValue()), equalTo(GeoUtils.mortonHash(1.3, 1.2)));
+assertThat(Long.parseLong(doc.rootDoc().getFields("point")[0].stringValue()), equalTo(GeoUtils.mortonHash(1.3, 1.2)));
}
}


@@ -524,11 +509,10 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {

assertThat(doc.rootDoc().getField("point.lat"), notNullValue());
assertThat(doc.rootDoc().getField("point.lon"), notNullValue());
-// norelease update to .before(Version.V_2_2_0 once GeoPointFieldV2 is fully merged
-if (version.onOrBefore(Version.CURRENT)) {
+if (version.before(Version.V_2_2_0)) {
assertThat(doc.rootDoc().get("point"), equalTo("1.2,1.3"));
} else {
-assertThat(Long.parseLong(doc.rootDoc().getFields("point")[1].stringValue()), equalTo(GeoUtils.mortonHash(1.3, 1.2)));
+assertThat(Long.parseLong(doc.rootDoc().getFields("point")[0].stringValue()), equalTo(GeoUtils.mortonHash(1.3, 1.2)));
}
}


@@ -551,11 +535,10 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
assertThat(doc.rootDoc().getField("point.lat").numericValue().doubleValue(), equalTo(1.2));
assertThat(doc.rootDoc().getField("point.lon"), notNullValue());
assertThat(doc.rootDoc().getField("point.lon").numericValue().doubleValue(), equalTo(1.3));
-// norelease update to .before(Version.V_2_2_0 once GeoPointFieldV2 is fully merged
-if (version.onOrBefore(Version.CURRENT)) {
+if (version.before(Version.V_2_2_0)) {
assertThat(doc.rootDoc().get("point"), equalTo("1.2,1.3"));
} else {
-assertThat(Long.parseLong(doc.rootDoc().getFields("point")[1].stringValue()), equalTo(GeoUtils.mortonHash(1.3, 1.2)));
+assertThat(Long.parseLong(doc.rootDoc().getFields("point")[0].stringValue()), equalTo(GeoUtils.mortonHash(1.3, 1.2)));
}
}


@@ -581,16 +564,14 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
assertThat(doc.rootDoc().getFields("point.lon").length, equalTo(2));
assertThat(doc.rootDoc().getFields("point.lat")[0].numericValue().doubleValue(), equalTo(1.2));
assertThat(doc.rootDoc().getFields("point.lon")[0].numericValue().doubleValue(), equalTo(1.3));
-// norelease update to .before(Version.V_2_2_0 once GeoPointFieldV2 is fully merged
-if (version.onOrBefore(Version.CURRENT)) {
+if (version.before(Version.V_2_2_0)) {
assertThat(doc.rootDoc().get("point"), equalTo("1.2,1.3"));
} else {
-assertThat(Long.parseLong(doc.rootDoc().getFields("point")[1].stringValue()), equalTo(GeoUtils.mortonHash(1.3, 1.2)));
+assertThat(Long.parseLong(doc.rootDoc().getFields("point")[0].stringValue()), equalTo(GeoUtils.mortonHash(1.3, 1.2)));
}
assertThat(doc.rootDoc().getFields("point.lat")[1].numericValue().doubleValue(), equalTo(1.4));
assertThat(doc.rootDoc().getFields("point.lon")[1].numericValue().doubleValue(), equalTo(1.5));
-// norelease update to .before(Version.V_2_2_0 once GeoPointFieldV2 is fully merged
-if (version.onOrBefore(Version.CURRENT)) {
+if (version.before(Version.V_2_2_0)) {
assertThat(doc.rootDoc().get("point"), equalTo("1.2,1.3"));
} else {
assertThat(Long.parseLong(doc.rootDoc().getFields("point")[1].stringValue()), equalTo(GeoUtils.mortonHash(1.5, 1.4)));
@@ -59,8 +59,7 @@ public class GeohashMappingGeoPointTests extends ESSingleNodeTestCase {

assertThat(doc.rootDoc().getField("point.lat"), nullValue());
assertThat(doc.rootDoc().getField("point.lon"), nullValue());
-// norelease update to .before(Version.V_2_2_0 once GeoPointFieldV2 is fully merged
-if (version.onOrBefore(Version.CURRENT)) {
+if (version.before(Version.V_2_2_0)) {
assertThat(doc.rootDoc().get("point"), equalTo("1.2,1.3"));
} else {
assertThat(Long.parseLong(doc.rootDoc().get("point")), equalTo(GeoUtils.mortonHash(1.3, 1.2)));

@@ -84,8 +83,7 @@ public class GeohashMappingGeoPointTests extends ESSingleNodeTestCase {

assertThat(doc.rootDoc().getField("point.lat"), nullValue());
assertThat(doc.rootDoc().getField("point.lon"), nullValue());
-// norelease update to .before(Version.V_2_2_0 once GeoPointFieldV2 is fully merged
-if (version.onOrBefore(Version.CURRENT)) {
+if (version.before(Version.V_2_2_0)) {
assertThat(doc.rootDoc().get("point"), equalTo("1.2,1.3"));
} else {
assertThat(Long.parseLong(doc.rootDoc().get("point")), equalTo(GeoUtils.mortonHash(1.3, 1.2)));
@@ -257,8 +257,7 @@ public class MultiFieldTests extends ESSingleNodeTestCase {
public void testConvertMultiFieldGeoPoint() throws Exception {
Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT);
Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
-// norelease update to .before(Version.V_2_2_0 once GeoPointFieldV2 is fully merged
-boolean indexCreatedBefore22 = version.onOrBefore(Version.CURRENT);
+boolean indexCreatedBefore22 = version.before(Version.V_2_2_0);
String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/multifield/test-multi-field-type-geo_point.json");
DocumentMapper docMapper = createIndex("test", settings).mapperService().documentMapperParser().parse(mapping);

@@ -267,11 +267,15 @@ public class GeoBoundingBoxQueryBuilderTests extends AbstractQueryTestCase<GeoBo

@Override
protected void doAssertLuceneQuery(GeoBoundingBoxQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException {
+if (context.indexVersionCreated().before(Version.V_2_2_0)) {
if (queryBuilder.type() == GeoExecType.INDEXED) {
assertTrue("Found no indexed geo query.", query instanceof ConstantScoreQuery);
} else {
assertTrue("Found no indexed geo query.", query instanceof InMemoryGeoBoundingBoxQuery);
}
+} else {
+assertTrue("Found no indexed geo query.", query instanceof GeoPointInBBoxQuery);
+}
}

public abstract class PointTester {

@@ -423,8 +427,7 @@ public class GeoBoundingBoxQueryBuilderTests extends AbstractQueryTestCase<GeoBo

private void assertGeoBoundingBoxQuery(String query) throws IOException {
Query parsedQuery = parseQuery(query).toQuery(createShardContext());
-// norelease cut over to .before(Version.2_2_0) once GeoPointFieldV2 is fully merged
-if (queryShardContext().indexVersionCreated().onOrBefore(Version.CURRENT)) {
+if (queryShardContext().indexVersionCreated().before(Version.V_2_2_0)) {
InMemoryGeoBoundingBoxQuery filter = (InMemoryGeoBoundingBoxQuery) parsedQuery;
assertThat(filter.fieldName(), equalTo(GEO_POINT_FIELD_NAME));
assertThat(filter.topLeft().lat(), closeTo(40, 1E-5));
@@ -162,8 +162,7 @@ public class GeoDistanceQueryBuilderTests extends AbstractQueryTestCase<GeoDista
@Override
protected void doAssertLuceneQuery(GeoDistanceQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException {
Version version = context.indexVersionCreated();
-// norelease update to .before(Version.V_2_2_0 once GeoPointFieldV2 is fully merged
-if (version.onOrBefore(Version.CURRENT)) {
+if (version.before(Version.V_2_2_0)) {
assertLegacyQuery(queryBuilder, query);
} else {
assertGeoPointQuery(queryBuilder, query);

@@ -370,8 +369,7 @@ public class GeoDistanceQueryBuilderTests extends AbstractQueryTestCase<GeoDista
assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
Query parsedQuery = parseQuery(query).toQuery(createShardContext());
Version version = queryShardContext().indexVersionCreated();
-// norelease update to .before(Version.V_2_2_0 once GeoPointFieldV2 is fully merged
-if (version.onOrBefore(Version.CURRENT)) {
+if (version.before(Version.V_2_2_0)) {
GeoDistanceRangeQuery q = (GeoDistanceRangeQuery) parsedQuery;
assertThat(q.fieldName(), equalTo(GEO_POINT_FIELD_NAME));
assertThat(q.lat(), closeTo(lat, 1E-5D));
@@ -29,6 +29,7 @@ import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.geo.GeoUtils;
import org.elasticsearch.common.unit.DistanceUnit;
import org.elasticsearch.index.search.geo.GeoDistanceRangeQuery;
+import org.elasticsearch.test.geo.RandomGeoGenerator;

import java.io.IOException;

@@ -44,14 +45,13 @@ public class GeoDistanceRangeQueryTests extends AbstractQueryTestCase<GeoDistanc
Version version = queryShardContext().indexVersionCreated();
GeoDistanceRangeQueryBuilder builder;
if (randomBoolean()) {
-builder = new GeoDistanceRangeQueryBuilder(GEO_POINT_FIELD_NAME, randomGeohash(1, 12));
+builder = new GeoDistanceRangeQueryBuilder(GEO_POINT_FIELD_NAME, randomGeohash(3, 12));
} else {
-double lat = randomDouble() * 180 - 90;
-double lon = randomDouble() * 360 - 180;
+GeoPoint point = RandomGeoGenerator.randomPointIn(random(), -179, -89, 89, 179);
if (randomBoolean()) {
-builder = new GeoDistanceRangeQueryBuilder(GEO_POINT_FIELD_NAME, new GeoPoint(lat, lon));
+builder = new GeoDistanceRangeQueryBuilder(GEO_POINT_FIELD_NAME, point);
} else {
-builder = new GeoDistanceRangeQueryBuilder(GEO_POINT_FIELD_NAME, lat, lon);
+builder = new GeoDistanceRangeQueryBuilder(GEO_POINT_FIELD_NAME, point.lat(), point.lon());
}
}
GeoPoint point = builder.point();

@@ -60,7 +60,7 @@ public class GeoDistanceRangeQueryTests extends AbstractQueryTestCase<GeoDistanc
final double maxRadius = GeoUtils.maxRadialDistance(point, distToPole);

final int fromValueMeters = randomInt((int)(maxRadius*0.5));
-final int toValueMeters = randomIntBetween(fromValueMeters, (int)maxRadius);
+final int toValueMeters = randomIntBetween(fromValueMeters + 1, (int)maxRadius);
DistanceUnit fromToUnits = randomFrom(DistanceUnit.values());
final String fromToUnitsStr = fromToUnits.toString();
final double fromValue = DistanceUnit.convert(fromValueMeters, DistanceUnit.DEFAULT, fromToUnits);

@@ -105,8 +105,7 @@ public class GeoDistanceRangeQueryTests extends AbstractQueryTestCase<GeoDistanc
if (randomBoolean()) {
builder.geoDistance(randomFrom(GeoDistance.values()));
}
-// norelease update to .before(Version.V_2_2_0 once GeoPointFieldV2 is fully merged
-if (randomBoolean() && version.onOrBefore(Version.CURRENT)) {
+if (randomBoolean() && version.before(Version.V_2_2_0)) {
builder.optimizeBbox(randomFrom("none", "memory", "indexed"));
}
builder.unit(fromToUnits);

@@ -120,8 +119,7 @@ public class GeoDistanceRangeQueryTests extends AbstractQueryTestCase<GeoDistanc
protected void doAssertLuceneQuery(GeoDistanceRangeQueryBuilder queryBuilder, Query query, QueryShardContext context)
throws IOException {
Version version = context.indexVersionCreated();
-// norelease update to .before(Version.V_2_2_0 once GeoPointFieldV2 is fully merged
-if (version.onOrBefore(Version.CURRENT)) {
+if (version.before(Version.V_2_2_0)) {
assertLegacyQuery(queryBuilder, query);
} else {
assertGeoPointQuery(queryBuilder, query);

@@ -193,9 +191,6 @@ public class GeoDistanceRangeQueryTests extends AbstractQueryTestCase<GeoDistanc
if (queryBuilder.unit() != null) {
toValue = queryBuilder.unit().toMeters(toValue);
}
-if (queryBuilder.geoDistance() != null) {
-toValue = queryBuilder.geoDistance().normalize(toValue, DistanceUnit.DEFAULT);
-}
assertThat(geoQuery.getMaxRadiusMeters(), closeTo(toValue, 1E-5));
}
}
@@ -59,8 +59,7 @@ public class GeoPolygonQueryBuilderTests extends AbstractQueryTestCase<GeoPolygo
@Override
protected void doAssertLuceneQuery(GeoPolygonQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException {
Version version = context.indexVersionCreated();
-// norelease update to .before(Version.V_2_2_0 once GeoPointFieldV2 is fully merged
-if (version.onOrBefore(Version.CURRENT)) {
+if (version.before(Version.V_2_2_0)) {
assertLegacyQuery(queryBuilder, query);
} else {
assertGeoPointQuery(queryBuilder, query);

@@ -300,8 +299,7 @@ public class GeoPolygonQueryBuilderTests extends AbstractQueryTestCase<GeoPolygo
QueryShardContext context = createShardContext();
Version version = context.indexVersionCreated();
Query parsedQuery = parseQuery(query).toQuery(context);
-// norelease update to .before(Version.V_2_2_0 once GeoPointFieldV2 is fully merged
-if (version.onOrBefore(Version.CURRENT)) {
+if (version.before(Version.V_2_2_0)) {
GeoPolygonQuery filter = (GeoPolygonQuery) parsedQuery;
assertThat(filter.fieldName(), equalTo(GEO_POINT_FIELD_NAME));
assertThat(filter.points().length, equalTo(4));
@@ -377,7 +377,7 @@ public class GeoUtilsTests extends ESTestCase {
assertNormalizedPoint(new GeoPoint(0.0, 0.0), new GeoPoint(0.0, 0.0));
assertNormalizedPoint(new GeoPoint(-180.0, -360.0), new GeoPoint(0.0, 180.0));
assertNormalizedPoint(new GeoPoint(180.0, 360.0), new GeoPoint(0.0, 180.0));
-assertNormalizedPoint(new GeoPoint(-90.0, -180.0), new GeoPoint(-90.0, 180.0));
+assertNormalizedPoint(new GeoPoint(-90.0, -180.0), new GeoPoint(-90.0, -180.0));
assertNormalizedPoint(new GeoPoint(90.0, 180.0), new GeoPoint(90.0, 180.0));
}

@@ -19,19 +19,24 @@

package org.elasticsearch.search.functionscore;

+import org.elasticsearch.Version;
import org.elasticsearch.action.ActionFuture;
import org.elasticsearch.action.index.IndexRequestBuilder;
import org.elasticsearch.action.search.SearchPhaseExecutionException;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.search.SearchType;
+import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.lucene.search.function.CombineFunction;
import org.elasticsearch.common.lucene.search.function.FiltersFunctionScoreQuery;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.index.query.functionscore.FunctionScoreQueryBuilder;
import org.elasticsearch.search.MultiValueMode;
import org.elasticsearch.search.SearchHits;
import org.elasticsearch.test.ESIntegTestCase;
+import org.elasticsearch.test.VersionUtils;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;

@@ -565,12 +570,18 @@ public class DecayFunctionScoreIT extends ESIntegTestCase {
}

public void testManyDocsLin() throws Exception {
-assertAcked(prepareCreate("test").addMapping(
-"type",
-jsonBuilder().startObject().startObject("type").startObject("properties").startObject("test").field("type", "string")
-.endObject().startObject("date").field("type", "date").endObject().startObject("num").field("type", "double")
-.endObject().startObject("geo").field("type", "geo_point").field("coerce", true).endObject().endObject()
-.endObject().endObject()));
+Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT);
+Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
+XContentBuilder xContentBuilder = jsonBuilder().startObject().startObject("type").startObject("properties")
+.startObject("test").field("type", "string").endObject().startObject("date").field("type", "date")
+.field("doc_values", true).endObject().startObject("num").field("type", "double")
+.field("doc_values", true).endObject().startObject("geo").field("type", "geo_point")
+.field("ignore_malformed", true);
+if (version.before(Version.V_2_2_0)) {
+xContentBuilder.field("coerce", true);
+}
+xContentBuilder.endObject().endObject().endObject().endObject();
+assertAcked(prepareCreate("test").setSettings(settings).addMapping("type", xContentBuilder.string()));
ensureYellow();
int numDocs = 200;
List<IndexRequestBuilder> indexBuilders = new ArrayList<>();
@@ -19,12 +19,16 @@

package org.elasticsearch.search.geo;

+import org.elasticsearch.Version;
import org.elasticsearch.action.search.SearchResponse;
+import org.elasticsearch.cluster.metadata.IndexMetaData;
+import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.query.GeoValidationMethod;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.test.ESIntegTestCase;
+import org.elasticsearch.test.VersionUtils;

import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.elasticsearch.index.query.QueryBuilders.boolQuery;

@@ -39,10 +43,15 @@ import static org.hamcrest.Matchers.equalTo;
*/
public class GeoBoundingBoxIT extends ESIntegTestCase {
public void testSimpleBoundingBoxTest() throws Exception {
+Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT);
+Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type1")
-.startObject("properties").startObject("location").field("type", "geo_point").field("lat_lon", true).endObject().endObject()
-.endObject().endObject();
-assertAcked(prepareCreate("test").addMapping("type1", xContentBuilder));
+.startObject("properties").startObject("location").field("type", "geo_point");
+if (version.before(Version.V_2_2_0)) {
+xContentBuilder.field("lat_lon", true);
+}
+xContentBuilder.endObject().endObject().endObject().endObject();
+assertAcked(prepareCreate("test").setSettings(settings).addMapping("type1", xContentBuilder));
ensureGreen();

client().prepareIndex("test", "type1", "1").setSource(jsonBuilder().startObject()

@@ -108,10 +117,15 @@ public class GeoBoundingBoxIT extends ESIntegTestCase {
}

public void testLimitsBoundingBox() throws Exception {
+Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT);
+Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type1")
-.startObject("properties").startObject("location").field("type", "geo_point").field("lat_lon", true).endObject().endObject()
-.endObject().endObject();
-assertAcked(prepareCreate("test").addMapping("type1", xContentBuilder));
+.startObject("properties").startObject("location").field("type", "geo_point");
+if (version.before(Version.V_2_2_0)) {
+xContentBuilder.field("lat_lon", true);
+}
+xContentBuilder.endObject().endObject().endObject().endObject();
+assertAcked(prepareCreate("test").setSettings(settings).addMapping("type1", xContentBuilder));
ensureGreen();

client().prepareIndex("test", "type1", "1").setSource(jsonBuilder().startObject()

@@ -210,10 +224,15 @@ public class GeoBoundingBoxIT extends ESIntegTestCase {
}

public void testLimit2BoundingBox() throws Exception {
+Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT);
+Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type1")
-.startObject("properties").startObject("location").field("type", "geo_point").field("lat_lon", true).endObject().endObject()
-.endObject().endObject();
-assertAcked(prepareCreate("test").addMapping("type1", xContentBuilder));
+.startObject("properties").startObject("location").field("type", "geo_point");
+if (version.before(Version.V_2_2_0)) {
+xContentBuilder.field("lat_lon", true);
+}
+xContentBuilder.endObject().endObject().endObject().endObject();
+assertAcked(prepareCreate("test").setSettings(settings).addMapping("type1", xContentBuilder));
ensureGreen();

client().prepareIndex("test", "type1", "1").setSource(jsonBuilder().startObject()

@@ -260,10 +279,15 @@ public class GeoBoundingBoxIT extends ESIntegTestCase {
}

public void testCompleteLonRange() throws Exception {
+Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT);
+Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type1")
-.startObject("properties").startObject("location").field("type", "geo_point").field("lat_lon", true).endObject().endObject()
-.endObject().endObject();
-assertAcked(prepareCreate("test").addMapping("type1", xContentBuilder));
+.startObject("properties").startObject("location").field("type", "geo_point");
+if (version.before(Version.V_2_2_0)) {
+xContentBuilder.field("lat_lon", true);
+}
+xContentBuilder.endObject().endObject().endObject().endObject();
+assertAcked(prepareCreate("test").setSettings(settings).addMapping("type1", xContentBuilder));
ensureGreen();

client().prepareIndex("test", "type1", "1").setSource(jsonBuilder().startObject()
@ -30,10 +30,13 @@ import org.apache.lucene.spatial.query.SpatialArgs;
import org.apache.lucene.spatial.query.SpatialOperation;
import org.apache.lucene.spatial.query.UnsupportedSpatialOperation;
import org.apache.lucene.util.GeoHashUtils;
import org.apache.lucene.util.GeoProjectionUtils;
import org.elasticsearch.Version;
import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder;
import org.elasticsearch.action.bulk.BulkItemResponse;
import org.elasticsearch.action.bulk.BulkResponse;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.Priority;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference;

@ -43,11 +46,14 @@ import org.elasticsearch.common.geo.builders.MultiPolygonBuilder;
import org.elasticsearch.common.geo.builders.PolygonBuilder;
import org.elasticsearch.common.geo.builders.ShapeBuilder;
import org.elasticsearch.common.io.Streams;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.query.GeohashCellQuery;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.test.VersionUtils;
import org.junit.BeforeClass;

import java.io.ByteArrayOutputStream;

@ -73,10 +79,7 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertFirs
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchHits;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.hasId;
import static org.hamcrest.Matchers.anyOf;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.lessThanOrEqualTo;
import static org.hamcrest.Matchers.*;

/**
 *
@ -398,25 +401,28 @@ public class GeoFilterIT extends ESIntegTestCase {
public void testBulk() throws Exception {
byte[] bulkAction = unZipData("/org/elasticsearch/search/geo/gzippedmap.gz");

String mapping = XContentFactory.jsonBuilder()
Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT);
Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
XContentBuilder xContentBuilder = XContentFactory.jsonBuilder()
.startObject()
.startObject("country")
.startObject("properties")
.startObject("pin")
.field("type", "geo_point")
.field("lat_lon", true)
.field("store", true)
.field("type", "geo_point");
if (version.before(Version.V_2_2_0)) {
xContentBuilder.field("lat_lon", true);
}
xContentBuilder.field("store", true)
.endObject()
.startObject("location")
.field("type", "geo_shape")
.endObject()
.endObject()
.endObject()
.endObject()
.string();
.endObject();

client().admin().indices().prepareCreate("countries").addMapping("country", mapping).execute().actionGet();
client().admin().indices().prepareCreate("countries").setSettings(settings)
.addMapping("country", xContentBuilder.string()).execute().actionGet();
BulkResponse bulk = client().prepareBulk().add(bulkAction, 0, bulkAction.length, null, null).execute().actionGet();

for (BulkItemResponse item : bulk.getItems()) {
@ -450,13 +456,17 @@ public class GeoFilterIT extends ESIntegTestCase {
GeoPoint point = new GeoPoint();
for (SearchHit hit : distance.getHits()) {
String name = hit.getId();
if (version.before(Version.V_2_2_0)) {
point.resetFromString(hit.fields().get("pin").getValue().toString());
} else {
point.resetFromIndexHash(hit.fields().get("pin").getValue());
}
double dist = distance(point.getLat(), point.getLon(), 51.11, 9.851);

assertThat("distance to '" + name + "'", dist, lessThanOrEqualTo(425000d));
assertThat(name, anyOf(equalTo("CZ"), equalTo("DE"), equalTo("BE"), equalTo("NL"), equalTo("LU")));
if (key.equals(name)) {
assertThat(dist, equalTo(0d));
assertThat(dist, closeTo(0d, 0.1d));
}
}
}

@ -557,7 +567,7 @@ public class GeoFilterIT extends ESIntegTestCase {
}

public static double distance(double lat1, double lon1, double lat2, double lon2) {
return GeoUtils.EARTH_SEMI_MAJOR_AXIS * DistanceUtils.distHaversineRAD(
return GeoProjectionUtils.SEMIMAJOR_AXIS * DistanceUtils.distHaversineRAD(
DistanceUtils.toRadians(lat1),
DistanceUtils.toRadians(lon1),
DistanceUtils.toRadians(lat2),
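The distance() helper above only swaps its radius constant from GeoUtils.EARTH_SEMI_MAJOR_AXIS to Lucene's GeoProjectionUtils.SEMIMAJOR_AXIS; both should resolve to the WGS84 semi-major axis, about 6,378,137 m. A self-contained sketch of the same haversine computation with the constant inlined (the coordinates are illustrative, not from the test):

    // Haversine distance on a sphere whose radius is the WGS84 semi-major axis.
    public final class HaversineSketch {
        private static final double SEMIMAJOR_AXIS = 6_378_137.0; // meters

        public static double distance(double lat1, double lon1, double lat2, double lon2) {
            double dLat = Math.toRadians(lat2 - lat1);
            double dLon = Math.toRadians(lon2 - lon1);
            double a = Math.sin(dLat / 2) * Math.sin(dLat / 2)
                    + Math.cos(Math.toRadians(lat1)) * Math.cos(Math.toRadians(lat2))
                    * Math.sin(dLon / 2) * Math.sin(dLon / 2);
            return SEMIMAJOR_AXIS * 2 * Math.atan2(Math.sqrt(a), Math.sqrt(1 - a));
        }

        public static void main(String[] args) {
            // Distance in meters from Berlin to the test's reference point (51.11, 9.851);
            // the test asserts such distances stay below 425000d.
            System.out.println(distance(52.52, 13.405, 51.11, 9.851));
        }
    }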
@ -19,12 +19,16 @@
package org.elasticsearch.search.geo;

import org.elasticsearch.Version;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.test.VersionUtils;

import java.util.ArrayList;
import java.util.List;
@ -42,10 +46,15 @@ public class GeoPolygonIT extends ESIntegTestCase {
@Override
protected void setupSuiteScopeCluster() throws Exception {
Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT);
Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type1")
.startObject("properties").startObject("location").field("type", "geo_point").field("lat_lon", true)
.endObject().endObject().endObject().endObject();
assertAcked(prepareCreate("test").addMapping("type1", xContentBuilder));
.startObject("properties").startObject("location").field("type", "geo_point");
if (version.before(Version.V_2_2_0)) {
xContentBuilder.field("lat_lon", true);
}
xContentBuilder.endObject().endObject().endObject().endObject();
assertAcked(prepareCreate("test").setSettings(settings).addMapping("type1", xContentBuilder));
ensureGreen();

indexRandom(true, client().prepareIndex("test", "type1", "1").setSource(jsonBuilder().startObject()
@ -20,13 +20,17 @@
package org.elasticsearch.messy.tests;

import org.apache.lucene.util.GeoHashUtils;
import org.elasticsearch.Version;
import org.elasticsearch.action.index.IndexRequestBuilder;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.geo.GeoDistance;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.DistanceUnit;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.index.query.GeoDistanceQueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.rest.RestStatus;

@ -36,6 +40,7 @@ import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.sort.SortBuilders;
import org.elasticsearch.search.sort.SortOrder;
import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.test.VersionUtils;

import java.io.IOException;
import java.util.ArrayList;
@ -71,10 +76,15 @@ public class GeoDistanceTests extends ESIntegTestCase {
}

public void testSimpleDistance() throws Exception {
Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT);
Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type1")
.startObject("properties").startObject("location").field("type", "geo_point").field("lat_lon", true)
.endObject().endObject().endObject().endObject();
assertAcked(prepareCreate("test").addMapping("type1", xContentBuilder));
.startObject("properties").startObject("location").field("type", "geo_point");
if (version.before(Version.V_2_2_0)) {
xContentBuilder.field("lat_lon", true);
}
xContentBuilder.endObject().endObject().endObject().endObject();
assertAcked(prepareCreate("test").setSettings(settings).addMapping("type1", xContentBuilder));
ensureGreen();

indexRandom(true, client().prepareIndex("test", "type1", "1").setSource(jsonBuilder().startObject()

@ -222,12 +232,15 @@ public class GeoDistanceTests extends ESIntegTestCase {
}

public void testDistanceSortingMVFields() throws Exception {
Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT);
Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type1")
.startObject("properties").startObject("locations").field("type", "geo_point").field("lat_lon", true)
.field("ignore_malformed", true).field("coerce", true)
.endObject().endObject().endObject().endObject();
assertAcked(prepareCreate("test")
.addMapping("type1", xContentBuilder));
.startObject("properties").startObject("locations").field("type", "geo_point");
if (version.before(Version.V_2_2_0)) {
xContentBuilder.field("lat_lon", true).field("coerce", true);
}
xContentBuilder.field("ignore_malformed", true).endObject().endObject().endObject().endObject();
assertAcked(prepareCreate("test").setSettings(settings).addMapping("type1", xContentBuilder));
ensureGreen();

client().prepareIndex("test", "type1", "1").setSource(jsonBuilder().startObject()
@ -357,10 +370,15 @@ public class GeoDistanceTests extends ESIntegTestCase {
// Regression bug: https://github.com/elasticsearch/elasticsearch/issues/2851
public void testDistanceSortingWithMissingGeoPoint() throws Exception {
Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT);
Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type1")
.startObject("properties").startObject("locations").field("type", "geo_point").field("lat_lon", true)
.endObject().endObject().endObject().endObject();
assertAcked(prepareCreate("test").addMapping("type1", xContentBuilder));
.startObject("properties").startObject("locations").field("type", "geo_point");
if (version.before(Version.V_2_2_0)) {
xContentBuilder.field("lat_lon", true);
}
xContentBuilder.endObject().endObject().endObject().endObject();
assertAcked(prepareCreate("test").setSettings(settings).addMapping("type1", xContentBuilder));
ensureGreen();

client().prepareIndex("test", "type1", "1").setSource(jsonBuilder().startObject()

@ -407,10 +425,15 @@ public class GeoDistanceTests extends ESIntegTestCase {
double target_lat = 32.81;
double target_long = -117.21;

Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT);
Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type1")
.startObject("properties").startObject("location").field("type", "geo_point").field("lat_lon", true).endObject().endObject()
.endObject().endObject();
assertAcked(prepareCreate("test").addMapping("type1", xContentBuilder));
.startObject("properties").startObject("location").field("type", "geo_point");
if (version.before(Version.V_2_2_0)) {
xContentBuilder.field("lat_lon", true);
}
xContentBuilder.endObject().endObject().endObject().endObject();
assertAcked(prepareCreate("test").setSettings(settings).addMapping("type1", xContentBuilder));
ensureGreen();

client().prepareIndex("test", "type1", "1").setSource(jsonBuilder().startObject()
@ -425,28 +448,28 @@ public class GeoDistanceTests extends ESIntegTestCase {
.actionGet();
Double resultDistance1 = searchResponse1.getHits().getHits()[0].getFields().get("distance").getValue();
assertThat(resultDistance1,
closeTo(GeoDistance.ARC.calculate(source_lat, source_long, target_lat, target_long, DistanceUnit.DEFAULT), 0.0001d));
closeTo(GeoDistance.ARC.calculate(source_lat, source_long, target_lat, target_long, DistanceUnit.DEFAULT), 0.01d));

SearchResponse searchResponse2 = client().prepareSearch().addField("_source")
.addScriptField("distance", new Script("doc['location'].distance(" + target_lat + "," + target_long + ")")).execute()
.actionGet();
Double resultDistance2 = searchResponse2.getHits().getHits()[0].getFields().get("distance").getValue();
assertThat(resultDistance2,
closeTo(GeoDistance.PLANE.calculate(source_lat, source_long, target_lat, target_long, DistanceUnit.DEFAULT), 0.0001d));
closeTo(GeoDistance.PLANE.calculate(source_lat, source_long, target_lat, target_long, DistanceUnit.DEFAULT), 0.01d));

SearchResponse searchResponse3 = client().prepareSearch().addField("_source")
.addScriptField("distance", new Script("doc['location'].arcDistanceInKm(" + target_lat + "," + target_long + ")"))
.execute().actionGet();
Double resultArcDistance3 = searchResponse3.getHits().getHits()[0].getFields().get("distance").getValue();
assertThat(resultArcDistance3,
closeTo(GeoDistance.ARC.calculate(source_lat, source_long, target_lat, target_long, DistanceUnit.KILOMETERS), 0.0001d));
closeTo(GeoDistance.ARC.calculate(source_lat, source_long, target_lat, target_long, DistanceUnit.KILOMETERS), 0.01d));

SearchResponse searchResponse4 = client().prepareSearch().addField("_source")
.addScriptField("distance", new Script("doc['location'].distanceInKm(" + target_lat + "," + target_long + ")")).execute()
.actionGet();
Double resultDistance4 = searchResponse4.getHits().getHits()[0].getFields().get("distance").getValue();
assertThat(resultDistance4,
closeTo(GeoDistance.PLANE.calculate(source_lat, source_long, target_lat, target_long, DistanceUnit.KILOMETERS), 0.0001d));
closeTo(GeoDistance.PLANE.calculate(source_lat, source_long, target_lat, target_long, DistanceUnit.KILOMETERS), 0.01d));

SearchResponse searchResponse5 = client()
.prepareSearch()

@ -455,7 +478,7 @@ public class GeoDistanceTests extends ESIntegTestCase {
.execute().actionGet();
Double resultArcDistance5 = searchResponse5.getHits().getHits()[0].getFields().get("distance").getValue();
assertThat(resultArcDistance5,
closeTo(GeoDistance.ARC.calculate(source_lat, source_long, target_lat, target_long, DistanceUnit.KILOMETERS), 0.0001d));
closeTo(GeoDistance.ARC.calculate(source_lat, source_long, target_lat, target_long, DistanceUnit.KILOMETERS), 0.01d));

SearchResponse searchResponse6 = client()
.prepareSearch()

@ -464,24 +487,26 @@ public class GeoDistanceTests extends ESIntegTestCase {
.execute().actionGet();
Double resultArcDistance6 = searchResponse6.getHits().getHits()[0].getFields().get("distance").getValue();
assertThat(resultArcDistance6,
closeTo(GeoDistance.ARC.calculate(source_lat, source_long, target_lat, target_long, DistanceUnit.KILOMETERS), 0.0001d));
closeTo(GeoDistance.ARC.calculate(source_lat, source_long, target_lat, target_long, DistanceUnit.KILOMETERS), 0.01d));

SearchResponse searchResponse7 = client().prepareSearch().addField("_source")
.addScriptField("distance", new Script("doc['location'].arcDistanceInMiles(" + target_lat + "," + target_long + ")"))
.execute().actionGet();
Double resultDistance7 = searchResponse7.getHits().getHits()[0].getFields().get("distance").getValue();
assertThat(resultDistance7,
closeTo(GeoDistance.ARC.calculate(source_lat, source_long, target_lat, target_long, DistanceUnit.MILES), 0.0001d));
closeTo(GeoDistance.ARC.calculate(source_lat, source_long, target_lat, target_long, DistanceUnit.MILES), 0.01d));

SearchResponse searchResponse8 = client().prepareSearch().addField("_source")
.addScriptField("distance", new Script("doc['location'].distanceInMiles(" + target_lat + "," + target_long + ")"))
.execute().actionGet();
Double resultDistance8 = searchResponse8.getHits().getHits()[0].getFields().get("distance").getValue();
assertThat(resultDistance8,
closeTo(GeoDistance.PLANE.calculate(source_lat, source_long, target_lat, target_long, DistanceUnit.MILES), 0.0001d));
closeTo(GeoDistance.PLANE.calculate(source_lat, source_long, target_lat, target_long, DistanceUnit.MILES), 0.01d));
}

public void testDistanceSortingNestedFields() throws Exception {
Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT);
Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("company")
.startObject("properties")
.startObject("name").field("type", "string").endObject()
@ -489,14 +514,17 @@ public class GeoDistanceTests extends ESIntegTestCase {
.field("type", "nested")
.startObject("properties")
.startObject("name").field("type", "string").endObject()
.startObject("location").field("type", "geo_point").field("lat_lon", true)
.endObject()
.startObject("location").field("type", "geo_point");
if (version.before(Version.V_2_2_0)) {
xContentBuilder.field("lat_lon", true);
}
xContentBuilder.endObject()
.endObject()
.endObject()
.endObject()
.endObject().endObject();

assertAcked(prepareCreate("companies").addMapping("company", xContentBuilder));
assertAcked(prepareCreate("companies").setSettings(settings).addMapping("company", xContentBuilder));
ensureGreen();

indexRandom(true, client().prepareIndex("companies", "company", "1").setSource(jsonBuilder().startObject()
@ -645,6 +673,8 @@ public class GeoDistanceTests extends ESIntegTestCase {
* Issue 3073
*/
public void testGeoDistanceFilter() throws IOException {
Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT);
Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
double lat = 40.720611;
double lon = -73.998776;
@ -653,21 +683,18 @@ public class GeoDistanceTests extends ESIntegTestCase {
.startObject("location")
.startObject("properties")
.startObject("pin")
.field("type", "geo_point")
.field("geohash", true)
.field("geohash_precision", 24)
.field("lat_lon", true)
.endObject()
.endObject()
.endObject()
.endObject();
.field("type", "geo_point");
if (version.before(Version.V_2_2_0)) {
mapping.field("lat_lon", true);
}
mapping.endObject().endObject().endObject().endObject();

XContentBuilder source = JsonXContent.contentBuilder()
.startObject()
.field("pin", GeoHashUtils.stringEncode(lon, lat))
.endObject();

assertAcked(prepareCreate("locations").addMapping("location", mapping));
assertAcked(prepareCreate("locations").setSettings(settings).addMapping("location", mapping));
client().prepareIndex("locations", "location", "1").setCreate(true).setSource(source).execute().actionGet();
refresh();
client().prepareGet("locations", "location", "1").execute().actionGet();
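The source document above indexes the pin as a geohash through GeoHashUtils.stringEncode(lon, lat); note the longitude-first argument order, the reverse of the lat,lon order used elsewhere in these tests. A hypothetical standalone check, not part of the commit:

    import org.apache.lucene.util.GeoHashUtils;

    public class GeohashEncodeSketch {
        public static void main(String[] args) {
            double lat = 40.720611;   // the test's New York point
            double lon = -73.998776;
            // stringEncode takes (lon, lat); swapping the arguments silently encodes
            // a different point rather than failing.
            String hash = GeoHashUtils.stringEncode(lon, lat);
            System.out.println(hash); // expected to start with "dr5r" (lower Manhattan)
        }
    }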
@ -692,11 +719,19 @@ public class GeoDistanceTests extends ESIntegTestCase {
}

public void testDuelOptimizations() throws Exception {
assertAcked(prepareCreate("index").addMapping("type", "location", "type=geo_point,lat_lon=true"));
Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT);
Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
if (version.before(Version.V_2_2_0)) {
assertAcked(prepareCreate("index").setSettings(settings)
.addMapping("type", "location", "type=geo_point,lat_lon=true"));
} else {
assertAcked(prepareCreate("index").setSettings(settings).addMapping("type", "location", "type=geo_point"));
}
final int numDocs = scaledRandomIntBetween(3000, 10000);
List<IndexRequestBuilder> docs = new ArrayList<>();
for (int i = 0; i < numDocs; ++i) {
docs.add(client().prepareIndex("index", "type").setSource(jsonBuilder().startObject().startObject("location").field("lat", randomLat()).field("lon", randomLon()).endObject().endObject()));
docs.add(client().prepareIndex("index", "type").setSource(jsonBuilder().startObject().startObject("location")
.field("lat", randomLat()).field("lon", randomLon()).endObject().endObject()));
}
indexRandom(true, docs);
ensureSearchable();
@ -707,20 +742,34 @@ public class GeoDistanceTests extends ESIntegTestCase {
final String distance = DistanceUnit.KILOMETERS.toString(randomInt(10000));
for (GeoDistance geoDistance : Arrays.asList(GeoDistance.ARC, GeoDistance.SLOPPY_ARC)) {
logger.info("Now testing GeoDistance={}, distance={}, origin=({}, {})", geoDistance, distance, originLat, originLon);
long matches = -1;
GeoDistanceQueryBuilder qb = QueryBuilders.geoDistanceQuery("location").point(originLat, originLon).distance(distance).geoDistance(geoDistance);
long matches;
if (version.before(Version.V_2_2_0)) {
for (String optimizeBbox : Arrays.asList("none", "memory", "indexed")) {
SearchResponse resp = client().prepareSearch("index").setSize(0).setQuery(QueryBuilders.constantScoreQuery(
QueryBuilders.geoDistanceQuery("location").point(originLat, originLon).distance(distance).geoDistance(geoDistance).optimizeBbox(optimizeBbox))).execute().actionGet();
assertSearchResponse(resp);
logger.info("{} -> {} hits", optimizeBbox, resp.getHits().totalHits());
if (matches < 0) {
matches = resp.getHits().totalHits();
} else {
assertEquals(matches, resp.getHits().totalHits());
qb.optimizeBbox(optimizeBbox);
SearchResponse resp = client().prepareSearch("index").setSize(0)
.setQuery(QueryBuilders.constantScoreQuery(qb)).execute().actionGet();
matches = assertDuelOptimization(resp);
logger.info("{} -> {} hits", optimizeBbox, matches);
}
} else {
SearchResponse resp = client().prepareSearch("index").setSize(0)
.setQuery(QueryBuilders.constantScoreQuery(qb)).execute().actionGet();
matches = assertDuelOptimization(resp);
logger.info("{} hits", matches);
}
}
}
}

private long assertDuelOptimization(SearchResponse resp) {
long matches = -1;
assertSearchResponse(resp);
if (matches < 0) {
matches = resp.getHits().totalHits();
} else {
assertEquals(matches, matches = resp.getHits().totalHits());
}
return matches;
}
}
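The dueling loop above is easier to follow outside the diff's interleaving of old and new lines. A condensed sketch of what the pre-2.2.0 branch asserts, assuming the surrounding ESIntegTestCase context so that client(), originLat, originLon, distance and geoDistance are already in scope:

    // Sketch only, not the committed code: the same geo_distance query must return
    // the same hit count for every optimize_bbox hint on indexes created before 2.2.0.
    long expected = -1;
    for (String optimizeBbox : Arrays.asList("none", "memory", "indexed")) {
        SearchResponse resp = client().prepareSearch("index").setSize(0)
                .setQuery(QueryBuilders.constantScoreQuery(
                        QueryBuilders.geoDistanceQuery("location")
                                .point(originLat, originLon)
                                .distance(distance)
                                .geoDistance(geoDistance)
                                .optimizeBbox(optimizeBbox)))
                .execute().actionGet();
        assertSearchResponse(resp);
        if (expected < 0) {
            expected = resp.getHits().totalHits();   // the first hint fixes the expected count
        } else {
            assertEquals(expected, resp.getHits().totalHits());
        }
    }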
@ -24,6 +24,7 @@ import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.TestUtil;
import org.apache.lucene.util.UnicodeUtil;
import org.elasticsearch.Version;
import org.elasticsearch.action.admin.indices.alias.Alias;
import org.elasticsearch.action.index.IndexRequestBuilder;
import org.elasticsearch.action.search.SearchPhaseExecutionException;

@ -32,6 +33,7 @@ import org.elasticsearch.action.search.ShardSearchFailure;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.geo.GeoDistance;
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.text.StringAndBytesText;
import org.elasticsearch.common.text.Text;
import org.elasticsearch.common.unit.DistanceUnit;

@ -52,6 +54,7 @@ import org.elasticsearch.search.sort.ScriptSortBuilder;
import org.elasticsearch.search.sort.SortBuilders;
import org.elasticsearch.search.sort.SortOrder;
import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.test.VersionUtils;
import org.elasticsearch.test.junit.annotations.TestLogging;
import org.hamcrest.Matchers;

@ -96,6 +99,7 @@ import static org.hamcrest.Matchers.not;
import static org.hamcrest.Matchers.notNullValue;
import static org.hamcrest.Matchers.nullValue;

import static org.apache.lucene.util.GeoUtils.TOLERANCE;

/**
 *
@ -851,7 +855,7 @@ public class SimpleSortTests extends ESIntegTestCase {
assertThat(searchResponse.getHits().getTotalHits(), equalTo(20l));
for (int i = 0; i < 10; i++) {
assertThat("res: " + i + " id: " + searchResponse.getHits().getAt(i).getId(), (Double) searchResponse.getHits().getAt(i).field("min").value(), equalTo((double) i));
assertThat("res: " + i + " id: " + searchResponse.getHits().getAt(i).getId(), (Double) searchResponse.getHits().getAt(i).field("min").value(), closeTo((double) i, TOLERANCE));
}
}
@ -1732,7 +1736,9 @@ public class SimpleSortTests extends ESIntegTestCase {
* |___________________________
* 1 2 3 4 5 6 7
*/
assertAcked(prepareCreate("index").addMapping("type", "location", "type=geo_point"));
Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT);
Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
assertAcked(prepareCreate("index").setSettings(settings).addMapping("type", "location", "type=geo_point"));
XContentBuilder d1Builder = jsonBuilder();
GeoPoint[] d1Points = {new GeoPoint(3, 2), new GeoPoint(4, 1)};
createShuffeldJSONArray(d1Builder, d1Points);
@ -1761,32 +1767,32 @@ public class SimpleSortTests extends ESIntegTestCase {
.addSort(new GeoDistanceSortBuilder("location").points(q).sortMode("min").order(SortOrder.ASC).geoDistance(GeoDistance.PLANE).unit(DistanceUnit.KILOMETERS))
.execute().actionGet();
assertOrderedSearchHits(searchResponse, "d1", "d2");
assertThat(searchResponse.getHits().getAt(0).getSortValues()[0], equalTo(GeoDistance.PLANE.calculate(2, 2, 3, 2, DistanceUnit.KILOMETERS)));
assertThat(searchResponse.getHits().getAt(1).getSortValues()[0], equalTo(GeoDistance.PLANE.calculate(2, 1, 5, 1, DistanceUnit.KILOMETERS)));
assertThat((Double)searchResponse.getHits().getAt(0).getSortValues()[0], closeTo(GeoDistance.PLANE.calculate(2, 2, 3, 2, DistanceUnit.KILOMETERS), 0.01d));
assertThat((Double)searchResponse.getHits().getAt(1).getSortValues()[0], closeTo(GeoDistance.PLANE.calculate(2, 1, 5, 1, DistanceUnit.KILOMETERS), 0.01d));

searchResponse = client().prepareSearch()
.setQuery(matchAllQuery())
.addSort(new GeoDistanceSortBuilder("location").points(q).sortMode("min").order(SortOrder.DESC).geoDistance(GeoDistance.PLANE).unit(DistanceUnit.KILOMETERS))
.execute().actionGet();
assertOrderedSearchHits(searchResponse, "d2", "d1");
assertThat(searchResponse.getHits().getAt(0).getSortValues()[0], equalTo(GeoDistance.PLANE.calculate(2, 1, 5, 1, DistanceUnit.KILOMETERS)));
assertThat(searchResponse.getHits().getAt(1).getSortValues()[0], equalTo(GeoDistance.PLANE.calculate(2, 2, 3, 2, DistanceUnit.KILOMETERS)));
assertThat((Double)searchResponse.getHits().getAt(0).getSortValues()[0], closeTo(GeoDistance.PLANE.calculate(2, 1, 5, 1, DistanceUnit.KILOMETERS), 0.01d));
assertThat((Double)searchResponse.getHits().getAt(1).getSortValues()[0], closeTo(GeoDistance.PLANE.calculate(2, 2, 3, 2, DistanceUnit.KILOMETERS), 0.01d));

searchResponse = client().prepareSearch()
.setQuery(matchAllQuery())
.addSort(new GeoDistanceSortBuilder("location").points(q).sortMode("max").order(SortOrder.ASC).geoDistance(GeoDistance.PLANE).unit(DistanceUnit.KILOMETERS))
.execute().actionGet();
assertOrderedSearchHits(searchResponse, "d1", "d2");
assertThat(searchResponse.getHits().getAt(0).getSortValues()[0], equalTo(GeoDistance.PLANE.calculate(2, 2, 4, 1, DistanceUnit.KILOMETERS)));
assertThat(searchResponse.getHits().getAt(1).getSortValues()[0], equalTo(GeoDistance.PLANE.calculate(2, 1, 6, 2, DistanceUnit.KILOMETERS)));
assertThat((Double)searchResponse.getHits().getAt(0).getSortValues()[0], closeTo(GeoDistance.PLANE.calculate(2, 2, 4, 1, DistanceUnit.KILOMETERS), 0.01d));
assertThat((Double)searchResponse.getHits().getAt(1).getSortValues()[0], closeTo(GeoDistance.PLANE.calculate(2, 1, 6, 2, DistanceUnit.KILOMETERS), 0.01d));

searchResponse = client().prepareSearch()
.setQuery(matchAllQuery())
.addSort(new GeoDistanceSortBuilder("location").points(q).sortMode("max").order(SortOrder.DESC).geoDistance(GeoDistance.PLANE).unit(DistanceUnit.KILOMETERS))
.execute().actionGet();
assertOrderedSearchHits(searchResponse, "d2", "d1");
assertThat(searchResponse.getHits().getAt(0).getSortValues()[0], equalTo(GeoDistance.PLANE.calculate(2, 1, 6, 2, DistanceUnit.KILOMETERS)));
assertThat(searchResponse.getHits().getAt(1).getSortValues()[0], equalTo(GeoDistance.PLANE.calculate(2, 2, 4, 1, DistanceUnit.KILOMETERS)));
assertThat((Double)searchResponse.getHits().getAt(0).getSortValues()[0], closeTo(GeoDistance.PLANE.calculate(2, 1, 6, 2, DistanceUnit.KILOMETERS), 0.01d));
assertThat((Double)searchResponse.getHits().getAt(1).getSortValues()[0], closeTo(GeoDistance.PLANE.calculate(2, 2, 4, 1, DistanceUnit.KILOMETERS), 0.01d));
}

protected void createShuffeldJSONArray(XContentBuilder builder, GeoPoint[] pointsArray) throws IOException {
@ -1814,7 +1820,9 @@ public class SimpleSortTests extends ESIntegTestCase {
* |______________________
* 1 2 3 4 5 6
*/
assertAcked(prepareCreate("index").addMapping("type", "location", "type=geo_point"));
Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT);
Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
assertAcked(prepareCreate("index").setSettings(settings).addMapping("type", "location", "type=geo_point"));
XContentBuilder d1Builder = jsonBuilder();
GeoPoint[] d1Points = {new GeoPoint(2.5, 1), new GeoPoint(2.75, 2), new GeoPoint(3, 3), new GeoPoint(3.25, 4)};
createShuffeldJSONArray(d1Builder, d1Points);
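Across these hunks the geo distance assertions move from exact equalTo comparisons to closeTo with an explicit tolerance, and the sort test compares against org.apache.lucene.util.GeoUtils.TOLERANCE. The likely reason, consistent with the commit description, is that the Lucene 5.4 GeoPointField stores coordinates in a quantized, morton-encoded form, so distances recomputed from decoded points differ from the exact doubles by a small amount. A minimal illustration; the numbers are made up, not taken from the tests:

    import static org.hamcrest.MatcherAssert.assertThat;
    import static org.hamcrest.Matchers.closeTo;

    public class CloseToSketch {
        public static void main(String[] args) {
            double exact   = 293.8915869904107;  // distance computed from the original doubles
            double decoded = 293.8915869241838;  // distance recomputed from quantized coordinates
            // equalTo(exact) would fail on the decoded value; a small tolerance absorbs
            // the loss introduced by the geo_point encoding.
            assertThat(decoded, closeTo(exact, 0.01d));
        }
    }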