Update GeoPoint FieldData for GeoPointV2

This commit adds an abstraction layer to GeoPoint FieldData to support the new Lucene 5.4 GeoPointField encoding while maintaining backwards compatibility with the legacy format.
This commit is contained in:
Nicholas Knize 2015-10-28 16:14:06 -05:00
parent d0853e9253
commit 3037970c33
16 changed files with 755 additions and 104 deletions

View File

@ -28,8 +28,8 @@ import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.fielddata.plain.BytesBinaryDVIndexFieldData;
import org.elasticsearch.index.fielddata.plain.DisabledIndexFieldData;
import org.elasticsearch.index.fielddata.plain.DocValuesIndexFieldData;
import org.elasticsearch.index.fielddata.plain.GeoPointBinaryDVIndexFieldData;
import org.elasticsearch.index.fielddata.plain.GeoPointDoubleArrayIndexFieldData;
import org.elasticsearch.index.fielddata.plain.AbstractGeoPointDVIndexFieldData;
import org.elasticsearch.index.fielddata.plain.GeoPointArrayIndexFieldData;
import org.elasticsearch.index.fielddata.plain.IndexIndexFieldData;
import org.elasticsearch.index.fielddata.plain.PagedBytesIndexFieldData;
import org.elasticsearch.index.fielddata.plain.ParentChildIndexFieldData;
@ -85,7 +85,7 @@ public class IndexFieldDataService extends AbstractIndexComponent implements Clo
buildersByTypeBuilder.put("short", MISSING_DOC_VALUES_BUILDER);
buildersByTypeBuilder.put("int", MISSING_DOC_VALUES_BUILDER);
buildersByTypeBuilder.put("long", MISSING_DOC_VALUES_BUILDER);
buildersByTypeBuilder.put("geo_point", new GeoPointDoubleArrayIndexFieldData.Builder());
buildersByTypeBuilder.put("geo_point", new GeoPointArrayIndexFieldData.Builder());
buildersByTypeBuilder.put(ParentFieldMapper.NAME, new ParentChildIndexFieldData.Builder());
buildersByTypeBuilder.put(IndexFieldMapper.NAME, new IndexIndexFieldData.Builder());
buildersByTypeBuilder.put("binary", new DisabledIndexFieldData.Builder());
@ -101,7 +101,7 @@ public class IndexFieldDataService extends AbstractIndexComponent implements Clo
.put("short", new DocValuesIndexFieldData.Builder().numericType(IndexNumericFieldData.NumericType.SHORT))
.put("int", new DocValuesIndexFieldData.Builder().numericType(IndexNumericFieldData.NumericType.INT))
.put("long", new DocValuesIndexFieldData.Builder().numericType(IndexNumericFieldData.NumericType.LONG))
.put("geo_point", new GeoPointBinaryDVIndexFieldData.Builder())
.put("geo_point", new AbstractGeoPointDVIndexFieldData.Builder())
.put("binary", new BytesBinaryDVIndexFieldData.Builder())
.put(BooleanFieldMapper.CONTENT_TYPE, new DocValuesIndexFieldData.Builder().numericType(IndexNumericFieldData.NumericType.BOOLEAN))
.immutableMap();
@ -129,8 +129,8 @@ public class IndexFieldDataService extends AbstractIndexComponent implements Clo
.put(Tuple.tuple("long", DOC_VALUES_FORMAT), new DocValuesIndexFieldData.Builder().numericType(IndexNumericFieldData.NumericType.LONG))
.put(Tuple.tuple("long", DISABLED_FORMAT), new DisabledIndexFieldData.Builder())
.put(Tuple.tuple("geo_point", ARRAY_FORMAT), new GeoPointDoubleArrayIndexFieldData.Builder())
.put(Tuple.tuple("geo_point", DOC_VALUES_FORMAT), new GeoPointBinaryDVIndexFieldData.Builder())
.put(Tuple.tuple("geo_point", ARRAY_FORMAT), new GeoPointArrayIndexFieldData.Builder())
.put(Tuple.tuple("geo_point", DOC_VALUES_FORMAT), new AbstractGeoPointDVIndexFieldData.Builder())
.put(Tuple.tuple("geo_point", DISABLED_FORMAT), new DisabledIndexFieldData.Builder())
.put(Tuple.tuple("binary", DOC_VALUES_FORMAT), new BytesBinaryDVIndexFieldData.Builder())

View File

@ -30,7 +30,7 @@ import java.util.Collections;
/**
*/
abstract class AbstractAtomicGeoPointFieldData implements AtomicGeoPointFieldData {
public abstract class AbstractAtomicGeoPointFieldData implements AtomicGeoPointFieldData {
@Override
public final SortedBinaryDocValues getBytesValues() {

View File

@ -21,11 +21,16 @@ package org.elasticsearch.index.fielddata.plain;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.DocValues;
import org.elasticsearch.Version;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.fielddata.*;
import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested;
import org.elasticsearch.index.fielddata.AtomicGeoPointFieldData;
import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.IndexFieldDataCache;
import org.elasticsearch.index.fielddata.IndexGeoPointFieldData;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MappedFieldType.Names;
import org.elasticsearch.index.mapper.MapperService;
@ -34,9 +39,9 @@ import org.elasticsearch.search.MultiValueMode;
import java.io.IOException;
public class GeoPointBinaryDVIndexFieldData extends DocValuesIndexFieldData implements IndexGeoPointFieldData {
public abstract class AbstractGeoPointDVIndexFieldData extends DocValuesIndexFieldData implements IndexGeoPointFieldData {
public GeoPointBinaryDVIndexFieldData(Index index, Names fieldNames, FieldDataType fieldDataType) {
AbstractGeoPointDVIndexFieldData(Index index, Names fieldNames, FieldDataType fieldDataType) {
super(index, fieldNames, fieldDataType);
}
@ -45,29 +50,43 @@ public class GeoPointBinaryDVIndexFieldData extends DocValuesIndexFieldData impl
throw new IllegalArgumentException("can't sort on geo_point field without using specific sorting feature, like geo_distance");
}
@Override
public AtomicGeoPointFieldData load(LeafReaderContext context) {
try {
return new GeoPointBinaryDVAtomicFieldData(DocValues.getBinary(context.reader(), fieldNames.indexName()));
} catch (IOException e) {
throw new IllegalStateException("Cannot load doc values", e);
/**
* Lucene 5.4 GeoPointFieldType
*/
public static class GeoPointDVIndexFieldData extends AbstractGeoPointDVIndexFieldData {
final boolean indexCreatedBefore2x;
public GeoPointDVIndexFieldData(Index index, Names fieldNames, FieldDataType fieldDataType, final boolean indexCreatedBefore2x) {
super(index, fieldNames, fieldDataType);
this.indexCreatedBefore2x = indexCreatedBefore2x;
}
@Override
public AtomicGeoPointFieldData load(LeafReaderContext context) {
try {
if (indexCreatedBefore2x) {
return new GeoPointLegacyDVAtomicFieldData(DocValues.getBinary(context.reader(), fieldNames.indexName()));
}
return new GeoPointDVAtomicFieldData(DocValues.getSortedNumeric(context.reader(), fieldNames.indexName()));
} catch (IOException e) {
throw new IllegalStateException("Cannot load doc values", e);
}
}
@Override
public AtomicGeoPointFieldData loadDirect(LeafReaderContext context) throws Exception {
return load(context);
}
}
@Override
public AtomicGeoPointFieldData loadDirect(LeafReaderContext context) throws Exception {
return load(context);
}
public static class Builder implements IndexFieldData.Builder {
@Override
public IndexFieldData<?> build(IndexSettings indexSettings, MappedFieldType fieldType, IndexFieldDataCache cache,
CircuitBreakerService breakerService, MapperService mapperService) {
// Ignore breaker
final Names fieldNames = fieldType.names();
return new GeoPointBinaryDVIndexFieldData(indexSettings.getIndex(), fieldNames, fieldType.fieldDataType());
return new GeoPointDVIndexFieldData(indexSettings.getIndex(), fieldType.names(), fieldType.fieldDataType(),
// norelease cut over to .before(Version.V_2_2_0) once GeoPointFieldV2 is completely merged
indexSettings.getIndexVersionCreated().onOrBefore(Version.CURRENT));
}
}
}
}

View File

@ -22,6 +22,7 @@ package org.elasticsearch.index.fielddata.plain;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.BytesRefIterator;
import org.apache.lucene.util.CharsRefBuilder;
import org.apache.lucene.util.NumericUtils;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.index.IndexSettings;
@ -33,15 +34,34 @@ import org.elasticsearch.search.MultiValueMode;
import java.io.IOException;
abstract class AbstractIndexGeoPointFieldData extends AbstractIndexFieldData<AtomicGeoPointFieldData> implements IndexGeoPointFieldData {
protected abstract static class BaseGeoPointTermsEnum {
protected final BytesRefIterator termsEnum;
protected static class GeoPointEnum {
protected BaseGeoPointTermsEnum(BytesRefIterator termsEnum) {
this.termsEnum = termsEnum;
}
}
private final BytesRefIterator termsEnum;
protected static class GeoPointTermsEnum extends BaseGeoPointTermsEnum {
protected GeoPointTermsEnum(BytesRefIterator termsEnum) {
super(termsEnum);
}
public Long next() throws IOException {
final BytesRef term = termsEnum.next();
if (term == null) {
return null;
}
return NumericUtils.prefixCodedToLong(term);
}
}
protected static class GeoPointTermsEnumLegacy extends BaseGeoPointTermsEnum {
private final GeoPoint next;
private final CharsRefBuilder spare;
protected GeoPointEnum(BytesRefIterator termsEnum) {
this.termsEnum = termsEnum;
protected GeoPointTermsEnumLegacy(BytesRefIterator termsEnum) {
super(termsEnum);
next = new GeoPoint();
spare = new CharsRefBuilder();
}
@ -67,7 +87,6 @@ abstract class AbstractIndexGeoPointFieldData extends AbstractIndexFieldData<Ato
final double lon = Double.parseDouble(new String(spare.chars(), commaIndex + 1, spare.length() - (commaIndex + 1)));
return next.reset(lat, lon);
}
}
public AbstractIndexGeoPointFieldData(IndexSettings indexSettings, Names fieldNames, FieldDataType fieldDataType, IndexFieldDataCache cache) {
@ -83,5 +102,4 @@ abstract class AbstractIndexGeoPointFieldData extends AbstractIndexFieldData<Ato
protected AtomicGeoPointFieldData empty(int maxDoc) {
return AbstractAtomicGeoPointFieldData.empty(maxDoc);
}
}

View File

@ -0,0 +1,148 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.fielddata.plain;
import org.apache.lucene.index.DocValues;
import org.apache.lucene.index.RandomAccessOrds;
import org.apache.lucene.index.SortedDocValues;
import org.apache.lucene.util.Accountable;
import org.apache.lucene.util.Accountables;
import org.apache.lucene.util.BitSet;
import org.apache.lucene.util.GeoUtils;
import org.apache.lucene.util.RamUsageEstimator;
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.util.LongArray;
import org.elasticsearch.index.fielddata.FieldData;
import org.elasticsearch.index.fielddata.GeoPointValues;
import org.elasticsearch.index.fielddata.MultiGeoPointValues;
import org.elasticsearch.index.fielddata.ordinals.Ordinals;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
/**
*
*/
/**
 * Atomic field data for GeoPoints stored as long-encoded values in a {@link LongArray}
 * (the Lucene 5.4 GeoPointField encoding, decoded via {@code GeoPoint#resetFromIndexHash}).
 * Two layouts are provided: {@link WithOrdinals} for multi-valued/ordinal-mapped fields
 * and {@link Single} for single-valued fields addressed directly by docID.
 */
public abstract class GeoPointArrayAtomicFieldData extends AbstractAtomicGeoPointFieldData {
    @Override
    public void close() {
        // nothing to release — backing arrays come from the non-recycling BigArrays instance
    }

    /**
     * Ordinal-mapped layout: {@code indexedPoints} holds the deduplicated encoded terms,
     * and {@code ordinals} maps each document to its ordinal(s) in that array.
     */
    static class WithOrdinals extends GeoPointArrayAtomicFieldData {
        // deduplicated long-encoded points, addressed by ordinal
        private final LongArray indexedPoints;
        // per-document ordinal mapping into indexedPoints
        private final Ordinals ordinals;
        private final int maxDoc;

        public WithOrdinals(LongArray indexedPoints, Ordinals ordinals, int maxDoc) {
            super();
            this.indexedPoints = indexedPoints;
            this.ordinals = ordinals;
            this.maxDoc = maxDoc;
        }

        @Override
        public long ramBytesUsed() {
            // NOTE(review): does not include ordinals.ramBytesUsed() — confirm this is intentional
            return RamUsageEstimator.NUM_BYTES_INT + indexedPoints.ramBytesUsed();
        }

        @Override
        public Collection<Accountable> getChildResources() {
            List<Accountable> resources = new ArrayList<>();
            resources.add(Accountables.namedAccountable("indexedPoints", indexedPoints));
            return Collections.unmodifiableList(resources);
        }

        @Override
        public MultiGeoPointValues getGeoPointValues() {
            final RandomAccessOrds ords = ordinals.ordinals();
            final SortedDocValues singleOrds = DocValues.unwrapSingleton(ords);
            // single mutable GeoPoint shared by all lookups — callers must not retain it
            final GeoPoint point = new GeoPoint();
            if (singleOrds != null) {
                // fast path: at most one value per document
                final GeoPointValues values = new GeoPointValues() {
                    @Override
                    public GeoPoint get(int docID) {
                        final int ord = singleOrds.getOrd(docID);
                        if (ord >= 0) {
                            return point.resetFromIndexHash(indexedPoints.get(ord));
                        }
                        // todo: same issue as in ParentChildIndexFieldData, handle issue upstream?
                        return null;
                    }
                };
                return FieldData.singleton(values, DocValues.docsWithValue(singleOrds, maxDoc));
            }
            // multi-valued path: resolve each value through the ordinal map
            return new MultiGeoPointValues() {
                @Override
                public GeoPoint valueAt(int index) {
                    return point.resetFromIndexHash(indexedPoints.get(ords.ordAt(index)));
                }

                @Override
                public void setDocument(int docId) {
                    ords.setDocument(docId);
                }

                @Override
                public int count() {
                    return ords.cardinality();
                }
            };
        }
    }

    /**
     * Single-valued layout: one encoded point per document, addressed by docID;
     * {@code set} marks which documents actually have a value (null means all do).
     */
    public static class Single extends GeoPointArrayAtomicFieldData {
        // one long-encoded point per document, indexed by docID
        private final LongArray indexedPoint;
        // docs-with-value bitset; null when every document has a value
        private final BitSet set;

        public Single(LongArray indexedPoint, BitSet set) {
            this.indexedPoint = indexedPoint;
            this.set = set;
        }

        @Override
        public long ramBytesUsed() {
            return RamUsageEstimator.NUM_BYTES_INT + indexedPoint.ramBytesUsed()
                    + (set == null ? 0 : set.ramBytesUsed());
        }

        @Override
        public Collection<Accountable> getChildResources() {
            List<Accountable> resources = new ArrayList<>();
            resources.add(Accountables.namedAccountable("indexedPoints", indexedPoint));
            if (set != null) {
                resources.add(Accountables.namedAccountable("missing bitset", set));
            }
            return Collections.unmodifiableList(resources);
        }

        @Override
        public MultiGeoPointValues getGeoPointValues() {
            // single mutable GeoPoint shared by all lookups — callers must not retain it
            final GeoPoint point = new GeoPoint();
            final GeoPointValues values = new GeoPointValues() {
                @Override
                public GeoPoint get(int docID) {
                    return point.resetFromIndexHash(indexedPoint.get(docID));
                }
            };
            return FieldData.singleton(values, set);
        }
    }
}

View File

@ -16,6 +16,7 @@
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.fielddata.plain;
import org.apache.lucene.index.LeafReader;
@ -23,12 +24,18 @@ import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.RandomAccessOrds;
import org.apache.lucene.index.Terms;
import org.apache.lucene.util.BitSet;
import org.elasticsearch.Version;
import org.elasticsearch.common.breaker.CircuitBreaker;
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.common.util.DoubleArray;
import org.elasticsearch.common.util.LongArray;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.fielddata.*;
import org.elasticsearch.index.fielddata.AtomicGeoPointFieldData;
import org.elasticsearch.index.fielddata.FieldData;
import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.IndexFieldDataCache;
import org.elasticsearch.index.fielddata.ordinals.Ordinals;
import org.elasticsearch.index.fielddata.ordinals.OrdinalsBuilder;
import org.elasticsearch.index.mapper.MappedFieldType;
@ -36,24 +43,28 @@ import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.indices.breaker.CircuitBreakerService;
/**
* Loads FieldData for an array of GeoPoints supporting both long encoded points and backward compatible double arrays
*/
public class GeoPointDoubleArrayIndexFieldData extends AbstractIndexGeoPointFieldData {
public class GeoPointArrayIndexFieldData extends AbstractIndexGeoPointFieldData {
private final CircuitBreakerService breakerService;
private final boolean indexCreatedBefore22;
public static class Builder implements IndexFieldData.Builder {
@Override
public IndexFieldData<?> build(IndexSettings indexSettings, MappedFieldType fieldType, IndexFieldDataCache cache,
CircuitBreakerService breakerService, MapperService mapperService) {
return new GeoPointDoubleArrayIndexFieldData(indexSettings, fieldType.names(), fieldType.fieldDataType(), cache, breakerService);
return new GeoPointArrayIndexFieldData(indexSettings, fieldType.names(), fieldType.fieldDataType(), cache,
// norelease change to .before(Version.V_2_2_0) once GeoPointFieldV2 is completely merged
breakerService, indexSettings.getIndexVersionCreated().onOrBefore(Version.CURRENT));
}
}
public GeoPointDoubleArrayIndexFieldData(IndexSettings indexSettings, MappedFieldType.Names fieldNames,
FieldDataType fieldDataType, IndexFieldDataCache cache, CircuitBreakerService breakerService) {
public GeoPointArrayIndexFieldData(IndexSettings indexSettings, MappedFieldType.Names fieldNames,
FieldDataType fieldDataType, IndexFieldDataCache cache, CircuitBreakerService breakerService,
final boolean indexCreatedBefore22) {
super(indexSettings, fieldNames, fieldDataType, cache);
this.breakerService = breakerService;
this.indexCreatedBefore22 = indexCreatedBefore22;
}
@Override
@ -69,12 +80,66 @@ public class GeoPointDoubleArrayIndexFieldData extends AbstractIndexGeoPointFiel
estimator.afterLoad(null, data.ramBytesUsed());
return data;
}
return (indexCreatedBefore22 == true) ? loadLegacyFieldData(reader, estimator, terms, data) : loadFieldData22(reader, estimator, terms, data);
}
/**
* long encoded geopoint field data
*/
private AtomicGeoPointFieldData loadFieldData22(LeafReader reader, NonEstimatingEstimator estimator, Terms terms,
AtomicGeoPointFieldData data) throws Exception {
LongArray indexedPoints = BigArrays.NON_RECYCLING_INSTANCE.newLongArray(128);
final float acceptableTransientOverheadRatio = fieldDataType.getSettings().getAsFloat("acceptable_transient_overhead_ratio",
OrdinalsBuilder.DEFAULT_ACCEPTABLE_OVERHEAD_RATIO);
boolean success = false;
try (OrdinalsBuilder builder = new OrdinalsBuilder(reader.maxDoc(), acceptableTransientOverheadRatio)) {
final GeoPointTermsEnum iter = new GeoPointTermsEnum(builder.buildFromTerms(OrdinalsBuilder.wrapNumeric64Bit(terms.iterator())));
Long hashedPoint;
long numTerms = 0;
while ((hashedPoint = iter.next()) != null) {
indexedPoints = BigArrays.NON_RECYCLING_INSTANCE.resize(indexedPoints, numTerms + 1);
indexedPoints.set(numTerms++, hashedPoint);
}
indexedPoints = BigArrays.NON_RECYCLING_INSTANCE.resize(indexedPoints, numTerms);
Ordinals build = builder.build(fieldDataType.getSettings());
RandomAccessOrds ordinals = build.ordinals();
if (!(FieldData.isMultiValued(ordinals) || CommonSettings.getMemoryStorageHint(fieldDataType) == CommonSettings
.MemoryStorageFormat.ORDINALS)) {
int maxDoc = reader.maxDoc();
LongArray sIndexedPoint = BigArrays.NON_RECYCLING_INSTANCE.newLongArray(reader.maxDoc());
for (int i=0; i<maxDoc; ++i) {
ordinals.setDocument(i);
long nativeOrdinal = ordinals.nextOrd();
if (nativeOrdinal != RandomAccessOrds.NO_MORE_ORDS) {
sIndexedPoint.set(i, indexedPoints.get(nativeOrdinal));
}
}
BitSet set = builder.buildDocsWithValuesSet();
data = new GeoPointArrayAtomicFieldData.Single(sIndexedPoint, set);
} else {
data = new GeoPointArrayAtomicFieldData.WithOrdinals(indexedPoints, build, reader.maxDoc());
}
success = true;
return data;
} finally {
if (success) {
estimator.afterLoad(null, data.ramBytesUsed());
}
}
}
/**
* Backward compatibility support for legacy lat/lon double arrays
*/
private AtomicGeoPointFieldData loadLegacyFieldData(LeafReader reader, NonEstimatingEstimator estimator, Terms terms,
AtomicGeoPointFieldData data) throws Exception {
DoubleArray lat = BigArrays.NON_RECYCLING_INSTANCE.newDoubleArray(128);
DoubleArray lon = BigArrays.NON_RECYCLING_INSTANCE.newDoubleArray(128);
final float acceptableTransientOverheadRatio = fieldDataType.getSettings().getAsFloat("acceptable_transient_overhead_ratio", OrdinalsBuilder.DEFAULT_ACCEPTABLE_OVERHEAD_RATIO);
boolean success = false;
try (OrdinalsBuilder builder = new OrdinalsBuilder(terms.size(), reader.maxDoc(), acceptableTransientOverheadRatio)) {
final GeoPointEnum iter = new GeoPointEnum(builder.buildFromTerms(terms.iterator()));
final GeoPointTermsEnumLegacy iter = new GeoPointTermsEnumLegacy(builder.buildFromTerms(terms.iterator()));
GeoPoint point;
long numTerms = 0;
while ((point = iter.next()) != null) {
@ -102,9 +167,9 @@ public class GeoPointDoubleArrayIndexFieldData extends AbstractIndexGeoPointFiel
}
}
BitSet set = builder.buildDocsWithValuesSet();
data = new GeoPointDoubleArrayAtomicFieldData.Single(sLon, sLat, set);
data = new GeoPointArrayLegacyAtomicFieldData.Single(sLon, sLat, set);
} else {
data = new GeoPointDoubleArrayAtomicFieldData.WithOrdinals(lon, lat, build, reader.maxDoc());
data = new GeoPointArrayLegacyAtomicFieldData.WithOrdinals(lon, lat, build, reader.maxDoc());
}
success = true;
return data;
@ -112,8 +177,6 @@ public class GeoPointDoubleArrayIndexFieldData extends AbstractIndexGeoPointFiel
if (success) {
estimator.afterLoad(null, data.ramBytesUsed());
}
}
}
}

View File

@ -39,13 +39,13 @@ import java.util.List;
/**
*/
public abstract class GeoPointDoubleArrayAtomicFieldData extends AbstractAtomicGeoPointFieldData {
public abstract class GeoPointArrayLegacyAtomicFieldData extends AbstractAtomicGeoPointFieldData {
@Override
public void close() {
}
static class WithOrdinals extends GeoPointDoubleArrayAtomicFieldData {
static class WithOrdinals extends GeoPointArrayLegacyAtomicFieldData {
private final DoubleArray lon, lat;
private final Ordinals ordinals;
@ -63,7 +63,7 @@ public abstract class GeoPointDoubleArrayAtomicFieldData extends AbstractAtomicG
public long ramBytesUsed() {
return RamUsageEstimator.NUM_BYTES_INT/*size*/ + lon.ramBytesUsed() + lat.ramBytesUsed();
}
@Override
public Collection<Accountable> getChildResources() {
List<Accountable> resources = new ArrayList<>();
@ -117,7 +117,7 @@ public abstract class GeoPointDoubleArrayAtomicFieldData extends AbstractAtomicG
/**
* Assumes unset values are marked in bitset, and docId is used as the index to the value array.
*/
public static class Single extends GeoPointDoubleArrayAtomicFieldData {
public static class Single extends GeoPointArrayLegacyAtomicFieldData {
private final DoubleArray lon, lat;
private final BitSet set;
@ -130,9 +130,9 @@ public abstract class GeoPointDoubleArrayAtomicFieldData extends AbstractAtomicG
@Override
public long ramBytesUsed() {
return RamUsageEstimator.NUM_BYTES_INT/*size*/ + lon.ramBytesUsed() + lat.ramBytesUsed() + (set == null ? 0 : set.ramBytesUsed());
return RamUsageEstimator.NUM_BYTES_INT + lon.ramBytesUsed() + lat.ramBytesUsed() + (set == null ? 0 : set.ramBytesUsed());
}
@Override
public Collection<Accountable> getChildResources() {
List<Accountable> resources = new ArrayList<>();

View File

@ -0,0 +1,90 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.fielddata.plain;
import org.apache.lucene.index.SortedNumericDocValues;
import org.apache.lucene.util.Accountable;
import org.apache.lucene.util.ArrayUtil;
import org.apache.lucene.util.RamUsageEstimator;
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.index.fielddata.MultiGeoPointValues;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
/**
 * Doc-values backed atomic GeoPoint field data for the Lucene 5.4 long-encoded
 * GeoPointField format. Values are decoded lazily per document from sorted
 * numeric doc values via {@code GeoPoint#resetFromIndexHash}.
 */
final class GeoPointDVAtomicFieldData extends AbstractAtomicGeoPointFieldData {

    private final SortedNumericDocValues dv;

    GeoPointDVAtomicFieldData(SortedNumericDocValues values) {
        super();
        this.dv = values;
    }

    @Override
    public long ramBytesUsed() {
        // Lucene does not expose memory usage for doc values
        return 0;
    }

    @Override
    public Collection<Accountable> getChildResources() {
        return Collections.emptyList();
    }

    @Override
    public void close() {
        // nothing to release
    }

    @Override
    public MultiGeoPointValues getGeoPointValues() {
        return new MultiGeoPointValues() {
            // reusable decode buffer, grown on demand and never shrunk
            private GeoPoint[] spare = new GeoPoint[0];
            private int docCount = 0;

            @Override
            public void setDocument(int docId) {
                dv.setDocument(docId);
                docCount = dv.count();
                if (docCount > spare.length) {
                    // grow with oversize and pre-fill the new tail with reusable instances
                    final int oldLength = spare.length;
                    spare = Arrays.copyOf(spare, ArrayUtil.oversize(docCount, RamUsageEstimator.NUM_BYTES_OBJECT_REF));
                    for (int slot = oldLength; slot < spare.length; ++slot) {
                        spare[slot] = new GeoPoint();
                    }
                }
                // decode every value of the current document into the buffer
                for (int slot = 0; slot < docCount; ++slot) {
                    spare[slot].resetFromIndexHash(dv.valueAt(slot));
                }
            }

            @Override
            public int count() {
                return docCount;
            }

            @Override
            public GeoPoint valueAt(int index) {
                return spare[index];
            }
        };
    }
}

View File

@ -32,14 +32,14 @@ import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
final class GeoPointBinaryDVAtomicFieldData extends AbstractAtomicGeoPointFieldData {
final class GeoPointLegacyDVAtomicFieldData extends AbstractAtomicGeoPointFieldData {
private static final int COORDINATE_SIZE = 8; // number of bytes per coordinate
private static final int GEOPOINT_SIZE = COORDINATE_SIZE * 2; // lat + lon
private final BinaryDocValues values;
GeoPointBinaryDVAtomicFieldData(BinaryDocValues values) {
GeoPointLegacyDVAtomicFieldData(BinaryDocValues values) {
super();
this.values = values;
}
@ -48,7 +48,7 @@ final class GeoPointBinaryDVAtomicFieldData extends AbstractAtomicGeoPointFieldD
public long ramBytesUsed() {
return 0; // not exposed by Lucene
}
@Override
public Collection<Accountable> getChildResources() {
return Collections.emptyList();
@ -97,5 +97,4 @@ final class GeoPointBinaryDVAtomicFieldData extends AbstractAtomicGeoPointFieldD
};
}
}

View File

@ -37,7 +37,6 @@ import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.Mapper.BuilderContext;
import org.elasticsearch.index.mapper.MapperBuilders;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.geo.BaseGeoPointFieldMapper;
import org.elasticsearch.index.mapper.geo.GeoPointFieldMapper;
import org.elasticsearch.index.mapper.geo.GeoPointFieldMapperLegacy;
import org.elasticsearch.index.mapper.internal.ParentFieldMapper;
@ -95,7 +94,6 @@ public abstract class AbstractFieldDataTestCase extends ESSingleNodeTestCase {
} else if (type.getType().equals("byte")) {
fieldType = MapperBuilders.byteField(fieldName).docValues(docValues).fieldDataSettings(type.getSettings()).build(context).fieldType();
} else if (type.getType().equals("geo_point")) {
BaseGeoPointFieldMapper.Builder builder;
// norelease update to .before(Version.V_2_2_0 once GeoPointFieldV2 is fully merged
if (indexService.getIndexSettings().getIndexVersionCreated().onOrBefore(Version.CURRENT)) {
fieldType = new GeoPointFieldMapperLegacy.Builder(fieldName).docValues(docValues).fieldDataSettings(type.getSettings()).build(context).fieldType();

View File

@ -0,0 +1,98 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.fielddata;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.GeoPointField;
import org.apache.lucene.document.StringField;
import org.apache.lucene.util.GeoUtils;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.test.VersionUtils;
import static org.elasticsearch.test.geo.RandomShapeGenerator.randomPoint;
import static org.hamcrest.Matchers.*;
/**
*
*/
/**
 * Base test case for GeoPoint field data. Randomizes the index-created version so
 * that both the legacy string-encoded ("lat,lon") format and the Lucene 5.4
 * long-encoded {@code GeoPointField} format are exercised.
 */
public abstract class AbstractGeoFieldDataTestCase extends AbstractFieldDataImplTestCase {
    protected Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT);

    @Override
    protected abstract FieldDataType getFieldDataType();

    /** Field data settings carrying the randomized index-created version. */
    protected Settings.Builder getFieldDataSettings() {
        return Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version);
    }

    /** Creates a random GeoPoint field using the encoding that matches {@link #version}. */
    protected Field randomGeoPointField(String fieldName, Field.Store store) {
        GeoPoint point = randomPoint(random());
        // norelease move to .before(Version.V_2_2_0) once GeoPointV2 is fully merged
        if (version.onOrBefore(Version.CURRENT)) {
            // legacy format: "lat,lon" indexed as a plain string
            return new StringField(fieldName, point.lat() + "," + point.lon(), store);
        }
        return new GeoPointField(fieldName, point.lon(), point.lat(), store);
    }

    @Override
    protected void fillAllMissing() throws Exception {
        // three documents, none carrying a geo_point value
        for (String id : new String[] { "1", "2", "3" }) {
            Document d = new Document();
            d.add(new StringField("_id", id, Field.Store.NO));
            writer.addDocument(d);
        }
    }

    @Override
    public void testSortMultiValuesFields() {
        // sorting on geo_point requires geo_distance, so skip the inherited sort test.
        // bugfix: compare the type string — the original compared a FieldDataType
        // instance to a String, which is never equal, so the assumption never tripped
        assumeFalse("Only test on non geo_point fields", getFieldDataType().getType().equals("geo_point"));
    }

    protected void assertValues(MultiGeoPointValues values, int docId) {
        assertValues(values, docId, false);
    }

    protected void assertMissing(MultiGeoPointValues values, int docId) {
        assertValues(values, docId, true);
    }

    /**
     * Asserts the doc has values iff {@code missing} is false, and that every
     * point lies within the valid latitude/longitude bounds.
     */
    private void assertValues(MultiGeoPointValues values, int docId, boolean missing) {
        values.setDocument(docId);
        int docCount = values.count();
        if (missing) {
            assertThat(docCount, equalTo(0));
        } else {
            assertThat(docCount, greaterThan(0));
            for (int i = 0; i < docCount; ++i) {
                assertThat(values.valueAt(i).lat(), allOf(greaterThanOrEqualTo(GeoUtils.MIN_LAT_INCL), lessThanOrEqualTo(GeoUtils.MAX_LAT_INCL)));
                // bugfix: validate lon() against the longitude bounds (was lat())
                assertThat(values.valueAt(i).lon(), allOf(greaterThanOrEqualTo(GeoUtils.MIN_LON_INCL), lessThanOrEqualTo(GeoUtils.MAX_LON_INCL)));
            }
        }
    }
}

View File

@ -0,0 +1,205 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.fielddata;
import org.apache.lucene.document.*;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.Term;
import org.elasticsearch.index.fielddata.plain.AbstractAtomicGeoPointFieldData;
import static org.hamcrest.Matchers.greaterThanOrEqualTo;
/**
* Basic Unit Test for GeoPointField data
* todo include backcompat testing - see ISSUE #14562
*/
public class GeoFieldDataTests extends AbstractGeoFieldDataTestCase {
    /** Name of the geo_point field every fixture document indexes into. */
    private static final String FIELD_NAME = "value";

    @Override
    protected FieldDataType getFieldDataType() {
        return new FieldDataType("geo_point", getFieldDataSettings());
    }

    /**
     * Builds a document with the given {@code _id} and {@code numPoints} random,
     * non-stored geo_point values on {@link #FIELD_NAME}.
     */
    private Document geoPointDoc(String id, int numPoints) {
        Document d = new Document();
        d.add(new StringField("_id", id, Field.Store.NO));
        for (int i = 0; i < numPoints; ++i) {
            d.add(randomGeoPointField(FIELD_NAME, Field.Store.NO));
        }
        return d;
    }

    /**
     * Refreshes the reader, loads the field data for {@link #FIELD_NAME}, checks its
     * reported RAM usage, and returns the geo_point values accessor.
     */
    private MultiGeoPointValues loadGeoPointValues() throws Exception {
        IndexFieldData indexFieldData = getForField(FIELD_NAME);
        AtomicFieldData fieldData = indexFieldData.load(refreshReader());
        assertThat(fieldData.ramBytesUsed(), greaterThanOrEqualTo(minRamBytesUsed()));
        return ((AbstractAtomicGeoPointFieldData) fieldData).getGeoPointValues();
    }

    @Override
    protected void add2SingleValuedDocumentsAndDeleteOneOfThem() throws Exception {
        Document d = new Document();
        d.add(new StringField("_id", "1", Field.Store.NO));
        // stored value kept intentionally to exercise the stored-field code path
        d.add(randomGeoPointField(FIELD_NAME, Field.Store.YES));
        writer.addDocument(d);
        writer.addDocument(geoPointDoc("2", 1));
        writer.commit();
        writer.deleteDocuments(new Term("_id", "1"));
    }

    @Override
    protected void fillMultiValueWithMissing() throws Exception {
        writer.addDocument(geoPointDoc("1", 2));
        writer.addDocument(geoPointDoc("2", 0)); // missing
        writer.addDocument(geoPointDoc("3", 1));
    }

    @Override
    protected void fillSingleValueAllSet() throws Exception {
        writer.addDocument(geoPointDoc("1", 1));
        writer.addDocument(geoPointDoc("2", 1));
        writer.addDocument(geoPointDoc("3", 1));
    }

    @Override
    protected void fillSingleValueWithMissing() throws Exception {
        writer.addDocument(geoPointDoc("1", 1));
        writer.addDocument(geoPointDoc("2", 0)); // missing
        writer.addDocument(geoPointDoc("3", 1));
    }

    @Override
    protected void fillMultiValueAllSet() throws Exception {
        writer.addDocument(geoPointDoc("1", 2));
        writer.addDocument(geoPointDoc("2", 1));
        writer.addDocument(geoPointDoc("3", 1));
    }

    @Override
    protected void fillExtendedMvSet() throws Exception {
        // random number of docs, each with a random (possibly zero) number of values,
        // committed at random points to produce multiple segments
        final int maxDocs = randomInt(10);
        for (int i = 0; i < maxDocs; ++i) {
            writer.addDocument(geoPointDoc(i + "", randomInt(5)));
            if (randomBoolean()) {
                writer.commit();
            }
        }
    }

    @Override
    public void testSingleValueAllSet() throws Exception {
        fillSingleValueAllSet();
        MultiGeoPointValues fieldValues = loadGeoPointValues();
        assertValues(fieldValues, 0);
        assertValues(fieldValues, 1);
        assertValues(fieldValues, 2);
    }

    @Override
    public void testSingleValueWithMissing() throws Exception {
        fillSingleValueWithMissing();
        MultiGeoPointValues fieldValues = loadGeoPointValues();
        assertValues(fieldValues, 0);
        assertMissing(fieldValues, 1);
        assertValues(fieldValues, 2);
    }

    @Override
    public void testMultiValueAllSet() throws Exception {
        fillMultiValueAllSet();
        MultiGeoPointValues fieldValues = loadGeoPointValues();
        assertValues(fieldValues, 0);
        assertValues(fieldValues, 1);
        assertValues(fieldValues, 2);
    }

    @Override
    public void testMultiValueWithMissing() throws Exception {
        fillMultiValueWithMissing();
        MultiGeoPointValues fieldValues = loadGeoPointValues();
        assertValues(fieldValues, 0);
        assertMissing(fieldValues, 1);
        assertValues(fieldValues, 2);
    }
}

View File

@ -183,16 +183,20 @@ public class MissingValueIT extends ESIntegTestCase {
SearchResponse response = client().prepareSearch("idx").addAggregation(geoBounds("bounds").field("non_existing_field").missing("2,1")).get();
assertSearchResponse(response);
GeoBounds bounds = response.getAggregations().get("bounds");
assertEquals(new GeoPoint(2,1), bounds.bottomRight());
assertEquals(new GeoPoint(2,1), bounds.topLeft());
assertThat(bounds.bottomRight().lat(), closeTo(2.0, 1E-5));
assertThat(bounds.bottomRight().lon(), closeTo(1.0, 1E-5));
assertThat(bounds.topLeft().lat(), closeTo(2.0, 1E-5));
assertThat(bounds.topLeft().lon(), closeTo(1.0, 1E-5));
}
public void testGeoBounds() {
SearchResponse response = client().prepareSearch("idx").addAggregation(geoBounds("bounds").field("location").missing("2,1")).get();
assertSearchResponse(response);
GeoBounds bounds = response.getAggregations().get("bounds");
assertEquals(new GeoPoint(1,2), bounds.bottomRight());
assertEquals(new GeoPoint(2,1), bounds.topLeft());
assertThat(bounds.bottomRight().lat(), closeTo(1.0, 1E-5));
assertThat(bounds.bottomRight().lon(), closeTo(2.0, 1E-5));
assertThat(bounds.topLeft().lat(), closeTo(2.0, 1E-5));
assertThat(bounds.topLeft().lon(), closeTo(1.0, 1E-5));
}
public void testGeoCentroid() {

View File

@ -18,8 +18,11 @@
*/
package org.elasticsearch.search.aggregations.bucket;
import org.elasticsearch.Version;
import org.elasticsearch.action.index.IndexRequestBuilder;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.DistanceUnit;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.search.aggregations.Aggregator.SubAggCollectionMode;
@ -28,6 +31,7 @@ import org.elasticsearch.search.aggregations.bucket.range.Range;
import org.elasticsearch.search.aggregations.bucket.range.Range.Bucket;
import org.elasticsearch.search.aggregations.bucket.terms.Terms;
import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.test.VersionUtils;
import org.hamcrest.Matchers;
import java.util.ArrayList;
@ -53,6 +57,7 @@ import static org.hamcrest.core.IsNull.nullValue;
*/
@ESIntegTestCase.SuiteScopeTestCase
public class GeoDistanceIT extends ESIntegTestCase {
private Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT);
private IndexRequestBuilder indexCity(String idx, String name, String... latLons) throws Exception {
XContentBuilder source = jsonBuilder().startObject().field("city", name);
@ -67,7 +72,8 @@ public class GeoDistanceIT extends ESIntegTestCase {
@Override
public void setupSuiteScopeCluster() throws Exception {
prepareCreate("idx")
Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
prepareCreate("idx").setSettings(settings)
.addMapping("type", "location", "type=geo_point", "city", "type=string,index=not_analyzed")
.execute().actionGet();
@ -109,7 +115,8 @@ public class GeoDistanceIT extends ESIntegTestCase {
}
}
indexRandom(true, cities);
prepareCreate("empty_bucket_idx").addMapping("type", "value", "type=integer", "location", "type=geo_point").execute().actionGet();
prepareCreate("empty_bucket_idx")
.addMapping("type", "value", "type=integer", "location", "type=geo_point").execute().actionGet();
List<IndexRequestBuilder> builders = new ArrayList<>();
for (int i = 0; i < 2; i++) {
builders.add(client().prepareIndex("empty_bucket_idx", "type", "" + i).setSource(jsonBuilder()

View File

@ -23,9 +23,12 @@ import com.carrotsearch.hppc.ObjectIntMap;
import com.carrotsearch.hppc.cursors.ObjectIntCursor;
import org.apache.lucene.util.GeoHashUtils;
import org.elasticsearch.Version;
import org.elasticsearch.action.index.IndexRequestBuilder;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.query.GeoBoundingBoxQueryBuilder;
import org.elasticsearch.search.aggregations.AggregationBuilders;
@ -33,6 +36,7 @@ import org.elasticsearch.search.aggregations.bucket.filter.Filter;
import org.elasticsearch.search.aggregations.bucket.geogrid.GeoHashGrid;
import org.elasticsearch.search.aggregations.bucket.geogrid.GeoHashGrid.Bucket;
import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.test.VersionUtils;
import java.util.ArrayList;
import java.util.Arrays;
@ -50,6 +54,7 @@ import static org.hamcrest.Matchers.greaterThanOrEqualTo;
@ESIntegTestCase.SuiteScopeTestCase
public class GeoHashGridIT extends ESIntegTestCase {
private Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT);
static ObjectIntMap<String> expectedDocCountsForGeoHash = null;
static ObjectIntMap<String> multiValuedExpectedDocCountsForGeoHash = null;
@ -74,7 +79,9 @@ public class GeoHashGridIT extends ESIntegTestCase {
public void setupSuiteScopeCluster() throws Exception {
createIndex("idx_unmapped");
assertAcked(prepareCreate("idx")
Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build();
assertAcked(prepareCreate("idx").setSettings(settings)
.addMapping("type", "location", "type=geo_point", "city", "type=string,index=not_analyzed"));
List<IndexRequestBuilder> cities = new ArrayList<>();
@ -99,7 +106,7 @@ public class GeoHashGridIT extends ESIntegTestCase {
}
indexRandom(true, cities);
assertAcked(prepareCreate("multi_valued_idx")
assertAcked(prepareCreate("multi_valued_idx").setSettings(settings)
.addMapping("type", "location", "type=geo_point", "city", "type=string,index=not_analyzed"));
cities = new ArrayList<>();

View File

@ -36,12 +36,7 @@ import static org.elasticsearch.search.aggregations.AggregationBuilders.geoBound
import static org.elasticsearch.search.aggregations.AggregationBuilders.global;
import static org.elasticsearch.search.aggregations.AggregationBuilders.terms;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse;
import static org.hamcrest.Matchers.allOf;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.greaterThanOrEqualTo;
import static org.hamcrest.Matchers.lessThanOrEqualTo;
import static org.hamcrest.Matchers.notNullValue;
import static org.hamcrest.Matchers.sameInstance;
import static org.hamcrest.Matchers.*;
/**
*
@ -63,10 +58,10 @@ public class GeoBoundsIT extends AbstractGeoTestCase {
assertThat(geoBounds.getName(), equalTo(aggName));
GeoPoint topLeft = geoBounds.topLeft();
GeoPoint bottomRight = geoBounds.bottomRight();
assertThat(topLeft.lat(), equalTo(singleTopLeft.lat()));
assertThat(topLeft.lon(), equalTo(singleTopLeft.lon()));
assertThat(bottomRight.lat(), equalTo(singleBottomRight.lat()));
assertThat(bottomRight.lon(), equalTo(singleBottomRight.lon()));
assertThat(topLeft.lat(), closeTo(singleTopLeft.lat(), GEOHASH_TOLERANCE));
assertThat(topLeft.lon(), closeTo(singleTopLeft.lon(), GEOHASH_TOLERANCE));
assertThat(bottomRight.lat(), closeTo(singleBottomRight.lat(), GEOHASH_TOLERANCE));
assertThat(bottomRight.lon(), closeTo(singleBottomRight.lon(), GEOHASH_TOLERANCE));
}
public void testSingleValuedField_getProperty() throws Exception {
@ -92,19 +87,19 @@ public class GeoBoundsIT extends AbstractGeoTestCase {
assertThat((GeoBounds) global.getProperty(aggName), sameInstance(geobounds));
GeoPoint topLeft = geobounds.topLeft();
GeoPoint bottomRight = geobounds.bottomRight();
assertThat(topLeft.lat(), equalTo(singleTopLeft.lat()));
assertThat(topLeft.lon(), equalTo(singleTopLeft.lon()));
assertThat(bottomRight.lat(), equalTo(singleBottomRight.lat()));
assertThat(bottomRight.lon(), equalTo(singleBottomRight.lon()));
assertThat((double) global.getProperty(aggName + ".top"), equalTo(singleTopLeft.lat()));
assertThat((double) global.getProperty(aggName + ".left"), equalTo(singleTopLeft.lon()));
assertThat((double) global.getProperty(aggName + ".bottom"), equalTo(singleBottomRight.lat()));
assertThat((double) global.getProperty(aggName + ".right"), equalTo(singleBottomRight.lon()));
assertThat(topLeft.lat(), closeTo(singleTopLeft.lat(), GEOHASH_TOLERANCE));
assertThat(topLeft.lon(), closeTo(singleTopLeft.lon(), GEOHASH_TOLERANCE));
assertThat(bottomRight.lat(), closeTo(singleBottomRight.lat(), GEOHASH_TOLERANCE));
assertThat(bottomRight.lon(), closeTo(singleBottomRight.lon(), GEOHASH_TOLERANCE));
assertThat((double) global.getProperty(aggName + ".top"), closeTo(singleTopLeft.lat(), GEOHASH_TOLERANCE));
assertThat((double) global.getProperty(aggName + ".left"), closeTo(singleTopLeft.lon(), GEOHASH_TOLERANCE));
assertThat((double) global.getProperty(aggName + ".bottom"), closeTo(singleBottomRight.lat(), GEOHASH_TOLERANCE));
assertThat((double) global.getProperty(aggName + ".right"), closeTo(singleBottomRight.lon(), GEOHASH_TOLERANCE));
}
public void testMultiValuedField() throws Exception {
SearchResponse response = client().prepareSearch(IDX_NAME)
.addAggregation(geoBounds(aggName).field(MULTI_VALUED_FIELD_NAME)
.addAggregation(geoBounds(aggName).field(MULTI_VALUED_FIELD_NAME)
.wrapLongitude(false))
.execute().actionGet();
@ -116,10 +111,10 @@ public class GeoBoundsIT extends AbstractGeoTestCase {
assertThat(geoBounds.getName(), equalTo(aggName));
GeoPoint topLeft = geoBounds.topLeft();
GeoPoint bottomRight = geoBounds.bottomRight();
assertThat(topLeft.lat(), equalTo(multiTopLeft.lat()));
assertThat(topLeft.lon(), equalTo(multiTopLeft.lon()));
assertThat(bottomRight.lat(), equalTo(multiBottomRight.lat()));
assertThat(bottomRight.lon(), equalTo(multiBottomRight.lon()));
assertThat(topLeft.lat(), closeTo(multiTopLeft.lat(), GEOHASH_TOLERANCE));
assertThat(topLeft.lon(), closeTo(multiTopLeft.lon(), GEOHASH_TOLERANCE));
assertThat(bottomRight.lat(), closeTo(multiBottomRight.lat(), GEOHASH_TOLERANCE));
assertThat(bottomRight.lon(), closeTo(multiBottomRight.lon(), GEOHASH_TOLERANCE));
}
public void testUnmapped() throws Exception {
@ -152,10 +147,10 @@ public class GeoBoundsIT extends AbstractGeoTestCase {
assertThat(geoBounds.getName(), equalTo(aggName));
GeoPoint topLeft = geoBounds.topLeft();
GeoPoint bottomRight = geoBounds.bottomRight();
assertThat(topLeft.lat(), equalTo(singleTopLeft.lat()));
assertThat(topLeft.lon(), equalTo(singleTopLeft.lon()));
assertThat(bottomRight.lat(), equalTo(singleBottomRight.lat()));
assertThat(bottomRight.lon(), equalTo(singleBottomRight.lon()));
assertThat(topLeft.lat(), closeTo(singleTopLeft.lat(), GEOHASH_TOLERANCE));
assertThat(topLeft.lon(), closeTo(singleTopLeft.lon(), GEOHASH_TOLERANCE));
assertThat(bottomRight.lat(), closeTo(singleBottomRight.lat(), GEOHASH_TOLERANCE));
assertThat(bottomRight.lon(), closeTo(singleBottomRight.lon(), GEOHASH_TOLERANCE));
}
public void testEmptyAggregation() throws Exception {
@ -191,10 +186,10 @@ public class GeoBoundsIT extends AbstractGeoTestCase {
assertThat(geoBounds.getName(), equalTo(aggName));
GeoPoint topLeft = geoBounds.topLeft();
GeoPoint bottomRight = geoBounds.bottomRight();
assertThat(topLeft.lat(), equalTo(geoValuesTopLeft.lat()));
assertThat(topLeft.lon(), equalTo(geoValuesTopLeft.lon()));
assertThat(bottomRight.lat(), equalTo(geoValuesBottomRight.lat()));
assertThat(bottomRight.lon(), equalTo(geoValuesBottomRight.lon()));
assertThat(topLeft.lat(), closeTo(geoValuesTopLeft.lat(), GEOHASH_TOLERANCE));
assertThat(topLeft.lon(), closeTo(geoValuesTopLeft.lon(), GEOHASH_TOLERANCE));
assertThat(bottomRight.lat(), closeTo(geoValuesBottomRight.lat(), GEOHASH_TOLERANCE));
assertThat(bottomRight.lon(), closeTo(geoValuesBottomRight.lon(), GEOHASH_TOLERANCE));
}
public void testSingleValuedFieldNearDateLineWrapLongitude() throws Exception {
@ -212,10 +207,10 @@ public class GeoBoundsIT extends AbstractGeoTestCase {
assertThat(geoBounds.getName(), equalTo(aggName));
GeoPoint topLeft = geoBounds.topLeft();
GeoPoint bottomRight = geoBounds.bottomRight();
assertThat(topLeft.lat(), equalTo(geoValuesTopLeft.lat()));
assertThat(topLeft.lon(), equalTo(geoValuesTopLeft.lon()));
assertThat(bottomRight.lat(), equalTo(geoValuesBottomRight.lat()));
assertThat(bottomRight.lon(), equalTo(geoValuesBottomRight.lon()));
assertThat(topLeft.lat(), closeTo(geoValuesTopLeft.lat(), GEOHASH_TOLERANCE));
assertThat(topLeft.lon(), closeTo(geoValuesTopLeft.lon(), GEOHASH_TOLERANCE));
assertThat(bottomRight.lat(), closeTo(geoValuesBottomRight.lat(), GEOHASH_TOLERANCE));
assertThat(bottomRight.lon(), closeTo(geoValuesBottomRight.lon(), GEOHASH_TOLERANCE));
}
/**
@ -259,9 +254,9 @@ public class GeoBoundsIT extends AbstractGeoTestCase {
assertThat(geoBounds.getName(), equalTo(aggName));
GeoPoint topLeft = geoBounds.topLeft();
GeoPoint bottomRight = geoBounds.bottomRight();
assertThat(topLeft.lat(), equalTo(1.0));
assertThat(topLeft.lon(), equalTo(0.0));
assertThat(bottomRight.lat(), equalTo(1.0));
assertThat(bottomRight.lon(), equalTo(0.0));
assertThat(topLeft.lat(), closeTo(1.0, GEOHASH_TOLERANCE));
assertThat(topLeft.lon(), closeTo(0.0, GEOHASH_TOLERANCE));
assertThat(bottomRight.lat(), closeTo(1.0, GEOHASH_TOLERANCE));
assertThat(bottomRight.lon(), closeTo(0.0, GEOHASH_TOLERANCE));
}
}
}