- currently we make a few assumptions in the field data based aggregations, based on which we decide on execution paths, e.g. the initial buffer sizes we use for ordinal arrays.
- also, in geo distance, because it's built on the range agg and by default the order of the geo points per doc is unknown, we always wrap the source in a dedicated field data source which sorts the values if needed. But most of the time a doc is associated with a single point, so most of this wrapping is redundant and adds perf cost for nothing.
- the idea here is that for every request that "hits" a field data agg, we first iterate over the searchable segments, load their field data and compute the cross-segment info out of them. This info is placed in the field context with which the value sources are created.
- we currently have some of this info on IndexFieldData, but the problem with getting it from there is that we may easily end up with wrong info that originated in unsearchable segments.
commit 7b13f1932e (parent e5f52ce778)
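In outline, the cross-segment info is computed with a single pass over the searchable leaves, as in the sketch below (condensed from the MetaData.load method added in this diff; private fields are assigned directly since load lives inside MetaData):

    // runs once per request, and only over the searchable segments, so
    // unsearchable segments cannot skew the estimates
    MetaData metaData = new MetaData();
    metaData.uniqueness = Uniqueness.UNIQUE; // raw field data values are unique per doc
    for (AtomicReaderContext leaf : context.searcher().getTopReaderContext().leaves()) {
        AtomicFieldData fieldData = indexFieldData.load(leaf);
        metaData.multiValued |= fieldData.isMultiValued();
        metaData.maxAtomicUniqueValuesCount =
                Math.max(metaData.maxAtomicUniqueValuesCount, fieldData.getNumberUniqueValues());
    }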
HistogramAggregator.java
@@ -41,8 +41,6 @@ import java.util.List;
 public class HistogramAggregator extends BucketsAggregator {
 
-    private final static int INITIAL_CAPACITY = 50; // TODO sizing
-
     private final NumericValuesSource valuesSource;
     private final Rounding rounding;
     private final InternalOrder order;
@@ -59,11 +57,12 @@ public class HistogramAggregator extends BucketsAggregator {
                                boolean keyed,
                                boolean computeEmptyBuckets,
                                @Nullable NumericValuesSource valuesSource,
+                               long initialCapacity,
                                AbstractHistogramBase.Factory<?> histogramFactory,
                                AggregationContext aggregationContext,
                                Aggregator parent) {
 
-        super(name, BucketAggregationMode.PER_BUCKET, factories, 50, aggregationContext, parent);
+        super(name, BucketAggregationMode.PER_BUCKET, factories, initialCapacity, aggregationContext, parent);
         this.valuesSource = valuesSource;
         this.rounding = rounding;
         this.order = order;
@@ -71,7 +70,7 @@ public class HistogramAggregator extends BucketsAggregator {
         this.computeEmptyBuckets = computeEmptyBuckets;
         this.histogramFactory = histogramFactory;
 
-        bucketOrds = new LongHash(INITIAL_CAPACITY);
+        bucketOrds = new LongHash(initialCapacity);
     }
 
     @Override
@@ -152,12 +151,13 @@ public class HistogramAggregator extends BucketsAggregator {
 
         @Override
         protected Aggregator createUnmapped(AggregationContext aggregationContext, Aggregator parent) {
-            return new HistogramAggregator(name, factories, rounding, order, keyed, computeEmptyBuckets, null, histogramFactory, aggregationContext, parent);
+            return new HistogramAggregator(name, factories, rounding, order, keyed, computeEmptyBuckets, null, 0, histogramFactory, aggregationContext, parent);
         }
 
         @Override
         protected Aggregator create(NumericValuesSource valuesSource, long expectedBucketsCount, AggregationContext aggregationContext, Aggregator parent) {
-            return new HistogramAggregator(name, factories, rounding, order, keyed, computeEmptyBuckets, valuesSource, histogramFactory, aggregationContext, parent);
+            // todo if we'll keep track of min/max values in IndexFieldData, we could use the max here to come up with a better estimation for the buckets count
+            return new HistogramAggregator(name, factories, rounding, order, keyed, computeEmptyBuckets, valuesSource, 50, histogramFactory, aggregationContext, parent);
         }
 
 }
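The histogram change in a nutshell: the buckets hash is now pre-sized by the caller instead of by a hard-coded 50. Hypothetical call sites, with names taken from the diff:

    // mapped field: the factory picks the capacity (still 50 until a better
    // estimate, e.g. from min/max tracking, is available; see the TODO above)
    new HistogramAggregator(name, factories, rounding, order, keyed, computeEmptyBuckets,
            valuesSource, /* initialCapacity */ 50, histogramFactory, aggregationContext, parent);

    // unmapped field: no bucket will ever be added, so nothing is pre-allocated
    new HistogramAggregator(name, factories, rounding, order, keyed, computeEmptyBuckets,
            null, /* initialCapacity */ 0, histogramFactory, aggregationContext, parent);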
GeoDistanceParser.java
@@ -225,8 +225,11 @@ public class GeoDistanceParser implements Aggregator.Parser {
         @Override
         protected Aggregator create(final GeoPointValuesSource valuesSource, long expectedBucketsCount, AggregationContext aggregationContext, Aggregator parent) {
             final DistanceValues distanceValues = new DistanceValues(valuesSource, distanceType, origin, unit);
-            FieldDataSource.Numeric distanceSource = new DistanceSource(distanceValues);
-            distanceSource = new FieldDataSource.Numeric.SortedAndUnique(distanceSource);
+            FieldDataSource.Numeric distanceSource = new DistanceSource(distanceValues, valuesSource.metaData());
+            if (distanceSource.metaData().multiValued()) {
+                // we need to ensure uniqueness
+                distanceSource = new FieldDataSource.Numeric.SortedAndUnique(distanceSource);
+            }
             final NumericValuesSource numericSource = new NumericValuesSource(distanceSource, null, null);
             return new RangeAggregator(name, factories, numericSource, rangeFactory, ranges, keyed, aggregationContext, parent);
         }
@@ -264,9 +267,17 @@ public class GeoDistanceParser implements Aggregator.Parser {
     private static class DistanceSource extends FieldDataSource.Numeric {
 
         private final DoubleValues values;
+        private final MetaData metaData;
 
-        public DistanceSource(DoubleValues values) {
+        public DistanceSource(DoubleValues values, MetaData metaData) {
             this.values = values;
+            // even if the geo points are unique, there's no guarantee the distances are
+            this.metaData = MetaData.builder(metaData).uniqueness(MetaData.Uniqueness.UNKNOWN).build();
+        }
+
+        @Override
+        public MetaData metaData() {
+            return metaData;
         }
 
         @Override
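Why the distance source is marked Uniqueness.UNKNOWN even though raw field data values are unique: distance is a many-to-one function of the point, so two distinct points in the same document can yield the same distance. A hypothetical illustration (GeoPoint standing in for org.elasticsearch.common.geo.GeoPoint):

    GeoPoint origin = new GeoPoint(52.3760, 4.894); // Amsterdam
    GeoPoint east = new GeoPoint(52.3760, 5.894);   // one degree east
    GeoPoint west = new GeoPoint(52.3760, 3.894);   // one degree west
    // east != west, yet both lie the same arc distance from origin, so a doc
    // holding both points produces duplicate distance values

This is why the SortedAndUnique wrapper is still applied, but now only when the geo field is actually multi-valued; for the common single-point-per-doc case the wrapper and its per-value sort/dedup cost are skipped entirely.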
DoubleTermsAggregator.java
@@ -38,22 +38,20 @@ import java.util.Collections;
  */
 public class DoubleTermsAggregator extends BucketsAggregator {
 
-    private static final int INITIAL_CAPACITY = 50; // TODO sizing
-
     private final InternalOrder order;
     private final int requiredSize;
     private final int shardSize;
     private final NumericValuesSource valuesSource;
     private final LongHash bucketOrds;
 
-    public DoubleTermsAggregator(String name, AggregatorFactories factories, NumericValuesSource valuesSource,
+    public DoubleTermsAggregator(String name, AggregatorFactories factories, NumericValuesSource valuesSource, long estimatedBucketCount,
                                  InternalOrder order, int requiredSize, int shardSize, AggregationContext aggregationContext, Aggregator parent) {
-        super(name, BucketAggregationMode.PER_BUCKET, factories, INITIAL_CAPACITY, aggregationContext, parent);
+        super(name, BucketAggregationMode.PER_BUCKET, factories, estimatedBucketCount, aggregationContext, parent);
         this.valuesSource = valuesSource;
         this.order = order;
         this.requiredSize = requiredSize;
         this.shardSize = shardSize;
-        bucketOrds = new LongHash(INITIAL_CAPACITY);
+        bucketOrds = new LongHash(estimatedBucketCount);
     }
 
     @Override
LongTermsAggregator.java
@@ -38,22 +38,20 @@ import java.util.Collections;
  */
 public class LongTermsAggregator extends BucketsAggregator {
 
-    private static final int INITIAL_CAPACITY = 50; // TODO sizing
-
     private final InternalOrder order;
     private final int requiredSize;
     private final int shardSize;
     private final NumericValuesSource valuesSource;
     private final LongHash bucketOrds;
 
-    public LongTermsAggregator(String name, AggregatorFactories factories, NumericValuesSource valuesSource,
+    public LongTermsAggregator(String name, AggregatorFactories factories, NumericValuesSource valuesSource, long estimatedBucketCount,
                                InternalOrder order, int requiredSize, int shardSize, AggregationContext aggregationContext, Aggregator parent) {
-        super(name, BucketAggregationMode.PER_BUCKET, factories, INITIAL_CAPACITY, aggregationContext, parent);
+        super(name, BucketAggregationMode.PER_BUCKET, factories, estimatedBucketCount, aggregationContext, parent);
         this.valuesSource = valuesSource;
         this.order = order;
         this.requiredSize = requiredSize;
         this.shardSize = shardSize;
-        bucketOrds = new LongHash(INITIAL_CAPACITY);
+        bucketOrds = new LongHash(estimatedBucketCount);
     }
 
     @Override
StringTermsAggregator.java
@@ -45,8 +45,6 @@ import java.util.Collections;
  */
 public class StringTermsAggregator extends BucketsAggregator {
 
-    private static final int INITIAL_CAPACITY = 50; // TODO sizing
-
     private final ValuesSource valuesSource;
     private final InternalOrder order;
     private final int requiredSize;
@@ -54,11 +52,11 @@ public class StringTermsAggregator extends BucketsAggregator {
     protected final BytesRefHash bucketOrds;
     private final IncludeExclude includeExclude;
 
-    public StringTermsAggregator(String name, AggregatorFactories factories, ValuesSource valuesSource,
+    public StringTermsAggregator(String name, AggregatorFactories factories, ValuesSource valuesSource, long estimatedBucketCount,
                                  InternalOrder order, int requiredSize, int shardSize,
                                  IncludeExclude includeExclude, AggregationContext aggregationContext, Aggregator parent) {
 
-        super(name, BucketAggregationMode.PER_BUCKET, factories, INITIAL_CAPACITY, aggregationContext, parent);
+        super(name, BucketAggregationMode.PER_BUCKET, factories, estimatedBucketCount, aggregationContext, parent);
         this.valuesSource = valuesSource;
         this.order = order;
         this.requiredSize = requiredSize;
@@ -145,9 +143,9 @@ public class StringTermsAggregator extends BucketsAggregator {
         private Ordinals.Docs ordinals;
         private LongArray ordinalToBucket;
 
-        public WithOrdinals(String name, AggregatorFactories factories, BytesValuesSource.WithOrdinals valuesSource, InternalOrder order, int requiredSize,
-                            int shardSize, AggregationContext aggregationContext, Aggregator parent) {
-            super(name, factories, valuesSource, order, requiredSize, shardSize, null, aggregationContext, parent);
+        public WithOrdinals(String name, AggregatorFactories factories, BytesValuesSource.WithOrdinals valuesSource, long estimatedBucketCount,
+                            InternalOrder order, int requiredSize, int shardSize, AggregationContext aggregationContext, Aggregator parent) {
+            super(name, factories, valuesSource, estimatedBucketCount, order, requiredSize, shardSize, null, aggregationContext, parent);
             this.valuesSource = valuesSource;
         }
 
@@ -164,7 +162,7 @@ public class StringTermsAggregator extends BucketsAggregator {
 
         @Override
        public void collect(int doc, long owningBucketOrdinal) throws IOException {
-            assert owningBucketOrdinal == 0;
+            assert owningBucketOrdinal == 0 : "this is a per_bucket aggregator";
             final int valuesCount = ordinals.setDocument(doc);
 
             for (int i = 0; i < valuesCount; ++i) {
TermsAggregatorFactory.java
@@ -71,6 +71,20 @@ public class TermsAggregatorFactory extends ValueSourceAggregatorFactory {
 
     @Override
     protected Aggregator create(ValuesSource valuesSource, long expectedBucketsCount, AggregationContext aggregationContext, Aggregator parent) {
+        long estimatedBucketCount = valuesSource.metaData().maxAtomicUniqueValuesCount();
+        if (estimatedBucketCount < 0) {
+            // there isn't an estimation available.. 50 should be a good start
+            estimatedBucketCount = 50;
+        }
+
+        // adding an upper bound on the estimation as some atomic field data in the future (binary doc values) are not
+        // going to know their exact cardinality and will return upper bounds in AtomicFieldData.getNumberUniqueValues()
+        // that may be largely over-estimated.. the value chosen here is arbitrary just to play nice with typical CPU cache
+        //
+        // Another reason is that it may be faster to resize upon growth than to start directly with the appropriate size.
+        // And that all values are not necessarily visited by the matches.
+        estimatedBucketCount = Math.min(estimatedBucketCount, 512);
+
         if (valuesSource instanceof BytesValuesSource) {
             if (executionHint != null && !executionHint.equals(EXECUTION_HINT_VALUE_MAP) && !executionHint.equals(EXECUTION_HINT_VALUE_ORDINALS)) {
                 throw new ElasticSearchIllegalArgumentException("execution_hint can only be '" + EXECUTION_HINT_VALUE_MAP + "' or '" + EXECUTION_HINT_VALUE_ORDINALS + "', not " + executionHint);
@@ -93,11 +107,12 @@ public class TermsAggregatorFactory extends ValueSourceAggregatorFactory {
 
             if (execution.equals(EXECUTION_HINT_VALUE_ORDINALS)) {
                 assert includeExclude == null;
-                final StringTermsAggregator.WithOrdinals aggregator = new StringTermsAggregator.WithOrdinals(name, factories, (BytesValuesSource.WithOrdinals) valuesSource, order, requiredSize, shardSize, aggregationContext, parent);
+                final StringTermsAggregator.WithOrdinals aggregator = new StringTermsAggregator.WithOrdinals(name,
+                        factories, (BytesValuesSource.WithOrdinals) valuesSource, estimatedBucketCount, order, requiredSize, shardSize, aggregationContext, parent);
                 aggregationContext.registerReaderContextAware(aggregator);
                 return aggregator;
             } else {
-                return new StringTermsAggregator(name, factories, valuesSource, order, requiredSize, shardSize, includeExclude, aggregationContext, parent);
+                return new StringTermsAggregator(name, factories, valuesSource, estimatedBucketCount, order, requiredSize, shardSize, includeExclude, aggregationContext, parent);
             }
         }
 
@@ -108,9 +123,9 @@ public class TermsAggregatorFactory extends ValueSourceAggregatorFactory {
 
         if (valuesSource instanceof NumericValuesSource) {
             if (((NumericValuesSource) valuesSource).isFloatingPoint()) {
-                return new DoubleTermsAggregator(name, factories, (NumericValuesSource) valuesSource, order, requiredSize, shardSize, aggregationContext, parent);
+                return new DoubleTermsAggregator(name, factories, (NumericValuesSource) valuesSource, estimatedBucketCount, order, requiredSize, shardSize, aggregationContext, parent);
             }
-            return new LongTermsAggregator(name, factories, (NumericValuesSource) valuesSource, order, requiredSize, shardSize, aggregationContext, parent);
+            return new LongTermsAggregator(name, factories, (NumericValuesSource) valuesSource, estimatedBucketCount, order, requiredSize, shardSize, aggregationContext, parent);
         }
 
         throw new AggregationExecutionException("terms aggregation cannot be applied to field [" + valuesSourceConfig.fieldContext().field() +
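Restated as a stand-alone helper (a hypothetical refactoring for readability, not part of the commit), the sizing heuristic above is:

    static long estimatedBucketCount(ValuesSource valuesSource) {
        long estimate = valuesSource.metaData().maxAtomicUniqueValuesCount();
        if (estimate < 0) {
            // no cross-segment estimate is available; 50 is a reasonable start
            estimate = 50;
        }
        // cap the estimate: some field data only report an upper bound on their
        // cardinality, and resizing on growth can beat a huge up-front allocation
        return Math.min(estimate, 512);
    }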
AggregationContext.java
@@ -31,7 +31,6 @@ import org.elasticsearch.index.fielddata.IndexFieldData;
 import org.elasticsearch.index.fielddata.IndexGeoPointFieldData;
 import org.elasticsearch.index.fielddata.IndexNumericFieldData;
 import org.elasticsearch.search.aggregations.AggregationExecutionException;
-import org.elasticsearch.search.aggregations.support.FieldDataSource.Uniqueness;
 import org.elasticsearch.search.aggregations.support.bytes.BytesValuesSource;
 import org.elasticsearch.search.aggregations.support.geopoints.GeoPointValuesSource;
 import org.elasticsearch.search.aggregations.support.numeric.NumericValuesSource;
@@ -139,7 +138,8 @@ public class AggregationContext implements ReaderContextAware, ScorerAware {
     private NumericValuesSource numericField(ObjectObjectOpenHashMap<String, FieldDataSource> fieldDataSources, ValuesSourceConfig<?> config) {
         FieldDataSource.Numeric dataSource = (FieldDataSource.Numeric) fieldDataSources.get(config.fieldContext.field());
         if (dataSource == null) {
-            dataSource = new FieldDataSource.Numeric.FieldData((IndexNumericFieldData<?>) config.fieldContext.indexFieldData());
+            FieldDataSource.MetaData metaData = FieldDataSource.MetaData.load(config.fieldContext.indexFieldData(), searchContext);
+            dataSource = new FieldDataSource.Numeric.FieldData((IndexNumericFieldData<?>) config.fieldContext.indexFieldData(), metaData);
             setReaderIfNeeded((ReaderContextAware) dataSource);
             readerAwares.add((ReaderContextAware) dataSource);
             fieldDataSources.put(config.fieldContext.field(), dataSource);
@@ -166,10 +166,11 @@ public class AggregationContext implements ReaderContextAware, ScorerAware {
         FieldDataSource dataSource = fieldDataSources.get(config.fieldContext.field());
         if (dataSource == null) {
             final IndexFieldData<?> indexFieldData = config.fieldContext.indexFieldData();
-            if (indexFieldData instanceof IndexFieldData.WithOrdinals<?>) {
-                dataSource = new FieldDataSource.Bytes.WithOrdinals.FieldData((IndexFieldData.WithOrdinals<?>) indexFieldData);
+            FieldDataSource.MetaData metaData = FieldDataSource.MetaData.load(config.fieldContext.indexFieldData(), searchContext);
+            if (indexFieldData instanceof IndexFieldData.WithOrdinals) {
+                dataSource = new FieldDataSource.Bytes.WithOrdinals.FieldData((IndexFieldData.WithOrdinals) indexFieldData, metaData);
             } else {
-                dataSource = new FieldDataSource.Bytes.FieldData(indexFieldData);
+                dataSource = new FieldDataSource.Bytes.FieldData(indexFieldData, metaData);
             }
             setReaderIfNeeded((ReaderContextAware) dataSource);
             readerAwares.add((ReaderContextAware) dataSource);
@@ -185,7 +186,7 @@ public class AggregationContext implements ReaderContextAware, ScorerAware {
         // Even in case we wrap field data, we might still need to wrap for sorting, because the wrapped field data might be
         // eg. a numeric field data that doesn't sort according to the byte order. However field data values are unique so no
         // need to wrap for uniqueness
-        if ((config.ensureUnique && dataSource.getUniqueness() != Uniqueness.UNIQUE) || config.ensureSorted) {
+        if ((config.ensureUnique && !dataSource.metaData().uniqueness().unique()) || config.ensureSorted) {
             dataSource = new FieldDataSource.Bytes.SortedAndUnique(dataSource);
             readerAwares.add((ReaderContextAware) dataSource);
         }
@@ -216,7 +217,8 @@ public class AggregationContext implements ReaderContextAware, ScorerAware {
     private GeoPointValuesSource geoPointField(ObjectObjectOpenHashMap<String, FieldDataSource> fieldDataSources, ValuesSourceConfig<?> config) {
         FieldDataSource.GeoPoint dataSource = (FieldDataSource.GeoPoint) fieldDataSources.get(config.fieldContext.field());
         if (dataSource == null) {
-            dataSource = new FieldDataSource.GeoPoint((IndexGeoPointFieldData<?>) config.fieldContext.indexFieldData());
+            FieldDataSource.MetaData metaData = FieldDataSource.MetaData.load(config.fieldContext.indexFieldData(), searchContext);
+            dataSource = new FieldDataSource.GeoPoint((IndexGeoPointFieldData<?>) config.fieldContext.indexFieldData(), metaData);
             setReaderIfNeeded(dataSource);
             readerAwares.add(dataSource);
             fieldDataSources.put(config.fieldContext.field(), dataSource);
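All three lookups (numericField, bytesField, geoPointField) follow the same pattern: metadata is loaded once per field per request and cached alongside the FieldDataSource, so several aggregations over the same field share the segment scan. Schematically (simplified from the methods above, reader wiring omitted):

    FieldDataSource dataSource = fieldDataSources.get(field);
    if (dataSource == null) {
        // one pass over the searchable segments, reused by every aggregation
        // on this field within the request
        FieldDataSource.MetaData metaData =
                FieldDataSource.MetaData.load(config.fieldContext.indexFieldData(), searchContext);
        dataSource = new FieldDataSource.Bytes.FieldData(config.fieldContext.indexFieldData(), metaData);
        fieldDataSources.put(field, dataSource);
    }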
FieldDataSource.java
@@ -32,30 +32,119 @@ import org.elasticsearch.search.aggregations.support.FieldDataSource.Bytes.Sorte
 import org.elasticsearch.search.aggregations.support.bytes.ScriptBytesValues;
 import org.elasticsearch.search.aggregations.support.numeric.ScriptDoubleValues;
 import org.elasticsearch.search.aggregations.support.numeric.ScriptLongValues;
+import org.elasticsearch.search.internal.SearchContext;
 
-/**
- *
- */
 public abstract class FieldDataSource {
 
-    /** Whether values are unique or not per document. */
-    public enum Uniqueness {
-        UNIQUE,
-        NOT_UNIQUE,
-        UNKNOWN;
+    public static class MetaData {
+
+        public static final MetaData UNKNOWN = new MetaData();
+
+        public enum Uniqueness {
+            UNIQUE,
+            NOT_UNIQUE,
+            UNKNOWN;
+
+            public boolean unique() {
+                return this == UNIQUE;
+            }
+        }
+
+        private long maxAtomicUniqueValuesCount = -1;
+        private boolean multiValued = true;
+        private Uniqueness uniqueness = Uniqueness.UNKNOWN;
+
+        private MetaData() {}
+
+        private MetaData(MetaData other) {
+            this.maxAtomicUniqueValuesCount = other.maxAtomicUniqueValuesCount;
+            this.multiValued = other.multiValued;
+            this.uniqueness = other.uniqueness;
+        }
+
+        private MetaData(long maxAtomicUniqueValuesCount, boolean multiValued, Uniqueness uniqueness) {
+            this.maxAtomicUniqueValuesCount = maxAtomicUniqueValuesCount;
+            this.multiValued = multiValued;
+            this.uniqueness = uniqueness;
+        }
+
+        public long maxAtomicUniqueValuesCount() {
+            return maxAtomicUniqueValuesCount;
+        }
+
+        public boolean multiValued() {
+            return multiValued;
+        }
+
+        public Uniqueness uniqueness() {
+            return uniqueness;
+        }
+
+        public static MetaData load(IndexFieldData indexFieldData, SearchContext context) {
+            MetaData metaData = new MetaData();
+            metaData.uniqueness = Uniqueness.UNIQUE;
+            for (AtomicReaderContext readerContext : context.searcher().getTopReaderContext().leaves()) {
+                AtomicFieldData fieldData = indexFieldData.load(readerContext);
+                metaData.multiValued |= fieldData.isMultiValued();
+                metaData.maxAtomicUniqueValuesCount = Math.max(metaData.maxAtomicUniqueValuesCount, fieldData.getNumberUniqueValues());
+            }
+            return metaData;
+        }
+
+        public static Builder builder() {
+            return new Builder();
+        }
+
+        public static Builder builder(MetaData other) {
+            return new Builder(other);
+        }
+
+        public static class Builder {
+
+            private final MetaData metaData;
+
+            private Builder() {
+                metaData = new MetaData();
+            }
+
+            private Builder(MetaData metaData) {
+                this.metaData = new MetaData(metaData);
+            }
+
+            public Builder maxAtomicUniqueValuesCount(long maxAtomicUniqueValuesCount) {
+                metaData.maxAtomicUniqueValuesCount = maxAtomicUniqueValuesCount;
+                return this;
+            }
+
+            public Builder multiValued(boolean multiValued) {
+                metaData.multiValued = multiValued;
+                return this;
+            }
+
+            public Builder uniqueness(Uniqueness uniqueness) {
+                metaData.uniqueness = uniqueness;
+                return this;
+            }
+
+            public MetaData build() {
+                return metaData;
+            }
+        }
     }
 
-    /** Return whether values are unique. */
-    public Uniqueness getUniqueness() {
-        return Uniqueness.UNKNOWN;
-    }
-
-    /** Get the current {@link BytesValues}. */
+    /**
+     * Get the current {@link BytesValues}.
+     */
     public abstract BytesValues bytesValues();
 
-    /** Ask the underlying data source to provide pre-computed hashes, optional operation. */
+    /**
+     * Ask the underlying data source to provide pre-computed hashes, optional operation.
+     */
     public void setNeedsHashes(boolean needsHashes) {}
 
+    public abstract MetaData metaData();
+
     public static abstract class Bytes extends FieldDataSource {
 
         public static abstract class WithOrdinals extends Bytes {
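The builder is how wrappers derive metadata from their delegate while overriding a single property, e.g. (this is exactly what the SortedAndUnique wrappers below do):

    MetaData unique = MetaData.builder(delegate.metaData())
            .uniqueness(MetaData.Uniqueness.UNIQUE) // the wrapper enforces uniqueness
            .build();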
@@ -66,17 +155,19 @@ public abstract class FieldDataSource {
 
                 protected boolean needsHashes;
                 protected final IndexFieldData.WithOrdinals<?> indexFieldData;
+                protected final MetaData metaData;
                 protected AtomicFieldData.WithOrdinals<?> atomicFieldData;
                 private BytesValues.WithOrdinals bytesValues;
 
-                public FieldData(IndexFieldData.WithOrdinals<?> indexFieldData) {
+                public FieldData(IndexFieldData.WithOrdinals<?> indexFieldData, MetaData metaData) {
                     this.indexFieldData = indexFieldData;
+                    this.metaData = metaData;
                     needsHashes = false;
                 }
 
                 @Override
-                public Uniqueness getUniqueness() {
-                    return Uniqueness.UNIQUE;
+                public MetaData metaData() {
+                    return metaData;
                 }
 
                 public final void setNeedsHashes(boolean needsHashes) {
@@ -107,17 +198,19 @@ public abstract class FieldDataSource {
 
             protected boolean needsHashes;
             protected final IndexFieldData<?> indexFieldData;
+            protected final MetaData metaData;
             protected AtomicFieldData<?> atomicFieldData;
             private BytesValues bytesValues;
 
-            public FieldData(IndexFieldData<?> indexFieldData) {
+            public FieldData(IndexFieldData<?> indexFieldData, MetaData metaData) {
                 this.indexFieldData = indexFieldData;
+                this.metaData = metaData;
                 needsHashes = false;
             }
 
             @Override
-            public Uniqueness getUniqueness() {
-                return Uniqueness.UNIQUE;
+            public MetaData metaData() {
+                return metaData;
             }
 
             public final void setNeedsHashes(boolean needsHashes) {
@@ -149,25 +242,31 @@ public abstract class FieldDataSource {
                 values = new ScriptBytesValues(script);
             }
 
+            @Override
+            public MetaData metaData() {
+                return MetaData.UNKNOWN;
+            }
+
             @Override
             public org.elasticsearch.index.fielddata.BytesValues bytesValues() {
                 return values;
             }
 
         }
 
         public static class SortedAndUnique extends Bytes implements ReaderContextAware {
 
             private final FieldDataSource delegate;
+            private final MetaData metaData;
             private BytesValues bytesValues;
 
             public SortedAndUnique(FieldDataSource delegate) {
                 this.delegate = delegate;
+                this.metaData = MetaData.builder(delegate.metaData()).uniqueness(MetaData.Uniqueness.UNIQUE).build();
             }
 
             @Override
-            public Uniqueness getUniqueness() {
-                return Uniqueness.UNIQUE;
+            public MetaData metaData() {
+                return metaData;
             }
 
             @Override
@@ -180,7 +279,7 @@ public abstract class FieldDataSource {
             if (bytesValues == null) {
                 bytesValues = delegate.bytesValues();
                 if (bytesValues.isMultiValued() &&
-                        (delegate.getUniqueness() != Uniqueness.UNIQUE || bytesValues.getOrder() != Order.BYTES)) {
+                        (!delegate.metaData().uniqueness.unique() || bytesValues.getOrder() != Order.BYTES)) {
                     bytesValues = new SortedUniqueBytesValues(bytesValues);
                 }
             }
@@ -254,13 +353,11 @@ public abstract class FieldDataSource {
 
         public static class WithScript extends Numeric {
 
-            private final Numeric delegate;
             private final LongValues longValues;
             private final DoubleValues doubleValues;
             private final FieldDataSource.WithScript.BytesValues bytesValues;
 
             public WithScript(Numeric delegate, SearchScript script) {
-                this.delegate = delegate;
                 this.longValues = new LongValues(delegate, script);
                 this.doubleValues = new DoubleValues(delegate, script);
                 this.bytesValues = new FieldDataSource.WithScript.BytesValues(delegate, script);
@@ -286,6 +383,11 @@ public abstract class FieldDataSource {
                 return doubleValues;
             }
 
+            @Override
+            public MetaData metaData() {
+                return MetaData.UNKNOWN;
+            }
+
             static class LongValues extends org.elasticsearch.index.fielddata.LongValues {
 
                 private final Numeric source;
@@ -337,19 +439,21 @@ public abstract class FieldDataSource {
 
             protected boolean needsHashes;
             protected final IndexNumericFieldData<?> indexFieldData;
+            protected final MetaData metaData;
             protected AtomicNumericFieldData atomicFieldData;
             private BytesValues bytesValues;
             private LongValues longValues;
             private DoubleValues doubleValues;
 
-            public FieldData(IndexNumericFieldData<?> indexFieldData) {
+            public FieldData(IndexNumericFieldData<?> indexFieldData, MetaData metaData) {
                 this.indexFieldData = indexFieldData;
+                this.metaData = metaData;
                 needsHashes = false;
             }
 
             @Override
-            public Uniqueness getUniqueness() {
-                return Uniqueness.UNIQUE;
+            public MetaData metaData() {
+                return metaData;
             }
 
             @Override
@@ -417,6 +521,11 @@ public abstract class FieldDataSource {
                 bytesValues = new ScriptBytesValues(script);
             }
 
+            @Override
+            public MetaData metaData() {
+                return MetaData.UNKNOWN;
+            }
+
             @Override
             public boolean isFloatingPoint() {
                 return scriptValueType != null ? scriptValueType.isFloatingPoint() : true;
@@ -442,17 +551,19 @@ public abstract class FieldDataSource {
         public static class SortedAndUnique extends Numeric implements ReaderContextAware {
 
             private final Numeric delegate;
+            private final MetaData metaData;
             private LongValues longValues;
             private DoubleValues doubleValues;
             private BytesValues bytesValues;
 
             public SortedAndUnique(Numeric delegate) {
                 this.delegate = delegate;
+                this.metaData = MetaData.builder(delegate.metaData()).uniqueness(MetaData.Uniqueness.UNIQUE).build();
             }
 
             @Override
-            public Uniqueness getUniqueness() {
-                return Uniqueness.UNIQUE;
+            public MetaData metaData() {
+                return metaData;
             }
 
             @Override
@@ -472,7 +583,7 @@ public abstract class FieldDataSource {
                 if (longValues == null) {
                     longValues = delegate.longValues();
                     if (longValues.isMultiValued() &&
-                            (delegate.getUniqueness() != Uniqueness.UNIQUE || longValues.getOrder() != Order.NUMERIC)) {
+                            (!delegate.metaData().uniqueness.unique() || longValues.getOrder() != Order.NUMERIC)) {
                         longValues = new SortedUniqueLongValues(longValues);
                     }
                 }
@@ -484,7 +595,7 @@ public abstract class FieldDataSource {
                 if (doubleValues == null) {
                     doubleValues = delegate.doubleValues();
                     if (doubleValues.isMultiValued() &&
-                            (delegate.getUniqueness() != Uniqueness.UNIQUE || doubleValues.getOrder() != Order.NUMERIC)) {
+                            (!delegate.metaData().uniqueness.unique() || doubleValues.getOrder() != Order.NUMERIC)) {
                         doubleValues = new SortedUniqueDoubleValues(doubleValues);
                     }
                 }
@@ -496,7 +607,7 @@ public abstract class FieldDataSource {
                 if (bytesValues == null) {
                     bytesValues = delegate.bytesValues();
                     if (bytesValues.isMultiValued() &&
-                            (delegate.getUniqueness() != Uniqueness.UNIQUE || bytesValues.getOrder() != Order.BYTES)) {
+                            (!delegate.metaData().uniqueness.unique() || bytesValues.getOrder() != Order.BYTES)) {
                         bytesValues = new SortedUniqueBytesValues(bytesValues);
                     }
                 }
@@ -632,6 +743,11 @@ public abstract class FieldDataSource {
             this.bytesValues = new BytesValues(delegate, script);
         }
 
+        @Override
+        public MetaData metaData() {
+            return MetaData.UNKNOWN;
+        }
+
         @Override
         public BytesValues bytesValues() {
             return bytesValues;
@@ -669,18 +785,20 @@ public abstract class FieldDataSource {
 
         protected boolean needsHashes;
         protected final IndexGeoPointFieldData<?> indexFieldData;
+        private final MetaData metaData;
         protected AtomicGeoPointFieldData<?> atomicFieldData;
         private BytesValues bytesValues;
         private GeoPointValues geoPointValues;
 
-        public GeoPoint(IndexGeoPointFieldData<?> indexFieldData) {
+        public GeoPoint(IndexGeoPointFieldData<?> indexFieldData, MetaData metaData) {
            this.indexFieldData = indexFieldData;
+            this.metaData = metaData;
             needsHashes = false;
         }
 
         @Override
-        public Uniqueness getUniqueness() {
-            return Uniqueness.UNIQUE;
+        public MetaData metaData() {
+            return metaData;
         }
 
         @Override
ValuesSource.java
@@ -26,6 +26,8 @@ import org.elasticsearch.index.fielddata.BytesValues;
  */
 public interface ValuesSource {
 
+    FieldDataSource.MetaData metaData();
+
     /**
      * @return A {@link org.apache.lucene.util.BytesRef bytesref} view over the values that are resolved from this value source.
     */
BytesValuesSource.java
@@ -34,6 +34,11 @@ public class BytesValuesSource implements ValuesSource {
         this.source = source;
     }
 
+    @Override
+    public FieldDataSource.MetaData metaData() {
+        return source.metaData();
+    }
+
     @Override
     public BytesValues bytesValues() {
         return source.bytesValues();
GeoPointValuesSource.java
@@ -35,6 +35,11 @@ public final class GeoPointValuesSource implements ValuesSource {
         this.source = source;
     }
 
+    @Override
+    public FieldDataSource.MetaData metaData() {
+        return source.metaData();
+    }
+
     @Override
     public BytesValues bytesValues() {
         return source.bytesValues();
NumericValuesSource.java
@@ -41,6 +41,11 @@ public final class NumericValuesSource implements ValuesSource {
         this.parser = parser;
     }
 
+    @Override
+    public FieldDataSource.MetaData metaData() {
+        return source.metaData();
+    }
+
     @Override
     public BytesValues bytesValues() {
         return source.bytesValues();
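Each concrete ValuesSource simply forwards metaData() to its backing FieldDataSource, so whatever wrapper sits at the top of the chain (script values report UNKNOWN, SortedAndUnique reports UNIQUE) is what an aggregator observes. A hypothetical consumer:

    // delegation chain: NumericValuesSource -> FieldDataSource.Numeric (possibly
    // wrapped) -> the MetaData loaded once per request
    boolean needsDedup = numericSource.metaData().multiValued()
            && !numericSource.metaData().uniqueness().unique();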
GeoDistanceTests.java
@@ -60,13 +60,15 @@ public class GeoDistanceTests extends ElasticsearchIntegrationTest {
                 .build();
     }
 
-    private IndexRequestBuilder indexCity(String name, String latLon) throws Exception {
+    private IndexRequestBuilder indexCity(String idx, String name, String... latLons) throws Exception {
         XContentBuilder source = jsonBuilder().startObject().field("city", name);
-        if (latLon != null) {
-            source = source.field("location", latLon);
+        source.startArray("location");
+        for (int i = 0; i < latLons.length; i++) {
+            source.value(latLons[i]);
         }
+        source.endArray();
         source = source.endObject();
-        return client().prepareIndex("idx", "type").setSource(source);
+        return client().prepareIndex(idx, "type").setSource(source);
     }
 
     @Before
@@ -75,27 +77,45 @@ public class GeoDistanceTests extends ElasticsearchIntegrationTest {
                 .addMapping("type", "location", "type=geo_point", "city", "type=string,index=not_analyzed")
                 .execute().actionGet();
 
+        prepareCreate("idx-multi")
+                .addMapping("type", "location", "type=geo_point", "city", "type=string,index=not_analyzed")
+                .execute().actionGet();
+
         createIndex("idx_unmapped");
 
         List<IndexRequestBuilder> cities = new ArrayList<IndexRequestBuilder>();
         cities.addAll(Arrays.asList(
                 // below 500km
-                indexCity("utrecht", "52.0945, 5.116"),
-                indexCity("haarlem", "52.3890, 4.637"),
+                indexCity("idx", "utrecht", "52.0945, 5.116"),
+                indexCity("idx", "haarlem", "52.3890, 4.637"),
                 // above 500km, below 1000km
-                indexCity("berlin", "52.540, 13.409"),
-                indexCity("prague", "50.086, 14.439"),
+                indexCity("idx", "berlin", "52.540, 13.409"),
+                indexCity("idx", "prague", "50.097679, 14.441314"),
                 // above 1000km
-                indexCity("tel-aviv", "32.0741, 34.777")));
+                indexCity("idx", "tel-aviv", "32.0741, 34.777")));
+
+        // random cities with no location
+        for (String cityName : Arrays.asList("london", "singapour", "tokyo", "milan")) {
+            if (randomBoolean()) {
+                cities.add(indexCity("idx", cityName));
+            }
+        }
+        indexRandom(true, cities);
+
+        cities.clear();
+        cities.addAll(Arrays.asList(
+                indexCity("idx-multi", "city1", "52.3890, 4.637", "50.097679,14.441314"), // first point is ~17.5km away, the second ~710km
+                indexCity("idx-multi", "city2", "52.540, 13.409", "52.0945, 5.116"), // first point is ~576km away, the second ~35km
+                indexCity("idx-multi", "city3", "32.0741, 34.777"))); // above 1000km
 
         // random cities with no location
         for (String cityName : Arrays.asList("london", "singapour", "tokyo", "milan")) {
             if (randomBoolean() || true) {
-                cities.add(indexCity(cityName, null));
+                cities.add(indexCity("idx-multi", cityName));
             }
         }
 
         indexRandom(true, cities);
 
         ensureSearchable();
     }
@@ -370,6 +390,50 @@ public class GeoDistanceTests extends ElasticsearchIntegrationTest {
         assertThat(geoDistance.buckets().get(0).getFrom(), equalTo(0.0));
         assertThat(geoDistance.buckets().get(0).getTo(), equalTo(100.0));
         assertThat(geoDistance.buckets().get(0).getDocCount(), equalTo(0l));
     }
 
+    @Test
+    public void multiValues() throws Exception {
+        SearchResponse response = client().prepareSearch("idx-multi")
+                .addAggregation(geoDistance("amsterdam_rings")
+                        .field("location")
+                        .unit(DistanceUnit.KILOMETERS)
+                        .distanceType(org.elasticsearch.common.geo.GeoDistance.ARC)
+                        .point("52.3760, 4.894") // coords of amsterdam
+                        .addUnboundedTo(500)
+                        .addRange(500, 1000)
+                        .addUnboundedFrom(1000))
+                .execute().actionGet();
+
+        assertSearchResponse(response);
+
+        GeoDistance geoDist = response.getAggregations().get("amsterdam_rings");
+        assertThat(geoDist, notNullValue());
+        assertThat(geoDist.getName(), equalTo("amsterdam_rings"));
+        assertThat(geoDist.buckets().size(), equalTo(3));
+
+        GeoDistance.Bucket bucket = geoDist.getByKey("*-500.0");
+        assertThat(bucket, notNullValue());
+        assertThat(bucket.getKey(), equalTo("*-500.0"));
+        assertThat(bucket.getFrom(), equalTo(0.0));
+        assertThat(bucket.getTo(), equalTo(500.0));
+        assertThat(bucket.getDocCount(), equalTo(2l));
+
+        bucket = geoDist.getByKey("500.0-1000.0");
+        assertThat(bucket, notNullValue());
+        assertThat(bucket.getKey(), equalTo("500.0-1000.0"));
+        assertThat(bucket.getFrom(), equalTo(500.0));
+        assertThat(bucket.getTo(), equalTo(1000.0));
+        assertThat(bucket.getDocCount(), equalTo(2l));
+
+        bucket = geoDist.getByKey("1000.0-*");
+        assertThat(bucket, notNullValue());
+        assertThat(bucket.getKey(), equalTo("1000.0-*"));
+        assertThat(bucket.getFrom(), equalTo(1000.0));
+        assertThat(bucket.getTo(), equalTo(Double.POSITIVE_INFINITY));
+        assertThat(bucket.getDocCount(), equalTo(1l));
+    }
+
 }