Fix date and ip sources in the composite aggregation (#29370)
This commit fixes the formatting of the values in the composite aggregation response. `date` fields should return timestamps as longs when used in a `terms` source, and `ip` fields should always be formatted as strings. This commit also fixes the parsing of the `after` key for these field types. Finally, this commit disables the index optimization for the `ip` field and for any source that provides a `missing` value.
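To make the fixed behavior concrete, here is a sketch of a composite aggregation request at the REST level; the index and field names are hypothetical, and the values echo the tests below. After this change, a `date` field used in a `terms` source reports its key as a long timestamp, and that long (like the string form of an `ip`) can be passed back verbatim in the `after` key:

    POST /my-index/_search
    {
      "size": 0,
      "aggs": {
        "my_buckets": {
          "composite": {
            "sources": [
              { "date": { "terms": { "field": "timestamp" } } },
              { "ip": { "terms": { "field": "clientip" } } }
            ],
            "after": { "date": 1508479764000, "ip": "127.0.0.1" }
          }
        }
      }
    }

A bucket key in the response then looks like {"date": 1474362034000, "ip": "::1"}: the date rendered as a long, the ip as a string.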
This commit is contained in:
parent 73019ae887
commit d755fcfd4b
@@ -26,7 +26,11 @@ import org.apache.lucene.search.Query;
 import org.apache.lucene.util.BytesRef;
 import org.elasticsearch.common.CheckedFunction;
 import org.elasticsearch.index.fielddata.SortedBinaryDocValues;
+import org.elasticsearch.index.mapper.KeywordFieldMapper;
 import org.elasticsearch.index.mapper.MappedFieldType;
+import org.elasticsearch.index.mapper.StringFieldType;
+import org.elasticsearch.index.mapper.TextFieldMapper;
+import org.elasticsearch.search.DocValueFormat;
 import org.elasticsearch.search.aggregations.LeafBucketCollector;
 
 import java.io.IOException;
@@ -40,8 +44,8 @@ class BinaryValuesSource extends SingleDimensionValuesSource<BytesRef> {
     private BytesRef currentValue;
 
     BinaryValuesSource(MappedFieldType fieldType, CheckedFunction<LeafReaderContext, SortedBinaryDocValues, IOException> docValuesFunc,
-                       int size, int reverseMul) {
-        super(fieldType, size, reverseMul);
+                       DocValueFormat format, Object missing, int size, int reverseMul) {
+        super(format, fieldType, missing, size, reverseMul);
         this.docValuesFunc = docValuesFunc;
         this.values = new BytesRef[size];
     }
@@ -72,10 +76,8 @@ class BinaryValuesSource extends SingleDimensionValuesSource<BytesRef> {
 
     @Override
     void setAfter(Comparable<?> value) {
-        if (value.getClass() == BytesRef.class) {
-            afterValue = (BytesRef) value;
-        } else if (value.getClass() == String.class) {
-            afterValue = new BytesRef((String) value);
+        if (value.getClass() == String.class) {
+            afterValue = format.parseBytesRef(value.toString());
         } else {
             throw new IllegalArgumentException("invalid value, expected string, got " + value.getClass().getSimpleName());
         }
@@ -120,7 +122,8 @@ class BinaryValuesSource extends SingleDimensionValuesSource<BytesRef> {
     @Override
     SortedDocsProducer createSortedDocsProducerOrNull(IndexReader reader, Query query) {
         if (checkIfSortedDocsIsApplicable(reader, fieldType) == false ||
-                (query != null && query.getClass() != MatchAllDocsQuery.class)) {
+                fieldType instanceof StringFieldType == false ||
+                (query != null && query.getClass() != MatchAllDocsQuery.class)) {
             return null;
         }
         return new TermsSortedDocsProducer(fieldType.name());

@@ -263,30 +263,84 @@ final class CompositeAggregator extends BucketsAggregator {
             final int reverseMul = configs[i].reverseMul();
             if (configs[i].valuesSource() instanceof ValuesSource.Bytes.WithOrdinals && reader instanceof DirectoryReader) {
                 ValuesSource.Bytes.WithOrdinals vs = (ValuesSource.Bytes.WithOrdinals) configs[i].valuesSource();
-                sources[i] = new GlobalOrdinalValuesSource(bigArrays, configs[i].fieldType(), vs::globalOrdinalsValues, size, reverseMul);
+                sources[i] = new GlobalOrdinalValuesSource(
+                    bigArrays,
+                    configs[i].fieldType(),
+                    vs::globalOrdinalsValues,
+                    configs[i].format(),
+                    configs[i].missing(),
+                    size,
+                    reverseMul
+                );
+
                 if (i == 0 && sources[i].createSortedDocsProducerOrNull(reader, query) != null) {
                     // this is the leading source and we can optimize it with the sorted docs producer but
                     // we don't want to use global ordinals because the number of visited documents
                     // should be low and global ordinals need one lookup per visited term.
                     Releasables.close(sources[i]);
-                    sources[i] = new BinaryValuesSource(configs[i].fieldType(), vs::bytesValues, size, reverseMul);
+                    sources[i] = new BinaryValuesSource(
+                        configs[i].fieldType(),
+                        vs::bytesValues,
+                        configs[i].format(),
+                        configs[i].missing(),
+                        size,
+                        reverseMul
+                    );
                 }
             } else if (configs[i].valuesSource() instanceof ValuesSource.Bytes) {
                 ValuesSource.Bytes vs = (ValuesSource.Bytes) configs[i].valuesSource();
-                sources[i] = new BinaryValuesSource(configs[i].fieldType(), vs::bytesValues, size, reverseMul);
+                sources[i] = new BinaryValuesSource(
+                    configs[i].fieldType(),
+                    vs::bytesValues,
+                    configs[i].format(),
+                    configs[i].missing(),
+                    size,
+                    reverseMul
+                );
+
             } else if (configs[i].valuesSource() instanceof ValuesSource.Numeric) {
                 final ValuesSource.Numeric vs = (ValuesSource.Numeric) configs[i].valuesSource();
                 if (vs.isFloatingPoint()) {
-                    sources[i] = new DoubleValuesSource(bigArrays, configs[i].fieldType(), vs::doubleValues, size, reverseMul);
+                    sources[i] = new DoubleValuesSource(
+                        bigArrays,
+                        configs[i].fieldType(),
+                        vs::doubleValues,
+                        configs[i].format(),
+                        configs[i].missing(),
+                        size,
+                        reverseMul
+                    );
+
                 } else {
                     if (vs instanceof RoundingValuesSource) {
-                        sources[i] = new LongValuesSource(bigArrays, configs[i].fieldType(), vs::longValues,
-                            ((RoundingValuesSource) vs)::round, configs[i].format(), size, reverseMul);
+                        sources[i] = new LongValuesSource(
+                            bigArrays,
+                            configs[i].fieldType(),
+                            vs::longValues,
+                            ((RoundingValuesSource) vs)::round,
+                            configs[i].format(),
+                            configs[i].missing(),
+                            size,
+                            reverseMul
+                        );
+
                     } else {
-                        sources[i] = new LongValuesSource(bigArrays, configs[i].fieldType(), vs::longValues,
-                            (value) -> value, configs[i].format(), size, reverseMul);
+                        sources[i] = new LongValuesSource(
+                            bigArrays,
+                            configs[i].fieldType(),
+                            vs::longValues,
+                            (value) -> value,
+                            configs[i].format(),
+                            configs[i].missing(),
+                            size,
+                            reverseMul
+                        );
+
                     }
                 }
+            } else {
+                throw new IllegalArgumentException("Unknown value source: " + configs[i].valuesSource().getClass().getName() +
+                    " for field: " + sources[i].fieldType.name());
             }
         }
         return sources;

@@ -291,6 +291,7 @@ public abstract class CompositeValuesSourceBuilder<AB extends CompositeValuesSourceBuilder<AB>>
     public final CompositeValuesSourceConfig build(SearchContext context) throws IOException {
         ValuesSourceConfig<?> config = ValuesSourceConfig.resolve(context.getQueryShardContext(),
             valueType, field, script, missing, null, format);
+
         if (config.unmapped() && field != null && config.missing() == null) {
             // this source cannot produce any values so we refuse to build
             // since composite buckets are not created on null values

@@ -32,13 +32,25 @@ class CompositeValuesSourceConfig {
     private final ValuesSource vs;
     private final DocValueFormat format;
     private final int reverseMul;
+    private final Object missing;
 
-    CompositeValuesSourceConfig(String name, @Nullable MappedFieldType fieldType, ValuesSource vs, DocValueFormat format, SortOrder order) {
+    /**
+     * Creates a new {@link CompositeValuesSourceConfig}.
+     * @param name The name of the source.
+     * @param fieldType The field type or null if the source is a script.
+     * @param vs The underlying {@link ValuesSource}.
+     * @param format The {@link DocValueFormat} of this source.
+     * @param order The sort order associated with this source.
+     * @param missing The missing value or null if documents with missing value should be ignored.
+     */
+    CompositeValuesSourceConfig(String name, @Nullable MappedFieldType fieldType, ValuesSource vs, DocValueFormat format,
+                                SortOrder order, @Nullable Object missing) {
         this.name = name;
         this.fieldType = fieldType;
         this.vs = vs;
         this.format = format;
         this.reverseMul = order == SortOrder.ASC ? 1 : -1;
+        this.missing = missing;
     }
 
     /**
@@ -70,6 +82,13 @@ class CompositeValuesSourceConfig {
         return format;
     }
 
+    /**
+     * The missing value for this configuration or null if documents with missing value should be ignored.
+     */
+    Object missing() {
+        return missing;
+    }
+
     /**
      * The sort order for the values source (e.g. -1 for descending and 1 for ascending).
      */

@@ -33,7 +33,6 @@ import org.elasticsearch.script.Script;
 import org.elasticsearch.search.DocValueFormat;
 import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval;
 import org.elasticsearch.search.aggregations.bucket.histogram.Histogram;
-import org.elasticsearch.search.aggregations.support.FieldContext;
 import org.elasticsearch.search.aggregations.support.ValueType;
 import org.elasticsearch.search.aggregations.support.ValuesSource;
 import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
@@ -227,7 +226,7 @@ public class DateHistogramValuesSourceBuilder extends CompositeValuesSourceBuilder<DateHistogramValuesSourceBuilder>
             // is specified in the builder.
             final DocValueFormat docValueFormat = format() == null ? DocValueFormat.RAW : config.format();
             final MappedFieldType fieldType = config.fieldContext() != null ? config.fieldContext().fieldType() : null;
-            return new CompositeValuesSourceConfig(name, fieldType, vs, docValueFormat, order());
+            return new CompositeValuesSourceConfig(name, fieldType, vs, docValueFormat, order(), missing());
         } else {
             throw new IllegalArgumentException("invalid source, expected numeric, got " + orig.getClass().getSimpleName());
         }

@@ -28,6 +28,7 @@ import org.elasticsearch.common.util.BigArrays;
 import org.elasticsearch.common.util.DoubleArray;
 import org.elasticsearch.index.fielddata.SortedNumericDoubleValues;
 import org.elasticsearch.index.mapper.MappedFieldType;
+import org.elasticsearch.search.DocValueFormat;
 import org.elasticsearch.search.aggregations.LeafBucketCollector;
 
 import java.io.IOException;
@@ -42,8 +43,8 @@ class DoubleValuesSource extends SingleDimensionValuesSource<Double> {
 
     DoubleValuesSource(BigArrays bigArrays, MappedFieldType fieldType,
                        CheckedFunction<LeafReaderContext, SortedNumericDoubleValues, IOException> docValuesFunc,
-                       int size, int reverseMul) {
-        super(fieldType, size, reverseMul);
+                       DocValueFormat format, Object missing, int size, int reverseMul) {
+        super(format, fieldType, missing, size, reverseMul);
         this.docValuesFunc = docValuesFunc;
         this.values = bigArrays.newDoubleArray(size, false);
     }
@@ -77,7 +78,9 @@ class DoubleValuesSource extends SingleDimensionValuesSource<Double> {
         if (value instanceof Number) {
             afterValue = ((Number) value).doubleValue();
         } else {
-            afterValue = Double.parseDouble(value.toString());
+            afterValue = format.parseDouble(value.toString(), false, () -> {
+                throw new IllegalArgumentException("now() is not supported in [after] key");
+            });
         }
     }
 

@@ -30,6 +30,8 @@ import org.elasticsearch.common.lease.Releasables;
 import org.elasticsearch.common.util.BigArrays;
 import org.elasticsearch.common.util.LongArray;
 import org.elasticsearch.index.mapper.MappedFieldType;
+import org.elasticsearch.index.mapper.StringFieldType;
+import org.elasticsearch.search.DocValueFormat;
 import org.elasticsearch.search.aggregations.LeafBucketCollector;
 
 import java.io.IOException;
@@ -52,8 +54,8 @@ class GlobalOrdinalValuesSource extends SingleDimensionValuesSource<BytesRef> {
 
     GlobalOrdinalValuesSource(BigArrays bigArrays,
                               MappedFieldType type, CheckedFunction<LeafReaderContext, SortedSetDocValues, IOException> docValuesFunc,
-                              int size, int reverseMul) {
-        super(type, size, reverseMul);
+                              DocValueFormat format, Object missing, int size, int reverseMul) {
+        super(format, type, missing, size, reverseMul);
         this.docValuesFunc = docValuesFunc;
         this.values = bigArrays.newLongArray(size, false);
     }
@@ -87,10 +89,8 @@ class GlobalOrdinalValuesSource extends SingleDimensionValuesSource<BytesRef> {
 
     @Override
     void setAfter(Comparable<?> value) {
-        if (value instanceof BytesRef) {
-            afterValue = (BytesRef) value;
-        } else if (value instanceof String) {
-            afterValue = new BytesRef(value.toString());
+        if (value.getClass() == String.class) {
+            afterValue = format.parseBytesRef(value.toString());
         } else {
             throw new IllegalArgumentException("invalid value, expected string, got " + value.getClass().getSimpleName());
         }
@@ -164,7 +164,8 @@ class GlobalOrdinalValuesSource extends SingleDimensionValuesSource<BytesRef> {
     @Override
     SortedDocsProducer createSortedDocsProducerOrNull(IndexReader reader, Query query) {
         if (checkIfSortedDocsIsApplicable(reader, fieldType) == false ||
-                (query != null && query.getClass() != MatchAllDocsQuery.class)) {
+                fieldType instanceof StringFieldType == false ||
+                (query != null && query.getClass() != MatchAllDocsQuery.class)) {
             return null;
         }
         return new TermsSortedDocsProducer(fieldType.name());

@@ -115,7 +115,7 @@ public class HistogramValuesSourceBuilder extends CompositeValuesSourceBuilder<HistogramValuesSourceBuilder>
             ValuesSource.Numeric numeric = (ValuesSource.Numeric) orig;
             final HistogramValuesSource vs = new HistogramValuesSource(numeric, interval);
             final MappedFieldType fieldType = config.fieldContext() != null ? config.fieldContext().fieldType() : null;
-            return new CompositeValuesSourceConfig(name, fieldType, vs, config.format(), order());
+            return new CompositeValuesSourceConfig(name, fieldType, vs, config.format(), order(), missing());
         } else {
             throw new IllegalArgumentException("invalid source, expected numeric, got " + orig.getClass().getSimpleName());
         }

@@ -47,19 +47,16 @@ import java.util.function.ToLongFunction;
 class LongValuesSource extends SingleDimensionValuesSource<Long> {
     private final CheckedFunction<LeafReaderContext, SortedNumericDocValues, IOException> docValuesFunc;
     private final LongUnaryOperator rounding;
-    // handles "format" for date histogram source
-    private final DocValueFormat format;
 
     private final LongArray values;
     private long currentValue;
 
     LongValuesSource(BigArrays bigArrays, MappedFieldType fieldType,
                      CheckedFunction<LeafReaderContext, SortedNumericDocValues, IOException> docValuesFunc,
-                     LongUnaryOperator rounding, DocValueFormat format, int size, int reverseMul) {
-        super(fieldType, size, reverseMul);
+                     LongUnaryOperator rounding, DocValueFormat format, Object missing, int size, int reverseMul) {
+        super(format, fieldType, missing, size, reverseMul);
         this.docValuesFunc = docValuesFunc;
         this.rounding = rounding;
-        this.format = format;
         this.values = bigArrays.newLongArray(size, false);
     }
 

@@ -26,6 +26,7 @@ import org.apache.lucene.search.Query;
 import org.elasticsearch.common.Nullable;
 import org.elasticsearch.common.lease.Releasable;
 import org.elasticsearch.index.mapper.MappedFieldType;
+import org.elasticsearch.search.DocValueFormat;
 import org.elasticsearch.search.aggregations.LeafBucketCollector;
 import org.elasticsearch.search.sort.SortOrder;
 
@@ -35,21 +36,31 @@ import java.io.IOException;
  * A source that can record and compare values of similar type.
  */
 abstract class SingleDimensionValuesSource<T extends Comparable<T>> implements Releasable {
+    protected final DocValueFormat format;
+    @Nullable
+    protected final MappedFieldType fieldType;
+    @Nullable
+    protected final Object missing;
+
     protected final int size;
     protected final int reverseMul;
 
     protected T afterValue;
-    @Nullable
-    protected MappedFieldType fieldType;
 
     /**
-     * Ctr
+     * Creates a new {@link SingleDimensionValuesSource}.
      *
-     * @param fieldType The fieldType associated with the source.
+     * @param format The format of the source.
+     * @param fieldType The field type or null if the source is a script.
+     * @param missing The missing value or null if documents with missing value should be ignored.
      * @param size The number of values to record.
      * @param reverseMul -1 if the natural order ({@link SortOrder#ASC}) should be reversed.
      */
-    SingleDimensionValuesSource(@Nullable MappedFieldType fieldType, int size, int reverseMul) {
+    SingleDimensionValuesSource(DocValueFormat format, @Nullable MappedFieldType fieldType, @Nullable Object missing,
+                                int size, int reverseMul) {
+        this.format = format;
         this.fieldType = fieldType;
+        this.missing = missing;
         this.size = size;
         this.reverseMul = reverseMul;
         this.afterValue = null;
@@ -127,6 +138,7 @@ abstract class SingleDimensionValuesSource<T extends Comparable<T>> implements Releasable {
      */
     protected boolean checkIfSortedDocsIsApplicable(IndexReader reader, MappedFieldType fieldType) {
         if (fieldType == null ||
+            missing != null ||
             fieldType.indexOptions() == IndexOptions.NONE ||
             // inverse of the natural order
             reverseMul == -1) {

@@ -24,7 +24,9 @@ import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.xcontent.ObjectParser;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.index.mapper.DateFieldMapper;
 import org.elasticsearch.index.mapper.MappedFieldType;
+import org.elasticsearch.search.DocValueFormat;
 import org.elasticsearch.search.aggregations.support.ValuesSource;
 import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
 import org.elasticsearch.search.internal.SearchContext;
@@ -84,6 +86,13 @@ public class TermsValuesSourceBuilder extends CompositeValuesSourceBuilder<TermsValuesSourceBuilder>
             vs = ValuesSource.Numeric.EMPTY;
         }
         final MappedFieldType fieldType = config.fieldContext() != null ? config.fieldContext().fieldType() : null;
-        return new CompositeValuesSourceConfig(name, fieldType, vs, config.format(), order());
+        final DocValueFormat format;
+        if (format() == null && fieldType instanceof DateFieldMapper.DateFieldType) {
+            // defaults to the raw format on date fields (preserve timestamp as longs).
+            format = DocValueFormat.RAW;
+        } else {
+            format = config.format();
+        }
+        return new CompositeValuesSourceConfig(name, fieldType, vs, format, order(), missing());
     }
 }

@@ -22,6 +22,7 @@ package org.elasticsearch.search.aggregations.bucket.composite;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.DoublePoint;
 import org.apache.lucene.document.Field;
+import org.apache.lucene.document.InetAddressPoint;
 import org.apache.lucene.document.IntPoint;
 import org.apache.lucene.document.LongPoint;
 import org.apache.lucene.document.SortedNumericDocValuesField;
@@ -41,6 +42,7 @@ import org.apache.lucene.util.NumericUtils;
 import org.elasticsearch.ElasticsearchParseException;
 import org.elasticsearch.index.mapper.ContentPath;
 import org.elasticsearch.index.mapper.DateFieldMapper;
+import org.elasticsearch.index.mapper.IpFieldMapper;
 import org.elasticsearch.index.mapper.KeywordFieldMapper;
 import org.elasticsearch.index.mapper.MappedFieldType;
 import org.elasticsearch.index.mapper.Mapper;
@@ -62,6 +64,7 @@ import org.junit.After;
 import org.junit.Before;
 
 import java.io.IOException;
+import java.net.InetAddress;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collections;
@@ -87,7 +90,7 @@ public class CompositeAggregatorTests extends AggregatorTestCase {
     @Before
     public void setUp() throws Exception {
         super.setUp();
-        FIELD_TYPES = new MappedFieldType[6];
+        FIELD_TYPES = new MappedFieldType[7];
         FIELD_TYPES[0] = new KeywordFieldMapper.KeywordFieldType();
         FIELD_TYPES[0].setName("keyword");
         FIELD_TYPES[0].setHasDocValues(true);
@@ -113,6 +116,10 @@ public class CompositeAggregatorTests extends AggregatorTestCase {
         FIELD_TYPES[5] = new KeywordFieldMapper.KeywordFieldType();
         FIELD_TYPES[5].setName("terms");
         FIELD_TYPES[5].setHasDocValues(true);
+
+        FIELD_TYPES[6] = new IpFieldMapper.IpFieldType();
+        FIELD_TYPES[6].setName("ip");
+        FIELD_TYPES[6].setHasDocValues(true);
     }
 
     @Override
@@ -812,6 +819,47 @@ public class CompositeAggregatorTests extends AggregatorTestCase {
         );
     }
 
+    public void testWithDateTerms() throws IOException {
+        final List<Map<String, List<Object>>> dataset = new ArrayList<>();
+        dataset.addAll(
+            Arrays.asList(
+                createDocument("date", asLong("2017-10-20T03:08:45")),
+                createDocument("date", asLong("2016-09-20T09:00:34")),
+                createDocument("date", asLong("2016-09-20T11:34:00")),
+                createDocument("date", asLong("2017-10-20T06:09:24")),
+                createDocument("date", asLong("2017-10-19T06:09:24")),
+                createDocument("long", 4L)
+            )
+        );
+        testSearchCase(Arrays.asList(new MatchAllDocsQuery(), new DocValuesFieldExistsQuery("date"),
+            LongPoint.newRangeQuery(
+                "date",
+                asLong("2016-09-20T09:00:34"),
+                asLong("2017-10-20T06:09:24")
+            )), dataset,
+            () -> {
+                TermsValuesSourceBuilder histo = new TermsValuesSourceBuilder("date")
+                    .field("date");
+                return new CompositeAggregationBuilder("name", Collections.singletonList(histo));
+            },
+            (result) -> {
+                assertEquals(5, result.getBuckets().size());
+                assertEquals("{date=1508479764000}", result.afterKey().toString());
+                assertThat(result.getBuckets().get(0).getKey().get("date"), instanceOf(Long.class));
+                assertEquals("{date=1474362034000}", result.getBuckets().get(0).getKeyAsString());
+                assertEquals(1L, result.getBuckets().get(0).getDocCount());
+                assertEquals("{date=1474371240000}", result.getBuckets().get(1).getKeyAsString());
+                assertEquals(1L, result.getBuckets().get(1).getDocCount());
+                assertEquals("{date=1508393364000}", result.getBuckets().get(2).getKeyAsString());
+                assertEquals(1L, result.getBuckets().get(2).getDocCount());
+                assertEquals("{date=1508468925000}", result.getBuckets().get(3).getKeyAsString());
+                assertEquals(1L, result.getBuckets().get(3).getDocCount());
+                assertEquals("{date=1508479764000}", result.getBuckets().get(4).getKeyAsString());
+                assertEquals(1L, result.getBuckets().get(4).getDocCount());
+            }
+        );
+    }
+
     public void testWithDateHistogramAndFormat() throws IOException {
         final List<Map<String, List<Object>>> dataset = new ArrayList<>();
         dataset.addAll(
@@ -1437,6 +1485,51 @@ public class CompositeAggregatorTests extends AggregatorTestCase {
         assertEquals(expected, seen);
     }
 
+    public void testWithIP() throws Exception {
+        final List<Map<String, List<Object>>> dataset = new ArrayList<>();
+        dataset.addAll(
+            Arrays.asList(
+                createDocument("ip", InetAddress.getByName("127.0.0.1")),
+                createDocument("ip", InetAddress.getByName("192.168.0.1")),
+                createDocument("ip", InetAddress.getByName("::1")),
+                createDocument("ip", InetAddress.getByName("::1")),
+                createDocument("ip", InetAddress.getByName("192.168.0.1"))
+            )
+        );
+        testSearchCase(Arrays.asList(new MatchAllDocsQuery(), new DocValuesFieldExistsQuery("ip")), dataset,
+            () -> {
+                TermsValuesSourceBuilder terms = new TermsValuesSourceBuilder("ip")
+                    .field("ip");
+                return new CompositeAggregationBuilder("name", Collections.singletonList(terms));
+            }, (result) -> {
+                assertEquals(3, result.getBuckets().size());
+                assertEquals("{ip=192.168.0.1}", result.afterKey().toString());
+                assertEquals("{ip=::1}", result.getBuckets().get(0).getKeyAsString());
+                assertEquals(2L, result.getBuckets().get(0).getDocCount());
+                assertEquals("{ip=127.0.0.1}", result.getBuckets().get(1).getKeyAsString());
+                assertEquals(1L, result.getBuckets().get(1).getDocCount());
+                assertEquals("{ip=192.168.0.1}", result.getBuckets().get(2).getKeyAsString());
+                assertEquals(2L, result.getBuckets().get(2).getDocCount());
+            }
+        );
+
+        testSearchCase(Arrays.asList(new MatchAllDocsQuery(), new DocValuesFieldExistsQuery("ip")), dataset,
+            () -> {
+                TermsValuesSourceBuilder terms = new TermsValuesSourceBuilder("ip")
+                    .field("ip");
+                return new CompositeAggregationBuilder("name", Collections.singletonList(terms))
+                    .aggregateAfter(Collections.singletonMap("ip", "::1"));
+            }, (result) -> {
+                assertEquals(2, result.getBuckets().size());
+                assertEquals("{ip=192.168.0.1}", result.afterKey().toString());
+                assertEquals("{ip=127.0.0.1}", result.getBuckets().get(0).getKeyAsString());
+                assertEquals(1L, result.getBuckets().get(0).getDocCount());
+                assertEquals("{ip=192.168.0.1}", result.getBuckets().get(1).getKeyAsString());
+                assertEquals(2L, result.getBuckets().get(1).getDocCount());
+            }
+        );
+    }
+
     private void testSearchCase(List<Query> queries,
                                 List<Map<String, List<Object>>> dataset,
                                 Supplier<CompositeAggregationBuilder> create,
@@ -1491,6 +1584,9 @@ public class CompositeAggregatorTests extends AggregatorTestCase {
         } else if (value instanceof String) {
             doc.add(new SortedSetDocValuesField(name, new BytesRef((String) value)));
             doc.add(new StringField(name, new BytesRef((String) value), Field.Store.NO));
+        } else if (value instanceof InetAddress) {
+            doc.add(new SortedSetDocValuesField(name, new BytesRef(InetAddressPoint.encode((InetAddress) value))));
+            doc.add(new InetAddressPoint(name, (InetAddress) value));
         } else {
             throw new AssertionError("invalid object: " + value.getClass().getSimpleName());
         }

@@ -212,20 +212,28 @@ public class CompositeValuesCollectorQueueTests extends AggregatorTestCase {
             if (types[i].clazz == Long.class) {
                 sources[i] = new LongValuesSource(bigArrays, fieldType,
                     context -> DocValues.getSortedNumeric(context.reader(), fieldType.name()), value -> value,
-                    DocValueFormat.RAW, size, 1);
+                    DocValueFormat.RAW, null, size, 1);
             } else if (types[i].clazz == Double.class) {
-                sources[i] = new DoubleValuesSource(bigArrays, fieldType,
+                sources[i] = new DoubleValuesSource(
+                    bigArrays, fieldType,
                     context -> FieldData.sortableLongBitsToDoubles(DocValues.getSortedNumeric(context.reader(), fieldType.name())),
-                    size, 1);
+                    DocValueFormat.RAW, null, size, 1
+                );
             } else if (types[i].clazz == BytesRef.class) {
                 if (forceMerge) {
                     // we don't create global ordinals but we test this mode when the reader has a single segment
                     // since ordinals are global in this case.
-                    sources[i] = new GlobalOrdinalValuesSource(bigArrays, fieldType,
-                        context -> DocValues.getSortedSet(context.reader(), fieldType.name()), size, 1);
+                    sources[i] = new GlobalOrdinalValuesSource(
+                        bigArrays, fieldType,
+                        context -> DocValues.getSortedSet(context.reader(), fieldType.name()),
+                        DocValueFormat.RAW, null, size, 1
+                    );
                 } else {
-                    sources[i] = new BinaryValuesSource(fieldType,
-                        context -> FieldData.toString(DocValues.getSortedSet(context.reader(), fieldType.name())), size, 1);
+                    sources[i] = new BinaryValuesSource(
+                        fieldType,
+                        context -> FieldData.toString(DocValues.getSortedSet(context.reader(), fieldType.name())),
+                        DocValueFormat.RAW, null, size, 1
+                    );
                 }
             } else {
                 assert(false);

@@ -25,6 +25,7 @@ import org.apache.lucene.index.Term;
 import org.apache.lucene.search.MatchAllDocsQuery;
 import org.apache.lucene.search.TermQuery;
 import org.elasticsearch.common.util.BigArrays;
+import org.elasticsearch.index.mapper.IpFieldMapper;
 import org.elasticsearch.index.mapper.KeywordFieldMapper;
 import org.elasticsearch.index.mapper.MappedFieldType;
 import org.elasticsearch.index.mapper.NumberFieldMapper;
@@ -38,28 +39,100 @@ public class SingleDimensionValuesSourceTests extends ESTestCase {
     public void testBinarySorted() {
         MappedFieldType keyword = new KeywordFieldMapper.KeywordFieldType();
         keyword.setName("keyword");
-        BinaryValuesSource source = new BinaryValuesSource(keyword, context -> null, 1, 1);
+        BinaryValuesSource source = new BinaryValuesSource(
+            keyword,
+            context -> null,
+            DocValueFormat.RAW,
+            null,
+            1,
+            1
+        );
         assertNull(source.createSortedDocsProducerOrNull(mockIndexReader(100, 49), null));
         IndexReader reader = mockIndexReader(1, 1);
         assertNotNull(source.createSortedDocsProducerOrNull(reader, new MatchAllDocsQuery()));
         assertNotNull(source.createSortedDocsProducerOrNull(reader, null));
         assertNull(source.createSortedDocsProducerOrNull(reader,
             new TermQuery(new Term("keyword", "toto)"))));
-        source = new BinaryValuesSource(keyword, context -> null, 0, -1);
+
+        source = new BinaryValuesSource(
+            keyword,
+            context -> null,
+            DocValueFormat.RAW,
+            "missing_value",
+            1,
+            1
+        );
+        assertNull(source.createSortedDocsProducerOrNull(reader, new MatchAllDocsQuery()));
+        assertNull(source.createSortedDocsProducerOrNull(reader, null));
+
+        source = new BinaryValuesSource(
+            keyword,
+            context -> null,
+            DocValueFormat.RAW,
+            null,
+            0,
+            -1
+        );
         assertNull(source.createSortedDocsProducerOrNull(reader, null));
+
+        MappedFieldType ip = new IpFieldMapper.IpFieldType();
+        ip.setName("ip");
+        source = new BinaryValuesSource(ip, context -> null, DocValueFormat.RAW, null, 1, 1);
+        assertNull(source.createSortedDocsProducerOrNull(reader, null));
     }
 
     public void testGlobalOrdinalsSorted() {
-        MappedFieldType keyword = new KeywordFieldMapper.KeywordFieldType();
+        final MappedFieldType keyword = new KeywordFieldMapper.KeywordFieldType();
         keyword.setName("keyword");
-        BinaryValuesSource source = new BinaryValuesSource(keyword, context -> null, 1, 1);
+        GlobalOrdinalValuesSource source = new GlobalOrdinalValuesSource(
+            BigArrays.NON_RECYCLING_INSTANCE,
+            keyword, context -> null,
+            DocValueFormat.RAW,
+            null,
+            1,
+            1
+        );
         assertNull(source.createSortedDocsProducerOrNull(mockIndexReader(100, 49), null));
         IndexReader reader = mockIndexReader(1, 1);
         assertNotNull(source.createSortedDocsProducerOrNull(reader, new MatchAllDocsQuery()));
         assertNotNull(source.createSortedDocsProducerOrNull(reader, null));
         assertNull(source.createSortedDocsProducerOrNull(reader,
             new TermQuery(new Term("keyword", "toto)"))));
-        source = new BinaryValuesSource(keyword, context -> null, 1, -1);
+
+        source = new GlobalOrdinalValuesSource(
+            BigArrays.NON_RECYCLING_INSTANCE,
+            keyword,
+            context -> null,
+            DocValueFormat.RAW,
+            "missing_value",
+            1,
+            1
+        );
+        assertNull(source.createSortedDocsProducerOrNull(reader, new MatchAllDocsQuery()));
+        assertNull(source.createSortedDocsProducerOrNull(reader, null));
+
+        source = new GlobalOrdinalValuesSource(
+            BigArrays.NON_RECYCLING_INSTANCE,
+            keyword,
+            context -> null,
+            DocValueFormat.RAW,
+            null,
+            1,
+            -1
+        );
         assertNull(source.createSortedDocsProducerOrNull(reader, null));
+
+        final MappedFieldType ip = new IpFieldMapper.IpFieldType();
+        ip.setName("ip");
+        source = new GlobalOrdinalValuesSource(
+            BigArrays.NON_RECYCLING_INSTANCE,
+            ip,
+            context -> null,
+            DocValueFormat.RAW,
+            null,
+            1,
+            1
+        );
+        assertNull(source.createSortedDocsProducerOrNull(reader, null));
     }
 
@@ -72,23 +145,62 @@ public class SingleDimensionValuesSourceTests extends ESTestCase {
             numberType == NumberFieldMapper.NumberType.SHORT ||
             numberType == NumberFieldMapper.NumberType.INTEGER ||
             numberType == NumberFieldMapper.NumberType.LONG) {
-            source = new LongValuesSource(BigArrays.NON_RECYCLING_INSTANCE,
-                number, context -> null, value -> value, DocValueFormat.RAW, 1, 1);
+
+            source = new LongValuesSource(
+                BigArrays.NON_RECYCLING_INSTANCE,
+                number,
+                context -> null,
+                value -> value,
+                DocValueFormat.RAW,
+                null,
+                1,
+                1
+            );
             assertNull(source.createSortedDocsProducerOrNull(mockIndexReader(100, 49), null));
             IndexReader reader = mockIndexReader(1, 1);
             assertNotNull(source.createSortedDocsProducerOrNull(reader, new MatchAllDocsQuery()));
             assertNotNull(source.createSortedDocsProducerOrNull(reader, null));
             assertNotNull(source.createSortedDocsProducerOrNull(reader, LongPoint.newRangeQuery("number", 0, 1)));
             assertNull(source.createSortedDocsProducerOrNull(reader, new TermQuery(new Term("keyword", "toto)"))));
-            LongValuesSource sourceRev =
-                new LongValuesSource(BigArrays.NON_RECYCLING_INSTANCE,
-                    number, context -> null, value -> value, DocValueFormat.RAW, 1, -1);
+
+            LongValuesSource sourceWithMissing = new LongValuesSource(
+                BigArrays.NON_RECYCLING_INSTANCE,
+                number,
+                context -> null,
+                value -> value,
+                DocValueFormat.RAW,
+                0d,
+                1,
+                1);
+            assertNull(sourceWithMissing.createSortedDocsProducerOrNull(reader, new MatchAllDocsQuery()));
+            assertNull(sourceWithMissing.createSortedDocsProducerOrNull(reader, null));
+            assertNull(sourceWithMissing.createSortedDocsProducerOrNull(reader, new TermQuery(new Term("keyword", "toto)"))));
+
+            LongValuesSource sourceRev = new LongValuesSource(
+                BigArrays.NON_RECYCLING_INSTANCE,
+                number,
+                context -> null,
+                value -> value,
+                DocValueFormat.RAW,
+                null,
+                1,
+                -1
+            );
             assertNull(sourceRev.createSortedDocsProducerOrNull(reader, null));
         } else if (numberType == NumberFieldMapper.NumberType.HALF_FLOAT ||
-            numberType == NumberFieldMapper.NumberType.FLOAT ||
-            numberType == NumberFieldMapper.NumberType.DOUBLE) {
-            source = new DoubleValuesSource(BigArrays.NON_RECYCLING_INSTANCE,
-                number, context -> null, 1, 1);
+                numberType == NumberFieldMapper.NumberType.FLOAT ||
+                numberType == NumberFieldMapper.NumberType.DOUBLE) {
+            source = new DoubleValuesSource(
+                BigArrays.NON_RECYCLING_INSTANCE,
+                number,
+                context -> null,
+                DocValueFormat.RAW,
+                null,
+                1,
+                1
+            );
+            IndexReader reader = mockIndexReader(1, 1);
+            assertNull(source.createSortedDocsProducerOrNull(reader, null));
         } else{
             throw new AssertionError ("missing type:" + numberType.typeName());
         }