[7.x] Comprehensively test supported/unsupported field type:agg combinations (#54451)
* Comprehensively test supported/unsupported field type:agg combinations (#52493)

  This adds a test to AggregatorTestCase that allows us to programmatically verify that an aggregator supports or does not support a particular field type. It fetches the list of registered field type parsers, creates a MappedFieldType from each parser, and then attempts to run a basic agg against the field. A supplied list of supported ValuesSourceTypes (VSTypes) is then compared against the output (success or exception) and the test succeeds or fails accordingly.

  Co-Authored-By: Mark Tozzi <mark.tozzi@gmail.com>

* Skip fields that are not aggregatable

* Use newIndexSearcher() to avoid incompatible readers (#52723)

  Lucene's `newSearcher()` can generate readers like ParallelCompositeReader which we can't use. We need to instead use our helper `newIndexSearcher`.
Parent: 0d4a001ef2
Commit: c9db2de41d
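For orientation before the diff: a minimal sketch of how a test class opts into the new field-type matrix check, mirroring the TermsAggregatorTests hunk further down. The subclass name is hypothetical; the builder name "foo" and the supported-types list are the ones used in that hunk, not a recommendation.

import java.util.Arrays;
import java.util.Collections;
import java.util.List;

import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.search.aggregations.AggregationBuilder;
import org.elasticsearch.search.aggregations.AggregatorTestCase;
import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder;
import org.elasticsearch.search.aggregations.support.CoreValuesSourceType;
import org.elasticsearch.search.aggregations.support.ValueType;
import org.elasticsearch.search.aggregations.support.ValuesSourceType;

// Hypothetical subclass, adapted from the TermsAggregatorTests changes in this commit.
public class MyAggregatorTests extends AggregatorTestCase {

    @Override
    protected AggregationBuilder createAggBuilderForTypeTest(MappedFieldType fieldType, String fieldName) {
        // Build the aggregation under test against whatever field the matrix test supplies.
        return new TermsAggregationBuilder("foo", ValueType.STRING).field(fieldName);
    }

    @Override
    protected List<ValuesSourceType> getSupportedValuesSourceTypes() {
        // Declare the ValuesSourceTypes this aggregator is expected to handle;
        // every other aggregatable field type is expected to throw.
        return Collections.unmodifiableList(Arrays.asList(CoreValuesSourceType.NUMERIC, CoreValuesSourceType.BYTES));
    }
}
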
@@ -62,6 +62,8 @@ import org.elasticsearch.search.DocValueFormat;
 import org.elasticsearch.search.MultiValueMode;
 import org.elasticsearch.search.sort.BucketedSort;
 import org.elasticsearch.search.sort.SortOrder;
+import org.elasticsearch.search.aggregations.support.CoreValuesSourceType;
+import org.elasticsearch.search.aggregations.support.ValuesSourceType;

 import java.io.IOException;
 import java.math.BigDecimal;
@@ -295,6 +297,11 @@ public class ScaledFloatFieldMapper extends FieldMapper {
             };
         }

+        @Override
+        public ValuesSourceType getValuesSourceType() {
+            return CoreValuesSourceType.NUMERIC;
+        }
+
         @Override
         public Object valueForDisplay(Object value) {
             if (value == null) {

@@ -31,6 +31,8 @@ import org.elasticsearch.index.mapper.MappedFieldType;
 import org.elasticsearch.index.mapper.ParseContext;
 import org.elasticsearch.index.mapper.StringFieldType;
 import org.elasticsearch.index.query.QueryShardContext;
+import org.elasticsearch.search.aggregations.support.CoreValuesSourceType;
+import org.elasticsearch.search.aggregations.support.ValuesSourceType;

 import java.io.IOException;
 import java.util.List;
@@ -95,6 +97,11 @@ public class MetaJoinFieldMapper extends FieldMapper {
             return new DocValuesIndexFieldData.Builder();
         }

+        @Override
+        public ValuesSourceType getValuesSourceType() {
+            return CoreValuesSourceType.BYTES;
+        }
+
         @Override
         public Object valueForDisplay(Object value) {
             if (value == null) {

@@ -41,6 +41,8 @@ import org.elasticsearch.index.mapper.Mapper;
 import org.elasticsearch.index.mapper.ParseContext;
 import org.elasticsearch.index.mapper.StringFieldType;
 import org.elasticsearch.index.query.QueryShardContext;
+import org.elasticsearch.search.aggregations.support.CoreValuesSourceType;
+import org.elasticsearch.search.aggregations.support.ValuesSourceType;

 import java.io.IOException;
 import java.util.Collection;
@@ -118,6 +120,11 @@ public final class ParentIdFieldMapper extends FieldMapper {
             return new DocValuesIndexFieldData.Builder();
         }

+        @Override
+        public ValuesSourceType getValuesSourceType() {
+            return CoreValuesSourceType.BYTES;
+        }
+
         @Override
         public Object valueForDisplay(Object value) {
             if (value == null) {

@@ -43,6 +43,8 @@ import org.elasticsearch.index.mapper.MapperService;
 import org.elasticsearch.index.mapper.ParseContext;
 import org.elasticsearch.index.mapper.StringFieldType;
 import org.elasticsearch.index.query.QueryShardContext;
+import org.elasticsearch.search.aggregations.support.CoreValuesSourceType;
+import org.elasticsearch.search.aggregations.support.ValuesSourceType;

 import java.io.IOException;
 import java.util.ArrayList;
@@ -227,6 +229,11 @@ public final class ParentJoinFieldMapper extends FieldMapper {
             return new DocValuesIndexFieldData.Builder();
         }

+        @Override
+        public ValuesSourceType getValuesSourceType() {
+            return CoreValuesSourceType.BYTES;
+        }
+
         @Override
         public Object valueForDisplay(Object value) {
             if (value == null) {

@@ -46,6 +46,8 @@ import org.elasticsearch.index.fielddata.IndexFieldData;
 import org.elasticsearch.index.fielddata.plain.DocValuesIndexFieldData;
 import org.elasticsearch.index.query.QueryShardContext;
 import org.elasticsearch.search.DocValueFormat;
+import org.elasticsearch.search.aggregations.support.CoreValuesSourceType;
+import org.elasticsearch.search.aggregations.support.ValuesSourceType;

 import java.io.IOException;
 import java.time.ZoneId;
@@ -139,6 +141,11 @@ public class ICUCollationKeywordFieldMapper extends FieldMapper {
             return new DocValuesIndexFieldData.Builder();
         }

+        @Override
+        public ValuesSourceType getValuesSourceType() {
+            return CoreValuesSourceType.BYTES;
+        }
+
         @Override
         protected BytesRef indexedValueForSearch(Object value) {
             if (value == null) {

@@ -39,6 +39,8 @@ import org.elasticsearch.index.mapper.ParseContext;
 import org.elasticsearch.index.mapper.TypeParsers;
 import org.elasticsearch.index.query.QueryShardContext;
 import org.elasticsearch.index.query.QueryShardException;
+import org.elasticsearch.search.aggregations.support.CoreValuesSourceType;
+import org.elasticsearch.search.aggregations.support.ValuesSourceType;

 import java.io.IOException;
 import java.util.List;
@@ -124,6 +126,11 @@ public class Murmur3FieldMapper extends FieldMapper {
             return new DocValuesIndexFieldData.Builder().numericType(NumericType.LONG);
         }

+        @Override
+        public ValuesSourceType getValuesSourceType() {
+            return CoreValuesSourceType.NUMERIC;
+        }
+
         @Override
         public Query existsQuery(QueryShardContext context) {
             return new DocValuesFieldExistsQuery(name());

@@ -40,6 +40,8 @@ import org.elasticsearch.index.fielddata.plain.BytesBinaryDVIndexFieldData;
 import org.elasticsearch.index.query.QueryShardContext;
 import org.elasticsearch.index.query.QueryShardException;
 import org.elasticsearch.search.DocValueFormat;
+import org.elasticsearch.search.aggregations.support.CoreValuesSourceType;
+import org.elasticsearch.search.aggregations.support.ValuesSourceType;

 import java.io.IOException;
 import java.time.ZoneId;
@@ -136,6 +138,11 @@ public class BinaryFieldMapper extends FieldMapper {
             return new BytesBinaryDVIndexFieldData.Builder();
         }

+        @Override
+        public ValuesSourceType getValuesSourceType() {
+            return CoreValuesSourceType.BYTES;
+        }
+
         @Override
         public Query existsQuery(QueryShardContext context) {
             if (hasDocValues()) {

@@ -40,6 +40,8 @@ import org.elasticsearch.index.fielddata.IndexNumericFieldData.NumericType;
 import org.elasticsearch.index.fielddata.plain.DocValuesIndexFieldData;
 import org.elasticsearch.index.query.QueryShardContext;
 import org.elasticsearch.search.DocValueFormat;
+import org.elasticsearch.search.aggregations.support.CoreValuesSourceType;
+import org.elasticsearch.search.aggregations.support.ValuesSourceType;

 import java.io.IOException;
 import java.time.ZoneId;
@@ -189,6 +191,11 @@ public class BooleanFieldMapper extends FieldMapper {
             return new DocValuesIndexFieldData.Builder().numericType(NumericType.BOOLEAN);
         }

+        @Override
+        public ValuesSourceType getValuesSourceType() {
+            return CoreValuesSourceType.NUMERIC;
+        }
+
         @Override
         public DocValueFormat docValueFormat(@Nullable String format, ZoneId timeZone) {
             if (format != null) {

@@ -54,6 +54,8 @@ import org.elasticsearch.index.query.DateRangeIncludingNowQuery;
 import org.elasticsearch.index.query.QueryRewriteContext;
 import org.elasticsearch.index.query.QueryShardContext;
 import org.elasticsearch.search.DocValueFormat;
+import org.elasticsearch.search.aggregations.support.CoreValuesSourceType;
+import org.elasticsearch.search.aggregations.support.ValuesSourceType;

 import java.io.IOException;
 import java.time.DateTimeException;
@@ -73,6 +75,7 @@ import static org.elasticsearch.common.time.DateUtils.toLong;
 public final class DateFieldMapper extends FieldMapper {

     public static final String CONTENT_TYPE = "date";
+    public static final String DATE_NANOS_CONTENT_TYPE = "date_nanos";
     public static final DateFormatter DEFAULT_DATE_TIME_FORMATTER = DateFormatter.forPattern("strict_date_optional_time||epoch_millis");

     public static class Defaults {
@@ -101,7 +104,7 @@ public final class DateFieldMapper extends FieldMapper {
                 return LongPoint.decodeDimension(value, 0);
             }
         },
-        NANOSECONDS("date_nanos", NumericType.DATE_NANOSECONDS) {
+        NANOSECONDS(DATE_NANOS_CONTENT_TYPE, NumericType.DATE_NANOSECONDS) {
             @Override
             public long convert(Instant instant) {
                 return toLong(instant);
@@ -540,6 +543,11 @@ public final class DateFieldMapper extends FieldMapper {
             return new DocValuesIndexFieldData.Builder().numericType(resolution.numericType());
         }

+        @Override
+        public ValuesSourceType getValuesSourceType() {
+            return CoreValuesSourceType.NUMERIC;
+        }
+
         @Override
         public Object valueForDisplay(Object value) {
             Long val = (Long) value;

@@ -41,6 +41,8 @@ import org.elasticsearch.index.fielddata.plain.AbstractLatLonPointDVIndexFieldData;
 import org.elasticsearch.index.query.QueryShardContext;
 import org.elasticsearch.index.query.QueryShardException;
 import org.elasticsearch.index.query.VectorGeoPointShapeQueryProcessor;
+import org.elasticsearch.search.aggregations.support.CoreValuesSourceType;
+import org.elasticsearch.search.aggregations.support.ValuesSourceType;

 import java.io.IOException;
 import java.util.ArrayList;
@@ -243,6 +245,11 @@ public class GeoPointFieldMapper extends FieldMapper implements ArrayValueMapper
             return new AbstractLatLonPointDVIndexFieldData.Builder();
         }

+        @Override
+        public ValuesSourceType getValuesSourceType() {
+            return CoreValuesSourceType.GEOPOINT;
+        }
+
         @Override
         public Query existsQuery(QueryShardContext context) {
             if (hasDocValues()) {
@@ -254,8 +261,7 @@ public class GeoPointFieldMapper extends FieldMapper implements ArrayValueMapper

         @Override
         public Query termQuery(Object value, QueryShardContext context) {
-            throw new QueryShardException(context,
-                "Geo fields do not support exact searching, use dedicated geo queries instead: ["
+            throw new QueryShardException(context, "Geo fields do not support exact searching, use dedicated geo queries instead: ["
                 + name() + "]");
         }
     }

@@ -50,6 +50,8 @@ import org.elasticsearch.search.DocValueFormat;
 import org.elasticsearch.search.MultiValueMode;
 import org.elasticsearch.search.sort.BucketedSort;
 import org.elasticsearch.search.sort.SortOrder;
+import org.elasticsearch.search.aggregations.support.CoreValuesSourceType;
+import org.elasticsearch.search.aggregations.support.ValuesSourceType;

 import java.io.IOException;
 import java.util.Arrays;
@@ -157,6 +159,12 @@ public class IdFieldMapper extends MetadataFieldMapper {
             return new TermInSetQuery(name(), bytesRefs);
         }

+        @Override
+        public ValuesSourceType getValuesSourceType() {
+            // TODO: should this even exist? Is aggregating on the ID field valid?
+            return CoreValuesSourceType.BYTES;
+        }
+
         @Override
         public IndexFieldData.Builder fielddataBuilder(String fullyQualifiedIndexName) {
             if (indexOptions() == IndexOptions.NONE) {

@@ -27,6 +27,8 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.index.fielddata.IndexFieldData;
 import org.elasticsearch.index.fielddata.plain.ConstantIndexFieldData;
 import org.elasticsearch.index.query.QueryShardContext;
+import org.elasticsearch.search.aggregations.support.CoreValuesSourceType;
+import org.elasticsearch.search.aggregations.support.ValuesSourceType;

 import java.io.IOException;
 import java.util.List;
@@ -110,6 +112,12 @@ public class IndexFieldMapper extends MetadataFieldMapper {
         public IndexFieldData.Builder fielddataBuilder(String fullyQualifiedIndexName) {
             return new ConstantIndexFieldData.Builder(mapperService -> fullyQualifiedIndexName);
         }
+
+        @Override
+        public ValuesSourceType getValuesSourceType() {
+            // TODO: Should Index fields be aggregatable? What even is an IndexField?
+            return CoreValuesSourceType.BYTES;
+        }
     }

     private IndexFieldMapper(Settings indexSettings, MappedFieldType existing) {

@@ -44,6 +44,8 @@ import org.elasticsearch.index.fielddata.ScriptDocValues;
 import org.elasticsearch.index.fielddata.plain.DocValuesIndexFieldData;
 import org.elasticsearch.index.query.QueryShardContext;
 import org.elasticsearch.search.DocValueFormat;
+import org.elasticsearch.search.aggregations.support.CoreValuesSourceType;
+import org.elasticsearch.search.aggregations.support.ValuesSourceType;

 import java.io.IOException;
 import java.net.InetAddress;
@@ -294,6 +296,11 @@ public class IpFieldMapper extends FieldMapper {
             return new DocValuesIndexFieldData.Builder().scriptFunction(IpScriptDocValues::new);
         }

+        @Override
+        public ValuesSourceType getValuesSourceType() {
+            return CoreValuesSourceType.BYTES;
+        }
+
         @Override
         public Object valueForDisplay(Object value) {
             if (value == null) {

@@ -43,6 +43,8 @@ import org.elasticsearch.index.analysis.NamedAnalyzer;
 import org.elasticsearch.index.fielddata.IndexFieldData;
 import org.elasticsearch.index.fielddata.plain.DocValuesIndexFieldData;
 import org.elasticsearch.index.query.QueryShardContext;
+import org.elasticsearch.search.aggregations.support.CoreValuesSourceType;
+import org.elasticsearch.search.aggregations.support.ValuesSourceType;

 import java.io.IOException;
 import java.util.Iterator;
@@ -272,6 +274,11 @@ public final class KeywordFieldMapper extends FieldMapper {
             return new DocValuesIndexFieldData.Builder();
         }

+        @Override
+        public ValuesSourceType getValuesSourceType() {
+            return CoreValuesSourceType.BYTES;
+        }
+
         @Override
         public Object valueForDisplay(Object value) {
             if (value == null) {

@@ -50,6 +50,7 @@ import org.elasticsearch.index.query.QueryShardContext;
 import org.elasticsearch.index.query.QueryShardException;
 import org.elasticsearch.index.similarity.SimilarityProvider;
 import org.elasticsearch.search.DocValueFormat;
+import org.elasticsearch.search.aggregations.support.ValuesSourceType;

 import java.io.IOException;
 import java.time.ZoneId;
@@ -118,6 +119,16 @@ public abstract class MappedFieldType extends FieldType {
         throw new IllegalArgumentException("Fielddata is not supported on field [" + name() + "] of type [" + typeName() + "]");
     }

+    /**
+     * Returns the {@link ValuesSourceType} which supports this field type. This is tightly coupled to field data and aggregations support,
+     * so any implementation that returns a value from {@link MappedFieldType#fielddataBuilder} should also return a value from here.
+     *
+     * @return The appropriate {@link ValuesSourceType} for this field type.
+     */
+    public ValuesSourceType getValuesSourceType() {
+        throw new IllegalArgumentException("Aggregations are not supported on field [" + name() + "] of type [" + typeName() + "]");
+    }
+
     @Override
     public boolean equals(Object o) {
         if (!super.equals(o)) return false;

@@ -55,6 +55,8 @@ import org.elasticsearch.index.fielddata.IndexNumericFieldData.NumericType;
 import org.elasticsearch.index.fielddata.plain.DocValuesIndexFieldData;
 import org.elasticsearch.index.query.QueryShardContext;
 import org.elasticsearch.search.DocValueFormat;
+import org.elasticsearch.search.aggregations.support.CoreValuesSourceType;
+import org.elasticsearch.search.aggregations.support.ValuesSourceType;

 import java.io.IOException;
 import java.time.ZoneId;
@@ -958,6 +960,11 @@ public class NumberFieldMapper extends FieldMapper {
             return new DocValuesIndexFieldData.Builder().numericType(type.numericType());
         }

+        @Override
+        public ValuesSourceType getValuesSourceType() {
+            return CoreValuesSourceType.NUMERIC;
+        }
+
         @Override
         public Object valueForDisplay(Object value) {
             if (value == null) {

@@ -46,6 +46,8 @@ import org.elasticsearch.index.fielddata.IndexFieldData;
 import org.elasticsearch.index.fielddata.plain.DocValuesIndexFieldData;
 import org.elasticsearch.index.query.QueryShardContext;
 import org.elasticsearch.search.DocValueFormat;
+import org.elasticsearch.search.aggregations.support.CoreValuesSourceType;
+import org.elasticsearch.search.aggregations.support.ValuesSourceType;

 import java.io.IOException;
 import java.net.InetAddress;
@@ -93,14 +95,6 @@ public class RangeFieldMapper extends FieldMapper {
             return (RangeFieldType)fieldType;
         }

-        @Override
-        public Builder docValues(boolean docValues) {
-            if (docValues) {
-                throw new IllegalArgumentException("field [" + name + "] does not currently support " + TypeParsers.DOC_VALUES);
-            }
-            return super.docValues(docValues);
-        }
-
         public Builder coerce(boolean coerce) {
             this.coerce = coerce;
             return builder;
@@ -249,6 +243,11 @@ public class RangeFieldMapper extends FieldMapper {
             return new DocValuesIndexFieldData.Builder().setRangeType(rangeType);
         }

+        @Override
+        public ValuesSourceType getValuesSourceType() {
+            return CoreValuesSourceType.RANGE;
+        }
+
         @Override
         public String typeName() {
             return rangeType.name;

@@ -38,6 +38,8 @@ import org.elasticsearch.index.fielddata.plain.DocValuesIndexFieldData;
 import org.elasticsearch.index.mapper.ParseContext.Document;
 import org.elasticsearch.index.query.QueryShardContext;
 import org.elasticsearch.index.seqno.SequenceNumbers;
+import org.elasticsearch.search.aggregations.support.CoreValuesSourceType;
+import org.elasticsearch.search.aggregations.support.ValuesSourceType;

 import java.io.IOException;
 import java.util.List;
@@ -219,6 +221,10 @@ public class SeqNoFieldMapper extends MetadataFieldMapper {
             return new DocValuesIndexFieldData.Builder().numericType(NumericType.LONG);
         }

+        @Override
+        public ValuesSourceType getValuesSourceType() {
+            return CoreValuesSourceType.NUMERIC;
+        }
     }

     public SeqNoFieldMapper(Settings indexSettings) {

@@ -69,6 +69,8 @@ import org.elasticsearch.index.fielddata.IndexFieldData;
 import org.elasticsearch.index.fielddata.plain.PagedBytesIndexFieldData;
 import org.elasticsearch.index.query.IntervalBuilder;
 import org.elasticsearch.index.query.QueryShardContext;
+import org.elasticsearch.search.aggregations.support.CoreValuesSourceType;
+import org.elasticsearch.search.aggregations.support.ValuesSourceType;

 import java.io.IOException;
 import java.util.ArrayList;
@@ -764,6 +766,11 @@ public class TextFieldMapper extends FieldMapper {
             return new PagedBytesIndexFieldData.Builder(fielddataMinFrequency, fielddataMaxFrequency, fielddataMinSegmentSize);
         }

+        @Override
+        public ValuesSourceType getValuesSourceType() {
+            return CoreValuesSourceType.BYTES;
+        }
+
         @Override
         public void checkCompatibility(MappedFieldType other, List<String> conflicts) {
             super.checkCompatibility(other, conflicts);

@@ -46,6 +46,8 @@ import org.elasticsearch.index.fielddata.IndexFieldData;
 import org.elasticsearch.index.fielddata.plain.ConstantIndexFieldData;
 import org.elasticsearch.index.query.QueryShardContext;
 import org.elasticsearch.index.query.support.QueryParsers;
+import org.elasticsearch.search.aggregations.support.CoreValuesSourceType;
+import org.elasticsearch.search.aggregations.support.ValuesSourceType;

 import java.io.IOException;
 import java.util.Arrays;
@@ -119,6 +121,11 @@ public class TypeFieldMapper extends MetadataFieldMapper {
             return new ConstantIndexFieldData.Builder(typeFunction);
         }

+        @Override
+        public ValuesSourceType getValuesSourceType() {
+            return CoreValuesSourceType.BYTES;
+        }
+
         @Override
         public boolean isSearchable() {
             return true;

@@ -23,27 +23,8 @@ import org.elasticsearch.common.network.NetworkAddress;
 import org.elasticsearch.search.aggregations.BaseAggregationTestCase;
 import org.elasticsearch.search.aggregations.bucket.range.IpRangeAggregationBuilder;

-import java.net.InetAddress;
-import java.net.UnknownHostException;
-
 public class IpRangeTests extends BaseAggregationTestCase<IpRangeAggregationBuilder> {
-
-    private static String randomIp(boolean v4) {
-        try {
-            if (v4) {
-                byte[] ipv4 = new byte[4];
-                random().nextBytes(ipv4);
-                return NetworkAddress.format(InetAddress.getByAddress(ipv4));
-            } else {
-                byte[] ipv6 = new byte[16];
-                random().nextBytes(ipv6);
-                return NetworkAddress.format(InetAddress.getByAddress(ipv6));
-            }
-        } catch (UnknownHostException e) {
-            throw new AssertionError();
-        }
-    }

     @Override
     protected IpRangeAggregationBuilder createTestAggregatorBuilder() {
         int numRanges = randomIntBetween(1, 10);
@@ -62,16 +43,17 @@ public class IpRangeTests extends BaseAggregationTestCase<IpRangeAggregationBuil
                 } else {
                     prefixLength = randomInt(128);
                 }
-                factory.addMaskRange(key, randomIp(v4) + "/" + prefixLength);
+                factory.addMaskRange(key, NetworkAddress.format(randomIp(v4)) + "/" + prefixLength);
                 break;
             case 1:
-                factory.addUnboundedFrom(key, randomIp(randomBoolean()));
+                factory.addUnboundedFrom(key, NetworkAddress.format(randomIp(randomBoolean())));
                 break;
            case 2:
-                factory.addUnboundedTo(key, randomIp(randomBoolean()));
+                factory.addUnboundedTo(key, NetworkAddress.format(randomIp(randomBoolean())));
                 break;
            case 3:
-                factory.addRange(key, randomIp(randomBoolean()), randomIp(randomBoolean()));
+                v4 = randomBoolean();
+                factory.addRange(key, NetworkAddress.format(randomIp(v4)), NetworkAddress.format(randomIp(v4)));
                 break;
             default:
                 fail();
@@ -82,7 +64,7 @@ public class IpRangeTests extends BaseAggregationTestCase<IpRangeAggregationBuil
             factory.keyed(randomBoolean());
         }
         if (randomBoolean()) {
-            factory.missing(randomIp(randomBoolean()));
+            factory.missing(NetworkAddress.format(randomIp(randomBoolean())));
         }
         return factory;
     }

@@ -35,29 +35,11 @@ import org.elasticsearch.search.DocValueFormat;
 import org.elasticsearch.search.aggregations.AggregatorTestCase;

 import java.net.InetAddress;
-import java.net.UnknownHostException;
 import java.util.Arrays;
 import java.util.Comparator;

 public class IpRangeAggregatorTests extends AggregatorTestCase {
-
-    private static InetAddress randomIp(boolean v4) {
-        try {
-            if (v4) {
-                byte[] ipv4 = new byte[4];
-                random().nextBytes(ipv4);
-                return InetAddress.getByAddress(ipv4);
-            } else {
-                byte[] ipv6 = new byte[16];
-                random().nextBytes(ipv6);
-                return InetAddress.getByAddress(ipv6);
-            }
-        } catch (UnknownHostException e) {
-            throw new AssertionError();
-        }
-    }
-

     private static boolean isInRange(BytesRef value, BytesRef from, BytesRef to) {
         if ((to == null || to.compareTo(value) > 0) && (from == null || from.compareTo(value) <= 0)) {
             return true;

@@ -81,7 +81,9 @@ import org.elasticsearch.search.aggregations.metrics.TopHitsAggregationBuilder;
 import org.elasticsearch.search.aggregations.pipeline.BucketScriptPipelineAggregationBuilder;
 import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator.PipelineTree;
 import org.elasticsearch.search.aggregations.support.AggregationInspectionHelper;
+import org.elasticsearch.search.aggregations.support.CoreValuesSourceType;
 import org.elasticsearch.search.aggregations.support.ValueType;
+import org.elasticsearch.search.aggregations.support.ValuesSourceType;
 import org.elasticsearch.search.sort.FieldSortBuilder;
 import org.elasticsearch.search.sort.ScoreSortBuilder;
 import org.elasticsearch.test.geo.RandomGeoGenerator;
@@ -98,6 +100,7 @@ import java.util.Map;
 import java.util.function.BiFunction;
 import java.util.function.Function;

+import static java.util.Arrays.asList;
 import static org.elasticsearch.index.mapper.SeqNoFieldMapper.PRIMARY_TERM_NAME;
 import static org.elasticsearch.search.aggregations.AggregationBuilders.terms;
 import static org.elasticsearch.search.aggregations.PipelineAggregatorBuilders.bucketScript;
@@ -123,6 +126,17 @@ public class TermsAggregatorTests extends AggregatorTestCase {
         }
     }

+    @Override
+    protected AggregationBuilder createAggBuilderForTypeTest(MappedFieldType fieldType, String fieldName) {
+        return new TermsAggregationBuilder("foo", ValueType.STRING).field(fieldName);
+    }
+
+    @Override
+    protected List<ValuesSourceType> getSupportedValuesSourceTypes() {
+        return Collections.unmodifiableList(asList(CoreValuesSourceType.NUMERIC,
+            CoreValuesSourceType.BYTES));
+    }
+
     public void testGlobalOrdinalsExecutionHint() throws Exception {
         randomizeAggregatorImpl = false;


@@ -18,20 +18,29 @@
 */
package org.elasticsearch.search.aggregations;

import org.apache.lucene.document.BinaryDocValuesField;
import org.apache.lucene.document.InetAddressPoint;
import org.apache.lucene.document.LatLonDocValuesField;
import org.apache.lucene.document.SortedNumericDocValuesField;
import org.apache.lucene.document.SortedSetDocValuesField;
import org.apache.lucene.index.AssertingDirectoryReader;
import org.apache.lucene.index.CompositeReaderContext;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexReaderContext;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.RandomIndexWriter;
import org.apache.lucene.search.AssertingIndexSearcher;
import org.apache.lucene.search.Collector;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.QueryCache;
import org.apache.lucene.search.QueryCachingPolicy;
import org.apache.lucene.search.ScoreMode;
import org.apache.lucene.search.Weight;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.breaker.CircuitBreaker;
@@ -51,24 +60,40 @@ import org.elasticsearch.index.cache.query.DisabledQueryCache;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.IndexFieldDataCache;
import org.elasticsearch.index.fielddata.IndexFieldDataService;
import org.elasticsearch.index.mapper.BinaryFieldMapper;
import org.elasticsearch.index.mapper.BooleanFieldMapper;
import org.elasticsearch.index.mapper.CompletionFieldMapper;
import org.elasticsearch.index.mapper.ContentPath;
import org.elasticsearch.index.mapper.DateFieldMapper;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.FieldAliasMapper;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.GeoShapeFieldMapper;
import org.elasticsearch.index.mapper.IpFieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.Mapper.BuilderContext;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.ObjectMapper;
import org.elasticsearch.index.mapper.ObjectMapper.Nested;
import org.elasticsearch.index.mapper.RangeFieldMapper;
import org.elasticsearch.index.mapper.RangeType;
import org.elasticsearch.index.mapper.TextFieldMapper;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.index.shard.IndexShard;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.indices.IndicesModule;
import org.elasticsearch.indices.breaker.CircuitBreakerService;
import org.elasticsearch.indices.breaker.NoneCircuitBreakerService;
import org.elasticsearch.indices.fielddata.cache.IndicesFieldDataCache;
import org.elasticsearch.indices.mapper.MapperRegistry;
import org.elasticsearch.mock.orig.Mockito;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.search.aggregations.MultiBucketConsumerService.MultiBucketConsumer;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator.PipelineTree;
import org.elasticsearch.search.aggregations.support.CoreValuesSourceType;
import org.elasticsearch.search.aggregations.support.ValuesSourceType;
import org.elasticsearch.search.fetch.FetchPhase;
import org.elasticsearch.search.fetch.subphase.FetchDocValuesPhase;
import org.elasticsearch.search.fetch.subphase.FetchSourcePhase;
@@ -91,6 +116,7 @@ import java.util.function.BiFunction;
 import java.util.function.Function;
 import java.util.stream.Collectors;

+import static java.util.Collections.singleton;
 import static org.elasticsearch.test.InternalAggregationTestCase.DEFAULT_MAX_BUCKETS;
 import static org.mockito.Matchers.anyObject;
 import static org.mockito.Matchers.anyString;
@@ -108,6 +134,21 @@ public abstract class AggregatorTestCase extends ESTestCase {
     private List<Releasable> releasables = new ArrayList<>();
     private static final String TYPE_NAME = "type";

+    // A list of field types that should not be tested, or are not currently supported
+    private static List<String> TYPE_TEST_BLACKLIST;
+
+    static {
+        List<String> blacklist = new ArrayList<>();
+        blacklist.add(ObjectMapper.CONTENT_TYPE); // Cannot aggregate objects
+        blacklist.add(GeoShapeFieldMapper.CONTENT_TYPE); // Cannot aggregate geoshapes (yet)
+        blacklist.add(TextFieldMapper.CONTENT_TYPE); // TODO Does not support doc values, but does support FD, needs a lot of mocking
+        blacklist.add(ObjectMapper.NESTED_CONTENT_TYPE); // TODO support for nested
+        blacklist.add(CompletionFieldMapper.CONTENT_TYPE); // TODO support completion
+        blacklist.add(FieldAliasMapper.CONTENT_TYPE); // TODO support alias
+        TYPE_TEST_BLACKLIST = blacklist;
+    }
+
+
     /**
      * Allows subclasses to provide alternate names for the provided field type, which
      * can be useful when testing aggregations on field aliases.
@@ -116,9 +157,8 @@ public abstract class AggregatorTestCase extends ESTestCase {
         return Collections.emptyMap();
     }

-    private void registerFieldTypes(SearchContext searchContext,
-                                    MapperService mapperService,
-                                    Map<String, MappedFieldType> fieldNameToType) {
+    private static void registerFieldTypes(SearchContext searchContext, MapperService mapperService,
+                                           Map<String, MappedFieldType> fieldNameToType) {
         for (Map.Entry<String, MappedFieldType> entry : fieldNameToType.entrySet()) {
             String fieldName = entry.getKey();
             MappedFieldType fieldType = entry.getValue();
@@ -521,6 +561,189 @@ public abstract class AggregatorTestCase extends ESTestCase {
         }
     }

+    /**
+     * Implementors should return a list of {@link ValuesSourceType} that the aggregator supports.
+     * This is used to test the matrix of supported/unsupported field types against the aggregator
+     * and verify it works (or doesn't) as expected.
+     *
+     * If this method is implemented, {@link AggregatorTestCase#createAggBuilderForTypeTest(MappedFieldType, String)}
+     * should be implemented as well.
+     *
+     * @return list of supported ValuesSourceTypes
+     */
+    protected List<ValuesSourceType> getSupportedValuesSourceTypes() {
+        // If aggs don't override this method, an empty list allows the test to be skipped.
+        // Once all aggs implement this method we should make it abstract and not allow skipping.
+        return Collections.emptyList();
+    }
+
+    /**
+     * This method is invoked each time a field type is tested in {@link AggregatorTestCase#testSupportedFieldTypes()}.
+     * The field type and name are provided, and the implementor is expected to return an AggBuilder accordingly.
+     * The AggBuilder should be returned even if the aggregation does not support the field type, because
+     * the test will check if an exception is thrown in that case.
+     *
+     * The list of supported types are provided by {@link AggregatorTestCase#getSupportedValuesSourceTypes()},
+     * which must also be implemented.
+     *
+     * @param fieldType the type of the field that will be tested
+     * @param fieldName the name of the field that will be tested
+     * @return an aggregation builder to test against the field
+     */
+    protected AggregationBuilder createAggBuilderForTypeTest(MappedFieldType fieldType, String fieldName) {
+        throw new UnsupportedOperationException("If getSupportedValuesSourceTypes() is implemented, " +
+            "createAggBuilderForTypeTest() must be implemented as well.");
+    }
+
+    /**
+     * This test will validate that an aggregator succeeds or fails to run against all the field types
+     * that are registered in {@link IndicesModule} (e.g. all the core field types). An aggregator
+     * is provided by the implementor class, and it is executed against each field type in turn. If
+     * an exception is thrown when the field is supported, that will fail the test. Similarly, if
+     * an exception _is not_ thrown when a field is unsupported, that will also fail the test.
+     *
+     * Exception types/messages are not currently checked, just presence/absence of an exception.
+     */
+    public void testSupportedFieldTypes() throws IOException {
+        MapperRegistry mapperRegistry = new IndicesModule(Collections.emptyList()).getMapperRegistry();
+        Settings settings = Settings.builder().put("index.version.created", Version.CURRENT.id).build();
+        String fieldName = "typeTestFieldName";
+        List<ValuesSourceType> supportedVSTypes = getSupportedValuesSourceTypes();
+
+        if (supportedVSTypes.isEmpty()) {
+            // If the test says it doesn't support any VStypes, it has not been converted yet so skip
+            return;
+        } else if (supportedVSTypes.contains(CoreValuesSourceType.ANY)) {
+            throw new IllegalArgumentException("Tests should not specify CoreValuesSourceType.ANY as a supported ValuesSourceType, " +
+                "but should instead list the concrete ValuesSourceTypes that are supported");
+        }
+
+        for (Map.Entry<String, Mapper.TypeParser> mappedType : mapperRegistry.getMapperParsers().entrySet()) {
+
+            // Some field types should not be tested, or require more work and are not ready yet
+            if (TYPE_TEST_BLACKLIST.contains(mappedType.getKey())) {
+                continue;
+            }
+
+            Map<String, Object> source = new HashMap<>();
+            source.put("type", mappedType.getKey());
+            source.put("doc_values", "true");
+
+            Mapper.Builder builder = mappedType.getValue().parse(fieldName, source, new MockParserContext());
+            FieldMapper mapper = (FieldMapper) builder.build(new BuilderContext(settings, new ContentPath()));
+
+            MappedFieldType fieldType = mapper.fieldType();
+
+            // Non-aggregatable fields are not testable (they will throw an error on all aggs anyway), so skip
+            if (fieldType.isAggregatable() == false) {
+                continue;
+            }
+
+            try (Directory directory = newDirectory()) {
+                RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory);
+                writeTestDoc(fieldType, fieldName, indexWriter);
+                indexWriter.close();
+
+                try (IndexReader indexReader = DirectoryReader.open(directory)) {
+                    IndexSearcher indexSearcher = newIndexSearcher(indexReader);
+                    AggregationBuilder aggregationBuilder = createAggBuilderForTypeTest(fieldType, fieldName);
+
+                    // TODO in the future we can make this more explicit with expectThrows(), when the exceptions are standardized
+                    try {
+                        searchAndReduce(indexSearcher, new MatchAllDocsQuery(), aggregationBuilder, fieldType);
+                        if (supportedVSTypes.contains(fieldType.getValuesSourceType()) == false) {
+                            fail("Aggregator [" + aggregationBuilder.getType() + "] should not support field type ["
+                                + fieldType.typeName() + "] but executing against the field did not throw an exception");
+                        }
+                    } catch (Exception e) {
+                        if (supportedVSTypes.contains(fieldType.getValuesSourceType())) {
+                            fail("Aggregator [" + aggregationBuilder.getType() + "] supports field type ["
+                                + fieldType.typeName() + "] but executing against the field threw an exception: [" + e.getMessage() + "]");
+                        }
+                    }
+                }
+            }
+        }
+    }
+
+    /**
+     * Helper method to write a single document with a single value specific to the requested fieldType.
+     *
+     * Throws an exception if it encounters an unknown field type, to prevent new ones from sneaking in without
+     * being tested.
+     */
+    private void writeTestDoc(MappedFieldType fieldType, String fieldName, RandomIndexWriter iw) throws IOException {
+
+        if (fieldType.getValuesSourceType().equals(CoreValuesSourceType.NUMERIC)) {
+            // TODO note: once VS refactor adds DATE/BOOLEAN, this conditional will go away
+            if (fieldType.typeName().equals(DateFieldMapper.CONTENT_TYPE)
+                || fieldType.typeName().equals(DateFieldMapper.DATE_NANOS_CONTENT_TYPE)) {
+                iw.addDocument(singleton(new SortedNumericDocValuesField(fieldName, randomNonNegativeLong())));
+            } else if (fieldType.typeName().equals(BooleanFieldMapper.CONTENT_TYPE)) {
+                iw.addDocument(singleton(new SortedNumericDocValuesField(fieldName, randomBoolean() ? 0 : 1)));
+            } else {
+                iw.addDocument(singleton(new SortedNumericDocValuesField(fieldName, randomLong())));
+            }
+        } else if (fieldType.getValuesSourceType().equals(CoreValuesSourceType.BYTES)) {
+            if (fieldType.typeName().equals(BinaryFieldMapper.CONTENT_TYPE)) {
+                iw.addDocument(singleton(new BinaryFieldMapper.CustomBinaryDocValuesField(fieldName, new BytesRef("a").bytes)));
+            } else if (fieldType.typeName().equals(IpFieldMapper.CONTENT_TYPE)) {
+                // TODO note: once VS refactor adds IP, this conditional will go away
+                boolean v4 = randomBoolean();
+                iw.addDocument(singleton(new SortedSetDocValuesField(fieldName, new BytesRef(InetAddressPoint.encode(randomIp(v4))))));
+            } else {
+                iw.addDocument(singleton(new SortedSetDocValuesField(fieldName, new BytesRef("a"))));
+            }
+        } else if (fieldType.getValuesSourceType().equals(CoreValuesSourceType.RANGE)) {
+            Object start;
+            Object end;
+            RangeType rangeType;
+
+            if (fieldType.typeName().equals(RangeType.DOUBLE.typeName())) {
+                start = randomDouble();
+                end = RangeType.DOUBLE.nextUp(start);
+                rangeType = RangeType.DOUBLE;
+            } else if (fieldType.typeName().equals(RangeType.FLOAT.typeName())) {
+                start = randomFloat();
+                end = RangeType.FLOAT.nextUp(start);
+                rangeType = RangeType.DOUBLE;
+            } else if (fieldType.typeName().equals(RangeType.IP.typeName())) {
+                boolean v4 = randomBoolean();
+                start = randomIp(v4);
+                end = RangeType.IP.nextUp(start);
+                rangeType = RangeType.IP;
+            } else if (fieldType.typeName().equals(RangeType.LONG.typeName())) {
+                start = randomLong();
+                end = RangeType.LONG.nextUp(start);
+                rangeType = RangeType.LONG;
+            } else if (fieldType.typeName().equals(RangeType.INTEGER.typeName())) {
+                start = randomInt();
+                end = RangeType.INTEGER.nextUp(start);
+                rangeType = RangeType.INTEGER;
+            } else if (fieldType.typeName().equals(RangeType.DATE.typeName())) {
+                start = randomNonNegativeLong();
+                end = RangeType.DATE.nextUp(start);
+                rangeType = RangeType.DATE;
+            } else {
+                throw new IllegalStateException("Unknown type of range [" + fieldType.typeName() + "]");
+            }
+
+            final RangeFieldMapper.Range range = new RangeFieldMapper.Range(rangeType, start, end, true, true);
+            iw.addDocument(singleton(new BinaryDocValuesField(fieldName, rangeType.encodeRanges(Collections.singleton(range)))));
+
+        } else if (fieldType.getValuesSourceType().equals(CoreValuesSourceType.GEOPOINT)) {
+            iw.addDocument(singleton(new LatLonDocValuesField(fieldName, randomDouble(), randomDouble())));
+        } else {
+            throw new IllegalStateException("Unknown field type [" + fieldType.typeName() + "]");
+        }
+    }
+
+    private class MockParserContext extends Mapper.TypeParser.ParserContext {
+        MockParserContext() {
+            super(null, null, null, null, null);
+        }
+    }
+
     @After
     private void cleanupReleasables() {
         Releasables.close(releasables);

@@ -29,7 +29,6 @@ import com.carrotsearch.randomizedtesting.generators.RandomNumbers;
 import com.carrotsearch.randomizedtesting.generators.RandomPicks;
 import com.carrotsearch.randomizedtesting.generators.RandomStrings;
 import com.carrotsearch.randomizedtesting.rules.TestRuleAdapter;
-
 import org.apache.logging.log4j.Level;
 import org.apache.logging.log4j.LogManager;
 import org.apache.logging.log4j.Logger;
@@ -119,6 +118,8 @@ import org.junit.rules.RuleChain;

 import java.io.IOException;
 import java.io.InputStream;
+import java.net.InetAddress;
+import java.net.UnknownHostException;
 import java.nio.file.Path;
 import java.time.ZoneId;
 import java.util.ArrayList;
@@ -1434,4 +1435,20 @@ public abstract class ESTestCase extends LuceneTestCase {
         assert startAt >= 0 : "Unexpected test worker Id, resulting port range would be negative";
         return 10300 + (startAt * 100);
     }
+
+    protected static InetAddress randomIp(boolean v4) {
+        try {
+            if (v4) {
+                byte[] ipv4 = new byte[4];
+                random().nextBytes(ipv4);
+                return InetAddress.getByAddress(ipv4);
+            } else {
+                byte[] ipv6 = new byte[16];
+                random().nextBytes(ipv6);
+                return InetAddress.getByAddress(ipv6);
+            }
+        } catch (UnknownHostException e) {
+            throw new AssertionError();
+        }
+    }
 }

@@ -53,6 +53,8 @@ import org.elasticsearch.search.DocValueFormat;
 import org.elasticsearch.search.MultiValueMode;
 import org.elasticsearch.search.sort.BucketedSort;
 import org.elasticsearch.search.sort.SortOrder;
+import org.elasticsearch.search.aggregations.support.CoreValuesSourceType;
+import org.elasticsearch.search.aggregations.support.ValuesSourceType;

 import java.io.IOException;
 import java.util.Iterator;
@@ -276,6 +278,12 @@ public class HistogramFieldMapper extends FieldMapper {
             };
         }

+        @Override
+        public ValuesSourceType getValuesSourceType() {
+            // TODO: Histogram ValuesSourceType should move into this plugin.
+            return CoreValuesSourceType.HISTOGRAM;
+        }
+
         @Override
         public Query existsQuery(QueryShardContext context) {
             if (hasDocValues()) {

@@ -55,6 +55,8 @@ import org.elasticsearch.search.DocValueFormat;
 import org.elasticsearch.search.MultiValueMode;
 import org.elasticsearch.search.sort.BucketedSort;
 import org.elasticsearch.search.sort.SortOrder;
+import org.elasticsearch.search.aggregations.support.CoreValuesSourceType;
+import org.elasticsearch.search.aggregations.support.ValuesSourceType;

 import java.io.IOException;
 import java.util.Iterator;
@@ -349,6 +351,11 @@ public final class FlatObjectFieldMapper extends DynamicKeyFieldMapper {
             failIfNoDocValues();
             return new KeyedFlatObjectFieldData.Builder(key);
         }
+
+        @Override
+        public ValuesSourceType getValuesSourceType() {
+            return CoreValuesSourceType.BYTES;
+        }
     }

     /**

@@ -27,6 +27,8 @@ import org.elasticsearch.index.mapper.MapperParsingException;
 import org.elasticsearch.index.mapper.ParseContext;
 import org.elasticsearch.index.query.QueryShardContext;
 import org.elasticsearch.search.DocValueFormat;
+import org.elasticsearch.search.aggregations.support.CoreValuesSourceType;
+import org.elasticsearch.search.aggregations.support.ValuesSourceType;
 import org.elasticsearch.xpack.vectors.query.VectorDVIndexFieldData;

 import java.io.IOException;
@@ -150,6 +152,11 @@ public class DenseVectorFieldMapper extends FieldMapper implements ArrayValueMap
             return new VectorDVIndexFieldData.Builder(true);
         }

+        @Override
+        public ValuesSourceType getValuesSourceType() {
+            return CoreValuesSourceType.BYTES;
+        }
+
         @Override
         public Query termQuery(Object value, QueryShardContext context) {
             throw new UnsupportedOperationException(