Convert percolator, murmur3 and histogram mappers to parametrized form (#63004)

Relates to #62988
Alan Woodward 2020-09-29 14:37:33 +01:00 committed by Alan Woodward
parent a914d8bc90
commit de08ba58bf
17 changed files with 170 additions and 301 deletions
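
All three conversions follow the same shape: each mapping setting becomes an explicit Parameter on the Builder, and the ParametrizedFieldMapper framework derives parsing, XContent serialization, merging and conflict detection from getParameters(). That is what lets this commit delete the hand-written TypeParser, mergeOptions() and doXContentBody() code. A condensed sketch of the pattern, assembled from the hunks below (ExampleFieldMapper is hypothetical; the framework signatures are taken from this diff, not verified independently):

import java.util.Arrays;
import java.util.List;
import java.util.Map;

import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.ParametrizedFieldMapper;
import org.elasticsearch.index.mapper.ParseContext;

public class ExampleFieldMapper extends ParametrizedFieldMapper {

    private static ExampleFieldMapper toType(FieldMapper in) {
        return (ExampleFieldMapper) in;
    }

    public static class Builder extends ParametrizedFieldMapper.Builder {
        // each setting is a Parameter; the initializer lambda reads the current
        // value back off an existing mapper when init(existingMapper) runs
        final Parameter<Boolean> stored = Parameter.storeParam(m -> toType(m).fieldType().isStored(), false);
        final Parameter<Map<String, String>> meta = Parameter.metaParam();

        public Builder(String name) {
            super(name);
        }

        @Override
        protected List<Parameter<?>> getParameters() {
            // parsing, serialization and merge-conflict checks are all driven
            // from this list; unknown mapping properties are rejected generically
            return Arrays.asList(stored, meta);
        }

        @Override
        public ExampleFieldMapper build(BuilderContext context) {
            // real mappers build their MappedFieldType from the parameter values here
            throw new UnsupportedOperationException("elided in this sketch");
        }
    }

    // a stateless, shareable parser constant replaces each hand-written Mapper.TypeParser
    public static final TypeParser PARSER = new TypeParser((n, c) -> new Builder(n));

    protected ExampleFieldMapper(String simpleName, MappedFieldType type, MultiFields multiFields, CopyTo copyTo) {
        super(simpleName, type, multiFields, copyTo);
    }

    @Override
    public ParametrizedFieldMapper.Builder getMergeBuilder() {
        // merging: create a fresh Builder, seed it with this mapper's current
        // parameter values, then let the framework apply the incoming mapping
        return new Builder(simpleName()).init(this);
    }

    @Override
    protected void parseCreateField(ParseContext context) {
        // indexing logic is untouched by the conversion; elided
    }

    @Override
    protected String contentType() {
        return "example";
    }
}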

View File

@@ -65,6 +65,7 @@ import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.NumberFieldMapper;
import org.elasticsearch.index.mapper.ParametrizedFieldMapper;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.mapper.RangeFieldMapper;
import org.elasticsearch.index.mapper.RangeType;
@@ -96,9 +97,10 @@ import java.util.function.Supplier;
import static org.elasticsearch.index.query.AbstractQueryBuilder.parseInnerQueryBuilder;
public class PercolatorFieldMapper extends FieldMapper {
public class PercolatorFieldMapper extends ParametrizedFieldMapper {
static final XContentType QUERY_BUILDER_CONTENT_TYPE = XContentType.SMILE;
static final Setting<Boolean> INDEX_MAP_UNMAPPED_FIELDS_AS_TEXT_SETTING = Setting.boolSetting(
"index.percolator.map_unmapped_fields_as_text", false, Setting.Property.IndexScope);
static final String CONTENT_TYPE = "percolator";
@@ -114,18 +116,30 @@ public class PercolatorFieldMapper extends FieldMapper {
static final String RANGE_FIELD_NAME = "range_field";
static final String MINIMUM_SHOULD_MATCH_FIELD_NAME = "minimum_should_match_field";
static class Builder extends FieldMapper.Builder<Builder> {
@Override
public ParametrizedFieldMapper.Builder getMergeBuilder() {
return new Builder(simpleName(), queryShardContext).init(this);
}
static class Builder extends ParametrizedFieldMapper.Builder {
private final Parameter<Map<String, String>> meta = Parameter.metaParam();
private final Supplier<QueryShardContext> queryShardContext;
Builder(String fieldName, Supplier<QueryShardContext> queryShardContext) {
super(fieldName, new FieldType());
super(fieldName);
this.queryShardContext = queryShardContext;
}
@Override
protected List<Parameter<?>> getParameters() {
return Arrays.asList(meta);
}
@Override
public PercolatorFieldMapper build(BuilderContext context) {
PercolatorFieldType fieldType = new PercolatorFieldType(buildFullName(context), meta);
PercolatorFieldType fieldType = new PercolatorFieldType(buildFullName(context), meta.getValue());
context.path().add(name());
KeywordFieldMapper extractedTermsField = createExtractQueryFieldBuilder(EXTRACTED_TERMS_FIELD_NAME, context);
fieldType.queryTermsField = extractedTermsField.fieldType();
@@ -142,8 +156,8 @@ public class PercolatorFieldMapper extends FieldMapper {
fieldType.mapUnmappedFieldsAsText = getMapUnmappedFieldAsText(context.indexSettings());
context.path().remove();
return new PercolatorFieldMapper(name(), Builder.this.fieldType, fieldType,
multiFieldsBuilder.build(this, context), copyTo, queryShardContext, extractedTermsField,
return new PercolatorFieldMapper(name(), fieldType,
multiFieldsBuilder.build(this, context), copyTo.build(), queryShardContext, extractedTermsField,
extractionResultField, queryBuilderField, rangeFieldMapper, minimumShouldMatchFieldMapper,
getMapUnmappedFieldAsText(context.indexSettings()));
}
@@ -314,12 +328,12 @@ public class PercolatorFieldMapper extends FieldMapper {
private final RangeFieldMapper rangeFieldMapper;
private final boolean mapUnmappedFieldsAsText;
PercolatorFieldMapper(String simpleName, FieldType fieldType, MappedFieldType mappedFieldType,
PercolatorFieldMapper(String simpleName, MappedFieldType mappedFieldType,
MultiFields multiFields, CopyTo copyTo, Supplier<QueryShardContext> queryShardContext,
KeywordFieldMapper queryTermsField, KeywordFieldMapper extractionResultField,
BinaryFieldMapper queryBuilderField, RangeFieldMapper rangeFieldMapper,
NumberFieldMapper minimumShouldMatchFieldMapper, boolean mapUnmappedFieldsAsText) {
super(simpleName, fieldType, mappedFieldType, multiFields, copyTo);
super(simpleName, mappedFieldType, multiFields, copyTo);
this.queryShardContext = queryShardContext;
this.queryTermsField = queryTermsField;
this.extractionResultField = extractionResultField;
@@ -476,12 +490,7 @@ public class PercolatorFieldMapper extends FieldMapper {
}
@Override
protected void mergeOptions(FieldMapper other, List<String> conflicts) {
}
@Override
protected void parseCreateField(ParseContext context) throws IOException {
protected void parseCreateField(ParseContext context) {
throw new UnsupportedOperationException("should not be invoked");
}
@@ -548,14 +557,4 @@ public class PercolatorFieldMapper extends FieldMapper {
System.arraycopy(maxEncoded, 0, bytes, BinaryRange.BYTES + offset, maxEncoded.length);
return bytes;
}
@Override
protected boolean indexedByDefault() {
return false;
}
@Override
protected boolean docValuesByDefault() {
return false;
}
}
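
For the percolator field the only mapping-level parameter left is meta: the hidden sub-fields are constructed internally, and map_unmapped_fields_as_text comes from an index setting rather than the mapping. The deleted no-op mergeOptions() and the indexedByDefault()/docValuesByDefault() hooks are subsumed by the framework, since defaults now live on each Parameter. The merge path spelled out (a sketch of the flow implied by this diff, not the framework source):

// 1. getMergeBuilder() creates a fresh Builder for the same field name;
// 2. init(this) copies the current parameter values (here just meta) off the
//    existing mapper via each Parameter's initializer;
// 3. the framework applies the incoming mapping's values on top, collecting a
//    conflict for any non-updateable parameter that differs.
ParametrizedFieldMapper.Builder merged = new Builder(simpleName(), queryShardContext).init(this);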

View File

@@ -31,24 +31,23 @@ import org.elasticsearch.index.fielddata.IndexNumericFieldData.NumericType;
import org.elasticsearch.index.fielddata.plain.SortedNumericIndexFieldData;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.ParametrizedFieldMapper;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.mapper.SourceValueFetcher;
import org.elasticsearch.index.mapper.TextSearchInfo;
import org.elasticsearch.index.mapper.TypeParsers;
import org.elasticsearch.index.mapper.ValueFetcher;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.index.query.QueryShardException;
import org.elasticsearch.search.lookup.SearchLookup;
import java.io.IOException;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.function.Supplier;
public class Murmur3FieldMapper extends FieldMapper {
public class Murmur3FieldMapper extends ParametrizedFieldMapper {
public static final String CONTENT_TYPE = "murmur3";
@@ -60,39 +59,35 @@ public class Murmur3FieldMapper extends FieldMapper {
}
}
public static class Builder extends FieldMapper.Builder<Builder> {
private static Murmur3FieldMapper toType(FieldMapper in) {
return (Murmur3FieldMapper) in;
}
public static class Builder extends ParametrizedFieldMapper.Builder {
final Parameter<Boolean> stored = Parameter.storeParam(m -> toType(m).fieldType().isStored(), false);
final Parameter<Map<String, String>> meta = Parameter.metaParam();
public Builder(String name) {
super(name, Defaults.FIELD_TYPE);
builder = this;
super(name);
}
@Override
protected List<Parameter<?>> getParameters() {
return Arrays.asList(stored, meta);
}
@Override
public Murmur3FieldMapper build(BuilderContext context) {
return new Murmur3FieldMapper(name, fieldType, new Murmur3FieldType(buildFullName(context), fieldType.stored(), meta),
multiFieldsBuilder.build(this, context), copyTo);
return new Murmur3FieldMapper(
name,
new Murmur3FieldType(buildFullName(context), stored.getValue(), meta.getValue()),
multiFieldsBuilder.build(this, context),
copyTo.build());
}
}
public static class TypeParser implements Mapper.TypeParser {
@Override
public Mapper.Builder<?> parse(String name, Map<String, Object> node, ParserContext parserContext)
throws MapperParsingException {
Builder builder = new Builder(name);
// tweaking these settings is no longer allowed, the entire purpose of murmur3 fields is to store a hash
if (node.get("doc_values") != null) {
throw new MapperParsingException("Setting [doc_values] cannot be modified for field [" + name + "]");
}
if (node.get("index") != null) {
throw new MapperParsingException("Setting [index] cannot be modified for field [" + name + "]");
}
TypeParsers.parseField(builder, name, node, parserContext);
return builder;
}
}
public static TypeParser PARSER = new TypeParser((n, c) -> new Builder(n));
// this only exists so a check can be done to match the field type to using murmur3 hashing...
public static class Murmur3FieldType extends MappedFieldType {
@@ -117,9 +112,16 @@ public class Murmur3FieldMapper extends FieldMapper {
}
}
protected Murmur3FieldMapper(String simpleName, FieldType fieldType, MappedFieldType mappedFieldType,
MultiFields multiFields, CopyTo copyTo) {
super(simpleName, fieldType, mappedFieldType, multiFields, copyTo);
protected Murmur3FieldMapper(String simpleName,
MappedFieldType mappedFieldType,
MultiFields multiFields,
CopyTo copyTo) {
super(simpleName, mappedFieldType, multiFields, copyTo);
}
@Override
public ParametrizedFieldMapper.Builder getMergeBuilder() {
return new Builder(simpleName()).init(this);
}
@Override
@@ -140,7 +142,7 @@ public class Murmur3FieldMapper extends FieldMapper {
final BytesRef bytes = new BytesRef(value.toString());
final long hash = MurmurHash3.hash128(bytes.bytes, bytes.offset, bytes.length, 0, new MurmurHash3.Hash128()).h1;
context.doc().add(new SortedNumericDocValuesField(fieldType().name(), hash));
if (fieldType.stored()) {
if (fieldType().isStored()) {
context.doc().add(new StoredField(name(), hash));
}
}
@@ -159,13 +161,4 @@ public class Murmur3FieldMapper extends FieldMapper {
};
}
@Override
protected boolean indexedByDefault() {
return false;
}
@Override
protected void mergeOptions(FieldMapper other, List<String> conflicts) {
}
}
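
The murmur3 conversion drops the entire hand-written TypeParser, including its bespoke rejection of doc_values and index. Those names are simply absent from getParameters(), so the framework now refuses them generically as unknown mapping parameters, with a framework-level error message rather than the "cannot be modified" one asserted by the tests deleted further down. The two arguments to the surviving store parameter, annotated (same line as in the hunk above):

final Parameter<Boolean> stored = Parameter.storeParam(
    m -> toType(m).fieldType().isStored(), // initializer: reads the current value off an existing mapper during init()
    false);                                // default when the mapping leaves "store" unset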

View File

@@ -19,18 +19,18 @@
package org.elasticsearch.plugin.mapper;
import java.util.Collections;
import java.util.Map;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.murmur3.Murmur3FieldMapper;
import org.elasticsearch.plugins.MapperPlugin;
import org.elasticsearch.plugins.Plugin;
import java.util.Collections;
import java.util.Map;
public class MapperMurmur3Plugin extends Plugin implements MapperPlugin {
@Override
public Map<String, Mapper.TypeParser> getMappers() {
return Collections.singletonMap(Murmur3FieldMapper.CONTENT_TYPE, new Murmur3FieldMapper.TypeParser());
return Collections.singletonMap(Murmur3FieldMapper.CONTENT_TYPE, Murmur3FieldMapper.PARSER);
}
}
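
Both mapper plugins touched by this commit (here, and AnalyticsPlugin for the histogram field below) make the same registration change: a shared PARSER constant instead of a fresh TypeParser per registration. Sharing is safe because the parser holds no per-field state; each parse goes through its factory function and yields a new Builder:

// declared once on the mapper; (n, c) is (field name, parser context)
public static TypeParser PARSER = new TypeParser((n, c) -> new Builder(n));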

View File

@@ -24,8 +24,7 @@ import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.IndexableField;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.FieldMapperTestCase2;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MapperTestCase;
import org.elasticsearch.index.mapper.ParsedDocument;
import org.elasticsearch.plugin.mapper.MapperMurmur3Plugin;
import org.elasticsearch.plugins.Plugin;
@@ -33,22 +32,14 @@ import org.elasticsearch.plugins.Plugin;
import java.io.IOException;
import java.util.Arrays;
import java.util.Collection;
import java.util.Set;
import static org.hamcrest.Matchers.containsString;
public class Murmur3FieldMapperTests extends FieldMapperTestCase2<Murmur3FieldMapper.Builder> {
public class Murmur3FieldMapperTests extends MapperTestCase {
@Override
protected void writeFieldValue(XContentBuilder builder) throws IOException {
builder.value("value");
}
@Override
protected Set<String> unsupportedProperties() {
return org.elasticsearch.common.collect.Set.of("analyzer", "similarity", "doc_values", "index");
}
@Override
protected Collection<? extends Plugin> getPlugins() {
return org.elasticsearch.common.collect.List.of(new MapperMurmur3Plugin());
@@ -60,8 +51,8 @@ public class Murmur3FieldMapperTests extends FieldMapperTestCase2<Murmur3FieldMa
}
@Override
protected void registerParameters(ParameterChecker checker) {
// no parameters to configure
protected void registerParameters(ParameterChecker checker) throws IOException {
checker.registerConflictCheck("store", b -> b.field("store", true));
}
public void testDefaults() throws Exception {
@@ -75,38 +66,4 @@ public class Murmur3FieldMapperTests extends FieldMapperTestCase2<Murmur3FieldMa
assertEquals(DocValuesType.SORTED_NUMERIC, field.fieldType().docValuesType());
}
public void testDocValuesSettingNotAllowed() throws Exception {
Exception e = expectThrows(
MapperParsingException.class,
() -> createMapperService(fieldMapping(b -> b.field("type", "murmur3").field("doc_values", false)))
);
assertThat(e.getMessage(), containsString("Setting [doc_values] cannot be modified"));
// even setting to the default is not allowed, the setting is invalid
e = expectThrows(
MapperParsingException.class,
() -> createMapperService(fieldMapping(b -> b.field("type", "murmur3").field("doc_values", true)))
);
assertThat(e.getMessage(), containsString("Setting [doc_values] cannot be modified"));
}
public void testIndexSettingNotAllowed() throws Exception {
Exception e = expectThrows(
MapperParsingException.class,
() -> createMapperService(fieldMapping(b -> b.field("type", "murmur3").field("index", "not_analyzed")))
);
assertThat(e.getMessage(), containsString("Setting [index] cannot be modified"));
// even setting to the default is not allowed, the setting is invalid
e = expectThrows(
MapperParsingException.class,
() -> createMapperService(fieldMapping(b -> b.field("type", "murmur3").field("index", "no")))
);
assertThat(e.getMessage(), containsString("Setting [index] cannot be modified"));
}
@Override
protected Murmur3FieldMapper.Builder newBuilder() {
return new Murmur3FieldMapper.Builder("murmur");
}
}
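
The registerConflictCheck call takes over from the deleted tests: doc_values and index are now refused generically as unknown parameters (exercised by the shared MapperTestCase machinery), while store is declared but not updateable, so the checker verifies that merging a mapping with a different store value fails. Written out by hand, the check is roughly (a sketch; createMapperService, fieldMapping and merge are assumed MapperTestCase helpers, and the conflict exception type is an assumption):

public void testStoreCannotBeUpdated() throws IOException {
    MapperService mapperService = createMapperService(fieldMapping(b -> b.field("type", "murmur3")));
    // merging a mapping that flips "store" must be refused as a conflict
    expectThrows(IllegalArgumentException.class,
        () -> merge(mapperService, fieldMapping(b -> b.field("type", "murmur3").field("store", true))));
}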

View File

@@ -155,7 +155,7 @@ public class AnalyticsPlugin extends Plugin implements SearchPlugin, ActionPlugi
@Override
public Map<String, Mapper.TypeParser> getMappers() {
return Collections.singletonMap(HistogramFieldMapper.CONTENT_TYPE, new HistogramFieldMapper.TypeParser());
return Collections.singletonMap(HistogramFieldMapper.CONTENT_TYPE, HistogramFieldMapper.PARSER);
}
@Override

View File

@@ -10,10 +10,8 @@ import com.carrotsearch.hppc.DoubleArrayList;
import com.carrotsearch.hppc.IntArrayList;
import org.apache.lucene.document.BinaryDocValuesField;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldType;
import org.apache.lucene.index.BinaryDocValues;
import org.apache.lucene.index.DocValues;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.SortField;
@@ -23,32 +21,27 @@ import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.Explicit;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentSubParser;
import org.elasticsearch.common.xcontent.support.XContentMapValues;
import org.elasticsearch.index.fielddata.HistogramValue;
import org.elasticsearch.index.fielddata.HistogramValues;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested;
import org.elasticsearch.index.fielddata.IndexFieldDataCache;
import org.elasticsearch.index.fielddata.IndexHistogramFieldData;
import org.elasticsearch.index.fielddata.LeafHistogramFieldData;
import org.elasticsearch.index.fielddata.ScriptDocValues;
import org.elasticsearch.index.fielddata.SortedBinaryDocValues;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.ParametrizedFieldMapper;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.mapper.SourceValueFetcher;
import org.elasticsearch.index.mapper.TextSearchInfo;
import org.elasticsearch.index.mapper.TypeParsers;
import org.elasticsearch.index.mapper.ValueFetcher;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.index.query.QueryShardException;
import org.elasticsearch.indices.breaker.CircuitBreakerService;
import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.MultiValueMode;
import org.elasticsearch.search.lookup.SearchLookup;
@@ -57,7 +50,7 @@ import org.elasticsearch.search.sort.SortOrder;
import org.elasticsearch.xpack.analytics.aggregations.support.AnalyticsValuesSourceType;
import java.io.IOException;
import java.util.Iterator;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.function.Supplier;
@@ -67,106 +60,68 @@ import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpect
/**
* Field Mapper for pre-aggregated histograms.
*/
public class HistogramFieldMapper extends FieldMapper {
public class HistogramFieldMapper extends ParametrizedFieldMapper {
public static final String CONTENT_TYPE = "histogram";
public static class Names {
public static final String IGNORE_MALFORMED = "ignore_malformed";
}
public static class Defaults {
public static final Explicit<Boolean> IGNORE_MALFORMED = new Explicit<>(false, false);
public static final FieldType FIELD_TYPE = new FieldType();
static {
FIELD_TYPE.setTokenized(false);
FIELD_TYPE.setIndexOptions(IndexOptions.NONE);
FIELD_TYPE.freeze();
}
}
public static final ParseField COUNTS_FIELD = new ParseField("counts");
public static final ParseField VALUES_FIELD = new ParseField("values");
public static class Builder extends FieldMapper.Builder<Builder> {
protected Boolean ignoreMalformed;
private static HistogramFieldMapper toType(FieldMapper in) {
return (HistogramFieldMapper) in;
}
public Builder(String name) {
super(name, Defaults.FIELD_TYPE);
builder = this;
public static class Builder extends ParametrizedFieldMapper.Builder {
private final Parameter<Map<String, String>> meta = Parameter.metaParam();
private final Parameter<Explicit<Boolean>> ignoreMalformed;
public Builder(String name, boolean ignoreMalformedByDefault) {
super(name);
this.ignoreMalformed
= Parameter.explicitBoolParam("ignore_malformed", true, m -> toType(m).ignoreMalformed, ignoreMalformedByDefault);
}
public Builder ignoreMalformed(boolean ignoreMalformed) {
this.ignoreMalformed = ignoreMalformed;
return builder;
}
protected Explicit<Boolean> ignoreMalformed(BuilderContext context) {
if (ignoreMalformed != null) {
return new Explicit<>(ignoreMalformed, true);
}
if (context.indexSettings() != null) {
return new Explicit<>(IGNORE_MALFORMED_SETTING.get(context.indexSettings()), false);
}
return HistogramFieldMapper.Defaults.IGNORE_MALFORMED;
@Override
protected List<Parameter<?>> getParameters() {
return Arrays.asList(ignoreMalformed, meta);
}
@Override
public HistogramFieldMapper build(BuilderContext context) {
return new HistogramFieldMapper(name, fieldType, new HistogramFieldType(buildFullName(context), hasDocValues, meta),
multiFieldsBuilder.build(this, context), ignoreMalformed(context), copyTo);
return new HistogramFieldMapper(name, new HistogramFieldType(buildFullName(context), meta.getValue()),
multiFieldsBuilder.build(this, context), copyTo.build(), this);
}
}
public static class TypeParser implements Mapper.TypeParser {
@Override
public Mapper.Builder<Builder> parse(String name, Map<String, Object> node, ParserContext parserContext)
throws MapperParsingException {
Builder builder = new HistogramFieldMapper.Builder(name);
for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
Map.Entry<String, Object> entry = iterator.next();
String propName = entry.getKey();
Object propNode = entry.getValue();
if (propName.equals(Names.IGNORE_MALFORMED)) {
builder.ignoreMalformed(XContentMapValues.nodeBooleanValue(propNode, name + "." + Names.IGNORE_MALFORMED));
iterator.remove();
}
if (propName.equals("meta")) {
builder.meta(TypeParsers.parseMeta(propName, propNode));
iterator.remove();
}
}
return builder;
}
}
public static final TypeParser PARSER
= new TypeParser((n, c) -> new Builder(n, IGNORE_MALFORMED_SETTING.get(c.getSettings())));
protected Explicit<Boolean> ignoreMalformed;
private final Explicit<Boolean> ignoreMalformed;
private final boolean ignoreMalformedByDefault;
public HistogramFieldMapper(String simpleName, FieldType fieldType, MappedFieldType mappedFieldType,
MultiFields multiFields, Explicit<Boolean> ignoreMalformed, CopyTo copyTo) {
super(simpleName, fieldType, mappedFieldType, multiFields, copyTo);
this.ignoreMalformed = ignoreMalformed;
public HistogramFieldMapper(String simpleName, MappedFieldType mappedFieldType,
MultiFields multiFields, CopyTo copyTo, Builder builder) {
super(simpleName, mappedFieldType, multiFields, copyTo);
this.ignoreMalformed = builder.ignoreMalformed.getValue();
this.ignoreMalformedByDefault = builder.ignoreMalformed.getDefaultValue().value();
}
boolean ignoreMalformed() {
return ignoreMalformed.value();
}
@Override
protected void mergeOptions(FieldMapper other, List<String> conflicts) {
HistogramFieldMapper gpfmMergeWith = (HistogramFieldMapper) other;
if (gpfmMergeWith.ignoreMalformed.explicit()) {
this.ignoreMalformed = gpfmMergeWith.ignoreMalformed;
}
}
@Override
protected String contentType() {
return CONTENT_TYPE;
}
@Override
protected void parseCreateField(ParseContext context) throws IOException {
public ParametrizedFieldMapper.Builder getMergeBuilder() {
return new Builder(simpleName(), ignoreMalformedByDefault).init(this);
}
@Override
protected void parseCreateField(ParseContext context) {
throw new UnsupportedOperationException("Parsing is implemented in parse(), this method should NEVER be called");
}
@@ -185,8 +140,8 @@ public class HistogramFieldMapper extends FieldMapper {
public static class HistogramFieldType extends MappedFieldType {
public HistogramFieldType(String name, boolean hasDocValues, Map<String, String> meta) {
super(name, false, false, hasDocValues, TextSearchInfo.SIMPLE_MATCH_ONLY, meta);
public HistogramFieldType(String name, Map<String, String> meta) {
super(name, false, false, true, TextSearchInfo.SIMPLE_MATCH_ONLY, meta);
}
@Override
@@ -197,89 +152,78 @@ public class HistogramFieldMapper extends FieldMapper {
@Override
public IndexFieldData.Builder fielddataBuilder(String fullyQualifiedIndexName, Supplier<SearchLookup> searchLookup) {
failIfNoDocValues();
return new IndexFieldData.Builder() {
return (cache, breakerService, mapperService) -> new IndexHistogramFieldData(name(), AnalyticsValuesSourceType.HISTOGRAM) {
@Override
public IndexFieldData<?> build(
IndexFieldDataCache cache,
CircuitBreakerService breakerService,
MapperService mapperService
) {
return new IndexHistogramFieldData(name(), AnalyticsValuesSourceType.HISTOGRAM) {
public LeafHistogramFieldData load(LeafReaderContext context) {
return new LeafHistogramFieldData() {
@Override
public LeafHistogramFieldData load(LeafReaderContext context) {
return new LeafHistogramFieldData() {
@Override
public HistogramValues getHistogramValues() throws IOException {
try {
final BinaryDocValues values = DocValues.getBinary(context.reader(), fieldName);
final InternalHistogramValue value = new InternalHistogramValue();
return new HistogramValues() {
public HistogramValues getHistogramValues() throws IOException {
try {
final BinaryDocValues values = DocValues.getBinary(context.reader(), fieldName);
final InternalHistogramValue value = new InternalHistogramValue();
return new HistogramValues() {
@Override
public boolean advanceExact(int doc) throws IOException {
return values.advanceExact(doc);
}
@Override
public HistogramValue histogram() throws IOException {
try {
value.reset(values.binaryValue());
return value;
} catch (IOException e) {
throw new IOException("Cannot load doc value", e);
}
}
};
} catch (IOException e) {
throw new IOException("Cannot load doc values", e);
@Override
public boolean advanceExact(int doc) throws IOException {
return values.advanceExact(doc);
}
}
@Override
public ScriptDocValues<?> getScriptValues() {
throw new UnsupportedOperationException("The [" + CONTENT_TYPE + "] field does not " +
"support scripts");
}
@Override
public SortedBinaryDocValues getBytesValues() {
throw new UnsupportedOperationException("String representation of doc values " +
"for [" + CONTENT_TYPE + "] fields is not supported");
}
@Override
public long ramBytesUsed() {
return 0; // Unknown
}
@Override
public void close() {
}
};
@Override
public HistogramValue histogram() throws IOException {
try {
value.reset(values.binaryValue());
return value;
} catch (IOException e) {
throw new IOException("Cannot load doc value", e);
}
}
};
} catch (IOException e) {
throw new IOException("Cannot load doc values", e);
}
}
@Override
public LeafHistogramFieldData loadDirect(LeafReaderContext context) throws Exception {
return load(context);
public ScriptDocValues<?> getScriptValues() {
throw new UnsupportedOperationException("The [" + CONTENT_TYPE + "] field does not " +
"support scripts");
}
@Override
public SortField sortField(Object missingValue, MultiValueMode sortMode,
XFieldComparatorSource.Nested nested, boolean reverse) {
throw new UnsupportedOperationException("can't sort on the [" + CONTENT_TYPE + "] field");
public SortedBinaryDocValues getBytesValues() {
throw new UnsupportedOperationException("String representation of doc values " +
"for [" + CONTENT_TYPE + "] fields is not supported");
}
@Override
public BucketedSort newBucketedSort(BigArrays bigArrays, Object missingValue, MultiValueMode sortMode,
Nested nested, SortOrder sortOrder, DocValueFormat format, int bucketSize, BucketedSort.ExtraData extra) {
throw new IllegalArgumentException("can't sort on the [" + CONTENT_TYPE + "] field");
public long ramBytesUsed() {
return 0; // Unknown
}
@Override
public void close() {
}
};
}
@Override
public LeafHistogramFieldData loadDirect(LeafReaderContext context) {
return load(context);
}
@Override
public SortField sortField(Object missingValue, MultiValueMode sortMode,
Nested nested, boolean reverse) {
throw new UnsupportedOperationException("can't sort on the [" + CONTENT_TYPE + "] field");
}
@Override
public BucketedSort newBucketedSort(BigArrays bigArrays, Object missingValue, MultiValueMode sortMode,
Nested nested, SortOrder sortOrder, DocValueFormat format, int bucketSize, BucketedSort.ExtraData extra) {
throw new IllegalArgumentException("can't sort on the [" + CONTENT_TYPE + "] field");
}
};
}
@@ -297,7 +241,7 @@ public class HistogramFieldMapper extends FieldMapper {
throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] can't be used in multi-fields");
}
context.path().add(simpleName());
XContentParser.Token token = null;
XContentParser.Token token;
XContentSubParser subParser = null;
try {
token = context.parser().currentToken();
@@ -402,19 +346,6 @@ public class HistogramFieldMapper extends FieldMapper {
context.path().remove();
}
@Override
protected void doXContentBody(XContentBuilder builder, boolean includeDefaults, Params params) throws IOException {
super.doXContentBody(builder, includeDefaults, params);
if (includeDefaults || ignoreMalformed.explicit()) {
builder.field(Names.IGNORE_MALFORMED, ignoreMalformed.value());
}
}
@Override
protected boolean indexedByDefault() {
return false;
}
/** re-usable {@link HistogramValue} implementation */
private static class InternalHistogramValue extends HistogramValue {
double value;
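
The histogram conversion also shows how an Explicit<Boolean> setting maps onto the framework: Parameter.explicitBoolParam records whether ignore_malformed was set explicitly in the mapping, and the per-index default is resolved once, when the TypeParser constructs the Builder, which is why the mapper now keeps ignoreMalformedByDefault around for getMergeBuilder(). The constructor from the hunk above, annotated (the second argument is read here as an "updateable" flag, an assumption this diff does not spell out):

public Builder(String name, boolean ignoreMalformedByDefault) {
    super(name);
    this.ignoreMalformed = Parameter.explicitBoolParam(
        "ignore_malformed",
        true,                            // updateable on a live mapping without a merge conflict (assumed)
        m -> toType(m).ignoreMalformed,  // initializer used by init(this)
        ignoreMalformedByDefault);       // resolved from IGNORE_MALFORMED_SETTING per index
}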

View File

@@ -226,7 +226,7 @@ public class HistoBackedHistogramAggregatorTests extends AggregatorTestCase {
}
private MappedFieldType defaultFieldType(String fieldName) {
return new HistogramFieldMapper.HistogramFieldType(fieldName, true, Collections.emptyMap());
return new HistogramFieldMapper.HistogramFieldType(fieldName, Collections.emptyMap());
}
}
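
This and the remaining aggregator test changes below are the same mechanical edit: the hasDocValues flag disappears from the HistogramFieldType constructor because the mapper change above hard-wires doc values on (super(name, false, false, true, ...)), leaving:

MappedFieldType fieldType = new HistogramFieldMapper.HistogramFieldType("field", Collections.emptyMap());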

View File

@@ -91,7 +91,7 @@ public class HDRPreAggregatedPercentileRanksAggregatorTests extends AggregatorTe
PercentileRanksAggregationBuilder aggBuilder = new PercentileRanksAggregationBuilder("my_agg", new double[]{0.1, 0.5, 12})
.field("field")
.method(PercentilesMethod.HDR);
MappedFieldType fieldType = new HistogramFieldMapper.HistogramFieldType("field", true, Collections.emptyMap());
MappedFieldType fieldType = new HistogramFieldMapper.HistogramFieldType("field", Collections.emptyMap());
try (IndexReader reader = w.getReader()) {
IndexSearcher searcher = new IndexSearcher(reader);
PercentileRanks ranks = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType);

View File

@@ -150,7 +150,7 @@ public class HDRPreAggregatedPercentilesAggregatorTests extends AggregatorTestCa
PercentilesAggregationBuilder builder =
new PercentilesAggregationBuilder("test").field("number").method(PercentilesMethod.HDR);
MappedFieldType fieldType = new HistogramFieldMapper.HistogramFieldType("number", true, Collections.emptyMap());
MappedFieldType fieldType = new HistogramFieldMapper.HistogramFieldType("number", Collections.emptyMap());
Aggregator aggregator = createAggregator(builder, indexSearcher, fieldType);
aggregator.preCollection();
indexSearcher.search(query, aggregator);

View File

@@ -150,6 +150,6 @@ public class HistoBackedAvgAggregatorTests extends AggregatorTestCase {
}
private MappedFieldType defaultFieldType() {
return new HistogramFieldMapper.HistogramFieldType(HistoBackedAvgAggregatorTests.FIELD_NAME, true, Collections.emptyMap());
return new HistogramFieldMapper.HistogramFieldType(HistoBackedAvgAggregatorTests.FIELD_NAME, Collections.emptyMap());
}
}

View File

@@ -150,6 +150,6 @@ public class HistoBackedMaxAggregatorTests extends AggregatorTestCase {
}
private MappedFieldType defaultFieldType() {
return new HistogramFieldMapper.HistogramFieldType(HistoBackedMaxAggregatorTests.FIELD_NAME, true, Collections.emptyMap());
return new HistogramFieldMapper.HistogramFieldType(HistoBackedMaxAggregatorTests.FIELD_NAME, Collections.emptyMap());
}
}

View File

@@ -150,6 +150,6 @@ public class HistoBackedMinAggregatorTests extends AggregatorTestCase {
}
private MappedFieldType defaultFieldType() {
return new HistogramFieldMapper.HistogramFieldType(HistoBackedMinAggregatorTests.FIELD_NAME, true, Collections.emptyMap());
return new HistogramFieldMapper.HistogramFieldType(HistoBackedMinAggregatorTests.FIELD_NAME, Collections.emptyMap());
}
}

View File

@@ -150,6 +150,6 @@ public class HistoBackedSumAggregatorTests extends AggregatorTestCase {
}
private MappedFieldType defaultFieldType() {
return new HistogramFieldMapper.HistogramFieldType(HistoBackedSumAggregatorTests.FIELD_NAME, true, Collections.emptyMap());
return new HistogramFieldMapper.HistogramFieldType(HistoBackedSumAggregatorTests.FIELD_NAME, Collections.emptyMap());
}
}

View File

@@ -158,6 +158,6 @@ public class HistoBackedValueCountAggregatorTests extends AggregatorTestCase {
}
private MappedFieldType defaultFieldType() {
return new HistogramFieldMapper.HistogramFieldType("field", true, Collections.emptyMap());
return new HistogramFieldMapper.HistogramFieldType("field", Collections.emptyMap());
}
}

View File

@@ -93,7 +93,7 @@ public class TDigestPreAggregatedPercentileRanksAggregatorTests extends Aggregat
PercentileRanksAggregationBuilder aggBuilder = new PercentileRanksAggregationBuilder("my_agg", new double[] {0.1, 0.5, 12})
.field("field")
.method(PercentilesMethod.TDIGEST);
MappedFieldType fieldType = new HistogramFieldMapper.HistogramFieldType("field", true, Collections.emptyMap());
MappedFieldType fieldType = new HistogramFieldMapper.HistogramFieldType("field", Collections.emptyMap());
try (IndexReader reader = w.getReader()) {
IndexSearcher searcher = new IndexSearcher(reader);
PercentileRanks ranks = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType);

View File

@@ -147,7 +147,7 @@ public class TDigestPreAggregatedPercentilesAggregatorTests extends AggregatorTe
PercentilesAggregationBuilder builder =
new PercentilesAggregationBuilder("test").field("number").method(PercentilesMethod.TDIGEST);
MappedFieldType fieldType = new HistogramFieldMapper.HistogramFieldType("number", true, Collections.emptyMap());
MappedFieldType fieldType = new HistogramFieldMapper.HistogramFieldType("number", Collections.emptyMap());
Aggregator aggregator = createAggregator(builder, indexSearcher, fieldType);
aggregator.preCollection();
indexSearcher.search(query, aggregator);

View File

@@ -8,8 +8,8 @@ package org.elasticsearch.xpack.analytics.mapper;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.FieldMapperTestCase2;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MapperTestCase;
import org.elasticsearch.index.mapper.ParsedDocument;
import org.elasticsearch.index.mapper.SourceToParse;
import org.elasticsearch.plugins.Plugin;
@@ -17,35 +17,24 @@ import org.elasticsearch.xpack.analytics.AnalyticsPlugin;
import java.io.IOException;
import java.util.Collection;
import java.util.Set;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.notNullValue;
import static org.hamcrest.Matchers.nullValue;
public class HistogramFieldMapperTests extends FieldMapperTestCase2<HistogramFieldMapper.Builder> {
public class HistogramFieldMapperTests extends MapperTestCase {
@Override
protected void writeFieldValue(XContentBuilder builder) throws IOException {
builder.startObject().field("values", new double[] { 2, 3 }).field("counts", new int[] { 0, 4 }).endObject();
}
@Override
protected Set<String> unsupportedProperties() {
return org.elasticsearch.common.collect.Set.of("analyzer", "similarity", "doc_values", "store", "index");
}
@Override
protected Collection<? extends Plugin> getPlugins() {
return org.elasticsearch.common.collect.List.of(new AnalyticsPlugin(Settings.EMPTY));
}
@Override
protected HistogramFieldMapper.Builder newBuilder() {
return new HistogramFieldMapper.Builder("histo");
}
@Override
protected void minimalMapping(XContentBuilder b) throws IOException {
b.field("type", "histogram");