[7.x] Allow metadata fields in the _source (#62616)

Backports #61590 to 7.x

    So far we don't allow metadata fields in the document _source. However, in the case of the _doc_count field mapper (#58339) we want to be able to set its value through the document _source.

    This PR adds a method to the metadata field parsers that exposes whether the field can be included in the document source or not.
    This way each metadata field can configure whether it may be included in the document _source.
Christos Soulios 2020-09-18 19:56:41 +03:00 committed by GitHub
parent 1dd8a5971f
commit 6a298970fd
23 changed files with 301 additions and 297 deletions
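The mechanism behind this change is visible in the MetadataFieldMapper and MockMetadataMapperPlugin diffs below: parseCreateField now has a default implementation that rejects the field with a MapperParsingException, so a metadata mapper accepts a value from the document _source only by overriding it. A minimal sketch of such an opt-in mapper, modeled on the MockMetadataMapper test fixture added in this commit (the _example_metadata name and string-only handling are illustrative, not part of the change):

import org.apache.lucene.document.Field;
import org.apache.lucene.document.StringField;
import org.elasticsearch.common.xcontent.XContentParser;

import java.io.IOException;

// Sketch only: a metadata mapper that opts in to being set from the document _source.
// Mappers that keep the inherited parseCreateField still reject the field with
// "is a metadata field and cannot be added inside a document".
public class ExampleMetadataMapper extends MetadataFieldMapper {

    static final String CONTENT_TYPE = "_example_metadata";

    protected ExampleMetadataMapper() {
        super(new KeywordFieldMapper.KeywordFieldType(CONTENT_TYPE));
    }

    @Override
    protected void parseCreateField(ParseContext context) throws IOException {
        // Accept a plain string value from the _source and index it as a stored field.
        if (context.parser().currentToken() == XContentParser.Token.VALUE_STRING) {
            context.doc().add(new StringField(CONTENT_TYPE, context.parser().text(), Field.Store.YES));
        } else {
            throw new IllegalArgumentException("Field [" + CONTENT_TYPE + "] must be a string.");
        }
    }

    @Override
    protected String contentType() {
        return CONTENT_TYPE;
    }
}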


@@ -65,17 +65,6 @@ public class RankFeatureMetaFieldMapper extends MetadataFieldMapper {
         super(RankFeatureMetaFieldType.INSTANCE);
     }
 
-    @Override
-    public void preParse(ParseContext context) {}
-
-    @Override
-    protected void parseCreateField(ParseContext context) {
-        throw new AssertionError("Should never be called");
-    }
-
-    @Override
-    public void postParse(ParseContext context) {}
-
     @Override
     protected String contentType() {
         return CONTENT_TYPE;


@@ -69,6 +69,7 @@ public class RankFeatureMetaFieldMapperTests extends ESSingleNodeTestCase {
         BytesReference bytes = BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field(rfMetaField, 0).endObject());
         MapperParsingException e = expectThrows(MapperParsingException.class, () ->
             mapper.parse(new SourceToParse("test", "_doc", "1", bytes, XContentType.JSON)));
-        assertTrue(e.getMessage().contains("Field ["+ rfMetaField + "] is a metadata field and cannot be added inside a document."));
+        assertTrue(
+            e.getCause().getMessage().contains("Field ["+ rfMetaField + "] is a metadata field and cannot be added inside a document."));
     }
 }


@@ -80,23 +80,9 @@ public class SizeFieldMapper extends MetadataFieldMapper {
         return this.enabled.value();
     }
 
-    @Override
-    public void preParse(ParseContext context) {
-    }
-
     @Override
     public void postParse(ParseContext context) throws IOException {
         // we post parse it so we get the size stored, possibly compressed (source will be preParse)
-        super.parse(context);
-    }
-
-    @Override
-    public void parse(ParseContext context) {
-        // nothing to do here, we call the parent in postParse
-    }
-
-    @Override
-    protected void parseCreateField(ParseContext context) {
         if (enabled.value() == false) {
             return;
         }


@@ -26,7 +26,6 @@ import org.apache.lucene.search.Query;
 import org.elasticsearch.common.Explicit;
 import org.elasticsearch.index.query.QueryShardContext;
 
-import java.io.IOException;
 import java.util.Collections;
 import java.util.List;
 
@@ -104,25 +103,6 @@ public class AllFieldMapper extends MetadataFieldMapper {
         this.enabled = enabled;
     }
 
-    @Override
-    public void preParse(ParseContext context) {
-    }
-
-    @Override
-    public void postParse(ParseContext context) throws IOException {
-        super.parse(context);
-    }
-
-    @Override
-    public void parse(ParseContext context) throws IOException {
-        // we parse in post parse
-    }
-
-    @Override
-    protected void parseCreateField(ParseContext context) throws IOException {
-        // noop mapper
-    }
-
     @Override
     protected String contentType() {
         return CONTENT_TYPE;


@@ -407,10 +407,7 @@ final class DocumentParser {
             if (token == XContentParser.Token.FIELD_NAME) {
                 currentFieldName = parser.currentName();
                 paths = splitAndValidatePath(currentFieldName);
-                if (context.mapperService().isMetadataField(context.path().pathAsText(currentFieldName))) {
-                    throw new MapperParsingException("Field [" + currentFieldName + "] is a metadata field and cannot be added inside"
-                        + " a document. Use the index API request parameters.");
-                } else if (containsDisabledObjectMapper(mapper, paths)) {
+                if (containsDisabledObjectMapper(mapper, paths)) {
                     parser.nextToken();
                     parser.skipChildren();
                 }
@@ -499,7 +496,7 @@
                                           String[] paths) throws IOException {
         assert currentFieldName != null;
-        Mapper objectMapper = getMapper(mapper, currentFieldName, paths);
+        Mapper objectMapper = getMapper(context, mapper, currentFieldName, paths);
         if (objectMapper != null) {
             context.path().add(currentFieldName);
             parseObjectOrField(context, objectMapper);
@@ -536,7 +533,7 @@
                                    String[] paths) throws IOException {
         String arrayFieldName = lastFieldName;
-        Mapper mapper = getMapper(parentMapper, lastFieldName, paths);
+        Mapper mapper = getMapper(context, parentMapper, lastFieldName, paths);
         if (mapper != null) {
             // There is a concrete mapper for this field already. Need to check if the mapper
             // expects an array, if so we pass the context straight to the mapper and if not
@@ -613,7 +610,7 @@
             throw new MapperParsingException("object mapping [" + parentMapper.name() + "] trying to serialize a value with"
                 + " no field associated with it, current value [" + context.parser().textOrNull() + "]");
         }
-        Mapper mapper = getMapper(parentMapper, currentFieldName, paths);
+        Mapper mapper = getMapper(context, parentMapper, currentFieldName, paths);
         if (mapper != null) {
             parseObjectOrField(context, mapper);
         } else {
@@ -630,7 +627,7 @@
     private static void parseNullValue(ParseContext context, ObjectMapper parentMapper, String lastFieldName,
                                        String[] paths) throws IOException {
         // we can only handle null values if we have mappings for them
-        Mapper mapper = getMapper(parentMapper, lastFieldName, paths);
+        Mapper mapper = getMapper(context, parentMapper, lastFieldName, paths);
         if (mapper != null) {
             // TODO: passing null to an object seems bogus?
             parseObjectOrField(context, mapper);
@@ -898,9 +895,16 @@
     }
 
     // looks up a child mapper, but takes into account field names that expand to objects
-    private static Mapper getMapper(ObjectMapper objectMapper, String fieldName, String[] subfields) {
+    private static Mapper getMapper(final ParseContext context, ObjectMapper objectMapper, String fieldName, String[] subfields) {
+        String fieldPath = context.path().pathAsText(fieldName);
+        // Check if mapper is a metadata mapper first
+        Mapper mapper = context.docMapper().mapping().getMetadataMapper(fieldPath);
+        if (mapper != null) {
+            return mapper;
+        }
         for (int i = 0; i < subfields.length - 1; ++i) {
-            Mapper mapper = objectMapper.getMapper(subfields[i]);
+            mapper = objectMapper.getMapper(subfields[i]);
             if (mapper == null || (mapper instanceof ObjectMapper) == false) {
                 return null;
             }


@@ -45,7 +45,6 @@ public class FieldNamesFieldMapper extends MetadataFieldMapper {
     private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(FieldNamesFieldMapper.class);
 
     public static final String NAME = "_field_names";
 
     public static final String CONTENT_TYPE = "_field_names";
@@ -91,6 +90,7 @@ public class FieldNamesFieldMapper extends MetadataFieldMapper {
             this.indexVersionCreated = indexVersionCreated;
         }
 
+        @Override
         protected List<Parameter<?>> getParameters() {
             return Collections.singletonList(enabled);
         }
@@ -158,20 +158,34 @@ public class FieldNamesFieldMapper extends MetadataFieldMapper {
         return (FieldNamesFieldType) super.fieldType();
     }
 
-    @Override
-    public void preParse(ParseContext context) {
-    }
-
     @Override
     public void postParse(ParseContext context) throws IOException {
         if (context.indexSettings().getIndexVersionCreated().before(Version.V_6_1_0)) {
-            super.parse(context);
+            if (fieldType().isEnabled() == false) {
+                return;
+            }
+            for (ParseContext.Document document : context) {
+                final List<String> paths = new ArrayList<>(document.getFields().size());
+                String previousPath = ""; // used as a sentinel - field names can't be empty
+                for (IndexableField field : document.getFields()) {
+                    final String path = field.name();
+                    if (path.equals(previousPath)) {
+                        // Sometimes mappers create multiple Lucene fields, eg. one for indexing,
+                        // one for doc values and one for storing. Deduplicating is not required
+                        // for correctness but this simple check helps save utf-8 conversions and
+                        // gives Lucene fewer values to deal with.
+                        continue;
+                    }
+                    paths.add(path);
+                    previousPath = path;
+                }
+                for (String path : paths) {
+                    for (String fieldName : extractFieldNames(path)) {
+                        document.add(new Field(fieldType().name(), fieldName, Defaults.FIELD_TYPE));
+                    }
+                }
+            }
         }
     }
 
-    @Override
-    public void parse(ParseContext context) throws IOException {
-        // Adding values to the _field_names field is handled by the mappers for each field type
-    }
-
     static Iterable<String> extractFieldNames(final String fullPath) {
@@ -179,7 +193,6 @@ public class FieldNamesFieldMapper extends MetadataFieldMapper {
             @Override
             public Iterator<String> iterator() {
                 return new Iterator<String>() {
                     int endIndex = nextEndIndex(0);
 
                     private int nextEndIndex(int index) {
@@ -211,34 +224,6 @@ public class FieldNamesFieldMapper extends MetadataFieldMapper {
         };
     }
 
-    @Override
-    protected void parseCreateField(ParseContext context) throws IOException {
-        if (fieldType().isEnabled() == false) {
-            return;
-        }
-        for (ParseContext.Document document : context) {
-            final List<String> paths = new ArrayList<>(document.getFields().size());
-            String previousPath = ""; // used as a sentinel - field names can't be empty
-            for (IndexableField field : document.getFields()) {
-                final String path = field.name();
-                if (path.equals(previousPath)) {
-                    // Sometimes mappers create multiple Lucene fields, eg. one for indexing,
-                    // one for doc values and one for storing. Deduplicating is not required
-                    // for correctness but this simple check helps save utf-8 conversions and
-                    // gives Lucene fewer values to deal with.
-                    continue;
-                }
-                paths.add(path);
-                previousPath = path;
-            }
-            for (String path : paths) {
-                for (String fieldName : extractFieldNames(path)) {
-                    document.add(new Field(fieldType().name(), fieldName, Defaults.FIELD_TYPE));
-                }
-            }
-        }
-    }
-
     @Override
     protected String contentType() {
         return CONTENT_TYPE;


@@ -256,11 +256,6 @@ public class IdFieldMapper extends MetadataFieldMapper {
     @Override
     public void preParse(ParseContext context) throws IOException {
-        super.parse(context);
-    }
-
-    @Override
-    protected void parseCreateField(ParseContext context) throws IOException {
         BytesRef id = Uid.encodeId(context.sourceToParse().id());
         context.doc().add(new Field(NAME, id, Defaults.FIELD_TYPE));
     }


@@ -82,22 +82,8 @@ public final class IgnoredFieldMapper extends MetadataFieldMapper {
         super(IgnoredFieldType.INSTANCE);
     }
 
-    @Override
-    public void preParse(ParseContext context) throws IOException {
-    }
-
     @Override
     public void postParse(ParseContext context) throws IOException {
-        super.parse(context);
-    }
-
-    @Override
-    public void parse(ParseContext context) throws IOException {
-        // done in post-parse
-    }
-
-    @Override
-    protected void parseCreateField(ParseContext context) throws IOException {
         for (String field : context.getIgnoredFields()) {
             context.doc().add(new Field(NAME, field, Defaults.FIELD_TYPE));
         }


@@ -27,7 +27,6 @@ import org.elasticsearch.index.query.QueryShardContext;
 import org.elasticsearch.search.aggregations.support.CoreValuesSourceType;
 import org.elasticsearch.search.lookup.SearchLookup;
 
-import java.io.IOException;
 import java.util.Collections;
 import java.util.function.Supplier;
 
@@ -73,12 +72,6 @@ public class IndexFieldMapper extends MetadataFieldMapper {
         super(IndexFieldType.INSTANCE);
     }
 
-    @Override
-    public void preParse(ParseContext context) throws IOException {}
-
-    @Override
-    protected void parseCreateField(ParseContext context) throws IOException {}
-
     @Override
     protected String contentType() {
         return CONTENT_TYPE;


@@ -48,6 +48,7 @@ public final class Mapping implements ToXContentFragment {
     final RootObjectMapper root;
     final MetadataFieldMapper[] metadataMappers;
     final Map<Class<? extends MetadataFieldMapper>, MetadataFieldMapper> metadataMappersMap;
+    final Map<String, MetadataFieldMapper> metadataMappersByName;
     final Map<String, Object> meta;
 
     public Mapping(Version indexCreated, RootObjectMapper rootObjectMapper,
@@ -55,8 +56,10 @@
         this.indexCreated = indexCreated;
         this.metadataMappers = metadataMappers;
         Map<Class<? extends MetadataFieldMapper>, MetadataFieldMapper> metadataMappersMap = new HashMap<>();
+        Map<String, MetadataFieldMapper> metadataMappersByName = new HashMap<>();
         for (MetadataFieldMapper metadataMapper : metadataMappers) {
             metadataMappersMap.put(metadataMapper.getClass(), metadataMapper);
+            metadataMappersByName.put(metadataMapper.name(), metadataMapper);
         }
         this.root = rootObjectMapper;
         // keep root mappers sorted for consistent serialization
@@ -67,6 +70,7 @@
             }
         });
         this.metadataMappersMap = unmodifiableMap(metadataMappersMap);
+        this.metadataMappersByName = unmodifiableMap(metadataMappersByName);
         this.meta = meta;
     }
@@ -136,6 +140,10 @@
         return new Mapping(indexCreated, mergedRoot, mergedMetadataMappers.values().toArray(new MetadataFieldMapper[0]), mergedMeta);
     }
 
+    public MetadataFieldMapper getMetadataMapper(String mapperName) {
+        return metadataMappersByName.get(mapperName);
+    }
+
     @Override
     public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
         root.toXContent(builder, params, new ToXContent() {


@@ -156,10 +156,18 @@ public abstract class MetadataFieldMapper extends ParametrizedFieldMapper {
         return builder.endObject();
     }
 
+    @Override
+    protected void parseCreateField(ParseContext context) throws IOException {
+        throw new MapperParsingException("Field [" + name() + "] is a metadata field and cannot be added inside"
+            + " a document. Use the index API request parameters.");
+    }
+
     /**
      * Called before {@link FieldMapper#parse(ParseContext)} on the {@link RootObjectMapper}.
      */
-    public abstract void preParse(ParseContext context) throws IOException;
+    public void preParse(ParseContext context) throws IOException {
+        // do nothing
+    }
 
     /**
      * Called after {@link FieldMapper#parse(ParseContext)} on the {@link RootObjectMapper}.
@@ -172,5 +180,4 @@
     public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup lookup, String format) {
         throw new UnsupportedOperationException("Cannot fetch values for internal field [" + name() + "].");
     }
 }


@@ -117,18 +117,6 @@ public class RoutingFieldMapper extends MetadataFieldMapper {
     @Override
     public void preParse(ParseContext context) throws IOException {
-        super.parse(context);
-    }
-
-    @Override
-    public void parse(ParseContext context) throws IOException {
-        // no need ot parse here, we either get the routing in the sourceToParse
-        // or we don't have routing, if we get it in sourceToParse, we process it in preParse
-        // which will always be called
-    }
-
-    @Override
-    protected void parseCreateField(ParseContext context) throws IOException {
         String routing = context.sourceToParse().routing();
         if (routing != null) {
             context.doc().add(new Field(fieldType().name(), routing, Defaults.FIELD_TYPE));


@@ -183,11 +183,6 @@ public class SeqNoFieldMapper extends MetadataFieldMapper {
     @Override
     public void preParse(ParseContext context) throws IOException {
-        super.parse(context);
-    }
-
-    @Override
-    protected void parseCreateField(ParseContext context) throws IOException {
         // see InternalEngine.innerIndex to see where the real version value is set
         // also see ParsedDocument.updateSeqID (called by innerIndex)
         SequenceIDFields seqID = SequenceIDFields.emptySeqID();
@@ -197,11 +192,6 @@
         context.doc().add(seqID.primaryTerm);
     }
 
-    @Override
-    public void parse(ParseContext context) throws IOException {
-        // fields are added in parseCreateField
-    }
-
     @Override
     public void postParse(ParseContext context) throws IOException {
         // In the case of nested docs, let's fill nested docs with the original


@@ -156,16 +156,6 @@ public class SourceFieldMapper extends MetadataFieldMapper {
     @Override
     public void preParse(ParseContext context) throws IOException {
-        super.parse(context);
-    }
-
-    @Override
-    public void parse(ParseContext context) throws IOException {
-        // nothing to do here, we will call it in pre parse
-    }
-
-    @Override
-    protected void parseCreateField(ParseContext context) throws IOException {
         BytesReference originalSource = context.sourceToParse().source();
         XContentType contentType = context.sourceToParse().getXContentType();
         final BytesReference adaptedSource = applyFilters(originalSource, contentType);


@@ -172,7 +172,6 @@ public class TypeFieldMapper extends MetadataFieldMapper {
             QueryParsers.setRewriteMethod(query, method);
             return query;
         }
     }
 
     /**
@@ -263,16 +262,6 @@
     @Override
     public void preParse(ParseContext context) throws IOException {
-        super.parse(context);
-    }
-
-    @Override
-    public void parse(ParseContext context) throws IOException {
-        // we parse in pre parse
-    }
-
-    @Override
-    protected void parseCreateField(ParseContext context) throws IOException {
         if (fieldType.indexOptions() == IndexOptions.NONE && !fieldType.stored()) {
             return;
         }


@@ -68,22 +68,12 @@ public class VersionFieldMapper extends MetadataFieldMapper {
     @Override
     public void preParse(ParseContext context) throws IOException {
-        super.parse(context);
-    }
-
-    @Override
-    protected void parseCreateField(ParseContext context) throws IOException {
         // see InternalEngine.updateVersion to see where the real version value is set
         final Field version = new NumericDocValuesField(NAME, -1L);
         context.version(version);
         context.doc().add(version);
     }
 
-    @Override
-    public void parse(ParseContext context) throws IOException {
-        // _version added in preparse
-    }
-
     @Override
     public void postParse(ParseContext context) throws IOException {
         // In the case of nested docs, let's fill nested docs with version=1 so that Lucene doesn't write a Bitset for documents


@@ -46,7 +46,6 @@ import org.elasticsearch.index.mapper.MapperParsingException;
 import org.elasticsearch.index.mapper.MapperService;
 import org.elasticsearch.index.mapper.MetadataFieldMapper;
 import org.elasticsearch.index.mapper.ParametrizedFieldMapper;
-import org.elasticsearch.index.mapper.ParseContext;
 import org.elasticsearch.index.mapper.TextSearchInfo;
 import org.elasticsearch.index.query.QueryShardContext;
 import org.elasticsearch.indices.IndexTemplateMissingException;
 
@@ -1610,16 +1609,6 @@ public class MetadataIndexTemplateServiceTests extends ESSingleNodeTestCase {
                 return new MetadataTimestampFieldBuilder().init(this);
             }
 
-            @Override
-            public void preParse(ParseContext context) {
-            }
-
-            @Override
-            protected void parseCreateField(ParseContext context) {
-            }
-
             @Override
             protected String contentType() {
                 return "_data_stream_timestamp";


@@ -31,15 +31,18 @@ import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.common.xcontent.XContentType;
 import org.elasticsearch.index.IndexSettings;
 import org.elasticsearch.index.mapper.ParseContext.Document;
+import org.elasticsearch.plugins.Plugin;
 
 import java.io.IOException;
 import java.math.BigDecimal;
 import java.math.BigInteger;
 import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
+import java.util.Collection;
 import java.util.Collections;
 import java.util.List;
 
+import static java.util.Collections.singletonList;
 import static org.elasticsearch.test.StreamsUtils.copyToBytesFromClasspath;
 import static org.elasticsearch.test.StreamsUtils.copyToStringFromClasspath;
 import static org.hamcrest.Matchers.containsString;
 
@@ -50,6 +53,11 @@ import static org.hamcrest.Matchers.notNullValue;
 public class DocumentParserTests extends MapperServiceTestCase {
 
+    @Override
+    protected Collection<? extends Plugin> getPlugins() {
+        return singletonList(new MockMetadataMapperPlugin());
+    }
+
     public void testFieldDisabled() throws Exception {
         DocumentMapper mapper = createDocumentMapper(mapping(b -> {
             b.startObject("foo").field("enabled", false).endObject();
@@ -499,7 +507,8 @@ public class DocumentParserTests extends MapperServiceTestCase {
     }
 
     public void testDynamicLongArray() throws Exception {
-        DocumentMapper mapper = createDocumentMapper(mapping(b -> {}));
+        DocumentMapper mapper = createDocumentMapper(mapping(b -> {
+        }));
         ParsedDocument doc = mapper.parse(source(b -> b.startArray("foo").value(0).value(1).endArray()));
         assertEquals(4, doc.rootDoc().getFields("foo").length);
     }
@@ -669,7 +678,8 @@ public class DocumentParserTests extends MapperServiceTestCase {
     }
 
     public void testDynamicDottedFieldNameLongArray() throws Exception {
-        DocumentMapper mapper = createDocumentMapper(mapping(b -> {}));
+        DocumentMapper mapper = createDocumentMapper(mapping(b -> {
+        }));
         ParsedDocument doc = mapper.parse(source(b -> b.startArray("foo.bar.baz").value(0).value(1).endArray()));
         assertEquals(4, doc.rootDoc().getFields("foo.bar.baz").length);
@@ -753,7 +763,8 @@ public class DocumentParserTests extends MapperServiceTestCase {
     }
 
     public void testDynamicDottedFieldNameLong() throws Exception {
-        DocumentMapper mapper = createDocumentMapper(mapping(b -> {}));
+        DocumentMapper mapper = createDocumentMapper(mapping(b -> {
+        }));
         ParsedDocument doc = mapper.parse(source(b -> b.field("foo.bar.baz", 0)));
         assertEquals(2, doc.rootDoc().getFields("foo.bar.baz").length);
         Mapper fooMapper = doc.dynamicMappingsUpdate().root().getMapper("foo");
@@ -835,7 +846,8 @@ public class DocumentParserTests extends MapperServiceTestCase {
     }
 
     public void testDynamicDottedFieldNameObject() throws Exception {
-        DocumentMapper mapper = createDocumentMapper(mapping(b -> {}));
+        DocumentMapper mapper = createDocumentMapper(mapping(b -> {
+        }));
         ParsedDocument doc = mapper.parse(source(b -> b.startObject("foo.bar.baz").field("a", 0).endObject()));
         assertEquals(2, doc.rootDoc().getFields("foo.bar.baz.a").length);
         Mapper fooMapper = doc.dynamicMappingsUpdate().root().getMapper("foo");
@@ -927,15 +939,41 @@ public class DocumentParserTests extends MapperServiceTestCase {
     }
 
     public void testDocumentContainsMetadataField() throws Exception {
-        DocumentMapper mapper = createDocumentMapper(mapping(b -> {}));
+        DocumentMapper mapper = createDocumentMapper(mapping(b -> {
+        }));
         MapperParsingException e = expectThrows(MapperParsingException.class, () ->
             mapper.parse(source(b -> b.field("_field_names", 0))));
-        assertTrue(e.getMessage(),
-            e.getMessage().contains("Field [_field_names] is a metadata field and cannot be added inside a document."));
+        assertTrue(e.getCause().getMessage(),
+            e.getCause().getMessage().contains("Field [_field_names] is a metadata field and cannot be added inside a document."));
         mapper.parse(source(b -> b.field("foo._field_names", 0))); // parses without error
     }
 
+    public void testDocumentContainsAllowedMetadataField() throws Exception {
+        DocumentMapper mapper = createDocumentMapper(mapping(b -> {
+        }));
+        {
+            // A metadata field that parses a value fails to parse a null value
+            MapperParsingException e = expectThrows(MapperParsingException.class, () ->
+                mapper.parse(source(b -> b.nullField(MockMetadataMapperPlugin.MockMetadataMapper.CONTENT_TYPE))));
+            assertTrue(e.getMessage(), e.getMessage().contains("failed to parse field [_mock_metadata]"));
+        }
+        {
+            // A metadata field that parses a value fails to parse an object
+            MapperParsingException e = expectThrows(MapperParsingException.class, () ->
+                mapper.parse(source(b -> b.field(MockMetadataMapperPlugin.MockMetadataMapper.CONTENT_TYPE)
+                    .startObject().field("sub-field", "true").endObject())));
+            assertTrue(e.getMessage(), e.getMessage().contains("failed to parse field [_mock_metadata]"));
+        }
+        {
+            ParsedDocument doc = mapper.parse(source(b ->
+                b.field(MockMetadataMapperPlugin.MockMetadataMapper.CONTENT_TYPE, "mock-metadata-field-value")
+            ));
+            IndexableField field = doc.rootDoc().getField(MockMetadataMapperPlugin.MockMetadataMapper.CONTENT_TYPE);
+            assertEquals("mock-metadata-field-value", field.stringValue());
+        }
+    }
+
     public void testSimpleMapper() throws Exception {
         DocumentMapper docMapper = createDocumentMapper(mapping(b -> {
             b.startObject("name");
@@ -970,7 +1008,8 @@ public class DocumentParserTests extends MapperServiceTestCase {
     public void testParseToJsonAndParse() throws Exception {
         String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/simple/test-mapping.json");
-        MapperService mapperService = createMapperService(mapping(b -> {}));
+        MapperService mapperService = createMapperService(mapping(b -> {
+        }));
         merge("person", mapperService, mapping);
         String builtMapping = mapperService.documentMapper().mappingSource().string();
         // reparse it
@@ -1015,7 +1054,8 @@ public class DocumentParserTests extends MapperServiceTestCase {
     }
 
     public void testNoDocumentSent() throws Exception {
-        DocumentMapper docMapper = createDocumentMapper(mapping(b -> {}));
+        DocumentMapper docMapper = createDocumentMapper(mapping(b -> {
+        }));
         BytesReference json = new BytesArray("".getBytes(StandardCharsets.UTF_8));
         MapperParsingException e = expectThrows(MapperParsingException.class,
             () -> docMapper.parse(new SourceToParse("test", "_doc", "1", json, XContentType.JSON)));
@@ -1023,7 +1063,8 @@ public class DocumentParserTests extends MapperServiceTestCase {
     }
 
     public void testNoLevel() throws Exception {
-        DocumentMapper defaultMapper = createDocumentMapper(mapping(b -> {}));
+        DocumentMapper defaultMapper = createDocumentMapper(mapping(b -> {
+        }));
         ParsedDocument doc = defaultMapper.parse(source(b -> {
             b.field("test1", "value1");
             b.field("test2", "value2");
@@ -1038,7 +1079,8 @@ public class DocumentParserTests extends MapperServiceTestCase {
     // we no longer have types?
     public void testTypeLevel() throws Exception {
-        DocumentMapper defaultMapper = createDocumentMapper(mapping(b -> {}));
+        DocumentMapper defaultMapper = createDocumentMapper(mapping(b -> {
+        }));
 
         ParsedDocument doc = defaultMapper.parse(source(b -> {
             b.startObject("type");
@@ -1056,7 +1098,8 @@ public class DocumentParserTests extends MapperServiceTestCase {
     }
 
     public void testNoLevelWithFieldTypeAsValue() throws Exception {
-        DocumentMapper defaultMapper = createDocumentMapper(mapping(b -> {}));
+        DocumentMapper defaultMapper = createDocumentMapper(mapping(b -> {
+        }));
 
         ParsedDocument doc = defaultMapper.parse(source(b -> {
             b.field("type", "value_type");
@@ -1073,7 +1116,8 @@ public class DocumentParserTests extends MapperServiceTestCase {
     public void testTypeLevelWithFieldTypeAsValue() throws Exception {
-        DocumentMapper defaultMapper = createDocumentMapper(mapping(b -> {}));
+        DocumentMapper defaultMapper = createDocumentMapper(mapping(b -> {
+        }));
 
         ParsedDocument doc = defaultMapper.parse(source(b -> {
             b.startObject("type");
@@ -1093,7 +1137,8 @@ public class DocumentParserTests extends MapperServiceTestCase {
     }
 
     public void testNoLevelWithFieldTypeAsObject() throws Exception {
-        DocumentMapper defaultMapper = createDocumentMapper(mapping(b -> {}));
+        DocumentMapper defaultMapper = createDocumentMapper(mapping(b -> {
+        }));
 
         ParsedDocument doc = defaultMapper.parse(source(b -> {
             b.startObject("type").field("type_field", "type_value").endObject();
@@ -1109,7 +1154,8 @@ public class DocumentParserTests extends MapperServiceTestCase {
     }
 
     public void testTypeLevelWithFieldTypeAsObject() throws Exception {
-        DocumentMapper defaultMapper = createDocumentMapper(mapping(b -> {}));
+        DocumentMapper defaultMapper = createDocumentMapper(mapping(b -> {
+        }));
 
         ParsedDocument doc = defaultMapper.parse(source(b -> {
             b.startObject("type");
@@ -1129,7 +1175,8 @@ public class DocumentParserTests extends MapperServiceTestCase {
     }
 
     public void testNoLevelWithFieldTypeAsValueNotFirst() throws Exception {
-        DocumentMapper defaultMapper = createDocumentMapper(mapping(b -> {}));
+        DocumentMapper defaultMapper = createDocumentMapper(mapping(b -> {
+        }));
 
         ParsedDocument doc = defaultMapper.parse(source(b -> {
             b.startObject("type");
@@ -1149,7 +1196,8 @@ public class DocumentParserTests extends MapperServiceTestCase {
     }
 
     public void testTypeLevelWithFieldTypeAsValueNotFirst() throws Exception {
-        DocumentMapper defaultMapper = createDocumentMapper(mapping(b -> {}));
+        DocumentMapper defaultMapper = createDocumentMapper(mapping(b -> {
+        }));
 
         ParsedDocument doc = defaultMapper.parse(source(b -> {
             b.startObject("type");
@@ -1169,7 +1217,8 @@ public class DocumentParserTests extends MapperServiceTestCase {
     }
 
     public void testNoLevelWithFieldTypeAsObjectNotFirst() throws Exception {
-        DocumentMapper defaultMapper = createDocumentMapper(mapping(b -> {}));
+        DocumentMapper defaultMapper = createDocumentMapper(mapping(b -> {
+        }));
 
         ParsedDocument doc = defaultMapper.parse(source(b -> {
             b.field("test1", "value1");
@@ -1186,7 +1235,8 @@ public class DocumentParserTests extends MapperServiceTestCase {
     }
 
     public void testTypeLevelWithFieldTypeAsObjectNotFirst() throws Exception {
-        DocumentMapper defaultMapper = createDocumentMapper(mapping(b -> {}));
+        DocumentMapper defaultMapper = createDocumentMapper(mapping(b -> {
+        }));
 
         ParsedDocument doc = defaultMapper.parse(source(b -> {
             b.startObject("type");
@@ -1231,7 +1281,8 @@ public class DocumentParserTests extends MapperServiceTestCase {
     }
 
     public void testDynamicFieldsStartingAndEndingWithDot() throws Exception {
-        MapperService mapperService = createMapperService(mapping(b -> {}));
+        MapperService mapperService = createMapperService(mapping(b -> {
+        }));
         merge(mapperService, dynamicMapping(mapperService.documentMapper().parse(source(b -> {
             b.startArray("top.");
             {
@@ -1279,7 +1330,8 @@ public class DocumentParserTests extends MapperServiceTestCase {
 
     public void testDynamicFieldsEmptyName() throws Exception {
-        DocumentMapper mapper = createDocumentMapper(mapping(b -> {}));
+        DocumentMapper mapper = createDocumentMapper(mapping(b -> {
+        }));
 
         IllegalArgumentException emptyFieldNameException = expectThrows(IllegalArgumentException.class,
             () -> mapper.parse(source(b -> {
@@ -1300,7 +1352,8 @@ public class DocumentParserTests extends MapperServiceTestCase {
 
     public void testBlankFieldNames() throws Exception {
-        DocumentMapper mapper = createDocumentMapper(mapping(b -> {}));
+        DocumentMapper mapper = createDocumentMapper(mapping(b -> {
+        }));
 
         MapperParsingException err = expectThrows(MapperParsingException.class, () ->
             mapper.parse(source(b -> b.field("", "foo"))));
         assertThat(err.getCause(), notNullValue());


@@ -37,11 +37,6 @@ public class ExternalMetadataMapper extends MetadataFieldMapper {
         super(new BooleanFieldMapper.BooleanFieldType(FIELD_NAME));
     }
 
-    @Override
-    protected void parseCreateField(ParseContext context) throws IOException {
-        // handled in post parse
-    }
-
     @Override
     public Iterator<Mapper> iterator() {
         return Collections.emptyIterator();
@@ -52,10 +47,6 @@
         return CONTENT_TYPE;
     }
 
-    @Override
-    public void preParse(ParseContext context) throws IOException {
-    }
-
     @Override
     public void postParse(ParseContext context) throws IOException {
         context.doc().add(new StringField(FIELD_NAME, FIELD_VALUE, Store.YES));


@@ -59,7 +59,8 @@ public class IdFieldMapperTests extends ESSingleNodeTestCase {
                 .startObject().field("_id", "1").endObject()), XContentType.JSON));
             fail("Expected failure to parse metadata field");
         } catch (MapperParsingException e) {
-            assertTrue(e.getMessage(), e.getMessage().contains("Field [_id] is a metadata field and cannot be added inside a document"));
+            assertTrue(e.getCause().getMessage(),
+                e.getCause().getMessage().contains("Field [_id] is a metadata field and cannot be added inside a document"));
         }
     }


@@ -0,0 +1,98 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.index.mapper;

import org.apache.lucene.document.Field;
import org.apache.lucene.document.StringField;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.plugins.MapperPlugin;
import org.elasticsearch.plugins.Plugin;

import java.io.IOException;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.Map;

/**
 * Mapper plugin providing a mock metadata field mapper implementation that supports setting its value
 * through the document source.
 */
public class MockMetadataMapperPlugin extends Plugin implements MapperPlugin {

    /**
     * A mock metadata field mapper that supports being set from the document source.
     */
    public static class MockMetadataMapper extends MetadataFieldMapper {

        static final String CONTENT_TYPE = "_mock_metadata";
        static final String FIELD_NAME = "_mock_metadata";

        protected MockMetadataMapper() {
            super(new KeywordFieldMapper.KeywordFieldType(FIELD_NAME));
        }

        @Override
        protected void parseCreateField(ParseContext context) throws IOException {
            if (context.parser().currentToken() == XContentParser.Token.VALUE_STRING) {
                context.doc().add(new StringField(FIELD_NAME, context.parser().text(), Field.Store.YES));
            } else {
                throw new IllegalArgumentException("Field [" + fieldType().name() + "] must be a string.");
            }
        }

        @Override
        public Iterator<Mapper> iterator() {
            return Collections.emptyIterator();
        }

        @Override
        protected String contentType() {
            return CONTENT_TYPE;
        }

        public static class Builder extends MetadataFieldMapper.Builder {

            protected Builder() {
                super(FIELD_NAME);
            }

            @Override
            protected List<Parameter<?>> getParameters() {
                return Collections.emptyList();
            }

            @Override
            public MockMetadataMapper build(BuilderContext context) {
                return new MockMetadataMapper();
            }
        }

        public static final TypeParser PARSER = new ConfigurableTypeParser(
            c -> new MockMetadataMapper(),
            c -> new MockMetadataMapper.Builder()) {
        };
    }

    @Override
    public Map<String, MetadataFieldMapper.TypeParser> getMetadataMappers() {
        return Collections.singletonMap(MockMetadataMapper.CONTENT_TYPE, MockMetadataMapper.PARSER);
    }
}


@@ -58,7 +58,7 @@ public class RoutingFieldMapperTests extends ESSingleNodeTestCase {
                 .startObject().field("_routing", "foo").endObject()),XContentType.JSON));
             fail("Expected failure to parse metadata field");
         } catch (MapperParsingException e) {
-            assertThat(e.getMessage(), e.getMessage(),
+            assertThat(e.getCause().getMessage(), e.getCause().getMessage(),
                 containsString("Field [_routing] is a metadata field and cannot be added inside a document"));
         }
     }


@@ -172,15 +172,6 @@ public class DataStreamTimestampFieldMapper extends MetadataFieldMapper {
         }
     }
 
-    @Override
-    public void preParse(ParseContext context) throws IOException {}
-
-    @Override
-    protected void parseCreateField(ParseContext context) throws IOException {
-        // Meta field doesn't create any fields, so this shouldn't happen.
-        throw new IllegalStateException(NAME + " field mapper cannot create fields");
-    }
-
     @Override
     public void postParse(ParseContext context) throws IOException {
         if (enabled == false) {