[7.x] Allow metadata fields in the _source (#62616)

Backports #61590 to 7.x

    So far we don't allow metadata fields in the document _source. However, in the case of the _doc_count field mapper (#58339) we want to be able to set this field explicitly in the document _source.

    This PR adds a method to the metadata field parsers that exposes whether the field can be included in the document source or not.
    This way each metadata field can configure whether it can be included in the document _source.
This commit is contained in:
Christos Soulios 2020-09-18 19:56:41 +03:00 committed by GitHub
parent 1dd8a5971f
commit 6a298970fd
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
23 changed files with 301 additions and 297 deletions

View File

@ -65,17 +65,6 @@ public class RankFeatureMetaFieldMapper extends MetadataFieldMapper {
super(RankFeatureMetaFieldType.INSTANCE);
}
@Override
public void preParse(ParseContext context) {}
@Override
protected void parseCreateField(ParseContext context) {
throw new AssertionError("Should never be called");
}
@Override
public void postParse(ParseContext context) {}
@Override
protected String contentType() {
return CONTENT_TYPE;

View File

@ -69,6 +69,7 @@ public class RankFeatureMetaFieldMapperTests extends ESSingleNodeTestCase {
BytesReference bytes = BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field(rfMetaField, 0).endObject());
MapperParsingException e = expectThrows(MapperParsingException.class, () ->
mapper.parse(new SourceToParse("test", "_doc", "1", bytes, XContentType.JSON)));
assertTrue(e.getMessage().contains("Field ["+ rfMetaField + "] is a metadata field and cannot be added inside a document."));
assertTrue(
e.getCause().getMessage().contains("Field ["+ rfMetaField + "] is a metadata field and cannot be added inside a document."));
}
}

View File

@ -80,23 +80,9 @@ public class SizeFieldMapper extends MetadataFieldMapper {
return this.enabled.value();
}
@Override
public void preParse(ParseContext context) {
}
@Override
public void postParse(ParseContext context) throws IOException {
// we post parse it so we get the size stored, possibly compressed (source will be preParse)
super.parse(context);
}
@Override
public void parse(ParseContext context) {
// nothing to do here, we call the parent in postParse
}
@Override
protected void parseCreateField(ParseContext context) {
if (enabled.value() == false) {
return;
}

View File

@ -26,7 +26,6 @@ import org.apache.lucene.search.Query;
import org.elasticsearch.common.Explicit;
import org.elasticsearch.index.query.QueryShardContext;
import java.io.IOException;
import java.util.Collections;
import java.util.List;
@ -104,25 +103,6 @@ public class AllFieldMapper extends MetadataFieldMapper {
this.enabled = enabled;
}
@Override
public void preParse(ParseContext context) {
}
@Override
public void postParse(ParseContext context) throws IOException {
super.parse(context);
}
@Override
public void parse(ParseContext context) throws IOException {
// we parse in post parse
}
@Override
protected void parseCreateField(ParseContext context) throws IOException {
// noop mapper
}
@Override
protected String contentType() {
return CONTENT_TYPE;

View File

@ -407,10 +407,7 @@ final class DocumentParser {
if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
paths = splitAndValidatePath(currentFieldName);
if (context.mapperService().isMetadataField(context.path().pathAsText(currentFieldName))) {
throw new MapperParsingException("Field [" + currentFieldName + "] is a metadata field and cannot be added inside"
+ " a document. Use the index API request parameters.");
} else if (containsDisabledObjectMapper(mapper, paths)) {
if (containsDisabledObjectMapper(mapper, paths)) {
parser.nextToken();
parser.skipChildren();
}
@ -499,7 +496,7 @@ final class DocumentParser {
String[] paths) throws IOException {
assert currentFieldName != null;
Mapper objectMapper = getMapper(mapper, currentFieldName, paths);
Mapper objectMapper = getMapper(context, mapper, currentFieldName, paths);
if (objectMapper != null) {
context.path().add(currentFieldName);
parseObjectOrField(context, objectMapper);
@ -536,7 +533,7 @@ final class DocumentParser {
String[] paths) throws IOException {
String arrayFieldName = lastFieldName;
Mapper mapper = getMapper(parentMapper, lastFieldName, paths);
Mapper mapper = getMapper(context, parentMapper, lastFieldName, paths);
if (mapper != null) {
// There is a concrete mapper for this field already. Need to check if the mapper
// expects an array, if so we pass the context straight to the mapper and if not
@ -613,7 +610,7 @@ final class DocumentParser {
throw new MapperParsingException("object mapping [" + parentMapper.name() + "] trying to serialize a value with"
+ " no field associated with it, current value [" + context.parser().textOrNull() + "]");
}
Mapper mapper = getMapper(parentMapper, currentFieldName, paths);
Mapper mapper = getMapper(context, parentMapper, currentFieldName, paths);
if (mapper != null) {
parseObjectOrField(context, mapper);
} else {
@ -630,7 +627,7 @@ final class DocumentParser {
private static void parseNullValue(ParseContext context, ObjectMapper parentMapper, String lastFieldName,
String[] paths) throws IOException {
// we can only handle null values if we have mappings for them
Mapper mapper = getMapper(parentMapper, lastFieldName, paths);
Mapper mapper = getMapper(context, parentMapper, lastFieldName, paths);
if (mapper != null) {
// TODO: passing null to an object seems bogus?
parseObjectOrField(context, mapper);
@ -898,9 +895,16 @@ final class DocumentParser {
}
// looks up a child mapper, but takes into account field names that expand to objects
private static Mapper getMapper(ObjectMapper objectMapper, String fieldName, String[] subfields) {
private static Mapper getMapper(final ParseContext context, ObjectMapper objectMapper, String fieldName, String[] subfields) {
String fieldPath = context.path().pathAsText(fieldName);
// Check if mapper is a metadata mapper first
Mapper mapper = context.docMapper().mapping().getMetadataMapper(fieldPath);
if (mapper != null) {
return mapper;
}
for (int i = 0; i < subfields.length - 1; ++i) {
Mapper mapper = objectMapper.getMapper(subfields[i]);
mapper = objectMapper.getMapper(subfields[i]);
if (mapper == null || (mapper instanceof ObjectMapper) == false) {
return null;
}

View File

@ -45,7 +45,6 @@ public class FieldNamesFieldMapper extends MetadataFieldMapper {
private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(FieldNamesFieldMapper.class);
public static final String NAME = "_field_names";
public static final String CONTENT_TYPE = "_field_names";
@ -91,6 +90,7 @@ public class FieldNamesFieldMapper extends MetadataFieldMapper {
this.indexVersionCreated = indexVersionCreated;
}
@Override
protected List<Parameter<?>> getParameters() {
return Collections.singletonList(enabled);
}
@ -158,28 +158,41 @@ public class FieldNamesFieldMapper extends MetadataFieldMapper {
return (FieldNamesFieldType) super.fieldType();
}
@Override
public void preParse(ParseContext context) {
}
@Override
public void postParse(ParseContext context) throws IOException {
if (context.indexSettings().getIndexVersionCreated().before(Version.V_6_1_0)) {
super.parse(context);
if (fieldType().isEnabled() == false) {
return;
}
for (ParseContext.Document document : context) {
final List<String> paths = new ArrayList<>(document.getFields().size());
String previousPath = ""; // used as a sentinel - field names can't be empty
for (IndexableField field : document.getFields()) {
final String path = field.name();
if (path.equals(previousPath)) {
// Sometimes mappers create multiple Lucene fields, eg. one for indexing,
// one for doc values and one for storing. Deduplicating is not required
// for correctness but this simple check helps save utf-8 conversions and
// gives Lucene fewer values to deal with.
continue;
}
paths.add(path);
previousPath = path;
}
for (String path : paths) {
for (String fieldName : extractFieldNames(path)) {
document.add(new Field(fieldType().name(), fieldName, Defaults.FIELD_TYPE));
}
}
}
}
}
@Override
public void parse(ParseContext context) throws IOException {
// Adding values to the _field_names field is handled by the mappers for each field type
}
static Iterable<String> extractFieldNames(final String fullPath) {
return new Iterable<String>() {
@Override
public Iterator<String> iterator() {
return new Iterator<String>() {
int endIndex = nextEndIndex(0);
private int nextEndIndex(int index) {
@ -211,34 +224,6 @@ public class FieldNamesFieldMapper extends MetadataFieldMapper {
};
}
@Override
protected void parseCreateField(ParseContext context) throws IOException {
if (fieldType().isEnabled() == false) {
return;
}
for (ParseContext.Document document : context) {
final List<String> paths = new ArrayList<>(document.getFields().size());
String previousPath = ""; // used as a sentinel - field names can't be empty
for (IndexableField field : document.getFields()) {
final String path = field.name();
if (path.equals(previousPath)) {
// Sometimes mappers create multiple Lucene fields, eg. one for indexing,
// one for doc values and one for storing. Deduplicating is not required
// for correctness but this simple check helps save utf-8 conversions and
// gives Lucene fewer values to deal with.
continue;
}
paths.add(path);
previousPath = path;
}
for (String path : paths) {
for (String fieldName : extractFieldNames(path)) {
document.add(new Field(fieldType().name(), fieldName, Defaults.FIELD_TYPE));
}
}
}
}
@Override
protected String contentType() {
return CONTENT_TYPE;

View File

@ -256,11 +256,6 @@ public class IdFieldMapper extends MetadataFieldMapper {
@Override
public void preParse(ParseContext context) throws IOException {
super.parse(context);
}
@Override
protected void parseCreateField(ParseContext context) throws IOException {
BytesRef id = Uid.encodeId(context.sourceToParse().id());
context.doc().add(new Field(NAME, id, Defaults.FIELD_TYPE));
}

View File

@ -82,22 +82,8 @@ public final class IgnoredFieldMapper extends MetadataFieldMapper {
super(IgnoredFieldType.INSTANCE);
}
@Override
public void preParse(ParseContext context) throws IOException {
}
@Override
public void postParse(ParseContext context) throws IOException {
super.parse(context);
}
@Override
public void parse(ParseContext context) throws IOException {
// done in post-parse
}
@Override
protected void parseCreateField(ParseContext context) throws IOException {
for (String field : context.getIgnoredFields()) {
context.doc().add(new Field(NAME, field, Defaults.FIELD_TYPE));
}

View File

@ -27,7 +27,6 @@ import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.search.aggregations.support.CoreValuesSourceType;
import org.elasticsearch.search.lookup.SearchLookup;
import java.io.IOException;
import java.util.Collections;
import java.util.function.Supplier;
@ -73,12 +72,6 @@ public class IndexFieldMapper extends MetadataFieldMapper {
super(IndexFieldType.INSTANCE);
}
@Override
public void preParse(ParseContext context) throws IOException {}
@Override
protected void parseCreateField(ParseContext context) throws IOException {}
@Override
protected String contentType() {
return CONTENT_TYPE;

View File

@ -48,6 +48,7 @@ public final class Mapping implements ToXContentFragment {
final RootObjectMapper root;
final MetadataFieldMapper[] metadataMappers;
final Map<Class<? extends MetadataFieldMapper>, MetadataFieldMapper> metadataMappersMap;
final Map<String, MetadataFieldMapper> metadataMappersByName;
final Map<String, Object> meta;
public Mapping(Version indexCreated, RootObjectMapper rootObjectMapper,
@ -55,8 +56,10 @@ public final class Mapping implements ToXContentFragment {
this.indexCreated = indexCreated;
this.metadataMappers = metadataMappers;
Map<Class<? extends MetadataFieldMapper>, MetadataFieldMapper> metadataMappersMap = new HashMap<>();
Map<String, MetadataFieldMapper> metadataMappersByName = new HashMap<>();
for (MetadataFieldMapper metadataMapper : metadataMappers) {
metadataMappersMap.put(metadataMapper.getClass(), metadataMapper);
metadataMappersByName.put(metadataMapper.name(), metadataMapper);
}
this.root = rootObjectMapper;
// keep root mappers sorted for consistent serialization
@ -67,6 +70,7 @@ public final class Mapping implements ToXContentFragment {
}
});
this.metadataMappersMap = unmodifiableMap(metadataMappersMap);
this.metadataMappersByName = unmodifiableMap(metadataMappersByName);
this.meta = meta;
}
@ -136,6 +140,10 @@ public final class Mapping implements ToXContentFragment {
return new Mapping(indexCreated, mergedRoot, mergedMetadataMappers.values().toArray(new MetadataFieldMapper[0]), mergedMeta);
}
public MetadataFieldMapper getMetadataMapper(String mapperName) {
return metadataMappersByName.get(mapperName);
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
root.toXContent(builder, params, new ToXContent() {

View File

@ -156,10 +156,18 @@ public abstract class MetadataFieldMapper extends ParametrizedFieldMapper {
return builder.endObject();
}
@Override
protected void parseCreateField(ParseContext context) throws IOException {
throw new MapperParsingException("Field [" + name() + "] is a metadata field and cannot be added inside"
+ " a document. Use the index API request parameters.");
}
/**
* Called before {@link FieldMapper#parse(ParseContext)} on the {@link RootObjectMapper}.
*/
public abstract void preParse(ParseContext context) throws IOException;
public void preParse(ParseContext context) throws IOException {
// do nothing
}
/**
* Called after {@link FieldMapper#parse(ParseContext)} on the {@link RootObjectMapper}.
@ -172,5 +180,4 @@ public abstract class MetadataFieldMapper extends ParametrizedFieldMapper {
public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup lookup, String format) {
throw new UnsupportedOperationException("Cannot fetch values for internal field [" + name() + "].");
}
}

View File

@ -117,18 +117,6 @@ public class RoutingFieldMapper extends MetadataFieldMapper {
@Override
public void preParse(ParseContext context) throws IOException {
super.parse(context);
}
@Override
public void parse(ParseContext context) throws IOException {
// no need ot parse here, we either get the routing in the sourceToParse
// or we don't have routing, if we get it in sourceToParse, we process it in preParse
// which will always be called
}
@Override
protected void parseCreateField(ParseContext context) throws IOException {
String routing = context.sourceToParse().routing();
if (routing != null) {
context.doc().add(new Field(fieldType().name(), routing, Defaults.FIELD_TYPE));

View File

@ -183,11 +183,6 @@ public class SeqNoFieldMapper extends MetadataFieldMapper {
@Override
public void preParse(ParseContext context) throws IOException {
super.parse(context);
}
@Override
protected void parseCreateField(ParseContext context) throws IOException {
// see InternalEngine.innerIndex to see where the real version value is set
// also see ParsedDocument.updateSeqID (called by innerIndex)
SequenceIDFields seqID = SequenceIDFields.emptySeqID();
@ -197,11 +192,6 @@ public class SeqNoFieldMapper extends MetadataFieldMapper {
context.doc().add(seqID.primaryTerm);
}
@Override
public void parse(ParseContext context) throws IOException {
// fields are added in parseCreateField
}
@Override
public void postParse(ParseContext context) throws IOException {
// In the case of nested docs, let's fill nested docs with the original

View File

@ -156,16 +156,6 @@ public class SourceFieldMapper extends MetadataFieldMapper {
@Override
public void preParse(ParseContext context) throws IOException {
super.parse(context);
}
@Override
public void parse(ParseContext context) throws IOException {
// nothing to do here, we will call it in pre parse
}
@Override
protected void parseCreateField(ParseContext context) throws IOException {
BytesReference originalSource = context.sourceToParse().source();
XContentType contentType = context.sourceToParse().getXContentType();
final BytesReference adaptedSource = applyFilters(originalSource, contentType);

View File

@ -19,7 +19,7 @@
package org.elasticsearch.index.mapper;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldType;
import org.apache.lucene.document.SortedSetDocValuesField;
import org.apache.lucene.index.IndexOptions;
@ -172,7 +172,6 @@ public class TypeFieldMapper extends MetadataFieldMapper {
QueryParsers.setRewriteMethod(query, method);
return query;
}
}
/**
@ -263,16 +262,6 @@ public class TypeFieldMapper extends MetadataFieldMapper {
@Override
public void preParse(ParseContext context) throws IOException {
super.parse(context);
}
@Override
public void parse(ParseContext context) throws IOException {
// we parse in pre parse
}
@Override
protected void parseCreateField(ParseContext context) throws IOException {
if (fieldType.indexOptions() == IndexOptions.NONE && !fieldType.stored()) {
return;
}

View File

@ -68,22 +68,12 @@ public class VersionFieldMapper extends MetadataFieldMapper {
@Override
public void preParse(ParseContext context) throws IOException {
super.parse(context);
}
@Override
protected void parseCreateField(ParseContext context) throws IOException {
// see InternalEngine.updateVersion to see where the real version value is set
final Field version = new NumericDocValuesField(NAME, -1L);
context.version(version);
context.doc().add(version);
}
@Override
public void parse(ParseContext context) throws IOException {
// _version added in preparse
}
@Override
public void postParse(ParseContext context) throws IOException {
// In the case of nested docs, let's fill nested docs with version=1 so that Lucene doesn't write a Bitset for documents

View File

@ -46,7 +46,6 @@ import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.MetadataFieldMapper;
import org.elasticsearch.index.mapper.ParametrizedFieldMapper;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.mapper.TextSearchInfo;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.indices.IndexTemplateMissingException;
@ -1610,16 +1609,6 @@ public class MetadataIndexTemplateServiceTests extends ESSingleNodeTestCase {
return new MetadataTimestampFieldBuilder().init(this);
}
@Override
public void preParse(ParseContext context) {
}
@Override
protected void parseCreateField(ParseContext context) {
}
@Override
protected String contentType() {
return "_data_stream_timestamp";

View File

@ -31,15 +31,18 @@ import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.mapper.ParseContext.Document;
import org.elasticsearch.plugins.Plugin;
import java.io.IOException;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import static java.util.Collections.singletonList;
import static org.elasticsearch.test.StreamsUtils.copyToBytesFromClasspath;
import static org.elasticsearch.test.StreamsUtils.copyToStringFromClasspath;
import static org.hamcrest.Matchers.containsString;
@ -50,6 +53,11 @@ import static org.hamcrest.Matchers.notNullValue;
public class DocumentParserTests extends MapperServiceTestCase {
@Override
protected Collection<? extends Plugin> getPlugins() {
return singletonList(new MockMetadataMapperPlugin());
}
public void testFieldDisabled() throws Exception {
DocumentMapper mapper = createDocumentMapper(mapping(b -> {
b.startObject("foo").field("enabled", false).endObject();
@ -154,10 +162,10 @@ public class DocumentParserTests extends MapperServiceTestCase {
}));
MapperParsingException e = expectThrows(MapperParsingException.class,
() -> mapper.parse(source(b -> b.field("field.bar", 123))));
() -> mapper.parse(source(b -> b.field("field.bar", 123))));
assertEquals(
"Cannot add a value for field [field.bar] since one of the intermediate objects is mapped as a nested object: [field]",
e.getMessage());
"Cannot add a value for field [field.bar] since one of the intermediate objects is mapped as a nested object: [field]",
e.getMessage());
}
public void testUnexpectedFieldMappingType() throws Exception {
@ -167,12 +175,12 @@ public class DocumentParserTests extends MapperServiceTestCase {
}));
{
MapperException exception = expectThrows(MapperException.class,
() -> mapper.parse(source(b -> b.field("foo", true))));
() -> mapper.parse(source(b -> b.field("foo", true))));
assertThat(exception.getMessage(), containsString("failed to parse field [foo] of type [long] in document with id '1'"));
}
{
MapperException exception = expectThrows(MapperException.class,
() -> mapper.parse(source(b -> b.field("bar", "bar"))));
() -> mapper.parse(source(b -> b.field("bar", "bar"))));
assertThat(exception.getMessage(), containsString("failed to parse field [bar] of type [boolean] in document with id '1'"));
}
}
@ -196,10 +204,10 @@ public class DocumentParserTests extends MapperServiceTestCase {
}));
MapperParsingException e = expectThrows(MapperParsingException.class,
() -> mapper.parse(source(b -> b.field("foo.bar", 42))));
() -> mapper.parse(source(b -> b.field("foo.bar", 42))));
assertEquals(
"It is forbidden to create dynamic nested objects ([foo]) through `copy_to` or dots in field names",
e.getMessage());
"It is forbidden to create dynamic nested objects ([foo]) through `copy_to` or dots in field names",
e.getMessage());
}
public void testNestedHaveIdAndTypeFields() throws Exception {
@ -350,7 +358,7 @@ public class DocumentParserTests extends MapperServiceTestCase {
}
Mapper.Builder<?> builder = new ObjectMapper.Builder<>(nameParts[nameParts.length - 1]).enabled(true);
Mapper.BuilderContext builderContext = new Mapper.BuilderContext(context.indexSettings().getSettings(), context.path());
return (ObjectMapper)builder.build(builderContext);
return (ObjectMapper) builder.build(builderContext);
}
public void testEmptyMappingUpdate() throws Exception {
@ -374,8 +382,8 @@ public class DocumentParserTests extends MapperServiceTestCase {
Mapper xMapper = mapping.root().getMapper("x");
assertNotNull(xMapper);
assertTrue(xMapper instanceof ObjectMapper);
assertNotNull(((ObjectMapper)xMapper).getMapper("foo"));
assertNull(((ObjectMapper)xMapper).getMapper("subx"));
assertNotNull(((ObjectMapper) xMapper).getMapper("foo"));
assertNull(((ObjectMapper) xMapper).getMapper("subx"));
}
public void testMultipleSubfieldMappingUpdate() throws Exception {
@ -388,9 +396,9 @@ public class DocumentParserTests extends MapperServiceTestCase {
Mapper xMapper = mapping.root().getMapper("x");
assertNotNull(xMapper);
assertTrue(xMapper instanceof ObjectMapper);
assertNotNull(((ObjectMapper)xMapper).getMapper("foo"));
assertNotNull(((ObjectMapper)xMapper).getMapper("bar"));
assertNull(((ObjectMapper)xMapper).getMapper("subx"));
assertNotNull(((ObjectMapper) xMapper).getMapper("foo"));
assertNotNull(((ObjectMapper) xMapper).getMapper("bar"));
assertNull(((ObjectMapper) xMapper).getMapper("subx"));
}
public void testDeepSubfieldMappingUpdate() throws Exception {
@ -401,10 +409,10 @@ public class DocumentParserTests extends MapperServiceTestCase {
Mapper xMapper = mapping.root().getMapper("x");
assertNotNull(xMapper);
assertTrue(xMapper instanceof ObjectMapper);
Mapper subxMapper = ((ObjectMapper)xMapper).getMapper("subx");
Mapper subxMapper = ((ObjectMapper) xMapper).getMapper("subx");
assertTrue(subxMapper instanceof ObjectMapper);
assertNotNull(((ObjectMapper)subxMapper).getMapper("foo"));
assertNull(((ObjectMapper)subxMapper).getMapper("subsubx"));
assertNotNull(((ObjectMapper) subxMapper).getMapper("foo"));
assertNull(((ObjectMapper) subxMapper).getMapper("subsubx"));
}
public void testDeepSubfieldAfterSubfieldMappingUpdate() throws Exception {
@ -417,10 +425,10 @@ public class DocumentParserTests extends MapperServiceTestCase {
Mapper xMapper = mapping.root().getMapper("x");
assertNotNull(xMapper);
assertTrue(xMapper instanceof ObjectMapper);
assertNotNull(((ObjectMapper)xMapper).getMapper("a"));
Mapper subxMapper = ((ObjectMapper)xMapper).getMapper("subx");
assertNotNull(((ObjectMapper) xMapper).getMapper("a"));
Mapper subxMapper = ((ObjectMapper) xMapper).getMapper("subx");
assertTrue(subxMapper instanceof ObjectMapper);
assertNotNull(((ObjectMapper)subxMapper).getMapper("b"));
assertNotNull(((ObjectMapper) subxMapper).getMapper("b"));
}
public void testObjectMappingUpdate() throws Exception {
@ -436,10 +444,10 @@ public class DocumentParserTests extends MapperServiceTestCase {
Mapper fooMapper = mapping.root().getMapper("foo");
assertNotNull(fooMapper);
assertTrue(fooMapper instanceof ObjectMapper);
assertNotNull(((ObjectMapper)fooMapper).getMapper("field"));
Mapper barMapper = ((ObjectMapper)fooMapper).getMapper("bar");
assertNotNull(((ObjectMapper) fooMapper).getMapper("field"));
Mapper barMapper = ((ObjectMapper) fooMapper).getMapper("bar");
assertTrue(barMapper instanceof ObjectMapper);
assertNotNull(((ObjectMapper)barMapper).getMapper("baz"));
assertNotNull(((ObjectMapper) barMapper).getMapper("baz"));
}
public void testDynamicGeoPointArrayWithTemplate() throws Exception {
@ -499,7 +507,8 @@ public class DocumentParserTests extends MapperServiceTestCase {
}
public void testDynamicLongArray() throws Exception {
DocumentMapper mapper = createDocumentMapper(mapping(b -> {}));
DocumentMapper mapper = createDocumentMapper(mapping(b -> {
}));
ParsedDocument doc = mapper.parse(source(b -> b.startArray("foo").value(0).value(1).endArray()));
assertEquals(4, doc.rootDoc().getFields("foo").length);
}
@ -513,7 +522,7 @@ public class DocumentParserTests extends MapperServiceTestCase {
public void testDynamicStrictLongArray() throws Exception {
DocumentMapper mapper = createDocumentMapper(topMapping(b -> b.field("dynamic", "strict")));
StrictDynamicMappingException exception = expectThrows(StrictDynamicMappingException.class,
() -> mapper.parse(source(b -> b.startArray("foo").value(0).value(1).endArray())));
() -> mapper.parse(source(b -> b.startArray("foo").value(0).value(1).endArray())));
assertEquals("mapping set to strict, dynamic introduction of [foo] within [_doc] is not allowed", exception.getMessage());
}
@ -579,7 +588,7 @@ public class DocumentParserTests extends MapperServiceTestCase {
public void testDynamicStrictObject() throws Exception {
DocumentMapper mapper = createDocumentMapper(topMapping(b -> b.field("dynamic", "strict")));
StrictDynamicMappingException exception = expectThrows(StrictDynamicMappingException.class,
() -> mapper.parse(source(b -> b.startObject("foo").field("bar", "baz").endObject())));
() -> mapper.parse(source(b -> b.startObject("foo").field("bar", "baz").endObject())));
assertEquals("mapping set to strict, dynamic introduction of [foo] within [_doc] is not allowed", exception.getMessage());
}
@ -592,7 +601,7 @@ public class DocumentParserTests extends MapperServiceTestCase {
public void testDynamicStrictValue() throws Exception {
DocumentMapper mapper = createDocumentMapper(topMapping(b -> b.field("dynamic", "strict")));
StrictDynamicMappingException exception = expectThrows(StrictDynamicMappingException.class,
() -> mapper.parse(source(b -> b.field("bar", "baz"))));
() -> mapper.parse(source(b -> b.field("bar", "baz"))));
assertEquals("mapping set to strict, dynamic introduction of [bar] within [_doc] is not allowed", exception.getMessage());
}
@ -605,7 +614,7 @@ public class DocumentParserTests extends MapperServiceTestCase {
public void testDynamicStrictNull() throws Exception {
DocumentMapper mapper = createDocumentMapper(topMapping(b -> b.field("dynamic", "strict")));
StrictDynamicMappingException exception = expectThrows(StrictDynamicMappingException.class,
() -> mapper.parse(source(b -> b.nullField("bar"))));
() -> mapper.parse(source(b -> b.nullField("bar"))));
assertEquals("mapping set to strict, dynamic introduction of [bar] within [_doc] is not allowed", exception.getMessage());
}
@ -669,7 +678,8 @@ public class DocumentParserTests extends MapperServiceTestCase {
}
public void testDynamicDottedFieldNameLongArray() throws Exception {
DocumentMapper mapper = createDocumentMapper(mapping(b -> {}));
DocumentMapper mapper = createDocumentMapper(mapping(b -> {
}));
ParsedDocument doc = mapper.parse(source(b -> b.startArray("foo.bar.baz").value(0).value(1).endArray()));
assertEquals(4, doc.rootDoc().getFields("foo.bar.baz").length);
@ -734,9 +744,9 @@ public class DocumentParserTests extends MapperServiceTestCase {
public void testDynamicDottedFieldNameLongArrayWithExistingParentWrongType() throws Exception {
DocumentMapper mapper = createDocumentMapper(fieldMapping(b -> b.field("type", "long")));
MapperParsingException exception = expectThrows(MapperParsingException.class,
() -> mapper.parse(source(b -> b.startArray("field.bar.baz").value(0).value(1).endArray())));
() -> mapper.parse(source(b -> b.startArray("field.bar.baz").value(0).value(1).endArray())));
assertEquals("Could not dynamically add mapping for field [field.bar.baz]. "
+ "Existing mapping for [field] must be of type object but found [long].", exception.getMessage());
+ "Existing mapping for [field] must be of type object but found [long].", exception.getMessage());
}
public void testDynamicFalseDottedFieldNameLongArray() throws Exception {
@ -748,12 +758,13 @@ public class DocumentParserTests extends MapperServiceTestCase {
public void testDynamicStrictDottedFieldNameLongArray() throws Exception {
DocumentMapper mapper = createDocumentMapper(topMapping(b -> b.field("dynamic", "strict")));
StrictDynamicMappingException exception = expectThrows(StrictDynamicMappingException.class,
() -> mapper.parse(source(b -> b.startArray("foo.bar.baz").value(0).value(1).endArray())));
() -> mapper.parse(source(b -> b.startArray("foo.bar.baz").value(0).value(1).endArray())));
assertEquals("mapping set to strict, dynamic introduction of [foo] within [_doc] is not allowed", exception.getMessage());
}
public void testDynamicDottedFieldNameLong() throws Exception {
DocumentMapper mapper = createDocumentMapper(mapping(b -> {}));
DocumentMapper mapper = createDocumentMapper(mapping(b -> {
}));
ParsedDocument doc = mapper.parse(source(b -> b.field("foo.bar.baz", 0)));
assertEquals(2, doc.rootDoc().getFields("foo.bar.baz").length);
Mapper fooMapper = doc.dynamicMappingsUpdate().root().getMapper("foo");
@ -816,9 +827,9 @@ public class DocumentParserTests extends MapperServiceTestCase {
public void testDynamicDottedFieldNameLongWithExistingParentWrongType() throws Exception {
DocumentMapper mapper = createDocumentMapper(fieldMapping(b -> b.field("type", "long")));
MapperParsingException exception = expectThrows(MapperParsingException.class,
() -> mapper.parse(source(b -> b.field("field.bar.baz", 0))));
() -> mapper.parse(source(b -> b.field("field.bar.baz", 0))));
assertEquals("Could not dynamically add mapping for field [field.bar.baz]. "
+ "Existing mapping for [field] must be of type object but found [long].", exception.getMessage());
+ "Existing mapping for [field] must be of type object but found [long].", exception.getMessage());
}
public void testDynamicFalseDottedFieldNameLong() throws Exception {
@ -830,12 +841,13 @@ public class DocumentParserTests extends MapperServiceTestCase {
public void testDynamicStrictDottedFieldNameLong() throws Exception {
DocumentMapper mapper = createDocumentMapper(topMapping(b -> b.field("dynamic", "strict")));
StrictDynamicMappingException exception = expectThrows(StrictDynamicMappingException.class,
() -> mapper.parse(source(b -> b.field("foo.bar.baz", 0))));
() -> mapper.parse(source(b -> b.field("foo.bar.baz", 0))));
assertEquals("mapping set to strict, dynamic introduction of [foo] within [_doc] is not allowed", exception.getMessage());
}
public void testDynamicDottedFieldNameObject() throws Exception {
DocumentMapper mapper = createDocumentMapper(mapping(b -> {}));
DocumentMapper mapper = createDocumentMapper(mapping(b -> {
}));
ParsedDocument doc = mapper.parse(source(b -> b.startObject("foo.bar.baz").field("a", 0).endObject()));
assertEquals(2, doc.rootDoc().getFields("foo.bar.baz.a").length);
Mapper fooMapper = doc.dynamicMappingsUpdate().root().getMapper("foo");
@ -908,9 +920,9 @@ public class DocumentParserTests extends MapperServiceTestCase {
public void testDynamicDottedFieldNameObjectWithExistingParentWrongType() throws Exception {
DocumentMapper mapper = createDocumentMapper(fieldMapping(b -> b.field("type", "long")));
MapperParsingException exception = expectThrows(MapperParsingException.class,
() -> mapper.parse(source(b -> b.startObject("field.bar.baz").field("a", 0).endObject())));
() -> mapper.parse(source(b -> b.startObject("field.bar.baz").field("a", 0).endObject())));
assertEquals("Could not dynamically add mapping for field [field.bar.baz]. "
+ "Existing mapping for [field] must be of type object but found [long].", exception.getMessage());
+ "Existing mapping for [field] must be of type object but found [long].", exception.getMessage());
}
public void testDynamicFalseDottedFieldNameObject() throws Exception {
@ -922,20 +934,46 @@ public class DocumentParserTests extends MapperServiceTestCase {
public void testDynamicStrictDottedFieldNameObject() throws Exception {
DocumentMapper mapper = createDocumentMapper(topMapping(b -> b.field("dynamic", "strict")));
StrictDynamicMappingException exception = expectThrows(StrictDynamicMappingException.class,
() -> mapper.parse(source(b -> b.startObject("foo.bar.baz").field("a", 0).endObject())));
() -> mapper.parse(source(b -> b.startObject("foo.bar.baz").field("a", 0).endObject())));
assertEquals("mapping set to strict, dynamic introduction of [foo] within [_doc] is not allowed", exception.getMessage());
}
public void testDocumentContainsMetadataField() throws Exception {
DocumentMapper mapper = createDocumentMapper(mapping(b -> {}));
DocumentMapper mapper = createDocumentMapper(mapping(b -> {
}));
MapperParsingException e = expectThrows(MapperParsingException.class, () ->
mapper.parse(source(b -> b.field("_field_names", 0))));
assertTrue(e.getMessage(),
e.getMessage().contains("Field [_field_names] is a metadata field and cannot be added inside a document."));
assertTrue(e.getCause().getMessage(),
e.getCause().getMessage().contains("Field [_field_names] is a metadata field and cannot be added inside a document."));
mapper.parse(source(b -> b.field("foo._field_names", 0))); // parses without error
}
/**
 * Checks that a metadata field mapper which opts into being set from the document
 * _source (the {@code _mock_metadata} field registered by
 * {@link MockMetadataMapperPlugin}) is actually parsed rather than rejected as a
 * disallowed metadata field, and that malformed values for it still fail parsing.
 */
public void testDocumentContainsAllowedMetadataField() throws Exception {
    DocumentMapper mapper = createDocumentMapper(mapping(b -> {
    }));
    {
        // A metadata field that parses a value fails to parse a null value
        MapperParsingException e = expectThrows(MapperParsingException.class, () ->
            mapper.parse(source(b -> b.nullField(MockMetadataMapperPlugin.MockMetadataMapper.CONTENT_TYPE))));
        assertTrue(e.getMessage(), e.getMessage().contains("failed to parse field [_mock_metadata]"));
    }
    {
        // A metadata field that parses a value fails to parse an object
        MapperParsingException e = expectThrows(MapperParsingException.class, () ->
            mapper.parse(source(b -> b.field(MockMetadataMapperPlugin.MockMetadataMapper.CONTENT_TYPE)
                .startObject().field("sub-field", "true").endObject())));
        assertTrue(e.getMessage(), e.getMessage().contains("failed to parse field [_mock_metadata]"));
    }
    {
        // A plain string value is accepted: the mock mapper indexes it and the
        // stored field round-trips through the parsed document unchanged.
        ParsedDocument doc = mapper.parse(source(b ->
            b.field(MockMetadataMapperPlugin.MockMetadataMapper.CONTENT_TYPE, "mock-metadata-field-value")
        ));
        IndexableField field = doc.rootDoc().getField(MockMetadataMapperPlugin.MockMetadataMapper.CONTENT_TYPE);
        assertEquals("mock-metadata-field-value", field.stringValue());
    }
}
public void testSimpleMapper() throws Exception {
DocumentMapper docMapper = createDocumentMapper(mapping(b -> {
b.startObject("name");
@ -970,7 +1008,8 @@ public class DocumentParserTests extends MapperServiceTestCase {
public void testParseToJsonAndParse() throws Exception {
String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/simple/test-mapping.json");
MapperService mapperService = createMapperService(mapping(b -> {}));
MapperService mapperService = createMapperService(mapping(b -> {
}));
merge("person", mapperService, mapping);
String builtMapping = mapperService.documentMapper().mappingSource().string();
// reparse it
@ -1015,7 +1054,8 @@ public class DocumentParserTests extends MapperServiceTestCase {
}
public void testNoDocumentSent() throws Exception {
DocumentMapper docMapper = createDocumentMapper(mapping(b -> {}));
DocumentMapper docMapper = createDocumentMapper(mapping(b -> {
}));
BytesReference json = new BytesArray("".getBytes(StandardCharsets.UTF_8));
MapperParsingException e = expectThrows(MapperParsingException.class,
() -> docMapper.parse(new SourceToParse("test", "_doc", "1", json, XContentType.JSON)));
@ -1023,7 +1063,8 @@ public class DocumentParserTests extends MapperServiceTestCase {
}
public void testNoLevel() throws Exception {
DocumentMapper defaultMapper = createDocumentMapper(mapping(b -> {}));
DocumentMapper defaultMapper = createDocumentMapper(mapping(b -> {
}));
ParsedDocument doc = defaultMapper.parse(source(b -> {
b.field("test1", "value1");
b.field("test2", "value2");
@ -1038,7 +1079,8 @@ public class DocumentParserTests extends MapperServiceTestCase {
// we no longer have types?
public void testTypeLevel() throws Exception {
DocumentMapper defaultMapper = createDocumentMapper(mapping(b -> {}));
DocumentMapper defaultMapper = createDocumentMapper(mapping(b -> {
}));
ParsedDocument doc = defaultMapper.parse(source(b -> {
b.startObject("type");
@ -1056,7 +1098,8 @@ public class DocumentParserTests extends MapperServiceTestCase {
}
public void testNoLevelWithFieldTypeAsValue() throws Exception {
DocumentMapper defaultMapper = createDocumentMapper(mapping(b -> {}));
DocumentMapper defaultMapper = createDocumentMapper(mapping(b -> {
}));
ParsedDocument doc = defaultMapper.parse(source(b -> {
b.field("type", "value_type");
@ -1073,7 +1116,8 @@ public class DocumentParserTests extends MapperServiceTestCase {
public void testTypeLevelWithFieldTypeAsValue() throws Exception {
DocumentMapper defaultMapper = createDocumentMapper(mapping(b -> {}));
DocumentMapper defaultMapper = createDocumentMapper(mapping(b -> {
}));
ParsedDocument doc = defaultMapper.parse(source(b -> {
b.startObject("type");
@ -1093,7 +1137,8 @@ public class DocumentParserTests extends MapperServiceTestCase {
}
public void testNoLevelWithFieldTypeAsObject() throws Exception {
DocumentMapper defaultMapper = createDocumentMapper(mapping(b -> {}));
DocumentMapper defaultMapper = createDocumentMapper(mapping(b -> {
}));
ParsedDocument doc = defaultMapper.parse(source(b -> {
b.startObject("type").field("type_field", "type_value").endObject();
@ -1109,7 +1154,8 @@ public class DocumentParserTests extends MapperServiceTestCase {
}
public void testTypeLevelWithFieldTypeAsObject() throws Exception {
DocumentMapper defaultMapper = createDocumentMapper(mapping(b -> {}));
DocumentMapper defaultMapper = createDocumentMapper(mapping(b -> {
}));
ParsedDocument doc = defaultMapper.parse(source(b -> {
b.startObject("type");
@ -1129,7 +1175,8 @@ public class DocumentParserTests extends MapperServiceTestCase {
}
public void testNoLevelWithFieldTypeAsValueNotFirst() throws Exception {
DocumentMapper defaultMapper = createDocumentMapper(mapping(b -> {}));
DocumentMapper defaultMapper = createDocumentMapper(mapping(b -> {
}));
ParsedDocument doc = defaultMapper.parse(source(b -> {
b.startObject("type");
@ -1149,7 +1196,8 @@ public class DocumentParserTests extends MapperServiceTestCase {
}
public void testTypeLevelWithFieldTypeAsValueNotFirst() throws Exception {
DocumentMapper defaultMapper = createDocumentMapper(mapping(b -> {}));
DocumentMapper defaultMapper = createDocumentMapper(mapping(b -> {
}));
ParsedDocument doc = defaultMapper.parse(source(b -> {
b.startObject("type");
@ -1169,7 +1217,8 @@ public class DocumentParserTests extends MapperServiceTestCase {
}
public void testNoLevelWithFieldTypeAsObjectNotFirst() throws Exception {
DocumentMapper defaultMapper = createDocumentMapper(mapping(b -> {}));
DocumentMapper defaultMapper = createDocumentMapper(mapping(b -> {
}));
ParsedDocument doc = defaultMapper.parse(source(b -> {
b.field("test1", "value1");
@ -1186,7 +1235,8 @@ public class DocumentParserTests extends MapperServiceTestCase {
}
public void testTypeLevelWithFieldTypeAsObjectNotFirst() throws Exception {
DocumentMapper defaultMapper = createDocumentMapper(mapping(b -> {}));
DocumentMapper defaultMapper = createDocumentMapper(mapping(b -> {
}));
ParsedDocument doc = defaultMapper.parse(source(b -> {
b.startObject("type");
@ -1231,7 +1281,8 @@ public class DocumentParserTests extends MapperServiceTestCase {
}
public void testDynamicFieldsStartingAndEndingWithDot() throws Exception {
MapperService mapperService = createMapperService(mapping(b -> {}));
MapperService mapperService = createMapperService(mapping(b -> {
}));
merge(mapperService, dynamicMapping(mapperService.documentMapper().parse(source(b -> {
b.startArray("top.");
{
@ -1250,64 +1301,66 @@ public class DocumentParserTests extends MapperServiceTestCase {
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
() -> mapperService.documentMapper().parse(source(b -> {
b.startArray("top.");
{
b.startObject();
b.startArray("top.");
{
b.startArray("foo.");
b.startObject();
{
b.startObject();
b.startArray("foo.");
{
b.startObject("bar.");
b.startObject();
{
b.startObject("aoeu").field("a", 1).field("b", 2).endObject();
b.startObject("bar.");
{
b.startObject("aoeu").field("a", 1).field("b", 2).endObject();
}
b.endObject();
}
b.endObject();
}
b.endObject();
b.endArray();
}
b.endArray();
b.endObject();
}
b.endObject();
}
b.endArray();
})));
b.endArray();
})));
assertThat(e.getMessage(),
containsString("object field starting or ending with a [.] makes object resolution ambiguous: [top..foo..bar]"));
containsString("object field starting or ending with a [.] makes object resolution ambiguous: [top..foo..bar]"));
}
public void testDynamicFieldsEmptyName() throws Exception {
DocumentMapper mapper = createDocumentMapper(mapping(b -> {}));
DocumentMapper mapper = createDocumentMapper(mapping(b -> {
}));
IllegalArgumentException emptyFieldNameException = expectThrows(IllegalArgumentException.class,
() -> mapper.parse(source(b -> {
b.startArray("top.");
() -> mapper.parse(source(b -> {
b.startArray("top.");
{
b.startObject();
{
b.startObject();
{
b.startObject("aoeu").field("a", 1).field(" ", 2).endObject();
}
b.endObject();
b.startObject("aoeu").field("a", 1).field(" ", 2).endObject();
}
b.endArray();
})));
b.endObject();
}
b.endArray();
})));
assertThat(emptyFieldNameException.getMessage(), containsString(
"object field cannot contain only whitespace: ['top.aoeu. ']"));
"object field cannot contain only whitespace: ['top.aoeu. ']"));
}
public void testBlankFieldNames() throws Exception {
DocumentMapper mapper = createDocumentMapper(mapping(b -> {}));
DocumentMapper mapper = createDocumentMapper(mapping(b -> {
}));
MapperParsingException err = expectThrows(MapperParsingException.class, () ->
mapper.parse(source(b -> b.field("", "foo"))));
mapper.parse(source(b -> b.field("", "foo"))));
assertThat(err.getCause(), notNullValue());
assertThat(err.getCause().getMessage(), containsString("field name cannot be an empty string"));
err = expectThrows(MapperParsingException.class, () ->
mapper.parse(source(b -> b.startObject("foo").field("", "bar").endObject())));
mapper.parse(source(b -> b.startObject("foo").field("", "bar").endObject())));
assertThat(err.getCause(), notNullValue());
assertThat(err.getCause().getMessage(), containsString("field name cannot be an empty string"));
}
@ -1329,7 +1382,7 @@ public class DocumentParserTests extends MapperServiceTestCase {
assertEquals("Cannot write to a field alias [alias-field].", exception.getCause().getMessage());
}
public void testCopyToFieldAlias() throws Exception {
public void testCopyToFieldAlias() throws Exception {
DocumentMapper mapper = createDocumentMapper(mapping(b -> {
b.startObject("alias-field");
{
@ -1372,9 +1425,9 @@ public class DocumentParserTests extends MapperServiceTestCase {
public void testTypeless() throws IOException {
String mapping = Strings.toString(XContentFactory.jsonBuilder()
.startObject().startObject("type").startObject("properties")
.startObject("foo").field("type", "keyword").endObject()
.endObject().endObject().endObject());
.startObject().startObject("type").startObject("properties")
.startObject("foo").field("type", "keyword").endObject()
.endObject().endObject().endObject());
DocumentMapper mapper = createDocumentMapper("type", mapping);
ParsedDocument doc = mapper.parse(source(b -> b.field("foo", "1234")));

View File

@ -37,11 +37,6 @@ public class ExternalMetadataMapper extends MetadataFieldMapper {
super(new BooleanFieldMapper.BooleanFieldType(FIELD_NAME));
}
@Override
protected void parseCreateField(ParseContext context) throws IOException {
// handled in post parse
}
@Override
public Iterator<Mapper> iterator() {
return Collections.emptyIterator();
@ -52,10 +47,6 @@ public class ExternalMetadataMapper extends MetadataFieldMapper {
return CONTENT_TYPE;
}
@Override
public void preParse(ParseContext context) throws IOException {
}
@Override
public void postParse(ParseContext context) throws IOException {
context.doc().add(new StringField(FIELD_NAME, FIELD_VALUE, Store.YES));

View File

@ -59,7 +59,8 @@ public class IdFieldMapperTests extends ESSingleNodeTestCase {
.startObject().field("_id", "1").endObject()), XContentType.JSON));
fail("Expected failure to parse metadata field");
} catch (MapperParsingException e) {
assertTrue(e.getMessage(), e.getMessage().contains("Field [_id] is a metadata field and cannot be added inside a document"));
assertTrue(e.getCause().getMessage(),
e.getCause().getMessage().contains("Field [_id] is a metadata field and cannot be added inside a document"));
}
}

View File

@ -0,0 +1,98 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.mapper;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.StringField;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.plugins.MapperPlugin;
import org.elasticsearch.plugins.Plugin;
import java.io.IOException;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
/**
 * Mapper plugin providing a mock metadata field mapper implementation that supports setting its value
 * through the document source.
 *
 * <p>Registered in tests so that {@code DocumentParser} has at least one metadata
 * field ({@code _mock_metadata}) that is allowed to appear inside the document
 * _source, as opposed to built-in metadata fields like {@code _field_names} which
 * are rejected when supplied by the user.
 */
public class MockMetadataMapperPlugin extends Plugin implements MapperPlugin {

    /**
     * A mock metadata field mapper that supports being set from the document source.
     *
     * <p>Accepts only plain string values; anything else is rejected with an
     * {@link IllegalArgumentException}, which document parsing surfaces as a
     * "failed to parse field [_mock_metadata]" error.
     */
    public static class MockMetadataMapper extends MetadataFieldMapper {

        // Both the mapper's content type and the indexed field share the same name.
        static final String CONTENT_TYPE = "_mock_metadata";

        static final String FIELD_NAME = "_mock_metadata";

        protected MockMetadataMapper() {
            super(new KeywordFieldMapper.KeywordFieldType(FIELD_NAME));
        }

        @Override
        protected void parseCreateField(ParseContext context) throws IOException {
            // Only a string token is accepted; it is stored so tests can read it
            // back via IndexableField#stringValue(). Null, objects, and other
            // token types are rejected.
            if (context.parser().currentToken() == XContentParser.Token.VALUE_STRING) {
                context.doc().add(new StringField(FIELD_NAME, context.parser().text(), Field.Store.YES));
            } else {
                throw new IllegalArgumentException("Field [" + fieldType().name() + "] must be a string.");
            }
        }

        @Override
        public Iterator<Mapper> iterator() {
            // No sub-fields.
            return Collections.emptyIterator();
        }

        @Override
        protected String contentType() {
            return CONTENT_TYPE;
        }

        // Builder with no configurable parameters; always produces a fresh mapper.
        public static class Builder extends MetadataFieldMapper.Builder {

            protected Builder() {
                super(FIELD_NAME);
            }

            @Override
            protected List<Parameter<?>> getParameters() {
                return Collections.emptyList();
            }

            @Override
            public MockMetadataMapper build(BuilderContext context) {
                return new MockMetadataMapper();
            }
        }

        // Type parser used by the mapping machinery: the first lambda supplies the
        // default instance, the second the (parameterless) builder.
        public static final TypeParser PARSER = new ConfigurableTypeParser(
            c -> new MockMetadataMapper(),
            c -> new MockMetadataMapper.Builder()) {
        };
    }

    @Override
    public Map<String, MetadataFieldMapper.TypeParser> getMetadataMappers() {
        // Expose the mock mapper under its _mock_metadata content type.
        return Collections.singletonMap(MockMetadataMapper.CONTENT_TYPE, MockMetadataMapper.PARSER);
    }
}

View File

@ -58,7 +58,7 @@ public class RoutingFieldMapperTests extends ESSingleNodeTestCase {
.startObject().field("_routing", "foo").endObject()),XContentType.JSON));
fail("Expected failure to parse metadata field");
} catch (MapperParsingException e) {
assertThat(e.getMessage(), e.getMessage(),
assertThat(e.getCause().getMessage(), e.getCause().getMessage(),
containsString("Field [_routing] is a metadata field and cannot be added inside a document"));
}
}

View File

@ -172,15 +172,6 @@ public class DataStreamTimestampFieldMapper extends MetadataFieldMapper {
}
}
@Override
public void preParse(ParseContext context) throws IOException {}
@Override
protected void parseCreateField(ParseContext context) throws IOException {
// Meta field doesn't create any fields, so this shouldn't happen.
throw new IllegalStateException(NAME + " field mapper cannot create fields");
}
@Override
public void postParse(ParseContext context) throws IOException {
if (enabled == false) {