Mapping: Moved dynamic field handling in doc parsing to end of parsing

Currently dynamic mappings propagate through call semantics, where deeper
dynamic mappings are merged into higher level mappings through
return values of recursive method calls. This makes it tricky
to handle multiple updates in the same method, for example when
trying to create parent object mappers dynamically for a field name
that contains dots.

This change makes the api for adding mappers a simple list
of new mappers, and moves construction of the root level mapping
update to the end of doc parsing.
This commit is contained in:
Ryan Ernst 2016-02-23 07:22:33 -08:00
parent a9eb668497
commit 551e6bd66f
8 changed files with 411 additions and 216 deletions

View File

@ -19,12 +19,21 @@
package org.elasticsearch.index.mapper; package org.elasticsearch.index.mapper;
import java.io.Closeable;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import org.apache.lucene.document.Field; import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.IndexableField; import org.apache.lucene.index.IndexableField;
import org.apache.lucene.util.CloseableThreadLocal; import org.apache.lucene.util.CloseableThreadLocal;
import org.elasticsearch.common.Strings; import org.elasticsearch.common.Strings;
import org.elasticsearch.common.joda.FormatDateTimeFormatter; import org.elasticsearch.common.joda.FormatDateTimeFormatter;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.IndexSettings;
@ -48,15 +57,8 @@ import org.elasticsearch.index.mapper.object.ArrayValueMapperParser;
import org.elasticsearch.index.mapper.object.ObjectMapper; import org.elasticsearch.index.mapper.object.ObjectMapper;
import org.elasticsearch.index.mapper.object.RootObjectMapper; import org.elasticsearch.index.mapper.object.RootObjectMapper;
import java.io.Closeable;
import java.io.IOException;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
/** A parser for documents, given mappings from a DocumentMapper */ /** A parser for documents, given mappings from a DocumentMapper */
class DocumentParser implements Closeable { final class DocumentParser implements Closeable {
private CloseableThreadLocal<ParseContext.InternalParseContext> cache = new CloseableThreadLocal<ParseContext.InternalParseContext>() { private CloseableThreadLocal<ParseContext.InternalParseContext> cache = new CloseableThreadLocal<ParseContext.InternalParseContext>() {
@Override @Override
@ -120,10 +122,7 @@ class DocumentParser implements Closeable {
// entire type is disabled // entire type is disabled
parser.skipChildren(); parser.skipChildren();
} else if (emptyDoc == false) { } else if (emptyDoc == false) {
Mapper update = parseObject(context, mapping.root, true); parseObjectOrNested(context, mapping.root, true);
if (update != null) {
context.addDynamicMappingsUpdate(update);
}
} }
for (MetadataFieldMapper metadataMapper : mapping.metadataMappers) { for (MetadataFieldMapper metadataMapper : mapping.metadataMappers) {
@ -178,12 +177,7 @@ class DocumentParser implements Closeable {
} }
} }
Mapper rootDynamicUpdate = context.dynamicMappingsUpdate(); Mapping update = createDynamicUpdate(mapping, docMapper, context.getDynamicMappers());
Mapping update = null;
if (rootDynamicUpdate != null) {
update = mapping.mappingUpdate(rootDynamicUpdate);
}
ParsedDocument doc = new ParsedDocument(context.uid(), context.version(), context.id(), context.type(), source.routing(), source.timestamp(), source.ttl(), context.docs(), ParsedDocument doc = new ParsedDocument(context.uid(), context.version(), context.id(), context.type(), source.routing(), source.timestamp(), source.ttl(), context.docs(),
context.source(), update).parent(source.parent()); context.source(), update).parent(source.parent());
// reset the context to free up memory // reset the context to free up memory
@ -191,10 +185,89 @@ class DocumentParser implements Closeable {
return doc; return doc;
} }
static ObjectMapper parseObject(ParseContext context, ObjectMapper mapper, boolean atRoot) throws IOException { /** Creates a Mapping containing any dynamically added fields, or returns null if there were no dynamic mappings. */
/**
 * Builds the root-level mapping update for all mappers that were created dynamically
 * while parsing a document, or returns {@code null} when no dynamic mappers were added.
 *
 * NOTE(review): assumes each dynamic mapper's name() is its full dotted path — confirm.
 */
static Mapping createDynamicUpdate(Mapping mapping, DocumentMapper docMapper, List<Mapper> dynamicMappers) {
if (dynamicMappers.isEmpty()) {
return null;
}
// We build a mapping by first sorting the mappers, so that all mappers containing a common prefix
// will be processed in a contiguous block. When the prefix is no longer seen, we pop the extra elements
// off the stack, merging them upwards into the existing mappers.
Collections.sort(dynamicMappers, (Mapper o1, Mapper o2) -> o1.name().compareTo(o2.name()));
// Stack of object mappers from the root down to the parent of the mapper being processed.
List<ObjectMapper> parentMappers = new ArrayList<>();
// create an empty root object which updates will be propagated into
RootObjectMapper.Builder rootBuilder = new RootObjectMapper.Builder(docMapper.type());
RootObjectMapper.BuilderContext context = new RootObjectMapper.BuilderContext(Settings.EMPTY, new ContentPath());
parentMappers.add(rootBuilder.build(context));
Mapper previousMapper = null;
for (Mapper newMapper : dynamicMappers) {
if (previousMapper != null && newMapper.name().equals(previousMapper.name())) {
// We can see the same mapper more than once, for example, if we had foo.bar and foo.baz, where
// foo did not yet exist. This will create 2 copies in dynamic mappings, which should be identical.
// Here we just skip over the duplicates, but we merge them to ensure there are no conflicts.
newMapper.merge(previousMapper, false);
continue;
}
previousMapper = newMapper;
String[] nameParts = newMapper.name().split("\\.");
// find common elements with the previously processed dynamic mapper
// keepBefore counts the stack entries (root included) still valid for this name
int keepBefore = 1;
while (keepBefore < parentMappers.size() &&
parentMappers.get(keepBefore).simpleName().equals(nameParts[keepBefore - 1])) {
++keepBefore;
}
popMappers(parentMappers, keepBefore);
// Add parent mappers that don't exist in dynamic mappers
while (keepBefore < nameParts.length) {
ObjectMapper parent = parentMappers.get(parentMappers.size() - 1);
Mapper newLast = parent.getMapper(nameParts[keepBefore - 1]);
if (newLast == null) {
String objectName = nameParts[keepBefore - 1];
if (keepBefore > 1) {
// only prefix with parent mapper if the parent mapper isn't the root (which has a fake name)
objectName = parent.name() + '.' + objectName;
}
// fall back to an object mapper already defined on the document mapper
newLast = docMapper.objectMappers().get(objectName);
}
assert newLast instanceof ObjectMapper;
parentMappers.add((ObjectMapper)newLast);
++keepBefore;
}
if (newMapper instanceof ObjectMapper) {
// object mappers become the new deepest parent on the stack
parentMappers.add((ObjectMapper)newMapper);
} else {
// leaf field mappers are merged into the current deepest parent
addToLastMapper(parentMappers, newMapper);
}
}
// collapse whatever is left on the stack back into the root mapper
popMappers(parentMappers, 1);
assert parentMappers.size() == 1;
return mapping.mappingUpdate(parentMappers.get(0));
}
private static void popMappers(List<ObjectMapper> parentMappers, int keepBefore) {
    assert keepBefore >= 1; // index 0 holds the root mapper, which must always survive
    // Fold every mapper past keepBefore back into its parent, deepest first. Object
    // mappers are immutable, so each popped entry is merged into the new tail of the list.
    while (parentMappers.size() > keepBefore) {
        ObjectMapper popped = parentMappers.remove(parentMappers.size() - 1);
        addToLastMapper(parentMappers, popped);
    }
}
private static void addToLastMapper(List<ObjectMapper> parentMappers, Mapper mapper) {
    assert parentMappers.size() >= 1;
    // Merge the given mapper into the deepest parent on the stack. Since object mappers
    // are immutable, build an updated copy and replace the tail entry in place.
    int tail = parentMappers.size() - 1;
    ObjectMapper last = parentMappers.get(tail);
    ObjectMapper updated = last.mappingUpdate(mapper);
    parentMappers.set(tail, last.merge(updated, false));
}
static void parseObjectOrNested(ParseContext context, ObjectMapper mapper, boolean atRoot) throws IOException {
if (mapper.isEnabled() == false) { if (mapper.isEnabled() == false) {
context.parser().skipChildren(); context.parser().skipChildren();
return null; return;
} }
XContentParser parser = context.parser(); XContentParser parser = context.parser();
@ -205,7 +278,7 @@ class DocumentParser implements Closeable {
XContentParser.Token token = parser.currentToken(); XContentParser.Token token = parser.currentToken();
if (token == XContentParser.Token.VALUE_NULL) { if (token == XContentParser.Token.VALUE_NULL) {
// the object is null ("obj1" : null), simply bail // the object is null ("obj1" : null), simply bail
return null; return;
} }
if (token.isValue()) { if (token.isValue()) {
@ -245,9 +318,9 @@ class DocumentParser implements Closeable {
while (token != XContentParser.Token.END_OBJECT) { while (token != XContentParser.Token.END_OBJECT) {
ObjectMapper newUpdate = null; ObjectMapper newUpdate = null;
if (token == XContentParser.Token.START_OBJECT) { if (token == XContentParser.Token.START_OBJECT) {
newUpdate = parseObject(context, mapper, currentFieldName); parseObject(context, mapper, currentFieldName);
} else if (token == XContentParser.Token.START_ARRAY) { } else if (token == XContentParser.Token.START_ARRAY) {
newUpdate = parseArray(context, mapper, currentFieldName); parseArray(context, mapper, currentFieldName);
} else if (token == XContentParser.Token.FIELD_NAME) { } else if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName(); currentFieldName = parser.currentName();
} else if (token == XContentParser.Token.VALUE_NULL) { } else if (token == XContentParser.Token.VALUE_NULL) {
@ -255,7 +328,7 @@ class DocumentParser implements Closeable {
} else if (token == null) { } else if (token == null) {
throw new MapperParsingException("object mapping for [" + mapper.name() + "] tried to parse field [" + currentFieldName + "] as object, but got EOF, has a concrete value been provided to it?"); throw new MapperParsingException("object mapping for [" + mapper.name() + "] tried to parse field [" + currentFieldName + "] as object, but got EOF, has a concrete value been provided to it?");
} else if (token.isValue()) { } else if (token.isValue()) {
newUpdate = parseValue(context, mapper, currentFieldName, token); parseValue(context, mapper, currentFieldName, token);
} }
token = parser.nextToken(); token = parser.nextToken();
if (newUpdate != null) { if (newUpdate != null) {
@ -293,36 +366,31 @@ class DocumentParser implements Closeable {
} }
} }
} }
return update;
} }
private static Mapper parseObjectOrField(ParseContext context, Mapper mapper) throws IOException { private static void parseObjectOrField(ParseContext context, Mapper mapper) throws IOException {
if (mapper instanceof ObjectMapper) { if (mapper instanceof ObjectMapper) {
return parseObject(context, (ObjectMapper) mapper, false); parseObjectOrNested(context, (ObjectMapper) mapper, false);
} else { } else {
FieldMapper fieldMapper = (FieldMapper)mapper; FieldMapper fieldMapper = (FieldMapper)mapper;
Mapper update = fieldMapper.parse(context); Mapper update = fieldMapper.parse(context);
if (update != null) {
context.addDynamicMapper(update);
}
if (fieldMapper.copyTo() != null) { if (fieldMapper.copyTo() != null) {
parseCopyFields(context, fieldMapper, fieldMapper.copyTo().copyToFields()); parseCopyFields(context, fieldMapper, fieldMapper.copyTo().copyToFields());
} }
return update;
} }
} }
private static ObjectMapper parseObject(final ParseContext context, ObjectMapper mapper, String currentFieldName) throws IOException { private static ObjectMapper parseObject(final ParseContext context, ObjectMapper mapper, String currentFieldName) throws IOException {
if (currentFieldName == null) { assert currentFieldName != null;
throw new MapperParsingException("object mapping [" + mapper.name() + "] trying to serialize an object with no field associated with it, current value [" + context.parser().textOrNull() + "]");
}
context.path().add(currentFieldName); context.path().add(currentFieldName);
ObjectMapper update = null; ObjectMapper update = null;
Mapper objectMapper = mapper.getMapper(currentFieldName); Mapper objectMapper = mapper.getMapper(currentFieldName);
if (objectMapper != null) { if (objectMapper != null) {
final Mapper subUpdate = parseObjectOrField(context, objectMapper); parseObjectOrField(context, objectMapper);
if (subUpdate != null) {
// propagate mapping update
update = mapper.mappingUpdate(subUpdate);
}
} else { } else {
ObjectMapper.Dynamic dynamic = mapper.dynamic(); ObjectMapper.Dynamic dynamic = mapper.dynamic();
if (dynamic == null) { if (dynamic == null) {
@ -343,8 +411,9 @@ class DocumentParser implements Closeable {
} }
Mapper.BuilderContext builderContext = new Mapper.BuilderContext(context.indexSettings(), context.path()); Mapper.BuilderContext builderContext = new Mapper.BuilderContext(context.indexSettings(), context.path());
objectMapper = builder.build(builderContext); objectMapper = builder.build(builderContext);
context.addDynamicMapper(objectMapper);
context.path().add(currentFieldName); context.path().add(currentFieldName);
update = mapper.mappingUpdate(parseAndMergeUpdate(objectMapper, context)); parseObjectOrField(context, objectMapper);
} else { } else {
// not dynamic, read everything up to end object // not dynamic, read everything up to end object
context.parser().skipChildren(); context.parser().skipChildren();
@ -355,7 +424,7 @@ class DocumentParser implements Closeable {
return update; return update;
} }
private static ObjectMapper parseArray(ParseContext context, ObjectMapper parentMapper, String lastFieldName) throws IOException { private static void parseArray(ParseContext context, ObjectMapper parentMapper, String lastFieldName) throws IOException {
String arrayFieldName = lastFieldName; String arrayFieldName = lastFieldName;
Mapper mapper = parentMapper.getMapper(lastFieldName); Mapper mapper = parentMapper.getMapper(lastFieldName);
if (mapper != null) { if (mapper != null) {
@ -363,15 +432,9 @@ class DocumentParser implements Closeable {
// expects an array, if so we pass the context straight to the mapper and if not // expects an array, if so we pass the context straight to the mapper and if not
// we serialize the array components // we serialize the array components
if (mapper instanceof ArrayValueMapperParser) { if (mapper instanceof ArrayValueMapperParser) {
final Mapper subUpdate = parseObjectOrField(context, mapper); parseObjectOrField(context, mapper);
if (subUpdate != null) {
// propagate the mapping update
return parentMapper.mappingUpdate(subUpdate);
} else {
return null;
}
} else { } else {
return parseNonDynamicArray(context, parentMapper, lastFieldName, arrayFieldName); parseNonDynamicArray(context, parentMapper, lastFieldName, arrayFieldName);
} }
} else { } else {
@ -384,31 +447,35 @@ class DocumentParser implements Closeable {
} else if (dynamic == ObjectMapper.Dynamic.TRUE) { } else if (dynamic == ObjectMapper.Dynamic.TRUE) {
Mapper.Builder builder = context.root().findTemplateBuilder(context, arrayFieldName, "object"); Mapper.Builder builder = context.root().findTemplateBuilder(context, arrayFieldName, "object");
if (builder == null) { if (builder == null) {
return parseNonDynamicArray(context, parentMapper, lastFieldName, arrayFieldName); // TODO: shouldn't this create a default object mapper builder?
parseNonDynamicArray(context, parentMapper, lastFieldName, arrayFieldName);
return;
} }
Mapper.BuilderContext builderContext = new Mapper.BuilderContext(context.indexSettings(), context.path()); Mapper.BuilderContext builderContext = new Mapper.BuilderContext(context.indexSettings(), context.path());
mapper = builder.build(builderContext); mapper = builder.build(builderContext);
if (mapper != null && mapper instanceof ArrayValueMapperParser) { assert mapper != null;
if (mapper instanceof ArrayValueMapperParser) {
context.addDynamicMapper(mapper);
context.path().add(arrayFieldName); context.path().add(arrayFieldName);
mapper = parseAndMergeUpdate(mapper, context); parseObjectOrField(context, mapper);
return parentMapper.mappingUpdate(mapper);
} else { } else {
return parseNonDynamicArray(context, parentMapper, lastFieldName, arrayFieldName); parseNonDynamicArray(context, parentMapper, lastFieldName, arrayFieldName);
} }
} else { } else {
return parseNonDynamicArray(context, parentMapper, lastFieldName, arrayFieldName); // TODO: shouldn't this skip, not parse?
parseNonDynamicArray(context, parentMapper, lastFieldName, arrayFieldName);
} }
} }
} }
private static ObjectMapper parseNonDynamicArray(ParseContext context, ObjectMapper mapper, String lastFieldName, String arrayFieldName) throws IOException { private static void parseNonDynamicArray(ParseContext context, ObjectMapper mapper, String lastFieldName, String arrayFieldName) throws IOException {
XContentParser parser = context.parser(); XContentParser parser = context.parser();
XContentParser.Token token; XContentParser.Token token;
while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
if (token == XContentParser.Token.START_OBJECT) { if (token == XContentParser.Token.START_OBJECT) {
return parseObject(context, mapper, lastFieldName); parseObject(context, mapper, lastFieldName);
} else if (token == XContentParser.Token.START_ARRAY) { } else if (token == XContentParser.Token.START_ARRAY) {
return parseArray(context, mapper, lastFieldName); parseArray(context, mapper, lastFieldName);
} else if (token == XContentParser.Token.FIELD_NAME) { } else if (token == XContentParser.Token.FIELD_NAME) {
lastFieldName = parser.currentName(); lastFieldName = parser.currentName();
} else if (token == XContentParser.Token.VALUE_NULL) { } else if (token == XContentParser.Token.VALUE_NULL) {
@ -416,25 +483,20 @@ class DocumentParser implements Closeable {
} else if (token == null) { } else if (token == null) {
throw new MapperParsingException("object mapping for [" + mapper.name() + "] with array for [" + arrayFieldName + "] tried to parse as array, but got EOF, is there a mismatch in types for the same field?"); throw new MapperParsingException("object mapping for [" + mapper.name() + "] with array for [" + arrayFieldName + "] tried to parse as array, but got EOF, is there a mismatch in types for the same field?");
} else { } else {
return parseValue(context, mapper, lastFieldName, token); parseValue(context, mapper, lastFieldName, token);
} }
} }
return null;
} }
private static ObjectMapper parseValue(final ParseContext context, ObjectMapper parentMapper, String currentFieldName, XContentParser.Token token) throws IOException { private static void parseValue(final ParseContext context, ObjectMapper parentMapper, String currentFieldName, XContentParser.Token token) throws IOException {
if (currentFieldName == null) { if (currentFieldName == null) {
throw new MapperParsingException("object mapping [" + parentMapper.name() + "] trying to serialize a value with no field associated with it, current value [" + context.parser().textOrNull() + "]"); throw new MapperParsingException("object mapping [" + parentMapper.name() + "] trying to serialize a value with no field associated with it, current value [" + context.parser().textOrNull() + "]");
} }
Mapper mapper = parentMapper.getMapper(currentFieldName); Mapper mapper = parentMapper.getMapper(currentFieldName);
if (mapper != null) { if (mapper != null) {
Mapper subUpdate = parseObjectOrField(context, mapper); parseObjectOrField(context, mapper);
if (subUpdate == null) {
return null;
}
return parentMapper.mappingUpdate(subUpdate);
} else { } else {
return parseDynamicValue(context, parentMapper, currentFieldName, token); parseDynamicValue(context, parentMapper, currentFieldName, token);
} }
} }
@ -602,7 +664,7 @@ class DocumentParser implements Closeable {
throw new IllegalStateException("Can't handle serializing a dynamic type with content token [" + token + "] and field name [" + currentFieldName + "]"); throw new IllegalStateException("Can't handle serializing a dynamic type with content token [" + token + "] and field name [" + currentFieldName + "]");
} }
private static ObjectMapper parseDynamicValue(final ParseContext context, ObjectMapper parentMapper, String currentFieldName, XContentParser.Token token) throws IOException { private static void parseDynamicValue(final ParseContext context, ObjectMapper parentMapper, String currentFieldName, XContentParser.Token token) throws IOException {
ObjectMapper.Dynamic dynamic = parentMapper.dynamic(); ObjectMapper.Dynamic dynamic = parentMapper.dynamic();
if (dynamic == null) { if (dynamic == null) {
dynamic = dynamicOrDefault(context.root().dynamic()); dynamic = dynamicOrDefault(context.root().dynamic());
@ -611,7 +673,7 @@ class DocumentParser implements Closeable {
throw new StrictDynamicMappingException(parentMapper.fullPath(), currentFieldName); throw new StrictDynamicMappingException(parentMapper.fullPath(), currentFieldName);
} }
if (dynamic == ObjectMapper.Dynamic.FALSE) { if (dynamic == ObjectMapper.Dynamic.FALSE) {
return null; return;
} }
final String path = context.path().pathAsText(currentFieldName); final String path = context.path().pathAsText(currentFieldName);
final Mapper.BuilderContext builderContext = new Mapper.BuilderContext(context.indexSettings(), context.path()); final Mapper.BuilderContext builderContext = new Mapper.BuilderContext(context.indexSettings(), context.path());
@ -629,14 +691,9 @@ class DocumentParser implements Closeable {
// try to not introduce a conflict // try to not introduce a conflict
mapper = mapper.updateFieldType(Collections.singletonMap(path, existingFieldType)); mapper = mapper.updateFieldType(Collections.singletonMap(path, existingFieldType));
} }
context.addDynamicMapper(mapper);
mapper = parseAndMergeUpdate(mapper, context); parseObjectOrField(context, mapper);
ObjectMapper update = null;
if (mapper != null) {
update = parentMapper.mappingUpdate(mapper);
}
return update;
} }
/** Creates instances of the fields that the current field should be copied to */ /** Creates instances of the fields that the current field should be copied to */
@ -674,8 +731,9 @@ class DocumentParser implements Closeable {
// The path of the dest field might be completely different from the current one so we need to reset it // The path of the dest field might be completely different from the current one so we need to reset it
context = context.overridePath(new ContentPath(0)); context = context.overridePath(new ContentPath(0));
String[] paths = Strings.splitStringToArray(field, '.'); // TODO: why Strings.splitStringToArray instead of String.split?
String fieldName = paths[paths.length-1]; final String[] paths = Strings.splitStringToArray(field, '.');
final String fieldName = paths[paths.length-1];
ObjectMapper mapper = context.root(); ObjectMapper mapper = context.root();
ObjectMapper[] mappers = new ObjectMapper[paths.length-1]; ObjectMapper[] mappers = new ObjectMapper[paths.length-1];
if (paths.length > 1) { if (paths.length > 1) {
@ -706,6 +764,7 @@ class DocumentParser implements Closeable {
if (mapper.nested() != ObjectMapper.Nested.NO) { if (mapper.nested() != ObjectMapper.Nested.NO) {
throw new MapperParsingException("It is forbidden to create dynamic nested objects ([" + context.path().pathAsText(paths[i]) + "]) through `copy_to`"); throw new MapperParsingException("It is forbidden to create dynamic nested objects ([" + context.path().pathAsText(paths[i]) + "]) through `copy_to`");
} }
context.addDynamicMapper(mapper);
break; break;
case FALSE: case FALSE:
// Maybe we should log something to tell the user that the copy_to is ignored in this case. // Maybe we should log something to tell the user that the copy_to is ignored in this case.
@ -720,36 +779,10 @@ class DocumentParser implements Closeable {
parent = mapper; parent = mapper;
} }
} }
ObjectMapper update = parseDynamicValue(context, mapper, fieldName, context.parser().currentToken()); parseDynamicValue(context, mapper, fieldName, context.parser().currentToken());
assert update != null; // we are parsing a dynamic value so we necessarily created a new mapping
if (paths.length > 1) {
for (int i = paths.length - 2; i >= 0; i--) {
ObjectMapper parent = context.root();
if (i > 0) {
parent = mappers[i-1];
}
assert parent != null;
update = parent.mappingUpdate(update);
}
}
context.addDynamicMappingsUpdate(update);
} }
} }
/**
* Parse the given {@code context} with the given {@code mapper} and apply
* the potential mapping update in-place. This method is useful when
* composing mapping updates.
*/
private static <M extends Mapper> M parseAndMergeUpdate(M mapper, ParseContext context) throws IOException {
final Mapper update = parseObjectOrField(context, mapper);
if (update != null) {
mapper = (M) mapper.merge(update, false);
}
return mapper;
}
private static ObjectMapper.Dynamic dynamicOrDefault(ObjectMapper.Dynamic dynamic) { private static ObjectMapper.Dynamic dynamicOrDefault(ObjectMapper.Dynamic dynamic) {
return dynamic == null ? ObjectMapper.Dynamic.TRUE : dynamic; return dynamic == null ? ObjectMapper.Dynamic.TRUE : dynamic;
} }

View File

@ -76,6 +76,7 @@ public abstract class Mapper implements ToXContent, Iterable<Mapper> {
return this.name; return this.name;
} }
/** Returns a newly built mapper. */
public abstract Y build(BuilderContext context); public abstract Y build(BuilderContext context);
} }

View File

@ -337,13 +337,13 @@ public abstract class ParseContext {
} }
@Override @Override
public void addDynamicMappingsUpdate(Mapper update) { public void addDynamicMapper(Mapper update) {
in.addDynamicMappingsUpdate(update); in.addDynamicMapper(update);
} }
@Override @Override
public Mapper dynamicMappingsUpdate() { public List<Mapper> getDynamicMappers() {
return in.dynamicMappingsUpdate(); return in.getDynamicMappers();
} }
} }
@ -377,7 +377,7 @@ public abstract class ParseContext {
private float docBoost = 1.0f; private float docBoost = 1.0f;
private Mapper dynamicMappingsUpdate = null; private List<Mapper> dynamicMappers = new ArrayList<>();
public InternalParseContext(@Nullable Settings indexSettings, DocumentMapperParser docMapperParser, DocumentMapper docMapper, ContentPath path) { public InternalParseContext(@Nullable Settings indexSettings, DocumentMapperParser docMapperParser, DocumentMapper docMapper, ContentPath path) {
this.indexSettings = indexSettings; this.indexSettings = indexSettings;
@ -403,7 +403,7 @@ public abstract class ParseContext {
this.path.reset(); this.path.reset();
this.allEntries = new AllEntries(); this.allEntries = new AllEntries();
this.docBoost = 1.0f; this.docBoost = 1.0f;
this.dynamicMappingsUpdate = null; this.dynamicMappers = new ArrayList<>();
} }
@Override @Override
@ -555,18 +555,13 @@ public abstract class ParseContext {
} }
@Override @Override
public void addDynamicMappingsUpdate(Mapper mapper) { public void addDynamicMapper(Mapper mapper) {
assert mapper instanceof RootObjectMapper : mapper; dynamicMappers.add(mapper);
if (dynamicMappingsUpdate == null) {
dynamicMappingsUpdate = mapper;
} else {
dynamicMappingsUpdate = dynamicMappingsUpdate.merge(mapper, false);
}
} }
@Override @Override
public Mapper dynamicMappingsUpdate() { public List<Mapper> getDynamicMappers() {
return dynamicMappingsUpdate; return dynamicMappers;
} }
} }
@ -770,12 +765,12 @@ public abstract class ParseContext {
public abstract StringBuilder stringBuilder(); public abstract StringBuilder stringBuilder();
/** /**
* Add a dynamic update to the root object mapper. * Add a new mapper dynamically created while parsing.
*/ */
public abstract void addDynamicMappingsUpdate(Mapper update); public abstract void addDynamicMapper(Mapper update);
/** /**
* Get dynamic updates to the root object mapper. * Get dynamic mappers created while parsing.
*/ */
public abstract Mapper dynamicMappingsUpdate(); public abstract List<Mapper> getDynamicMappers();
} }

View File

@ -19,12 +19,20 @@
package org.elasticsearch.index.mapper; package org.elasticsearch.index.mapper;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.mapper.internal.UidFieldMapper; import org.elasticsearch.index.mapper.internal.UidFieldMapper;
import org.elasticsearch.index.mapper.object.ObjectMapper;
import org.elasticsearch.test.ESSingleNodeTestCase; import org.elasticsearch.test.ESSingleNodeTestCase;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
// TODO: make this a real unit test // TODO: make this a real unit test
public class DocumentParserTests extends ESSingleNodeTestCase { public class DocumentParserTests extends ESSingleNodeTestCase {
@ -61,4 +69,120 @@ public class DocumentParserTests extends ESSingleNodeTestCase {
assertNotNull(doc.rootDoc().getField("bar")); assertNotNull(doc.rootDoc().getField("bar"));
assertNotNull(doc.rootDoc().getField(UidFieldMapper.NAME)); assertNotNull(doc.rootDoc().getField(UidFieldMapper.NAME));
} }
public void testDotsAsObject() throws Exception {
    // A dotted field name must resolve to the same mapper as the equivalent
    // nested-object notation, in any mix of the two spellings.
    DocumentMapperParser docMapperParser = createIndex("test").mapperService().documentMapperParser();
    String mappingSource = XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties")
        .startObject("foo").startObject("properties")
        .startObject("bar").startObject("properties")
        .startObject("baz").field("type", "integer")
        .endObject().endObject().endObject().endObject().endObject().endObject().endObject().endObject().string();
    DocumentMapper docMapper = docMapperParser.parse("type", new CompressedXContent(mappingSource));
    // The same leaf written three ways: fully dotted, nested, and mixed.
    BytesReference docSource = XContentFactory.jsonBuilder()
        .startObject()
        .field("foo.bar.baz", 123)
        .startObject("foo")
        .field("bar.baz", 456)
        .endObject()
        .startObject("foo.bar")
        .field("baz", 789)
        .endObject()
        .endObject().bytes();
    ParsedDocument parsed = docMapper.parse("test", "type", "1", docSource);
    String[] fieldValues = parsed.rootDoc().getValues("foo.bar.baz");
    assertEquals(3, fieldValues.length);
    assertEquals("123", fieldValues[0]);
    assertEquals("456", fieldValues[1]);
    assertEquals("789", fieldValues[2]);
}
DocumentMapper createDummyMapping(MapperService mapperService) throws Exception {
    // Minimal mapping: a single object field "a" containing a sub-object "b".
    String source = jsonBuilder().startObject().startObject("type").startObject("properties")
        .startObject("a").startObject("properties")
        .startObject("b").field("type", "object")
        .endObject().endObject().endObject().endObject().endObject().endObject().string();
    return mapperService.documentMapperParser().parse("type", new CompressedXContent(source));
}
// Builds a standalone ObjectMapper for the given dotted name — a surprisingly
// involved dance through ParseContext and builder plumbing.
ObjectMapper createObjectMapper(MapperService mapperService, String name) throws Exception {
    String[] parts = name.split("\\.");
    int leaf = parts.length - 1;
    // Seed the content path with every ancestor of the leaf object name.
    ContentPath path = new ContentPath();
    for (int i = 0; i < leaf; ++i) {
        path.add(parts[i]);
    }
    ParseContext context = new ParseContext.InternalParseContext(Settings.EMPTY,
        mapperService.documentMapperParser(), mapperService.documentMapper("type"), path);
    Mapper.Builder builder = new ObjectMapper.Builder(parts[leaf]).enabled(true);
    Mapper.BuilderContext builderContext = new Mapper.BuilderContext(context.indexSettings(), context.path());
    return (ObjectMapper) builder.build(builderContext);
}
// No dynamic mappers collected during parsing means no mapping update is produced.
public void testEmptyMappingUpdate() throws Exception {
    DocumentMapper mapper = createDummyMapping(createIndex("test").mapperService());
    Mapping update = DocumentParser.createDynamicUpdate(mapper.mapping(), mapper, Collections.emptyList());
    assertNull(update);
}
// A single dynamically-added root-level field shows up at the root of the update.
public void testSingleMappingUpdate() throws Exception {
    DocumentMapper mapper = createDummyMapping(createIndex("test").mapperService());
    List<Mapper> newMappers = Collections.singletonList(new MockFieldMapper("foo"));
    Mapping update = DocumentParser.createDynamicUpdate(mapper.mapping(), mapper, newMappers);
    assertNotNull(update.root().getMapper("foo"));
}
// A dynamic field under an existing object ("a") is folded into that object in the update.
public void testSubfieldMappingUpdate() throws Exception {
    DocumentMapper mapper = createDummyMapping(createIndex("test").mapperService());
    List<Mapper> newMappers = Collections.singletonList(new MockFieldMapper("a.foo"));
    Mapping update = DocumentParser.createDynamicUpdate(mapper.mapping(), mapper, newMappers);
    Mapper parent = update.root().getMapper("a");
    assertNotNull(parent);
    assertTrue(parent instanceof ObjectMapper);
    assertNotNull(((ObjectMapper) parent).getMapper("foo"));
}
// Several dynamic fields under the same object are merged into one parent in the update.
public void testMultipleSubfieldMappingUpdate() throws Exception {
    DocumentMapper mapper = createDummyMapping(createIndex("test").mapperService());
    List<Mapper> newMappers = new ArrayList<>();
    newMappers.add(new MockFieldMapper("a.foo"));
    newMappers.add(new MockFieldMapper("a.bar"));
    Mapping update = DocumentParser.createDynamicUpdate(mapper.mapping(), mapper, newMappers);
    Mapper parent = update.root().getMapper("a");
    assertNotNull(parent);
    assertTrue(parent instanceof ObjectMapper);
    ObjectMapper parentObject = (ObjectMapper) parent;
    assertNotNull(parentObject.getMapper("foo"));
    assertNotNull(parentObject.getMapper("bar"));
}
// A dynamic field two objects deep ("a.b.foo") produces the full nested chain in the update.
public void testDeepSubfieldMappingUpdate() throws Exception {
    DocumentMapper mapper = createDummyMapping(createIndex("test").mapperService());
    List<Mapper> newMappers = Collections.singletonList(new MockFieldMapper("a.b.foo"));
    Mapping update = DocumentParser.createDynamicUpdate(mapper.mapping(), mapper, newMappers);
    Mapper outer = update.root().getMapper("a");
    assertNotNull(outer);
    assertTrue(outer instanceof ObjectMapper);
    Mapper inner = ((ObjectMapper) outer).getMapper("b");
    assertTrue(inner instanceof ObjectMapper);
    assertNotNull(((ObjectMapper) inner).getMapper("foo"));
}
// Dynamically created object mappers and their subfields are merged into a single tree.
public void testObjectMappingUpdate() throws Exception {
    MapperService mapperService = createIndex("test").mapperService();
    DocumentMapper mapper = createDummyMapping(mapperService);
    List<Mapper> newMappers = new ArrayList<>();
    newMappers.add(createObjectMapper(mapperService, "foo"));
    newMappers.add(createObjectMapper(mapperService, "foo.bar"));
    newMappers.add(new MockFieldMapper("foo.bar.baz"));
    newMappers.add(new MockFieldMapper("foo.field"));
    Mapping update = DocumentParser.createDynamicUpdate(mapper.mapping(), mapper, newMappers);
    Mapper foo = update.root().getMapper("foo");
    assertNotNull(foo);
    assertTrue(foo instanceof ObjectMapper);
    ObjectMapper fooObject = (ObjectMapper) foo;
    assertNotNull(fooObject.getMapper("field"));
    Mapper bar = fooObject.getMapper("bar");
    assertTrue(bar instanceof ObjectMapper);
    assertNotNull(((ObjectMapper) bar).getMapper("baz"));
}
} }

View File

@ -42,6 +42,7 @@ import org.elasticsearch.index.mapper.core.TextFieldMapper;
import org.elasticsearch.test.ESSingleNodeTestCase; import org.elasticsearch.test.ESSingleNodeTestCase;
import java.io.IOException; import java.io.IOException;
import java.util.List;
import static java.util.Collections.emptyMap; import static java.util.Collections.emptyMap;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
@ -211,7 +212,9 @@ public class DynamicMappingTests extends ESSingleNodeTestCase {
ctx.reset(XContentHelper.createParser(source.source()), new ParseContext.Document(), source); ctx.reset(XContentHelper.createParser(source.source()), new ParseContext.Document(), source);
assertEquals(XContentParser.Token.START_OBJECT, ctx.parser().nextToken()); assertEquals(XContentParser.Token.START_OBJECT, ctx.parser().nextToken());
ctx.parser().nextToken(); ctx.parser().nextToken();
return DocumentParser.parseObject(ctx, mapper.root(), true); DocumentParser.parseObjectOrNested(ctx, mapper.root(), true);
Mapping mapping = DocumentParser.createDynamicUpdate(mapper.mapping(), mapper, ctx.getDynamicMappers());
return mapping == null ? null : mapping.root();
} }
public void testDynamicMappingsNotNeeded() throws Exception { public void testDynamicMappingsNotNeeded() throws Exception {

View File

@ -19,12 +19,8 @@
package org.elasticsearch.index.mapper; package org.elasticsearch.index.mapper;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.ESTestCase;
import java.io.IOException;
import java.util.Arrays; import java.util.Arrays;
import java.util.Collection; import java.util.Collection;
import java.util.Collections; import java.util.Collections;
@ -59,7 +55,7 @@ public class FieldTypeLookupTests extends ESTestCase {
public void testAddNewField() { public void testAddNewField() {
FieldTypeLookup lookup = new FieldTypeLookup(); FieldTypeLookup lookup = new FieldTypeLookup();
FakeFieldMapper f = new FakeFieldMapper("foo"); MockFieldMapper f = new MockFieldMapper("foo");
FieldTypeLookup lookup2 = lookup.copyAndAddAll("type", newList(f), randomBoolean()); FieldTypeLookup lookup2 = lookup.copyAndAddAll("type", newList(f), randomBoolean());
assertNull(lookup.get("foo")); assertNull(lookup.get("foo"));
assertNull(lookup.get("bar")); assertNull(lookup.get("bar"));
@ -73,8 +69,8 @@ public class FieldTypeLookupTests extends ESTestCase {
} }
public void testAddExistingField() { public void testAddExistingField() {
FakeFieldMapper f = new FakeFieldMapper("foo"); MockFieldMapper f = new MockFieldMapper("foo");
FakeFieldMapper f2 = new FakeFieldMapper("foo"); MockFieldMapper f2 = new MockFieldMapper("foo");
FieldTypeLookup lookup = new FieldTypeLookup(); FieldTypeLookup lookup = new FieldTypeLookup();
lookup = lookup.copyAndAddAll("type1", newList(f), randomBoolean()); lookup = lookup.copyAndAddAll("type1", newList(f), randomBoolean());
FieldTypeLookup lookup2 = lookup.copyAndAddAll("type2", newList(f2), randomBoolean()); FieldTypeLookup lookup2 = lookup.copyAndAddAll("type2", newList(f2), randomBoolean());
@ -84,8 +80,8 @@ public class FieldTypeLookupTests extends ESTestCase {
} }
public void testAddExistingIndexName() { public void testAddExistingIndexName() {
FakeFieldMapper f = new FakeFieldMapper("foo"); MockFieldMapper f = new MockFieldMapper("foo");
FakeFieldMapper f2 = new FakeFieldMapper("bar"); MockFieldMapper f2 = new MockFieldMapper("bar");
FieldTypeLookup lookup = new FieldTypeLookup(); FieldTypeLookup lookup = new FieldTypeLookup();
lookup = lookup.copyAndAddAll("type1", newList(f), randomBoolean()); lookup = lookup.copyAndAddAll("type1", newList(f), randomBoolean());
FieldTypeLookup lookup2 = lookup.copyAndAddAll("type2", newList(f2), randomBoolean()); FieldTypeLookup lookup2 = lookup.copyAndAddAll("type2", newList(f2), randomBoolean());
@ -96,8 +92,8 @@ public class FieldTypeLookupTests extends ESTestCase {
} }
public void testAddExistingFullName() { public void testAddExistingFullName() {
FakeFieldMapper f = new FakeFieldMapper("foo"); MockFieldMapper f = new MockFieldMapper("foo");
FakeFieldMapper f2 = new FakeFieldMapper("foo"); MockFieldMapper f2 = new MockFieldMapper("foo");
FieldTypeLookup lookup = new FieldTypeLookup(); FieldTypeLookup lookup = new FieldTypeLookup();
try { try {
lookup.copyAndAddAll("type2", newList(f2), randomBoolean()); lookup.copyAndAddAll("type2", newList(f2), randomBoolean());
@ -107,12 +103,13 @@ public class FieldTypeLookupTests extends ESTestCase {
} }
public void testCheckCompatibilityMismatchedTypes() { public void testCheckCompatibilityMismatchedTypes() {
FieldMapper f1 = new FakeFieldMapper("foo"); FieldMapper f1 = new MockFieldMapper("foo");
FieldTypeLookup lookup = new FieldTypeLookup(); FieldTypeLookup lookup = new FieldTypeLookup();
lookup = lookup.copyAndAddAll("type", newList(f1), randomBoolean()); lookup = lookup.copyAndAddAll("type", newList(f1), randomBoolean());
MappedFieldType ft2 = FakeFieldMapper.makeOtherFieldType("foo"); OtherFakeFieldType ft2 = new OtherFakeFieldType();
FieldMapper f2 = new FakeFieldMapper("foo", ft2); ft2.setName("foo");
FieldMapper f2 = new MockFieldMapper("foo", ft2);
try { try {
lookup.copyAndAddAll("type2", newList(f2), false); lookup.copyAndAddAll("type2", newList(f2), false);
fail("expected type mismatch"); fail("expected type mismatch");
@ -129,13 +126,14 @@ public class FieldTypeLookupTests extends ESTestCase {
} }
public void testCheckCompatibilityConflict() { public void testCheckCompatibilityConflict() {
FieldMapper f1 = new FakeFieldMapper("foo"); FieldMapper f1 = new MockFieldMapper("foo");
FieldTypeLookup lookup = new FieldTypeLookup(); FieldTypeLookup lookup = new FieldTypeLookup();
lookup = lookup.copyAndAddAll("type", newList(f1), randomBoolean()); lookup = lookup.copyAndAddAll("type", newList(f1), randomBoolean());
MappedFieldType ft2 = FakeFieldMapper.makeFieldType("foo"); MappedFieldType ft2 = new MockFieldMapper.FakeFieldType();
ft2.setName("foo");
ft2.setBoost(2.0f); ft2.setBoost(2.0f);
FieldMapper f2 = new FakeFieldMapper("foo", ft2); FieldMapper f2 = new MockFieldMapper("foo", ft2);
try { try {
// different type // different type
lookup.copyAndAddAll("type2", newList(f2), false); lookup.copyAndAddAll("type2", newList(f2), false);
@ -146,9 +144,10 @@ public class FieldTypeLookupTests extends ESTestCase {
lookup.copyAndAddAll("type", newList(f2), false); // boost is updateable, so ok since we are implicitly updating all types lookup.copyAndAddAll("type", newList(f2), false); // boost is updateable, so ok since we are implicitly updating all types
lookup.copyAndAddAll("type2", newList(f2), true); // boost is updateable, so ok if forcing lookup.copyAndAddAll("type2", newList(f2), true); // boost is updateable, so ok if forcing
// now with a non changeable setting // now with a non changeable setting
MappedFieldType ft3 = FakeFieldMapper.makeFieldType("foo"); MappedFieldType ft3 = new MockFieldMapper.FakeFieldType();
ft3.setName("foo");
ft3.setStored(true); ft3.setStored(true);
FieldMapper f3 = new FakeFieldMapper("foo", ft3); FieldMapper f3 = new MockFieldMapper("foo", ft3);
try { try {
lookup.copyAndAddAll("type2", newList(f3), false); lookup.copyAndAddAll("type2", newList(f3), false);
fail("expected conflict"); fail("expected conflict");
@ -165,8 +164,8 @@ public class FieldTypeLookupTests extends ESTestCase {
} }
public void testSimpleMatchFullNames() { public void testSimpleMatchFullNames() {
FakeFieldMapper f1 = new FakeFieldMapper("foo"); MockFieldMapper f1 = new MockFieldMapper("foo");
FakeFieldMapper f2 = new FakeFieldMapper("bar"); MockFieldMapper f2 = new MockFieldMapper("bar");
FieldTypeLookup lookup = new FieldTypeLookup(); FieldTypeLookup lookup = new FieldTypeLookup();
lookup = lookup.copyAndAddAll("type", newList(f1, f2), randomBoolean()); lookup = lookup.copyAndAddAll("type", newList(f1, f2), randomBoolean());
Collection<String> names = lookup.simpleMatchToFullName("b*"); Collection<String> names = lookup.simpleMatchToFullName("b*");
@ -175,7 +174,7 @@ public class FieldTypeLookupTests extends ESTestCase {
} }
public void testIteratorImmutable() { public void testIteratorImmutable() {
FakeFieldMapper f1 = new FakeFieldMapper("foo"); MockFieldMapper f1 = new MockFieldMapper("foo");
FieldTypeLookup lookup = new FieldTypeLookup(); FieldTypeLookup lookup = new FieldTypeLookup();
lookup = lookup.copyAndAddAll("type", newList(f1), randomBoolean()); lookup = lookup.copyAndAddAll("type", newList(f1), randomBoolean());
@ -194,59 +193,6 @@ public class FieldTypeLookupTests extends ESTestCase {
return Arrays.asList(mapper); return Arrays.asList(mapper);
} }
// this sucks how much must be overridden just do get a dummy field mapper...
static class FakeFieldMapper extends FieldMapper {
static Settings dummySettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT.id).build();
public FakeFieldMapper(String fullName) {
super(fullName, makeFieldType(fullName), makeFieldType(fullName), dummySettings, null, null);
}
public FakeFieldMapper(String fullName, MappedFieldType fieldType) {
super(fullName, fieldType, fieldType, dummySettings, null, null);
}
static MappedFieldType makeFieldType(String fullName) {
FakeFieldType fieldType = new FakeFieldType();
fieldType.setName(fullName);
return fieldType;
}
static MappedFieldType makeOtherFieldType(String fullName) {
OtherFakeFieldType fieldType = new OtherFakeFieldType();
fieldType.setName(fullName);
return fieldType;
}
static class FakeFieldType extends MappedFieldType {
public FakeFieldType() {}
protected FakeFieldType(FakeFieldType ref) {
super(ref);
}
@Override
public MappedFieldType clone() {
return new FakeFieldType(this);
}
@Override
public String typeName() {
return "faketype";
}
}
static class OtherFakeFieldType extends MappedFieldType {
public OtherFakeFieldType() {}
protected OtherFakeFieldType(OtherFakeFieldType ref) {
super(ref);
}
@Override
public MappedFieldType clone() {
return new OtherFakeFieldType(this);
}
@Override
public String typeName() {
return "otherfaketype";
}
}
@Override
protected String contentType() { return null; }
@Override
protected void parseCreateField(ParseContext context, List list) throws IOException {}
}
private int size(Iterator<MappedFieldType> iterator) { private int size(Iterator<MappedFieldType> iterator) {
if (iterator == null) { if (iterator == null) {
throw new NullPointerException("iterator"); throw new NullPointerException("iterator");
@ -258,4 +204,23 @@ public class FieldTypeLookupTests extends ESTestCase {
} }
return count; return count;
} }
// Minimal MappedFieldType with a distinct typeName ("otherfaketype"), used by the
// compatibility tests above to provoke a type-mismatch failure against "faketype".
static class OtherFakeFieldType extends MappedFieldType {
    public OtherFakeFieldType() {
    }

    // Copy constructor required by the MappedFieldType clone contract.
    protected OtherFakeFieldType(OtherFakeFieldType ref) {
        super(ref);
    }

    @Override
    public MappedFieldType clone() {
        return new OtherFakeFieldType(this);
    }

    @Override
    public String typeName() {
        return "otherfaketype";
    }
}
} }

View File

@ -28,32 +28,28 @@ import org.elasticsearch.test.ESSingleNodeTestCase;
import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.containsString;
/**
*/
public class SimpleObjectMappingTests extends ESSingleNodeTestCase { public class SimpleObjectMappingTests extends ESSingleNodeTestCase {
public void testDifferentInnerObjectTokenFailure() throws Exception { public void testDifferentInnerObjectTokenFailure() throws Exception {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.endObject().endObject().string(); .endObject().endObject().string();
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping)); DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
try { IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> {
defaultMapper.parse("test", "type", "1", new BytesArray(" {\n" + defaultMapper.parse("test", "type", "1", new BytesArray(" {\n" +
" \"object\": {\n" + " \"object\": {\n" +
" \"array\":[\n" + " \"array\":[\n" +
" {\n" + " {\n" +
" \"object\": { \"value\": \"value\" }\n" + " \"object\": { \"value\": \"value\" }\n" +
" },\n" + " },\n" +
" {\n" + " {\n" +
" \"object\":\"value\"\n" + " \"object\":\"value\"\n" +
" }\n" + " }\n" +
" ]\n" + " ]\n" +
" },\n" + " },\n" +
" \"value\":\"value\"\n" + " \"value\":\"value\"\n" +
" }")); " }"));
fail(); });
} catch (MapperParsingException e) { assertTrue(e.getMessage(), e.getMessage().contains("different type"));
// all is well
}
} }
public void testEmptyArrayProperties() throws Exception { public void testEmptyArrayProperties() throws Exception {

View File

@ -0,0 +1,78 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.mapper;
import java.io.IOException;
import java.util.List;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.settings.Settings;
// Minimal concrete FieldMapper for tests; a real field mapper requires far more
// setup than a dummy should need, hence the helpers below.
public class MockFieldMapper extends FieldMapper {
    // Index settings stub: only the index-created version is supplied, which is
    // what the FieldMapper super constructor reads from the settings.
    static Settings dummySettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT.id).build();

    public MockFieldMapper(String fullName) {
        this(fullName, new FakeFieldType());
    }

    public MockFieldMapper(String fullName, MappedFieldType fieldType) {
        // The mapper's simple name is the last dot-separated segment of fullName,
        // while the field type carries the full dotted path. The same fieldType
        // instance is deliberately passed as both current and default field type.
        super(findSimpleName(fullName), setName(fullName, fieldType), setName(fullName, fieldType), dummySettings, null, null);
    }

    // Sets the full name on the given field type and returns it, so it can be
    // used inline in the super(...) call above.
    static MappedFieldType setName(String fullName, MappedFieldType fieldType) {
        fieldType.setName(fullName);
        return fieldType;
    }

    // Returns the text after the last '.', or the whole name if it has no dot
    // (lastIndexOf returns -1, so substring(0) yields the full name).
    static String findSimpleName(String fullName) {
        int ndx = fullName.lastIndexOf('.');
        return fullName.substring(ndx + 1);
    }

    // Trivial field type whose typeName is "faketype"; paired in tests with
    // OtherFakeFieldType to trigger type-mismatch checks.
    static class FakeFieldType extends MappedFieldType {
        public FakeFieldType() {
        }

        // Copy constructor required by the MappedFieldType clone contract.
        protected FakeFieldType(FakeFieldType ref) {
            super(ref);
        }

        @Override
        public MappedFieldType clone() {
            return new FakeFieldType(this);
        }

        @Override
        public String typeName() {
            return "faketype";
        }
    }

    @Override
    protected String contentType() {
        return null;
    }

    @Override
    protected void parseCreateField(ParseContext context, List list) throws IOException {
        // no-op: mock mappers never parse document content
    }
}