Merge pull request #16798 from rjernst/dots2

Moved dynamic field handling in doc parsing to end of parsing
Ryan Ernst 2016-03-10 10:03:52 -08:00
commit f3195cb514
8 changed files with 416 additions and 227 deletions
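In rough terms, the patch stops threading mapping updates back up through every parse method. Parsing now just records each dynamically created mapper on the parse context (ParseContext.addDynamicMapper), and once the document has been parsed, DocumentParser.createDynamicUpdate sorts those mappers by full name and folds the ones sharing a dotted prefix into a single Mapping update. The snippet below is only a toy illustration of that sort-then-group idea, not code from this change; the class and field names are made up.

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;

// Toy illustration of the ordering used by createDynamicUpdate: sorting the dotted
// field names makes every group of fields under the same parent object contiguous,
// so each parent update only needs to be built and merged once.
public class DynamicFieldOrdering {
    public static void main(String[] args) {
        List<String> dynamicFields = new ArrayList<>(
                Arrays.asList("foo.bar.baz", "foo.field", "a.b.c", "foo.bar.qux", "a.x"));
        Collections.sort(dynamicFields);
        // Prints [a.b.c, a.x, foo.bar.baz, foo.bar.qux, foo.field]:
        // everything under "a" first, then "foo.bar.*", then "foo.field".
        System.out.println(dynamicFields);
    }
}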

View File

@ -19,12 +19,22 @@
package org.elasticsearch.index.mapper;
import java.io.Closeable;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.util.CloseableThreadLocal;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.joda.FormatDateTimeFormatter;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.IndexSettings;
@ -48,15 +58,8 @@ import org.elasticsearch.index.mapper.object.ArrayValueMapperParser;
import org.elasticsearch.index.mapper.object.ObjectMapper;
import org.elasticsearch.index.mapper.object.RootObjectMapper;
import java.io.Closeable;
import java.io.IOException;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
/** A parser for documents, given mappings from a DocumentMapper */
- class DocumentParser implements Closeable {
+ final class DocumentParser implements Closeable {
private CloseableThreadLocal<ParseContext.InternalParseContext> cache = new CloseableThreadLocal<ParseContext.InternalParseContext>() {
@Override
@ -99,7 +102,7 @@ class DocumentParser implements Closeable {
reverseOrder(context);
- ParsedDocument doc = parsedDocument(source, context, update(context, mapping));
+ ParsedDocument doc = parsedDocument(source, context, createDynamicUpdate(mapping, docMapper, context.getDynamicMappers()));
// reset the context to free up memory
context.reset(null, null, null);
return doc;
@ -116,10 +119,7 @@ class DocumentParser implements Closeable {
// entire type is disabled
parser.skipChildren();
} else if (emptyDoc == false) {
- Mapper update = parseObject(context, mapping.root, true);
+ parseObjectOrNested(context, mapping.root, true);
if (update != null) {
context.addDynamicMappingsUpdate(update);
}
}
for (MetadataFieldMapper metadataMapper : mapping.metadataMappers) {
@ -201,11 +201,6 @@ class DocumentParser implements Closeable {
}
private static Mapping update(ParseContext.InternalParseContext context, Mapping mapping) {
Mapper rootDynamicUpdate = context.dynamicMappingsUpdate();
return rootDynamicUpdate != null ? mapping.mappingUpdate(rootDynamicUpdate) : null;
}
private static MapperParsingException wrapInMapperParsingException(SourceToParse source, Throwable e) {
// if its already a mapper parsing exception, no need to wrap it...
if (e instanceof MapperParsingException) {
@ -220,10 +215,113 @@ class DocumentParser implements Closeable {
return new MapperParsingException("failed to parse", e);
}
- static ObjectMapper parseObject(ParseContext context, ObjectMapper mapper, boolean atRoot) throws IOException {
+ /** Creates a Mapping containing any dynamically added fields, or returns null if there were no dynamic mappings. */
static Mapping createDynamicUpdate(Mapping mapping, DocumentMapper docMapper, List<Mapper> dynamicMappers) {
if (dynamicMappers.isEmpty()) {
return null;
}
// We build a mapping by first sorting the mappers, so that all mappers containing a common prefix
// will be processed in a contiguous block. When the prefix is no longer seen, we pop the extra elements
// off the stack, merging them upwards into the existing mappers.
Collections.sort(dynamicMappers, (Mapper o1, Mapper o2) -> o1.name().compareTo(o2.name()));
Iterator<Mapper> dynamicMapperItr = dynamicMappers.iterator();
List<ObjectMapper> parentMappers = new ArrayList<>();
Mapper firstUpdate = dynamicMapperItr.next();
parentMappers.add(createUpdate(mapping.root(), firstUpdate.name().split("\\."), 0, firstUpdate));
Mapper previousMapper = null;
while (dynamicMapperItr.hasNext()) {
Mapper newMapper = dynamicMapperItr.next();
if (previousMapper != null && newMapper.name().equals(previousMapper.name())) {
// We can see the same mapper more than once, for example, if we had foo.bar and foo.baz, where
// foo did not yet exist. This will create 2 copies in dynamic mappings, which should be identical.
// Here we just skip over the duplicates, but we merge them to ensure there are no conflicts.
newMapper.merge(previousMapper, false);
continue;
}
previousMapper = newMapper;
String[] nameParts = newMapper.name().split("\\.");
// find common elements with the previously processed dynamic mapper
int keepBefore = 1;
while (keepBefore < parentMappers.size() &&
parentMappers.get(keepBefore).simpleName().equals(nameParts[keepBefore - 1])) {
++keepBefore;
}
popMappers(parentMappers, keepBefore, true);
if (keepBefore < nameParts.length) {
String updateParentName = nameParts[keepBefore - 1];
final ObjectMapper lastParent = parentMappers.get(parentMappers.size() - 1);
Mapper updateParent = lastParent.getMapper(updateParentName);
if (updateParent == null) {
// the parent we need is not on the stack, so look it up in the full mappings
if (keepBefore > 1) {
// only prefix with parent mapper if the parent mapper isn't the root (which has a fake name)
updateParentName = lastParent.name() + '.' + updateParentName;
}
updateParent = docMapper.objectMappers().get(updateParentName);
}
assert updateParent instanceof ObjectMapper;
newMapper = createUpdate((ObjectMapper)updateParent, nameParts, keepBefore, newMapper);
}
if (newMapper instanceof ObjectMapper) {
parentMappers.add((ObjectMapper)newMapper);
} else {
addToLastMapper(parentMappers, newMapper, true);
}
}
popMappers(parentMappers, 1, true);
assert parentMappers.size() == 1;
return mapping.mappingUpdate(parentMappers.get(0));
}
private static void popMappers(List<ObjectMapper> parentMappers, int keepBefore, boolean merge) {
assert keepBefore >= 1; // never remove the root mapper
// pop off parent mappers not needed by the current mapper,
// merging them backwards since they are immutable
for (int i = parentMappers.size() - 1; i >= keepBefore; --i) {
addToLastMapper(parentMappers, parentMappers.remove(i), merge);
}
}
/**
* Adds a mapper as an update into the last mapper. If merge is true, the new mapper
* will be merged in with other child mappers of the last parent, otherwise it will be a new update.
*/
private static void addToLastMapper(List<ObjectMapper> parentMappers, Mapper mapper, boolean merge) {
assert parentMappers.size() >= 1;
int lastIndex = parentMappers.size() - 1;
ObjectMapper withNewMapper = parentMappers.get(lastIndex).mappingUpdate(mapper);
if (merge) {
withNewMapper = parentMappers.get(lastIndex).merge(withNewMapper, false);
}
parentMappers.set(lastIndex, withNewMapper);
}
/** Build an update for the parent which will contain the given mapper and any intermediate fields. */
private static ObjectMapper createUpdate(ObjectMapper parent, String[] nameParts, int i, Mapper mapper) {
List<ObjectMapper> parentMappers = new ArrayList<>();
ObjectMapper previousIntermediate = parent;
for (; i < nameParts.length - 1; ++i) {
Mapper intermediate = previousIntermediate.getMapper(nameParts[i]);
assert intermediate instanceof ObjectMapper;
parentMappers.add((ObjectMapper)intermediate);
previousIntermediate = (ObjectMapper)intermediate;
}
if (parentMappers.isEmpty() == false) {
// add the new mapper to the stack, and pop down to the original parent level
addToLastMapper(parentMappers, mapper, false);
popMappers(parentMappers, 1, false);
mapper = parentMappers.get(0);
}
return parent.mappingUpdate(mapper);
}
static void parseObjectOrNested(ParseContext context, ObjectMapper mapper, boolean atRoot) throws IOException {
if (mapper.isEnabled() == false) {
context.parser().skipChildren();
- return null;
+ return;
}
XContentParser parser = context.parser();
@ -234,7 +332,7 @@ class DocumentParser implements Closeable {
XContentParser.Token token = parser.currentToken();
if (token == XContentParser.Token.VALUE_NULL) {
// the object is null ("obj1" : null), simply bail
- return null;
+ return;
}
if (token.isValue()) {
@ -256,21 +354,19 @@ class DocumentParser implements Closeable {
}
ObjectMapper update = null;
- update = innerParseObject(context, mapper, parser, currentFieldName, token, update);
+ innerParseObject(context, mapper, parser, currentFieldName, token);
// restore the enable path flag
if (nested.isNested()) {
nested(context, nested);
}
return update;
}
- private static ObjectMapper innerParseObject(ParseContext context, ObjectMapper mapper, XContentParser parser, String currentFieldName, XContentParser.Token token, ObjectMapper update) throws IOException {
+ private static void innerParseObject(ParseContext context, ObjectMapper mapper, XContentParser parser, String currentFieldName, XContentParser.Token token) throws IOException {
while (token != XContentParser.Token.END_OBJECT) {
ObjectMapper newUpdate = null;
if (token == XContentParser.Token.START_OBJECT) {
- newUpdate = parseObject(context, mapper, currentFieldName);
+ parseObject(context, mapper, currentFieldName);
} else if (token == XContentParser.Token.START_ARRAY) {
- newUpdate = parseArray(context, mapper, currentFieldName);
+ parseArray(context, mapper, currentFieldName);
} else if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (token == XContentParser.Token.VALUE_NULL) {
@ -278,18 +374,10 @@ class DocumentParser implements Closeable {
} else if (token == null) {
throw new MapperParsingException("object mapping for [" + mapper.name() + "] tried to parse field [" + currentFieldName + "] as object, but got EOF, has a concrete value been provided to it?");
} else if (token.isValue()) {
- newUpdate = parseValue(context, mapper, currentFieldName, token);
+ parseValue(context, mapper, currentFieldName, token);
}
token = parser.nextToken();
if (newUpdate != null) {
if (update == null) {
update = newUpdate;
} else {
update = update.merge(newUpdate, false);
}
}
} }
return update;
}
private static void nested(ParseContext context, ObjectMapper.Nested nested) {
@ -335,33 +423,29 @@ class DocumentParser implements Closeable {
return context;
}
- private static Mapper parseObjectOrField(ParseContext context, Mapper mapper) throws IOException {
+ private static void parseObjectOrField(ParseContext context, Mapper mapper) throws IOException {
if (mapper instanceof ObjectMapper) {
- return parseObject(context, (ObjectMapper) mapper, false);
+ parseObjectOrNested(context, (ObjectMapper) mapper, false);
} else {
FieldMapper fieldMapper = (FieldMapper)mapper;
Mapper update = fieldMapper.parse(context);
if (update != null) {
context.addDynamicMapper(update);
}
if (fieldMapper.copyTo() != null) {
parseCopyFields(context, fieldMapper, fieldMapper.copyTo().copyToFields());
}
return update;
}
}
private static ObjectMapper parseObject(final ParseContext context, ObjectMapper mapper, String currentFieldName) throws IOException {
- if (currentFieldName == null) {
+ assert currentFieldName != null;
throw new MapperParsingException("object mapping [" + mapper.name() + "] trying to serialize an object with no field associated with it, current value [" + context.parser().textOrNull() + "]");
}
context.path().add(currentFieldName);
ObjectMapper update = null;
Mapper objectMapper = mapper.getMapper(currentFieldName);
if (objectMapper != null) {
- final Mapper subUpdate = parseObjectOrField(context, objectMapper);
+ parseObjectOrField(context, objectMapper);
if (subUpdate != null) {
// propagate mapping update
update = mapper.mappingUpdate(subUpdate);
}
} else {
ObjectMapper.Dynamic dynamic = mapper.dynamic();
if (dynamic == null) {
@ -382,8 +466,9 @@ class DocumentParser implements Closeable {
}
Mapper.BuilderContext builderContext = new Mapper.BuilderContext(context.indexSettings(), context.path());
objectMapper = builder.build(builderContext);
context.addDynamicMapper(objectMapper);
context.path().add(currentFieldName);
- update = mapper.mappingUpdate(parseAndMergeUpdate(objectMapper, context));
+ parseObjectOrField(context, objectMapper);
} else {
// not dynamic, read everything up to end object
context.parser().skipChildren();
@ -394,7 +479,7 @@ class DocumentParser implements Closeable {
return update;
}
- private static ObjectMapper parseArray(ParseContext context, ObjectMapper parentMapper, String lastFieldName) throws IOException {
+ private static void parseArray(ParseContext context, ObjectMapper parentMapper, String lastFieldName) throws IOException {
String arrayFieldName = lastFieldName;
Mapper mapper = parentMapper.getMapper(lastFieldName);
if (mapper != null) {
@ -402,15 +487,9 @@ class DocumentParser implements Closeable {
// expects an array, if so we pass the context straight to the mapper and if not
// we serialize the array components
if (mapper instanceof ArrayValueMapperParser) {
- final Mapper subUpdate = parseObjectOrField(context, mapper);
+ parseObjectOrField(context, mapper);
if (subUpdate != null) {
// propagate the mapping update
return parentMapper.mappingUpdate(subUpdate);
} else {
return null;
}
} else {
- return parseNonDynamicArray(context, parentMapper, lastFieldName, arrayFieldName);
+ parseNonDynamicArray(context, parentMapper, lastFieldName, arrayFieldName);
}
} else {
@ -423,31 +502,34 @@ class DocumentParser implements Closeable {
} else if (dynamic == ObjectMapper.Dynamic.TRUE) {
Mapper.Builder builder = context.root().findTemplateBuilder(context, arrayFieldName, "object");
if (builder == null) {
- return parseNonDynamicArray(context, parentMapper, lastFieldName, arrayFieldName);
+ parseNonDynamicArray(context, parentMapper, lastFieldName, arrayFieldName);
return;
}
Mapper.BuilderContext builderContext = new Mapper.BuilderContext(context.indexSettings(), context.path());
mapper = builder.build(builderContext);
- if (mapper != null && mapper instanceof ArrayValueMapperParser) {
+ assert mapper != null;
if (mapper instanceof ArrayValueMapperParser) {
context.addDynamicMapper(mapper);
context.path().add(arrayFieldName);
- mapper = parseAndMergeUpdate(mapper, context);
+ parseObjectOrField(context, mapper);
return parentMapper.mappingUpdate(mapper);
} else {
- return parseNonDynamicArray(context, parentMapper, lastFieldName, arrayFieldName);
+ parseNonDynamicArray(context, parentMapper, lastFieldName, arrayFieldName);
}
} else {
- return parseNonDynamicArray(context, parentMapper, lastFieldName, arrayFieldName);
+ // TODO: shouldn't this skip, not parse?
parseNonDynamicArray(context, parentMapper, lastFieldName, arrayFieldName);
}
}
}
- private static ObjectMapper parseNonDynamicArray(ParseContext context, ObjectMapper mapper, String lastFieldName, String arrayFieldName) throws IOException {
+ private static void parseNonDynamicArray(ParseContext context, ObjectMapper mapper, String lastFieldName, String arrayFieldName) throws IOException {
XContentParser parser = context.parser();
XContentParser.Token token;
while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
if (token == XContentParser.Token.START_OBJECT) {
- return parseObject(context, mapper, lastFieldName);
+ parseObject(context, mapper, lastFieldName);
} else if (token == XContentParser.Token.START_ARRAY) {
- return parseArray(context, mapper, lastFieldName);
+ parseArray(context, mapper, lastFieldName);
} else if (token == XContentParser.Token.FIELD_NAME) {
lastFieldName = parser.currentName();
} else if (token == XContentParser.Token.VALUE_NULL) {
@ -455,25 +537,20 @@ class DocumentParser implements Closeable {
} else if (token == null) {
throw new MapperParsingException("object mapping for [" + mapper.name() + "] with array for [" + arrayFieldName + "] tried to parse as array, but got EOF, is there a mismatch in types for the same field?");
} else {
- return parseValue(context, mapper, lastFieldName, token);
+ parseValue(context, mapper, lastFieldName, token);
}
}
return null;
}
- private static ObjectMapper parseValue(final ParseContext context, ObjectMapper parentMapper, String currentFieldName, XContentParser.Token token) throws IOException {
+ private static void parseValue(final ParseContext context, ObjectMapper parentMapper, String currentFieldName, XContentParser.Token token) throws IOException {
if (currentFieldName == null) {
throw new MapperParsingException("object mapping [" + parentMapper.name() + "] trying to serialize a value with no field associated with it, current value [" + context.parser().textOrNull() + "]");
}
Mapper mapper = parentMapper.getMapper(currentFieldName);
if (mapper != null) {
- Mapper subUpdate = parseObjectOrField(context, mapper);
+ parseObjectOrField(context, mapper);
if (subUpdate == null) {
return null;
}
return parentMapper.mappingUpdate(subUpdate);
} else {
- return parseDynamicValue(context, parentMapper, currentFieldName, token);
+ parseDynamicValue(context, parentMapper, currentFieldName, token);
}
}
@ -641,7 +718,7 @@ class DocumentParser implements Closeable {
throw new IllegalStateException("Can't handle serializing a dynamic type with content token [" + token + "] and field name [" + currentFieldName + "]");
}
- private static ObjectMapper parseDynamicValue(final ParseContext context, ObjectMapper parentMapper, String currentFieldName, XContentParser.Token token) throws IOException {
+ private static void parseDynamicValue(final ParseContext context, ObjectMapper parentMapper, String currentFieldName, XContentParser.Token token) throws IOException {
ObjectMapper.Dynamic dynamic = parentMapper.dynamic();
if (dynamic == null) {
dynamic = dynamicOrDefault(context.root().dynamic());
@ -650,7 +727,7 @@ class DocumentParser implements Closeable {
throw new StrictDynamicMappingException(parentMapper.fullPath(), currentFieldName);
}
if (dynamic == ObjectMapper.Dynamic.FALSE) {
- return null;
+ return;
}
final String path = context.path().pathAsText(currentFieldName);
final Mapper.BuilderContext builderContext = new Mapper.BuilderContext(context.indexSettings(), context.path());
@ -668,14 +745,9 @@ class DocumentParser implements Closeable {
// try to not introduce a conflict
mapper = mapper.updateFieldType(Collections.singletonMap(path, existingFieldType));
}
context.addDynamicMapper(mapper);
- mapper = parseAndMergeUpdate(mapper, context);
+ parseObjectOrField(context, mapper);
ObjectMapper update = null;
if (mapper != null) {
update = parentMapper.mappingUpdate(mapper);
}
return update;
}
/** Creates instances of the fields that the current field should be copied to */
@ -713,8 +785,9 @@ class DocumentParser implements Closeable {
// The path of the dest field might be completely different from the current one so we need to reset it
context = context.overridePath(new ContentPath(0));
- String[] paths = Strings.splitStringToArray(field, '.');
- String fieldName = paths[paths.length-1];
+ // TODO: why Strings.splitStringToArray instead of String.split?
+ final String[] paths = Strings.splitStringToArray(field, '.');
final String fieldName = paths[paths.length-1];
ObjectMapper mapper = context.root();
ObjectMapper[] mappers = new ObjectMapper[paths.length-1];
if (paths.length > 1) {
@ -745,6 +818,7 @@ class DocumentParser implements Closeable {
if (mapper.nested() != ObjectMapper.Nested.NO) {
throw new MapperParsingException("It is forbidden to create dynamic nested objects ([" + context.path().pathAsText(paths[i]) + "]) through `copy_to`");
}
context.addDynamicMapper(mapper);
break;
case FALSE:
// Maybe we should log something to tell the user that the copy_to is ignored in this case.
@ -759,36 +833,10 @@ class DocumentParser implements Closeable {
parent = mapper;
}
}
- ObjectMapper update = parseDynamicValue(context, mapper, fieldName, context.parser().currentToken());
+ parseDynamicValue(context, mapper, fieldName, context.parser().currentToken());
assert update != null; // we are parsing a dynamic value so we necessarily created a new mapping
if (paths.length > 1) {
for (int i = paths.length - 2; i >= 0; i--) {
ObjectMapper parent = context.root();
if (i > 0) {
parent = mappers[i-1];
}
assert parent != null;
update = parent.mappingUpdate(update);
}
}
context.addDynamicMappingsUpdate(update);
}
}
/**
* Parse the given {@code context} with the given {@code mapper} and apply
* the potential mapping update in-place. This method is useful when
* composing mapping updates.
*/
private static <M extends Mapper> M parseAndMergeUpdate(M mapper, ParseContext context) throws IOException {
final Mapper update = parseObjectOrField(context, mapper);
if (update != null) {
mapper = (M) mapper.merge(update, false);
}
return mapper;
}
private static ObjectMapper.Dynamic dynamicOrDefault(ObjectMapper.Dynamic dynamic) {
return dynamic == null ? ObjectMapper.Dynamic.TRUE : dynamic;
}

View File

@ -76,6 +76,7 @@ public abstract class Mapper implements ToXContent, Iterable<Mapper> {
return this.name;
}
/** Returns a newly built mapper. */
public abstract Y build(BuilderContext context);
}

View File

@ -331,13 +331,13 @@ public abstract class ParseContext {
}
@Override
- public void addDynamicMappingsUpdate(Mapper update) {
- in.addDynamicMappingsUpdate(update);
+ public void addDynamicMapper(Mapper update) {
+ in.addDynamicMapper(update);
}
@Override
- public Mapper dynamicMappingsUpdate() {
- return in.dynamicMappingsUpdate();
+ public List<Mapper> getDynamicMappers() {
+ return in.getDynamicMappers();
}
}
@ -369,7 +369,7 @@ public abstract class ParseContext {
private AllEntries allEntries = new AllEntries();
- private Mapper dynamicMappingsUpdate = null;
+ private List<Mapper> dynamicMappers = new ArrayList<>();
public InternalParseContext(@Nullable Settings indexSettings, DocumentMapperParser docMapperParser, DocumentMapper docMapper, ContentPath path) {
this.indexSettings = indexSettings;
@ -394,7 +394,7 @@ public abstract class ParseContext {
this.source = source == null ? null : sourceToParse.source();
this.path.reset();
this.allEntries = new AllEntries();
- this.dynamicMappingsUpdate = null;
+ this.dynamicMappers = new ArrayList<>();
}
@Override
@ -536,18 +536,13 @@ public abstract class ParseContext {
}
@Override
- public void addDynamicMappingsUpdate(Mapper mapper) {
- assert mapper instanceof RootObjectMapper : mapper;
+ public void addDynamicMapper(Mapper mapper) {
+ dynamicMappers.add(mapper);
if (dynamicMappingsUpdate == null) {
dynamicMappingsUpdate = mapper;
} else {
dynamicMappingsUpdate = dynamicMappingsUpdate.merge(mapper, false);
}
}
@Override
- public Mapper dynamicMappingsUpdate() {
- return dynamicMappingsUpdate;
+ public List<Mapper> getDynamicMappers() {
+ return dynamicMappers;
}
}
@ -747,12 +742,12 @@ public abstract class ParseContext {
public abstract StringBuilder stringBuilder();
/**
- * Add a dynamic update to the root object mapper.
+ * Add a new mapper dynamically created while parsing.
*/
- public abstract void addDynamicMappingsUpdate(Mapper update);
+ public abstract void addDynamicMapper(Mapper update);
/**
- * Get dynamic updates to the root object mapper.
+ * Get dynamic mappers created while parsing.
*/
- public abstract Mapper dynamicMappingsUpdate();
+ public abstract List<Mapper> getDynamicMappers();
}

View File

@ -19,12 +19,20 @@
package org.elasticsearch.index.mapper;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.mapper.internal.UidFieldMapper;
import org.elasticsearch.index.mapper.object.ObjectMapper;
import org.elasticsearch.test.ESSingleNodeTestCase;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
// TODO: make this a real unit test
public class DocumentParserTests extends ESSingleNodeTestCase {
@ -61,4 +69,97 @@ public class DocumentParserTests extends ESSingleNodeTestCase {
assertNotNull(doc.rootDoc().getField("bar"));
assertNotNull(doc.rootDoc().getField(UidFieldMapper.NAME));
}
DocumentMapper createDummyMapping(MapperService mapperService) throws Exception {
String mapping = jsonBuilder().startObject().startObject("type").startObject("properties")
.startObject("a").startObject("properties")
.startObject("b").field("type", "object").startObject("properties")
.startObject("c").field("type", "object")
.endObject().endObject().endObject().endObject().endObject().endObject().endObject().endObject().string();
DocumentMapper defaultMapper = mapperService.documentMapperParser().parse("type", new CompressedXContent(mapping));
return defaultMapper;
}
// creates an object mapper, which is about 100x harder than it should be....
ObjectMapper createObjectMapper(MapperService mapperService, String name) throws Exception {
String[] nameParts = name.split("\\.");
ContentPath path = new ContentPath();
for (int i = 0; i < nameParts.length - 1; ++i) {
path.add(nameParts[i]);
}
ParseContext context = new ParseContext.InternalParseContext(Settings.EMPTY,
mapperService.documentMapperParser(), mapperService.documentMapper("type"), path);
Mapper.Builder builder = new ObjectMapper.Builder(nameParts[nameParts.length - 1]).enabled(true);
Mapper.BuilderContext builderContext = new Mapper.BuilderContext(context.indexSettings(), context.path());
return (ObjectMapper)builder.build(builderContext);
}
public void testEmptyMappingUpdate() throws Exception {
DocumentMapper docMapper = createDummyMapping(createIndex("test").mapperService());
assertNull(DocumentParser.createDynamicUpdate(docMapper.mapping(), docMapper, Collections.emptyList()));
}
public void testSingleMappingUpdate() throws Exception {
DocumentMapper docMapper = createDummyMapping(createIndex("test").mapperService());
List<Mapper> updates = Collections.singletonList(new MockFieldMapper("foo"));
Mapping mapping = DocumentParser.createDynamicUpdate(docMapper.mapping(), docMapper, updates);
assertNotNull(mapping.root().getMapper("foo"));
}
public void testSubfieldMappingUpdate() throws Exception {
DocumentMapper docMapper = createDummyMapping(createIndex("test").mapperService());
List<Mapper> updates = Collections.singletonList(new MockFieldMapper("a.foo"));
Mapping mapping = DocumentParser.createDynamicUpdate(docMapper.mapping(), docMapper, updates);
Mapper aMapper = mapping.root().getMapper("a");
assertNotNull(aMapper);
assertTrue(aMapper instanceof ObjectMapper);
assertNotNull(((ObjectMapper)aMapper).getMapper("foo"));
assertNull(((ObjectMapper)aMapper).getMapper("b"));
}
public void testMultipleSubfieldMappingUpdate() throws Exception {
DocumentMapper docMapper = createDummyMapping(createIndex("test").mapperService());
List<Mapper> updates = new ArrayList<>();
updates.add(new MockFieldMapper("a.foo"));
updates.add(new MockFieldMapper("a.bar"));
Mapping mapping = DocumentParser.createDynamicUpdate(docMapper.mapping(), docMapper, updates);
Mapper aMapper = mapping.root().getMapper("a");
assertNotNull(aMapper);
assertTrue(aMapper instanceof ObjectMapper);
assertNotNull(((ObjectMapper)aMapper).getMapper("foo"));
assertNotNull(((ObjectMapper)aMapper).getMapper("bar"));
assertNull(((ObjectMapper)aMapper).getMapper("b"));
}
public void testDeepSubfieldMappingUpdate() throws Exception {
DocumentMapper docMapper = createDummyMapping(createIndex("test").mapperService());
List<Mapper> updates = Collections.singletonList(new MockFieldMapper("a.b.foo"));
Mapping mapping = DocumentParser.createDynamicUpdate(docMapper.mapping(), docMapper, updates);
Mapper aMapper = mapping.root().getMapper("a");
assertNotNull(aMapper);
assertTrue(aMapper instanceof ObjectMapper);
Mapper bMapper = ((ObjectMapper)aMapper).getMapper("b");
assertTrue(bMapper instanceof ObjectMapper);
assertNotNull(((ObjectMapper)bMapper).getMapper("foo"));
assertNull(((ObjectMapper)bMapper).getMapper("c"));
}
public void testObjectMappingUpdate() throws Exception {
MapperService mapperService = createIndex("test").mapperService();
DocumentMapper docMapper = createDummyMapping(mapperService);
List<Mapper> updates = new ArrayList<>();
updates.add(createObjectMapper(mapperService, "foo"));
updates.add(createObjectMapper(mapperService, "foo.bar"));
updates.add(new MockFieldMapper("foo.bar.baz"));
updates.add(new MockFieldMapper("foo.field"));
Mapping mapping = DocumentParser.createDynamicUpdate(docMapper.mapping(), docMapper, updates);
Mapper fooMapper = mapping.root().getMapper("foo");
assertNotNull(fooMapper);
assertTrue(fooMapper instanceof ObjectMapper);
assertNotNull(((ObjectMapper)fooMapper).getMapper("field"));
Mapper barMapper = ((ObjectMapper)fooMapper).getMapper("bar");
assertTrue(barMapper instanceof ObjectMapper);
assertNotNull(((ObjectMapper)barMapper).getMapper("baz"));
}
}

View File

@ -42,6 +42,7 @@ import org.elasticsearch.index.mapper.core.TextFieldMapper;
import org.elasticsearch.test.ESSingleNodeTestCase;
import java.io.IOException;
import java.util.List;
import static java.util.Collections.emptyMap;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
@ -211,7 +212,9 @@ public class DynamicMappingTests extends ESSingleNodeTestCase {
ctx.reset(XContentHelper.createParser(source.source()), new ParseContext.Document(), source);
assertEquals(XContentParser.Token.START_OBJECT, ctx.parser().nextToken());
ctx.parser().nextToken();
- return DocumentParser.parseObject(ctx, mapper.root(), true);
+ DocumentParser.parseObjectOrNested(ctx, mapper.root(), true);
Mapping mapping = DocumentParser.createDynamicUpdate(mapper.mapping(), mapper, ctx.getDynamicMappers());
return mapping == null ? null : mapping.root();
}
public void testDynamicMappingsNotNeeded() throws Exception {

View File

@ -19,12 +19,8 @@
package org.elasticsearch.index.mapper;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.test.ESTestCase;
import java.io.IOException;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
@ -59,7 +55,7 @@ public class FieldTypeLookupTests extends ESTestCase {
public void testAddNewField() {
FieldTypeLookup lookup = new FieldTypeLookup();
- FakeFieldMapper f = new FakeFieldMapper("foo");
+ MockFieldMapper f = new MockFieldMapper("foo");
FieldTypeLookup lookup2 = lookup.copyAndAddAll("type", newList(f), randomBoolean());
assertNull(lookup.get("foo"));
assertNull(lookup.get("bar"));
@ -73,8 +69,8 @@ public class FieldTypeLookupTests extends ESTestCase {
}
public void testAddExistingField() {
- FakeFieldMapper f = new FakeFieldMapper("foo");
- FakeFieldMapper f2 = new FakeFieldMapper("foo");
+ MockFieldMapper f = new MockFieldMapper("foo");
+ MockFieldMapper f2 = new MockFieldMapper("foo");
FieldTypeLookup lookup = new FieldTypeLookup();
lookup = lookup.copyAndAddAll("type1", newList(f), randomBoolean());
FieldTypeLookup lookup2 = lookup.copyAndAddAll("type2", newList(f2), randomBoolean());
@ -84,8 +80,8 @@ public class FieldTypeLookupTests extends ESTestCase {
}
public void testAddExistingIndexName() {
- FakeFieldMapper f = new FakeFieldMapper("foo");
- FakeFieldMapper f2 = new FakeFieldMapper("bar");
+ MockFieldMapper f = new MockFieldMapper("foo");
+ MockFieldMapper f2 = new MockFieldMapper("bar");
FieldTypeLookup lookup = new FieldTypeLookup();
lookup = lookup.copyAndAddAll("type1", newList(f), randomBoolean());
FieldTypeLookup lookup2 = lookup.copyAndAddAll("type2", newList(f2), randomBoolean());
@ -96,8 +92,8 @@ public class FieldTypeLookupTests extends ESTestCase {
}
public void testAddExistingFullName() {
- FakeFieldMapper f = new FakeFieldMapper("foo");
- FakeFieldMapper f2 = new FakeFieldMapper("foo");
+ MockFieldMapper f = new MockFieldMapper("foo");
+ MockFieldMapper f2 = new MockFieldMapper("foo");
FieldTypeLookup lookup = new FieldTypeLookup();
try {
lookup.copyAndAddAll("type2", newList(f2), randomBoolean());
@ -107,12 +103,13 @@ public class FieldTypeLookupTests extends ESTestCase {
}
public void testCheckCompatibilityMismatchedTypes() {
- FieldMapper f1 = new FakeFieldMapper("foo");
+ FieldMapper f1 = new MockFieldMapper("foo");
FieldTypeLookup lookup = new FieldTypeLookup();
lookup = lookup.copyAndAddAll("type", newList(f1), randomBoolean());
- MappedFieldType ft2 = FakeFieldMapper.makeOtherFieldType("foo");
- FieldMapper f2 = new FakeFieldMapper("foo", ft2);
+ OtherFakeFieldType ft2 = new OtherFakeFieldType();
+ ft2.setName("foo");
FieldMapper f2 = new MockFieldMapper("foo", ft2);
try {
lookup.copyAndAddAll("type2", newList(f2), false);
fail("expected type mismatch");
@ -129,13 +126,14 @@ public class FieldTypeLookupTests extends ESTestCase {
}
public void testCheckCompatibilityConflict() {
- FieldMapper f1 = new FakeFieldMapper("foo");
+ FieldMapper f1 = new MockFieldMapper("foo");
FieldTypeLookup lookup = new FieldTypeLookup();
lookup = lookup.copyAndAddAll("type", newList(f1), randomBoolean());
- MappedFieldType ft2 = FakeFieldMapper.makeFieldType("foo");
+ MappedFieldType ft2 = new MockFieldMapper.FakeFieldType();
ft2.setName("foo");
ft2.setBoost(2.0f);
- FieldMapper f2 = new FakeFieldMapper("foo", ft2);
+ FieldMapper f2 = new MockFieldMapper("foo", ft2);
try {
// different type
lookup.copyAndAddAll("type2", newList(f2), false);
@ -146,9 +144,10 @@ public class FieldTypeLookupTests extends ESTestCase {
lookup.copyAndAddAll("type", newList(f2), false); // boost is updateable, so ok since we are implicitly updating all types
lookup.copyAndAddAll("type2", newList(f2), true); // boost is updateable, so ok if forcing
// now with a non changeable setting
- MappedFieldType ft3 = FakeFieldMapper.makeFieldType("foo");
+ MappedFieldType ft3 = new MockFieldMapper.FakeFieldType();
ft3.setName("foo");
ft3.setStored(true);
- FieldMapper f3 = new FakeFieldMapper("foo", ft3);
+ FieldMapper f3 = new MockFieldMapper("foo", ft3);
try {
lookup.copyAndAddAll("type2", newList(f3), false);
fail("expected conflict");
@ -165,8 +164,8 @@ public class FieldTypeLookupTests extends ESTestCase {
}
public void testSimpleMatchFullNames() {
- FakeFieldMapper f1 = new FakeFieldMapper("foo");
- FakeFieldMapper f2 = new FakeFieldMapper("bar");
+ MockFieldMapper f1 = new MockFieldMapper("foo");
+ MockFieldMapper f2 = new MockFieldMapper("bar");
FieldTypeLookup lookup = new FieldTypeLookup();
lookup = lookup.copyAndAddAll("type", newList(f1, f2), randomBoolean());
Collection<String> names = lookup.simpleMatchToFullName("b*");
@ -175,7 +174,7 @@ public class FieldTypeLookupTests extends ESTestCase {
}
public void testIteratorImmutable() {
- FakeFieldMapper f1 = new FakeFieldMapper("foo");
+ MockFieldMapper f1 = new MockFieldMapper("foo");
FieldTypeLookup lookup = new FieldTypeLookup();
lookup = lookup.copyAndAddAll("type", newList(f1), randomBoolean());
@ -194,59 +193,6 @@ public class FieldTypeLookupTests extends ESTestCase {
return Arrays.asList(mapper);
}
// this sucks how much must be overridden just do get a dummy field mapper...
static class FakeFieldMapper extends FieldMapper {
static Settings dummySettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT.id).build();
public FakeFieldMapper(String fullName) {
super(fullName, makeFieldType(fullName), makeFieldType(fullName), dummySettings, null, null);
}
public FakeFieldMapper(String fullName, MappedFieldType fieldType) {
super(fullName, fieldType, fieldType, dummySettings, null, null);
}
static MappedFieldType makeFieldType(String fullName) {
FakeFieldType fieldType = new FakeFieldType();
fieldType.setName(fullName);
return fieldType;
}
static MappedFieldType makeOtherFieldType(String fullName) {
OtherFakeFieldType fieldType = new OtherFakeFieldType();
fieldType.setName(fullName);
return fieldType;
}
static class FakeFieldType extends MappedFieldType {
public FakeFieldType() {}
protected FakeFieldType(FakeFieldType ref) {
super(ref);
}
@Override
public MappedFieldType clone() {
return new FakeFieldType(this);
}
@Override
public String typeName() {
return "faketype";
}
}
static class OtherFakeFieldType extends MappedFieldType {
public OtherFakeFieldType() {}
protected OtherFakeFieldType(OtherFakeFieldType ref) {
super(ref);
}
@Override
public MappedFieldType clone() {
return new OtherFakeFieldType(this);
}
@Override
public String typeName() {
return "otherfaketype";
}
}
@Override
protected String contentType() { return null; }
@Override
protected void parseCreateField(ParseContext context, List list) throws IOException {}
}
private int size(Iterator<MappedFieldType> iterator) {
if (iterator == null) {
throw new NullPointerException("iterator");
@ -258,4 +204,23 @@ public class FieldTypeLookupTests extends ESTestCase {
}
return count;
}
static class OtherFakeFieldType extends MappedFieldType {
public OtherFakeFieldType() {
}
protected OtherFakeFieldType(OtherFakeFieldType ref) {
super(ref);
}
@Override
public MappedFieldType clone() {
return new OtherFakeFieldType(this);
}
@Override
public String typeName() {
return "otherfaketype";
}
}
}

View File

@ -28,32 +28,28 @@ import org.elasticsearch.test.ESSingleNodeTestCase;
import static org.hamcrest.Matchers.containsString;
/**
*/
public class SimpleObjectMappingTests extends ESSingleNodeTestCase {
public void testDifferentInnerObjectTokenFailure() throws Exception {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.endObject().endObject().string();
DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
- try {
+ IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> {
defaultMapper.parse("test", "type", "1", new BytesArray(" {\n" +
" \"object\": {\n" +
" \"array\":[\n" +
" {\n" +
" \"object\": { \"value\": \"value\" }\n" +
" },\n" +
" {\n" +
" \"object\":\"value\"\n" +
" }\n" +
" ]\n" +
" },\n" +
" \"value\":\"value\"\n" +
" }"));
- fail();
- } catch (MapperParsingException e) {
+ });
+ assertTrue(e.getMessage(), e.getMessage().contains("different type"));
// all is well
}
}
public void testEmptyArrayProperties() throws Exception {

View File

@ -0,0 +1,80 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.mapper;
import java.io.IOException;
import java.util.List;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.collect.ImmutableOpenMap;
import org.elasticsearch.common.settings.Settings;
// this sucks how much must be overridden just do get a dummy field mapper...
public class MockFieldMapper extends FieldMapper {
static Settings dummySettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT.id).build();
public MockFieldMapper(String fullName) {
this(fullName, new FakeFieldType());
}
public MockFieldMapper(String fullName, MappedFieldType fieldType) {
super(findSimpleName(fullName), setName(fullName, fieldType), setName(fullName, fieldType), dummySettings,
MultiFields.empty(), new CopyTo.Builder().build());
}
static MappedFieldType setName(String fullName, MappedFieldType fieldType) {
fieldType.setName(fullName);
return fieldType;
}
static String findSimpleName(String fullName) {
int ndx = fullName.lastIndexOf('.');
return fullName.substring(ndx + 1);
}
static class FakeFieldType extends MappedFieldType {
public FakeFieldType() {
}
protected FakeFieldType(FakeFieldType ref) {
super(ref);
}
@Override
public MappedFieldType clone() {
return new FakeFieldType(this);
}
@Override
public String typeName() {
return "faketype";
}
}
@Override
protected String contentType() {
return null;
}
@Override
protected void parseCreateField(ParseContext context, List list) throws IOException {
}
}