Merge pull request #10593 from jpountz/fix/dynamic_mappings_updates

Mappings: Same code path for dynamic mappings updates and updates coming from the API.

Close #10593
Adrien Grand 2015-04-16 10:17:38 +02:00
commit 5806e85771
38 changed files with 804 additions and 535 deletions
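
At a high level, document parsing no longer mutates live mappings as a side effect: it returns a mapping update, and that update is applied through the same simulate-then-apply merge used for mapping updates coming from the API. A minimal sketch of the new flow, using names introduced in the diffs below; the driver code here is illustrative, not the exact DocumentMapper implementation:

    // Sketch: dynamic updates take the same merge path as API updates.
    Mapper update = rootObjectMapper.parse(context);    // null if no dynamic mappings were created
    if (update != null) {
        synchronized (this) {                           // serialize with API-driven mapping updates
            MergeContext simulation = newMergeContext(new MergeFlags().simulate(true));
            rootObjectMapper.merge(update, simulation); // dry run to detect conflicts first
            if (simulation.hasConflicts()) {
                throw new MapperParsingException("Could not apply generated dynamic mappings: "
                        + Arrays.toString(simulation.buildConflicts()));
            }
            rootObjectMapper.merge(update, newMergeContext(new MergeFlags().simulate(false)));
        }
    }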


@ -226,6 +226,11 @@ def generate_index(client, version, index_name):
}
}
}
mappings['auto_boost'] = {
'_all': {
'auto_boost': True
}
}
client.indices.create(index=index_name, body={
'settings': {


@ -19,11 +19,14 @@
package org.elasticsearch.common.lucene.all;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.PostingsEnum;
import org.apache.lucene.index.Term;
import org.apache.lucene.index.Terms;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.payloads.AveragePayloadFunction;
import org.apache.lucene.search.payloads.PayloadTermQuery;
import org.apache.lucene.search.similarities.Similarity;
@ -119,4 +122,22 @@ public final class AllTermQuery extends PayloadTermQuery {
return true;
}
@Override
public Query rewrite(IndexReader reader) throws IOException {
boolean hasPayloads = false;
for (LeafReaderContext context : reader.leaves()) {
final Terms terms = context.reader().terms(term.field());
if (terms != null && terms.hasPayloads()) {
hasPayloads = true;
break;
}
}
if (hasPayloads == false) {
TermQuery rewritten = new TermQuery(term);
rewritten.setBoost(getBoost());
return rewritten;
}
return this;
}
}
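
This rewrite means the payload-aware scoring of _all queries is only paid for when at least one segment actually indexed payloads; otherwise the query degrades to a plain TermQuery with the same boost. A hedged usage sketch, where reader is an assumed open IndexReader and IndexSearcher.rewrite is standard Lucene:

    // Sketch: with no payloads on "_all" in any segment, the query
    // rewrites itself into an equivalent, cheaper TermQuery.
    IndexSearcher searcher = new IndexSearcher(reader);
    Query allQuery = new AllTermQuery(new Term("_all", "quick"));
    Query rewritten = searcher.rewrite(allQuery);
    // rewritten is a TermQuery when hasPayloads is false for every leaf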


@ -22,6 +22,7 @@ package org.elasticsearch.index.mapper;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.IndexableField;
@ -33,6 +34,7 @@ import org.elasticsearch.ElasticsearchGenerationException;
import org.elasticsearch.ElasticsearchIllegalArgumentException;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.Preconditions;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.collect.MapBuilder;
import org.elasticsearch.common.compress.CompressedString;
@ -70,6 +72,7 @@ import org.elasticsearch.script.ScriptService.ScriptType;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
@ -438,10 +441,11 @@ public class DocumentMapper implements ToXContent {
ParseContext.InternalParseContext context = cache.get();
if (source.type() != null && !source.type().equals(this.type)) {
throw new MapperParsingException("Type mismatch, provide type [" + source.type() + "] but mapper is of type [" + this.type + "]", context.mappingsModified());
throw new MapperParsingException("Type mismatch, provide type [" + source.type() + "] but mapper is of type [" + this.type + "]");
}
source.type(this.type);
boolean mappingsModified = false;
XContentParser parser = source.parser();
try {
if (parser == null) {
@ -456,7 +460,7 @@ public class DocumentMapper implements ToXContent {
int countDownTokens = 0;
XContentParser.Token token = parser.nextToken();
if (token != XContentParser.Token.START_OBJECT) {
throw new MapperParsingException("Malformed content, must start with an object", context.mappingsModified());
throw new MapperParsingException("Malformed content, must start with an object");
}
boolean emptyDoc = false;
token = parser.nextToken();
@ -464,7 +468,7 @@ public class DocumentMapper implements ToXContent {
// empty doc, we can handle it...
emptyDoc = true;
} else if (token != XContentParser.Token.FIELD_NAME) {
throw new MapperParsingException("Malformed content, after first object, either the type field or the actual properties should exist", context.mappingsModified());
throw new MapperParsingException("Malformed content, after first object, either the type field or the actual properties should exist");
}
for (RootMapper rootMapper : rootMappersOrdered) {
@ -472,7 +476,31 @@ public class DocumentMapper implements ToXContent {
}
if (!emptyDoc) {
rootObjectMapper.parse(context);
Mapper update = rootObjectMapper.parse(context);
for (RootObjectMapper mapper : context.updates()) {
if (update == null) {
update = mapper;
} else {
MapperUtils.merge(update, mapper);
}
}
if (update != null) {
// TODO: validate the mapping update on the master node
// lock to avoid concurrency issues with mapping updates coming from the API
synchronized(this) {
// simulate on the first time to check if the mapping update is applicable
MergeContext mergeContext = newMergeContext(new MergeFlags().simulate(true));
rootObjectMapper.merge(update, mergeContext);
if (mergeContext.hasConflicts()) {
throw new MapperParsingException("Could not apply generated dynamic mappings: " + Arrays.toString(mergeContext.buildConflicts()));
} else {
// then apply it for real
mappingsModified = true;
mergeContext = newMergeContext(new MergeFlags().simulate(false));
rootObjectMapper.merge(update, mergeContext);
}
}
}
}
for (int i = 0; i < countDownTokens; i++) {
@ -490,10 +518,10 @@ public class DocumentMapper implements ToXContent {
// Throw a more meaningful message if the document is empty.
if (source.source() != null && source.source().length() == 0) {
throw new MapperParsingException("failed to parse, document is empty", context.mappingsModified());
throw new MapperParsingException("failed to parse, document is empty");
}
throw new MapperParsingException("failed to parse", e, context.mappingsModified());
throw new MapperParsingException("failed to parse", e);
} finally {
// only close the parser when it's not provided externally
if (source.parser() == null && parser != null) {
@ -521,7 +549,7 @@ public class DocumentMapper implements ToXContent {
}
ParsedDocument doc = new ParsedDocument(context.uid(), context.version(), context.id(), context.type(), source.routing(), source.timestamp(), source.ttl(), context.docs(),
context.source(), context.mappingsModified()).parent(source.parent());
context.source(), mappingsModified).parent(source.parent());
// reset the context to free up memory
context.reset(null, null, null, null);
return doc;
@ -637,8 +665,41 @@ public class DocumentMapper implements ToXContent {
rootObjectMapper.traverse(listener);
}
private MergeContext newMergeContext(MergeFlags mergeFlags) {
return new MergeContext(mergeFlags) {
List<String> conflicts = new ArrayList<>();
@Override
public void addFieldMappers(List<FieldMapper<?>> fieldMappers) {
DocumentMapper.this.addFieldMappers(fieldMappers);
}
@Override
public void addObjectMappers(Collection<ObjectMapper> objectMappers) {
DocumentMapper.this.addObjectMappers(objectMappers);
}
@Override
public void addConflict(String mergeFailure) {
conflicts.add(mergeFailure);
}
@Override
public boolean hasConflicts() {
return conflicts.isEmpty() == false;
}
@Override
public String[] buildConflicts() {
return conflicts.toArray(Strings.EMPTY_ARRAY);
}
};
}
public synchronized MergeResult merge(DocumentMapper mergeWith, MergeFlags mergeFlags) {
MergeContext mergeContext = new MergeContext(this, mergeFlags);
final MergeContext mergeContext = newMergeContext(mergeFlags);
assert rootMappers.size() == mergeWith.rootMappers.size();
rootObjectMapper.merge(mergeWith.rootObjectMapper, mergeContext);


@ -125,7 +125,12 @@ public interface Mapper extends ToXContent {
String name();
void parse(ParseContext context) throws IOException;
/**
* Parse using the provided {@link ParseContext} and return a mapping
* update if dynamic mappings modified the mappings, or {@code null} if
* mappings were not modified.
*/
Mapper parse(ParseContext context) throws IOException;
void merge(Mapper mergeWith, MergeContext mergeContext) throws MergeMappingException;
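
This is the central interface change: parse goes from void to returning the dynamic-mapping update, with null meaning "nothing changed". A sketch of the calling convention, where pendingUpdate is a hypothetical accumulator owned by the caller:

    // Sketch: fold per-field updates into a single pending update.
    Mapper update = mapper.parse(context);
    if (update != null) {
        if (pendingUpdate == null) {
            pendingUpdate = update;                   // first dynamic mapping seen
        } else {
            MapperUtils.merge(pendingUpdate, update); // MapperUtils is introduced below
        }
    }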


@ -28,28 +28,10 @@ public class MapperParsingException extends MapperException {
public MapperParsingException(String message) {
super(message);
mappingsModified = false;
}
public boolean isMappingsModified() {
return mappingsModified;
}
private boolean mappingsModified = false;
public MapperParsingException(String message, boolean mappingsModified) {
super(message);
this.mappingsModified = mappingsModified;
}
public MapperParsingException(String message, Throwable cause, boolean mappingsModified) {
super(message, cause);
this.mappingsModified = mappingsModified;
}
public MapperParsingException(String message, Throwable cause) {
super(message, cause);
this.mappingsModified = false;
}
@Override


@ -0,0 +1,82 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.mapper;
import org.elasticsearch.ElasticsearchIllegalStateException;
import org.elasticsearch.common.Strings;
import org.elasticsearch.index.mapper.object.ObjectMapper;
import org.elasticsearch.index.mapper.object.RootObjectMapper;
import java.io.IOException;
import java.util.Collection;
import java.util.List;
public enum MapperUtils {
;
/**
* Parse the given {@code context} with the given {@code mapper} and apply
* the potential mapping update in-place. This method is useful when
* composing mapping updates.
*/
public static <M extends Mapper> M parseAndMergeUpdate(M mapper, ParseContext context) throws IOException {
final Mapper update = mapper.parse(context);
if (update != null) {
merge(mapper, update);
}
return mapper;
}
/**
* Merge {@code mergeWith} into {@code mergeTo}. Note: this method only
* merges mappings, not lookup structures. Conflicts are returned as exceptions.
*/
public static void merge(Mapper mergeInto, Mapper mergeWith) {
MergeContext ctx = new MergeContext(new DocumentMapper.MergeFlags().simulate(false)) {
@Override
public boolean hasConflicts() {
return false;
}
@Override
public String[] buildConflicts() {
return Strings.EMPTY_ARRAY;
}
@Override
public void addObjectMappers(Collection<ObjectMapper> objectMappers) {
// no-op
}
@Override
public void addFieldMappers(List<FieldMapper<?>> fieldMappers) {
// no-op
}
@Override
public void addConflict(String mergeFailure) {
throw new ElasticsearchIllegalStateException("Merging dynamic updates triggered a conflict: " + mergeFailure);
}
};
mergeInto.merge(mergeWith, ctx);
}
}
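
A short usage sketch of the two helpers, with the mapper references assumed: parseAndMergeUpdate composes updates while parsing, and merge folds one update into another. A conflict during such a merge indicates a programming error and surfaces as the ElasticsearchIllegalStateException thrown from addConflict above.

    // Sketch: parse a freshly built dynamic sub-mapper, applying any nested
    // updates it produces in place, before wrapping it into a parent update.
    ObjectMapper child = MapperUtils.parseAndMergeUpdate(newObjectMapper, context);
    // Sketch: combine two independent updates for the same mapper.
    MapperUtils.merge(update, otherUpdate);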


@ -19,41 +19,33 @@
package org.elasticsearch.index.mapper;
import com.google.common.collect.Lists;
import org.elasticsearch.index.mapper.object.ObjectMapper;
import java.util.Collection;
import java.util.List;
/**
*
*/
public class MergeContext {
public abstract class MergeContext {
private final DocumentMapper documentMapper;
private final DocumentMapper.MergeFlags mergeFlags;
private final List<String> mergeConflicts = Lists.newArrayList();
public MergeContext(DocumentMapper documentMapper, DocumentMapper.MergeFlags mergeFlags) {
this.documentMapper = documentMapper;
public MergeContext(DocumentMapper.MergeFlags mergeFlags) {
this.mergeFlags = mergeFlags;
}
public DocumentMapper docMapper() {
return documentMapper;
}
public abstract void addFieldMappers(List<FieldMapper<?>> fieldMappers);
public abstract void addObjectMappers(Collection<ObjectMapper> objectMappers);
public DocumentMapper.MergeFlags mergeFlags() {
return mergeFlags;
}
public void addConflict(String mergeFailure) {
mergeConflicts.add(mergeFailure);
}
public abstract void addConflict(String mergeFailure);
public boolean hasConflicts() {
return !mergeConflicts.isEmpty();
}
public abstract boolean hasConflicts();
public String[] buildConflicts() {
return mergeConflicts.toArray(new String[mergeConflicts.size()]);
}
public abstract String[] buildConflicts();
}


@ -22,7 +22,7 @@ package org.elasticsearch.index.mapper;
import com.carrotsearch.hppc.ObjectObjectMap;
import com.carrotsearch.hppc.ObjectObjectOpenHashMap;
import com.google.common.collect.Lists;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.IndexableField;
@ -38,7 +38,11 @@ import org.elasticsearch.index.analysis.AnalysisService;
import org.elasticsearch.index.mapper.DocumentMapper.ParseListener;
import org.elasticsearch.index.mapper.object.RootObjectMapper;
import java.util.*;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
/**
*
@ -194,31 +198,6 @@ public abstract class ParseContext {
return in.docMapperParser();
}
@Override
public boolean mappingsModified() {
return in.mappingsModified();
}
@Override
public void setMappingsModified() {
in.setMappingsModified();
}
@Override
public void setWithinNewMapper() {
in.setWithinNewMapper();
}
@Override
public void clearWithinNewMapper() {
in.clearWithinNewMapper();
}
@Override
public boolean isWithinNewMapper() {
return in.isWithinNewMapper();
}
@Override
public boolean isWithinCopyTo() {
return in.isWithinCopyTo();
@ -379,6 +358,15 @@ public abstract class ParseContext {
return in.stringBuilder();
}
@Override
public void addRootObjectUpdate(RootObjectMapper update) {
in.addRootObjectUpdate(update);
}
@Override
public List<RootObjectMapper> updates() {
return in.updates();
}
}
public static class InternalParseContext extends ParseContext {
@ -414,12 +402,13 @@ public abstract class ParseContext {
private Map<String, String> ignoredValues = new HashMap<>();
private boolean mappingsModified = false;
private boolean withinNewMapper = false;
private AllEntries allEntries = new AllEntries();
private float docBoost = 1.0f;
private final List<RootObjectMapper> rootMapperDynamicUpdates = new ArrayList<>();
public InternalParseContext(String index, @Nullable Settings indexSettings, DocumentMapperParser docMapperParser, DocumentMapper docMapper, ContentPath path) {
this.index = index;
this.indexSettings = indexSettings;
@ -444,11 +433,11 @@ public abstract class ParseContext {
this.source = source == null ? null : sourceToParse.source();
this.path.reset();
this.mappingsModified = false;
this.withinNewMapper = false;
this.listener = listener == null ? DocumentMapper.ParseListener.EMPTY : listener;
this.allEntries = new AllEntries();
this.ignoredValues.clear();
this.docBoost = 1.0f;
this.rootMapperDynamicUpdates.clear();
}
@Override
@ -461,31 +450,6 @@ public abstract class ParseContext {
return this.docMapperParser;
}
@Override
public boolean mappingsModified() {
return this.mappingsModified;
}
@Override
public void setMappingsModified() {
this.mappingsModified = true;
}
@Override
public void setWithinNewMapper() {
this.withinNewMapper = true;
}
@Override
public void clearWithinNewMapper() {
this.withinNewMapper = false;
}
@Override
public boolean isWithinNewMapper() {
return withinNewMapper;
}
@Override
public String index() {
return this.index;
@ -638,22 +602,22 @@ public abstract class ParseContext {
stringBuilder.setLength(0);
return this.stringBuilder;
}
@Override
public void addRootObjectUpdate(RootObjectMapper mapper) {
rootMapperDynamicUpdates.add(mapper);
}
@Override
public List<RootObjectMapper> updates() {
return rootMapperDynamicUpdates;
}
}
public abstract boolean flyweight();
public abstract DocumentMapperParser docMapperParser();
public abstract boolean mappingsModified();
public abstract void setMappingsModified();
public abstract void setWithinNewMapper();
public abstract void clearWithinNewMapper();
public abstract boolean isWithinNewMapper();
/**
* Return a new context that will be within a copy-to operation.
*/
@ -854,4 +818,15 @@ public abstract class ParseContext {
*/
public abstract StringBuilder stringBuilder();
/**
* Add a dynamic update to the root object mapper.
* TODO: can we nuke it, it is only needed for copy_to
*/
public abstract void addRootObjectUpdate(RootObjectMapper update);
/**
* Get dynamic updates to the root object mapper.
* TODO: can we nuke it, it is only needed for copy_to
*/
public abstract List<RootObjectMapper> updates();
}
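
These two hooks exist only because copy_to can create mappings outside the object currently being parsed: the producer registers a root-level update, and DocumentMapper.parse drains the list once parsing completes. A sketch of both sides, using names from the hunks above (rootUpdate and update are assumed local variables):

    // Producer side (copy_to handling in AbstractFieldMapper, sketched):
    context.addRootObjectUpdate(rootUpdate);   // rootUpdate: RootObjectMapper wrapping the new field

    // Consumer side (DocumentMapper.parse, sketched):
    for (RootObjectMapper pending : context.updates()) {
        MapperUtils.merge(update, pending);    // fold copy_to updates into the main dynamic update
    }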


@ -24,8 +24,8 @@ import org.elasticsearch.rest.RestStatus;
*/
public class StrictDynamicMappingException extends MapperParsingException {
public StrictDynamicMappingException(String path, String fieldName, boolean mappingsModified) {
super("mapping set to strict, dynamic introduction of [" + fieldName + "] within [" + path + "] is not allowed", mappingsModified);
public StrictDynamicMappingException(String path, String fieldName) {
super("mapping set to strict, dynamic introduction of [" + fieldName + "] within [" + path + "] is not allowed");
}
@Override


@ -44,6 +44,7 @@ import org.apache.lucene.search.TermRangeFilter;
import org.apache.lucene.search.TermRangeQuery;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.ElasticsearchIllegalArgumentException;
import org.elasticsearch.ElasticsearchIllegalStateException;
import org.elasticsearch.Version;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.collect.ImmutableOpenMap;
@ -70,6 +71,7 @@ import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.mapper.ParseContext.Document;
import org.elasticsearch.index.mapper.internal.AllFieldMapper;
import org.elasticsearch.index.mapper.object.ObjectMapper;
import org.elasticsearch.index.mapper.object.RootObjectMapper;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.index.search.FieldDataTermsFilter;
import org.elasticsearch.index.similarity.SimilarityLookupService;
@ -81,7 +83,6 @@ import java.util.Arrays;
import java.util.Comparator;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.TreeMap;
/**
@ -434,7 +435,7 @@ public abstract class AbstractFieldMapper<T> implements FieldMapper<T> {
}
@Override
public void parse(ParseContext context) throws IOException {
public Mapper parse(ParseContext context) throws IOException {
final List<Field> fields = new ArrayList<>(2);
try {
parseCreateField(context, fields);
@ -447,12 +448,13 @@ public abstract class AbstractFieldMapper<T> implements FieldMapper<T> {
}
}
} catch (Exception e) {
throw new MapperParsingException("failed to parse [" + names.fullName() + "]", e, context.mappingsModified());
throw new MapperParsingException("failed to parse [" + names.fullName() + "]", e);
}
multiFields.parse(this, context);
if (copyTo != null) {
copyTo.parse(context);
}
return null;
}
/**
@ -968,7 +970,7 @@ public abstract class AbstractFieldMapper<T> implements FieldMapper<T> {
// first add all field mappers
if (newFieldMappers != null) {
mergeContext.docMapper().addFieldMappers(newFieldMappers);
mergeContext.addFieldMappers(newFieldMappers);
}
// now publish mappers
if (newMappersBuilder != null) {
@ -1089,54 +1091,41 @@ public abstract class AbstractFieldMapper<T> implements FieldMapper<T> {
// The path of the dest field might be completely different from the current one so we need to reset it
context = context.overridePath(new ContentPath(0));
ObjectMapper mapper = context.root();
String objectPath = "";
String fieldPath = field;
int posDot = field.lastIndexOf('.');
if (posDot > 0) {
// Compound name
String objectPath = field.substring(0, posDot);
String fieldPath = field.substring(posDot + 1);
ObjectMapper mapper = context.docMapper().objectMappers().get(objectPath);
if (mapper == null) {
//TODO: Create an object dynamically?
throw new MapperParsingException("attempt to copy value to non-existing object [" + field + "]", context.mappingsModified());
}
objectPath = field.substring(0, posDot);
context.path().add(objectPath);
// We might be in dynamically created field already, so need to clean withinNewMapper flag
// and then restore it, so we wouldn't miss new mappers created from copy_to fields
boolean origWithinNewMapper = context.isWithinNewMapper();
context.clearWithinNewMapper();
try {
mapper.parseDynamicValue(context, fieldPath, context.parser().currentToken());
} finally {
if (origWithinNewMapper) {
context.setWithinNewMapper();
} else {
context.clearWithinNewMapper();
}
}
} else {
// We might be in dynamically created field already, so need to clean withinNewMapper flag
// and then restore it, so we wouldn't miss new mappers created from copy_to fields
boolean origWithinNewMapper = context.isWithinNewMapper();
context.clearWithinNewMapper();
try {
context.docMapper().root().parseDynamicValue(context, field, context.parser().currentToken());
} finally {
if (origWithinNewMapper) {
context.setWithinNewMapper();
} else {
context.clearWithinNewMapper();
}
}
mapper = context.docMapper().objectMappers().get(objectPath);
fieldPath = field.substring(posDot + 1);
}
if (mapper == null) {
//TODO: Create an object dynamically?
throw new MapperParsingException("attempt to copy value to non-existing object [" + field + "]");
}
ObjectMapper update = mapper.parseDynamicValue(context, fieldPath, context.parser().currentToken());
assert update != null; // we are parsing a dynamic value so we necessarily created a new mapping
// propagate the update to the root
while (objectPath.length() > 0) {
String parentPath = "";
ObjectMapper parent = context.root();
posDot = objectPath.lastIndexOf('.');
if (posDot > 0) {
parentPath = objectPath.substring(0, posDot);
parent = context.docMapper().objectMappers().get(parentPath);
}
if (parent == null) {
throw new ElasticsearchIllegalStateException("[" + objectPath + "] has no parent for path [" + parentPath + "]");
}
update = parent.mappingUpdate(update);
objectPath = parentPath;
}
context.addRootObjectUpdate((RootObjectMapper) update);
}
}
}
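
The loop above re-wraps the dynamic mapper once per path segment, so a copy_to target like a.b.c reaches the root as an update of the shape root { a { b { c } } }. A hedged, unrolled sketch with assumed mapper references for the intermediate objects:

    // Unrolled sketch of the propagation for copy_to target "a.b.c":
    ObjectMapper update = bMapper.parseDynamicValue(context, "c", token); // update for "a.b", holding only "c"
    update = aMapper.mappingUpdate(update);                               // "a" wrapping "a.b"
    update = context.root().mappingUpdate(update);                        // root wrapping "a"
    context.addRootObjectUpdate((RootObjectMapper) update);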
/**


@ -266,7 +266,7 @@ public class CompletionFieldMapper extends AbstractFieldMapper<String> {
}
@Override
public void parse(ParseContext context) throws IOException {
public Mapper parse(ParseContext context) throws IOException {
XContentParser parser = context.parser();
XContentParser.Token token = parser.currentToken();
@ -382,6 +382,7 @@ public class CompletionFieldMapper extends AbstractFieldMapper<String> {
context.doc().add(getCompletionField(ctx, input, suggestPayload));
}
}
return null;
}
private void checkWeight(long weight) {


@ -515,7 +515,7 @@ public class GeoPointFieldMapper extends AbstractFieldMapper<GeoPoint> implement
}
@Override
public void parse(ParseContext context) throws IOException {
public Mapper parse(ParseContext context) throws IOException {
ContentPath.Type origPathType = context.path().pathType();
context.path().pathType(pathType);
context.path().add(name());
@ -565,6 +565,7 @@ public class GeoPointFieldMapper extends AbstractFieldMapper<GeoPoint> implement
context.path().remove();
context.path().pathType(origPathType);
return null;
}
private void parseGeohashField(ParseContext context, String geohash) throws IOException {


@ -237,19 +237,19 @@ public class GeoShapeFieldMapper extends AbstractFieldMapper<String> {
}
@Override
public void parse(ParseContext context) throws IOException {
public Mapper parse(ParseContext context) throws IOException {
try {
Shape shape = context.parseExternalValue(Shape.class);
if (shape == null) {
ShapeBuilder shapeBuilder = ShapeBuilder.parse(context.parser(), this);
if (shapeBuilder == null) {
return;
return null;
}
shape = shapeBuilder.build();
}
Field[] fields = defaultStrategy.createIndexableFields(shape);
if (fields == null || fields.length == 0) {
return;
return null;
}
for (Field field : fields) {
if (!customBoost()) {
@ -262,6 +262,7 @@ public class GeoShapeFieldMapper extends AbstractFieldMapper<String> {
} catch (Exception e) {
throw new MapperParsingException("failed to parse [" + names.fullName() + "]", e);
}
return null;
}
@Override


@ -25,7 +25,6 @@ import org.apache.lucene.document.FieldType;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;
import org.elasticsearch.Version;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.Strings;
@ -33,9 +32,7 @@ import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.lucene.all.AllField;
import org.elasticsearch.common.lucene.all.AllTermQuery;
import org.elasticsearch.common.settings.ImmutableSettings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.settings.loader.SettingsLoader;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.fielddata.FieldDataType;
@ -97,9 +94,6 @@ public class AllFieldMapper extends AbstractFieldMapper<String> implements Inter
private EnabledAttributeMapper enabled = Defaults.ENABLED;
// an internal flag, automatically set if we encounter boosting
boolean autoBoost = false;
public Builder() {
super(Defaults.NAME, new FieldType(Defaults.FIELD_TYPE));
builder = this;
@ -120,7 +114,7 @@ public class AllFieldMapper extends AbstractFieldMapper<String> implements Inter
}
fieldType.setTokenized(true);
return new AllFieldMapper(name, fieldType, indexAnalyzer, searchAnalyzer, enabled, autoBoost, similarity, normsLoading, fieldDataSettings, context.indexSettings());
return new AllFieldMapper(name, fieldType, indexAnalyzer, searchAnalyzer, enabled, similarity, normsLoading, fieldDataSettings, context.indexSettings());
}
}
@ -154,8 +148,8 @@ public class AllFieldMapper extends AbstractFieldMapper<String> implements Inter
if (fieldName.equals("enabled")) {
builder.enabled(nodeBooleanValue(fieldNode) ? EnabledAttributeMapper.ENABLED : EnabledAttributeMapper.DISABLED);
iterator.remove();
} else if (fieldName.equals("auto_boost")) {
builder.autoBoost = nodeBooleanValue(fieldNode);
} else if (fieldName.equals("auto_boost") && parserContext.indexVersionCreated().before(Version.V_2_0_0)) {
// Old 1.x setting which is now ignored
iterator.remove();
}
}
@ -165,24 +159,17 @@ public class AllFieldMapper extends AbstractFieldMapper<String> implements Inter
private EnabledAttributeMapper enabledState;
// The autoBoost flag is automatically set based on indexed docs on the mappings
// if a doc is indexed with a specific boost value and part of _all, it is automatically
// set to true. This allows to optimize (automatically, which we like) for the common case
// where fields don't usually have boost associated with them, and we don't need to use the
// special SpanTermQuery to look at payloads
private volatile boolean autoBoost;
public AllFieldMapper(Settings indexSettings) {
this(Defaults.NAME, new FieldType(Defaults.FIELD_TYPE), null, null, Defaults.ENABLED, false, null, null, null, indexSettings);
this(Defaults.NAME, new FieldType(Defaults.FIELD_TYPE), null, null, Defaults.ENABLED, null, null, null, indexSettings);
}
protected AllFieldMapper(String name, FieldType fieldType, NamedAnalyzer indexAnalyzer, NamedAnalyzer searchAnalyzer,
EnabledAttributeMapper enabled, boolean autoBoost, SimilarityProvider similarity, Loading normsLoading,
EnabledAttributeMapper enabled, SimilarityProvider similarity, Loading normsLoading,
@Nullable Settings fieldDataSettings, Settings indexSettings) {
super(new Names(name, name, name, name), 1.0f, fieldType, false, indexAnalyzer, searchAnalyzer,
similarity, normsLoading, fieldDataSettings, indexSettings);
this.enabledState = enabled;
this.autoBoost = autoBoost;
}
@ -202,13 +189,7 @@ public class AllFieldMapper extends AbstractFieldMapper<String> implements Inter
@Override
public Query queryStringTermQuery(Term term) {
if (!autoBoost) {
return new TermQuery(term);
}
if (fieldType.indexOptions().compareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS) >= 0) {
return new AllTermQuery(term);
}
return new TermQuery(term);
return new AllTermQuery(term);
}
@Override
@ -226,8 +207,9 @@ public class AllFieldMapper extends AbstractFieldMapper<String> implements Inter
}
@Override
public void parse(ParseContext context) throws IOException {
public Mapper parse(ParseContext context) throws IOException {
// we parse in post parse
return null;
}
@Override
@ -242,14 +224,6 @@ public class AllFieldMapper extends AbstractFieldMapper<String> implements Inter
}
// reset the entries
context.allEntries().reset();
// if the autoBoost flag is not set, and we indexed a doc with custom boost, make
// sure to update the flag, and notify mappings on change
if (!autoBoost && context.allEntries().customBoost()) {
autoBoost = true;
context.setMappingsModified();
}
Analyzer analyzer = findAnalyzer(context);
fields.add(new AllField(names.indexName(), context.allEntries(), analyzer, fieldType));
}
@ -305,9 +279,6 @@ public class AllFieldMapper extends AbstractFieldMapper<String> implements Inter
if (includeDefaults || enabledState != Defaults.ENABLED) {
builder.field("enabled", enabledState.enabled);
}
if (includeDefaults || autoBoost != false) {
builder.field("auto_boost", autoBoost);
}
if (includeDefaults || fieldType.stored() != Defaults.FIELD_TYPE.stored()) {
builder.field("store", fieldType.stored());
}


@ -184,8 +184,9 @@ public class FieldNamesFieldMapper extends AbstractFieldMapper<String> implement
}
@Override
public void parse(ParseContext context) throws IOException {
public Mapper parse(ParseContext context) throws IOException {
// we parse in post parse
return null;
}
@Override


@ -307,16 +307,11 @@ public class IdFieldMapper extends AbstractFieldMapper<String> implements Intern
@Override
public void postParse(ParseContext context) throws IOException {
if (context.id() == null && !context.sourceToParse().flyweight()) {
throw new MapperParsingException("No id found while parsing the content source", context.mappingsModified());
throw new MapperParsingException("No id found while parsing the content source");
}
// it either gets built in the preParse phase, or gets parsed...
}
@Override
public void parse(ParseContext context) throws IOException {
super.parse(context);
}
@Override
public boolean includeInObject() {
return true;
@ -329,7 +324,7 @@ public class IdFieldMapper extends AbstractFieldMapper<String> implements Intern
// we are in the parse Phase
String id = parser.text();
if (context.id() != null && !context.id().equals(id)) {
throw new MapperParsingException("Provided id [" + context.id() + "] does not match the content one [" + id + "]", context.mappingsModified());
throw new MapperParsingException("Provided id [" + context.id() + "] does not match the content one [" + id + "]");
}
context.id(id);
} // else we are in the pre/post parse phase


@ -166,8 +166,8 @@ public class IndexFieldMapper extends AbstractFieldMapper<String> implements Int
}
@Override
public void parse(ParseContext context) throws IOException {
public Mapper parse(ParseContext context) throws IOException {
return null;
}
@Override


@ -182,10 +182,11 @@ public class RoutingFieldMapper extends AbstractFieldMapper<String> implements I
}
@Override
public void parse(ParseContext context) throws IOException {
public Mapper parse(ParseContext context) throws IOException {
// no need to parse here, we either get the routing in the sourceToParse
// or we don't have routing, if we get it in sourceToParse, we process it in preParse
// which will always be called
return null;
}
@Override


@ -134,8 +134,9 @@ public class SizeFieldMapper extends IntegerFieldMapper implements RootMapper {
}
@Override
public void parse(ParseContext context) throws IOException {
public Mapper parse(ParseContext context) throws IOException {
// nothing to do here, we call the parent in postParse
return null;
}
@Override


@ -251,8 +251,9 @@ public class SourceFieldMapper extends AbstractFieldMapper<byte[]> implements In
}
@Override
public void parse(ParseContext context) throws IOException {
public Mapper parse(ParseContext context) throws IOException {
// nothing to do here, we will call it in pre parse
return null;
}
@Override


@ -175,7 +175,7 @@ public class TTLFieldMapper extends LongFieldMapper implements InternalMapper, R
}
@Override
public void parse(ParseContext context) throws IOException, MapperParsingException {
public Mapper parse(ParseContext context) throws IOException, MapperParsingException {
if (context.sourceToParse().ttl() < 0) { // no ttl has been provided externally
long ttl;
if (context.parser().currentToken() == XContentParser.Token.VALUE_STRING) {
@ -188,6 +188,7 @@ public class TTLFieldMapper extends LongFieldMapper implements InternalMapper, R
}
context.sourceToParse().ttl(ttl);
}
return null;
}
@Override


@ -273,8 +273,9 @@ public class TimestampFieldMapper extends DateFieldMapper implements InternalMap
}
@Override
public void parse(ParseContext context) throws IOException {
public Mapper parse(ParseContext context) throws IOException {
// nothing to do here, we call the parent in preParse
return null;
}
@Override


@ -158,8 +158,9 @@ public class TypeFieldMapper extends AbstractFieldMapper<String> implements Inte
}
@Override
public void parse(ParseContext context) throws IOException {
public Mapper parse(ParseContext context) throws IOException {
// we parse in pre parse
return null;
}
@Override


@ -167,8 +167,9 @@ public class UidFieldMapper extends AbstractFieldMapper<Uid> implements Internal
}
@Override
public void parse(ParseContext context) throws IOException {
public Mapper parse(ParseContext context) throws IOException {
// nothing to do here, we either do it in post parse, or in pre parse.
return null;
}
@Override


@ -113,8 +113,9 @@ public class VersionFieldMapper extends AbstractFieldMapper<Long> implements Int
}
@Override
public void parse(ParseContext context) throws IOException {
public Mapper parse(ParseContext context) throws IOException {
// _version added in preparse
return null;
}
@Override


@ -20,6 +20,7 @@
package org.elasticsearch.index.mapper.object;
import com.google.common.collect.Iterables;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.index.Term;
@ -38,6 +39,7 @@ import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.mapper.ContentPath;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.DocumentMapperParser;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.FieldMapperListener;
@ -45,6 +47,7 @@ import org.elasticsearch.index.mapper.InternalMapper;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperBuilders;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MapperUtils;
import org.elasticsearch.index.mapper.MergeContext;
import org.elasticsearch.index.mapper.MergeMappingException;
import org.elasticsearch.index.mapper.ObjectMapperListener;
@ -84,7 +87,7 @@ import static org.elasticsearch.index.mapper.core.TypeParsers.parsePathType;
/**
*
*/
public class ObjectMapper implements Mapper, AllFieldMapper.IncludeInAll {
public class ObjectMapper implements Mapper, AllFieldMapper.IncludeInAll, Cloneable {
public static final String CONTENT_TYPE = "object";
public static final String NESTED_CONTENT_TYPE = "nested";
@ -370,8 +373,6 @@ public class ObjectMapper implements Mapper, AllFieldMapper.IncludeInAll {
private volatile CopyOnWriteHashMap<String, Mapper> mappers;
private final Object mutex = new Object();
ObjectMapper(String name, String fullPath, boolean enabled, Nested nested, Dynamic dynamic, ContentPath.Type pathType, Map<String, Mapper> mappers) {
this.name = name;
this.fullPath = fullPath;
@ -389,6 +390,28 @@ public class ObjectMapper implements Mapper, AllFieldMapper.IncludeInAll {
this.nestedTypeFilter = new TermFilter(new Term(TypeFieldMapper.NAME, nestedTypePathAsBytes));
}
@Override
protected ObjectMapper clone() {
ObjectMapper clone;
try {
clone = (ObjectMapper) super.clone();
} catch (CloneNotSupportedException e) {
throw new RuntimeException(e); // should never happen: ObjectMapper implements Cloneable
}
return clone;
}
/**
* Build a mapping update with the provided sub mapping update.
*/
public ObjectMapper mappingUpdate(Mapper mapper) {
ObjectMapper mappingUpdate = clone();
// reset the sub mappers
mappingUpdate.mappers = new CopyOnWriteHashMap<>();
mappingUpdate.putMapper(mapper);
return mappingUpdate;
}
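
mappingUpdate is the building block for every update returned by parse: the clone keeps the object's name, path and settings, but its child map contains only the provided sub-mapper, so an update describes just the new mappings plus the path down to them. A sketch with assumed names:

    // Sketch: the update mirrors the object but carries a single child.
    ObjectMapper update = addressMapper.mappingUpdate(newZipFieldMapper);
    // update has addressMapper's name, path and flags, and exactly one
    // sub-mapper (newZipFieldMapper); siblings are deliberately absent.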
@Override
public String name() {
return this.name;
@ -440,14 +463,16 @@ public class ObjectMapper implements Mapper, AllFieldMapper.IncludeInAll {
return this.nestedTypeFilter;
}
public ObjectMapper putMapper(Mapper mapper) {
/**
* Put a new mapper.
* NOTE: this method must be called under the current {@link DocumentMapper}
* lock if concurrent updates are expected.
*/
public void putMapper(Mapper mapper) {
if (mapper instanceof AllFieldMapper.IncludeInAll) {
((AllFieldMapper.IncludeInAll) mapper).includeInAllIfNotSet(includeInAll);
}
synchronized (mutex) {
mappers = mappers.copyAndPut(mapper.name(), mapper);
}
return this;
mappers = mappers.copyAndPut(mapper.name(), mapper);
}
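
With the internal mutex gone, thread safety moves up one level: readers rely on the copy-on-write map, and writers are expected to hold the owning DocumentMapper's monitor, as the simulate/apply block in DocumentMapper.parse does. A sketch of a correctly synchronized caller, where documentMapper is an assumed reference to the owner:

    // Sketch: writers serialize on the owning DocumentMapper's monitor.
    synchronized (documentMapper) {
        objectMapper.putMapper(newChildMapper); // newChildMapper: assumed
    }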
@Override
@ -482,10 +507,10 @@ public class ObjectMapper implements Mapper, AllFieldMapper.IncludeInAll {
}
@Override
public void parse(ParseContext context) throws IOException {
public ObjectMapper parse(ParseContext context) throws IOException {
if (!enabled) {
context.parser().skipChildren();
return;
return null;
}
XContentParser parser = context.parser();
@ -493,13 +518,13 @@ public class ObjectMapper implements Mapper, AllFieldMapper.IncludeInAll {
XContentParser.Token token = parser.currentToken();
if (token == XContentParser.Token.VALUE_NULL) {
// the object is null ("obj1" : null), simply bail
return;
return null;
}
if (token.isValue() && !allowValue()) {
// if we are parsing an object but it is just a value, it's only allowed on root-level parsers where there
// is a field name with the same name as the type
throw new MapperParsingException("object mapping for [" + name + "] tried to parse field [" + currentFieldName + "] as object, but found a concrete value", context.mappingsModified());
throw new MapperParsingException("object mapping for [" + name + "] tried to parse field [" + currentFieldName + "] as object, but found a concrete value");
}
if (nested.isNested()) {
@ -533,21 +558,30 @@ public class ObjectMapper implements Mapper, AllFieldMapper.IncludeInAll {
token = parser.nextToken();
}
ObjectMapper update = null;
while (token != XContentParser.Token.END_OBJECT) {
ObjectMapper newUpdate = null;
if (token == XContentParser.Token.START_OBJECT) {
serializeObject(context, currentFieldName);
newUpdate = serializeObject(context, currentFieldName);
} else if (token == XContentParser.Token.START_ARRAY) {
serializeArray(context, currentFieldName);
newUpdate = serializeArray(context, currentFieldName);
} else if (token == XContentParser.Token.FIELD_NAME) {
currentFieldName = parser.currentName();
} else if (token == XContentParser.Token.VALUE_NULL) {
serializeNullValue(context, currentFieldName);
} else if (token == null) {
throw new MapperParsingException("object mapping for [" + name + "] tried to parse field [" + currentFieldName + "] as object, but got EOF, has a concrete value been provided to it?", context.mappingsModified());
throw new MapperParsingException("object mapping for [" + name + "] tried to parse field [" + currentFieldName + "] as object, but got EOF, has a concrete value been provided to it?");
} else if (token.isValue()) {
serializeValue(context, currentFieldName, token);
newUpdate = serializeValue(context, currentFieldName, token);
}
token = parser.nextToken();
if (newUpdate != null) {
if (update == null) {
update = newUpdate;
} else {
MapperUtils.merge(update, newUpdate);
}
}
}
// restore the original path type
context.path().pathType(origPathType);
@ -577,6 +611,7 @@ public class ObjectMapper implements Mapper, AllFieldMapper.IncludeInAll {
}
}
}
return update;
}
private void serializeNullValue(ParseContext context, String lastFieldName) throws IOException {
@ -585,54 +620,51 @@ public class ObjectMapper implements Mapper, AllFieldMapper.IncludeInAll {
if (mapper != null) {
if (mapper instanceof FieldMapper) {
if (!((FieldMapper) mapper).supportsNullValue()) {
throw new MapperParsingException("no object mapping found for null value in [" + lastFieldName + "]", context.mappingsModified());
throw new MapperParsingException("no object mapping found for null value in [" + lastFieldName + "]");
}
}
mapper.parse(context);
} else if (dynamic == Dynamic.STRICT) {
throw new StrictDynamicMappingException(fullPath, lastFieldName, context.mappingsModified());
throw new StrictDynamicMappingException(fullPath, lastFieldName);
}
}
private void serializeObject(final ParseContext context, String currentFieldName) throws IOException {
private ObjectMapper serializeObject(final ParseContext context, String currentFieldName) throws IOException {
if (currentFieldName == null) {
throw new MapperParsingException("object mapping [" + name + "] trying to serialize an object with no field associated with it, current value [" + context.parser().textOrNull() + "]", context.mappingsModified());
throw new MapperParsingException("object mapping [" + name + "] trying to serialize an object with no field associated with it, current value [" + context.parser().textOrNull() + "]");
}
context.path().add(currentFieldName);
ObjectMapper update = null;
Mapper objectMapper = mappers.get(currentFieldName);
if (objectMapper != null) {
objectMapper.parse(context);
final Mapper subUpdate = objectMapper.parse(context);
if (subUpdate != null) {
// propagate mapping update
update = mappingUpdate(subUpdate);
}
} else {
Dynamic dynamic = this.dynamic;
if (dynamic == null) {
dynamic = context.root().dynamic();
}
if (dynamic == Dynamic.STRICT) {
throw new StrictDynamicMappingException(fullPath, currentFieldName, context.mappingsModified());
throw new StrictDynamicMappingException(fullPath, currentFieldName);
} else if (dynamic == Dynamic.TRUE) {
// we sync here just so we won't add it twice. It's not the end of the world
// to sync here since next operations will get it before
synchronized (mutex) {
objectMapper = mappers.get(currentFieldName);
if (objectMapper == null) {
// remove the current field name from path, since template search and the object builder add it as well...
context.path().remove();
Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "object");
if (builder == null) {
builder = MapperBuilders.object(currentFieldName).enabled(true).pathType(pathType);
// if this is a non root object, then explicitly set the dynamic behavior if set
if (!(this instanceof RootObjectMapper) && this.dynamic != Defaults.DYNAMIC) {
((Builder) builder).dynamic(this.dynamic);
}
}
BuilderContext builderContext = new BuilderContext(context.indexSettings(), context.path());
objectMapper = builder.build(builderContext);
putDynamicMapper(context, currentFieldName, objectMapper);
} else {
objectMapper.parse(context);
// remove the current field name from path, since template search and the object builder add it as well...
context.path().remove();
Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "object");
if (builder == null) {
builder = MapperBuilders.object(currentFieldName).enabled(true).pathType(pathType);
// if this is a non root object, then explicitly set the dynamic behavior if set
if (!(this instanceof RootObjectMapper) && this.dynamic != Defaults.DYNAMIC) {
((Builder) builder).dynamic(this.dynamic);
}
}
BuilderContext builderContext = new BuilderContext(context.indexSettings(), context.path());
objectMapper = builder.build(builderContext);
context.path().add(currentFieldName);
update = mappingUpdate(MapperUtils.parseAndMergeUpdate(objectMapper, context));
} else {
// not dynamic, read everything up to end object
context.parser().skipChildren();
@ -640,9 +672,10 @@ public class ObjectMapper implements Mapper, AllFieldMapper.IncludeInAll {
}
context.path().remove();
return update;
}
private void serializeArray(ParseContext context, String lastFieldName) throws IOException {
private ObjectMapper serializeArray(ParseContext context, String lastFieldName) throws IOException {
String arrayFieldName = lastFieldName;
Mapper mapper = mappers.get(lastFieldName);
if (mapper != null) {
@ -650,9 +683,15 @@ public class ObjectMapper implements Mapper, AllFieldMapper.IncludeInAll {
// expects an array, if so we pass the context straight to the mapper and if not
// we serialize the array components
if (mapper instanceof ArrayValueMapperParser) {
mapper.parse(context);
final Mapper subUpdate = mapper.parse(context);
if (subUpdate != null) {
// propagate the mapping update
return mappingUpdate(subUpdate);
} else {
return null;
}
} else {
serializeNonDynamicArray(context, lastFieldName, arrayFieldName);
return serializeNonDynamicArray(context, lastFieldName, arrayFieldName);
}
} else {
@ -661,278 +700,217 @@ public class ObjectMapper implements Mapper, AllFieldMapper.IncludeInAll {
dynamic = context.root().dynamic();
}
if (dynamic == Dynamic.STRICT) {
throw new StrictDynamicMappingException(fullPath, arrayFieldName, context.mappingsModified());
throw new StrictDynamicMappingException(fullPath, arrayFieldName);
} else if (dynamic == Dynamic.TRUE) {
// we sync here just so we won't add it twice. It's not the end of the world
// to sync here since next operations will get it before
synchronized (mutex) {
mapper = mappers.get(arrayFieldName);
if (mapper == null) {
Mapper.Builder builder = context.root().findTemplateBuilder(context, arrayFieldName, "object");
if (builder == null) {
serializeNonDynamicArray(context, lastFieldName, arrayFieldName);
return;
}
BuilderContext builderContext = new BuilderContext(context.indexSettings(), context.path());
mapper = builder.build(builderContext);
if (mapper != null && mapper instanceof ArrayValueMapperParser) {
putDynamicMapper(context, arrayFieldName, mapper);
} else {
serializeNonDynamicArray(context, lastFieldName, arrayFieldName);
}
} else {
serializeNonDynamicArray(context, lastFieldName, arrayFieldName);
}
Mapper.Builder builder = context.root().findTemplateBuilder(context, arrayFieldName, "object");
if (builder == null) {
return serializeNonDynamicArray(context, lastFieldName, arrayFieldName);
}
BuilderContext builderContext = new BuilderContext(context.indexSettings(), context.path());
mapper = builder.build(builderContext);
if (mapper != null && mapper instanceof ArrayValueMapperParser) {
context.path().add(arrayFieldName);
mapper = MapperUtils.parseAndMergeUpdate(mapper, context);
return mappingUpdate(mapper);
} else {
return serializeNonDynamicArray(context, lastFieldName, arrayFieldName);
}
} else {
serializeNonDynamicArray(context, lastFieldName, arrayFieldName);
return serializeNonDynamicArray(context, lastFieldName, arrayFieldName);
}
}
}
private void putDynamicMapper(ParseContext context, String arrayFieldName, Mapper mapper) throws IOException {
// ...now re add it
context.path().add(arrayFieldName);
context.setMappingsModified();
if (context.isWithinNewMapper()) {
// within a new mapper, no need to traverse,
// just parse
mapper.parse(context);
} else {
// create a context of new mapper, so we batch
// aggregate all the changes within
// this object mapper once, and traverse all of
// them to add them in a single go
context.setWithinNewMapper();
try {
mapper.parse(context);
FieldMapperListener.Aggregator newFields = new FieldMapperListener.Aggregator();
ObjectMapperListener.Aggregator newObjects = new ObjectMapperListener.Aggregator();
mapper.traverse(newFields);
mapper.traverse(newObjects);
// callback on adding those fields!
context.docMapper().addFieldMappers(newFields.mappers);
context.docMapper().addObjectMappers(newObjects.mappers);
} finally {
context.clearWithinNewMapper();
}
}
// only put after we traversed and did the
// callbacks, so other parsing won't see it only
// after we
// properly traversed it and adding the mappers
putMapper(mapper);
}
private void serializeNonDynamicArray(ParseContext context, String lastFieldName, String arrayFieldName) throws IOException {
private ObjectMapper serializeNonDynamicArray(ParseContext context, String lastFieldName, String arrayFieldName) throws IOException {
XContentParser parser = context.parser();
XContentParser.Token token;
while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
if (token == XContentParser.Token.START_OBJECT) {
serializeObject(context, lastFieldName);
return serializeObject(context, lastFieldName);
} else if (token == XContentParser.Token.START_ARRAY) {
serializeArray(context, lastFieldName);
return serializeArray(context, lastFieldName);
} else if (token == XContentParser.Token.FIELD_NAME) {
lastFieldName = parser.currentName();
} else if (token == XContentParser.Token.VALUE_NULL) {
serializeNullValue(context, lastFieldName);
} else if (token == null) {
throw new MapperParsingException("object mapping for [" + name + "] with array for [" + arrayFieldName + "] tried to parse as array, but got EOF, is there a mismatch in types for the same field?", context.mappingsModified());
throw new MapperParsingException("object mapping for [" + name + "] with array for [" + arrayFieldName + "] tried to parse as array, but got EOF, is there a mismatch in types for the same field?");
} else {
serializeValue(context, lastFieldName, token);
return serializeValue(context, lastFieldName, token);
}
}
return null;
}
private void serializeValue(final ParseContext context, String currentFieldName, XContentParser.Token token) throws IOException {
private ObjectMapper serializeValue(final ParseContext context, String currentFieldName, XContentParser.Token token) throws IOException {
if (currentFieldName == null) {
throw new MapperParsingException("object mapping [" + name + "] trying to serialize a value with no field associated with it, current value [" + context.parser().textOrNull() + "]", context.mappingsModified());
throw new MapperParsingException("object mapping [" + name + "] trying to serialize a value with no field associated with it, current value [" + context.parser().textOrNull() + "]");
}
Mapper mapper = mappers.get(currentFieldName);
if (mapper != null) {
mapper.parse(context);
Mapper subUpdate = mapper.parse(context);
if (subUpdate == null) {
return null;
}
return mappingUpdate(subUpdate);
} else {
parseDynamicValue(context, currentFieldName, token);
return parseDynamicValue(context, currentFieldName, token);
}
}
public void parseDynamicValue(final ParseContext context, String currentFieldName, XContentParser.Token token) throws IOException {
public ObjectMapper parseDynamicValue(final ParseContext context, String currentFieldName, XContentParser.Token token) throws IOException {
Dynamic dynamic = this.dynamic;
if (dynamic == null) {
dynamic = context.root().dynamic();
}
if (dynamic == Dynamic.STRICT) {
throw new StrictDynamicMappingException(fullPath, currentFieldName, context.mappingsModified());
throw new StrictDynamicMappingException(fullPath, currentFieldName);
}
if (dynamic == Dynamic.FALSE) {
return;
return null;
}
// we sync here since we don't want to add this field twice to the document mapper
// it's not the end of the world, since we add it to the mappers once we create it
// so next time we won't even get here for this field
synchronized (mutex) {
Mapper mapper = mappers.get(currentFieldName);
if (mapper == null) {
BuilderContext builderContext = new BuilderContext(context.indexSettings(), context.path());
if (token == XContentParser.Token.VALUE_STRING) {
boolean resolved = false;
Mapper mapper = null;
BuilderContext builderContext = new BuilderContext(context.indexSettings(), context.path());
if (token == XContentParser.Token.VALUE_STRING) {
boolean resolved = false;
// do a quick test to see if it fits a dynamic template, if so, use it.
// we need to do it here so we can handle things like attachment templates, where calling
// text (to see if it's a date) causes the binary value to be cleared
if (!resolved) {
Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "string", null);
if (builder != null) {
mapper = builder.build(builderContext);
resolved = true;
}
}
// do a quick test to see if it fits a dynamic template, if so, use it.
// we need to do it here so we can handle things like attachment templates, where calling
// text (to see if it's a date) causes the binary value to be cleared
if (!resolved) {
Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "string", null);
if (builder != null) {
mapper = builder.build(builderContext);
resolved = true;
}
}
if (!resolved && context.root().dateDetection()) {
String text = context.parser().text();
// a safe check since "1" gets parsed as well
if (Strings.countOccurrencesOf(text, ":") > 1 || Strings.countOccurrencesOf(text, "-") > 1 || Strings.countOccurrencesOf(text, "/") > 1) {
for (FormatDateTimeFormatter dateTimeFormatter : context.root().dynamicDateTimeFormatters()) {
try {
dateTimeFormatter.parser().parseMillis(text);
Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "date");
if (builder == null) {
builder = dateField(currentFieldName).dateTimeFormatter(dateTimeFormatter);
}
mapper = builder.build(builderContext);
resolved = true;
break;
} catch (Exception e) {
// failure to parse this, continue
}
}
}
}
if (!resolved && context.root().numericDetection()) {
String text = context.parser().text();
if (!resolved && context.root().dateDetection()) {
String text = context.parser().text();
// a safe check since "1" gets parsed as well
if (Strings.countOccurrencesOf(text, ":") > 1 || Strings.countOccurrencesOf(text, "-") > 1 || Strings.countOccurrencesOf(text, "/") > 1) {
for (FormatDateTimeFormatter dateTimeFormatter : context.root().dynamicDateTimeFormatters()) {
try {
Long.parseLong(text);
Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "long");
dateTimeFormatter.parser().parseMillis(text);
Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "date");
if (builder == null) {
builder = longField(currentFieldName);
builder = dateField(currentFieldName).dateTimeFormatter(dateTimeFormatter);
}
mapper = builder.build(builderContext);
resolved = true;
break;
} catch (Exception e) {
// not a long number
}
if (!resolved) {
try {
Double.parseDouble(text);
Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "double");
if (builder == null) {
builder = doubleField(currentFieldName);
}
mapper = builder.build(builderContext);
resolved = true;
} catch (Exception e) {
// not a double
}
// failure to parse this, continue
}
}
if (!resolved) {
Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "string");
if (builder == null) {
builder = stringField(currentFieldName);
}
mapper = builder.build(builderContext);
}
}
if (!resolved && context.root().numericDetection()) {
String text = context.parser().text();
try {
Long.parseLong(text);
Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "long");
if (builder == null) {
builder = longField(currentFieldName);
}
} else if (token == XContentParser.Token.VALUE_NUMBER) {
XContentParser.NumberType numberType = context.parser().numberType();
if (numberType == XContentParser.NumberType.INT) {
if (context.parser().estimatedNumberType()) {
Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "long");
if (builder == null) {
builder = longField(currentFieldName);
}
mapper = builder.build(builderContext);
} else {
Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "integer");
if (builder == null) {
builder = integerField(currentFieldName);
}
mapper = builder.build(builderContext);
}
} else if (numberType == XContentParser.NumberType.LONG) {
Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "long");
if (builder == null) {
builder = longField(currentFieldName);
}
mapper = builder.build(builderContext);
} else if (numberType == XContentParser.NumberType.FLOAT) {
if (context.parser().estimatedNumberType()) {
Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "double");
if (builder == null) {
builder = doubleField(currentFieldName);
}
mapper = builder.build(builderContext);
} else {
Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "float");
if (builder == null) {
builder = floatField(currentFieldName);
}
mapper = builder.build(builderContext);
}
} else if (numberType == XContentParser.NumberType.DOUBLE) {
mapper = builder.build(builderContext);
resolved = true;
} catch (Exception e) {
// not a long number
}
if (!resolved) {
try {
Double.parseDouble(text);
Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "double");
if (builder == null) {
builder = doubleField(currentFieldName);
}
mapper = builder.build(builderContext);
resolved = true;
} catch (Exception e) {
// not a long number
}
} else if (token == XContentParser.Token.VALUE_BOOLEAN) {
Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "boolean");
}
}
if (!resolved) {
Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "string");
if (builder == null) {
builder = stringField(currentFieldName);
}
mapper = builder.build(builderContext);
}
} else if (token == XContentParser.Token.VALUE_NUMBER) {
XContentParser.NumberType numberType = context.parser().numberType();
if (numberType == XContentParser.NumberType.INT) {
if (context.parser().estimatedNumberType()) {
Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "long");
if (builder == null) {
builder = booleanField(currentFieldName);
}
mapper = builder.build(builderContext);
} else if (token == XContentParser.Token.VALUE_EMBEDDED_OBJECT) {
Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "binary");
if (builder == null) {
builder = binaryField(currentFieldName);
builder = longField(currentFieldName);
}
mapper = builder.build(builderContext);
} else {
Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, null);
if (builder != null) {
mapper = builder.build(builderContext);
} else {
// TODO how do we identify dynamically that its a binary value?
throw new ElasticsearchIllegalStateException("Can't handle serializing a dynamic type with content token [" + token + "] and field name [" + currentFieldName + "]");
Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "integer");
if (builder == null) {
builder = integerField(currentFieldName);
}
mapper = builder.build(builderContext);
}
if (context.isWithinNewMapper()) {
mapper.parse(context);
} else if (numberType == XContentParser.NumberType.LONG) {
Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "long");
if (builder == null) {
builder = longField(currentFieldName);
}
mapper = builder.build(builderContext);
} else if (numberType == XContentParser.NumberType.FLOAT) {
if (context.parser().estimatedNumberType()) {
Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "double");
if (builder == null) {
builder = doubleField(currentFieldName);
}
mapper = builder.build(builderContext);
} else {
context.setWithinNewMapper();
try {
mapper.parse(context);
FieldMapperListener.Aggregator newFields = new FieldMapperListener.Aggregator();
mapper.traverse(newFields);
context.docMapper().addFieldMappers(newFields.mappers);
} finally {
context.clearWithinNewMapper();
Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "float");
if (builder == null) {
builder = floatField(currentFieldName);
}
mapper = builder.build(builderContext);
}
// only put after we traversed and did the callbacks, so other parsing won't see it only after we
// properly traversed it and adding the mappers
putMapper(mapper);
context.setMappingsModified();
} else if (numberType == XContentParser.NumberType.DOUBLE) {
Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "double");
if (builder == null) {
builder = doubleField(currentFieldName);
}
mapper = builder.build(builderContext);
}
} else if (token == XContentParser.Token.VALUE_BOOLEAN) {
Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "boolean");
if (builder == null) {
builder = booleanField(currentFieldName);
}
mapper = builder.build(builderContext);
} else if (token == XContentParser.Token.VALUE_EMBEDDED_OBJECT) {
Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, "binary");
if (builder == null) {
builder = binaryField(currentFieldName);
}
mapper = builder.build(builderContext);
} else {
Mapper.Builder builder = context.root().findTemplateBuilder(context, currentFieldName, null);
if (builder != null) {
mapper = builder.build(builderContext);
} else {
mapper.parse(context);
// TODO how do we identify dynamically that its a binary value?
throw new ElasticsearchIllegalStateException("Can't handle serializing a dynamic type with content token [" + token + "] and field name [" + currentFieldName + "]");
}
}
mapper = MapperUtils.parseAndMergeUpdate(mapper, context);
ObjectMapper update = null;
if (mapper != null) {
update = mappingUpdate(mapper);
}
return update;
}
@Override
@ -966,33 +944,30 @@ public class ObjectMapper implements Mapper, AllFieldMapper.IncludeInAll {
List<Mapper> mappersToPut = new ArrayList<>();
FieldMapperListener.Aggregator newFieldMappers = new FieldMapperListener.Aggregator();
ObjectMapperListener.Aggregator newObjectMappers = new ObjectMapperListener.Aggregator();
for (Mapper mapper : mergeWithObject.mappers.values()) {
    Mapper mergeWithMapper = mapper;
    Mapper mergeIntoMapper = mappers.get(mergeWithMapper.name());
    if (mergeIntoMapper == null) {
        // no mapping, simply add it if not simulating
        if (!mergeContext.mergeFlags().simulate()) {
            mappersToPut.add(mergeWithMapper);
            mergeWithMapper.traverse(newFieldMappers);
            mergeWithMapper.traverse(newObjectMappers);
        }
    } else {
        mergeIntoMapper.merge(mergeWithMapper, mergeContext);
    }
}
if (!newFieldMappers.mappers.isEmpty()) {
    mergeContext.addFieldMappers(newFieldMappers.mappers);
}
if (!newObjectMappers.mappers.isEmpty()) {
    mergeContext.addObjectMappers(newObjectMappers.mappers);
}
// add the mappers only after the administration has been done, so they will not be visible to the parser (which first tries to read with no lock)
for (Mapper mapper : mappersToPut) {
    putMapper(mapper);
}
}
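Editor's note: the ordering above matters because parsing threads read the mappers without taking a lock, so new mappers must be published only after all field and object mapper listeners have run. A standalone sketch of this publish-after-prepare pattern (plain Java, not Elasticsearch code; all names below are illustrative):
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
// Illustrative only: administer new entries first, publish last, so lock-free
// readers never observe an entry whose registration has not yet completed.
public class PublishAfterPrepare {
    private final ConcurrentMap<String, String> visible = new ConcurrentHashMap<>();
    public void addAll(List<String> names) {
        List<String> pending = new ArrayList<>();
        for (String name : names) {
            if (!visible.containsKey(name)) {
                register(name);       // administration (listeners, aggregators, ...)
                pending.add(name);    // collect, but do not publish yet
            }
        }
        for (String name : pending) {
            visible.put(name, name);  // publish only after administration is done
        }
    }
    private void register(String name) {
        // notify listeners here; a reader that later sees the entry can rely on this
    }
}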
protected void doMerge(ObjectMapper mergeWith, MergeContext mergeContext) {

View File

@ -21,6 +21,7 @@ package org.elasticsearch.index.mapper.object;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.joda.FormatDateTimeFormatter;
@ -206,6 +207,14 @@ public class RootObjectMapper extends ObjectMapper {
this.numericDetection = numericDetection;
}
@Override
public ObjectMapper mappingUpdate(Mapper mapper) {
RootObjectMapper update = (RootObjectMapper) super.mappingUpdate(mapper);
// dynamic templates are irrelevant for dynamic mappings updates
update.dynamicTemplates = new DynamicTemplate[0];
return update;
}
public boolean dateDetection() {
return this.dateDetection;
}
@ -231,7 +240,7 @@ public class RootObjectMapper extends ObjectMapper {
String mappingType = dynamicTemplate.mappingType(dynamicType);
Mapper.TypeParser typeParser = parserContext.typeParser(mappingType);
if (typeParser == null) {
throw new MapperParsingException("failed to find type parsed [" + mappingType + "] for [" + name + "]", context.mappingsModified());
throw new MapperParsingException("failed to find type parsed [" + mappingType + "] for [" + name + "]");
}
return typeParser.parse(name, dynamicTemplate.mappingForName(name, dynamicType), parserContext);
}

View File

@ -457,7 +457,7 @@ public class IndexShard extends AbstractIndexShardComponent {
ParsedDocument doc = docMapper.v1().parse(source).setMappingsModified(docMapper);
return new Engine.Create(docMapper.v1(), docMapper.v1().uidMapper().term(doc.uid().stringValue()), doc, version, versionType, origin, startTime, canHaveDuplicates, autoGeneratedId);
} catch (Throwable t) {
if (docMapper.v2() || (t instanceof MapperParsingException && ((MapperParsingException)t).isMappingsModified())) {
if (docMapper.v2()) {
throw new WriteFailureException(t, docMapper.v1().type());
} else {
throw t;
@ -493,7 +493,7 @@ public class IndexShard extends AbstractIndexShardComponent {
ParsedDocument doc = docMapper.v1().parse(source).setMappingsModified(docMapper);
return new Engine.Index(docMapper.v1(), docMapper.v1().uidMapper().term(doc.uid().stringValue()), doc, version, versionType, origin, startTime, canHaveDuplicates);
} catch (Throwable t) {
if (docMapper.v2() || (t instanceof MapperParsingException && ((MapperParsingException) t).isMappingsModified())) {
if (docMapper.v2()) {
throw new WriteFailureException(t, docMapper.v1().type());
} else {
throw t;

View File

@ -37,23 +37,38 @@ import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.mapper.*;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.engine.Engine.Searcher;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.DocumentMapperParser;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.ParseContext.Document;
import org.elasticsearch.index.mapper.internal.IndexFieldMapper;
import org.elasticsearch.index.mapper.internal.SizeFieldMapper;
import org.elasticsearch.index.mapper.internal.SourceFieldMapper;
import org.elasticsearch.index.mapper.internal.TypeFieldMapper;
import org.elasticsearch.test.ElasticsearchSingleNodeTest;
import org.hamcrest.Matchers;
import org.junit.Test;
import java.io.IOException;
import java.util.*;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import static org.elasticsearch.common.io.Streams.copyToBytesFromClasspath;
import static org.elasticsearch.common.io.Streams.copyToStringFromClasspath;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.hamcrest.Matchers.*;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.empty;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.hasItem;
import static org.hamcrest.Matchers.hasSize;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.nullValue;
/**
*
@ -82,7 +97,8 @@ public class SimpleAllMapperTests extends ElasticsearchSingleNodeTest {
@Test
public void testAllMappersNoBoost() throws Exception {
String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/all/noboost-mapping.json");
DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
IndexService index = createIndex("test");
DocumentMapper docMapper = index.mapperService().documentMapperParser().parse(mapping);
byte[] json = copyToBytesFromClasspath("/org/elasticsearch/index/mapper/all/test1.json");
Document doc = docMapper.parse(new BytesArray(json)).rootDoc();
AllField field = (AllField) doc.getField("_all");
@ -93,7 +109,6 @@ public class SimpleAllMapperTests extends ElasticsearchSingleNodeTest {
assertThat(allEntries.fields().contains("simple1"), equalTo(true));
FieldMapper mapper = docMapper.mappers().smartNameFieldMapper("_all");
assertThat(field.fieldType().omitNorms(), equalTo(false));
assertThat(mapper.queryStringTermQuery(new Term("_all", "foobar")), Matchers.instanceOf(TermQuery.class));
}
@Test
@ -110,7 +125,7 @@ public class SimpleAllMapperTests extends ElasticsearchSingleNodeTest {
assertThat(allEntries.fields().contains("simple1"), equalTo(true));
FieldMapper mapper = docMapper.mappers().smartNameFieldMapper("_all");
assertThat(field.fieldType().omitNorms(), equalTo(false));
assertThat(mapper.queryStringTermQuery(new Term("_all", "foobar")), Matchers.instanceOf(TermQuery.class));
assertThat(mapper.queryStringTermQuery(new Term("_all", "foobar")), Matchers.instanceOf(AllTermQuery.class));
}
@ -223,7 +238,6 @@ public class SimpleAllMapperTests extends ElasticsearchSingleNodeTest {
boolean omitNorms = false;
boolean stored = false;
boolean enabled = true;
boolean autoBoost = false;
boolean tv_stored = false;
boolean tv_payloads = false;
boolean tv_offsets = false;
@ -249,9 +263,6 @@ public class SimpleAllMapperTests extends ElasticsearchSingleNodeTest {
if (randomBoolean()) {
booleanOptionList.add(new Tuple<>("enabled", enabled = randomBoolean()));
}
if (randomBoolean()) {
booleanOptionList.add(new Tuple<>("auto_boost", autoBoost = randomBoolean()));
}
if (randomBoolean()) {
booleanOptionList.add(new Tuple<>("store_term_vector_offsets", tv_offsets = randomBoolean()));
}
@ -312,14 +323,6 @@ public class SimpleAllMapperTests extends ElasticsearchSingleNodeTest {
} else {
assertThat(field, nullValue());
}
Term term = new Term("foo", "bar");
Query query = builtDocMapper.allFieldMapper().queryStringTermQuery(term);
if (autoBoost) {
assertThat(query, equalTo((Query)new AllTermQuery(term)));
} else {
assertThat(query, equalTo((Query)new TermQuery(term)));
}
if (similarity == null || similarity.equals("TF/IDF")) {
assertThat(builtDocMapper.allFieldMapper().similarity(), nullValue());
} else {
@ -458,4 +461,19 @@ public class SimpleAllMapperTests extends ElasticsearchSingleNodeTest {
assertThat(e.getDetailedMessage(), containsString("[_all] is always tokenized and cannot have doc values"));
}
}
public void testAutoBoost() throws Exception {
for (boolean boost : new boolean[] {false, true}) {
String index = "test_" + boost;
IndexService indexService = createIndex(index, client().admin().indices().prepareCreate(index).addMapping("type", "foo", "type=string" + (boost ? ",boost=2" : "")));
client().prepareIndex(index, "type").setSource("foo", "bar").get();
client().admin().indices().prepareRefresh(index).get();
Query query = indexService.mapperService().documentMapper("type").allFieldMapper().termQuery("bar", null);
try (Searcher searcher = indexService.shard(0).acquireSearcher("tests")) {
query = searcher.searcher().rewrite(query);
final Class<?> expected = boost ? AllTermQuery.class : TermQuery.class;
assertThat(query, Matchers.instanceOf(expected));
}
}
}
}
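Editor's sketch (not part of this commit): the behavior testAutoBoost verifies can also be observed with an ordinary search against _all, since whether the cheap TermQuery or the payload-aware AllTermQuery runs is now decided at rewrite time rather than by a mapping flag. Assuming the test_true index built above and the usual test-framework imports (org.elasticsearch.action.search.SearchResponse, org.elasticsearch.index.query.QueryBuilders):
SearchResponse response = client().prepareSearch("test_true")
        .setQuery(QueryBuilders.matchQuery("_all", "bar"))
        .get();
// the document is found either way; with a boosted field the rewritten
// payload-aware query folds the boost into the score
assertEquals(1L, response.getHits().getTotalHits());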

View File

@ -19,15 +19,31 @@
package org.elasticsearch.index.mapper.dynamic;
import com.google.common.base.Predicate;
import com.google.common.collect.ImmutableMap;
import org.elasticsearch.Version;
import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse;
import org.elasticsearch.action.index.IndexRequestBuilder;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.settings.ImmutableSettings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.mapper.*;
import org.elasticsearch.index.mapper.ContentPath;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.DocumentMapperParser;
import org.elasticsearch.index.mapper.FieldMappers;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.mapper.ParsedDocument;
import org.elasticsearch.index.mapper.SourceToParse;
import org.elasticsearch.index.mapper.StrictDynamicMappingException;
import org.elasticsearch.test.ElasticsearchSingleNodeTest;
import org.junit.Test;
import java.io.IOException;
import java.util.LinkedHashMap;
@ -39,7 +55,6 @@ import static org.hamcrest.Matchers.nullValue;
public class DynamicMappingTests extends ElasticsearchSingleNodeTest {
@Test
public void testDynamicTrue() throws IOException {
String mapping = jsonBuilder().startObject().startObject("type")
.field("dynamic", "true")
@ -60,7 +75,6 @@ public class DynamicMappingTests extends ElasticsearchSingleNodeTest {
assertThat(doc.rootDoc().get("field2"), equalTo("value2"));
}
@Test
public void testDynamicFalse() throws IOException {
String mapping = jsonBuilder().startObject().startObject("type")
.field("dynamic", "false")
@ -82,7 +96,6 @@ public class DynamicMappingTests extends ElasticsearchSingleNodeTest {
}
@Test
public void testDynamicStrict() throws IOException {
String mapping = jsonBuilder().startObject().startObject("type")
.field("dynamic", "strict")
@ -116,7 +129,6 @@ public class DynamicMappingTests extends ElasticsearchSingleNodeTest {
}
}
@Test
public void testDynamicFalseWithInnerObjectButDynamicSetOnRoot() throws IOException {
String mapping = jsonBuilder().startObject().startObject("type")
.field("dynamic", "false")
@ -140,7 +152,6 @@ public class DynamicMappingTests extends ElasticsearchSingleNodeTest {
assertThat(doc.rootDoc().get("obj1.field2"), nullValue());
}
@Test
public void testDynamicStrictWithInnerObjectButDynamicSetOnRoot() throws IOException {
String mapping = jsonBuilder().startObject().startObject("type")
.field("dynamic", "strict")
@ -173,7 +184,6 @@ public class DynamicMappingTests extends ElasticsearchSingleNodeTest {
assertTrue(mappers != null && mappers.isEmpty() == false);
}
@Test
public void testIndexingFailureDoesStillCreateType() throws IOException, InterruptedException {
XContentBuilder mapping = jsonBuilder().startObject().startObject("_default_")
.field("dynamic", "strict")
@ -202,7 +212,6 @@ public class DynamicMappingTests extends ElasticsearchSingleNodeTest {
}
@Test
public void testTypeCreatedProperly() throws IOException, InterruptedException {
XContentBuilder mapping = jsonBuilder().startObject().startObject("_default_")
.field("dynamic", "strict")
@ -243,7 +252,6 @@ public class DynamicMappingTests extends ElasticsearchSingleNodeTest {
assertNotNull(getMappingsResponse.getMappings().get("test").get("type"));
}
@Test
public void testFieldsCreatedWithPartialParsing() throws IOException, InterruptedException {
XContentBuilder mapping = jsonBuilder().startObject().startObject("doc")
.startObject("properties")
@ -304,4 +312,178 @@ public class DynamicMappingTests extends ElasticsearchSingleNodeTest {
}
}));
}
private String serialize(ToXContent mapper) throws Exception {
XContentBuilder builder = XContentFactory.jsonBuilder().startObject();
mapper.toXContent(builder, new ToXContent.MapParams(ImmutableMap.<String, String>of()));
return builder.endObject().string();
}
private Mapper parse(DocumentMapper mapper, DocumentMapperParser parser, XContentBuilder builder) throws Exception {
Settings settings = ImmutableSettings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build();
ParseContext.InternalParseContext ctx = new ParseContext.InternalParseContext("test", settings, parser, mapper, new ContentPath(0));
SourceToParse source = SourceToParse.source(builder.bytes());
ctx.reset(XContentHelper.createParser(source.source()), new ParseContext.Document(), source, null);
assertEquals(XContentParser.Token.START_OBJECT, ctx.parser().nextToken());
ctx.parser().nextToken();
return mapper.root().parse(ctx);
}
public void testDynamicMappingsNotNeeded() throws Exception {
IndexService indexService = createIndex("test");
DocumentMapperParser parser = indexService.mapperService().documentMapperParser();
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("foo").field("type", "string").endObject().endObject()
.endObject().string();
DocumentMapper mapper = parser.parse(mapping);
Mapper update = parse(mapper, parser, XContentFactory.jsonBuilder().startObject().field("foo", "bar").endObject());
// foo is already defined in the mappings
assertNull(update);
}
public void testField() throws Exception {
IndexService indexService = createIndex("test");
DocumentMapperParser parser = indexService.mapperService().documentMapperParser();
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").endObject().endObject()
.endObject().string();
DocumentMapper mapper = parser.parse(mapping);
assertEquals(mapping, serialize(mapper));
Mapper update = parse(mapper, parser, XContentFactory.jsonBuilder().startObject().field("foo", "bar").endObject());
assertNotNull(update);
// original mapping not modified
assertEquals(mapping, serialize(mapper));
// but we have an update
assertEquals("{\"type\":{\"properties\":{\"foo\":{\"type\":\"string\"}}}}", serialize(update));
}
public void testIncremental() throws Exception {
IndexService indexService = createIndex("test");
DocumentMapperParser parser = indexService.mapperService().documentMapperParser();
// Make sure that mapping updates are incremental; this is important for performance,
// otherwise every new field introduction would run in linear time with the total number of fields
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("foo").field("type", "string").endObject().endObject()
.endObject().string();
DocumentMapper mapper = parser.parse(mapping);
assertEquals(mapping, serialize(mapper));
Mapper update = parse(mapper, parser, XContentFactory.jsonBuilder().startObject().field("foo", "bar").field("bar", "baz").endObject());
assertNotNull(update);
// original mapping not modified
assertEquals(mapping, serialize(mapper));
// but we have an update
assertEquals(XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties")
// foo is NOT in the update
.startObject("bar").field("type", "string").endObject()
.endObject().endObject().string(), serialize(update));
}
public void testIntroduceTwoFields() throws Exception {
IndexService indexService = createIndex("test");
DocumentMapperParser parser = indexService.mapperService().documentMapperParser();
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").endObject().endObject()
.endObject().string();
DocumentMapper mapper = parser.parse(mapping);
assertEquals(mapping, serialize(mapper));
Mapper update = parse(mapper, parser, XContentFactory.jsonBuilder().startObject().field("foo", "bar").field("bar", "baz").endObject());
assertNotNull(update);
// original mapping not modified
assertEquals(mapping, serialize(mapper));
// but we have an update
assertEquals(XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties")
.startObject("bar").field("type", "string").endObject()
.startObject("foo").field("type", "string").endObject()
.endObject().endObject().string(), serialize(update));
}
public void testObject() throws Exception {
IndexService indexService = createIndex("test");
DocumentMapperParser parser = indexService.mapperService().documentMapperParser();
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").endObject().endObject()
.endObject().string();
DocumentMapper mapper = parser.parse(mapping);
assertEquals(mapping, serialize(mapper));
Mapper update = parse(mapper, parser, XContentFactory.jsonBuilder().startObject().startObject("foo").startObject("bar").field("baz", "foo").endObject().endObject().endObject());
assertNotNull(update);
// original mapping not modified
assertEquals(mapping, serialize(mapper));
// but we have an update
assertEquals(XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties")
.startObject("foo").startObject("properties").startObject("bar").startObject("properties").startObject("baz").field("type", "string").endObject().endObject().endObject().endObject().endObject()
.endObject().endObject().endObject().string(), serialize(update));
}
public void testArray() throws Exception {
IndexService indexService = createIndex("test");
DocumentMapperParser parser = indexService.mapperService().documentMapperParser();
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").endObject().endObject()
.endObject().string();
DocumentMapper mapper = parser.parse(mapping);
assertEquals(mapping, serialize(mapper));
Mapper update = parse(mapper, parser, XContentFactory.jsonBuilder().startObject().startArray("foo").value("bar").value("baz").endArray().endObject());
assertNotNull(update);
// original mapping not modified
assertEquals(mapping, serialize(mapper));
// but we have an update
assertEquals(XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties")
.startObject("foo").field("type", "string").endObject()
.endObject().endObject().endObject().string(), serialize(update));
}
public void testInnerDynamicMapping() throws Exception {
IndexService indexService = createIndex("test");
DocumentMapperParser parser = indexService.mapperService().documentMapperParser();
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties")
.startObject("foo").field("type", "object").endObject()
.endObject().endObject().endObject().string();
DocumentMapper mapper = parser.parse(mapping);
assertEquals(mapping, serialize(mapper));
Mapper update = parse(mapper, parser, XContentFactory.jsonBuilder().startObject().startObject("foo").startObject("bar").field("baz", "foo").endObject().endObject().endObject());
assertNotNull(update);
// original mapping not modified
assertEquals(mapping, serialize(mapper));
// but we have an update
assertEquals(XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties")
.startObject("foo").startObject("properties").startObject("bar").startObject("properties").startObject("baz").field("type", "string").endObject().endObject().endObject().endObject().endObject()
.endObject().endObject().endObject().string(), serialize(update));
}
public void testComplexArray() throws Exception {
IndexService indexService = createIndex("test");
DocumentMapperParser parser = indexService.mapperService().documentMapperParser();
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").endObject().endObject()
.endObject().string();
DocumentMapper mapper = parser.parse(mapping);
assertEquals(mapping, serialize(mapper));
Mapper update = parse(mapper, parser, XContentFactory.jsonBuilder().startObject().startArray("foo")
.startObject().field("bar", "baz").endObject()
.startObject().field("baz", 3).endObject()
.endArray().endObject());
assertEquals(mapping, serialize(mapper));
assertEquals(XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties")
.startObject("foo").startObject("properties")
.startObject("bar").field("type", "string").endObject()
.startObject("baz").field("type", "long").endObject()
.endObject().endObject()
.endObject().endObject().endObject().string(), serialize(update));
}
}
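Editor's sketch (an assumption, not in the commit): the unit tests above exercise the update objects directly; end to end, the same dynamic updates surface through the get-mappings API once the cluster state has been published (which may need a short wait, as testTypeCreatedProperly does). A minimal test-style illustration:
public void testDynamicUpdateVisibleViaApi() throws Exception {
    createIndex("test");
    client().prepareIndex("test", "type", "1").setSource("foo", "bar").get();
    // the dynamic update for "foo" travels the same path as an API mapping update
    GetMappingsResponse response = client().admin().indices().prepareGetMappings("test").get();
    assertNotNull(response.getMappings().get("test").get("type"));
}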

View File

@ -185,7 +185,7 @@ public class ExternalMapper extends AbstractFieldMapper<Object> {
}
@Override
public void parse(ParseContext context) throws IOException {
public Mapper parse(ParseContext context) throws IOException {
byte[] bytes = "Hello world".getBytes(Charset.defaultCharset());
binMapper.parse(context.createExternalValueContext(bytes));
@ -210,6 +210,7 @@ public class ExternalMapper extends AbstractFieldMapper<Object> {
if (copyTo != null) {
copyTo.parse(context);
}
return null;
}
@Override

View File

@ -39,7 +39,8 @@ public class ExternalRootMapper implements RootMapper {
}
@Override
public void parse(ParseContext context) throws IOException {
public Mapper parse(ParseContext context) throws IOException {
return null;
}
@Override

View File

@ -68,7 +68,7 @@ public class UpdateMappingOnClusterTests extends ElasticsearchIntegrationTest {
"[_all] has different store_term_vector_payloads values",
"[_all] has different analyzer",
"[_all] has different similarity"};
// auto_boost and fielddata and search_analyzer should not report conflict
// fielddata and search_analyzer should not report conflict
testConflict(mapping, mappingUpdate, errorMessage);
}

View File

@ -2,7 +2,6 @@
"mappings": {
"type": {
"_all": {
"auto_boost": true,
"store": true,
"store_term_vectors": true,
"store_term_vector_offsets": true,
@ -29,4 +28,4 @@
}
}
}
}
}

View File

@ -1,7 +1,6 @@
{
"type": {
"_all": {
"auto_boost": false,
"store": false,
"enabled": false,
"store_term_vectors": false,
@ -17,4 +16,4 @@
}
}
}
}
}

View File

@ -372,11 +372,6 @@ public abstract class ElasticsearchIntegrationTest extends ElasticsearchTestCase
.field("enabled", randomBoolean())
.endObject();
}
if (randomBoolean()) {
mappings.startObject(AllFieldMapper.NAME)
.field("auto_boost", true)
.endObject();
}
if (randomBoolean()) {
mappings.startObject(SourceFieldMapper.NAME)
.field("compress", randomBoolean())